{"data":{"full_name":"shivanshka/Multilingual-Toxic-Comment-Classifier","name":"Multilingual-Toxic-Comment-Classifier","description":"Created a system which will detect whether any text (comment) is toxic or not. It can predict on any language","stars":1,"forks":0,"language":"Jupyter Notebook","license":"Apache-2.0","archived":false,"subcategory":"hate-speech-detection","last_pushed_at":"2024-06-23T10:47:16+00:00","pypi_package":null,"npm_package":null,"downloads_monthly":0,"dependency_count":0,"commits_30d":null,"reverse_dep_count":0,"maintenance_score":0.0,"adoption_score":1.0,"maturity_score":9.0,"community_score":0.0,"quality_score":10.0,"quality_tier":"experimental","risk_flags":["stale_6m","no_package","no_dependents"],"meta":{"timestamp":"2026-04-08T11:16:03.286977+00:00"}}