{"data":{"full_name":"protectai/llm-guard","name":"llm-guard","description":"The Security Toolkit for LLM Interactions","stars":2660,"forks":353,"language":"Python","license":"MIT","archived":false,"subcategory":"prompt-injection-security","last_pushed_at":"2025-12-15T13:07:25+00:00","pypi_package":"llm-guard","npm_package":null,"downloads_monthly":329796,"dependency_count":12,"commits_30d":null,"reverse_dep_count":1,"maintenance_score":6,"adoption_score":21,"maturity_score":25,"community_score":22,"quality_score":74,"quality_tier":"verified","risk_flags":[]},"meta":{"timestamp":"2026-04-05T18:32:10.303035+00:00"}}