{"data":{"full_name":"InternLM/xtuner","name":"xtuner","description":"A Next-Generation Training Engine Built for Ultra-Large MoE Models","stars":5096,"forks":405,"language":"Python","license":"Apache-2.0","archived":false,"subcategory":"mixture-of-experts-llms","last_pushed_at":"2026-03-13T08:14:34+00:00","pypi_package":"xtuner","npm_package":null,"downloads_monthly":1643,"dependency_count":15,"commits_30d":72,"reverse_dep_count":0,"maintenance_score":25,"adoption_score":17,"maturity_score":25,"community_score":19,"quality_score":86,"quality_tier":"verified","risk_flags":[]},"meta":{"timestamp":"2026-04-05T18:25:24.531235+00:00"}}