{"data":{"full_name":"EleutherAI/gpt-neox","name":"gpt-neox","description":"An implementation of model parallel autoregressive transformers on GPUs, based on the Megatron and DeepSpeed libraries","stars":7399.0,"forks":1100.0,"language":"Python","license":"Apache-2.0","archived":0.0,"subcategory":"gpt2-pretraining-fine-tuning","last_pushed_at":"2026-02-03T00:16:14+00:00","pypi_package":null,"npm_package":null,"downloads_monthly":0.0,"dependency_count":0.0,"commits_30d":0.0,"reverse_dep_count":0.0,"maintenance_score":10.0,"adoption_score":10.0,"maturity_score":16.0,"community_score":22.0,"quality_score":58.0,"quality_tier":"established","risk_flags":"['no_package', 'no_dependents']"},"meta":{"timestamp":"2026-04-05T19:13:49.170172+00:00"}}