{"data":{"full_name":"buhsnn/eli5-gpt2-language-model","name":"eli5-gpt2-language-model","description":"Decoder-only Transformer (GPT-2 style) trained from scratch on the ELI5 dataset for next-token prediction, achieving improved test perplexity over the assignment baseline.","stars":1.0,"forks":0.0,"language":"Jupyter Notebook","license":null,"archived":0.0,"subcategory":"gpt2-pretraining-fine-tuning","last_pushed_at":"2026-03-10T11:43:25+00:00","pypi_package":null,"npm_package":null,"downloads_monthly":0.0,"dependency_count":0.0,"commits_30d":null,"reverse_dep_count":0.0,"maintenance_score":10.0,"adoption_score":1.0,"maturity_score":3.0,"community_score":0.0,"quality_score":14.0,"quality_tier":"experimental","risk_flags":"['no_license', 'no_package', 'no_dependents']"},"meta":{"timestamp":"2026-04-14T22:01:30.603637+00:00"}}