{"data":{"full_name":"Brokttv/optimizers-from-scratch","name":"optimizers-from-scratch","description":"training models with different optimizers using NumPy only. Featuring SGD, Adam, Adagrad, NAG, RMSProp, and Momentum. This repo also includes a benchmark against Pytorch developed optims.","stars":13.0,"forks":1.0,"language":"Jupyter Notebook","license":"MIT","archived":0.0,"subcategory":"gradient-descent-optimizers","last_pushed_at":"2025-09-09T22:31:14+00:00","pypi_package":null,"npm_package":null,"downloads_monthly":0.0,"dependency_count":0.0,"commits_30d":null,"reverse_dep_count":0.0,"maintenance_score":2.0,"adoption_score":5.0,"maturity_score":15.0,"community_score":6.0,"quality_score":28.0,"quality_tier":"experimental","risk_flags":"['stale_6m', 'no_package', 'no_dependents']"},"meta":{"timestamp":"2026-04-09T00:00:11.423127+00:00"}}