{"data":{"full_name":"ross-sec/fractal_attention_analysis","name":"fractal_attention_analysis","description":"A mathematical framework for analyzing transformer attention mechanisms using fractal geometry and golden ratio transformations. FAA provides deep insights into how Large Language Models (LLMs) process and attend to information.","stars":0.0,"forks":0.0,"language":"Python","license":"NOASSERTION","archived":0.0,"subcategory":"attention-mechanism-implementations","last_pushed_at":"2025-11-29T11:40:48+00:00","pypi_package":null,"npm_package":null,"downloads_monthly":0.0,"dependency_count":0.0,"commits_30d":null,"reverse_dep_count":0.0,"maintenance_score":6.0,"adoption_score":0.0,"maturity_score":9.0,"community_score":0.0,"quality_score":15.0,"quality_tier":"experimental","risk_flags":"['no_package', 'no_dependents']"},"meta":{"timestamp":"2026-04-11T03:00:52.918936+00:00"}}