{"data":{"full_name":"AnderssonProgramming/llm-embeddings-text-preprocessing","name":"llm-embeddings-text-preprocessing","description":"LLM text preprocessing and embedding pipeline implementation for the Enterprise Architecture (AREP) course at Escuela Colombiana de Ingeniería Julio Garavito. Based on \"Build a Large Language Model (From Scratch),\" it covers BPE tokenization, sliding window sampling experiments, and positional embedding integration using PyTorch.","stars":0,"forks":0,"language":"Jupyter Notebook","license":"MIT","archived":false,"subcategory":"embedding-model-tuning","last_pushed_at":"2026-02-11T15:23:06+00:00","pypi_package":null,"npm_package":null,"downloads_monthly":0,"dependency_count":0,"commits_30d":null,"reverse_dep_count":0,"maintenance_score":10.0,"adoption_score":0.0,"maturity_score":9.0,"community_score":0.0,"quality_score":19.0,"quality_tier":"experimental","risk_flags":["no_package","no_dependents"]},"meta":{"timestamp":"2026-04-12T10:27:57.846307+00:00"}}