Improve milestone 05 (Transformer) with letters for better visualization

- Enhanced attention proof to use A-Z letters instead of numbers
- Shows MCYWUH → HUWYCM instead of [1,2,3] → [3,2,1] (see the sketch below)
- More intuitive and fun for students
- Removed the quickdemo, generation, and dialogue scripts (too slow; output was gibberish)
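
For reference, a minimal sketch of the letter-based reversal task the new proof script demonstrates; the encode/decode helpers below are illustrative assumptions, not the script's actual code:

# Illustrative sketch of sequence reversal with A-Z letters.
# encode/decode are hypothetical helpers, not the proof script's API.
LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"

def encode(word: str) -> list[int]:
    """Map each letter to a token id (A=0 ... Z=25)."""
    return [LETTERS.index(ch) for ch in word]

def decode(tokens: list[int]) -> str:
    """Map token ids back to letters."""
    return "".join(LETTERS[t] for t in tokens)

src = "MCYWUH"
tgt = decode(list(reversed(encode(src))))
print(src, "→", tgt)  # MCYWUH → HUWYCM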
Vijay Janapa Reddi
2025-12-02 23:33:58 -08:00
parent e11195c377
commit 7f6dd19c10
9 changed files with 1185 additions and 1896 deletions


@@ -80,9 +80,9 @@ MILESTONE_SCRIPTS = {
         "name": "Transformer Era (2017)",
         "year": 2017,
         "title": "Attention is All You Need",
-        "script": "milestones/05_2017_transformer/03_quickdemo.py",
+        "script": "milestones/05_2017_transformer/00_vaswani_attention_proof.py",
         "required_modules": list(range(1, 14)),
-        "description": "Build transformer with self-attention",
+        "description": "Prove attention works with sequence reversal",
         "historical_context": "Vaswani et al. revolutionized NLP",
         "emoji": "🤖"
     },
@@ -90,10 +90,10 @@ MILESTONE_SCRIPTS = {
         "id": "06",
         "name": "MLPerf Benchmarks (2018)",
         "year": 2018,
-        "title": "Production ML Systems",
-        "script": "milestones/06_2018_mlperf/02_compression.py",
-        "required_modules": list(range(1, 20)),
-        "description": "Optimize for production deployment",
+        "title": "The Optimization Olympics",
+        "script": "milestones/06_2018_mlperf/01_optimization_olympics.py",
+        "required_modules": list(range(1, 17)),  # Needs up to Module 16 (Compression)
+        "description": "Compress and accelerate your neural network",
         "historical_context": "MLPerf standardized ML benchmarks",
         "emoji": "🏆"
     }
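
For context, a minimal sketch of how a registry entry like these might be gated on completed modules; run_milestone and completed_modules are hypothetical names, not the repo's confirmed API:

import subprocess

# Hypothetical consumer of a MILESTONE_SCRIPTS entry (names assumed).
def run_milestone(entry: dict, completed_modules: set[int]) -> None:
    missing = [m for m in entry["required_modules"] if m not in completed_modules]
    if missing:
        raise RuntimeError(f"Finish modules {missing} before running {entry['name']}")
    # list(range(1, 17)) covers modules 1..16, matching the new requirement.
    subprocess.run(["python", entry["script"]], check=True)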