mirror of
https://github.com/MLSysBook/TinyTorch.git
synced 2026-05-07 12:25:30 -05:00
✅ Rename all module directories: 00_setup → 01_setup, etc. ✅ Update convert_modules.py mappings for new directory names ✅ Update _toc.yml file paths and titles (1-14 instead of 0-13) ✅ Regenerate all overview pages with new numbering ✅ Fix all broken references in usage-paths and intro ✅ Update chapter references to use natural numbering Benefits: - More intuitive course progression starting from 1 - Matches academic course numbering conventions - Eliminates confusion about 'Module 0' concept - Cleaner mental model for students and instructors - All references and links properly updated Complete transformation: 14 modules now numbered 01-14
27 lines
728 B
YAML
# TinyTorch Module Metadata
# Essential system information for CLI tools and build systems

name: "optimizers"
title: "Optimizers"
description: "Gradient-based parameter optimization algorithms"

# Dependencies - Used by CLI for module ordering and prerequisites
dependencies:
  prerequisites: ["setup", "tensor", "autograd"]
  enables: ["training", "compression", "mlops"]

# Package Export - What gets built into tinytorch package
exports_to: "tinytorch.core.optimizers"

# File Structure - What files exist in this module
files:
  dev_file: "optimizers_dev.py"
  readme: "README.md"
  tests: "inline"

# Components - What's implemented in this module
components:
  - "SGD"
  - "Adam"
  - "StepLR"
  - "gradient_descent_step"