Files
TinyTorch/tinytorch/_modidx.py
Vijay Janapa Reddi b47c8ef259 feat: Create clean modular architecture with activations → layers separation
🏗️ Major architectural improvement implementing clean separation of concerns:

 NEW: Activations Module
- Complete activations module with ReLU, Sigmoid, Tanh implementations
- Educational NBDev structure with student TODOs + instructor solutions
- Comprehensive testing suite (24 tests) with mathematical correctness validation
- Visual learning features with matplotlib plotting (disabled during testing)
- Clean export to tinytorch.core.activations

🔧 REFACTOR: Layers Module
- Removed duplicate activation function implementations
- Clean import from activations module: 'from tinytorch.core.activations import ReLU, Sigmoid, Tanh'
- Updated documentation to reflect modular architecture
- Preserved all existing functionality while improving code organization

🧪 TESTING: Comprehensive Test Coverage
- All 24 activations tests passing 
- All 17 layers tests passing 
- Integration tests verify clean architecture works end-to-end
- CLI testing with 'tito test --module' works for both modules

📦 ARCHITECTURE: Clean Dependency Graph
- activations (math functions) → layers (building blocks) → networks (applications)
- Separation of concerns: pure math vs. neural network components
- Reusable components across future modules
- Single source of truth for activation implementations

🎓 PEDAGOGY: Enhanced Learning Experience
- Week-sized chunks: students master activations, then build layers
- Clear progression from mathematical foundations to applications
- Real-world software architecture patterns
- Modular design principles in practice

This establishes the foundation for scalable, maintainable ML systems education.
2025-07-10 21:32:25 -04:00

72 lines
7.8 KiB
Python

# Autogenerated by nbdev
d = { 'settings': { 'branch': 'main',
'doc_baseurl': '/TinyTorch/',
'doc_host': 'https://tinytorch.github.io',
'git_url': 'https://github.com/tinytorch/TinyTorch/',
'lib_path': 'tinytorch'},
'syms': { 'tinytorch.core.activations': {},
'tinytorch.core.layers': { 'tinytorch.core.layers.Dense': ('layers/layers_dev.html#dense', 'tinytorch/core/layers.py'),
'tinytorch.core.layers.Dense.__call__': ( 'layers/layers_dev.html#dense.__call__',
'tinytorch/core/layers.py'),
'tinytorch.core.layers.Dense.__init__': ( 'layers/layers_dev.html#dense.__init__',
'tinytorch/core/layers.py'),
'tinytorch.core.layers.Dense.forward': ( 'layers/layers_dev.html#dense.forward',
'tinytorch/core/layers.py'),
'tinytorch.core.layers.ReLU': ('layers/layers_dev.html#relu', 'tinytorch/core/layers.py'),
'tinytorch.core.layers.ReLU.__call__': ( 'layers/layers_dev.html#relu.__call__',
'tinytorch/core/layers.py'),
'tinytorch.core.layers.ReLU.forward': ( 'layers/layers_dev.html#relu.forward',
'tinytorch/core/layers.py'),
'tinytorch.core.layers.Sigmoid': ('layers/layers_dev.html#sigmoid', 'tinytorch/core/layers.py'),
'tinytorch.core.layers.Sigmoid.__call__': ( 'layers/layers_dev.html#sigmoid.__call__',
'tinytorch/core/layers.py'),
'tinytorch.core.layers.Sigmoid.forward': ( 'layers/layers_dev.html#sigmoid.forward',
'tinytorch/core/layers.py'),
'tinytorch.core.layers.Tanh': ('layers/layers_dev.html#tanh', 'tinytorch/core/layers.py'),
'tinytorch.core.layers.Tanh.__call__': ( 'layers/layers_dev.html#tanh.__call__',
'tinytorch/core/layers.py'),
'tinytorch.core.layers.Tanh.forward': ( 'layers/layers_dev.html#tanh.forward',
'tinytorch/core/layers.py')},
'tinytorch.core.tensor': { 'tinytorch.core.tensor.Tensor': ('tensor/tensor_dev.html#tensor', 'tinytorch/core/tensor.py'),
'tinytorch.core.tensor.Tensor.__init__': ( 'tensor/tensor_dev.html#tensor.__init__',
'tinytorch/core/tensor.py'),
'tinytorch.core.tensor.Tensor.__repr__': ( 'tensor/tensor_dev.html#tensor.__repr__',
'tinytorch/core/tensor.py'),
'tinytorch.core.tensor.Tensor.data': ( 'tensor/tensor_dev.html#tensor.data',
'tinytorch/core/tensor.py'),
'tinytorch.core.tensor.Tensor.dtype': ( 'tensor/tensor_dev.html#tensor.dtype',
'tinytorch/core/tensor.py'),
'tinytorch.core.tensor.Tensor.shape': ( 'tensor/tensor_dev.html#tensor.shape',
'tinytorch/core/tensor.py'),
'tinytorch.core.tensor.Tensor.size': ( 'tensor/tensor_dev.html#tensor.size',
'tinytorch/core/tensor.py'),
'tinytorch.core.tensor._add_arithmetic_ops': ( 'tensor/tensor_dev.html#_add_arithmetic_ops',
'tinytorch/core/tensor.py'),
'tinytorch.core.tensor._add_utility_methods': ( 'tensor/tensor_dev.html#_add_utility_methods',
'tinytorch/core/tensor.py')},
'tinytorch.core.utils': { 'tinytorch.core.utils.DeveloperProfile': ( 'setup/setup_dev.html#developerprofile',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.DeveloperProfile.__init__': ( 'setup/setup_dev.html#developerprofile.__init__',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.DeveloperProfile.__str__': ( 'setup/setup_dev.html#developerprofile.__str__',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.DeveloperProfile._load_default_flame': ( 'setup/setup_dev.html#developerprofile._load_default_flame',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.DeveloperProfile.get_ascii_art': ( 'setup/setup_dev.html#developerprofile.get_ascii_art',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.DeveloperProfile.get_full_profile': ( 'setup/setup_dev.html#developerprofile.get_full_profile',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.DeveloperProfile.get_signature': ( 'setup/setup_dev.html#developerprofile.get_signature',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.SystemInfo': ('setup/setup_dev.html#systeminfo', 'tinytorch/core/utils.py'),
'tinytorch.core.utils.SystemInfo.__init__': ( 'setup/setup_dev.html#systeminfo.__init__',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.SystemInfo.__str__': ( 'setup/setup_dev.html#systeminfo.__str__',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.SystemInfo.is_compatible': ( 'setup/setup_dev.html#systeminfo.is_compatible',
'tinytorch/core/utils.py'),
'tinytorch.core.utils.add_numbers': ('setup/setup_dev.html#add_numbers', 'tinytorch/core/utils.py'),
'tinytorch.core.utils.hello_tinytorch': ( 'setup/setup_dev.html#hello_tinytorch',
'tinytorch/core/utils.py')}}}