mirror of
https://github.com/MLSysBook/TinyTorch.git
synced 2026-03-12 03:33:33 -05:00
This commit includes: - Exported tinytorch package files from nbdev (autograd, losses, optimizers, training, etc.) - Updated activations.py and layers.py with __call__ methods - New module exports: attention, spatial, tokenization, transformer, etc. - Removed old _modidx.py file - Cleanup of duplicate milestone directories These are the generated package files that correspond to the source modules we've been developing. Students will import from these when using TinyTorch.
65 lines
2.1 KiB
Python
Generated
65 lines
2.1 KiB
Python
Generated
# AUTOGENERATED! DO NOT EDIT! File to edit: ../../modules/source/09_spatial/spatial_dev.ipynb.
|
|
|
|
# %% auto 0
|
|
__all__ = []
|
|
|
|
# %% ../../modules/source/09_spatial/spatial_dev.ipynb 1
import numpy as np
import sys
import os
import time

# Import dependencies from other modules.
# Each course module lives in its own directory, so that directory must be
# appended to sys.path before its *_dev module can be imported.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '01_tensor'))
from tensor_dev import Tensor

sys.path.append(os.path.join(os.path.dirname(__file__), '..', '03_layers'))
# NOTE(review): this imported Module is shadowed by a local `class Module`
# defined further down in this file — confirm which one is intended.
from layers_dev import Module
# Note: Keeping simplified implementations for reference during development
|
|
class _SimplifiedTensor:
|
|
"""Simplified tensor for spatial operations development."""
|
|
|
|
def __init__(self, data, requires_grad=False):
|
|
self.data = np.array(data, dtype=np.float32)
|
|
self.shape = self.data.shape
|
|
self.requires_grad = requires_grad
|
|
self.grad = None
|
|
|
|
def __repr__(self):
|
|
return f"Tensor(shape={self.shape}, data=\n{self.data})"
|
|
|
|
def __add__(self, other):
|
|
if isinstance(other, Tensor):
|
|
return Tensor(self.data + other.data)
|
|
return Tensor(self.data + other)
|
|
|
|
def __mul__(self, other):
|
|
if isinstance(other, Tensor):
|
|
return Tensor(self.data * other.data)
|
|
return Tensor(self.data * other)
|
|
|
|
def sum(self):
|
|
return Tensor(np.sum(self.data))
|
|
|
|
def mean(self):
|
|
return Tensor(np.mean(self.data))
|
|
|
|
# Create a simple Module base class for inheritance
class Module:
    """Minimal base class for neural network modules.

    Subclasses override :meth:`forward`; :meth:`parameters` collects every
    attribute that looks like a trainable tensor, i.e. exposes both ``data``
    and ``requires_grad``.
    """

    def __init__(self):
        pass

    def forward(self, x):
        """Compute the module's output for ``x`` — must be overridden."""
        raise NotImplementedError("Subclasses must implement forward()")

    def parameters(self):
        """Return list of parameters for this module."""
        found = []
        for attr in dir(self):
            candidate = getattr(self, attr)
            # Guard clause: skip anything that doesn't look like a parameter.
            if not (hasattr(candidate, 'data') and hasattr(candidate, 'requires_grad')):
                continue
            found.append(candidate)
        return found