Revert to rock-solid foundation approach for module imports

- Fix module imports to use tinytorch.core.* instead of local module imports
- Activations module now imports from tinytorch.core.tensor for stability
- Layers module imports from tinytorch.core.tensor and tinytorch.core.activations
- Test files updated to use main package imports for dependencies
- This ensures students can focus on the current module without dependency issues
- Previous modules are 'locked in' and guaranteed to work
- Mirrors real-world usage patterns, such as how PyTorch users import from a stable installed package
- Maintains educational progression while ensuring system stability
This commit is contained in:
Vijay Janapa Reddi
2025-07-12 02:00:30 -04:00
parent a985323141
commit 578a00f608
4 changed files with 1092 additions and 28 deletions

View File

@@ -32,10 +32,12 @@ Every neural network you've heard of - from image recognition to language models
#| export
import math
import numpy as np
import matplotlib.pyplot as plt
import os
import sys
from typing import Union, List
# Import our Tensor class
# Import our Tensor class from the main package (rock solid foundation)
from tinytorch.core.tensor import Tensor
# %%
@@ -43,7 +45,6 @@ from tinytorch.core.tensor import Tensor
#| export
def _should_show_plots():
"""Check if we should show plots (disable during testing)"""
import os
# Check multiple conditions that indicate we're in test mode
is_pytest = (
'pytest' in sys.modules or
@@ -65,7 +66,6 @@ def visualize_activation_function(activation_fn, name: str, x_range: tuple = (-5
return
try:
import matplotlib.pyplot as plt
# Generate input values
x_vals = np.linspace(x_range[0], x_range[1], num_points)

File diff suppressed because it is too large Load Diff

View File

@@ -1,29 +1,19 @@
"""
Test suite for the TinyTorch Activations module.
This test suite validates the mathematical correctness of activation functions:
- ReLU: f(x) = max(0, x)
- Sigmoid: f(x) = 1 / (1 + e^(-x))
- Tanh: f(x) = tanh(x)
Tests focus on:
1. Mathematical correctness
2. Numerical stability
3. Edge cases
4. Shape preservation
5. Type consistency
Test suite for the activations module.
This tests the student implementations to ensure they work correctly.
"""
import pytest
import numpy as np
import math
from tinytorch.core.tensor import Tensor
# Import the activation functions
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from activations_dev import ReLU, Sigmoid, Tanh
# Import from the main package (rock solid foundation)
from tinytorch.core.tensor import Tensor
# Import our implementations from the local module for testing
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from activations_dev import ReLU, Sigmoid, Tanh, Softmax
class TestReLU:

View File

@@ -67,15 +67,11 @@ import numpy as np
import math
import sys
from typing import Union, Optional, Callable
# Import from the main package (rock solid foundation)
from tinytorch.core.tensor import Tensor
# Import activation functions from the activations module
from tinytorch.core.activations import ReLU, Sigmoid, Tanh
# Import our Tensor class
# sys.path.append('../../')
# from modules.tensor.tensor_dev import Tensor
# print("🔥 TinyTorch Layers Module")
# print(f"NumPy version: {np.__version__}")
# print(f"Python version: {sys.version_info.major}.{sys.version_info.minor}")