refactor: Standardize imports across modules 10-17 to match 01-09

Enforce consistent import pattern across all modules:
- Direct imports from tinytorch.core.* (no fallbacks)
- Remove all sys.path.append manipulations
- Remove try/except import fallbacks
- Remove mock/dummy class fallbacks (see the sketch below)

Fixed modules:
- Module 10 (tokenization): Removed try/except fallback
- Module 12 (attention): Removed sys.path.append for tensor/layers
- Module 15 (profiling): Removed sys.path + mock Tensor/Linear/Conv2d
- Module 16 (acceleration): Removed hardcoded path + importlib + mock Tensor
- Module 17 (quantization): Removed sys.path + disabled fallback block
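
For reference, the removed fallback pattern looked roughly like this (an illustrative sketch of the old style, not the exact code from any one module):

  # OLD pattern (removed): sys.path hacking plus a try/except mock fallback
  import os
  import sys
  sys.path.append(os.path.join(os.path.dirname(__file__), '..', '01_tensor'))
  try:
      from tensor_dev import Tensor
  except ImportError:
      class Tensor:  # dummy stand-in so the notebook still imports
          def __init__(self, data):
              self.data = data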

All modules now follow the same pattern as modules 01-09:
  from tinytorch.core.tensor import Tensor
  from tinytorch.core.layers import Linear
  # etc.

No development fallbacks - assume tinytorch package is installed.
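
A quick way to check the new layout (a sketch; it assumes the tinytorch package has already been installed, e.g. via an editable pip install from the repo root):

  # Smoke test: these imports must resolve with no sys.path manipulation.
  from tinytorch.core.tensor import Tensor
  from tinytorch.core.layers import Linear
  from tinytorch.core.activations import ReLU
  print("tinytorch.core imports OK:", Tensor, Linear, ReLU)
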
Author: Vijay Janapa Reddi
Date: 2025-10-24 10:36:48 -04:00
Parent: 191f6db7c7
Commit: 0e997e4a10
5 changed files with 8 additions and 122 deletions

@@ -69,61 +69,10 @@ import matplotlib.pyplot as plt
 from typing import Tuple, Dict, List, Optional
 import warnings
-# Smart import system for development and production compatibility
-import sys
-import os
-# Import dependencies from other modules
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', '01_tensor'))
-from tensor_dev import Tensor
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', '03_layers'))
-from layers_dev import Linear, Sequential
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', '02_activations'))
-from activations_dev import ReLU
-# Note: Keeping development fallback for reference
-if False: # Disabled development fallback
-    # Development: Import from local module files
-    try:
-        # Try to find the current directory
-        current_dir = os.path.dirname(os.path.abspath(__file__))
-    except NameError:
-        # Fallback when __file__ is not available (e.g., in exec context)
-        current_dir = os.getcwd()
-    # Import Tensor from Module 01
-    tensor_module_path = os.path.join(current_dir, '..', '01_tensor')
-    sys.path.insert(0, tensor_module_path)
-    try:
-        from tensor_dev import Tensor
-    finally:
-        sys.path.pop(0)
-    # Import from Module 03 layers
-    layers_module_path = os.path.join(current_dir, '..', '03_layers')
-    sys.path.insert(0, layers_module_path)
-    try:
-        from layers_dev import Linear, Sequential
-    finally:
-        sys.path.pop(0)
-    # Import from Module 02 activations
-    activations_module_path = os.path.join(current_dir, '..', '02_activations')
-    sys.path.insert(0, activations_module_path)
-    try:
-        from activations_dev import ReLU
-    finally:
-        sys.path.pop(0)
-    # Create dummy profiler if needed
-    class Profiler:
-        """Dummy profiler class for development."""
-        def count_parameters(self, model):
-            return 0
-        def measure_memory(self, model, input_shape):
-            return {"total": 0}
+from tinytorch.core.tensor import Tensor
+from tinytorch.core.layers import Linear
+from tinytorch.core.activations import ReLU
 print("✅ Quantization module imports complete")