TinyTorch/tinytorch/core/_validation.py
Commit bbd16988b4 by Vijay Janapa Reddi, 2025-09-21 16:53:27 -04:00
Add advanced CIFAR-10 optimization and universal dashboard

Features:
- Universal Rich UI dashboard for all TinyTorch examples
- Advanced 7-layer MLP targeting 60% CIFAR-10 accuracy
- Real-time ASCII plotting and beautiful visualization
- Multiple optimization techniques (dropout, scheduling, augmentation)

Results:
- XOR: 100% accuracy with gorgeous UI
- CIFAR-10: 49-53%+ accuracy with engaging training visualization

253 lines · 10 KiB · Python · Generated

# ╔═══════════════════════════════════════════════════════════════════════════╗
# ║                           🚨 CRITICAL WARNING 🚨                            ║
# ║                        AUTOGENERATED! DO NOT EDIT!                          ║
# ║                                                                             ║
# ║  This file is AUTOMATICALLY GENERATED from source modules.                  ║
# ║  ANY CHANGES MADE HERE WILL BE LOST when modules are re-exported!           ║
# ║                                                                             ║
# ║  ✅ TO EDIT:   modules/source/_validation/_validation_dev.py               ║
# ║  ✅ TO EXPORT: Run 'tito module complete <module_name>'                    ║
# ║                                                                             ║
# ║  🛡️ STUDENT PROTECTION: This file contains critical fixes for Variable/    ║
# ║  Tensor compatibility. Editing it directly WILL break CIFAR-10 training.   ║
# ║                                                                             ║
# ║  🎓 LEARNING TIP: Work in modules/source/ - that's where real development  ║
# ║  happens! The tinytorch/ directory is just the compiled output.            ║
# ╚═══════════════════════════════════════════════════════════════════════════╝
"""
TinyTorch Runtime Validation System
🛡️ **STUDENT PROTECTION SYSTEM**
This module provides runtime validation to detect when students accidentally
break critical Variable/Tensor compatibility in core functions.
**Purpose**: Prevent CIFAR-10 training failures due to core file modifications.
"""
import numpy as np
import warnings
from typing import Any, Callable, Optional
class TinyTorchValidationError(Exception):
"""Raised when critical TinyTorch functionality is broken."""
pass


def validate_variable_tensor_compatibility():
    """
    🛡️ **STUDENT PROTECTION**: Validate that core functions handle Variables correctly.

    This function tests the critical Variable/Tensor compatibility that enables
    CIFAR-10 training. If this fails, students have likely edited core files.
    """
    try:
        # Import core components
        from tinytorch.core.tensor import Tensor
        from tinytorch.core.autograd import Variable
        from tinytorch.core.layers import matmul
        from tinytorch.core.activations import ReLU, Softmax
        from tinytorch.core.training import MeanSquaredError as MSELoss

        # Test 1: Matrix multiplication with Variables
        a = Variable(Tensor([[1, 2], [3, 4]]), requires_grad=True)
        b = Variable(Tensor([[5, 6], [7, 8]]), requires_grad=True)
        try:
            result = matmul(a, b)
            if not hasattr(result, 'requires_grad'):
                raise TinyTorchValidationError("matmul doesn't return Variables properly")
        except Exception as e:
            raise TinyTorchValidationError(f"Matrix multiplication with Variables failed: {e}")

        # Test 2: ReLU with Variables
        relu = ReLU()
        x = Variable(Tensor([[-1, 0, 1]]), requires_grad=True)
        try:
            relu_result = relu(x)
            if not hasattr(relu_result, 'requires_grad'):
                raise TinyTorchValidationError("ReLU doesn't return Variables properly")
        except Exception as e:
            raise TinyTorchValidationError(f"ReLU with Variables failed: {e}")

        # Test 3: Softmax with Variables
        softmax = Softmax()
        x = Variable(Tensor([[1, 2, 3]]), requires_grad=True)
        try:
            softmax_result = softmax(x)
            if not hasattr(softmax_result, 'requires_grad'):
                raise TinyTorchValidationError("Softmax doesn't return Variables properly")
            # Check that the output is a valid probability distribution
            prob_sum = np.sum(softmax_result.data.data)
            if not np.isclose(prob_sum, 1.0, atol=1e-6):
                raise TinyTorchValidationError("Softmax doesn't produce valid probabilities")
        except Exception as e:
            raise TinyTorchValidationError(f"Softmax with Variables failed: {e}")

        # Test 4: Loss function with Variables
        loss_fn = MSELoss()
        pred = Variable(Tensor([[0.1, 0.2, 0.7]]), requires_grad=True)
        true = Variable(Tensor([[0.0, 0.0, 1.0]]), requires_grad=False)
        try:
            loss = loss_fn(pred, true)
            # Handle Variable/Tensor data access properly
            if hasattr(loss.data, 'data'):
                loss_value = float(loss.data.data)
            elif hasattr(loss.data, '_data'):
                loss_value = float(loss.data._data)
            else:
                loss_value = float(loss.data)
            if not isinstance(loss_value, (int, float)) or np.isnan(loss_value):
                raise TinyTorchValidationError("Loss function doesn't return valid scalar")
        except Exception as e:
            raise TinyTorchValidationError(f"Loss function with Variables failed: {e}")

        return True

    except ImportError as e:
        raise TinyTorchValidationError(f"Core modules not available: {e}")


def validate_training_pipeline():
    """
    🛡️ **STUDENT PROTECTION**: Validate that the complete training pipeline works.

    Tests the full forward pass that CIFAR-10 training requires.
    """
    try:
        from tinytorch.core.tensor import Tensor
        from tinytorch.core.autograd import Variable
        from tinytorch.core.layers import Dense
        from tinytorch.core.activations import ReLU, Softmax
        from tinytorch.core.training import MeanSquaredError as MSELoss
        from tinytorch.core.optimizers import Adam

        # Create a mini neural network
        fc1 = Dense(10, 5)
        relu = ReLU()
        fc2 = Dense(5, 3)
        softmax = Softmax()

        # Make it trainable
        fc1.weights = Variable(fc1.weights.data, requires_grad=True)
        fc1.bias = Variable(fc1.bias.data, requires_grad=True)
        fc2.weights = Variable(fc2.weights.data, requires_grad=True)
        fc2.bias = Variable(fc2.bias.data, requires_grad=True)

        # Test forward pass
        x = Variable(Tensor(np.random.randn(2, 10)), requires_grad=False)
        h1 = fc1(x)
        h1_act = relu(h1)
        h2 = fc2(h1_act)
        output = softmax(h2)

        # Test loss computation
        target = Variable(Tensor(np.random.randn(2, 3)), requires_grad=False)
        loss_fn = MSELoss()
        loss = loss_fn(output, target)

        # Test optimizer: parameter shapes must be preserved across a step
        optimizer = Adam([fc1.weights, fc1.bias, fc2.weights, fc2.bias], learning_rate=0.001)
        original_bias_shape = fc1.bias.data.shape
        optimizer.step()  # This should not corrupt shapes
        if fc1.bias.data.shape != original_bias_shape:
            raise TinyTorchValidationError(
                f"Bias shape corrupted: {original_bias_shape} -> {fc1.bias.data.shape}"
            )

        return True

    except Exception as e:
        raise TinyTorchValidationError(f"Training pipeline validation failed: {e}")


def run_student_protection_checks(verbose: bool = False):
    """
    🛡️ **MAIN PROTECTION FUNCTION**: Run all validation checks.

    This function should be called before CIFAR-10 training to ensure
    students haven't accidentally broken core functionality.

    Args:
        verbose: If True, print detailed validation results.

    Returns:
        bool: True if all checks pass.

    Raises:
        TinyTorchValidationError: If any critical functionality is broken.
    """
    checks = [
        ("Variable/Tensor Compatibility", validate_variable_tensor_compatibility),
        ("Training Pipeline", validate_training_pipeline),
    ]

    if verbose:
        print("🛡️ Running TinyTorch Student Protection Checks...")
        print("=" * 60)

    for check_name, check_func in checks:
        try:
            check_func()
            if verbose:
                print(f"{check_name}: PASSED")
        except TinyTorchValidationError as e:
            error_msg = f"""
🚨 CRITICAL ERROR: {check_name} validation failed!

{e}

🛡️ STUDENT PROTECTION TRIGGERED:
This error suggests that core TinyTorch files have been accidentally modified.

📋 TO FIX:
1. Check whether you've edited any files in the tinytorch/core/ directory
2. Those files are auto-generated and should NOT be edited directly
3. Make changes in modules/source/ instead
4. Run 'tito module complete <module>' to regenerate core files

⚠️ CIFAR-10 training will FAIL until this is fixed!
"""
            if verbose:
                print(f"{check_name}: FAILED")
                print(error_msg)
            raise TinyTorchValidationError(error_msg)

    if verbose:
        print("=" * 60)
        print("🎉 All protection checks passed! CIFAR-10 training should work.")

    return True


def auto_validate_on_import():
    """
    🛡️ **AUTOMATIC PROTECTION**: Run validation when core modules are imported.

    This provides automatic protection without requiring students to
    remember to run validation checks.
    """
    try:
        run_student_protection_checks(verbose=False)
    except TinyTorchValidationError:
        # Only warn on import, don't crash
        warnings.warn(
            "🚨 TinyTorch core functionality may be broken. "
            "Run 'from tinytorch.core._validation import run_student_protection_checks; "
            "run_student_protection_checks(verbose=True)' for details.",
            UserWarning
        )


# Run automatic validation when this module is imported.
# This provides silent protection for students.
try:
    auto_validate_on_import()
except Exception:
    # Never crash on import; auto_validate_on_import already emits a warning
    pass
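

# Usage sketch: the checks can also be run manually with full diagnostic
# output, as the warning above suggests. A minimal entry point, assuming this
# module is executed directly (for example `python -m tinytorch.core._validation`):
if __name__ == "__main__":
    run_student_protection_checks(verbose=True)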