style: apply consistent whitespace and formatting across codebase

This commit is contained in:
Vijay Janapa Reddi
2025-12-13 14:05:34 -05:00
parent efc577f53f
commit 853eb03ee8
655 changed files with 20814 additions and 20965 deletions

View File

@@ -15,85 +15,85 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent))
class TestDenseModuleExports:
    """Test that all expected exports are available."""

    def test_dense_class_exports(self):
        """Test Dense class exports from layers module."""
        from tinytorch.core.layers import Linear as Dense

        assert Dense is not None, "Dense class should be exported"

    def test_dense_is_callable(self):
        """Test Dense can be instantiated."""
        from tinytorch.core.layers import Linear as Dense

        # Fix: the import binds only the alias `Dense`; the bare name
        # `Linear` is undefined here and raised NameError.
        layer = Dense(10, 5)
        assert layer is not None, "Should create Dense layer instance"
        assert hasattr(layer, 'forward'), "Dense should have forward method"
class TestDenseLayerFunctionality:
    """Test that Dense layer works correctly."""

    def test_dense_forward_pass(self):
        """Test Dense layer forward pass produces correct shape."""
        from tinytorch.core.layers import Linear as Dense
        from tinytorch.core.tensor import Tensor

        # Fix: the import binds only the alias `Dense`; calling `Linear`
        # directly raised NameError.
        layer = Dense(10, 5)
        # Batched input: (batch_size, in_features)
        batch_size = 32
        x = Tensor(np.random.randn(batch_size, 10))
        output = layer.forward(x)
        assert output.shape == (32, 5), f"Expected shape (32, 5), got {output.shape}"
        assert isinstance(output, Tensor), "Output should be a Tensor"

    def test_dense_with_bias(self):
        """Test Dense layer with bias enabled."""
        from tinytorch.core.layers import Linear as Dense
        from tinytorch.core.tensor import Tensor

        layer = Dense(10, 5, bias=True)
        assert hasattr(layer, 'bias'), "Layer should have bias"
        assert layer.bias is not None, "Bias should be initialized"
        x = Tensor(np.random.randn(1, 10))
        output = layer(x)
        assert output.shape == (1, 5), "Output shape should be correct with bias"

    def test_dense_without_bias(self):
        """Test Dense layer with bias disabled."""
        from tinytorch.core.layers import Linear as Dense
        from tinytorch.core.tensor import Tensor

        layer = Dense(10, 5, bias=False)
        assert layer.bias is None, "Bias should be None when disabled"
        x = Tensor(np.random.randn(1, 10))
        output = layer(x)
        assert output.shape == (1, 5), "Output shape should be correct without bias"

    def test_dense_callable_interface(self):
        """Test Dense layer can be called directly."""
        from tinytorch.core.layers import Linear as Dense
        from tinytorch.core.tensor import Tensor

        layer = Dense(10, 5)
        x = Tensor(np.random.randn(4, 10))
        # Both the explicit forward() and the __call__ protocol must agree.
        output1 = layer.forward(x)
        output2 = layer(x)
        assert output1.shape == output2.shape, "forward() and __call__() should produce same shape"
class TestNetworkComposition:
"""Test building multi-layer networks."""
def test_sequential_exists(self):
"""Test if Sequential is available for network composition."""
try:
@@ -106,31 +106,31 @@ class TestNetworkComposition:
assert Sequential is not None
except ImportError:
pytest.skip("Sequential not yet implemented")
def test_multi_layer_network(self):
    """Test building a multi-layer network."""
    from tinytorch.core.layers import Linear as Dense
    from tinytorch.core.activations import ReLU, Sigmoid
    from tinytorch.core.tensor import Tensor

    # Fix: the import binds only the alias `Dense`; calling `Linear`
    # directly raised NameError.
    # Build network manually (without Sequential): 784 -> 128 -> 10
    layer1 = Dense(784, 128)
    relu = ReLU()
    layer2 = Dense(128, 10)
    sigmoid = Sigmoid()

    # Forward pass through all layers, checking shapes at each stage.
    x = Tensor(np.random.randn(32, 784))
    h1 = layer1(x)
    assert h1.shape == (32, 128), "First layer output shape incorrect"
    h1_activated = relu(h1)
    assert h1_activated.shape == (32, 128), "ReLU should preserve shape"
    output = layer2(h1_activated)
    assert output.shape == (32, 10), "Second layer output shape incorrect"
    final = sigmoid(output)
    assert final.shape == (32, 10), "Sigmoid should preserve shape"
    # Sigmoid squashes everything into the unit interval.
    assert np.all(final.data >= 0) and np.all(final.data <= 1), "Sigmoid output should be in [0,1]"
@@ -138,28 +138,28 @@ class TestNetworkComposition:
class TestXORCapability:
    """Test that the network can represent XOR problem."""

    def test_xor_network_structure(self):
        """Test building XOR network structure."""
        from tinytorch.core.layers import Linear as Dense
        from tinytorch.core.activations import ReLU, Sigmoid
        from tinytorch.core.tensor import Tensor

        # Fix: the import binds only the alias `Dense`; calling `Linear`
        # directly raised NameError.
        # XOR network: 2 -> 4 -> 1
        hidden = Dense(2, 4)
        relu = ReLU()
        output = Dense(4, 1)
        sigmoid = Sigmoid()

        # All four XOR input combinations.
        X = Tensor([[0, 0], [0, 1], [1, 0], [1, 1]])

        # Forward pass: structure check only (untrained weights).
        h = hidden(X)
        h_activated = relu(h)
        out = output(h_activated)
        predictions = sigmoid(out)
        assert predictions.shape == (4, 1), "XOR network should produce 4 predictions"
        assert np.all(predictions.data >= 0) and np.all(predictions.data <= 1), "Predictions should be probabilities"
@@ -178,7 +178,7 @@ if __name__ == "__main__":
capture_output=True,
text=True
)
print(result.stdout)
if result.returncode != 0:
print(result.stderr)
@@ -186,4 +186,4 @@ if __name__ == "__main__":
else:
print("\n✅ All integration tests passed! Module 05 is working correctly.")
print("🎯 Capability Unlocked: Neural Networks That Learn")
print("\nTry the demo: python capabilities/05_neural_networks/demonstrate.py")