mirror of https://github.com/MLSysBook/TinyTorch.git (synced 2026-04-28 14:33:18 -05:00)
- Remove redundant fields from module.yaml files: exports_to, files, components
- Keep only essential system metadata: name, title, description, dependencies
- Export command now reads actual export targets from dev files (#| default_exp directive; see the example below)
- Status command updated to use dev files as source of truth for export targets
- Export command shows detailed source → target mapping for better clarity
- Dependencies field retained as it is useful for CLI module ordering and prerequisites
- Eliminates duplication between YAML and dev files; the dev files are the single source of truth
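For reference, a dev notebook declares its export target at the top of its first code cell with a directive of this form (the module path shown here is illustrative, not taken from the repo):

#| default_exp core.activations

The export and status commands resolve this directive to the generated .py file, so the source → target mapping no longer needs to be duplicated in module.yaml.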
138 lines
4.0 KiB
Python
# AUTOGENERATED! DO NOT EDIT! File to edit: ../../modules/activations/activations_dev.ipynb.

# %% auto 0
__all__ = ['ReLU', 'Sigmoid', 'Tanh']

import numpy as np

from .tensor import Tensor  # NOTE: import path assumed; Tensor wraps a NumPy ndarray exposed via .data

# %% ../../modules/activations/activations_dev.ipynb 5
class ReLU:
    """
    ReLU Activation: f(x) = max(0, x)

    The most popular activation function in deep learning.
    Simple, effective, and computationally efficient.

    TODO: Implement ReLU activation function.
    """

    def forward(self, x: Tensor) -> Tensor:
        """
        Apply ReLU: f(x) = max(0, x)

        Args:
            x: Input tensor

        Returns:
            Output tensor with ReLU applied element-wise

        TODO: Implement element-wise max(0, x) operation
        Hint: Use np.maximum(0, x.data)
        """
        raise NotImplementedError("Student implementation required")

    def __call__(self, x: Tensor) -> Tensor:
        """Make activation callable: relu(x) same as relu.forward(x)"""
        return self.forward(x)

# %% ../../modules/activations/activations_dev.ipynb 6
class ReLU:
    """ReLU Activation: f(x) = max(0, x)"""

    def forward(self, x: Tensor) -> Tensor:
        """Apply ReLU: f(x) = max(0, x)"""
        return Tensor(np.maximum(0, x.data))

    def __call__(self, x: Tensor) -> Tensor:
        return self.forward(x)

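# Illustrative usage sketch (not part of the source notebook); assumes Tensor
# wraps a NumPy ndarray exposed via .data:
#
#     relu = ReLU()
#     out = relu(Tensor(np.array([-2.0, 0.0, 3.0])))
#     # out.data -> array([0., 0., 3.])   (negatives clamped to zero)
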
# %% ../../modules/activations/activations_dev.ipynb 12
class Sigmoid:
    """
    Sigmoid Activation: f(x) = 1 / (1 + e^(-x))

    Squashes input to range (0, 1). Often used for binary classification.

    TODO: Implement Sigmoid activation function.
    """

    def forward(self, x: Tensor) -> Tensor:
        """
        Apply Sigmoid: f(x) = 1 / (1 + e^(-x))

        Args:
            x: Input tensor

        Returns:
            Output tensor with Sigmoid applied element-wise

        TODO: Implement sigmoid function (be careful with numerical stability!)

        Hint: For numerical stability, use:
        - For x >= 0: sigmoid(x) = 1 / (1 + exp(-x))
        - For x < 0: sigmoid(x) = exp(x) / (1 + exp(x))
        """
        raise NotImplementedError("Student implementation required")

    def __call__(self, x: Tensor) -> Tensor:
        return self.forward(x)

# %% ../../modules/activations/activations_dev.ipynb 13
class Sigmoid:
    """Sigmoid Activation: f(x) = 1 / (1 + e^(-x))"""

    def forward(self, x: Tensor) -> Tensor:
        """Apply Sigmoid with numerical stability"""
        # Use the numerically stable version to avoid overflow
        # For x >= 0: sigmoid(x) = 1 / (1 + exp(-x))
        # For x < 0:  sigmoid(x) = exp(x) / (1 + exp(x))
        x_data = x.data
        result = np.zeros_like(x_data)

        # Stable computation
        positive_mask = x_data >= 0
        result[positive_mask] = 1.0 / (1.0 + np.exp(-x_data[positive_mask]))
        result[~positive_mask] = np.exp(x_data[~positive_mask]) / (1.0 + np.exp(x_data[~positive_mask]))

        return Tensor(result)

    def __call__(self, x: Tensor) -> Tensor:
        return self.forward(x)

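# Illustrative check of the stable formulation (not part of the source notebook);
# assumes Tensor wraps a NumPy ndarray exposed via .data:
#
#     sigmoid = Sigmoid()
#     out = sigmoid(Tensor(np.array([-1000.0, 0.0, 1000.0])))
#     # out.data -> array([0. , 0.5, 1. ])
#
# A naive 1 / (1 + np.exp(-x)) overflows in exp() for very negative inputs and
# emits a RuntimeWarning, whereas the branch-by-sign version keeps every
# intermediate value finite.
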
# %% ../../modules/activations/activations_dev.ipynb 19
class Tanh:
    """
    Tanh Activation: f(x) = tanh(x)

    Squashes input to range (-1, 1). Zero-centered output.

    TODO: Implement Tanh activation function.
    """

    def forward(self, x: Tensor) -> Tensor:
        """
        Apply Tanh: f(x) = tanh(x)

        Args:
            x: Input tensor

        Returns:
            Output tensor with Tanh applied element-wise

        TODO: Implement tanh function
        Hint: Use np.tanh(x.data)
        """
        raise NotImplementedError("Student implementation required")

    def __call__(self, x: Tensor) -> Tensor:
        return self.forward(x)

# %% ../../modules/activations/activations_dev.ipynb 20
class Tanh:
    """Tanh Activation: f(x) = tanh(x)"""

    def forward(self, x: Tensor) -> Tensor:
        """Apply Tanh"""
        return Tensor(np.tanh(x.data))

    def __call__(self, x: Tensor) -> Tensor:
        return self.forward(x)

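# Illustrative usage sketch (not part of the source notebook); assumes Tensor
# wraps a NumPy ndarray exposed via .data:
#
#     tanh = Tanh()
#     out = tanh(Tensor(np.array([-1.0, 0.0, 1.0])))
#     # out.data -> array([-0.76159416,  0.        ,  0.76159416])   (zero-centered, in (-1, 1))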