diff --git a/modules/source/06_optimizers/optimizers_dev.py b/modules/source/06_optimizers/optimizers_dev.py
index 2a08fb79..0c778e98 100644
--- a/modules/source/06_optimizers/optimizers_dev.py
+++ b/modules/source/06_optimizers/optimizers_dev.py
@@ -63,10 +63,7 @@ import numpy as np
 from typing import List, Union, Optional, Dict, Any
 
 # Import Tensor from Module 01 (now with gradient support from Module 05)
-import sys
-import os
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', '01_tensor'))
-from tensor_dev import Tensor
+from tinytorch.core.tensor import Tensor
 
 # %% [markdown]
 """
diff --git a/modules/source/07_training/training_dev.py b/modules/source/07_training/training_dev.py
index 10caeb52..77642fa4 100644
--- a/modules/source/07_training/training_dev.py
+++ b/modules/source/07_training/training_dev.py
@@ -69,17 +69,10 @@ import sys
 import os
 
 # Import dependencies from other modules
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', '01_tensor'))
-from tensor_dev import Tensor
-
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', '03_layers'))
-from layers_dev import Linear
-
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', '04_losses'))
-from losses_dev import MSELoss, CrossEntropyLoss
-
-sys.path.append(os.path.join(os.path.dirname(__file__), '..', '06_optimizers'))
-from optimizers_dev import SGD, AdamW
+from tinytorch.core.tensor import Tensor
+from tinytorch.core.layers import Linear
+from tinytorch.core.losses import MSELoss, CrossEntropyLoss
+from tinytorch.core.optimizers import SGD, AdamW
 
 # %% [markdown]
 """