Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -131,3 +131,6 @@ dmypy.json
data/
pyodideModule2.md
module2-modernization.md
task2_1_comments.md
task2_2_comments.md
task2_3_comments.md
24 changes: 19 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -39,11 +39,25 @@ python project/run_tensor.py

## Tasks

- **Task 2.1**: Implement tensor data structures with indexing and strides
- **Task 2.2**: Implement tensor broadcasting for operations between different shapes
- **Task 2.3**: Implement tensor operations (map, zip, reduce) and mathematical functions
- **Task 2.4**: Extend autodifferentiation to work with tensors and broadcasting
- **Task 2.5**: Create tensor-based neural network training
### Task 2.1: Tensor Data - Indexing
**File to Edit**: `minitorch/tensor_data.py`

### Task 2.2: Tensor Broadcasting
**File to Edit**: `minitorch/tensor_data.py`

### Task 2.3: Tensor Operations
**Files to Edit**: `minitorch/tensor_ops.py`, `minitorch/tensor_functions.py`, `minitorch/tensor.py`

### Task 2.4: Extend autodifferentiation to work with tensors and broadcasting
**Files to Edit**: `minitorch/tensor_functions.py`

### Task 2.5: Tensor-Based Neural Network Training
**File to Edit**: `project/run_tensor.py`

**Requirements**:
- Train on all datasets and record results in README
- Record time per epoch for performance comparison
- Should match functionality of `project/run_scalar.py` but use tensor operations

## Testing

Expand Down
5 changes: 5 additions & 0 deletions tests/tensor_strategies.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,11 @@ def indices(draw: DrawFn, layout: Tensor) -> UserIndex:
return tuple((draw(integers(min_value=0, max_value=s - 1)) for s in layout.shape))


@composite
def tensor_data_indices(draw: DrawFn, layout: TensorData) -> UserIndex:
    """Draw a valid multidimensional index into `layout`.

    For every dimension of `layout.shape`, draws an integer in
    `[0, size - 1]`, so the resulting tuple always addresses an
    in-bounds element of the tensor data.
    """
    drawn = []
    for size in layout.shape:
        drawn.append(draw(integers(min_value=0, max_value=size - 1)))
    return tuple(drawn)


@composite
def tensors(
draw: DrawFn,
Expand Down
20 changes: 19 additions & 1 deletion tests/test_tensor.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from typing import Callable, Iterable, List, Tuple

import pytest
from hypothesis import given
from hypothesis import assume, given
from hypothesis.strategies import DataObject, data, lists, permutations

from minitorch import MathTestVariable, Tensor, grad_check, tensor
Expand Down Expand Up @@ -106,6 +106,15 @@ def test_two_grad(
) -> None:
name, _, tensor_fn = fn
t1, t2 = ts

# Avoid discontinuities for comparison functions
if name == "gt2" or name == "lt2":
assume(abs((t1.to_numpy().min() + 1.2) - t2.to_numpy().max()) > 1e-3)
assume(abs((t1.to_numpy().max() + 1.2) - t2.to_numpy().min()) > 1e-3)
elif name == "eq2":
assume(abs(t1.to_numpy().min() - (t2.to_numpy().max() + 5.5)) > 1e-3)
assume(abs(t1.to_numpy().max() - (t2.to_numpy().min() + 5.5)) > 1e-3)

grad_check(tensor_fn, t1, t2)


Expand All @@ -119,6 +128,15 @@ def test_two_grad_broadcast(
"Test the grad of a two argument function"
name, base_fn, tensor_fn = fn
t1, t2 = ts

# Avoid discontinuities for comparison functions
if name == "gt2" or name == "lt2":
assume(abs((t1.to_numpy().min() + 1.2) - t2.to_numpy().max()) > 1e-3)
assume(abs((t1.to_numpy().max() + 1.2) - t2.to_numpy().min()) > 1e-3)
elif name == "eq2":
assume(abs(t1.to_numpy().min() - (t2.to_numpy().max() + 5.5)) > 1e-3)
assume(abs(t1.to_numpy().max() - (t2.to_numpy().min() + 5.5)) > 1e-3)

grad_check(tensor_fn, t1, t2)

# broadcast check
Expand Down
28 changes: 26 additions & 2 deletions tests/test_tensor_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import minitorch
from minitorch import TensorData

from .tensor_strategies import indices, tensor_data
from .tensor_strategies import tensor_data, tensor_data_indices

# ## Tasks 2.1

Expand Down Expand Up @@ -81,7 +81,7 @@ def test_index(tensor_data: TensorData) -> None:
@given(data())
def test_permute(data: DataObject) -> None:
    """Reversing all dimensions via permute preserves storage positions.

    If an index is reversed along with the dimension order, it must land
    on the same underlying storage element as the original index.
    """
    layout = data.draw(tensor_data())
    idx = data.draw(tensor_data_indices(layout))
    # Permutation that reverses the dimension order, e.g. (2, 1, 0) for 3 dims.
    reversed_order = list(range(layout.dims))[::-1]
    permuted = layout.permute(*reversed_order)
    assert layout.index(idx) == permuted.index(tuple(reversed(idx)))

Expand Down Expand Up @@ -120,6 +120,30 @@ def test_shape_broadcast() -> None:
assert c == (2, 5)


@pytest.mark.task2_2
def test_broadcast_index() -> None:
    """broadcast_index maps an index in a broadcast (big) shape back to the
    smaller source shape: size-1 dimensions collapse to index 0, while
    matching dimensions pass the index component through unchanged.
    """
    # Case 1: first dimension is broadcast from 1 -> 2.
    big_index = minitorch.array([1, 2, 3])
    big_shape = minitorch.array([2, 3, 4])
    shape = minitorch.array([1, 3, 4])
    out_index = minitorch.array([0, 0, 0])

    minitorch.broadcast_index(big_index, big_shape, shape, out_index)
    # Expected: dim of size 1 -> 0; dims of size 3 and 4 pass through (2, 3).
    for dim, expected in enumerate((0, 2, 3)):
        assert out_index[dim] == expected

    # Case 2: two dimensions of size 1 (first broadcast, second already 1).
    big_index = minitorch.array([1, 0, 2])
    big_shape = minitorch.array([2, 1, 3])
    shape = minitorch.array([1, 1, 3])
    out_index = minitorch.array([0, 0, 0])

    minitorch.broadcast_index(big_index, big_shape, shape, out_index)
    # Expected: both size-1 dims -> 0; dim of size 3 passes through (2).
    for dim, expected in enumerate((0, 0, 2)):
        assert out_index[dim] == expected


@given(tensor_data())
def test_string(tensor_data: TensorData) -> None:
    """Smoke test: to_string runs without raising on arbitrary tensor data."""
    tensor_data.to_string()
Loading