
Commit 006c898

committed
tests/test_project.py can now be included in tests.sh; some project tests fail on purpose, just to show the ops benchmark
1 parent d199c12 commit 006c898

File tree

2 files changed: 3 additions & 3 deletions


tests.sh

Lines changed: 1 addition & 0 deletions
@@ -10,6 +10,7 @@ python3 -m pytest \
     tests/test_ops.py \
     tests/test_conv.py \
     tests/test_simple_nn.py \
+    tests/test_project.py \
     # tests/test_sequence_models.py \
     # tests/test_mlp_resnet.py \
     # tests/test_data.py \

tests/test_project.py

Lines changed: 2 additions & 3 deletions
@@ -5,9 +5,6 @@
 import torch
 import pytest
 
-import sys; sys.path.append('project')
-from project.myexp import mymodel, get_torch_dropout_mask, copy_init_weights_to_torch
-
 def get_tensor(*shape, entropy=1):
     np.random.seed(np.prod(shape) * len(shape) * entropy)
     return kim.Tensor(np.random.randint(0, 100, size=shape) / 20, dtype="float32")
@@ -33,6 +30,8 @@ def get_int_tensor(*shape, low=0, high=10, entropy=1):
 @pytest.mark.parametrize("batch_size", [128, 32, 64])
 @pytest.mark.parametrize("dropout", [True])
 def test_model(batch_size, dropout, eps=1e-03):
+    import sys; sys.path.append('project')
+    from project.myexp import mymodel, get_torch_dropout_mask, copy_init_weights_to_torch
     kim.autograd.CompGraph.RECORD_TIMESPENT = True
     # from myexp import mymodel, torch, kim
     model = mymodel(kim.nn, dropout=dropout)
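
For context, a minimal sketch of the deferred-import pattern the second hunk applies, assuming the intent is that pytest can still collect tests/test_project.py even when project/myexp.py is missing or broken: the import failure then shows up as a single failing (or skipped) test rather than a collection error. The pytest.importorskip call below is an alternative shown only for illustration; the commit itself just moves the plain import into the test body. The test name and assertions here are hypothetical.

# Sketch only: defer a project-specific import into the test body.
# Assumes pytest is installed and an optional module lives at project/myexp.py.
import sys
import pytest

def test_model_sketch():
    sys.path.append('project')                    # make the project dir importable, as in the diff
    myexp = pytest.importorskip("project.myexp")  # skip (not error out) if the module is absent
    model_factory = myexp.mymodel                 # name taken from the diff above
    assert callable(model_factory)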
