
Commit ee0b329

Fix tests
1 parent 89f1ef6 commit ee0b329

File tree

buildspec.yml
test/resources/mnist/model_cpu/1d/code/mnist_1d.py
test/resources/mnist/model_cpu/code/call_model_fn_once.py
test/resources/mnist/model_gpu/code/mnist.py
test/resources/model_gpu_context/code/inference.py

5 files changed: +19 -17 lines changed

buildspec.yml

Lines changed: 11 additions & 11 deletions
@@ -30,23 +30,23 @@ phases:
       - PY_COLORS=0

       # run linters
-      # - tox -e flake8,twine
+      - tox -e flake8,twine

       # run unit tests
-      # - tox -e py38,py39,py310 test/unit
+      - tox -e py38,py39,py310 test/unit

       # define EIA tag
       # - DLC_EIA_TAG="$EIA_FRAMEWORK_VERSION-dlc-eia-$BUILD_ID"

       # run local CPU integration tests (build and push the image to ECR repo)
-      #- |
-      #  for FRAMEWORK_VERSION in $FRAMEWORK_VERSIONS;
-      #  do
-      #    DLC_CPU_TAG="$FRAMEWORK_VERSION-dlc-cpu-$BUILD_ID";
-      #    test_cmd="IGNORE_COVERAGE=- tox -e py38 -- test/integration/local -vv -rA -s --build-image --push-image --dockerfile-type dlc.cpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_VERSION --processor cpu --tag $DLC_CPU_TAG";
-      #    execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "setup.cfg";
-      #    docker system prune --all --force;
-      #  done
+      - |
+        for FRAMEWORK_VERSION in $FRAMEWORK_VERSIONS;
+        do
+          DLC_CPU_TAG="$FRAMEWORK_VERSION-dlc-cpu-$BUILD_ID";
+          test_cmd="IGNORE_COVERAGE=- tox -e py38 -- test/integration/local -vv -rA -s --build-image --push-image --dockerfile-type dlc.cpu --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_VERSION --processor cpu --tag $DLC_CPU_TAG";
+          execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "setup.cfg";
+          docker system prune --all --force;
+        done

       # launch remote GPU instance with Deep Learning AMI GPU PyTorch 1.9 (Ubuntu 20.04)
       - prefix='ml.'
@@ -110,4 +110,4 @@ phases:
           aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$DLC_GPU_TAG;
         done

-      # - aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$DLC_EIA_TAG
+      # - aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$DLC_EIA_TAG

test/resources/mnist/model_cpu/1d/code/mnist_1d.py

Lines changed: 1 addition & 2 deletions
@@ -11,7 +11,6 @@
 # ANY KIND, either express or implied. See the License for the specific
 # language governing permissions and limitations under the License.
 from __future__ import absolute_import
-
 import os
 import torch
 import torch.nn as nn
@@ -45,4 +44,4 @@ def model_fn(model_dir):
     model = torch.nn.DataParallel(Net())
     with open(os.path.join(model_dir, 'torch_model.pth'), 'rb') as f:
         model.load_state_dict(torch.load(f))
-    return model
+    return model

test/resources/mnist/model_cpu/code/call_model_fn_once.py

Lines changed: 2 additions & 1 deletion
@@ -14,6 +14,7 @@

 import os

+
 def model_fn(model_dir):
     lock_file = os.path.join(model_dir, 'model_fn.lock.{}'.format(os.getpid()))
     if os.path.exists(lock_file):
@@ -33,4 +34,4 @@ def predict_fn(data, model):


 def output_fn(prediction, accept):
-    return prediction
+    return prediction

test/resources/mnist/model_gpu/code/mnist.py

Lines changed: 1 addition & 1 deletion
@@ -63,4 +63,4 @@ def model_fn(model_dir):
     # Move the model to the GPU
     device = torch.device("cuda")
     model = model.to(device)
-    return model
+    return model

test/resources/model_gpu_context/code/inference.py

Lines changed: 4 additions & 2 deletions
@@ -12,12 +12,14 @@
 # language governing permissions and limitations under the License.
 from __future__ import absolute_import

+import os
 import torch

 def model_fn(model_dir, context):
-    device = torch.device("cuda:" + str(context.system_properties.get("gpu_id")))
-    file_path = "device_info.txt"
+    script_dir = os.path.dirname(os.path.abspath(__file__))
+    file_path = os.path.join(script_dir, "device_info.txt")

+    device = torch.device("cuda:" + str(context.system_properties.get("gpu_id")))
     device_str = str(device)[-1]
     with open(file_path, "a") as file:
         file.write(device_str + "\n")

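Note on the inference.py change above: the handler now writes device_info.txt next to the script rather than into the server's working directory, presumably so the GPU-context integration test can locate the file reliably. The snippet below is a minimal, hypothetical harness sketching how model_fn could be exercised outside the serving container; FakeContext, the model_dir path, and the loop over visible GPUs are illustration-only assumptions, not part of the toolkit's API.

# Hypothetical harness for the updated test resource; FakeContext and the
# model_dir path are assumptions for illustration only.
import torch

from inference import model_fn  # the test resource changed in the diff above


class FakeContext:
    # Mimics the only attribute the handler reads: a system_properties
    # mapping carrying the worker's assigned "gpu_id".
    def __init__(self, gpu_id):
        self.system_properties = {"gpu_id": gpu_id}


if __name__ == "__main__":
    # Call the handler once per visible GPU; each call appends the device id
    # to device_info.txt located next to inference.py.
    for gpu_id in range(torch.cuda.device_count()):
        model_fn(model_dir="/opt/ml/model", context=FakeContext(gpu_id))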