
Commit 1aa7032

Add new unit and integration tests
1 parent 1fbd3f7 commit 1aa7032

25 files changed, +952 -74 lines changed

CHANGELOG.md

Lines changed: 6 additions & 0 deletions
@@ -1,5 +1,11 @@
 # Changelog
 
+## v2.0.18 (2023-10-10)
+
+### Bug Fixes and Other Changes
+
+* Fix integration tests and update Python versions
+
 ## v2.0.17 (2023-08-07)
 
 ### Bug Fixes and Other Changes

VERSION

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-2.0.18.dev0
+2.0.19.dev0

buildspec.yml

Lines changed: 4 additions & 2 deletions
@@ -55,10 +55,10 @@ phases:
       # build DLC GPU image because the base DLC image is too big and takes too long to build as part of the test
       - python3 setup.py sdist
       - $(aws ecr get-login --registry-ids $DLC_ACCOUNT --no-include-email --region $AWS_DEFAULT_REGION)
+      - create-key-pair
       - |
         for FRAMEWORK_VERSION in $FRAMEWORK_VERSIONS;
         do
-          create-key-pair;
           launch-ec2-instance --instance-type $instance_type --ami-name ami-03e3ef8c92fdb39ad;
           DLC_GPU_TAG="$FRAMEWORK_VERSION-dlc-gpu-$BUILD_ID";
           build_dir="test/container/$FRAMEWORK_VERSION";
@@ -71,8 +71,10 @@ phases:
           execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "setup.cfg";
           docker system prune --all --force;
           cleanup-gpu-instances;
-          cleanup-key-pairs;
+          rm ~/.instance_id;
+          rm ~/.ip_address;
         done
+      - cleanup-key-pairs;
 
       # run CPU sagemaker integration tests
       - |
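The restructuring above hoists key-pair handling out of the per-framework loop: create-key-pair now runs once before the loop and cleanup-key-pairs once after it, while only the per-instance state files (~/.instance_id, ~/.ip_address) are removed inside each iteration. A minimal Python sketch of that setup-once / tear-down-once lifecycle follows; every function name in it is a hypothetical placeholder, not one of the CLI helpers the buildspec invokes.

```python
# Minimal sketch of the lifecycle the buildspec change adopts; all names are
# hypothetical stand-ins, not the actual CI helper commands.

def create_key_pair():
    print("create key pair (once, before the loop)")

def cleanup_key_pairs():
    print("cleanup key pairs (once, after the loop)")

def launch_instance(version):
    print("launch EC2 instance for", version)
    return "instance-for-" + version

def cleanup_instance(instance):
    print("cleanup", instance)

def run_gpu_tests(framework_versions):
    create_key_pair()                       # shared setup, hoisted out of the loop
    try:
        for version in framework_versions:
            instance = launch_instance(version)
            try:
                print("run DLC GPU tests on", instance)
            finally:
                cleanup_instance(instance)  # per-iteration teardown stays inside
    finally:
        cleanup_key_pairs()                 # shared teardown, after the loop

run_gpu_tests(["fw-a", "fw-b"])
```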

test/integration/__init__.py

Lines changed: 116 additions & 51 deletions
@@ -19,71 +19,136 @@
 resources_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'resources'))
 mnist_path = os.path.join(resources_path, 'mnist')
 resnet18_path = os.path.join(resources_path, 'resnet18')
+mme_path = os.path.join(resources_path, 'mme')
+model_gpu_context_dir = os.path.join(resources_path, 'model_gpu_context')
 data_dir = os.path.join(mnist_path, 'data')
 training_dir = os.path.join(data_dir, 'training')
 cpu_sub_dir = 'model_cpu'
 gpu_sub_dir = 'model_gpu'
 eia_sub_dir = 'model_eia'
+inductor_sub_dir = 'model_inductor'
 code_sub_dir = 'code'
 default_sub_dir = 'default_model'
 default_sub_eia_dir = 'default_model_eia'
 default_sub_traced_resnet_dir = 'default_traced_resnet'
+resnet18_sub_dir = 'resnet18'
+traced_resnet18_sub_dir = 'traced_resnet18'
 
-model_cpu_dir = os.path.join(mnist_path, cpu_sub_dir)
-mnist_cpu_script = os.path.join(model_cpu_dir, code_sub_dir, 'mnist.py')
-model_cpu_tar = file_utils.make_tarfile(mnist_cpu_script,
-                                        os.path.join(model_cpu_dir, "torch_model.pth"),
-                                        model_cpu_dir,
-                                        script_path="code")
-
-model_cpu_1d_dir = os.path.join(model_cpu_dir, '1d')
-mnist_1d_script = os.path.join(model_cpu_1d_dir, code_sub_dir, 'mnist_1d.py')
-model_cpu_1d_tar = file_utils.make_tarfile(mnist_1d_script,
-                                           os.path.join(model_cpu_1d_dir, "torch_model.pth"),
-                                           model_cpu_1d_dir,
-                                           script_path="code")
-
-model_gpu_dir = os.path.join(mnist_path, gpu_sub_dir)
-mnist_gpu_script = os.path.join(model_gpu_dir, code_sub_dir, 'mnist.py')
-model_gpu_tar = file_utils.make_tarfile(mnist_gpu_script,
-                                        os.path.join(model_gpu_dir, "torch_model.pth"),
-                                        model_gpu_dir,
-                                        script_path="code")
-
-model_eia_dir = os.path.join(mnist_path, eia_sub_dir)
-mnist_eia_script = os.path.join(model_eia_dir, 'mnist.py')
-model_eia_tar = file_utils.make_tarfile(mnist_eia_script,
-                                        os.path.join(model_eia_dir, "torch_model.pth"),
-                                        model_eia_dir)
+mnist_cpu_dir = os.path.join(mnist_path, cpu_sub_dir)
+mnist_cpu_1d_dir = os.path.join(mnist_cpu_dir, '1d')
+mnist_gpu_dir = os.path.join(mnist_path, gpu_sub_dir)
+mnist_eia_dir = os.path.join(mnist_path, eia_sub_dir)
+mnist_inductor_dir = os.path.join(mnist_path, inductor_sub_dir)
+call_model_fn_once_dir = os.path.join(mnist_path, cpu_sub_dir)
+default_model_dir = os.path.join(resnet18_path, default_sub_dir)
+default_model_traced_resnet_dir = os.path.join(resnet18_path, default_sub_traced_resnet_dir)
+default_model_eia_dir = os.path.join(mnist_path, default_sub_eia_dir)
+resnet18_dir = os.path.join(mme_path, resnet18_sub_dir)
+traced_resnet18_dir = os.path.join(mme_path, traced_resnet18_sub_dir)
 
-call_model_fn_once_script = os.path.join(model_cpu_dir, code_sub_dir, 'call_model_fn_once.py')
-call_model_fn_once_tar = file_utils.make_tarfile(call_model_fn_once_script,
-                                                 os.path.join(model_cpu_dir, "torch_model.pth"),
-                                                 model_cpu_dir,
-                                                 "model_call_model_fn_once.tar.gz",
-                                                 script_path="code")
+all_models_info = {
+    'mnist_cpu':
+        {
+            'script_name': 'mnist.py',
+            'model': 'torch_model.pth',
+            'code_path': code_sub_dir,
+            'requirements': 'requirements.txt'
+        },
+    'mnist_cpu_1d':
+        {
+            'script_name': 'mnist_1d.py',
+            'model': 'torch_model.pth',
+            'code_path': code_sub_dir
+        },
+    'mnist_gpu':
+        {
+            'script_name': 'mnist.py',
+            'model': 'torch_model.pth',
+            'code_path': code_sub_dir
+        },
+    'mnist_eia':
+        {
+            'script_name': 'mnist.py',
+            'model': 'torch_model.pth'
+        },
+    'mnist_inductor':
+        {
+            'script_name': 'mnist.py',
+            'model': 'torch_model.pth',
+            'code_path': code_sub_dir
+        },
+    'call_model_fn_once':
+        {
+            'script_name': 'call_model_fn_once.py',
+            'model': 'torch_model.pth',
+            'filename': 'model_call_model_fn_once.tar.gz',
+            'code_path': code_sub_dir
+        },
+    'default_model':
+        {
+            'script_name': 'resnet18.py',
+            'model': 'model.pt',
+            'code_path': code_sub_dir
+        },
+    'default_model_traced_resnet':
+        {
+            'script_name': 'resnet18.py',
+            'model': 'traced_resnet18.pt',
+            'filename': 'traced_resnet18.tar.gz',
+            'code_path': code_sub_dir
+        },
+    'default_model_eia':
+        {
+            'script_name': 'mnist.py',
+            'model': 'model.pt',
+            'code_path': code_sub_dir
+        },
+    'resnet18':
+        {
+            'script_name': 'inference.py',
+            'model': 'model.pt',
+            'filename': 'resnet18.tar.gz',
+            'code_path': code_sub_dir
+        },
+    'traced_resnet18':
+        {
+            'script_name': 'inference.py',
+            'model': 'traced_resnet18.pt',
+            'filename': 'traced_resnet18.tar.gz',
+            'code_path': code_sub_dir
+        }
+}
 
-default_model_dir = os.path.join(resnet18_path, default_sub_dir)
-default_model_script = os.path.join(default_model_dir, code_sub_dir, "resnet18.py")
-default_model_tar = file_utils.make_tarfile(
-    default_model_script, os.path.join(default_model_dir, "model.pt"), default_model_dir, script_path="code"
-)
+for model_name in all_models_info.keys():
+    model_dir = getattr(__import__('integration'), model_name + '_dir')
+    model_info = all_models_info[model_name]
+    script_name = model_info['script_name']
+    model = model_info['model']
+    if 'filename' in model_info:
+        filename = model_info['filename']
+    else:
+        filename = 'model.tar.gz'
+    if 'code_path' in model_info:
+        code_path = model_info['code_path']
+        script_path = 'code'
+    else:
+        code_path = ''
+        script_path = None
+    if 'requirements' in model_info:
+        requirements = os.path.join(model_dir, code_path, 'requirements.txt')
+    else:
+        requirements = None
 
-default_traced_resnet_dir = os.path.join(resnet18_path, default_sub_traced_resnet_dir)
-default_traced_resnet_script = os.path.join(default_traced_resnet_dir, code_sub_dir, "resnet18.py")
-default_model_traced_resnet18_tar = file_utils.make_tarfile(
-    default_traced_resnet_script,
-    os.path.join(default_traced_resnet_dir, "traced_resnet18.pt"),
-    default_traced_resnet_dir,
-    filename="traced_resnet18.tar.gz",
-    script_path="code",
-)
+    model_script = os.path.join(model_dir, code_path, script_name)
+    model_tar = file_utils.make_tarfile(model_script,
+                                        os.path.join(model_dir, model),
+                                        model_dir,
+                                        filename=filename,
+                                        script_path=script_path,
+                                        requirements=requirements)
 
-default_model_eia_dir = os.path.join(mnist_path, default_sub_eia_dir)
-default_model_eia_script = os.path.join(default_model_eia_dir, code_sub_dir, "mnist.py")
-default_model_eia_tar = file_utils.make_tarfile(
-    default_model_eia_script, os.path.join(default_model_eia_dir, "model.pt"), default_model_eia_dir
-)
+    setattr(__import__('integration'), model_name + '_script', model_script)
+    setattr(__import__('integration'), model_name + '_tar', model_tar)
 
 ROLE = 'dummy/unused-role'
 DEFAULT_TIMEOUT = 20
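The rewritten test/integration/__init__.py replaces the hand-written make_tarfile calls with a single table (all_models_info) plus a loop that builds each tarball and publishes the corresponding <name>_script and <name>_tar values as module-level attributes via setattr(__import__('integration'), ...). A stripped-down sketch of that data-driven pattern is below; build_tar is a hypothetical stand-in for file_utils.make_tarfile and the table entries are illustrative only.

```python
import os
import sys

# Hypothetical stand-in for file_utils.make_tarfile: just returns the path the
# real helper would produce instead of building an archive.
def build_tar(script, model, base_dir, filename='model.tar.gz'):
    return os.path.join(base_dir, filename)

# One table of model metadata (illustrative entries only)...
models = {
    'mnist_cpu': {'script_name': 'mnist.py', 'model': 'torch_model.pth'},
    'resnet18': {'script_name': 'inference.py', 'model': 'model.pt',
                 'filename': 'resnet18.tar.gz'},
}

this_module = sys.modules[__name__]

# ...and one loop that derives every artifact and exposes it as a module-level
# name, mirroring the setattr(__import__('integration'), ...) calls above.
for name, info in models.items():
    script = os.path.join(name, 'code', info['script_name'])
    tar = build_tar(script, info['model'], name,
                    filename=info.get('filename', 'model.tar.gz'))
    setattr(this_module, name + '_script', script)
    setattr(this_module, name + '_tar', tar)

# After the loop the generated names exist as ordinary module globals.
print(mnist_cpu_tar, resnet18_script)  # noqa: F821 (names created dynamically)
```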

test/integration/local/test_serving.py renamed to test/integration/local/test_mnist_serving.py

Lines changed: 6 additions & 6 deletions
@@ -25,8 +25,8 @@
 from sagemaker_inference import content_types
 from torchvision import datasets, transforms
 
-from integration import training_dir, mnist_1d_script, model_cpu_tar, mnist_cpu_script, \
-    model_gpu_tar, mnist_gpu_script, model_cpu_1d_tar, call_model_fn_once_script, ROLE, \
+from integration import training_dir, mnist_cpu_1d_script, mnist_cpu_tar, mnist_cpu_script, \
+    mnist_gpu_tar, mnist_gpu_script, mnist_cpu_1d_tar, call_model_fn_once_script, ROLE, \
     call_model_fn_once_tar
 from utils import local_mode_utils
 
@@ -50,7 +50,7 @@ def fixture_test_loader():
 
 
 def test_serve_json(test_loader, use_gpu, image_uri, sagemaker_local_session, instance_type):
-    model_tar = model_gpu_tar if use_gpu else model_cpu_tar
+    model_tar = mnist_gpu_tar if use_gpu else mnist_cpu_tar
     mnist_script = mnist_gpu_script if use_gpu else mnist_cpu_script
     with _predictor(model_tar, mnist_script, image_uri, sagemaker_local_session,
                     instance_type) as predictor:
@@ -60,7 +60,7 @@ def test_serve_json(test_loader, use_gpu, image_uri, sagemaker_local_session, instance_type):
 
 
 def test_serve_npy(test_loader, use_gpu, image_uri, sagemaker_local_session, instance_type):
-    model_tar = model_gpu_tar if use_gpu else model_cpu_tar
+    model_tar = mnist_gpu_tar if use_gpu else mnist_cpu_tar
     mnist_script = mnist_gpu_script if use_gpu else mnist_cpu_script
     with _predictor(model_tar, mnist_script, image_uri, sagemaker_local_session,
                     instance_type) as predictor:
@@ -70,7 +70,7 @@ def test_serve_npy(test_loader, use_gpu, image_uri, sagemaker_local_session, instance_type):
 
 
 def test_serve_csv(test_loader, use_gpu, image_uri, sagemaker_local_session, instance_type):
-    with _predictor(model_cpu_1d_tar, mnist_1d_script, image_uri, sagemaker_local_session,
+    with _predictor(mnist_cpu_1d_tar, mnist_cpu_1d_script, image_uri, sagemaker_local_session,
                     instance_type) as predictor:
         for accept in (content_types.JSON, content_types.CSV, content_types.NPY):
             _assert_prediction_csv(predictor, test_loader, accept)
@@ -79,7 +79,7 @@ def test_serve_csv(test_loader, use_gpu, image_uri, sagemaker_local_session, instance_type):
 def test_serve_cpu_model_on_gpu(test_loader, image_uri, sagemaker_local_session, instance_type):
     if 'cpu' in image_uri:
         pytest.skip("Skipping because running on CPU instance")
-    with _predictor(model_cpu_1d_tar, mnist_1d_script, image_uri, sagemaker_local_session,
+    with _predictor(mnist_cpu_1d_tar, mnist_cpu_1d_script, image_uri, sagemaker_local_session,
                     instance_type) as predictor:
         _assert_prediction_npy_json(predictor, test_loader, content_types.NPY, content_types.JSON)
 
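Because the tarball and script variables are now generated by the loop in test/integration/__init__.py, the renamed test module imports the new mnist_* names in place of the old model_cpu_* and model_gpu_* ones. A purely illustrative sanity check (not part of this commit) that the generated attributes exist could look like this:

```python
# Hypothetical check, not part of the commit: confirm the integration package's
# generation loop exposed the names the renamed tests import.
import integration

for name in ('mnist_cpu_tar', 'mnist_cpu_script',
             'mnist_cpu_1d_tar', 'mnist_cpu_1d_script',
             'mnist_gpu_tar', 'mnist_gpu_script'):
    assert hasattr(integration, name), 'missing generated attribute: ' + name
```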
