diff --git a/buildspec.yml b/buildspec.yml
index 24c770b6..7858bee8 100644
--- a/buildspec.yml
+++ b/buildspec.yml
@@ -2,7 +2,7 @@ version: 0.2
 
 env:
   variables:
-    FRAMEWORK_VERSION: '1.5.0'
+    FRAMEWORK_VERSION: '1.6.0'
     EIA_FRAMEWORK_VERSION: '1.3.1'
     CPU_INSTANCE_TYPE: 'ml.c4.xlarge'
     GPU_INSTANCE_TYPE: 'ml.p2.8xlarge'
diff --git a/src/sagemaker_pytorch_serving_container/torchserve.py b/src/sagemaker_pytorch_serving_container/torchserve.py
index 58c770d0..95362352 100644
--- a/src/sagemaker_pytorch_serving_container/torchserve.py
+++ b/src/sagemaker_pytorch_serving_container/torchserve.py
@@ -43,6 +43,7 @@
 DEFAULT_TS_MODEL_DIRECTORY = os.path.join(os.getcwd(), ".sagemaker", "ts", "models")
 DEFAULT_TS_MODEL_NAME = "model"
 DEFAULT_TS_MODEL_SERIALIZED_FILE = "model.pth"
+DEFAULT_TS_CODE_DIR = "code"
 DEFAULT_HANDLER_SERVICE = "sagemaker_pytorch_serving_container.handler_service"
 
 ENABLE_MULTI_MODEL = os.getenv("SAGEMAKER_MULTI_MODEL", "false") == "true"
@@ -121,7 +122,7 @@ def _adapt_to_ts_format(handler_service):
         "--export-path",
         DEFAULT_TS_MODEL_DIRECTORY,
         "--extra-files",
-        os.path.join(environment.model_dir, environment.Environment().module_name + ".py"),
+        os.path.join(environment.model_dir, DEFAULT_TS_CODE_DIR, environment.Environment().module_name + ".py"),
         "--version",
         "1",
     ]
diff --git a/test/container/1.6.0/Dockerfile.dlc.cpu b/test/container/1.6.0/Dockerfile.dlc.cpu
index ea35cb94..44667c02 100644
--- a/test/container/1.6.0/Dockerfile.dlc.cpu
+++ b/test/container/1.6.0/Dockerfile.dlc.cpu
@@ -1,23 +1,6 @@
 ARG region
-FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.5.0-cpu-py3
-
-ARG TS_VERSION=0.1.1
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends software-properties-common \
-    && add-apt-repository ppa:openjdk-r/ppa \
-    && apt-get update \
-    && apt-get install -y --no-install-recommends openjdk-11-jdk
-
-RUN pip install torchserve==$TS_VERSION \
-    && pip install torch-model-archiver==$TS_VERSION
-
-RUN pip uninstall torch \
-    && pip uninstall torchvision \
-    && pip install torch=1.6.0 \
-    && pip install torchvision=0.7.0
+FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.6.0-cpu-py3
 
 COPY dist/sagemaker_pytorch_inference-*.tar.gz /sagemaker_pytorch_inference.tar.gz
 RUN pip install --upgrade --no-cache-dir /sagemaker_pytorch_inference.tar.gz && \
     rm /sagemaker_pytorch_inference.tar.gz
-
-CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]
diff --git a/test/container/1.6.0/Dockerfile.dlc.gpu b/test/container/1.6.0/Dockerfile.dlc.gpu
index ea35cb94..e48fc985 100644
--- a/test/container/1.6.0/Dockerfile.dlc.gpu
+++ b/test/container/1.6.0/Dockerfile.dlc.gpu
@@ -1,23 +1,6 @@
 ARG region
-FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.5.0-cpu-py3
-
-ARG TS_VERSION=0.1.1
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends software-properties-common \
-    && add-apt-repository ppa:openjdk-r/ppa \
-    && apt-get update \
-    && apt-get install -y --no-install-recommends openjdk-11-jdk
-
-RUN pip install torchserve==$TS_VERSION \
-    && pip install torch-model-archiver==$TS_VERSION
-
-RUN pip uninstall torch \
-    && pip uninstall torchvision \
-    && pip install torch=1.6.0 \
-    && pip install torchvision=0.7.0
+FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.6.0-gpu-py3
 
 COPY dist/sagemaker_pytorch_inference-*.tar.gz /sagemaker_pytorch_inference.tar.gz
 RUN pip install --upgrade --no-cache-dir /sagemaker_pytorch_inference.tar.gz && \
     rm /sagemaker_pytorch_inference.tar.gz
-
-CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]
diff --git a/test/integration/__init__.py b/test/integration/__init__.py
index d3109e80..0f10294d 100644
--- a/test/integration/__init__.py
+++ b/test/integration/__init__.py
@@ -23,24 +23,28 @@
 cpu_sub_dir = 'model_cpu'
 gpu_sub_dir = 'model_gpu'
 eia_sub_dir = 'model_eia'
+code_sub_dir = 'code'
 
 model_cpu_dir = os.path.join(mnist_path, cpu_sub_dir)
-mnist_cpu_script = os.path.join(model_cpu_dir, 'mnist.py')
+mnist_cpu_script = os.path.join(model_cpu_dir, code_sub_dir, 'mnist.py')
 model_cpu_tar = file_utils.make_tarfile(mnist_cpu_script,
                                         os.path.join(model_cpu_dir, "model.pth"),
-                                        model_cpu_dir)
+                                        model_cpu_dir,
+                                        script_path="code")
 
 model_cpu_1d_dir = os.path.join(model_cpu_dir, '1d')
-mnist_1d_script = os.path.join(model_cpu_1d_dir, 'mnist_1d.py')
+mnist_1d_script = os.path.join(model_cpu_1d_dir, code_sub_dir, 'mnist_1d.py')
 model_cpu_1d_tar = file_utils.make_tarfile(mnist_1d_script,
                                            os.path.join(model_cpu_1d_dir, "model.pth"),
-                                           model_cpu_1d_dir)
+                                           model_cpu_1d_dir,
+                                           script_path="code")
 
 model_gpu_dir = os.path.join(mnist_path, gpu_sub_dir)
-mnist_gpu_script = os.path.join(model_gpu_dir, 'mnist.py')
+mnist_gpu_script = os.path.join(model_gpu_dir, code_sub_dir, 'mnist.py')
 model_gpu_tar = file_utils.make_tarfile(mnist_gpu_script,
                                         os.path.join(model_gpu_dir, "model.pth"),
-                                        model_gpu_dir)
+                                        model_gpu_dir,
+                                        script_path="code")
 
 model_eia_dir = os.path.join(mnist_path, eia_sub_dir)
 mnist_eia_script = os.path.join(model_eia_dir, 'mnist.py')
@@ -48,11 +52,12 @@
                                         os.path.join(model_eia_dir, "model.pth"),
                                         model_eia_dir)
 
-call_model_fn_once_script = os.path.join(model_cpu_dir, 'call_model_fn_once.py')
+call_model_fn_once_script = os.path.join(model_cpu_dir, code_sub_dir, 'call_model_fn_once.py')
 call_model_fn_once_tar = file_utils.make_tarfile(call_model_fn_once_script,
                                                  os.path.join(model_cpu_dir, "model.pth"),
                                                  model_cpu_dir,
-                                                 "model_call_model_fn_once.tar.gz")
+                                                 "model_call_model_fn_once.tar.gz",
+                                                 script_path="code")
 
 ROLE = 'dummy/unused-role'
 DEFAULT_TIMEOUT = 20
diff --git a/test/resources/mnist/model_cpu/1d/mnist_1d.py b/test/resources/mnist/model_cpu/1d/code/mnist_1d.py
similarity index 100%
rename from test/resources/mnist/model_cpu/1d/mnist_1d.py
rename to test/resources/mnist/model_cpu/1d/code/mnist_1d.py
diff --git a/test/resources/mnist/model_cpu/call_model_fn_once.py b/test/resources/mnist/model_cpu/code/call_model_fn_once.py
similarity index 100%
rename from test/resources/mnist/model_cpu/call_model_fn_once.py
rename to test/resources/mnist/model_cpu/code/call_model_fn_once.py
diff --git a/test/resources/mnist/model_cpu/mnist.py b/test/resources/mnist/model_cpu/code/mnist.py
similarity index 100%
rename from test/resources/mnist/model_cpu/mnist.py
rename to test/resources/mnist/model_cpu/code/mnist.py
diff --git a/test/resources/mnist/model_gpu/mnist.py b/test/resources/mnist/model_gpu/code/mnist.py
similarity index 100%
rename from test/resources/mnist/model_gpu/mnist.py
rename to test/resources/mnist/model_gpu/code/mnist.py
diff --git a/test/unit/test_model_server.py b/test/unit/test_model_server.py
index 552a691d..108cd3c3 100644
--- a/test/unit/test_model_server.py
+++ b/test/unit/test_model_server.py
@@ -150,7 +150,9 @@ def test_adapt_to_ts_format(path_exists, make_dir, subprocess_check_call, set_py
         "--export-path",
         torchserve.DEFAULT_TS_MODEL_DIRECTORY,
         "--extra-files",
-        os.path.join(environment.model_dir, environment.Environment().module_name + ".py"),
+        os.path.join(environment.model_dir,
+                     torchserve.DEFAULT_TS_CODE_DIR,
+                     environment.Environment().module_name + ".py"),
         "--version",
         "1",
     ]
diff --git a/test/utils/file_utils.py b/test/utils/file_utils.py
index f81a20b5..8cc3771d 100644
--- a/test/utils/file_utils.py
+++ b/test/utils/file_utils.py
@@ -16,9 +16,12 @@
 import tarfile
 
 
-def make_tarfile(script, model, output_path, filename="model.tar.gz"):
+def make_tarfile(script, model, output_path, filename="model.tar.gz", script_path=None):
     output_filename = os.path.join(output_path, filename)
     with tarfile.open(output_filename, "w:gz") as tar:
-        tar.add(script, arcname=os.path.basename(script))
+        if(script_path):
+            tar.add(script, arcname=os.path.join(script_path, os.path.basename(script)))
+        else:
+            tar.add(script, arcname=os.path.basename(script))
         tar.add(model, arcname=os.path.basename(model))
     return output_filename
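
For reference, a minimal sketch of how the extended test helper is meant to be called (illustrative only, not part of the patch; the paths reuse files referenced in this diff, and the `file_utils` import path is an assumption that may differ depending on where the snippet is run from):

    from test.utils import file_utils  # assumed import path

    # With script_path="code", the entry-point script is stored under a code/
    # prefix inside the archive, matching the <model_dir>/code/<module>.py
    # layout that torchserve.py now passes to --extra-files.
    tar_path = file_utils.make_tarfile(
        script="test/resources/mnist/model_cpu/code/mnist.py",
        model="test/resources/mnist/model_cpu/model.pth",
        output_path="test/resources/mnist/model_cpu",
        script_path="code",
    )
    # Resulting model.tar.gz contents:
    #   code/mnist.py
    #   model.pth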