Skip to content

infra: add PyTorch + custom model bucket batch transform integ test #1407

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged — 2 commits merged on Apr 14, 2020.
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 18 additions & 12 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
import tests.integ
from botocore.config import Config

from sagemaker import Session
from sagemaker import Session, utils
from sagemaker.chainer import Chainer
from sagemaker.local import LocalSession
from sagemaker.mxnet import MXNet
Expand All @@ -30,6 +30,7 @@
from sagemaker.tensorflow.estimator import TensorFlow

DEFAULT_REGION = "us-west-2"
CUSTOM_BUCKET_NAME_PREFIX = "sagemaker-custom-bucket"

NO_M4_REGIONS = [
"eu-west-3",
Expand Down Expand Up @@ -89,16 +90,16 @@ def sagemaker_runtime_config(request):


@pytest.fixture(scope="session")
def boto_config(request):
def boto_session(request):
config = request.config.getoption("--boto-config")
return json.loads(config) if config else None
if config:
return boto3.Session(**json.loads(config))
else:
return boto3.Session(region_name=DEFAULT_REGION)


@pytest.fixture(scope="session")
def sagemaker_session(sagemaker_client_config, sagemaker_runtime_config, boto_config):
boto_session = (
boto3.Session(**boto_config) if boto_config else boto3.Session(region_name=DEFAULT_REGION)
)
def sagemaker_session(sagemaker_client_config, sagemaker_runtime_config, boto_session):
sagemaker_client_config.setdefault("config", Config(retries=dict(max_attempts=10)))
sagemaker_client = (
boto_session.client("sagemaker", **sagemaker_client_config)
Expand All @@ -119,14 +120,19 @@ def sagemaker_session(sagemaker_client_config, sagemaker_runtime_config, boto_co


@pytest.fixture(scope="session")
def sagemaker_local_session(boto_config):
if boto_config:
boto_session = boto3.Session(**boto_config)
else:
boto_session = boto3.Session(region_name=DEFAULT_REGION)
def sagemaker_local_session(boto_session):
return LocalSession(boto_session=boto_session)


@pytest.fixture(scope="module")
def custom_bucket_name(boto_session):
    """Deterministic non-default bucket name for integ tests.

    Builds "<prefix>-<region>-<account>" so every test module in the same
    AWS account and region resolves to the same custom bucket.
    """
    region = boto_session.region_name
    # Resolve the account via the regional STS endpoint (avoids the global
    # endpoint, which is not reachable from every partition/region).
    sts = boto_session.client(
        "sts", region_name=region, endpoint_url=utils.sts_regional_endpoint(region)
    )
    account = sts.get_caller_identity()["Account"]
    return "-".join([CUSTOM_BUCKET_NAME_PREFIX, region, account])


@pytest.fixture(scope="module", params=["4.0", "4.0.0", "4.1", "4.1.0", "5.0", "5.0.0"])
def chainer_version(request):
    # Parametrizes dependent tests over every supported Chainer framework
    # version string, in both the short "X.Y" and full "X.Y.Z" forms.
    return request.param
Expand Down
Binary file added tests/data/pytorch_mnist/model.tar.gz
Binary file not shown.
Binary file added tests/data/pytorch_mnist/transform/data.npy
Binary file not shown.
30 changes: 11 additions & 19 deletions tests/integ/test_processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@

import os

import boto3
import pytest
from botocore.config import Config
from sagemaker import Session
Expand All @@ -28,22 +27,16 @@
ProcessingJob,
)
from sagemaker.sklearn.processing import SKLearnProcessor
from sagemaker.utils import sts_regional_endpoint
from tests.integ import DATA_DIR
from tests.integ.kms_utils import get_or_create_kms_key

ROLE = "SageMakerRole"
DEFAULT_REGION = "us-west-2"
CUSTOM_BUCKET_PATH_PREFIX = "sagemaker-custom-bucket"


@pytest.fixture(scope="module")
def sagemaker_session_with_custom_bucket(
boto_config, sagemaker_client_config, sagemaker_runtime_config
boto_session, sagemaker_client_config, sagemaker_runtime_config, custom_bucket_name
):
boto_session = (
boto3.Session(**boto_config) if boto_config else boto3.Session(region_name=DEFAULT_REGION)
)
sagemaker_client_config.setdefault("config", Config(retries=dict(max_attempts=10)))
sagemaker_client = (
boto_session.client("sagemaker", **sagemaker_client_config)
Expand All @@ -56,17 +49,11 @@ def sagemaker_session_with_custom_bucket(
else None
)

region = boto_session.region_name
account = boto_session.client(
"sts", region_name=region, endpoint_url=sts_regional_endpoint(region)
).get_caller_identity()["Account"]
custom_default_bucket = "{}-{}-{}".format(CUSTOM_BUCKET_PATH_PREFIX, region, account)

return Session(
boto_session=boto_session,
sagemaker_client=sagemaker_client,
sagemaker_runtime_client=runtime_client,
default_bucket=custom_default_bucket,
default_bucket=custom_bucket_name,
)


Expand Down Expand Up @@ -221,6 +208,7 @@ def test_sklearn_with_customizations(

def test_sklearn_with_custom_default_bucket(
sagemaker_session_with_custom_bucket,
custom_bucket_name,
image_uri,
sklearn_full_version,
cpu_instance_type,
Expand Down Expand Up @@ -272,10 +260,10 @@ def test_sklearn_with_custom_default_bucket(
job_description = sklearn_processor.latest_job.describe()

assert job_description["ProcessingInputs"][0]["InputName"] == "dummy_input"
assert CUSTOM_BUCKET_PATH_PREFIX in job_description["ProcessingInputs"][0]["S3Input"]["S3Uri"]
assert custom_bucket_name in job_description["ProcessingInputs"][0]["S3Input"]["S3Uri"]

assert job_description["ProcessingInputs"][1]["InputName"] == "code"
assert CUSTOM_BUCKET_PATH_PREFIX in job_description["ProcessingInputs"][1]["S3Input"]["S3Uri"]
assert custom_bucket_name in job_description["ProcessingInputs"][1]["S3Input"]["S3Uri"]

assert job_description["ProcessingJobName"].startswith("test-sklearn-with-customizations")

Expand Down Expand Up @@ -583,7 +571,11 @@ def test_processor(sagemaker_session, image_uri, cpu_instance_type, output_kms_k


def test_processor_with_custom_bucket(
sagemaker_session_with_custom_bucket, image_uri, cpu_instance_type, output_kms_key
sagemaker_session_with_custom_bucket,
custom_bucket_name,
image_uri,
cpu_instance_type,
output_kms_key,
):
script_path = os.path.join(DATA_DIR, "dummy_script.py")

Expand Down Expand Up @@ -624,7 +616,7 @@ def test_processor_with_custom_bucket(
job_description = processor.latest_job.describe()

assert job_description["ProcessingInputs"][0]["InputName"] == "code"
assert CUSTOM_BUCKET_PATH_PREFIX in job_description["ProcessingInputs"][0]["S3Input"]["S3Uri"]
assert custom_bucket_name in job_description["ProcessingInputs"][0]["S3Input"]["S3Uri"]

assert job_description["ProcessingJobName"].startswith("test-processor")

Expand Down
8 changes: 2 additions & 6 deletions tests/integ/test_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,16 +17,12 @@

from sagemaker import Session

DEFAULT_REGION = "us-west-2"
CUSTOM_BUCKET_NAME = "this-bucket-should-not-exist"


def test_sagemaker_session_does_not_create_bucket_on_init(
sagemaker_client_config, sagemaker_runtime_config, boto_config
sagemaker_client_config, sagemaker_runtime_config, boto_session
):
boto_session = (
boto3.Session(**boto_config) if boto_config else boto3.Session(region_name=DEFAULT_REGION)
)
sagemaker_client_config.setdefault("config", Config(retries=dict(max_attempts=10)))
sagemaker_client = (
boto_session.client("sagemaker", **sagemaker_client_config)
Expand All @@ -46,5 +42,5 @@ def test_sagemaker_session_does_not_create_bucket_on_init(
default_bucket=CUSTOM_BUCKET_NAME,
)

s3 = boto3.resource("s3", region_name=DEFAULT_REGION)
s3 = boto3.resource("s3", region_name=boto_session.region_name)
assert s3.Bucket(CUSTOM_BUCKET_NAME).creation_date is None
62 changes: 31 additions & 31 deletions tests/integ/test_transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,15 @@

import pytest

from sagemaker import KMeans
from sagemaker import KMeans, s3
from sagemaker.mxnet import MXNet
from sagemaker.pytorch import PyTorchModel
from sagemaker.transformer import Transformer
from sagemaker.estimator import Estimator
from sagemaker.utils import unique_name_from_base
from tests.integ import (
DATA_DIR,
PYTHON_VERSION,
TRAINING_DEFAULT_TIMEOUT_MINUTES,
TRANSFORM_DEFAULT_TIMEOUT_MINUTES,
)
Expand Down Expand Up @@ -144,48 +146,43 @@ def test_attach_transform_kmeans(sagemaker_session, cpu_instance_type):
attached_transformer.wait()


def test_transform_mxnet_vpc(sagemaker_session, mxnet_full_version, cpu_instance_type):
data_path = os.path.join(DATA_DIR, "mxnet_mnist")
script_path = os.path.join(data_path, "mnist.py")
def test_transform_pytorch_vpc_custom_model_bucket(
sagemaker_session, pytorch_full_version, cpu_instance_type, custom_bucket_name
):
data_dir = os.path.join(DATA_DIR, "pytorch_mnist")

ec2_client = sagemaker_session.boto_session.client("ec2")
subnet_ids, security_group_id = get_or_create_vpc_resources(ec2_client)

mx = MXNet(
entry_point=script_path,
role="SageMakerRole",
train_instance_count=1,
train_instance_type=cpu_instance_type,
sagemaker_session=sagemaker_session,
framework_version=mxnet_full_version,
subnets=subnet_ids,
security_group_ids=[security_group_id],
model_data = sagemaker_session.upload_data(
path=os.path.join(data_dir, "model.tar.gz"),
bucket=custom_bucket_name,
key_prefix="integ-test-data/pytorch_mnist/model",
)

train_input = mx.sagemaker_session.upload_data(
path=os.path.join(data_path, "train"), key_prefix="integ-test-data/mxnet_mnist/train"
)
test_input = mx.sagemaker_session.upload_data(
path=os.path.join(data_path, "test"), key_prefix="integ-test-data/mxnet_mnist/test"
model = PyTorchModel(
model_data=model_data,
entry_point=os.path.join(data_dir, "mnist.py"),
role="SageMakerRole",
framework_version=pytorch_full_version,
py_version=PYTHON_VERSION,
sagemaker_session=sagemaker_session,
vpc_config={"Subnets": subnet_ids, "SecurityGroupIds": [security_group_id]},
code_location="s3://{}".format(custom_bucket_name),
)
job_name = unique_name_from_base("test-mxnet-vpc")

with timeout(minutes=TRAINING_DEFAULT_TIMEOUT_MINUTES):
mx.fit({"train": train_input, "test": test_input}, job_name=job_name)

job_desc = sagemaker_session.sagemaker_client.describe_training_job(
TrainingJobName=mx.latest_training_job.name
transform_input = sagemaker_session.upload_data(
path=os.path.join(data_dir, "transform", "data.npy"),
key_prefix="integ-test-data/pytorch_mnist/transform",
)
assert set(subnet_ids) == set(job_desc["VpcConfig"]["Subnets"])
assert [security_group_id] == job_desc["VpcConfig"]["SecurityGroupIds"]

transform_input_path = os.path.join(data_path, "transform", "data.csv")
transform_input_key_prefix = "integ-test-data/mxnet_mnist/transform"
transform_input = mx.sagemaker_session.upload_data(
path=transform_input_path, key_prefix=transform_input_key_prefix
transformer = model.transformer(1, cpu_instance_type)
transformer.transform(
transform_input,
content_type="application/x-npy",
job_name=unique_name_from_base("test-transform-vpc"),
)

transformer = _create_transformer_and_transform_job(mx, transform_input, cpu_instance_type)
with timeout_and_delete_model_with_transformer(
transformer, sagemaker_session, minutes=TRANSFORM_DEFAULT_TIMEOUT_MINUTES
):
Expand All @@ -196,6 +193,9 @@ def test_transform_mxnet_vpc(sagemaker_session, mxnet_full_version, cpu_instance
assert set(subnet_ids) == set(model_desc["VpcConfig"]["Subnets"])
assert [security_group_id] == model_desc["VpcConfig"]["SecurityGroupIds"]

model_bucket, _ = s3.parse_s3_url(model_desc["PrimaryContainer"]["ModelDataUrl"])
assert custom_bucket_name == model_bucket


def test_transform_mxnet_tags(sagemaker_session, mxnet_full_version, cpu_instance_type):
data_path = os.path.join(DATA_DIR, "mxnet_mnist")
Expand Down