Skip to content

[fix] disable modelbuilder mlflow local integ tests #4576

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Apr 12, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
PYTORCH_SQUEEZENET_RESOURCE_DIR,
PYTORCH_SQUEEZENET_MLFLOW_RESOURCE_DIR,
SERVE_SAGEMAKER_ENDPOINT_TIMEOUT,
SERVE_LOCAL_CONTAINER_TIMEOUT,
# SERVE_LOCAL_CONTAINER_TIMEOUT,
PYTHON_VERSION_IS_NOT_310,
)
from tests.integ.timeout import timeout
Expand Down Expand Up @@ -128,36 +128,36 @@ def model_builder(request):
return request.getfixturevalue(request.param)


@pytest.mark.skipif(
    PYTHON_VERSION_IS_NOT_310,
    reason="The goal of these test are to test the serving components of our feature",
)
@pytest.mark.flaky(reruns=3, reruns_delay=2)
@pytest.mark.parametrize("model_builder", ["model_builder_local_builder"], indirect=True)
def test_happy_mlflow_pytorch_local_container_with_torch_serve(
    sagemaker_session, model_builder, test_image
):
    """Happy path: build an MLflow PyTorch model, deploy it in LOCAL_CONTAINER
    mode, and run a single prediction against the local endpoint.

    The local container is always killed in ``finally``, and any exception
    raised during deploy/predict is re-raised afterwards as a test failure.
    """
    logger.info("Running in LOCAL_CONTAINER mode...")
    caught_ex = None

    model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)

    with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
        try:
            logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
            predictor = model.deploy()
            logger.info("Local container successfully deployed.")
            predictor.predict(test_image)
        except Exception as e:  # broad on purpose: any failure should fail the test after cleanup
            logger.exception("test failed")
            caught_ex = e
        finally:
            # Always tear down the local container, even when deploy/predict failed.
            if model.modes[str(Mode.LOCAL_CONTAINER)].container:
                model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
    if caught_ex:
        # Re-raise with chaining instead of `assert False, msg`: the original
        # traceback is preserved, and the failure survives `python -O`.
        raise AssertionError(
            f"{caught_ex} was thrown when running pytorch squeezenet local container test"
        ) from caught_ex
# @pytest.mark.skipif(
# PYTHON_VERSION_IS_NOT_310,
# reason="The goal of these test are to test the serving components of our feature",
# )
# @pytest.mark.flaky(reruns=3, reruns_delay=2)
# @pytest.mark.parametrize("model_builder", ["model_builder_local_builder"], indirect=True)
# def test_happy_mlflow_pytorch_local_container_with_torch_serve(
# sagemaker_session, model_builder, test_image
# ):
# logger.info("Running in LOCAL_CONTAINER mode...")
# caught_ex = None
#
# model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)
#
# with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
# try:
# logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
# predictor = model.deploy()
# logger.info("Local container successfully deployed.")
# predictor.predict(test_image)
# except Exception as e:
# logger.exception("test failed")
# caught_ex = e
# finally:
# if model.modes[str(Mode.LOCAL_CONTAINER)].container:
# model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
# if caught_ex:
# assert (
# False
# ), f"{caught_ex} was thrown when running pytorch squeezenet local container test"


@pytest.mark.skipif(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
from tests.integ.sagemaker.serve.constants import (
XGBOOST_MLFLOW_RESOURCE_DIR,
SERVE_SAGEMAKER_ENDPOINT_TIMEOUT,
SERVE_LOCAL_CONTAINER_TIMEOUT,
# SERVE_LOCAL_CONTAINER_TIMEOUT,
PYTHON_VERSION_IS_NOT_310,
)
from tests.integ.timeout import timeout
Expand Down Expand Up @@ -108,37 +108,37 @@ def model_builder(request):
return request.getfixturevalue(request.param)


@pytest.mark.skipif(
    PYTHON_VERSION_IS_NOT_310,
    reason="The goal of these test are to test the serving components of our feature",
)
@pytest.mark.flaky(reruns=3, reruns_delay=2)
@pytest.mark.parametrize("model_builder", ["model_builder_local_builder"], indirect=True)
def test_happy_mlflow_xgboost_local_container_with_torch_serve(
    sagemaker_session, model_builder, test_data
):
    """Happy path: build an MLflow XGBoost model, deploy it in LOCAL_CONTAINER
    mode, and run a single prediction against the local endpoint.

    The local container is always killed in ``finally``, and any exception
    raised during deploy/predict is re-raised afterwards as a test failure.
    """
    logger.info("Running in LOCAL_CONTAINER mode...")
    caught_ex = None

    model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)
    test_x, _ = test_data

    with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
        try:
            logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
            predictor = model.deploy()
            logger.info("Local container successfully deployed.")
            predictor.predict(test_x)
        except Exception as e:  # broad on purpose: any failure should fail the test after cleanup
            logger.exception("test failed")
            caught_ex = e
        finally:
            # Always tear down the local container, even when deploy/predict failed.
            if model.modes[str(Mode.LOCAL_CONTAINER)].container:
                model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
    if caught_ex:
        # Bug fix: the original message said "pytorch squeezenet" — a copy-paste
        # from the sibling test — but this is the XGBoost test. Also re-raise
        # with chaining instead of `assert False, msg` so the original traceback
        # is preserved and the failure survives `python -O`.
        raise AssertionError(
            f"{caught_ex} was thrown when running mlflow xgboost local container test"
        ) from caught_ex
# @pytest.mark.skipif(
# PYTHON_VERSION_IS_NOT_310,
# reason="The goal of these test are to test the serving components of our feature",
# )
# @pytest.mark.flaky(reruns=3, reruns_delay=2)
# @pytest.mark.parametrize("model_builder", ["model_builder_local_builder"], indirect=True)
# def test_happy_mlflow_xgboost_local_container_with_torch_serve(
# sagemaker_session, model_builder, test_data
# ):
# logger.info("Running in LOCAL_CONTAINER mode...")
# caught_ex = None
#
# model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session)
# test_x, _ = test_data
#
# with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT):
# try:
# logger.info("Deploying and predicting in LOCAL_CONTAINER mode...")
# predictor = model.deploy()
# logger.info("Local container successfully deployed.")
# predictor.predict(test_x)
# except Exception as e:
# logger.exception("test failed")
# caught_ex = e
# finally:
# if model.modes[str(Mode.LOCAL_CONTAINER)].container:
# model.modes[str(Mode.LOCAL_CONTAINER)].container.kill()
# if caught_ex:
# assert (
# False
# ), f"{caught_ex} was thrown when running pytorch squeezenet local container test"


@pytest.mark.skipif(
Expand Down