Skip to content

change: enable wrong-import-position pylint check #907

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Jul 9, 2019
1 change: 0 additions & 1 deletion .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,6 @@ disable=
simplifiable-if-expression, # TODO: Simplify expressions
too-many-public-methods, # TODO: Resolve
ungrouped-imports, # TODO: Group imports
wrong-import-position, # TODO: Correct import positions
consider-using-ternary, # TODO: Consider ternary expressions
chained-comparison, # TODO: Simplify chained comparison between operands
simplifiable-if-statement, # TODO: Simplify ifs
Expand Down
3 changes: 2 additions & 1 deletion src/sagemaker/amazon/record_pb2.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

11 changes: 5 additions & 6 deletions src/sagemaker/cli/tensorflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@
# language governing permissions and limitations under the License.
from __future__ import absolute_import

from sagemaker.tensorflow import estimator
from sagemaker.tensorflow import model

from sagemaker.cli.common import HostCommand, TrainCommand


Expand All @@ -30,9 +33,7 @@ def __init__(self, args):
self.evaluation_steps = args.evaluation_steps

def create_estimator(self):
from sagemaker.tensorflow import TensorFlow

return TensorFlow(
return estimator.TensorFlow(
training_steps=self.training_steps,
evaluation_steps=self.evaluation_steps,
py_version=self.python,
Expand All @@ -47,9 +48,7 @@ def create_estimator(self):

class TensorFlowHostCommand(HostCommand):
def create_model(self, model_url):
from sagemaker.tensorflow.model import TensorFlowModel

return TensorFlowModel(
return model.TensorFlowModel(
model_data=model_url,
role=self.role_name,
entry_point=self.script,
Expand Down
6 changes: 4 additions & 2 deletions src/sagemaker/tensorflow/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,5 +19,7 @@
# classes for tensorflow serving. Currently tensorflow_serving_api can only be pip-installed for python 2.
sys.path.append(os.path.dirname(__file__))

from sagemaker.tensorflow.estimator import TensorFlow # noqa: E402, F401
from sagemaker.tensorflow.model import TensorFlowModel, TensorFlowPredictor # noqa: E402, F401
from sagemaker.tensorflow import ( # noqa: E402,F401 # pylint: disable=wrong-import-position
estimator,
)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why not:

from sagemaker.tensorflow import model, estimator

?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That'll work as well, but shouldn't we aim to be more restrictive in our imports?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As discussed, per the Google Python Style Guide, we should aim to import modules, not classes. Good callout!
Also per the Google Style Guide, I went ahead and split the import across two lines.

from sagemaker.tensorflow import model # noqa: E402,F401 # pylint: disable=wrong-import-position
12 changes: 6 additions & 6 deletions tests/component/test_tf_estimator.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

import pytest
from mock import Mock
from sagemaker.tensorflow import TensorFlow
from sagemaker.tensorflow import estimator


SCRIPT = "resnet_cifar_10.py"
Expand Down Expand Up @@ -53,7 +53,7 @@ def sagemaker_session():

# Test that we pass all necessary fields from estimator to the session when we call deploy
def test_deploy(sagemaker_session, tf_version):
estimator = TensorFlow(
tensorflow_estimator = estimator.TensorFlow(
entry_point=SCRIPT,
source_dir=SOURCE_DIR,
role=ROLE,
Expand All @@ -64,13 +64,13 @@ def test_deploy(sagemaker_session, tf_version):
base_job_name="test-cifar",
)

estimator.fit("s3://mybucket/train")
print("job succeeded: {}".format(estimator.latest_training_job.name))
tensorflow_estimator.fit("s3://mybucket/train")
print("job succeeded: {}".format(tensorflow_estimator.latest_training_job.name))

estimator.deploy(initial_instance_count=1, instance_type=INSTANCE_TYPE_CPU)
tensorflow_estimator.deploy(initial_instance_count=1, instance_type=INSTANCE_TYPE_CPU)
image = IMAGE_URI_FORMAT_STRING.format(REGION, CPU_IMAGE_NAME, tf_version, "cpu", "py2")
sagemaker_session.create_model.assert_called_with(
estimator._current_job_name,
tensorflow_estimator._current_job_name,
ROLE,
{
"Environment": {
Expand Down
12 changes: 6 additions & 6 deletions tests/integ/test_horovod.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@

import sagemaker.utils
import tests.integ as integ
from sagemaker.tensorflow import TensorFlow
from sagemaker.tensorflow import estimator
from tests.integ import test_region, timeout, HOSTING_NO_P3_REGIONS

horovod_dir = os.path.join(os.path.dirname(__file__), "..", "data", "horovod")
Expand All @@ -47,7 +47,7 @@ def instance_type(request):
@pytest.mark.canary_quick
def test_horovod(sagemaker_session, instance_type, tmpdir):
job_name = sagemaker.utils.unique_name_from_base("tf-horovod")
estimator = TensorFlow(
tensorflow_estimator = estimator.TensorFlow(
entry_point=os.path.join(horovod_dir, "test_hvd_basic.py"),
role="SageMakerRole",
train_instance_count=2,
Expand All @@ -60,10 +60,10 @@ def test_horovod(sagemaker_session, instance_type, tmpdir):
)

with timeout.timeout(minutes=integ.TRAINING_DEFAULT_TIMEOUT_MINUTES):
estimator.fit(job_name=job_name)
tensorflow_estimator.fit(job_name=job_name)

tmp = str(tmpdir)
extract_files_from_s3(estimator.model_data, tmp)
extract_files_from_s3(tensorflow_estimator.model_data, tmp)

for rank in range(2):
assert read_json("rank-%s" % rank, tmp)["rank"] == rank
Expand All @@ -74,7 +74,7 @@ def test_horovod(sagemaker_session, instance_type, tmpdir):
def test_horovod_local_mode(sagemaker_local_session, instances, processes, tmpdir):
output_path = "file://%s" % tmpdir
job_name = sagemaker.utils.unique_name_from_base("tf-horovod")
estimator = TensorFlow(
tensorflow_estimator = estimator.TensorFlow(
entry_point=os.path.join(horovod_dir, "test_hvd_basic.py"),
role="SageMakerRole",
train_instance_count=2,
Expand All @@ -88,7 +88,7 @@ def test_horovod_local_mode(sagemaker_local_session, instances, processes, tmpdi
)

with timeout.timeout(minutes=integ.TRAINING_DEFAULT_TIMEOUT_MINUTES):
estimator.fit(job_name=job_name)
tensorflow_estimator.fit(job_name=job_name)

tmp = str(tmpdir)
extract_files(output_path.replace("file://", ""), tmp)
Expand Down
52 changes: 26 additions & 26 deletions tests/integ/test_local_mode.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@

from sagemaker.local import LocalSession, LocalSagemakerRuntimeClient, LocalSagemakerClient
from sagemaker.mxnet import MXNet
from sagemaker.tensorflow import TensorFlow
from sagemaker.tensorflow import estimator

# endpoint tests all use the same port, so we use this lock to prevent concurrent execution
LOCK_PATH = os.path.join(tempfile.gettempdir(), "sagemaker_test_local_mode_lock")
Expand Down Expand Up @@ -90,7 +90,7 @@ def test_tf_local_mode(sagemaker_local_session):
with stopit.ThreadingTimeout(5 * 60, swallow_exc=False):
script_path = os.path.join(DATA_DIR, "iris", "iris-dnn-classifier.py")

estimator = TensorFlow(
tensorflow_estimator = estimator.TensorFlow(
entry_point=script_path,
role="SageMakerRole",
framework_version="1.12",
Expand All @@ -103,16 +103,16 @@ def test_tf_local_mode(sagemaker_local_session):
sagemaker_session=sagemaker_local_session,
)

inputs = estimator.sagemaker_session.upload_data(
inputs = tensorflow_estimator.sagemaker_session.upload_data(
path=DATA_PATH, key_prefix="integ-test-data/tf_iris"
)
estimator.fit(inputs)
print("job succeeded: {}".format(estimator.latest_training_job.name))
tensorflow_estimator.fit(inputs)
print("job succeeded: {}".format(tensorflow_estimator.latest_training_job.name))

endpoint_name = estimator.latest_training_job.name
endpoint_name = tensorflow_estimator.latest_training_job.name
with lock.lock(LOCK_PATH):
try:
json_predictor = estimator.deploy(
json_predictor = tensorflow_estimator.deploy(
initial_instance_count=1, instance_type="local", endpoint_name=endpoint_name
)

Expand All @@ -124,7 +124,7 @@ def test_tf_local_mode(sagemaker_local_session):

assert dict_result == list_result
finally:
estimator.delete_endpoint()
tensorflow_estimator.delete_endpoint()


@pytest.mark.local_mode
Expand All @@ -133,7 +133,7 @@ def test_tf_distributed_local_mode(sagemaker_local_session):
with stopit.ThreadingTimeout(5 * 60, swallow_exc=False):
script_path = os.path.join(DATA_DIR, "iris", "iris-dnn-classifier.py")

estimator = TensorFlow(
tensorflow_estimator = estimator.TensorFlow(
entry_point=script_path,
role="SageMakerRole",
framework_version="1.12",
Expand All @@ -147,14 +147,14 @@ def test_tf_distributed_local_mode(sagemaker_local_session):
)

inputs = "file://" + DATA_PATH
estimator.fit(inputs)
print("job succeeded: {}".format(estimator.latest_training_job.name))
tensorflow_estimator.fit(inputs)
print("job succeeded: {}".format(tensorflow_estimator.latest_training_job.name))

endpoint_name = estimator.latest_training_job.name
endpoint_name = tensorflow_estimator.latest_training_job.name

with lock.lock(LOCK_PATH):
try:
json_predictor = estimator.deploy(
json_predictor = tensorflow_estimator.deploy(
initial_instance_count=1, instance_type="local", endpoint_name=endpoint_name
)

Expand All @@ -166,7 +166,7 @@ def test_tf_distributed_local_mode(sagemaker_local_session):

assert dict_result == list_result
finally:
estimator.delete_endpoint()
tensorflow_estimator.delete_endpoint()


@pytest.mark.local_mode
Expand All @@ -175,7 +175,7 @@ def test_tf_local_data(sagemaker_local_session):
with stopit.ThreadingTimeout(5 * 60, swallow_exc=False):
script_path = os.path.join(DATA_DIR, "iris", "iris-dnn-classifier.py")

estimator = TensorFlow(
tensorflow_estimator = estimator.TensorFlow(
entry_point=script_path,
role="SageMakerRole",
framework_version="1.12",
Expand All @@ -189,13 +189,13 @@ def test_tf_local_data(sagemaker_local_session):
)

inputs = "file://" + DATA_PATH
estimator.fit(inputs)
print("job succeeded: {}".format(estimator.latest_training_job.name))
tensorflow_estimator.fit(inputs)
print("job succeeded: {}".format(tensorflow_estimator.latest_training_job.name))

endpoint_name = estimator.latest_training_job.name
endpoint_name = tensorflow_estimator.latest_training_job.name
with lock.lock(LOCK_PATH):
try:
json_predictor = estimator.deploy(
json_predictor = tensorflow_estimator.deploy(
initial_instance_count=1, instance_type="local", endpoint_name=endpoint_name
)

Expand All @@ -207,7 +207,7 @@ def test_tf_local_data(sagemaker_local_session):

assert dict_result == list_result
finally:
estimator.delete_endpoint()
tensorflow_estimator.delete_endpoint()


@pytest.mark.local_mode
Expand All @@ -216,7 +216,7 @@ def test_tf_local_data_local_script():
with stopit.ThreadingTimeout(5 * 60, swallow_exc=False):
script_path = os.path.join(DATA_DIR, "iris", "iris-dnn-classifier.py")

estimator = TensorFlow(
tensorflow_estimator = estimator.TensorFlow(
entry_point=script_path,
role="SageMakerRole",
framework_version="1.12",
Expand All @@ -231,13 +231,13 @@ def test_tf_local_data_local_script():

inputs = "file://" + DATA_PATH

estimator.fit(inputs)
print("job succeeded: {}".format(estimator.latest_training_job.name))
tensorflow_estimator.fit(inputs)
print("job succeeded: {}".format(tensorflow_estimator.latest_training_job.name))

endpoint_name = estimator.latest_training_job.name
endpoint_name = tensorflow_estimator.latest_training_job.name
with lock.lock(LOCK_PATH):
try:
json_predictor = estimator.deploy(
json_predictor = tensorflow_estimator.deploy(
initial_instance_count=1, instance_type="local", endpoint_name=endpoint_name
)

Expand All @@ -249,7 +249,7 @@ def test_tf_local_data_local_script():

assert dict_result == list_result
finally:
estimator.delete_endpoint()
tensorflow_estimator.delete_endpoint()


@pytest.mark.local_mode
Expand Down
16 changes: 9 additions & 7 deletions tests/integ/test_tf_cifar.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import tests.integ
from tests.integ.timeout import timeout_and_delete_endpoint_by_name, timeout

from sagemaker.tensorflow import TensorFlow
from sagemaker.tensorflow import estimator
from sagemaker.utils import unique_name_from_base

PICKLE_CONTENT_TYPE = "application/python-pickle"
Expand Down Expand Up @@ -50,7 +50,7 @@ def test_cifar(sagemaker_session):

dataset_path = os.path.join(tests.integ.DATA_DIR, "cifar_10", "data")

estimator = TensorFlow(
tensorflow_estimator = estimator.TensorFlow(
entry_point="resnet_cifar_10.py",
source_dir=script_path,
role="SageMakerRole",
Expand All @@ -64,17 +64,19 @@ def test_cifar(sagemaker_session):
base_job_name="test-cifar",
)

inputs = estimator.sagemaker_session.upload_data(
inputs = tensorflow_estimator.sagemaker_session.upload_data(
path=dataset_path, key_prefix="data/cifar10"
)
job_name = unique_name_from_base("test-tf-cifar")

estimator.fit(inputs, logs=False, job_name=job_name)
print("job succeeded: {}".format(estimator.latest_training_job.name))
tensorflow_estimator.fit(inputs, logs=False, job_name=job_name)
print("job succeeded: {}".format(tensorflow_estimator.latest_training_job.name))

endpoint_name = estimator.latest_training_job.name
endpoint_name = tensorflow_estimator.latest_training_job.name
with timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
predictor = estimator.deploy(initial_instance_count=1, instance_type="ml.p2.xlarge")
predictor = tensorflow_estimator.deploy(
initial_instance_count=1, instance_type="ml.p2.xlarge"
)
predictor.serializer = PickleSerializer()
predictor.content_type = PICKLE_CONTENT_TYPE

Expand Down
14 changes: 8 additions & 6 deletions tests/integ/test_tf_keras.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
import tests.integ
from tests.integ.timeout import timeout_and_delete_endpoint_by_name, timeout

from sagemaker.tensorflow import TensorFlow
from sagemaker.tensorflow import estimator
from sagemaker.utils import unique_name_from_base


Expand All @@ -37,7 +37,7 @@ def test_keras(sagemaker_session):
dataset_path = os.path.join(tests.integ.DATA_DIR, "cifar_10", "data")

with timeout(minutes=45):
estimator = TensorFlow(
tensorflow_estimator = estimator.TensorFlow(
entry_point="keras_cnn_cifar_10.py",
source_dir=script_path,
role="SageMakerRole",
Expand All @@ -51,16 +51,18 @@ def test_keras(sagemaker_session):
train_max_run=45 * 60,
)

inputs = estimator.sagemaker_session.upload_data(
inputs = tensorflow_estimator.sagemaker_session.upload_data(
path=dataset_path, key_prefix="data/cifar10"
)
job_name = unique_name_from_base("test-tf-keras")

estimator.fit(inputs, job_name=job_name)
tensorflow_estimator.fit(inputs, job_name=job_name)

endpoint_name = estimator.latest_training_job.name
endpoint_name = tensorflow_estimator.latest_training_job.name
with timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
predictor = estimator.deploy(initial_instance_count=1, instance_type="ml.p2.xlarge")
predictor = tensorflow_estimator.deploy(
initial_instance_count=1, instance_type="ml.p2.xlarge"
)

data = np.random.randn(32, 32, 3)
predict_response = predictor.predict(data)
Expand Down
Loading