Skip to content

Commit adb7bd7

Browse files
authored
Merge pull request #42 from aws/add-actions-for-tests
Add CI pipeline and make sure quality checks and tests work
2 parents f884fc6 + 49e3c21 commit adb7bd7

File tree

10 files changed

+45
-11
lines changed

10 files changed

+45
-11
lines changed

.github/workflows/quality.yml

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
name: Quality Check
2+
3+
on: [pull_request]
4+
5+
jobs:
6+
quality:
7+
runs-on: ubuntu-latest
8+
steps:
9+
- uses: actions/checkout@v2
10+
- name: Set up Python 3.6
11+
uses: actions/setup-python@v2
12+
with:
13+
python-version: 3.6
14+
- name: Install Python dependencies
15+
run: pip install -e .[quality]
16+
- name: Run Quality check
17+
run: make quality

.github/workflows/test.yml

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
name: Run Tests
2+
3+
on: [pull_request]
4+
5+
jobs:
6+
test:
7+
runs-on: ubuntu-latest
8+
steps:
9+
- uses: actions/checkout@v2
10+
- name: Set up Python 3.6
11+
uses: actions/setup-python@v2
12+
with:
13+
python-version: 3.6
14+
- name: Install Python dependencies
15+
run: pip install -e .[test,dev]
16+
- name: Run Unit Tests
17+
run: make unit-test
18+
# - name: Run Integration Tests
19+
# run: make integ-test

makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
.PHONY: quality style unit-test integ-test
22

3-
check_dirs := src deployment docker tests
3+
check_dirs := src tests
44

55
# run tests
66

setup.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -45,9 +45,8 @@
4545
extras["transformers"] = ["transformers[sklearn,sentencepiece]>=4.5.1"]
4646

4747
# framework specific dependencies
48-
extras["torch"] = ["torch>=1.4.0"]
49-
extras["tensorflow-cpu"] = ["tensorflow-cpu>=2.3"]
50-
extras["tensorflow"] = ["tensorflow>=2.3"]
48+
extras["torch"] = ["torch>=1.8.0"]
49+
extras["tensorflow"] = ["tensorflow>=2.4.0"]
5150

5251
# MMS Server dependencies
5352
extras["mms"] = ["multi-model-server>=1.1.4", "retrying"]
@@ -74,7 +73,7 @@
7473
"flake8>=3.8.3",
7574
]
7675

77-
extras["all"] = extras["test"] + extras["quality"] + extras["benchmark"] + extras["transformers"] + extras["mms"]
76+
extras["dev"] = extras["transformers"] + extras["mms"] + extras["torch"] + extras["tensorflow"]
7877

7978
setup(
8079
name="sagemaker-huggingface-inference-toolkit",

src/sagemaker_huggingface_inference_toolkit/decoder_encoder.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ def decode_csv(string_like): # type: (str) -> np.array
4040
# detects if the incoming csv has headers
4141
if not any(header in string_like.splitlines()[0].lower() for header in ["question", "context", "inputs"]):
4242
raise PredictionException(
43-
f"You need to provide the correct CSV with Header columns to use it with the inference toolkit default handler.",
43+
"You need to provide the correct CSV with Header columns to use it with the inference toolkit default handler.",
4444
400,
4545
)
4646
# reads csv as io

src/sagemaker_huggingface_inference_toolkit/handler_service.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@
2222
from sagemaker_inference import content_types, environment, utils
2323
from transformers.pipelines import SUPPORTED_TASKS
2424

25-
from mms import metrics
2625
from mms.service import PredictionException
2726
from sagemaker_huggingface_inference_toolkit import decoder_encoder
2827
from sagemaker_huggingface_inference_toolkit.transformers_utils import (

src/sagemaker_huggingface_inference_toolkit/transformers_utils.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,8 @@
2020
from huggingface_hub.file_download import cached_download, hf_hub_url
2121
from transformers import pipeline
2222
from transformers.file_utils import is_tf_available, is_torch_available
23-
from transformers.pipelines import Pipeline, Conversation
23+
from transformers.pipelines import Conversation, Pipeline
24+
2425

2526
if is_tf_available():
2627
import tensorflow as tf

tests/unit/test_decoder_encoder.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ def test_decode_csv():
4545
]
4646
}
4747
text_classification_input = "inputs\r\nI love you\r\nI like you"
48-
decoded_data = decoder_encoder.decode_csv(DECODE_CSV_INPUT)
48+
decoded_data = decoder_encoder.decode_csv(text_classification_input)
4949
assert decoded_data == {"inputs": ["I love you", "I like you"]}
5050

5151

tests/unit/test_handler_service.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313
# limitations under the License.
1414
import json
1515
import os
16-
import sys
1716
import tempfile
1817

1918
import pytest

tests/unit/test_transformers_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@
1414
import os
1515
import tempfile
1616

17-
from transformers.file_utils import is_torch_available
1817
from transformers import pipeline
18+
from transformers.file_utils import is_torch_available
1919
from transformers.testing_utils import require_tf, require_torch, slow
2020

2121
from sagemaker_huggingface_inference_toolkit.transformers_utils import (

0 commit comments

Comments (0)