Commit c88b1ce

Add TensorFlow 1.8 dockerfiles (#49)
* Add 1.8 dockerfiles
* Update tf versions test for 1.8
* Fix base image tag for GPU dockerfile
* Add TODO for tf serving 1.8
1 parent 21a3042 commit c88b1ce

File tree

5 files changed: +226 -0 lines changed


docker/1.8.0/base/Dockerfile.cpu

Lines changed: 53 additions & 0 deletions
@@ -0,0 +1,53 @@
FROM ubuntu:16.04

RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        curl \
        git \
        libcurl3-dev \
        libfreetype6-dev \
        libpng12-dev \
        libzmq3-dev \
        pkg-config \
        python-dev \
        rsync \
        software-properties-common \
        unzip \
        zip \
        zlib1g-dev \
        openjdk-8-jdk \
        openjdk-8-jre-headless \
        wget \
        vim \
        iputils-ping \
        nginx \
        && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

RUN curl -fSsL -O https://bootstrap.pypa.io/get-pip.py && \
    python get-pip.py && \
    rm get-pip.py

RUN pip --no-cache-dir install \
        numpy \
        scipy \
        sklearn \
        pandas \
        h5py

WORKDIR /root

# TODO: upgrade to tf serving 1.8, which requires more work with updating
# dependencies. See current work in progress in tfserving-1.8 branch.
ENV TF_SERVING_VERSION=1.7.0

RUN pip install numpy boto3 six awscli flask==0.11 Jinja2==2.9 tensorflow-serving-api==$TF_SERVING_VERSION gevent gunicorn

RUN wget "http://storage.googleapis.com/tensorflow-serving-apt/pool/tensorflow-model-server/t/tensorflow-model-server/tensorflow-model-server_${TF_SERVING_VERSION}_all.deb" && \
    dpkg -i tensorflow-model-server_${TF_SERVING_VERSION}_all.deb

# Update libstdc++6, as required by tensorflow-serving >= 1.6: https://github.com/tensorflow/serving/issues/819
RUN add-apt-repository ppa:ubuntu-toolchain-r/test -y && \
    apt-get update && \
    apt-get install -y libstdc++6
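
For reference, a minimal sketch of how this base image could be built locally so that the final Dockerfile's FROM line resolves. The tag matches the FROM line in docker/1.8.0/final/py2/Dockerfile.cpu; the build-context path is an assumption, not part of this commit:

    # Assumed invocation; run from the repository root.
    docker build -t tensorflow-base:1.8.0-cpu-py2 -f docker/1.8.0/base/Dockerfile.cpu .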

docker/1.8.0/base/Dockerfile.gpu

Lines changed: 119 additions & 0 deletions
@@ -0,0 +1,119 @@
FROM nvidia/cuda:9.0-base-ubuntu16.04

RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        cuda-command-line-tools-9-0 \
        cuda-cublas-dev-9-0 \
        cuda-cudart-dev-9-0 \
        cuda-cufft-dev-9-0 \
        cuda-curand-dev-9-0 \
        cuda-cusolver-dev-9-0 \
        cuda-cusparse-dev-9-0 \
        curl \
        git \
        libcudnn7=7.0.5.15-1+cuda9.0 \
        libcudnn7-dev=7.0.5.15-1+cuda9.0 \
        libcurl3-dev \
        libfreetype6-dev \
        libpng12-dev \
        libzmq3-dev \
        pkg-config \
        python-dev \
        rsync \
        software-properties-common \
        unzip \
        zip \
        zlib1g-dev \
        wget \
        vim \
        nginx \
        iputils-ping \
        && \
    rm -rf /var/lib/apt/lists/* && \
    find /usr/local/cuda-9.0/lib64/ -type f -name 'lib*_static.a' -not -name 'libcudart_static.a' -delete && \
    rm /usr/lib/x86_64-linux-gnu/libcudnn_static_v7.a

RUN curl -fSsL -O https://bootstrap.pypa.io/get-pip.py && \
    python get-pip.py && \
    rm get-pip.py

RUN pip --no-cache-dir install \
        numpy \
        scipy \
        sklearn \
        pandas \
        h5py

# Set up grpc
RUN pip install enum34 futures mock six && \
    pip install --pre 'protobuf>=3.0.0a3' && \
    pip install -i https://testpypi.python.org/simple --pre grpcio

# Set up Bazel.

# Running bazel inside a `docker build` command causes trouble, cf:
# https://github.com/bazelbuild/bazel/issues/134
# The easiest solution is to set up a bazelrc file forcing --batch.
RUN echo "startup --batch" >>/etc/bazel.bazelrc
# Similarly, we need to workaround sandboxing issues:
# https://github.com/bazelbuild/bazel/issues/418
RUN echo "build --spawn_strategy=standalone --genrule_strategy=standalone" \
    >>/etc/bazel.bazelrc
# Install the most recent bazel release which works: https://github.com/bazelbuild/bazel/issues/4652
ENV BAZEL_VERSION 0.10.1
WORKDIR /
RUN mkdir /bazel && \
    cd /bazel && \
    curl -H "User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36" -fSsL -O https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/bazel-$BAZEL_VERSION-installer-linux-x86_64.sh && \
    curl -H "User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36" -fSsL -o /bazel/LICENSE.txt https://raw.githubusercontent.com/bazelbuild/bazel/master/LICENSE && \
    chmod +x bazel-*.sh && \
    ./bazel-$BAZEL_VERSION-installer-linux-x86_64.sh && \
    cd / && \
    rm -f /bazel/bazel-$BAZEL_VERSION-installer-linux-x86_64.sh

# Configure the build for our CUDA configuration.
ENV CI_BUILD_PYTHON python
ENV LD_LIBRARY_PATH /usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH
ENV TF_NEED_CUDA 1
ENV TF_CUDA_COMPUTE_CAPABILITIES=3.7,6.1
ENV TF_CUDA_VERSION=9.0
ENV TF_CUDNN_VERSION=7
ENV CUDNN_INSTALL_PATH=/usr/lib/x86_64-linux-gnu

# TODO: upgrade to tf serving 1.8, which requires more work with updating
# dependencies. See current work in progress in tfserving-1.8 branch.
ENV TF_SERVING_VERSION=1.7.0

# Install tensorflow-serving-api
RUN pip install tensorflow-serving-api==$TF_SERVING_VERSION

# Download TensorFlow Serving
RUN cd / && git clone --recurse-submodules https://github.com/tensorflow/serving && \
    cd serving && \
    git checkout $TF_SERVING_VERSION

# Configure TensorFlow to use the GPU
WORKDIR /serving
RUN git clone --recursive https://github.com/tensorflow/tensorflow.git && \
    cd tensorflow && \
    git checkout v$TF_SERVING_VERSION && \
    tensorflow/tools/ci_build/builds/configured GPU

# Build TensorFlow Serving and install it in /usr/local/bin
WORKDIR /serving
RUN bazel build -c opt --config=cuda \
        --cxxopt="-D_GLIBCXX_USE_CXX11_ABI=0" \
        --crosstool_top=@local_config_cuda//crosstool:toolchain \
        tensorflow_serving/model_servers:tensorflow_model_server && \
    cp bazel-bin/tensorflow_serving/model_servers/tensorflow_model_server /usr/local/bin/ && \
    bazel clean --expunge

# Update libstdc++6, as required by tensorflow-serving >= 1.6: https://github.com/tensorflow/serving/issues/819
RUN add-apt-repository ppa:ubuntu-toolchain-r/test -y && \
    apt-get update && \
    apt-get install -y libstdc++6

# Clean up the container
RUN rm -rf /serving && \
    rm -rf /bazel
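
The GPU base image can presumably be built the same way. The tag below matches the FROM line in docker/1.8.0/final/py2/Dockerfile.gpu; the context path is again an assumption, and the TensorFlow Serving compile inside this image takes considerable time and memory:

    # Assumed invocation; run from the repository root.
    docker build -t tensorflow-base:1.8.0-gpu-py2 -f docker/1.8.0/base/Dockerfile.gpu .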

docker/1.8.0/final/py2/Dockerfile.cpu

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
# Use local version of image built from Dockerfile.cpu in /docker/1.8.0/base
FROM tensorflow-base:1.8.0-cpu-py2
MAINTAINER Amazon AI

ARG framework_installable
ARG framework_support_installable=sagemaker_tensorflow_container-1.0.0.tar.gz

WORKDIR /root

# Will install from pypi once packages are released there. For now, copy from local file system.
COPY $framework_installable .
COPY $framework_support_installable .

RUN framework_installable_local=$(basename $framework_installable) && \
    framework_support_installable_local=$(basename $framework_support_installable) && \
    \
    pip install $framework_installable_local && \
    pip install $framework_support_installable_local && \
    \
    rm $framework_installable_local && \
    rm $framework_support_installable_local

# entry.py comes from sagemaker-container-support
ENTRYPOINT ["entry.py"]
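
Because framework_installable has no default, building the final image needs a --build-arg pointing at a TensorFlow binary present in the build context. A hypothetical example; the wheel filename and image tag are placeholders, not defined by this commit:

    # Hypothetical filenames and tag, for illustration only.
    docker build -t sagemaker-tensorflow:1.8.0-cpu-py2 \
        --build-arg framework_installable=tensorflow-1.8.0-cp27-cp27mu-manylinux1_x86_64.whl \
        --build-arg framework_support_installable=sagemaker_tensorflow_container-1.0.0.tar.gz \
        -f docker/1.8.0/final/py2/Dockerfile.cpu .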

docker/1.8.0/final/py2/Dockerfile.gpu

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
# Use local version of image built from Dockerfile.gpu in /docker/1.8.0/base
FROM tensorflow-base:1.8.0-gpu-py2
MAINTAINER Amazon AI

ARG framework_installable
ARG framework_support_installable=sagemaker_tensorflow_container-1.0.0.tar.gz

WORKDIR /root

# Will install from pypi once packages are released there. For now, copy from local file system.
COPY $framework_installable .
COPY $framework_support_installable .

RUN framework_installable_local=$(basename $framework_installable) && \
    framework_support_installable_local=$(basename $framework_support_installable) && \
    \
    pip install $framework_installable_local && \
    pip install $framework_support_installable_local && \
    \
    rm $framework_installable_local && \
    rm $framework_support_installable_local

# entry.py comes from sagemaker-container-support
ENTRYPOINT ["entry.py"]
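
The GPU final image mirrors the CPU one; only the base image tag and Dockerfile path differ, so a hypothetical build looks like:

    # Hypothetical tag; build args as in the CPU example above.
    docker build -t sagemaker-tensorflow:1.8.0-gpu-py2 \
        --build-arg framework_installable=tensorflow-1.8.0-cp27-cp27mu-manylinux1_x86_64.whl \
        -f docker/1.8.0/final/py2/Dockerfile.gpu .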

test/integ/test_versions.py

Lines changed: 6 additions & 0 deletions
@@ -23,12 +23,18 @@ def required_versions(framework_version):
     elif framework_version == '1.5.0':
         return ['tensorflow-serving-api==1.5.0',
                 'tensorflow==1.5.0']
+    # We released using tensorflow serving 1.5.0 for tf 1.6, due to not finding this
+    # fix in time before launch: https://github.com/tensorflow/serving/issues/819
     elif framework_version == '1.6.0':
         return ['tensorflow-serving-api==1.5.0',
                 'tensorflow==1.6.0']
     elif framework_version == '1.7.0':
         return ['tensorflow-serving-api==1.7.0',
                 'tensorflow==1.7.0']
+    # TODO: upgrade to serving 1.8.0 (see tfserving-1.8 branch)
+    elif framework_version == '1.8.0':
+        return ['tensorflow-serving-api==1.7.0',
+                'tensorflow==1.8.0']
     else:
         raise ValueError("invalid internal test config")
