KellenSunderland closed pull request #9542: WIP: Do not merge, Dockerfile to create Jetson TX1 and TX2 compatible builds.
URL: https://github.com/apache/incubator-mxnet/pull/9542

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:


diff --git a/Jenkinsfile b/Jenkinsfile
index b7a8f60cb9..6218c626db 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -204,6 +204,16 @@ try {
         }
       }
     },
+    'Build: Jetson': {
+      node('mxnetlinux-cpu') {
+        ws('workspace/jetson') {
+          init_git()
+          sh "make clean"
+          sh "make -C amalgamation/ clean"
+          sh "docker build -f docker_multiarch/Dockerfile.build.jetson .."
+        }
+      }
+    },
     'CPU: MKLML': {
       node('mxnetlinux-cpu') {
         ws('workspace/build-mklml-cpu') {
diff --git a/docker_multiarch/Dockerfile.build.jetson b/docker_multiarch/Dockerfile.build.jetson
new file mode 100644
index 0000000000..93fa53dd01
--- /dev/null
+++ b/docker_multiarch/Dockerfile.build.jetson
@@ -0,0 +1,72 @@
+# -*- mode: dockerfile -*-
+# Dockerfile to build libmxnet.so and a Python wheel for the Jetson TX1/TX2
+
+FROM nvidia/cuda:8.0-cudnn5-devel as cudabuilder
+
+FROM dockcross/linux-arm64
+
+ENV ARCH aarch64
+ENV NVCCFLAGS "-m64"
+ENV CUDA_ARCH "-gencode arch=compute_53,code=sm_53 -gencode arch=compute_62,code=sm_62"
+ENV BUILD_OPTS "USE_OPENCV=0 USE_BLAS=openblas USE_SSE=0 USE_CUDA=1 USE_CUDNN=0 ENABLE_CUDA_RTC=0 USE_NCCL=0 USE_CUDA_PATH=/usr/local/cuda/"
+ENV CC /usr/bin/aarch64-linux-gnu-gcc
+ENV CXX /usr/bin/aarch64-linux-gnu-g++
+ENV FC /usr/bin/aarch64-linux-gnu-gfortran-4.9
+ENV HOSTCC gcc
+
+WORKDIR /work
+
+# Build OpenBLAS
+ADD https://api.github.com/repos/xianyi/OpenBLAS/git/refs/heads/master /tmp/openblas_version.json
+RUN git clone https://github.com/xianyi/OpenBLAS.git && \
+    cd OpenBLAS && \
+    make -j$(nproc) TARGET=ARMV8 && \
+    make install && \
+    ln -s /opt/OpenBLAS/lib/libopenblas.so /usr/lib/libopenblas.so && \
+    ln -s /opt/OpenBLAS/lib/libopenblas.a /usr/lib/libopenblas.a && \
+    ln -s /opt/OpenBLAS/lib/libopenblas.a /usr/lib/liblapack.a
+
+ENV LD_LIBRARY_PATH $LD_LIBRARY_PATH:/opt/OpenBLAS/lib
+ENV CPLUS_INCLUDE_PATH /opt/OpenBLAS/include
+
+# Setup CUDA build env (including configuring and copying nvcc)
+COPY --from=cudabuilder /usr/local/cuda /usr/local/cuda
+ENV PATH $PATH:/usr/local/cuda/bin
+ENV TARGET_ARCH aarch64
+ENV TARGET_OS linux
+
+# Install ARM dependencies based on JetPack 3.1
+RUN wget http://developer.download.nvidia.com/devzone/devcenter/mobile/jetpack_l4t/013/linux-x64/cuda-repo-l4t-8-0-local_8.0.84-1_arm64.deb && \
+    wget http://developer.download.nvidia.com/devzone/devcenter/mobile/jetpack_l4t/013/linux-x64/libcudnn6_6.0.21-1+cuda8.0_arm64.deb && \
+    dpkg -i cuda-repo-l4t-8-0-local_8.0.84-1_arm64.deb && \
+    dpkg -i libcudnn6_6.0.21-1+cuda8.0_arm64.deb && \
+    apt update -y && \
+    apt install cuda-cudart-cross-aarch64-8-0 cuda-cublas-cross-aarch64-8-0 \
+    cuda-nvml-cross-aarch64-8-0 cuda-nvrtc-cross-aarch64-8-0 cuda-cufft-cross-aarch64-8-0 \
+    cuda-curand-cross-aarch64-8-0 cuda-cusolver-cross-aarch64-8-0 cuda-cusparse-cross-aarch64-8-0 \
+    cuda-misc-headers-cross-aarch64-8-0 cuda-npp-cross-aarch64-8-0 libcudnn6 -y && \
+    cp /usr/local/cuda-8.0/targets/aarch64-linux/lib/*.so /usr/local/cuda/lib64/ && \
+    cp /usr/local/cuda-8.0/targets/aarch64-linux/lib/stubs/*.so /usr/local/cuda/lib64/stubs/ && \
+    cp -r /usr/local/cuda-8.0/targets/aarch64-linux/include/ /usr/local/cuda/include/ && \
+    rm cuda-repo-l4t-8-0-local_8.0.84-1_arm64.deb && rm libcudnn6_6.0.21-1+cuda8.0_arm64.deb
+
+# Build MXNet
+ADD incubator-mxnet incubator-mxnet
+
+WORKDIR /work/incubator-mxnet
+
+# Add ARM specific settings
+ADD arm.crosscompile.mk make/config.mk
+
+# Build and link
+RUN make -j$(nproc) $BUILD_OPTS
+
+# Create a binary wheel for easy installation.
+# When using tool.py, output will be in the jetson folder.
+# Scp the .whl file to your target device and install it
+# via pip install.
+WORKDIR /work/incubator-mxnet/python
+RUN python setup.py bdist_wheel --universal
+
+# Copy build artifacts to output folder for tool.py script
+RUN mkdir -p /work/build && cp dist/*.whl /work/build && cp ../lib/* /work/build

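For anyone reproducing the Jenkins stage above by hand, the build boils down to something like the sketch below, assuming Docker is installed on the host. The image tag (mxnet-jetson), container name (jetson-build), device hostname (tx2), and wheel glob are illustrative placeholders, not names from this PR:

    # The Dockerfile ADDs "incubator-mxnet" from the build context, so the
    # context must be the directory containing the checkout; the Jenkinsfile
    # stage passes ".." for the same reason.
    cd incubator-mxnet
    docker build -f docker_multiarch/Dockerfile.build.jetson -t mxnet-jetson ..

    # Pull the wheel and libmxnet.so out of /work/build in the finished image
    # without starting it.
    docker create --name jetson-build mxnet-jetson bash
    docker cp jetson-build:/work/build ./jetson
    docker rm jetson-build

    # On the TX1/TX2 device itself, install the wheel as the Dockerfile's
    # closing comments describe.
    scp jetson/*.whl tx2:
    ssh tx2 "pip install mxnet-*.whl"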

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
