ashb commented on a change in pull request #4938: [AIRFLOW-4117] Multi-staging Image - Travis CI tests [Step 3/3]
URL: https://github.com/apache/airflow/pull/4938#discussion_r299530325
 
 

 ##########
 File path: scripts/ci/in_container/entrypoint_ci.sh
 ##########
 @@ -0,0 +1,220 @@
+#!/usr/bin/env bash
+#
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing,
+#  software distributed under the License is distributed on an
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#  KIND, either express or implied.  See the License for the
+#  specific language governing permissions and limitations
+#  under the License.
+
+# Bash sanity settings (exit on error, complain about undefined vars, fail when any command in a pipe fails)
+set -euo pipefail
+MY_DIR=$(cd "$(dirname "$0")"; pwd)
+
+if [[ ${AIRFLOW_CI_VERBOSE:="false"} == "true" ]]; then
+    set -x
+fi
+
+# shellcheck source=./_check_in_container.sh
+. "${MY_DIR}/_check_in_container.sh"
+
+AIRFLOW_ROOT="${MY_DIR}/../../.."
+
+PYTHON_VERSION=${PYTHON_VERSION:=3.6}
+ENV=${ENV:=docker}
+BACKEND=${BACKEND:=sqlite}
+KUBERNETES_VERSION=${KUBERNETES_VERSION:=""}
+
+export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}
+
+if [[ -z "${AIRFLOW_SOURCES:=}" ]]; then
+    echo
+    echo "AIRFLOW_SOURCES not set !!!!"
+    echo
+    exit 1
+fi
+
+echo
+echo "Airflow home: ${AIRFLOW_HOME}"
+echo "Airflow sources: ${AIRFLOW_SOURCES}"
+echo "Airflow core SQL connection: ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}"
+echo
+
+ARGS=( "$@" )
+
+RUN_TESTS=${RUN_TESTS:="true"}
+
+if [[ ! -d "${AIRFLOW_SOURCES}/airflow/www/node_modules" && "${CLEAN_FILES}" 
== "false" ]]; then
+    echo
+    echo "Installing NPM modules as they are not yet installed (Sources 
mounted from Host)"
+    echo
+    pushd "${AIRFLOW_SOURCES}/airflow/www/"
+    npm ci
+    echo
+    popd
+fi
+if [[ ! -d "${AIRFLOW_SOURCES}/airflow/www/static/dist" && ${CLEAN_FILES} == 
"false" ]]; then
+    pushd "${AIRFLOW_SOURCES}/airflow/www/"
+    echo
+    echo "Building production version of javascript files (Sources mounted 
from Host)"
+    echo
+    echo
+    npm run prod
+    echo
+    echo
+    popd
+fi
+
+if [[ ${AIRFLOW_CI_VERBOSE} == "true" ]]; then
+    echo
+    echo "Using ${HADOOP_DISTRO:=} distribution of Hadoop from 
${HADOOP_HOME:=}"
+    echo
+fi
+
+AIRFLOW_ROOT="$(cd ${MY_DIR}; cd ../../..; pwd)"
+
+export AIRFLOW__CORE__DAGS_FOLDER="${AIRFLOW_SOURCES}/tests/dags"
+
+# Default PYTHONPATH to tests/test_utils if not already set (TODO: Do we need it?)
+export PYTHONPATH="${PYTHONPATH:-${AIRFLOW_SOURCES}/tests/test_utils}"
+
+export AIRFLOW__CORE__UNIT_TEST_MODE=True
+export HADOOP_DISTRO
+
+echo "#######################################################################"
+echo
+echo "  You can drop into ipdb debugger by adding this line to your code:"
+echo
+echo "         import ipdb; ipdb.set_trace()"
+echo
+echo "  Then run your tests with 'run-tests <TEST> --nocapture' "
+echo
+echo "#######################################################################"
+
+
+# Fix codecov build path
+# TODO: Check this - this should be made travis-independent
+if [[ ! -h /home/travis/build/apache/airflow ]]; then
+  sudo mkdir -p /home/travis/build/apache
+  sudo ln -s "${AIRFLOW_ROOT}" /home/travis/build/apache/airflow
+fi
+
+# Fix file permissions
+if [[ -d $HOME/.minikube ]]; then
+    sudo chown -R airflow:airflow "${HOME}/.kube" "${HOME}/.minikube"
+fi
+
+# Cleanup the logs when entering the environment
+sudo rm -rf "${AIRFLOW_HOME}"/logs/*
+
+if [[ "${ENV}" == "docker" ]]; then
+    # Start MiniCluster
+    java -cp "/tmp/minicluster-1.1-SNAPSHOT/*" com.ing.minicluster.MiniCluster >/dev/null 2>&1 &
+
+    # Set up ssh keys
+    echo 'yes' | ssh-keygen -t rsa -C your_email@youremail.com -P '' -f ~/.ssh/id_rsa >/dev/null 2>&1
+    cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
+    ln -s -f ~/.ssh/authorized_keys ~/.ssh/authorized_keys2
+    chmod 600 ~/.ssh/*
+
+    # SSH Service
+    sudo service ssh restart >/dev/null 2>&1
+
+    # Set up Kerberos
+
+    FQDN="$(hostname)"
+    ADMIN="admin"
+    PASS="airflow"
+    KRB5_KTNAME=/etc/airflow.keytab
+
+    if [[ ${AIRFLOW_CI_VERBOSE} == "true" ]]; then
+        echo
+        echo "Hosts:"
+        echo
+        cat /etc/hosts
+        echo
+        echo "Hostname: ${FQDN}"
+        echo
+    fi
+
+    sudo cp "${MY_DIR}/krb5/krb5.conf" /etc/krb5.conf
+
+    echo -e "${PASS}\n${PASS}" | \
+        sudo kadmin -p "${ADMIN}/admin" -w "${PASS}" -q "addprinc -randkey airflow/${FQDN}" >/dev/null 2>&1
+    sudo kadmin -p "${ADMIN}/admin" -w "${PASS}" -q "ktadd -k ${KRB5_KTNAME} airflow" >/dev/null 2>&1
+    sudo kadmin -p "${ADMIN}/admin" -w "${PASS}" -q "ktadd -k ${KRB5_KTNAME} airflow/${FQDN}" >/dev/null 2>&1
+    sudo chmod 0644 "${KRB5_KTNAME}"
+fi
+
+# Exporting XUNIT_FILE so that we can see summary of failed tests
+# at the end of the log
+export XUNIT_FILE="${AIRFLOW_HOME}/logs/all_tests.xml"
+mkdir -pv "${AIRFLOW_HOME}/logs/"
+
+cp -f "${MY_DIR}/airflow_ci.cfg" "${AIRFLOW_HOME}/unittests.cfg"
+
+# If we do not want to run tests, we simply drop into bash
+if [[ "${RUN_TESTS}" == "false" ]]; then
+    if [[ -z "${ARGS[*]}" ]]; then
+        exec /bin/bash
+    else
+        exec /bin/bash -c "${ARGS[*]}"
 
 Review comment:
   This is always the most difficult case to handle: preserving quoting, spaces, and special characters when the argument array is collapsed into a single string/command.
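
   A minimal sketch of one alternative (illustration only, not part of this
   PR): re-quote every element with printf '%q' before collapsing the array,
   so the resulting string parses back into exactly the same words:

       # Counting elements is more robust than [[ -z "${ARGS[*]}" ]],
       # which also treats a single empty-string argument as "no args".
       if [[ "${#ARGS[@]}" == "0" ]]; then
           exec /bin/bash
       else
           # printf '%q ' shell-escapes each element, so spaces and
           # special characters survive the trip through 'bash -c'.
           exec /bin/bash -c "$(printf '%q ' "${ARGS[@]}")"
       fi

   Trade-off: the %q form runs the arguments verbatim as a single command,
   while the current "${ARGS[*]}" form lets callers pass shell syntax
   (pipes, redirects) at the cost of mangling arguments containing spaces.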
