This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive-dev-box.git

commit 23e57338f576517299a95d670d0db22e2d9b228d
Author: Ayush Saxena <ayushsax...@apache.org>
AuthorDate: Tue Jan 23 11:40:56 2024 +0530

    Initial Code by Zoltan Haindrich's Repository (kgyrtkirk/hive-dev-box)
---
 Dockerfile                         |  48 +++
 Dockerfile.bazaar                  |  23 ++
 Dockerfile.executor                |  23 ++
 Dockerfile.impala                  |  49 +++
 README.md                          | 214 ++++++++++
 artifactory_backup.zip             | Bin 0 -> 91168 bytes
 bashrc                             |  22 +
 bin/apdiff                         |   4 +
 bin/bl                             |   3 +
 bin/conf                           |  75 ++++
 bin/deploy_custom_jars             |  56 +++
 bin/deploy_jars                    |  40 ++
 bin/dev_eclipse                    |  29 ++
 bin/gerrit_pr                      |   8 +
 bin/github_pr                      |   5 +
 bin/hive_launch                    |  40 ++
 bin/hive_load_test_data            |   9 +
 bin/hive_patch_development         |  10 +
 bin/hs2_debug                      |   7 +
 bin/init_hive_src                  |  16 +
 bin/init_tsdb                      |  16 +
 bin/mrproper                       |  14 +
 bin/mvn                            |  16 +
 bin/psql                           |  42 ++
 bin/rebase                         |   4 +
 bin/reinit_metastore               | 235 +++++++++++
 bin/safe_bl                        |   4 +
 bin/send_custom_jars               |  19 +
 bin/spawn_shell_after              |  22 +
 bin/srcs                           |  75 ++++
 bin/sw                             | 276 +++++++++++++
 bin/tez_debug                      |   9 +
 bin/wait_port                      |  28 ++
 buildAll                           |  39 ++
 conf/artifactory.config.latest.xml | 346 ++++++++++++++++
 enter.bash                         |  30 ++
 etc/hive/hive-log4j2.properties    |  83 ++++
 etc/motd                           |  10 +
 etc/screenrc                       |  21 +
 hdb                                |  34 ++
 hooks/build                        |  16 +
 run.bash                           |  89 ++++
 seccomp.json                       | 815 +++++++++++++++++++++++++++++++++++++
 settings.xml                       |  57 +++
 start_artifactory.bash             |  50 +++
 tools/build_cleanup                |   4 +
 tools/cdpcli                       |  21 +
 tools/docker_entrypoint            |  43 ++
 tools/docker_entrypoint.bazaar     |  32 ++
 tools/docker_entrypoint.executor   |   5 +
 tools/entrypoint.impala            |  41 ++
 tools/i_sort                       |  30 ++
 tools/iii                          |  16 +
 tools/impala_bootstrap             |  15 +
 tools/install_basics               |  85 ++++
 tools/install_conf                 |  98 +++++
 tools/install_executor             |  10 +
 tools/install_executor2            |   9 +
 tools/install_executor3            |  27 ++
 tools/install_java_zulu            |  17 +
 tools/install_mysql.bash           |  25 ++
 tools/install_psql.bash            |  16 +
 tools/install_sdk.bash             |  31 ++
 tools/install_texturepacker        |   8 +
 tools/install_toolbox              |   7 +
 tools/install_x2go                 |   7 +
 tools/install_xmlstarlet           |  72 ++++
 tools/python3_default              |   7 +
 tools/y                            |  13 +
 69 files changed, 3670 insertions(+)

diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..0362838
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,48 @@
+#FROM debian:buster
+FROM debian:bullseye
+
+COPY tools/build_cleanup /tools/
+
+COPY tools/install_basics /tools/
+RUN /tools/install_basics
+
+COPY tools/install_java_zulu /tools/
+RUN /tools/install_java_zulu
+
+COPY tools/install_xmlstarlet /tools/
+RUN /tools/install_xmlstarlet
+
+COPY tools/install_toolbox /tools/
+RUN /tools/install_toolbox
+
+COPY tools/i_sort /tools/
+RUN /tools/i_sort
+
+#COPY tools/cdpcli /tools/
+#RUN /tools/cdpcli
+
+COPY etc  /etc
+COPY bin  /bin
+
+COPY tools/iii /tools/
+RUN /tools/iii
+
+COPY tools/install_conf /tools/
+RUN /tools/install_conf
+
+COPY tools/install_x2go /tools/
+RUN /tools/install_x2go
+
+COPY tools/y /tools/
+RUN /tools/y
+
+USER dev
+WORKDIR /home/dev
+
+ENV LANG en_US.UTF-8
+ENV LANGUAGE en_US:en
+ENV LC_ALL en_US.UTF-8
+
+COPY tools/docker_entrypoint /.entrypoint
+ENTRYPOINT ["/.entrypoint"]
+
diff --git a/Dockerfile.bazaar b/Dockerfile.bazaar
new file mode 100644
index 0000000..c37ffbd
--- /dev/null
+++ b/Dockerfile.bazaar
@@ -0,0 +1,23 @@
+FROM kgyrtkirk/hive-dev-box:latest
+
+USER root
+
+#COPY tools/install_bazaar /tools/
+#RUN /tools/install_bazaar
+
+#COPY tools/install_executor2 /tools/
+#RUN /tools/install_executor2 ${UID}
+
+#COPY tools/install_executor3 /tools/
+ARG UID=1000
+#RUN /tools/install_executor3 ${UID}
+
+COPY etc/* /etc/
+COPY bin/* /bin/
+
+VOLUME /work
+VOLUME /data
+
+COPY tools/docker_entrypoint.bazaar /.entrypoint
+ENTRYPOINT ["/.entrypoint"]
+CMD ["bash"]
diff --git a/Dockerfile.executor b/Dockerfile.executor
new file mode 100644
index 0000000..66155ef
--- /dev/null
+++ b/Dockerfile.executor
@@ -0,0 +1,23 @@
+FROM kgyrtkirk/hive-dev-box:latest
+
+USER root
+
+COPY tools/install_executor /tools/
+RUN /tools/install_executor
+
+COPY tools/install_executor2 /tools/
+RUN /tools/install_executor2 ${UID}
+
+COPY tools/install_executor3 /tools/
+ARG UID=1000
+RUN /tools/install_executor3 ${UID}
+
+COPY etc/* /etc/
+COPY bin/* /bin/
+
+WORKDIR /home/jenkins
+USER jenkins
+
+COPY tools/docker_entrypoint.executor /.entrypoint
+ENTRYPOINT ["/.entrypoint"]
+CMD ["bash"]
diff --git a/Dockerfile.impala b/Dockerfile.impala
new file mode 100644
index 0000000..2e4faa4
--- /dev/null
+++ b/Dockerfile.impala
@@ -0,0 +1,49 @@
+FROM ubuntu:18.04
+
+COPY tools/build_cleanup /tools/
+
+ENV TZ=Europe/Budapest
+RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+
+COPY tools/install_basics /tools/
+RUN /tools/install_basics
+
+COPY tools/install_java_zulu /tools/
+RUN /tools/install_java_zulu
+
+#COPY tools/install_xmlstarlet /tools/
+#RUN /tools/install_xmlstarlet
+
+COPY tools/install_toolbox /tools/
+RUN /tools/install_toolbox
+
+COPY tools/i_sort /tools/
+RUN /tools/i_sort
+
+COPY etc  /etc
+COPY bin  /bin
+
+COPY tools/install_conf /tools/
+RUN /tools/install_conf
+
+COPY tools/install_x2go /tools/
+RUN /tools/install_x2go
+
+USER dev
+WORKDIR /home/dev
+
+ENV LANG en_US.UTF-8
+ENV LANGUAGE en_US:en
+ENV LC_ALL en_US.UTF-8
+
+COPY tools/impala_bootstrap /tools/
+RUN /tools/impala_bootstrap
+
+COPY tools/iii /tools/
+RUN /tools/iii
+
+ENV IMPALA_TOOLCHAIN /work/toolchain.impala
+
+COPY tools/entrypoint.impala /.entrypoint
+ENTRYPOINT ["/.entrypoint"]
+
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..71af4bc
--- /dev/null
+++ b/README.md
@@ -0,0 +1,214 @@
+# hive-dev-box
+
+## why?
+
+To make some easily accessible environment to run and develop Hive.
+
+## How could this project be used
+
+### containerized development platform
+
+Isolates work on different branches/etc by leveraging container isolation
+X11 apps could still run like "normal" applications (I tend to run multiple eclipse 
instances for every patch I'm actually working on)
+
+Full isolation makes it easier to customize everything toward the goal...all 
ports can be bound/etc.
+
+You may also run hive inside...
+
+### test executor
+
+There is a pre-baked image which contains some build tools in the image itself 
- that image is used at ci.hive.apache.org to run tests
+
+### bazaar
+
+Ability to run some version of hive as a standalone container;
+
+Lets launch a hive with:
+```
+docker run --rm -d -p 10000:10000 -v hive-dev-box_work:/work 
kgyrtkirk/hive-dev-box:bazaar
+```
+the above will initialize the metastore and launch a 
nodemanager/resourcemanager and hive as separate processes inside the container 
(in a screen session)
+
+* you may choose different versions by setting: HIVE_VERSION, TEZ_VERSION or 
HADOOP_VERSION
+* add `-v hive-dev-box_data:/data` to enable persistent metastore/warehouse
+
+### Testability aspect
+
+There are sometimes bugreports against earlier releases; but testing these out 
sometimes is problematic - running/switching between versions is kinda 
problematic. I was using some vagrant based box which was useful for doing this...
+
+### patch development processes
+
+I'm working on Hive and sometimes on other projects in the last couple years - 
and since QA runs may come after 8-12 hours; I work on multiple patches 
simultaneously.
+However; working on several patches simultaneously has its own problems:
+
+I go through all the approaches I was using earlier:
+
+* basic approach: use a single workspace - and switch the branch...
+    * unquestionably this is the most simple
+    * after switching the branch - a full rebuild is necessary
+* 1 for each: use multiple copies of hive - which have isolated maven caches
+    * pro:
+        * capability to run maven commands simultaneously on multiple patches
+    * con:
+        * one of the patches have to be "active" to make an IDE able to use it
+        * it falls short when it comes to working on patch simultaneous in 
multiple projects (hive+tez+hadoop)
+        * after some time it eats up space...
+* dockerized/virtualized development environment
+    * pro:
+        * everything is isolated
+        * because I'm not anymore bound to my natural environment: I may 
change a lot of things without interfering with anything else
+        * easier to "cleanup" at the end of submitting the patch (just delete 
the container)
+        * ability to have IDEs running for multiple patches at the same time
+    * con:
+        * isolated environment; configuration changes might get lost
+        * may waste disk space...
+
+## What's the goal of this?
+
+The aim of this project is to provide an easier way to test-drive hive releases
+
+* running releases:
+    * upstream apache releases
+    * HDP/CDP/CDH releases
+    * in-development builds
+* provide an environment for developing hive patches
+
+## Getting started - with running off shelf releases
+
+```shell
+# build and launch the hive-dev-box container
+./hdb run hive-test
+# after building the container you will get a prompt inside it
+# initialize the metastore with
+reinit_metastore
+# everything should be ready to launch hive
+hive_launch
+# exit with CTRL+A CTRL+\ to kill all processes
+```
+
+## Getting started - with patch development
+
+### make X11 forwarding work (once)
+
+* on linux based systems you are already running an xserver
+* MacOSX users should follow: 
https://medium.com/@mreichelt/how-to-show-x11-windows-within-docker-on-mac-50759f4b65cb
+
+### artifactory cache (once)
+
+Every container will be reaching out to almost the same artifacts; so 
employing an artifact cache "makes sense" in this case :D
+
+```shell
+# start artifactory instance
+./start_artifactory.bash
+```
+
+To configure this instance the start_artifactory command will show a few 
commands you will need to execute to set it up - once its running.
+
+After that you will be able to access artifactory at http://127.0.0.1:8081/ 
by using admin/admin to login.
+
+This instance will be linked to the running development environment(s) 
automatically
+
+### set properties (once)(optional)
+
+add an export to your .bashrc or similar; like:
+
+```shell
+# to have a shared folder between all the dev containers and also the host 
system:
+export HIVE_DEV_BOX_HOST_DIR=$HOME/hdb
+```
+
+The dev environment will assume that you are working on upstream patches; and 
will always open a new branch forked from master
+If you skip this; things may not work - you will be left to do these things; 
in case you are using HIVE_SOURCES env variable you might not need to set it 
anyway.
+
+```shell
+# make sure to load the new env variables for bash
+. .bashrc
+# and also create the host dir beforehand
+mkdir $HIVE_DEV_BOX_HOST_DIR
+```
+
+### launch - with sources stored inside container
+
+```shell
+# invoking with an argument names the container and will also be the preferred 
name for the ws and the development branch
+./hdb run HIVE-12121-asd
+# when the terminal comes up
+# issuing the following command will clone the sources based on your srcs 
dsl
+srcs hive
+# enter hive dir ; and create a local branch based on your requirements
+cd hive
+git branch `hostname` apache/master
+# if you need...patch the sources:
+cdpd-patcher hive
+#  run a full rebuild
+rebuild
+# you may run eclipse
+dev_eclipse
+```
+
+A shorter version exists for initializing upstream patch development
+
+```shell
+./hdb run HIVE-12121-asd
+# this will clone the source; creates a branch named after the containers 
hostname; runs a rebuild and open eclipse
+hive_patch_development
+```
+
+## filesystem layout
+
+beyond the "obvious" `/bin` and `/lib` folders there are some which might make 
it more clear how this works:
+
+* `/work`
+    * used to store downloaded and expanded artifacts
+    * if you switch to say apache hive 3.1.1 and then to some other version 
you shouldn't need to wait for the download and expansion of it..
+    * this is mounted as a docker volume; and shared between the containers
+    * files under `/work` are not changed
+* `/active`
+    * the `/work` folder may contain a number of versions of the same component
+    * symbolic links point to actually used versions
+    * at any point doing an `ls -l /active` gives a brief overview about the 
active components
+* `/home/dev`
+    * this is the development home
+* `/home/dev/hive`
+    * the Hive sources; in case `HIVE_SOURCES` is set at launch time; this 
folder will be mapped to that directory on the host
+* `/home/dev/host`
+    * this is a directory shared with the host; can be used to exchange files 
(something.patch)
+    * will also contain the workspace "template"
+    * `bin` directory under this folder will be linked as `/home/dev/bin` so 
that scripts can be shared between containers and the host
+
+## hdb - easier access to running multiple envs
+
+* run NAME
+    * starts a new container with NAME - without attaching to it
+* enter NAME
+    * enters into the container
+
+### installation:
+```
+# create a symlink to hive-dev-box/hdb from an executable location ; eg 
$HOME/bin ?
+ln -s $PWD/hdb $HOME/bin/hdb
+# enable bash_completion for hdb
+# add the following line to .bashrc
+. <($HOME/bin/hdb bash_completion)
+```
+
+## sw - switch between versions of things
+
+```shell
+# use hadoop 3.1.0
+sw hadoop 3.1.0
+# use hive 2.3.5
+sw hive 2.3.5
+# use tez 0.8.4
+sw tez 0.8.4
+```
+
+## reinit_metastore [type]
+
+* optionally switch to a different metastore implementation
+* wipe it clean
+* populate schema and load sysdb
+
+```
+reinit_metastore [derby|postgres|mysql]
+```
diff --git a/artifactory_backup.zip b/artifactory_backup.zip
new file mode 100644
index 0000000..3820a21
Binary files /dev/null and b/artifactory_backup.zip differ
diff --git a/bashrc b/bashrc
new file mode 100644
index 0000000..353ac1d
--- /dev/null
+++ b/bashrc
@@ -0,0 +1,22 @@
+#_hive_dev_box_complete () { COMPREPLY+=( $(compgen -W "`docker ps --format 
'{{.Names}}'`" -- $2) );} 
+
+
+_hive_dev_box_complete () {
+    local cur prev words cword
+    _init_completion || return
+
+    COMPREPLY=( )
+    case "$prev" in
+        enter)
+            words="`docker ps -a --format '{{.Names}}'`"
+       ;;
+        run)
+        ;;
+        hdb)
+            words="enter run"
+        ;;
+    esac
+    COMPREPLY+=( $(compgen -W "$words" -- $2) );
+} 
+
+complete -F _hive_dev_box_complete hdb
diff --git a/bin/apdiff b/bin/apdiff
new file mode 100755
index 0000000..f5942a9
--- /dev/null
+++ b/bin/apdiff
@@ -0,0 +1,4 @@
+#!/bin/bash -e
+upstream="`git config --local --get extra.upstream || echo apache/master`"
+git diff `git merge-base HEAD $upstream` "$@"
+
diff --git a/bin/bl b/bin/bl
new file mode 100755
index 0000000..03da60b
--- /dev/null
+++ b/bin/bl
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+beeline -n ${USER:-dev} -u 'jdbc:hive2://localhost:10000/default' "$@"
diff --git a/bin/conf b/bin/conf
new file mode 100755
index 0000000..4e4f95c
--- /dev/null
+++ b/bin/conf
@@ -0,0 +1,75 @@
+#!/bin/bash -e
+
+if [ "$EUID" != 0 ] ;then
+       sudo $0 "$@"
+       exit 0
+fi
+
+D=/conf
+
+function gen_content() {
+       cat >&3 << EOF
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!-- GENERATED by `$0` at `date` -->
+<!-- BEWARE: your edits might get overwritten... -->
+<configuration>
+EOF
+
+       cd $D
+       for f in `find $1 -type f`;do
+               k="`basename "$f"`"
+               echo "<property>"               >&3
+               echo " <name>$k</name>"         >&3
+               echo " <value>`cat $f`</value>" >&3
+               echo "</property>"              >&3
+       done
+echo '</configuration>' >&3
+
+}
+
+function deploy() {
+       OUT=/etc/${1}.xml
+       rm -f $OUT
+       gen_content $1 3> $OUT
+}
+
+case "$1" in
+       init)
+               git init $D
+       ;;
+       set)
+               [ "$#" -ne 4 ] && echo "incorrect args" && exit 1
+               cd $D
+               mkdir -p "$2"
+               echo "$4" > "$2/$3"
+               git add .
+               git commit -m "$2/$3 => $4" || echo no-op
+               deploy $2
+       ;;
+       del)
+               [ "$#" -ne 3 ] && echo "incorrect args" && exit 1
+               cd $D
+               rm -f "$2/$3"
+               git add .
+               git commit -m "del $2/$3" || echo no-op
+               deploy $2
+       ;;
+       show)
+               cd $D
+               find *  -type f -printf "%p " -exec cat "{}" \;
+       ;;
+       *)
+               cat << EOF
+usage:
+       $0 show                                                                 
# lists props
+       $0 set <path> <key> <value>                             #
+       $0 del <path> <key>
+example:
+       $0 set hive/hive-site hive.in.test true
+       $0 del hive/hive-site hive.in.test
+EOF
+               exit 1
+       ;;
+esac
+
diff --git a/bin/deploy_custom_jars b/bin/deploy_custom_jars
new file mode 100755
index 0000000..5460b8f
--- /dev/null
+++ b/bin/deploy_custom_jars
@@ -0,0 +1,56 @@
+#!/bin/bash
+set -x
+
+[ "$3" == "" ] && echo "usage: $0 <kube.yml> <namespace> <jar>" && exit 1
+
+set -o pipefail
+#set -x
+set -e
+
+k="kubectl --kubeconfig=$1 --namespace=$2"
+
+if [ "${NS/warehouse*}" == "" ];then
+       podName="metastore-0"
+else
+       podName="hiveserver2-0"
+fi
+
+echo "@@@ podName: $podName"
+
+
+JAR=$3
+
+JAR_PATH="`$k logs pod/$podName -c init-custom-jars-localizer| grep -a 
CUSTOM_JARS_PATH | tr ' ' '\n'|grep '^s3a'|tail -n1|tee >(cat >&2)`"
+
+JAR_PATH="${JAR_PATH%.}"
+[ "$JAR_PATH" == "" ] && echo error JAR_PATH && exit 2
+
+echo $JAR_PATH
+
+set -x
+F="`basename "$JAR_PATH"`"
+
+S=/tmp/cj_script
+cat > $S << EOF
+#!/bin/bash
+#hdfs dfs -mkdir -p $(dirname $(dirname $JAR_PATH))
+hdfs dfs -mkdir -p           $(dirname $JAR_PATH)
+#hdfs dfs -rm -r -f     $(dirname $JAR_PATH)
+hdfs dfs -rm -r -f                    $JAR_PATH
+hdfs dfs -mkdir -p                     $JAR_PATH
+hdfs dfs -copyFromLocal /tmp/`basename $JAR` $JAR_PATH/
+hdfs dfs -ls $JAR_PATH/
+EOF
+
+chmod +x $S
+cat $S
+
+$k cp $JAR ${podName}:/tmp/
+$k cp $S ${podName}:$S
+
+$k exec -t ${podName} $S
+
+$k delete pod/${podName}
+banner ok
+#$k delete hiveserver2-0
+
diff --git a/bin/deploy_jars b/bin/deploy_jars
new file mode 100644
index 0000000..dda3ff7
--- /dev/null
+++ b/bin/deploy_jars
@@ -0,0 +1,40 @@
+#!/bin/bash
+set -e
+
+#which yum && yum install -y nano lsof
+
+[ "$#" -ne 2 ] && echo "usage: $0 $PID [show|backup|patch]" && exit 1
+
+PID=$1
+
+case "$2" in
+       show)
+               function operate() {
+                       echo "OP >$1< >$2<"
+               }
+       ;;
+       backup)
+               d="backup.`date +%s`"
+               function operate() {
+                       echo "backup $2 to $d"
+                       mkdir -p "$d"
+                       cp "$2" "$d/"
+               }
+       ;;
+       patch)
+               function operate() {
+                       echo "cp        $1      $2"
+                       cp "$1" "$2"
+               }
+       ;;
+       *)
+esac
+
+
+ls *.jar | while read jar;do
+  echo "@ $jar"
+  lsof -n -p $PID|grep -F "/${jar}" | sed -r 's/.* ([^ ]+jar).*/\1/' | sort | 
uniq | while read target;do
+    operate "$jar" "$target"
+  done
+done
+
diff --git a/bin/dev_eclipse b/bin/dev_eclipse
new file mode 100755
index 0000000..a2da305
--- /dev/null
+++ b/bin/dev_eclipse
@@ -0,0 +1,29 @@
+#!/bin/bash -e
+
+cd
+
+function safe_sw() {
+  if [ -e "/active/$1" ];then
+    echo " * $1 is already present"
+  else
+    /bin/sw "$@"
+  fi
+}
+
+safe_sw eclipse
+
+name="`hostname`"
+
+WS=ws_$name
+if [ ! -d "$WS" ]; then
+    echo " * first run detected; initializing workspace"
+    git clone https://github.com/kgyrtkirk/default-eclipse-workspace $WS
+    for p in hive tez;do
+        if [ -d "$p" ]; then
+            echo " * importing $p..."
+            eclipse -nosplash -data ~/$WS -application 
com.seeq.eclipse.importprojects.headlessimport -import ~/$p
+        fi
+    done
+fi
+
+eclipse -nosplash -data "$WS" "$@"
diff --git a/bin/gerrit_pr b/bin/gerrit_pr
new file mode 100755
index 0000000..276ee1c
--- /dev/null
+++ b/bin/gerrit_pr
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+N=$1 
+#65898/1
+S="`echo "$1" | cut -d '/' -f1|tail -c3`"
+echo git fetch cdh refs/changes/$S/$N && git checkout FETCH_HEAD
+git fetch cdh refs/changes/$S/$N && git checkout FETCH_HEAD
+
diff --git a/bin/github_pr b/bin/github_pr
new file mode 100755
index 0000000..4657b17
--- /dev/null
+++ b/bin/github_pr
@@ -0,0 +1,5 @@
+#!/bin/bash -e
+
+[ "$1" == "" ]  && echo "usage: $0 <PR_ID>" && exit 1
+git fetch apache pull/$1/head:PR-$1
+git checkout PR-$1
diff --git a/bin/hive_launch b/bin/hive_launch
new file mode 100755
index 0000000..481b4ec
--- /dev/null
+++ b/bin/hive_launch
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+set -e
+
+for i in "$@";do
+       case "$i" in
+               -d|--detached)
+                       SCREEN_OPTS+=" -dm "
+               ;;
+               -I|--init-default-metastore)
+                       D=/data/hive/metastore_db
+                       if [ ! -d "$D" ]; then
+                               echo "@@@ init metastore"
+                               reinit_metastore
+                       else
+                               echo "@@@ metastore already inited at $D"
+                       fi
+               ;;
+               *)
+                       echo "!!! unknown option $i" >&2 && exit 1
+               ;;
+       esac
+done
+
+
+wait_port close 8040
+wait_port close 8042
+wait_port close 8030
+wait_port close 8032
+wait_port close 10000
+
+cat > /tmp/screenrc1 << EOF
+screen -t term
+screen -t RM   1        spawn_shell_after yarn resourcemanager
+screen -t NM   2        spawn_shell_after yarn nodemanager
+screen -t HS2  3        spawn_shell_after hs2_debug
+screen -t BL   4        spawn_shell_after safe_bl
+EOF
+
+screen $SCREEN_OPTS -c /tmp/screenrc1
diff --git a/bin/hive_load_test_data b/bin/hive_load_test_data
new file mode 100755
index 0000000..41b242b
--- /dev/null
+++ b/bin/hive_load_test_data
@@ -0,0 +1,9 @@
+#!/bin/bash -e
+
+cd /tmp
+rm -rf hive-test-data
+git clone https://github.com/abstractdog/hive-test-data
+cd hive-test-data/
+BEELINE=bl ./load_10000.sh
+
+banner ok
diff --git a/bin/hive_patch_development b/bin/hive_patch_development
new file mode 100755
index 0000000..24e9aaa
--- /dev/null
+++ b/bin/hive_patch_development
@@ -0,0 +1,10 @@
+#!/bin/bash -e
+
+nice srcs hive
+cd hive
+n=`hostname`
+git branch $n apache/master
+git checkout $n
+git push kgyrtkirk -u HEAD
+nice -n 19 rebuild
+banner ok
diff --git a/bin/hs2_debug b/bin/hs2_debug
new file mode 100755
index 0000000..d3b92aa
--- /dev/null
+++ b/bin/hs2_debug
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+#FIXME have to come up with something to fix this
+#export HADOOP_CLASSPATH+=":/active/tez/q/*.jar:/active/tez/q/lib/*"
+export HADOOP_CLIENT_OPTS+=" 
-agentlib:jdwp=transport=dt_socket,server=y,address=8000,suspend=n"
+export HADOOP_OPTS+=" -Xmx1g"
+hiveserver2
diff --git a/bin/init_hive_src b/bin/init_hive_src
new file mode 100755
index 0000000..37d4f14
--- /dev/null
+++ b/bin/init_hive_src
@@ -0,0 +1,16 @@
+#!/bin/bash -e
+
+[ "$1" == "" ] && echo "no name?!" && exit 1
+
+n=$1
+
+git clone g...@github.com:apache/hive $n
+(
+    cd $n
+    git checkout -b $n
+)
+mkdir ws-$n
+cd ws-$n
+tar xzf /hive-dev-box/tools/def_ws.tgz
+git reset --hard
+
diff --git a/bin/init_tsdb b/bin/init_tsdb
new file mode 100755
index 0000000..bec4299
--- /dev/null
+++ b/bin/init_tsdb
@@ -0,0 +1,16 @@
+#!/bin/bash
+set -e
+
+[ -d "$PGDATA" ] && "PGDATA ($PGDATA) exists -> exiting" && exit 1
+
+initdb
+
+cat >> $PGDATA/postgresql.conf << EOF
+shared_preload_libraries = 'timescaledb'
+listen_addresses = '*'
+EOF
+
+cat >> $PGDATA/pg_hba.conf << EOF
+host all all 0.0.0.0/0 trust
+EOF
+
diff --git a/bin/mrproper b/bin/mrproper
new file mode 100755
index 0000000..838c592
--- /dev/null
+++ b/bin/mrproper
@@ -0,0 +1,14 @@
+#!/bin/bash -e
+
+if git status --porcelain=2|grep .;then
+    echo "> worktree is not clean; continue?"
+    read
+    if [ "$REPLY" != "y" ]; then
+        echo "interrupted.."
+        exit 1
+    fi
+fi
+
+git clean -dfx
+rm -rf ~/.m2/repository/
+rm ~/ws_*/.metadata/.plugins/org.eclipse.jdt.core/*.index
diff --git a/bin/mvn b/bin/mvn
new file mode 100755
index 0000000..9442afc
--- /dev/null
+++ b/bin/mvn
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+echo "@@@ auto-isolation" >&2
+C="`git config --local --get extra.mavenopts`"
+C2="`git config --local --get extra.usermavenopts`"
+echo "@@@ $C $C2" >&2
+
+MVN="/active/maven/bin/mvn"
+if [ -x /active/mvnd/bin/mvnd ];then
+  echo "@@@ using mvnd"
+  MVN=/active/mvnd/bin/mvnd
+fi
+
+$MVN $C $C2 "$@"
+exit $?
+
diff --git a/bin/psql b/bin/psql
new file mode 100755
index 0000000..144161b
--- /dev/null
+++ b/bin/psql
@@ -0,0 +1,42 @@
+#!/bin/bash
+set -e
+
+command -v which >/dev/null 2>&1 || { echo "I require which but it's not 
installed.  Aborting." >&2; exit 1; }
+
+A=( `which -a psql` )
+case "${#A[@]}" in
+    0)  echo "$0: ERROR: no psql on PATH?" && exit 1;   ;;
+    1)  psql=${A[0]};   ;;
+    *)  psql=${A[1]};   ;;
+esac
+echo    "psql: $psql"
+
+PSQLRC=<(
+[ -e "$PSQLRC" ] && echo "\i $PSQLRC"
+e='$e'
+cat << EOF
+-- save pid into file
+\o /tmp/_pgsql_debug_pid
+select pg_backend_pid();
+\! sed -r 's/ +//g;/^[0-9]+$/p;d' -i /tmp/_pgsql_debug_pid
+\o
+do $e$ begin raise notice 'backend PID stored in /tmp/_pgsql_debug_pid'; end 
$e$ ;
+
+-- wait for debugger
+\if :{?debugger_wait_port}
+do $e$ begin raise notice 'waiting for debugger; connect to 5555 after 
attach'; end $e$ ;
+\setenv debugger_wait_port 5555
+\! nc -l -p $debugger_wait_port
+\endif
+EOF
+) exec $psql "$@"
+# exec $psql -f <(
+# ) "$@"
+
+
+echo 0.${A[0]}
+echo ${A[@]}
+
+echo ${#A[@]}
+echo 1.${A[1]}
+
diff --git a/bin/rebase b/bin/rebase
new file mode 100755
index 0000000..fe8b12a
--- /dev/null
+++ b/bin/rebase
@@ -0,0 +1,4 @@
+#!/bin/bash -e
+upstream="`git config --local --get extra.upstream || echo apache/master`"
+git rebase -i $upstream
+
diff --git a/bin/reinit_metastore b/bin/reinit_metastore
new file mode 100755
index 0000000..8a82ae3
--- /dev/null
+++ b/bin/reinit_metastore
@@ -0,0 +1,235 @@
+#!/bin/bash -e
+
+type=${1:-derby}
+DOCKER_NETWORK=${DOCKER_NETWORK:-hive-dev-box-net}
+[ "$#" -gt 0 ] && shift
+force=0
+[ "$1" == "-f" ] && shift && force=1
+
+# FIXME enable hive.metastore.schema.verification
+
+echo "@ initializing: $type"
+
+DOCKER="sudo -E docker"
+RUN_OPTS+=" --network $DOCKER_NETWORK"
+function isContainerRunning() {
+    [ "`$DOCKER ps -q -f name=$1`" != "" ]
+}
+
+if [ "$DBNAME" == "" ];then
+       dbName="ms_`hostname|tr '-' '_'|tr 'A-Z' 'a-z'`"
+else
+       dbName=$DBNAME
+fi
+
+echo "metastore database name: $dbName"
+
+function installFile() {
+    D="$1"
+    URL="$2"
+    mkdir -p /apps/lib
+    cd /apps/lib
+    N="`basename "$URL"`"
+    if [ ! -f "$N" ];then
+        echo "@ installing $N"
+        wget -O "$N.tmp" -nv "$URL"
+        mv "$N.tmp" "$N"
+    fi
+
+}
+function startup_container() {
+    local containerName=$1
+    shift
+    if isContainerRunning $containerName;then
+        if [ $force == "1" ];then
+            $DOCKER rm -f "$containerName"
+        fi
+    fi
+
+    if isContainerRunning $containerName;then
+        echo "@ $containerName is running..."
+    else
+        echo "@ starting $containerName..."
+        $DOCKER run --name $containerName $*
+    fi
+}
+
+function wait_for_port() {
+    local containerName=$1
+    local port=$2
+    echo "Waiting for port $port in $containerName to be available..."
+    wait-for-port --timeout=300 --host=$containerName $port || status=$?
+    if [ $status > 0 ]; then
+      $DOCKER logs $containerName
+      exit $status
+    fi
+}
+
+case $type in
+    rebuild)
+        n="`git status -s | wc -l `"
+        if [ "$n" -ne 0 ];then
+            git status -s
+            echo ">> ok to purge and rebuild? "
+            read
+            if [ "$REPLY" != "y" ];then
+                echo " ...exiting"
+                exit 1
+            fi
+        fi
+        /usr/local/hive-toolbox/scripts/xl_hive_reinit `git config --get 
extra.ideProjects`
+        exit 0
+    ;;
+    derby)
+        D=/data/hive/metastore_db
+        rm -rf $D
+        conf del hive/hive-site javax.jdo.option.ConnectionDriverName
+        conf set hive/hive-site javax.jdo.option.ConnectionURL 
"jdbc:derby:;databaseName=/data/hive/metastore_db;create=true"
+        conf del hive/hive-site javax.jdo.option.ConnectionUserName
+        conf del hive/hive-site javax.jdo.option.ConnectionPassword
+           cp /active/hive/lib/derby-*.jar /apps/lib/
+    ;;
+    mysql)
+        containerName=dev_mysql
+        RUN_OPTS+=" -e MYSQL_ROOT_PASSWORD=mypassword"
+        RUN_OPTS+=" --restart always -d"
+        RUN_OPTS+=" mariadb:10.8.3"
+
+        startup_container $containerName $RUN_OPTS
+
+        cat > ~/.my.cnf << EOF
+            [client]
+            user=hive
+            password=mypassword
+            database=$dbName
+            host=$containerName
+EOF
+        wait_for_port $containerName 3306
+        id
+echo $dbName
+        mysql -uroot mysql -f << EOF
+            drop database if exists $dbName;
+            create database $dbName;
+            CREATE USER IF NOT EXISTS 'hive'@'%' IDENTIFIED BY 'mypassword';
+            GRANT ALL PRIVILEGES ON $dbName.* TO 'hive'@'%';
+            FLUSH PRIVILEGES;
+EOF
+        mkdir -p /apps/lib
+        cd /apps/lib
+        [ ! -f mysql-connector-java-8.0.17.jar ] && wget -nv 
https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.17/mysql-connector-java-8.0.17.jar
+
+        conf set hive/hive-site javax.jdo.option.ConnectionDriverName 
com.mysql.jdbc.Driver
+        conf set hive/hive-site javax.jdo.option.ConnectionURL 
"jdbc:mysql://$containerName:3306/$dbName"
+        conf set hive/hive-site javax.jdo.option.ConnectionUserName hive
+        conf set hive/hive-site javax.jdo.option.ConnectionPassword mypassword
+    ;;
+    postgres)
+        containerName=dev_postgres
+        RUN_OPTS+=" -e POSTGRES_PASSWORD=mypassword"
+        RUN_OPTS+=" --restart always -d"
+        RUN_OPTS+=" postgres:buster"
+        startup_container $containerName $RUN_OPTS
+
+        wait_for_port $containerName 5432
+
+        # FIXME: PGHOST/PGUSER/PGDATABASE set in _conf
+        export PGHOST=$containerName
+        export PGUSER=hive
+        export PGDATABASE=$dbName
+        #export PGDATABASE=$dbName
+        echo $containerName:5432:$dbName:hive:mypassword > ~/.pgpass
+        echo $containerName:5432:postgres:postgres:mypassword >> ~/.pgpass
+        chmod 600 ~/.pgpass
+        psql -U postgres postgres -c "CREATE ROLE hive LOGIN PASSWORD 
'mypassword'" || echo ok
+        
+        psql -U postgres postgres -c "drop database if exists $dbName"
+        psql -U postgres postgres -c "create database $dbName owner hive"
+
+        installFile /apps/lib 
https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.9/postgresql-42.2.9.jar
+
+        conf set hive/hive-site javax.jdo.option.ConnectionDriverName 
org.postgresql.Driver
+        conf set hive/hive-site javax.jdo.option.ConnectionURL 
"jdbc:postgresql://$containerName/$dbName"
+        conf set hive/hive-site javax.jdo.option.ConnectionUserName hive
+        conf set hive/hive-site javax.jdo.option.ConnectionPassword mypassword
+    ;;
+    oracle)
+        containerName=dev_oracle
+        # FIXME: consider relaxing restart always
+        RUN_OPTS+=" --restart always -d"
+        RUN_OPTS+=" quay.io/maksymbilenko/oracle-12c"
+        startup_container $containerName $RUN_OPTS
+
+        wait_for_port $containerName 1521
+        wait_for_port $containerName 8080
+
+        echo "$DOCKER exec -it $containerName /bin/bash -ic 'sqlplus -L 
\"system/oracle\"' "'$@' | sudo dd of=/bin/sqlplus_sys
+        # FIXME it would be better to detect wether stdin is a file or not...
+           echo "$DOCKER exec -i $containerName /bin/bash -ic 'sqlplus -S -L 
\"system/oracle\"' "'$@' | sudo dd of=/bin/sqlplus_sys0
+       echo "$DOCKER exec -it $containerName /bin/bash -ic 'sqlplus -L 
\"$dbName/mypassword\"' "'$@' | sudo dd of=/bin/sqlplus
+       sudo chmod +x /bin/sqlplus{,_sys*}
+
+        sqlplus_sys0 << EOF
+    drop user $dbName cascade;
+
+       CREATE USER $dbName IDENTIFIED BY mypassword;
+       GRANT SELECT_CATALOG_ROLE TO $dbName;
+       GRANT CONNECT, RESOURCE TO $dbName;
+       GRANT UNLIMITED TABLESPACE TO $dbName;
+       QUIT;
+EOF
+        conf set hive/hive-site javax.jdo.option.ConnectionDriverName 
oracle.jdbc.OracleDriver
+        conf set hive/hive-site javax.jdo.option.ConnectionURL 
"jdbc:oracle:thin:@//$containerName/xe"
+        conf set hive/hive-site javax.jdo.option.ConnectionUserName $dbName
+        conf set hive/hive-site javax.jdo.option.ConnectionPassword mypassword
+
+        #$DOCKER cp 
$containerName:/u01/app/oracle/product/11.2.0/xe/jdbc/lib/ojdbc6.jar /apps/lib
+        $DOCKER cp 
$containerName:/u01/app/oracle-product/12.2.0/SE/dmu/jlib/ojdbc6.jar /apps/lib
+    ;;
+    mssql)
+        containerName=dev_mssql
+        # FIXME: consider relaxing restart always
+        RUN_OPTS+=" --restart always -d"
+        RUN_OPTS+=" -e ACCEPT_EULA=Y"
+        RUN_OPTS+=" -e SA_PASSWORD=passwordX@ASD"
+        RUN_OPTS+=" mcr.microsoft.com/mssql/server:2019-latest"
+        startup_container $containerName $RUN_OPTS
+
+        echo "$DOCKER exec -i $containerName /opt/mssql-tools/bin/sqlcmd -S 
localhost -U SA -P 'passwordX@ASD' "'$@' | sudo dd of=/bin/sqlcmd_SA0
+        echo "$DOCKER exec -it $containerName /opt/mssql-tools/bin/sqlcmd -S 
localhost -U SA -P 'passwordX@ASD' "'$@' | sudo dd of=/bin/sqlcmd_SA
+        echo "$DOCKER exec -it $containerName /opt/mssql-tools/bin/sqlcmd -S 
localhost -d $dbName -U hiveuser -P 'mypassword@ASD' "'$@' | sudo dd 
of=/bin/sqlcmd
+        sudo chmod +x /bin/sqlcmd{_SA,_SA0,}
+
+        wait_for_port $containerName 1433
+
+        installFile /apps/lib 
https://repo1.maven.org/maven2/com/microsoft/sqlserver/mssql-jdbc/7.4.1.jre8/mssql-jdbc-7.4.1.jre8.jar
+       sleep 3
+        #make sure hiveuser exists...
+        sqlcmd_SA0 << EOF
+CREATE LOGIN hiveuser with password='mypassword@ASD';
+CREATE USER hiveuser for login hiveuser;
+GO
+EOF
+
+        sqlcmd_SA0 << EOF
+DROP DATABASE IF EXISTS $dbName;
+CREATE DATABASE $dbName;
+GO
+ALTER AUTHORIZATION ON DATABASE::$dbName TO hiveuser;
+GO
+EOF
+
+        conf set hive/hive-site javax.jdo.option.ConnectionDriverName 
com.microsoft.sqlserver.jdbc.SQLServerDriver
+        conf set hive/hive-site javax.jdo.option.ConnectionURL 
"jdbc:sqlserver://$containerName:1433;DatabaseName=$dbName"
+        conf set hive/hive-site javax.jdo.option.ConnectionUserName hiveuser
+        conf set hive/hive-site javax.jdo.option.ConnectionPassword 
mypassword@ASD
+
+    ;;
+    *)
+        banner not impl
+        exit 1
+esac
+
+schematool -verbose -dbType $type -initSchema
+schematool -verbose -dbType hive -initSchema -metaDbType $type
+
+banner ok
diff --git a/bin/safe_bl b/bin/safe_bl
new file mode 100755
index 0000000..afabae3
--- /dev/null
+++ b/bin/safe_bl
@@ -0,0 +1,4 @@
#!/bin/bash
# safe_bl: block until something is listening on local port 10000
# (presumably HiveServer2 -- TODO confirm) and then run `bl` (beeline
# wrapper) with all the arguments passed to this script.

wait_port open 10000
bl "$@"
diff --git a/bin/send_custom_jars b/bin/send_custom_jars
new file mode 100755
index 0000000..f5c65d2
--- /dev/null
+++ b/bin/send_custom_jars
@@ -0,0 +1,19 @@
#!/bin/bash
# Package the locally built hive*.jar files into a tarball and ship it to a
# Kubernetes cluster via deploy_custom_jars.
#
# Inputs (environment):
#   NS - target namespace (required)
#   K  - kubeconfig path (default: ~/.kube/config)

set -e

J=$HOME/h.tar.gz
K=${K:-~/.kube/config}

if [ -z "$NS" ]; then
	echo "NS not set!"
	exit 1
fi
if [ ! -s "$K" ]; then
	echo "K=$K is not set correctly !"
	exit 1
fi

set -x

# enter the lib dir of the binary package produced by the hive build
# (must be started from the hive source root)
cd packaging/target/apache-hive-*-bin/apache-hive-*-bin/lib

tar czf $J hive*jar

deploy_custom_jars $K $NS $J
diff --git a/bin/spawn_shell_after b/bin/spawn_shell_after
new file mode 100755
index 0000000..33b7b2f
--- /dev/null
+++ b/bin/spawn_shell_after
@@ -0,0 +1,22 @@
#!/bin/bash
# Run the given command, then replace this process with an interactive bash
# whose history already contains that command (so it is easy to re-run).
# Ctrl-C is intercepted so a stray interrupt does not kill the wrapper;
# the third interrupt gives up.

ctrlc_count=0

# SIGINT handler: nag on the first two interrupts, exit on the third.
function no_ctrlc()
{
    ctrlc_count=$((ctrlc_count + 1))
    echo
    case $ctrlc_count in
        1) echo "Stop that." ;;
        2) echo "Once more and I quit." ;;
        *)
            echo "That's it.  I quit."
            exit
            ;;
    esac
}

trap no_ctrlc SIGINT

"$@"
# seed the interactive shell's history with the command just executed
exec bash --init-file <(echo "history -s $@")
diff --git a/bin/srcs b/bin/srcs
new file mode 100755
index 0000000..5f60eb1
--- /dev/null
+++ b/bin/srcs
@@ -0,0 +1,75 @@
#!/bin/bash -e
# srcs: set up (or refresh) the source tree for <component>.
#
# A tiny DSL in ~/.config/srcs.dsl declares git remotes and git-config
# entries per component.  The script maintains a shared "reference" repo
# under /work/reference/<component> (to speed up fetches via git alternates)
# and a working clone in $HOME/<component>.
#
# usage: srcs <component>
#        srcs __defaults__    # writes a sample dsl file and exits

component="$1"

dsl=~/.config/srcs.dsl

# Bootstrap: without a dsl file only __defaults__ is accepted, which
# generates a commented sample covering tez/hive/calcite.
if [ ! -e "$dsl" ]; then
    if [ "$component" == "__defaults__" ];then
        (
            echo '# srcs dsl language'
            echo '# add_src <component> <remote_name> <git_remote_addr>'
            echo '# git_config <component> <key> <value>'
            for c in tez hive calcite;do
                echo "add_src $c apache https://github.com/apache/$c";
            done
            echo 'git_config hive extra.ideProjects ql,common'
            echo 'git_config hive extra.mavenOpts -Denforcer.skip -Phadoop-2,itests'
        )> "$dsl"
        exit 0
    else
        echo -e "ERROR: $dsl not found"
        echo -e "you may get a default by calling invoking:\n   $0 __defaults__"
        exit 1
    fi
fi

[ "$component" == "" ] && echo "usage: $0 <component>" && exit 1

# DSL verb: register remote $2 -> $3 for the current repo when $1 matches
# the requested component and the remote is not already present.
function add_src() {
    if [ "$1" == "$component" ];then
        # fix: discard stdout AND stderr; the original `2>&1 >/dev/null`
        # order sent stderr to the terminal.  Only the exit code matters.
        if git remote get-url "$2" >/dev/null 2>&1;then
            echo " * already has remote $2"
        else
            echo " * adding $2 ($3)"
            git remote add "$2" "$3"
        fi
    fi
}

# DSL verb: apply `git config $2 $3` when $1 matches the requested component.
function git_config() {
    if [ "$1" == "$component" ];then
        echo " * git config $2=$3"
        git config "$2" "$3"
    fi
}


# shared reference repository: fetched first so the working clone below can
# borrow its objects via .git/objects/info/alternates
refs="/work/reference/$component/"
mkdir -p "$refs"

cd "$refs"
[ ! -d .git ] && git init .
echo " * updating refs ($refs)"
echo " * interpreting: $dsl"
. "$dsl"

git fetch -j10 --all

# go to home dir
cd
[ ! -d "$component" ] && git init "$component"
#&& echo "sources for $component already exists" && exit 1

cd "$component"
if [ -d "$refs" ];then
    echo " * adding reference repo $refs"
    echo "$refs/.git/objects" > .git/objects/info/alternates
fi

echo " * interpreting: $dsl"
. "$dsl"

git fetch -j10 --all

banner ok
diff --git a/bin/sw b/bin/sw
new file mode 100755
index 0000000..6f5dc5f
--- /dev/null
+++ b/bin/sw
@@ -0,0 +1,276 @@
#!/bin/bash -e
# sw: download/install (on demand) and activate software components by
# pointing the /active/<component> symlink at the chosen version.
#
# usage: sw <component[,component...]> [version/args...]
# Installations land under $SW_DIR; downloads are cached in $SW_DL_DIR.

#[ "$EUID" != 0 ] && sudo $0 "$@" && exit 0


SW_DIR=${SW_DIR:-/work/}
SW_DL_DIR=${SW_DL_DIR:-$SW_DIR/downloads}

mkdir -p $SW_DIR $SW_DL_DIR

apache_mirror='http://xenia.sote.hu/ftp/mirrors/www.apache.org/'
archive_mirror='https://archive.apache.org/dist/'

err_report() {
    echo "Error on line $1"
}

trap 'err_report $LINENO' ERR

# dl <target> <url>... : download <target> from the first mirror that works;
# skips the download when the file is already cached.
function dl() {

	[ -f "$1" ] && echo "already downloaded $1" && return 0
	f=$1
	shift
	while [ "$1" != "" ];do
		wget -nv -O "$f.tmp" "$1" && mv "$f.tmp" "$f" && return 0
		shift
	done
	echo "unable to download $f"
	return 1
}

# activate <component> <dir> : repoint the /active/<component> symlink.
function activate() {
	echo "@ activating: $2 for $1"
	T=/active/$1
	rm -f "$T"
	ln -s "$2" "$T"

}

# first argument may be a comma separated list of components
IFS=, read -ra components <<< "$1"
shift

for component in "${components[@]}";do

cd $SW_DIR
case "$component" in
	java)
		# switch the zulu JDK major version ($1, e.g. 8/11)
		sudo update-java-alternatives --set zulu$1-ca-amd64
		echo "export JAVA_HOME=/usr/lib/jvm/zulu$1-ca-amd64/" | sudo tee /etc/profile.d/java.sh
		echo "open a new shell or run: . /etc/profile.d/java.sh"
	;;
	hive-dev)
		# activate the binary package produced by a local hive build
		HIVE_DIR=${1:-/home/dev/hive/}
		d=$HIVE_DIR/packaging/target/apache-hive-*-bin/apache-hive-*-bin/
		[ ! -d $d ] && echo "ERROR: expected a directory at: $d" && exit 1
		activate hive $d
	;;
	tez-dev)
		# activate the minimal tez dist produced by a local tez build
		sudo apt-get install -y xmlstarlet
		rm -f /active/tez

		v="`xmlstarlet sel  -N x=http://maven.apache.org/POM/4.0.0 -t -v x:project/x:version /home/dev/tez/pom.xml`"
		d=/home/dev/tez/tez-dist/target/tez-$v-minimal
		f=/home/dev/tez/tez-dist/target/tez-$v-minimal.tar.gz
		[ ! -e $f ] && echo "ERROR: expected a tar.gz: $f" && exit 1
		[ ! -d $d ] && echo "ERROR: expected a dir: $d" && exit 1
		conf set tez/tez-site tez.use.cluster.hadoop-libs true
		cp $f /apps/tez/tez.tar.gz
		activate tez $d
	;;
	hive|hadoop|tez)
		# released versions are handled by the sw2 helper
		sw2 $component "$@"
	;;
	eclipse)
		#version=${1:-2020-09}
		version=${1:-2019-06}
		state=${2:-R}
		type=${3:-java}
		bin_dir=$SW_DIR/eclipse-${version}-${state}-${type}
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			fn=$SW_DL_DIR/eclipse-${type}-${version}.tar.gz
			#http://mirrors.uniri.hr/eclipse//technology/epp/downloads/release/2019-03/R/eclipse-java-2019-03-R-linux-gtk-x86_64.tar.gz
			path=eclipse//technology/epp/downloads/release/${version}/${state}/eclipse-${type}-${version}-${state}-linux-gtk-x86_64.tar.gz
			dl $fn \
				https://mirror.dkm.cz/${path} \
				http://eclipse.mirror.garr.it/mirrors/${path} \
				http://mirrors.uniri.hr/${path}
			rm -rf _eclipse_unpack
			mkdir _eclipse_unpack
			cd _eclipse_unpack
			tar xzf $fn
			# headless project-import support
			wget -nv https://github.com/seeq12/eclipse-import-projects-plugin/raw/master/jar/com.seeq.eclipse.importprojects_1.4.0.jar \
				-O eclipse/plugins/com.seeq.eclipse.importprojects_1.4.0.jar
			# fix: -i was missing, so the heap bump never reached eclipse.ini
			sed -i 's/-Xmx1024m/-Xmx3024m/' eclipse/eclipse.ini
			mv eclipse ${bin_dir}
			cd ..
			rmdir _eclipse_unpack
			#rm $fn
		fi
		activate $component $bin_dir
	;;
	idea)
		version=${1:-2021.3.1}
		bin_dir=$SW_DIR/ideaIC-$version
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			fn=$SW_DL_DIR/ideaIC-${version}.tar.gz
			dl $fn \
				https://download.jetbrains.com/idea/ideaIC-${version}.tar.gz
			tmp=${bin_dir}.tmp
			rm -rf "$tmp"
			mkdir "$tmp"
			tar -C "$tmp" -xzf "$fn"
			# the tarball contains a single versioned dir; link it as "current"
			( cd $tmp; ln -s * current)
			mv "$tmp" "$bin_dir"
		fi
		activate $component $bin_dir/current
	;;
	mat)
		version=1.12.0
		echo "@@ only 1.12.0"
		bin_dir=$SW_DIR/mat-$version
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			# fix: cache filename was copy-pasted from the idea case
			# (ideaIC-*.tar.gz), colliding with the idea download cache
			fn=$SW_DL_DIR/MemoryAnalyzer-${version}.zip
			dl $fn \
				https://rhlx01.hs-esslingen.de/pub/Mirrors/eclipse/mat/1.12.0/rcp/MemoryAnalyzer-1.12.0.20210602-linux.gtk.x86_64.zip
			tmp=${bin_dir}.tmp
			rm -rf "$tmp"
			mkdir "$tmp"
			unzip -d "$tmp" "$fn"
			#( cd $tmp; ln -s * current)
			mv "$tmp" "$bin_dir"
		fi
		activate $component $bin_dir/mat
	;;
	visualvm)
		version=${1:-1.4.4}
		v1=${version//./}
		bin_dir=$SW_DIR/visualvm_$v1
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			fn=$SW_DL_DIR/visualvm-${version}.zip
			dl $fn \
				https://github.com/visualvm/visualvm.src/releases/download/${version}/visualvm_${v1}.zip
			unzip $fn
		fi
		activate $component $bin_dir
	;;
	maven)
		version=${1:-3.8.4}
		bin_dir=$SW_DIR/apache-maven-$version
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			fn=$SW_DL_DIR/apache-maven-${version}.tar.gz
			dl $fn \
				${apache_mirror}/maven/maven-3/${version}/binaries/apache-maven-${version}-bin.tar.gz \
				${archive_mirror}/maven/maven-3/${version}/binaries/apache-maven-${version}-bin.tar.gz
			tar xzf $fn
			#rm $fn
		fi
		activate $component $bin_dir
	;;
	mvnd)
		version=${1:-0.7.1}
		bin_dir=$SW_DIR/mvnd-${version}-linux-amd64
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			fn=$SW_DL_DIR/mvnd-${version}.zip
			dl $fn \
				https://github.com/mvndaemon/mvnd/releases/download/${version}/mvnd-${version}-linux-amd64.zip
			unzip $fn
		fi
		activate $component $bin_dir
		echo "PATH+=:/active/mvnd/bin" | sudo tee /etc/profile.d/mvnd.sh
	;;
	derby)
		version=${1:-10.14.2.0}
		bin_dir=$SW_DIR/db-derby-${version}-bin
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			fn=$SW_DL_DIR/db-derby-${version}-bin.zip
			dl $fn \
				https://www-eu.apache.org/dist/db/derby/db-derby-${version}/db-derby-${version}-bin.zip
			unzip $fn
			#rm $fn
		fi
		activate $component $bin_dir
	;;
	protobuf)
		version=${1:-2.5.0}
		bin_dir=/work/protobuf-${version}
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			# built from source; needs the autotools toolchain
			sudo apt-get install -y autoconf automake libtool curl make g++ unzip
			fn=$SW_DL_DIR/protobuf-${version}.zip
			dl $fn \
				https://github.com/google/protobuf/releases/download/v${version}/protobuf-${version}.zip
			d="$SW_DL_DIR/protobuf-${version}.tmp"
			rm -rf $d
			mkdir $d
			cd $d
			unzip $fn
			cd protobuf-${version}
			./autogen.sh
			./configure --prefix=$bin_dir
			make
			make install
		fi
		activate $component $bin_dir
	;;
	thrift)
		V=${1:-0.9.3}
		bin_dir=/work/thrift-${V}
		sudo apt-get install -y autoconf make automake flex bison gcc  g++ libtool pkg-config #checkinstall
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			echo " * installing thrift compiler $V"
			sudo apt-get install -y autoconf make automake flex bison gcc  g++ libtool pkg-config #checkinstall #python3-setuptools
			build_dir="$SW_DL_DIR/build_thrift_$V"
			rm -rf "$build_dir"
			mkdir -p "$build_dir"
			cd "$build_dir"
			wget -nv  https://github.com/apache/thrift/archive/${V}.tar.gz
			tar xzf ${V}.tar.gz && rm ${V}.tar.gz
			cd thrift-${V}
			./bootstrap.sh
			# only the compiler is needed; disable every language binding
			./configure --prefix=$bin_dir --without-py3 --without-python  --without-qt5 --without-java --without-erlang --without-nodejs --without-nodets --without-lua --without-python --without-perl --without-php --without-php_extension --without-dart --without-ruby --without-haskell --without-go --without-swift --without-rs --without-cl --without-haxe --without-netstd --without-d
			make
			ln -s compiler/cpp/ bin
			ln -s contrib share
			mv "$build_dir/thrift-${V}" "$bin_dir"
		fi
		activate $component $bin_dir
	;;
	kubebuilder)
		V=1.0.8
		arch=amd64
		bin_dir=/work/kubebuilder_${V}_linux_${arch}
		if [ -d "$bin_dir" ];then
			echo "$bin_dir already installed"
		else
			# fix: $version is unset in this branch; use $V so the cache
			# file is not literally "kubebuilder-.tar.gz"
			fn=$SW_DL_DIR/kubebuilder-${V}.tar.gz
			dl $fn \
				"https://github.com/kubernetes-sigs/kubebuilder/releases/download/v${V}/kubebuilder_${V}_linux_${arch}.tar.gz";
			tar xzf $fn
		fi
		activate $component $bin_dir
	;;
	*)
		cat << EOF
Switches between installed components
	$0 <hadoop|hive|hive-dev|tez> [version]
	example:
		$0 hive			# activates some recent hive
		$0 hive 3.1.1
		$0 hadoop
		$0 hive-dev		# switches to use /hive-dev's binary package
EOF
		exit 1
	;;
esac

done
diff --git a/bin/tez_debug b/bin/tez_debug
new file mode 100755
index 0000000..2fdc163
--- /dev/null
+++ b/bin/tez_debug
@@ -0,0 +1,9 @@
#!/bin/bash
# tez_debug: configure JDWP remote-debugging options for Hive-on-Tez
# (task JVM opts and the Tez AM launch opts) via the `conf` helper.

set -e

# NOTE(review): the two `conf set ... ""` calls at the bottom immediately
# overwrite the JDWP options set here, so the net effect of running this
# script is to CLEAR both properties.  If the intent was to ENABLE
# debugging (port 8001, suspend=y), the last two lines look like a
# leftover toggle and should probably be removed/commented -- confirm.
conf set hive/hive-site hive.tez.java.opts "-agentlib:jdwp=transport=dt_socket,server=y,address=8001,suspend=y"
conf set tez/tez-site tez.am.launch.cmd-opts "-agentlib:jdwp=transport=dt_socket,server=y,address=8001,suspend=y"

conf set hive/hive-site hive.tez.java.opts ""
conf set tez/tez-site tez.am.launch.cmd-opts ""
diff --git a/bin/wait_port b/bin/wait_port
new file mode 100755
index 0000000..93af199
--- /dev/null
+++ b/bin/wait_port
@@ -0,0 +1,28 @@
#!/bin/bash
# wait_port: wait (up to 60 seconds) for a local TCP listening port to reach
# the desired state.
#
# usage: wait_port <open|close> <portnumber>
# exit:  0 = desired state reached, 1 = usage error, 2 = timed out

[ "$#" -ne 2 ] && echo "usage: $0 [open|close] [portnumber]" && exit 1

state=$1
port=$2

echo -n "waiting for port $port to be $state..."
t=60
while [ $t -gt 0 ];do
	# a listening socket on the port means "open"; the pipeline's exit
	# code is grep's, so test it directly instead of via a dangling $?
	if sudo netstat -n -p -l -t | grep ":$port " > /dev/null;then
		current=open
	else
		current=close
	fi
	if [ "$current" = "$state" ];then
		echo "done!"
		exit 0
	fi
	echo -n "."
	sleep 1
	# fix: $(( )) replaces the deprecated $[ ] arithmetic syntax
	t=$(( t - 1 ))
done

echo "timed out..."

exit 2
diff --git a/buildAll b/buildAll
new file mode 100755
index 0000000..474290a
--- /dev/null
+++ b/buildAll
@@ -0,0 +1,39 @@
#!/bin/bash -e
# Build the hive-dev-box docker images and (optionally) push them.
#
# usage: buildAll [push] [alt-image-name]
#   push           - actually run `docker push` (default: only echo)
#   alt-image-name - additional image name to tag and push under

#buildAll push docker-sandbox.infra.cloudera.com/hive/hive-dev-box


set -x
stamp="$(date +%Y%m%d%H%M%S)"

push="echo NOT pushing"
if [ "$1" == "push" ]; then
	push="docker push"
	shift
fi


baseName=kgyrtkirk/hive-dev-box
altName=kgyrtkirk/hive-dev-box
if [ "$1" != "" ]; then
	altName="$1"
	shift
fi

# build <tag> <docker-build-args...>
# Builds the image under $baseName with both a plain and a timestamped tag,
# mirrors the tags under $altName, and pushes the $altName tags.
function build() {
	local tag="$1"
	shift
	docker build -t $baseName:${tag} -t $baseName:${tag}-$stamp "$@"
	docker tag $baseName:${tag} $altName:${tag}
	docker tag $baseName:${tag} $altName:${tag}-$stamp

	$push $altName:${tag}
	$push $altName:${tag}-$stamp
}

build latest		-f Dockerfile .
build executor		-f Dockerfile.executor .
build executor-1001	-f Dockerfile.executor --build-arg UID=1001 .
build bazaar		-f Dockerfile.bazaar .
diff --git a/conf/artifactory.config.latest.xml 
b/conf/artifactory.config.latest.xml
new file mode 100644
index 0000000..5705a25
--- /dev/null
+++ b/conf/artifactory.config.latest.xml
@@ -0,0 +1,346 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<config xmlns="http://artifactory.jfrog.org/xsd/2.2.3"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://www.jfrog.org/xsd/artifactory-v2_2_3.xsd";>
+    <offlineMode>false</offlineMode>
+    <helpLinksEnabled>true</helpLinksEnabled>
+    <fileUploadMaxSizeMb>100</fileUploadMaxSizeMb>
+    <revision>2</revision>
+    <dateFormat>dd-MM-yy HH:mm:ss z</dateFormat>
+    <addons>
+        <showAddonsInfo>true</showAddonsInfo>
+        <showAddonsInfoCookie>1569504923425</showAddonsInfoCookie>
+    </addons>
+    <security>
+        <anonAccessEnabled>false</anonAccessEnabled>
+        <hideUnauthorizedResources>false</hideUnauthorizedResources>
+        <passwordSettings>
+            <encryptionPolicy>supported</encryptionPolicy>
+            <expirationPolicy>
+                <enabled>false</enabled>
+                <passwordMaxAge>60</passwordMaxAge>
+                <notifyByEmail>true</notifyByEmail>
+            </expirationPolicy>
+            <resetPolicy>
+                <enabled>true</enabled>
+                <maxAttemptsPerAddress>3</maxAttemptsPerAddress>
+                <timeToBlockInMinutes>60</timeToBlockInMinutes>
+            </resetPolicy>
+        </passwordSettings>
+        <ldapSettings/>
+        <ldapGroupSettings/>
+        <userLockPolicy>
+            <enabled>false</enabled>
+            <loginAttempts>5</loginAttempts>
+        </userLockPolicy>
+        <accessClientSettings/>
+        <buildGlobalBasicReadAllowed>false</buildGlobalBasicReadAllowed>
+        
<buildGlobalBasicReadForAnonymous>false</buildGlobalBasicReadForAnonymous>
+    </security>
+    <backups>
+        <backup>
+            <key>backup-daily</key>
+            <enabled>true</enabled>
+            <cronExp>0 0 2 ? * MON-FRI</cronExp>
+            <retentionPeriodHours>0</retentionPeriodHours>
+            <createArchive>false</createArchive>
+            <excludedRepositories/>
+            <sendMailOnError>true</sendMailOnError>
+            <excludeNewRepositories>false</excludeNewRepositories>
+            <precalculate>false</precalculate>
+        </backup>
+        <backup>
+            <key>backup-weekly</key>
+            <enabled>false</enabled>
+            <cronExp>0 0 2 ? * SAT</cronExp>
+            <retentionPeriodHours>336</retentionPeriodHours>
+            <createArchive>false</createArchive>
+            <excludedRepositories/>
+            <sendMailOnError>true</sendMailOnError>
+            <excludeNewRepositories>false</excludeNewRepositories>
+            <precalculate>false</precalculate>
+        </backup>
+    </backups>
+    <indexer>
+        <enabled>false</enabled>
+        <cronExp>0 23 5 * * ?</cronExp>
+    </indexer>
+    <localRepositories>
+        <localRepository>
+            <key>artifactory-build-info</key>
+            <type>buildinfo</type>
+            <description>Build Info repository</description>
+            <includesPattern>**/*</includesPattern>
+            <repoLayoutRef>simple-default</repoLayoutRef>
+            <dockerApiVersion>V2</dockerApiVersion>
+            <forceNugetAuthentication>false</forceNugetAuthentication>
+            <blackedOut>false</blackedOut>
+            <handleReleases>true</handleReleases>
+            <handleSnapshots>true</handleSnapshots>
+            <maxUniqueSnapshots>0</maxUniqueSnapshots>
+            <maxUniqueTags>0</maxUniqueTags>
+            <suppressPomConsistencyChecks>true</suppressPomConsistencyChecks>
+            <propertySets/>
+            <archiveBrowsingEnabled>false</archiveBrowsingEnabled>
+            <snapshotVersionBehavior>unique</snapshotVersionBehavior>
+            
<localRepoChecksumPolicyType>client-checksums</localRepoChecksumPolicyType>
+            <calculateYumMetadata>false</calculateYumMetadata>
+            <yumRootDepth>0</yumRootDepth>
+            <debianTrivialLayout>false</debianTrivialLayout>
+            <enableFileListsIndexing>false</enableFileListsIndexing>
+        </localRepository>
+        <localRepository>
+            <key>example-repo-local</key>
+            <type>generic</type>
+            <description>Example artifactory repository</description>
+            <includesPattern>**/*</includesPattern>
+            <repoLayoutRef>simple-default</repoLayoutRef>
+            <dockerApiVersion>V2</dockerApiVersion>
+            <forceNugetAuthentication>false</forceNugetAuthentication>
+            <blackedOut>false</blackedOut>
+            <handleReleases>true</handleReleases>
+            <handleSnapshots>true</handleSnapshots>
+            <maxUniqueSnapshots>0</maxUniqueSnapshots>
+            <maxUniqueTags>0</maxUniqueTags>
+            <suppressPomConsistencyChecks>true</suppressPomConsistencyChecks>
+            <propertySets/>
+            <archiveBrowsingEnabled>false</archiveBrowsingEnabled>
+            <snapshotVersionBehavior>unique</snapshotVersionBehavior>
+            
<localRepoChecksumPolicyType>client-checksums</localRepoChecksumPolicyType>
+            <calculateYumMetadata>false</calculateYumMetadata>
+            <yumRootDepth>0</yumRootDepth>
+            <debianTrivialLayout>false</debianTrivialLayout>
+            <enableFileListsIndexing>false</enableFileListsIndexing>
+        </localRepository>
+    </localRepositories>
+    <remoteRepositories>
+        <remoteRepository>
+            <key>central</key>
+            <type>maven</type>
+            <includesPattern>**/*</includesPattern>
+            <repoLayoutRef>maven-2-default</repoLayoutRef>
+            <dockerApiVersion>V2</dockerApiVersion>
+            <forceNugetAuthentication>false</forceNugetAuthentication>
+            <blackedOut>false</blackedOut>
+            <handleReleases>true</handleReleases>
+            <handleSnapshots>true</handleSnapshots>
+            <maxUniqueSnapshots>0</maxUniqueSnapshots>
+            <maxUniqueTags>0</maxUniqueTags>
+            <suppressPomConsistencyChecks>false</suppressPomConsistencyChecks>
+            <propertySets/>
+            <archiveBrowsingEnabled>false</archiveBrowsingEnabled>
+            <url>http://uk.maven.org/maven2</url>
+            <offline>false</offline>
+            <hardFail>false</hardFail>
+            <storeArtifactsLocally>true</storeArtifactsLocally>
+            <fetchJarsEagerly>true</fetchJarsEagerly>
+            <fetchSourcesEagerly>true</fetchSourcesEagerly>
+            <retrievalCachePeriodSecs>7200</retrievalCachePeriodSecs>
+            <assumedOfflinePeriodSecs>300</assumedOfflinePeriodSecs>
+            
<missedRetrievalCachePeriodSecs>1800</missedRetrievalCachePeriodSecs>
+            
<remoteRepoChecksumPolicyType>generate-if-absent</remoteRepoChecksumPolicyType>
+            
<unusedArtifactsCleanupPeriodHours>0</unusedArtifactsCleanupPeriodHours>
+            <shareConfiguration>false</shareConfiguration>
+            <synchronizeProperties>false</synchronizeProperties>
+            <listRemoteFolderItems>true</listRemoteFolderItems>
+            <rejectInvalidJars>false</rejectInvalidJars>
+            <p2OriginalUrl>http://uk.maven.org/maven2</p2OriginalUrl>
+            <contentSynchronisation>
+                <enabled>false</enabled>
+                <statistics>
+                    <enabled>false</enabled>
+                </statistics>
+                <properties>
+                    <enabled>false</enabled>
+                </properties>
+                <source>
+                    <originAbsenceDetection>false</originAbsenceDetection>
+                </source>
+            </contentSynchronisation>
+            <blockMismatchingMimeTypes>true</blockMismatchingMimeTypes>
+            
<mismatchingMimeTypesOverrideList></mismatchingMimeTypesOverrideList>
+            <bypassHeadRequests>false</bypassHeadRequests>
+            <allowAnyHostAuth>false</allowAnyHostAuth>
+            <socketTimeoutMillis>15000</socketTimeoutMillis>
+            <enableCookieManagement>false</enableCookieManagement>
+            <enableTokenAuthentication>false</enableTokenAuthentication>
+            <propagateQueryParams>false</propagateQueryParams>
+        </remoteRepository>
+    </remoteRepositories>
+    <virtualRepositories>
+        <virtualRepository>
+            <key>wonder</key>
+            <type>maven</type>
+            <includesPattern>**/*</includesPattern>
+            <repoLayoutRef>maven-2-default</repoLayoutRef>
+            <dockerApiVersion>V2</dockerApiVersion>
+            <forceNugetAuthentication>false</forceNugetAuthentication>
+            
<artifactoryRequestsCanRetrieveRemoteArtifacts>false</artifactoryRequestsCanRetrieveRemoteArtifacts>
+            <resolveDockerTagsByTimestamp>false</resolveDockerTagsByTimestamp>
+            <repositories>
+                <repositoryRef>central</repositoryRef>
+            </repositories>
+            
<pomRepositoryReferencesCleanupPolicy>discard_active_reference</pomRepositoryReferencesCleanupPolicy>
+            <virtualCacheConfig>
+                
<virtualRetrievalCachePeriodSecs>600</virtualRetrievalCachePeriodSecs>
+            </virtualCacheConfig>
+            <forceMavenAuthentication>false</forceMavenAuthentication>
+            <debianDefaultArchitectures>i386,amd64</debianDefaultArchitectures>
+        </virtualRepository>
+    </virtualRepositories>
+    <distributionRepositories/>
+    <releaseBundlesRepositories/>
+    <proxies/>
+    <reverseProxies/>
+    <propertySets/>
+    <repoLayouts>
+        <repoLayout>
+            <name>maven-2-default</name>
+            
<artifactPathPattern>[orgPath]/[module]/[baseRev](-[folderItegRev])/[module]-[baseRev](-[fileItegRev])(-[classifier]).[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>true</distinctiveDescriptorPathPattern>
+            
<descriptorPathPattern>[orgPath]/[module]/[baseRev](-[folderItegRev])/[module]-[baseRev](-[fileItegRev])(-[classifier]).pom</descriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>SNAPSHOT</folderIntegrationRevisionRegExp>
+            
<fileIntegrationRevisionRegExp>SNAPSHOT|(?:(?:[0-9]{8}.[0-9]{6})-(?:[0-9]+))</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>ivy-default</name>
+            
<artifactPathPattern>[org]/[module]/[baseRev](-[folderItegRev])/[type]s/[module](-[classifier])-[baseRev](-[fileItegRev]).[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>true</distinctiveDescriptorPathPattern>
+            
<descriptorPathPattern>[org]/[module]/[baseRev](-[folderItegRev])/[type]s/ivy-[baseRev](-[fileItegRev]).xml</descriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>\d{14}</folderIntegrationRevisionRegExp>
+            
<fileIntegrationRevisionRegExp>\d{14}</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>gradle-default</name>
+            
<artifactPathPattern>[org]/[module]/[baseRev](-[folderItegRev])/[module]-[baseRev](-[fileItegRev])(-[classifier]).[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>true</distinctiveDescriptorPathPattern>
+            
<descriptorPathPattern>[org]/[module]/ivy-[baseRev](-[fileItegRev]).xml</descriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>\d{14}</folderIntegrationRevisionRegExp>
+            
<fileIntegrationRevisionRegExp>\d{14}</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>maven-1-default</name>
+            
<artifactPathPattern>[org]/[type]s/[module]-[baseRev](-[fileItegRev])(-[classifier]).[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>true</distinctiveDescriptorPathPattern>
+            
<descriptorPathPattern>[org]/[type]s/[module]-[baseRev](-[fileItegRev]).pom</descriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.+</folderIntegrationRevisionRegExp>
+            <fileIntegrationRevisionRegExp>.+</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>nuget-default</name>
+            
<artifactPathPattern>[orgPath]/[module]/[module].[baseRev](-[fileItegRev]).nupkg</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.*</folderIntegrationRevisionRegExp>
+            <fileIntegrationRevisionRegExp>.*</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>npm-default</name>
+            
<artifactPathPattern>[orgPath]/[module]/[module]-[baseRev](-[fileItegRev]).tgz</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.*</folderIntegrationRevisionRegExp>
+            <fileIntegrationRevisionRegExp>.*</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>bower-default</name>
+            
<artifactPathPattern>[orgPath]/[module]/[module]-[baseRev](-[fileItegRev]).[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.*</folderIntegrationRevisionRegExp>
+            <fileIntegrationRevisionRegExp>.*</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>vcs-default</name>
+            
<artifactPathPattern>[orgPath]/[module]/[refs&lt;tags|branches&gt;]/[baseRev]/[module]-[baseRev](-[fileItegRev])(-[classifier]).[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.*</folderIntegrationRevisionRegExp>
+            
<fileIntegrationRevisionRegExp>[a-zA-Z0-9]{40}</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>sbt-default</name>
+            
<artifactPathPattern>[org]/[module]/(scala_[scalaVersion&lt;.+&gt;])/(sbt_[sbtVersion&lt;.+&gt;])/[baseRev]/[type]s/[module](-[classifier]).[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>true</distinctiveDescriptorPathPattern>
+            
<descriptorPathPattern>[org]/[module]/(scala_[scalaVersion&lt;.+&gt;])/(sbt_[sbtVersion&lt;.+&gt;])/[baseRev]/[type]s/ivy.xml</descriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>\d{14}</folderIntegrationRevisionRegExp>
+            
<fileIntegrationRevisionRegExp>\d{14}</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>simple-default</name>
+            
<artifactPathPattern>[orgPath]/[module]/[module]-[baseRev].[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.*</folderIntegrationRevisionRegExp>
+            <fileIntegrationRevisionRegExp>.*</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>composer-default</name>
+            
<artifactPathPattern>[orgPath]/[module]/[module]-[baseRev](-[fileItegRev]).[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.*</folderIntegrationRevisionRegExp>
+            <fileIntegrationRevisionRegExp>.*</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>conan-default</name>
+            
<artifactPathPattern>[org]/[module]/[baseRev]/[channel&lt;[^/]+&gt;]/[folderItegRev]/(package/[package_id&lt;[^/]+&gt;]/[fileItegRev]/)?[remainder&lt;(?:.+)&gt;]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>[^/]+</folderIntegrationRevisionRegExp>
+            
<fileIntegrationRevisionRegExp>[^/]+</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>puppet-default</name>
+            
<artifactPathPattern>[orgPath]/[module]/[orgPath]-[module]-[baseRev].tar.gz</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.*</folderIntegrationRevisionRegExp>
+            <fileIntegrationRevisionRegExp>.*</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>go-default</name>
+            
<artifactPathPattern>[orgPath]/[module]/@v/v[refs].zip</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.*</folderIntegrationRevisionRegExp>
+            <fileIntegrationRevisionRegExp>.*</fileIntegrationRevisionRegExp>
+        </repoLayout>
+        <repoLayout>
+            <name>build-default</name>
+            
<artifactPathPattern>[orgPath]/[module](-[fileItegRev]).[ext]</artifactPathPattern>
+            
<distinctiveDescriptorPathPattern>false</distinctiveDescriptorPathPattern>
+            
<folderIntegrationRevisionRegExp>.*</folderIntegrationRevisionRegExp>
+            <fileIntegrationRevisionRegExp>.*</fileIntegrationRevisionRegExp>
+        </repoLayout>
+    </repoLayouts>
+    <remoteReplications/>
+    <localReplications/>
+    <gcConfig>
+        <cronExp>0 0 /4 * * ?</cronExp>
+    </gcConfig>
+    <cleanupConfig>
+        <cronExp>0 12 5 * * ?</cronExp>
+    </cleanupConfig>
+    <virtualCacheCleanupConfig>
+        <cronExp>0 12 0 * * ?</cronExp>
+    </virtualCacheCleanupConfig>
+    <folderDownloadConfig>
+        <enabled>false</enabled>
+        <enabledForAnonymous>false</enabledForAnonymous>
+        <maxDownloadSizeMb>1024</maxDownloadSizeMb>
+        <maxFiles>5000</maxFiles>
+        <maxConcurrentRequests>10</maxConcurrentRequests>
+    </folderDownloadConfig>
+    <trashcanConfig>
+        <enabled>true</enabled>
+        <allowPermDeletes>false</allowPermDeletes>
+        <retentionPeriodDays>14</retentionPeriodDays>
+    </trashcanConfig>
+    <replicationsConfig>
+        <blockPushReplications>false</blockPushReplications>
+        <blockPullReplications>false</blockPullReplications>
+    </replicationsConfig>
+    <bintrayApplications/>
+    <sumoLogicConfig>
+        <enabled>false</enabled>
+    </sumoLogicConfig>
+    <releaseBundlesConfig>
+        <incompleteCleanupPeriodHours>720</incompleteCleanupPeriodHours>
+    </releaseBundlesConfig>
+    <signedUrlConfig>
+        <maxValidForSeconds>31536000</maxValidForSeconds>
+    </signedUrlConfig>
+    <downloadRedirectConfig>
+        <fileMinimumSize>1</fileMinimumSize>
+    </downloadRedirectConfig>
+</config>
diff --git a/enter.bash b/enter.bash
new file mode 100755
index 0000000..470347e
--- /dev/null
+++ b/enter.bash
@@ -0,0 +1,30 @@
+#!/bin/bash -e
+[ "$1" == "" ] && echo "usage: $0 <container name>" && exit 1
+
+DOCKER=docker
+function isContainerRunning() {
+    [ "`$DOCKER ps -q -f name=$1`" != "" ]
+}
+
+# FIXME: make this cleaner
+if [ "$DISPLAY" != "" ];then
+    echo " * enabling X forward..."
+    if [ "`which sw_vers`" != "" ] ; then
+        # MacOSX assumed
+        xhost + 127.0.0.1
+        RUN_OPTS+=" -e DISPLAY=host.docker.internal:0"
+    else
+        XSOCK=/tmp/.X11-unix
+        XAUTH=/tmp/.docker.xauth
+        touch $XAUTH
+        xauth nlist $DISPLAY | sed -e 's/^..../ffff/' | xauth -f $XAUTH nmerge 
-
+        RUN_OPTS+=" -e DISPLAY -e XAUTHORITY=$XAUTH -v $XSOCK:$XSOCK -v 
$XAUTH:$XAUTH"
+    fi
+fi
+
+#export HIVE_TEST_DOCKER_HOST=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.Gateway}}{{end}}' `hostname`)
+
+isContainerRunning "$1" || docker start "$1"
+
+docker exec -it "$1" /bin/bash -login
+
diff --git a/etc/hive/hive-log4j2.properties b/etc/hive/hive-log4j2.properties
new file mode 100644
index 0000000..a56448f
--- /dev/null
+++ b/etc/hive/hive-log4j2.properties
@@ -0,0 +1,83 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+status = DEBUG
+name = HiveLog4j2
+packages = org.apache.hadoop.hive.ql.log
+
+# list of properties
+property.hive.log.level = DEBUG
+property.hive.root.logger = DRFA
+property.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}
+property.hive.log.file = hive.log
+property.hive.perflogger.log.level = INFO
+
+# list of all appenders
+appenders = console, DRFA
+
+# console appender
+appender.console.type = Console
+appender.console.name = console
+appender.console.target = SYSTEM_ERR
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
+
+# daily rolling file appender
+appender.DRFA.type = RollingRandomAccessFile
+appender.DRFA.name = DRFA
+appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
+# Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session
+appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
+appender.DRFA.layout.type = PatternLayout
+appender.DRFA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
+appender.DRFA.policies.type = Policies
+appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
+appender.DRFA.policies.time.interval = 1
+appender.DRFA.policies.time.modulate = true
+appender.DRFA.strategy.type = DefaultRolloverStrategy
+appender.DRFA.strategy.max = 30
+
+# list of all loggers
+loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, PerfLogger, AmazonAws, ApacheHttp
+
+logger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn
+logger.NIOServerCnxn.level = WARN
+
+logger.ClientCnxnSocketNIO.name = org.apache.zookeeper.ClientCnxnSocketNIO
+logger.ClientCnxnSocketNIO.level = WARN
+
+logger.DataNucleus.name = DataNucleus
+logger.DataNucleus.level = ERROR
+
+logger.Datastore.name = Datastore
+logger.Datastore.level = ERROR
+
+logger.JPOX.name = JPOX
+logger.JPOX.level = ERROR
+
+logger.AmazonAws.name=com.amazonaws
+logger.AmazonAws.level = INFO
+
+logger.ApacheHttp.name=org.apache.http
+logger.ApacheHttp.level = INFO
+
+logger.PerfLogger.name = org.apache.hadoop.hive.ql.log.PerfLogger
+logger.PerfLogger.level = ${sys:hive.perflogger.log.level}
+
+# root logger
+rootLogger.level = ${sys:hive.log.level}
+rootLogger.appenderRefs = root
+rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}
diff --git a/etc/motd b/etc/motd
new file mode 100644
index 0000000..cd3b6ae
--- /dev/null
+++ b/etc/motd
@@ -0,0 +1,10 @@
+
+Welcome to hive-dev-box!
+
+quick guide:
+* hive_launch
+  launches a screen session which starts: NM,RM,HS2,BL
+  you may terminate the show with CTRL+A, CTRL+\ and answering yes
+* reinit_metastore
+  clears and recreates the derby metastore
+
diff --git a/etc/screenrc b/etc/screenrc
new file mode 100644
index 0000000..4692f79
--- /dev/null
+++ b/etc/screenrc
@@ -0,0 +1,21 @@
+# the following two lines give a two-line status, with the current window highlighted
+hardstatus alwayslastline
+hardstatus string '%{= kG}[%{G}%H%? %1`%?%{g}][%= %{= kw}%-w%{+b yk} %n*%t%?(%u)%? %{-}%+w %=%{g}][%{B}%m/%d %{W}%C%A%{g}]'
+
+# huge scrollback buffer
+defscrollback 5000
+
+# no welcome message
+startup_message off
+
+# 256 colors
+attrcolor b ".I"
+termcapinfo xterm 'Co#256:AB=\E[48;5;%dm:AF=\E[38;5;%dm'
+defbce on
+
+# mouse tracking allows to switch region focus by clicking
+mousetrack on
+
+shell "/bin/bash"
+
+shell /bin/bash
diff --git a/hdb b/hdb
new file mode 100755
index 0000000..0c7534f
--- /dev/null
+++ b/hdb
@@ -0,0 +1,34 @@
+#!/bin/bash -e
+
+# go home...
+SOURCE="${BASH_SOURCE[0]}"
+while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a 
symlink
+  DIR="$( cd -P "$( dirname "$SOURCE" )" >/dev/null 2>&1 && pwd )"
+  SOURCE="$(readlink "$SOURCE")"
+  [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+done
+DIR="$( cd -P "$( dirname "$SOURCE" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR"
+
+cmd=$1
+[ "$cmd" != "" ] && shift
+case "$cmd" in
+       enter)
+               ./enter.bash "$@"
+       ;;
+       run)
+               ./run.bash -d "$@"
+               ./enter.bash "$@"
+       ;;
+       run0)
+               ./run.bash "$@"
+       ;;
+       bash_completion)
+               cat bashrc
+       ;;
+       *)
+               echo "usage: $0 run <new_container_name>"
+               echo "usage: $0 enter <existing_container_name>"
+               exit 1
+       ;;
+esac
diff --git a/hooks/build b/hooks/build
new file mode 100644
index 0000000..1259e29
--- /dev/null
+++ b/hooks/build
@@ -0,0 +1,16 @@
+#!/bin/bash -e
+
+# File needs to be called /hooks/build relative to the Dockerfile.
+# $IMAGE_NAME var is injected into the build so the tag is correct.
+
+echo "[@] hooks/build"
+set
+
+BUILD_ARGS="build"
+if [ "$DOCKER_TAG" == "executor-1001" ];then
+       BUILD_ARGS+=" --build-arg UID=1001"
+fi
+BUILD_ARGS+=" -f $DOCKERFILE_PATH -t $IMAGE_NAME ."
+
+set -x
+docker $BUILD_ARGS
diff --git a/run.bash b/run.bash
new file mode 100755
index 0000000..3da1b35
--- /dev/null
+++ b/run.bash
@@ -0,0 +1,89 @@
+#!/bin/bash
+set -e
+
+if [ "$1" == "-d" ];then
+    RUN_OPTS+=" -d"
+    shift
+else
+    RUN_OPTS+=" -it"
+fi
+
+if [ "$1" != "" ];then
+    RUN_OPTS+=" --name $1 --hostname $1"
+    shift
+fi
+
+RUN_OPTS+=" -v hive-dev-box_work:/work"
+RUN_OPTS+=" -v `pwd`:/hive-dev-box"
+if [ "$TOOLBOX_SOURCES" != "" ];then
+       RUN_OPTS+=" -v $TOOLBOX_SOURCES:/toolbox"
+fi
+
+if [ "$DISPLAY" != "" ];then
+    echo " * enabling X forward..."
+    if [ "`which sw_vers`" != "" ] ; then
+        # MacOSX assumed
+        xhost + 127.0.0.1
+        RUN_OPTS+=" -e DISPLAY=host.docker.internal:0"
+    else
+        XSOCK=/tmp/.X11-unix
+        XAUTH=/tmp/.docker.xauth
+        touch $XAUTH
+        xauth nlist $DISPLAY | sed -e 's/^..../ffff/' | xauth -f $XAUTH nmerge -
+        RUN_OPTS+=" -e DISPLAY -e XAUTHORITY=$XAUTH -v $XSOCK:$XSOCK -v 
$XAUTH:$XAUTH"
+    fi
+fi
+
+if [ "$SSH_AUTH_SOCK" != "" ];then
+    echo " * enabling SSH_AUTH_SOCK"
+    RUN_OPTS+=" -v $(dirname $SSH_AUTH_SOCK):$(dirname $SSH_AUTH_SOCK) -e 
SSH_AUTH_SOCK=$SSH_AUTH_SOCK"
+fi
+
+RUN_OPTS+=" -v `pwd`/settings.xml:/home/dev/.m2/settings.xml"
+RUN_OPTS+=" -v $HOME/.ssh:/home/dev/.ssh"
+RUN_OPTS+=" -v $HOME/.gitconfig:/home/dev/.gitconfig"
+RUN_OPTS+=" -e TERM=$TERM"
+
+[ "$HIVE_DEV_BOX_HOST_DIR" != "" ] && RUN_OPTS+=" -v 
$HIVE_DEV_BOX_HOST_DIR:/home/dev/host"
+[ "$HIVE_SOURCES" != "" ] && RUN_OPTS+=" -v $HIVE_SOURCES:/home/dev/hive"
+[ -e "$HOME/.config/asf_toolbox.yml" ] && RUN_OPTS+=" -v 
$HOME/.config/asf_toolbox.yml:/home/dev/.config/asf_toolbox.yml"
+[ -e "$HOME/.config/srcs.dsl" ] && RUN_OPTS+=" -v 
$HOME/.config/srcs.dsl:/home/dev/.config/srcs.dsl"
+[ -e /var/run/docker.sock ] && RUN_OPTS+=" -v /var/run/docker.sock:/var/run/docker.sock"
+
+# link artifactory
+[ "`docker ps -q -f name=artifactory`" != "" ] && RUN_OPTS+=" --link 
artifactory:artifactory "
+
+NET=hive-dev-box-net
+RUN_OPTS+=" --network $NET"
+RUN_OPTS+=" --shm-size 2g"
+#RUN_OPTS+=" --security-opt seccomp=unconfined"
+RUN_OPTS+=" --security-opt seccomp=seccomp.json"
+
+
+HDB_TYPE=${HDB_TYPE:-hive}
+IMG_NAME=${HDB_TYPE}-dev-box
+
+case "${HDB_TYPE}" in
+       hive)
+               ;;
+       impala)
+                       BUILD_OPTS+=" -f Dockerfile.$HDB_TYPE"
+                       RUN_OPTS+=" --privileged"
+               ;;
+       *)
+               echo "invalid type $HDB_TYPE"
+               exit 1;
+       ;;
+esac
+
+BUILD_OPTS+=" -t $IMG_NAME"
+BUILD_OPTS+=" -t $IMG_NAME:`date +%s`"
+#docker pull debian:buster
+docker build $BUILD_OPTS .
+docker network create $NET || true
+docker run          \
+    $RUN_OPTS       \
+    "$@"            \
+    $IMG_NAME       \
+    /bin/bash
+
diff --git a/seccomp.json b/seccomp.json
new file mode 100644
index 0000000..698f07f
--- /dev/null
+++ b/seccomp.json
@@ -0,0 +1,815 @@
+{
+       "downloadedFrom": 
"https://raw.githubusercontent.com/moby/moby/master/profiles/seccomp/default.json";,
+       "defaultAction": "SCMP_ACT_ERRNO",
+       "archMap": [
+               {
+                       "architecture": "SCMP_ARCH_X86_64",
+                       "subArchitectures": [
+                               "SCMP_ARCH_X86",
+                               "SCMP_ARCH_X32"
+                       ]
+               },
+               {
+                       "architecture": "SCMP_ARCH_AARCH64",
+                       "subArchitectures": [
+                               "SCMP_ARCH_ARM"
+                       ]
+               },
+               {
+                       "architecture": "SCMP_ARCH_MIPS64",
+                       "subArchitectures": [
+                               "SCMP_ARCH_MIPS",
+                               "SCMP_ARCH_MIPS64N32"
+                       ]
+               },
+               {
+                       "architecture": "SCMP_ARCH_MIPS64N32",
+                       "subArchitectures": [
+                               "SCMP_ARCH_MIPS",
+                               "SCMP_ARCH_MIPS64"
+                       ]
+               },
+               {
+                       "architecture": "SCMP_ARCH_MIPSEL64",
+                       "subArchitectures": [
+                               "SCMP_ARCH_MIPSEL",
+                               "SCMP_ARCH_MIPSEL64N32"
+                       ]
+               },
+               {
+                       "architecture": "SCMP_ARCH_MIPSEL64N32",
+                       "subArchitectures": [
+                               "SCMP_ARCH_MIPSEL",
+                               "SCMP_ARCH_MIPSEL64"
+                       ]
+               },
+               {
+                       "architecture": "SCMP_ARCH_S390X",
+                       "subArchitectures": [
+                               "SCMP_ARCH_S390"
+                       ]
+               }
+       ],
+       "syscalls": [
+               {
+                       "names": [
+                               "accept",
+                               "accept4",
+                               "access",
+                               "adjtimex",
+                               "alarm",
+                               "bind",
+                               "brk",
+                               "capget",
+                               "capset",
+                               "chdir",
+                               "chmod",
+                               "chown",
+                               "chown32",
+                               "clock_adjtime",
+                               "clock_adjtime64",
+                               "clock_getres",
+                               "clock_getres_time64",
+                               "clock_gettime",
+                               "clock_gettime64",
+                               "clock_nanosleep",
+                               "clock_nanosleep_time64",
+                               "close",
+                               "connect",
+                               "copy_file_range",
+                               "creat",
+                               "dup",
+                               "dup2",
+                               "dup3",
+                               "epoll_create",
+                               "epoll_create1",
+                               "epoll_ctl",
+                               "epoll_ctl_old",
+                               "epoll_pwait",
+                               "epoll_wait",
+                               "epoll_wait_old",
+                               "eventfd",
+                               "eventfd2",
+                               "execve",
+                               "execveat",
+                               "exit",
+                               "exit_group",
+                               "faccessat",
+                               "faccessat2",
+                               "fadvise64",
+                               "fadvise64_64",
+                               "fallocate",
+                               "fanotify_mark",
+                               "fchdir",
+                               "fchmod",
+                               "fchmodat",
+                               "fchown",
+                               "fchown32",
+                               "fchownat",
+                               "fcntl",
+                               "fcntl64",
+                               "fdatasync",
+                               "fgetxattr",
+                               "flistxattr",
+                               "flock",
+                               "fork",
+                               "fremovexattr",
+                               "fsetxattr",
+                               "fstat",
+                               "fstat64",
+                               "fstatat64",
+                               "fstatfs",
+                               "fstatfs64",
+                               "fsync",
+                               "ftruncate",
+                               "ftruncate64",
+                               "futex",
+                               "futex_time64",
+                               "futimesat",
+                               "getcpu",
+                               "getcwd",
+                               "getdents",
+                               "getdents64",
+                               "getegid",
+                               "getegid32",
+                               "geteuid",
+                               "geteuid32",
+                               "getgid",
+                               "getgid32",
+                               "getgroups",
+                               "getgroups32",
+                               "getitimer",
+                               "getpeername",
+                               "getpgid",
+                               "getpgrp",
+                               "getpid",
+                               "getppid",
+                               "getpriority",
+                               "getrandom",
+                               "getresgid",
+                               "getresgid32",
+                               "getresuid",
+                               "getresuid32",
+                               "getrlimit",
+                               "get_robust_list",
+                               "getrusage",
+                               "getsid",
+                               "getsockname",
+                               "getsockopt",
+                               "get_thread_area",
+                               "gettid",
+                               "gettimeofday",
+                               "getuid",
+                               "getuid32",
+                               "getxattr",
+                               "inotify_add_watch",
+                               "inotify_init",
+                               "inotify_init1",
+                               "inotify_rm_watch",
+                               "io_cancel",
+                               "ioctl",
+                               "io_destroy",
+                               "io_getevents",
+                               "io_pgetevents",
+                               "io_pgetevents_time64",
+                               "ioprio_get",
+                               "ioprio_set",
+                               "io_setup",
+                               "io_submit",
+                               "io_uring_enter",
+                               "io_uring_register",
+                               "io_uring_setup",
+                               "ipc",
+                               "kill",
+                               "lchown",
+                               "lchown32",
+                               "lgetxattr",
+                               "link",
+                               "linkat",
+                               "listen",
+                               "listxattr",
+                               "llistxattr",
+                               "_llseek",
+                               "lremovexattr",
+                               "lseek",
+                               "lsetxattr",
+                               "lstat",
+                               "lstat64",
+                               "madvise",
+                               "membarrier",
+                               "memfd_create",
+                               "mincore",
+                               "mkdir",
+                               "mkdirat",
+                               "mknod",
+                               "mknodat",
+                               "mlock",
+                               "mlock2",
+                               "mlockall",
+                               "mmap",
+                               "mmap2",
+                               "mprotect",
+                               "mq_getsetattr",
+                               "mq_notify",
+                               "mq_open",
+                               "mq_timedreceive",
+                               "mq_timedreceive_time64",
+                               "mq_timedsend",
+                               "mq_timedsend_time64",
+                               "mq_unlink",
+                               "mremap",
+                               "msgctl",
+                               "msgget",
+                               "msgrcv",
+                               "msgsnd",
+                               "msync",
+                               "munlock",
+                               "munlockall",
+                               "munmap",
+                               "nanosleep",
+                               "newfstatat",
+                               "_newselect",
+                               "open",
+                               "openat",
+                               "openat2",
+                               "pause",
+                               "pipe",
+                               "pipe2",
+                               "poll",
+                               "ppoll",
+                               "ppoll_time64",
+                               "prctl",
+                               "pread64",
+                               "preadv",
+                               "preadv2",
+                               "prlimit64",
+                               "pselect6",
+                               "pselect6_time64",
+                               "pwrite64",
+                               "pwritev",
+                               "pwritev2",
+                               "read",
+                               "readahead",
+                               "readlink",
+                               "readlinkat",
+                               "readv",
+                               "recv",
+                               "recvfrom",
+                               "recvmmsg",
+                               "recvmmsg_time64",
+                               "recvmsg",
+                               "remap_file_pages",
+                               "removexattr",
+                               "rename",
+                               "renameat",
+                               "renameat2",
+                               "restart_syscall",
+                               "rmdir",
+                               "rseq",
+                               "rt_sigaction",
+                               "rt_sigpending",
+                               "rt_sigprocmask",
+                               "rt_sigqueueinfo",
+                               "rt_sigreturn",
+                               "rt_sigsuspend",
+                               "rt_sigtimedwait",
+                               "rt_sigtimedwait_time64",
+                               "rt_tgsigqueueinfo",
+                               "sched_getaffinity",
+                               "sched_getattr",
+                               "sched_getparam",
+                               "sched_get_priority_max",
+                               "sched_get_priority_min",
+                               "sched_getscheduler",
+                               "sched_rr_get_interval",
+                               "sched_rr_get_interval_time64",
+                               "sched_setaffinity",
+                               "sched_setattr",
+                               "sched_setparam",
+                               "sched_setscheduler",
+                               "sched_yield",
+                               "seccomp",
+                               "select",
+                               "semctl",
+                               "semget",
+                               "semop",
+                               "semtimedop",
+                               "semtimedop_time64",
+                               "send",
+                               "sendfile",
+                               "sendfile64",
+                               "sendmmsg",
+                               "sendmsg",
+                               "sendto",
+                               "setfsgid",
+                               "setfsgid32",
+                               "setfsuid",
+                               "setfsuid32",
+                               "setgid",
+                               "setgid32",
+                               "setgroups",
+                               "setgroups32",
+                               "setitimer",
+                               "setpgid",
+                               "setpriority",
+                               "setregid",
+                               "setregid32",
+                               "setresgid",
+                               "setresgid32",
+                               "setresuid",
+                               "setresuid32",
+                               "setreuid",
+                               "setreuid32",
+                               "setrlimit",
+                               "set_robust_list",
+                               "setsid",
+                               "setsockopt",
+                               "set_thread_area",
+                               "set_tid_address",
+                               "setuid",
+                               "setuid32",
+                               "setxattr",
+                               "shmat",
+                               "shmctl",
+                               "shmdt",
+                               "shmget",
+                               "shutdown",
+                               "sigaltstack",
+                               "signalfd",
+                               "signalfd4",
+                               "sigprocmask",
+                               "sigreturn",
+                               "socket",
+                               "socketcall",
+                               "socketpair",
+                               "splice",
+                               "stat",
+                               "stat64",
+                               "statfs",
+                               "statfs64",
+                               "statx",
+                               "symlink",
+                               "symlinkat",
+                               "sync",
+                               "sync_file_range",
+                               "syncfs",
+                               "sysinfo",
+                               "tee",
+                               "tgkill",
+                               "time",
+                               "timer_create",
+                               "timer_delete",
+                               "timer_getoverrun",
+                               "timer_gettime",
+                               "timer_gettime64",
+                               "timer_settime",
+                               "timer_settime64",
+                               "timerfd_create",
+                               "timerfd_gettime",
+                               "timerfd_gettime64",
+                               "timerfd_settime",
+                               "timerfd_settime64",
+                               "times",
+                               "tkill",
+                               "truncate",
+                               "truncate64",
+                               "ugetrlimit",
+                               "umask",
+                               "uname",
+                               "unlink",
+                               "unlinkat",
+                               "utime",
+                               "utimensat",
+                               "utimensat_time64",
+                               "utimes",
+                               "vfork",
+                               "vmsplice",
+                               "wait4",
+                               "waitid",
+                               "waitpid",
+                               "write",
+                               "writev"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {},
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "ptrace"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": null,
+                       "comment": "",
+                       "includes": {
+                               "minKernel": "4.8"
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "personality"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [
+                               {
+                                       "index": 0,
+                                       "value": 0,
+                                       "op": "SCMP_CMP_EQ"
+                               }
+                       ],
+                       "comment": "",
+                       "includes": {},
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "personality"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [
+                               {
+                                       "index": 0,
+                                       "value": 8,
+                                       "op": "SCMP_CMP_EQ"
+                               }
+                       ],
+                       "comment": "",
+                       "includes": {},
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "personality"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [
+                               {
+                                       "index": 0,
+                                       "value": 131072,
+                                       "op": "SCMP_CMP_EQ"
+                               }
+                       ],
+                       "comment": "",
+                       "includes": {},
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "personality"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [
+                               {
+                                       "index": 0,
+                                       "value": 131080,
+                                       "op": "SCMP_CMP_EQ"
+                               }
+                       ],
+                       "comment": "",
+                       "includes": {},
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "personality"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [
+                               {
+                                       "index": 0,
+                                       "value": 4294967295,
+                                       "op": "SCMP_CMP_EQ"
+                               }
+                       ],
+                       "comment": "",
+                       "includes": {},
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "sync_file_range2"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "arches": [
+                                       "ppc64le"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "arm_fadvise64_64",
+                               "arm_sync_file_range",
+                               "sync_file_range2",
+                               "breakpoint",
+                               "cacheflush",
+                               "set_tls"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "arches": [
+                                       "arm",
+                                       "arm64"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "arch_prctl"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "arches": [
+                                       "amd64",
+                                       "x32"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "modify_ldt"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "arches": [
+                                       "amd64",
+                                       "x32",
+                                       "x86"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "s390_pci_mmio_read",
+                               "s390_pci_mmio_write",
+                               "s390_runtime_instr"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "arches": [
+                                       "s390",
+                                       "s390x"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "open_by_handle_at"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_DAC_READ_SEARCH"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "bpf",
+                               "clone",
+                               "fanotify_init",
+                               "lookup_dcookie",
+                               "mount",
+                               "name_to_handle_at",
+                               "perf_event_open",
+                               "quotactl",
+                               "setdomainname",
+                               "sethostname",
+                               "setns",
+                               "syslog",
+                               "umount",
+                               "umount2",
+                               "unshare"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_ADMIN"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "clone"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [
+                               {
+                                       "index": 0,
+                                       "value": 2114060288,
+                                       "op": "SCMP_CMP_MASKED_EQ"
+                               }
+                       ],
+                       "comment": "",
+                       "includes": {},
+                       "excludes": {
+                               "caps": [
+                                       "CAP_SYS_ADMIN"
+                               ],
+                               "arches": [
+                                       "s390",
+                                       "s390x"
+                               ]
+                       }
+               },
+               {
+                       "names": [
+                               "clone"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [
+                               {
+                                       "index": 1,
+                                       "value": 2114060288,
+                                       "op": "SCMP_CMP_MASKED_EQ"
+                               }
+                       ],
+                       "comment": "s390 parameter ordering for clone is different",
+                       "includes": {
+                               "arches": [
+                                       "s390",
+                                       "s390x"
+                               ]
+                       },
+                       "excludes": {
+                               "caps": [
+                                       "CAP_SYS_ADMIN"
+                               ]
+                       }
+               },
+               {
+                       "names": [
+                               "reboot"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_BOOT"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "chroot"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_CHROOT"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "delete_module",
+                               "init_module",
+                               "finit_module"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_MODULE"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "acct"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_PACCT"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "kcmp",
+                               "process_vm_readv",
+                               "process_vm_writev",
+                               "ptrace"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_PTRACE"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "iopl",
+                               "ioperm"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_RAWIO"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "settimeofday",
+                               "stime",
+                               "clock_settime"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_TIME"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "vhangup"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_TTY_CONFIG"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "get_mempolicy",
+                               "mbind",
+                               "set_mempolicy"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYS_NICE"
+                               ]
+                       },
+                       "excludes": {}
+               },
+               {
+                       "names": [
+                               "syslog"
+                       ],
+                       "action": "SCMP_ACT_ALLOW",
+                       "args": [],
+                       "comment": "",
+                       "includes": {
+                               "caps": [
+                                       "CAP_SYSLOG"
+                               ]
+                       },
+                       "excludes": {}
+               }
+       ]
+}
diff --git a/settings.xml b/settings.xml
new file mode 100644
index 0000000..b524adf
--- /dev/null
+++ b/settings.xml
@@ -0,0 +1,57 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<settings xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.1.0 http://maven.apache.org/xsd/settings-1.1.0.xsd" xmlns="http://maven.apache.org/SETTINGS/1.1.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+
+  <mirrors>
+    <mirror>
+      <mirrorOf>*</mirrorOf>
+      <name>wonder</name>
+      <url>http://artifactory:8081/artifactory/wonder</url>
+      <id>wonder</id>
+    </mirror>
+  </mirrors>
+
+
+  <profiles>
+    <profile>
+      <repositories>
+        <repository>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+          <id>central</id>
+          <name>wonder</name>
+          <url>http://artifactory:8081/artifactory/wonder</url>
+        </repository>
+        <repository>
+          <snapshots>
+               <updatePolicy>never</updatePolicy>
+          </snapshots>
+          <id>snapshots</id>
+          <name>wonder</name>
+          <url>http://artifactory:8081/artifactory/wonder</url>
+        </repository>
+      </repositories>
+      <pluginRepositories>
+        <pluginRepository>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+          <id>central</id>
+          <name>wonder</name>
+          <url>http://artifactory:8081/artifactory/wonder</url>
+        </pluginRepository>
+        <pluginRepository>
+          <snapshots />
+          <id>snapshots</id>
+          <name>wonder</name>
+          <url>http://artifactory:8081/artifactory/wonder</url>
+        </pluginRepository>
+      </pluginRepositories>
+      <id>artifactory</id>
+    </profile>
+  </profiles>
+  <activeProfiles>
+    <activeProfile>artifactory</activeProfile>
+  </activeProfiles>
+</settings>
diff --git a/start_artifactory.bash b/start_artifactory.bash
new file mode 100755
index 0000000..36e8de5
--- /dev/null
+++ b/start_artifactory.bash
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+set -e
+
+DOCKER=docker
+function isContainerRunning() {
+    [ "`$DOCKER ps -q -f name=$1`" != "" ]
+}
+
+
+if isContainerRunning artifactory; then
+       echo "you already have a container named artifactory"
+       echo "docker rm -f artifactory"
+       echo "docker volume rm artifactory_data"
+       exit 1
+fi
+
+IMAGE=docker.bintray.io/jfrog/artifactory-oss:6.23.41
+
+docker pull $IMAGE
+
+
+NET=hive-dev-box-net
+docker network create hive-dev-box-net || true
+RUN_OPTS+=" --name artifactory"
+RUN_OPTS+=" --network $NET"
+RUN_OPTS+=" -d --restart always"
+RUN_OPTS+=" -v artifactory_data:/var/opt/jfrog/artifactory"
+RUN_OPTS+=" -p 127.0.0.1:8081:8081"
+docker run $RUN_OPTS $IMAGE
+
+docker cp artifactory_backup.zip artifactory:/tmp/backup.zip
+echo "@@@ artifactory should be running"
+
+cat << EOF
+===
+To load remote repos/etc you will need to run below command after it has started up:
+
+docker exec -it artifactory /bin/bash
+curl -X POST -u admin:password -H "Content-type: application/json" http://localhost:8081/artifactory/ui/artifactimport/system \
+  -d '{ "path":"/tmp/backup.zip","excludeContent":false,"excludeMetadata":false,"verbose":false,"zip":true,"action":"system"}'
+
+# after executing the above command you will be able to log into artifactory by using: admin/admin
+===
+EOF
+
+#
+#curl -X POST -u admin:password1 -H "Content-type: application/json" -d '{ "userName" : "admin", "oldPassword" : "password1", "newPassword1" : "password", "newPassword2" : "password" }' http://localhost:8081/artifactory/api/security/users/authorization/changePassword
+#
+
diff --git a/tools/build_cleanup b/tools/build_cleanup
new file mode 100755
index 0000000..67b0cb6
--- /dev/null
+++ b/tools/build_cleanup
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+apt-get clean
+rm -rf /mnt/work/downloads/*
diff --git a/tools/cdpcli b/tools/cdpcli
new file mode 100755
index 0000000..85b026a
--- /dev/null
+++ b/tools/cdpcli
@@ -0,0 +1,21 @@
+#!/bin/bash
+set -e
+
+curl -s https://packagecloud.io/install/repositories/segment/aws-okta/script.deb.sh | sudo bash
+sed -i 's/buster/stretch/' /etc/apt/sources.list.d/segment_aws-okta.list
+sudo apt-get update
+sudo apt-get install -y aws-okta
+
+sudo apt-get install -y virtualenv python3-pip
+
+pip3 install cdpcli
+
+cd /tmp/
+curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
+unzip awscliv2.zip
+sudo ./aws/install
+
+rm awscliv2.zip
+rm -rf aws
+
+/tools/build_cleanup
diff --git a/tools/docker_entrypoint b/tools/docker_entrypoint
new file mode 100755
index 0000000..6fe2542
--- /dev/null
+++ b/tools/docker_entrypoint
@@ -0,0 +1,43 @@
+#!/bin/bash --login
+
+set -e
+
+#first_run_wizard
+
+echo " * installing some basic stuff..."
+
+sudo cp -rsf /hive-dev-box/etc/* /etc/
+sudo cp -rsf /hive-dev-box/bin/* /bin/
+
+function safe_sw() {
+  if [ -e "/active/$1" ];then
+    echo " * $1 is already present"
+  else
+    /bin/sw "$@"
+  fi
+}
+
+safe_sw tez    0.10.1
+safe_sw hadoop 3.1.2
+safe_sw hive   3.1.2
+#safe_sw eclipse
+safe_sw maven
+safe_sw protobuf
+
+[ -d /toolbox/build/distributions ] && sudo dpkg -i /toolbox/build/distributions/*.deb
+
+#reinit_metastore > /dev/null 2>&1
+#echo "exitcode: $?"
+
+sudo /etc/init.d/ssh start
+
+cat /etc/motd
+
+if [ -t 0 ];then
+    export USER=`whoami`
+    exec bash --login
+else
+    echo "non-interactive shell detected; waiting for the end of the world... "
+    while :; do sleep 60;done
+fi
+
diff --git a/tools/docker_entrypoint.bazaar b/tools/docker_entrypoint.bazaar
new file mode 100755
index 0000000..c910aa1
--- /dev/null
+++ b/tools/docker_entrypoint.bazaar
@@ -0,0 +1,32 @@
+#!/bin/bash --login
+
+set -e
+
+echo " * installing some basic stuff..."
+
+function safe_sw() {
+  if [ -e "/active/$1" ];then
+    echo " * $1 is already present"
+  else
+    /bin/sw "$@"
+  fi
+}
+
+safe_sw tez    ${TEZ_VERSION:-0.9.1}
+safe_sw hadoop ${HADOOP_VERSION:-3.1.2}
+safe_sw hive   ${HIVE_VERSION:-3.1.2}
+
+hive_launch -d -I
+
+cat /etc/motd
+
+echo "@@@ you may access the processes running via 'screen -r -x'"
+
+if [ -t 0 ];then
+    export USER=`whoami`
+    exec bash --login
+else
+    echo "non-interactive shell detected; waiting for the end of the world... "
+    while :; do sleep 60;done
+fi
+
diff --git a/tools/docker_entrypoint.executor b/tools/docker_entrypoint.executor
new file mode 100755
index 0000000..d30e4c4
--- /dev/null
+++ b/tools/docker_entrypoint.executor
@@ -0,0 +1,5 @@
+#!/bin/bash --login
+
+set -e
+export USER=`whoami`
+"$@"
diff --git a/tools/entrypoint.impala b/tools/entrypoint.impala
new file mode 100755
index 0000000..59b7286
--- /dev/null
+++ b/tools/entrypoint.impala
@@ -0,0 +1,41 @@
+#!/bin/bash --login
+
+set -e
+
+#first_run_wizard
+
+echo " * installing some basic stuff..."
+
+sudo cp -rsf /hive-dev-box/etc/* /etc/ || true # old coreutils
+sudo cp -rsf /hive-dev-box/bin/* /bin/ || true
+
+function safe_sw() {
+  if [ -e "/active/$1" ];then
+    echo " * $1 is already present"
+  else
+    /bin/sw "$@"
+  fi
+}
+
+safe_sw tez    0.9.1
+safe_sw hadoop 3.1.2
+safe_sw eclipse
+safe_sw hive   3.1.2
+safe_sw maven  3.6.1
+safe_sw protobuf
+
+[ -d /toolbox/build/distributions ] && sudo dpkg -i /toolbox/build/distributions/*.deb
+
+#reinit_metastore > /dev/null 2>&1
+#echo "exitcode: $?"
+
+cat /etc/motd
+
+if [ -t 0 ];then
+    export USER=`whoami`
+    exec bash --login
+else
+    echo "non-interactive shell detected; waiting for the end of the world... "
+    while :; do sleep 60;done
+fi
+
diff --git a/tools/i_sort b/tools/i_sort
new file mode 100755
index 0000000..a8f72bf
--- /dev/null
+++ b/tools/i_sort
@@ -0,0 +1,30 @@
+#!/bin/bash -e
+
+# FIXME: consider setting
+#Set hive.tez.container.size=3356;
+#Set hive.tez.java.opts=-Xmx2g;
+#conf set hive/hive-site hive.tez.container.size 3356
+#conf set hive/hive-site hive.tez.java.opts -Xmx2g
+# FIXME: scale up mem values?
+
+#addgroup --gid 999 docker_access
+#adduser dev docker_access
+
+cd tmp
+#apt-get install -y dnsutils wmctrl firefox-esr
+
+cat >> ~dev/.bashrc << EOF
+export HIVE_TEST_DOCKER_HOST="\`ip r | grep '^default'|cut -d ' ' -f 3\`"
+
+[ -d /hive-dev-box/ ] && . /hive-dev-box/bashrc
+
+if [ "\$DISPLAY" != "" ]; then
+  export WID=\`xprop -root _NET_ACTIVE_WINDOW|cut -d ' ' -f5\`
+fi
+
+function urgent() { wmctrl -i -r \$WID   -b add,demands_attention; backburner \$HOSTNAME;}
+
+EOF
+
+
+/tools/build_cleanup
diff --git a/tools/iii b/tools/iii
new file mode 100755
index 0000000..c173bc4
--- /dev/null
+++ b/tools/iii
@@ -0,0 +1,16 @@
+#!/bin/bash
+set -e
+
+#sudo apt-get install -y proxychains4
+
+cat << EOF | sudo dd of=/etc/proxychains4.conf
+strict_chain
+proxy_dns
+remote_dns_subnet 224
+tcp_read_time_out 15000
+tcp_connect_time_out 8000
+[ProxyList]
+socks5 172.18.0.1 1080
+EOF
+
+/tools/build_cleanup
diff --git a/tools/impala_bootstrap b/tools/impala_bootstrap
new file mode 100755
index 0000000..c40d3ba
--- /dev/null
+++ b/tools/impala_bootstrap
@@ -0,0 +1,15 @@
+#!/bin/bash
+set -e
+
+git clone --depth=1 https://github.com/apache/impala
+#https://gitbox.apache.org/repos/asf/impala.git
+
+cd impala
+
+export IMPALA_HOME=`pwd`
+sudo mkdir /usr/local/apache-maven-3.5.4
+echo > bin/jenkins/populate_m2_directory.py
+./bin/bootstrap_system.sh
+
+cd ..
+rm -r impala
diff --git a/tools/install_basics b/tools/install_basics
new file mode 100755
index 0000000..3b98061
--- /dev/null
+++ b/tools/install_basics
@@ -0,0 +1,85 @@
+#!/bin/bash -e
+
+sed -i 's/deb.debian.org/ftp.bme.hu/' /etc/apt/sources.list
+
+debconf-set-selections <(cat << EOF
+keyboard-configuration keyboard-configuration/altgr    select  The default for the keyboard layout
+keyboard-configuration keyboard-configuration/unsupported_layout       boolean true
+keyboard-configuration keyboard-configuration/compose  select  No compose key
+keyboard-configuration keyboard-configuration/unsupported_config_options       boolean true
+keyboard-configuration keyboard-configuration/model    select  Generic 105-key PC (intl.)
+keyboard-configuration keyboard-configuration/toggle   select  No toggling
+keyboard-configuration keyboard-configuration/xkb-keymap       select  
+keyboard-configuration keyboard-configuration/ctrl_alt_bksp    boolean false
+keyboard-configuration keyboard-configuration/store_defaults_in_debconf_db     boolean true
+keyboard-configuration keyboard-configuration/unsupported_config_layout        boolean true
+keyboard-configuration keyboard-configuration/layoutcode       string  us
+keyboard-configuration keyboard-configuration/variant  select  English (US)
+keyboard-configuration console-setup/detect    detect-keyboard 
+keyboard-configuration keyboard-configuration/switch   select  No temporary switch
+keyboard-configuration keyboard-configuration/layout   select  English (US)
+keyboard-configuration console-setup/ask_detect        boolean false
+keyboard-configuration keyboard-configuration/variantcode      string  
+keyboard-configuration keyboard-configuration/modelcode        string  pc105
+keyboard-configuration keyboard-configuration/unsupported_options      boolean true
+keyboard-configuration keyboard-configuration/optionscode      string  
+keyboard-configuration console-setup/detected  note    
+EOF
+)
+
+apt-get update
+apt-get install -y \
+        wget curl nano gnupg lsb-release sysvbanner git \
+        psmisc nano screen sysvbanner net-tools procps \
+        screen psmisc netcat psmisc nano screen sysvbanner \
+        make gcc g++ \
+        locales time git-review jq diffstat unzip zip docker.io \
+        graphviz \
+        postgresql-client kdiff3 \
+        default-mysql-client \
+        xbase-clients libgtk3.0 software-properties-common \
+        docker.io locales time git-review jq diffstat unzip zip graphviz \
+        postgresql-client  kdiff3 golang bash-completion \
+        default-mysql-client python libxml2-utils rsync lnav xmlstarlet jq colordiff xclip sudo wget vim tree pastebinit patchutils \
+        dnsutils wmctrl firefox-esr proxychains4
+#python-networkx 
+
+groupadd -f -g 1000 dev
+useradd -m -o -u 1000 -g 1000 -d /home/dev -s /bin/bash dev
+
+addgroup --gid 999 docker_access
+adduser dev docker_access
+
+echo 'root:root' | chpasswd
+echo 'dev:dev' | chpasswd
+
+# setup sudo
+adduser dev sudo
+cat >> /etc/sudoers << EOF
+# dev can do anything
+dev ALL=(ALL) NOPASSWD:ALL
+EOF
+
+echo "LC_ALL=en_US.UTF-8" >> /etc/environment
+echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
+echo "LANG=en_US.UTF-8" > /etc/locale.conf
+locale-gen en_US.UTF-8
+
+cd /tmp
+wget -nv https://github.com/bitnami/wait-for-port/releases/download/v1.0/wait-for-port.zip
+unzip wait-for-port.zip
+mv wait-for-port /usr/bin
+rm wait-for-port.zip
+
+sed -i 's/32m/36m/' ~dev/.bashrc
+
+cat >> ~dev/.bashrc << EOF
+
+alias grep='grep --color=auto'
+alias Grep=grep
+alias diffstat='diffstat -p0'
+export USER=dev
+
+EOF
+
+/tools/build_cleanup
diff --git a/tools/install_conf b/tools/install_conf
new file mode 100755
index 0000000..d7e6720
--- /dev/null
+++ b/tools/install_conf
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+# Generate /etc/{hadoop,hive,tez} configuration for a single-node dev cluster
+# using the `conf` helper, then prepare the data/work directories and a
+# profile snippet exporting the relevant environment variables.
+set -e
+set -x
+
+# Placeholder git identity for the container.
+git config --global user.email f...@acme.com
+git config --global user.name "John Doe"
+
+mkdir -p /etc/{hadoop,hive,tez}
+#cp -r /hadoop/etc/hadoop /etc/
+#cp -r /hive/conf/ /etc/hive/
+
+conf init
+
+# Let these users impersonate anyone (proxyuser) — needed for HS2 etc.
+for u in root vagrant dev hive;do
+        conf set hadoop/core-site hadoop.proxyuser.${u}.groups '*'
+        conf set hadoop/core-site hadoop.proxyuser.${u}.hosts '*'
+done
+conf set hadoop/core-site hadoop.tmp.dir '/data/hadoop-${user.name}'
+
+# YARN: shuffle service plus modest single-node resources (8G / 2 vcores).
+conf set hadoop/yarn-site yarn.nodemanager.aux-services mapreduce_shuffle
+conf set hadoop/yarn-site 
yarn.nodemanager.aux-services.mapreduce_shuffle.class 
org.apache.hadoop.mapred.ShuffleHandler
+conf set hadoop/yarn-site yarn.nodemanager.resource.memory-mb 8192
+conf set hadoop/yarn-site yarn.nodemanager.resource.cpu-vcores 2
+conf set hadoop/yarn-site 
yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage 99
+
+# Single node: no replication.
+conf set hadoop/hdfs-site dfs.replication 1
+
+# Capacity scheduler: one 'default' queue taking the whole cluster.
+conf set hadoop/capacity-scheduler 
yarn.scheduler.capacity.maximum-am-resource-percent 0.6
+conf set hadoop/capacity-scheduler yarn.scheduler.capacity.resource-calculator 
org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator
+conf set hadoop/capacity-scheduler yarn.scheduler.capacity.root.queues default
+conf set hadoop/capacity-scheduler 
yarn.scheduler.capacity.root.default.capacity 100
+conf set hadoop/capacity-scheduler 
yarn.scheduler.capacity.root.default.user-limit-factor 1
+conf set hadoop/capacity-scheduler 
yarn.scheduler.capacity.root.default.maximum-capacity 100
+conf set hadoop/capacity-scheduler yarn.scheduler.capacity.root.default.state 
RUNNING
+#yarn.scheduler.capacity.root.default.acl_submit_applications '*'
+#yarn.scheduler.capacity.root.default.acl_administer_queue '*'
+#yarn.scheduler.capacity.root.default.acl_application_max_priority '*'
+#yarn.scheduler.capacity.root.default.maximum-application-lifetime -1
+#yarn.scheduler.capacity.root.default.default-application-lifetime -1
+#yarn.scheduler.capacity.node-locality-delay 40
+#yarn.scheduler.capacity.rack-locality-additional-delay -1
+#yarn.scheduler.capacity.queue-mappings ''
+
+# Tez pulls its tarball from HDFS and reuses the cluster's hadoop libs.
+conf set tez/tez-site tez.lib.uris '${fs.defaultFS}/apps/tez/tez.tar.gz'
+conf set tez/tez-site tez.use.cluster.hadoop-libs true
+#conf set tez/tez-site tez.am.resource.memory.mb 512
+
+conf set hive/hive-site hive.metastore.warehouse.dir /data/hive/warehouse
+# FIXME: probably defunct
+conf set hive/hive-site hive.metastore.local true
+conf set hive/hive-site hive.user.install.directory file:///tmp
+conf set hive/hive-site hive.execution.engine tez
+conf set hive/hive-site hive.log.explain.output true
+conf set hive/hive-site hive.exec.scratchdir /data/hive
+# FIXME: this might not needed...but for me it is :)
+conf set hive/hive-site 
yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage 99
+
+conf set hive/hive-site hive.tez.container.size 3356
+conf set hive/hive-site hive.tez.java.opts -Xmx2g
+
+# enable transactions support
+conf set hive/hive-site hive.support.concurrency true
+conf set hive/hive-site hive.txn.manager 
org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
+conf set hive/hive-site hive.mapred.mode nonstrict # not sure if this is also 
needed
+
+# disable results cache as it usually an obstacle during debugging..
+conf set hive/hive-site hive.query.results.cache.enabled false
+#conf set hive/hive-site hive.metastore.rawstore.impl 
org.apache.hadoop.hive.metastore.cache.CachedStore
+#conf set hive/metastore metastore.rawstore.impl 
org.apache.hadoop.hive.metastore.cache.CachedStore
+
+conf set hive/hivemetastore-site metastore.housekeeping.threads.on true
+conf set hive/hive-site tez.use.cluster.hadoop-libs true
+
+
+# Data, log, and work directories owned by dev; /data left world-writable so
+# services running as other users can write under it.
+mkdir -p /data/hive /data/log /apps/lib /apps/tez /work /active ~dev/.m2 
~dev/.config
+chown dev /data{,/hive,/log} /apps/lib /apps/tez /work /active /apps ~dev/.m2 
~dev/.config
+chmod 777 -R /data
+
+# use ssd for docker
+
+# Select java 8 as default for the dev user via the `sw` switcher.
+sudo -u dev sw java 8
+
+# FIXME: fix sdkman
+# Login-shell environment: conf dirs, classpath, and tool PATH entries.
+# Unquoted heredoc on purpose: $PATH etc. expand when this file is written.
+cat > /etc/profile.d/confs.sh << EOF
+
+export MAVEN_OPTS=-Xmx2g
+
+export HADOOP_CONF_DIR=/etc/hadoop
+export HADOOP_LOG_DIR=/data/log
+export HADOOP_CLASSPATH=/etc/tez/:/active/tez/lib/*:/active/tez/*:/apps/lib/*
+export HIVE_CONF_DIR=/etc/hive/
+
+export 
PATH=$PATH:/active/hive/bin:/active/hadoop/bin:/active/eclipse/:/active/maven/bin/:/active/protobuf/bin:/active/visualvm/bin:/active/kubebuilder/bin:/active/idea/bin
+
+. /etc/profile.d/java.sh
+
+EOF
diff --git a/tools/install_executor b/tools/install_executor
new file mode 100755
index 0000000..b157b0e
--- /dev/null
+++ b/tools/install_executor
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -e
+
+# Executor image setup: hand /work to the dev user and pre-install build
+# tools via the `sw` version switcher (running as dev so they land in dev's
+# environment).
+chown dev /work
+sudo -u dev sw maven
+sudo -u dev sw protobuf
+# thrift pinned to 0.16.0 — presumably the version Hive's codegen expects;
+# TODO confirm against the Hive branch being built.
+sudo -u dev sw thrift 0.16.0
+
+/tools/build_cleanup
+
diff --git a/tools/install_executor2 b/tools/install_executor2
new file mode 100755
index 0000000..3235cc1
--- /dev/null
+++ b/tools/install_executor2
@@ -0,0 +1,9 @@
+#!/bin/bash
+set -e
+
+# Pre-fetch both hadoop and tez versions used by test runs; presumably `sw`
+# keeps every requested version available for later switching — confirm.
+sw hadoop 3.3.1
+sw hadoop 3.1.0
+sw tez 0.9.1
+sw tez 0.10.1
+
+/tools/build_cleanup
diff --git a/tools/install_executor3 b/tools/install_executor3
new file mode 100755
index 0000000..1795ec3
--- /dev/null
+++ b/tools/install_executor3
@@ -0,0 +1,27 @@
+#!/bin/bash
+set -e
+
+UID=${1:-1000}
+
+userdel dev
+groupadd -g $UID jenkins
+useradd -m -o -u $UID -g $UID -d /home/jenkins -s /bin/bash jenkins
+
+echo 'jenkins:jenkins' | chpasswd
+
+cat >> /etc/sudoers << EOF
+jenkins ALL=(ALL) NOPASSWD:ALL
+EOF
+
+
+cat >> ~jenkins/.bashrc << EOF
+
+alias grep='grep --color=auto'
+alias Grep=grep
+export USER=jenkins
+
+EOF
+
+apt-get install -y rsync python
+
+/tools/build_cleanup
diff --git a/tools/install_java_zulu b/tools/install_java_zulu
new file mode 100755
index 0000000..8ef0498
--- /dev/null
+++ b/tools/install_java_zulu
@@ -0,0 +1,17 @@
+#!/bin/bash -e
+
+# Install Azul Zulu JDKs 8/11/17 from the official Azul apt repository, then
+# pin JDK 8 to build 1.8.0_262-b19 and make it the system default.
+#
+# `-y` added to the first install: docker builds have no tty, so an
+# interactive apt prompt would abort the build.
+apt-get install -y gnupg ca-certificates curl wget
+curl -s https://repos.azul.com/azul-repo.key | sudo gpg --dearmor -o /usr/share/keyrings/azul.gpg
+echo "deb [signed-by=/usr/share/keyrings/azul.gpg] https://repos.azul.com/zulu/deb stable main" | sudo tee /etc/apt/sources.list.d/zulu.list
+
+apt-get update
+apt-get install -y zulu8-jdk zulu11-jdk zulu17-jdk
+
+# downgrade jdk8 to 1.8.0_262-b19 to avoid timestamp issue
+wget -nv https://cdn.azul.com/zulu/bin/zulu8.48.0.51-ca-fx-jdk8.0.262-linux_x64.tar.gz
+tar zxf zulu8.48.0.51-ca-fx-jdk8.0.262-linux_x64.tar.gz && rm zulu8.48.0.51-ca-fx-jdk8.0.262-linux_x64.tar.gz
+rm -rf /usr/lib/jvm/zulu8-ca-amd64 && mv zulu8.48.0.51-ca-fx-jdk8.0.262-linux_x64 /usr/lib/jvm/zulu8-ca-amd64
+
+# Make the pinned JDK 8 the system default java.
+update-java-alternatives --set zulu8-ca-amd64
+
+/tools/build_cleanup
diff --git a/tools/install_mysql.bash b/tools/install_mysql.bash
new file mode 100755
index 0000000..777b831
--- /dev/null
+++ b/tools/install_mysql.bash
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+# Install MariaDB and create a fresh 'metastore' database owned by user
+# 'hive' (dev-only credentials), plus the JDBC driver jar under /apps/lib.
+set -e
+apt-get install -y mariadb-server default-mysql-client
+
+# Recreate the metastore DB and the hive user from scratch (idempotent).
+sudo mysql << EOF
+DROP DATABASE IF EXISTS metastore;
+DROP USER IF EXISTS 'hive'@'localhost';
+create database metastore;
+CREATE USER 'hive'@'localhost' IDENTIFIED BY 'mypassword';
+REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'hive'@'localhost';
+GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'localhost';
+FLUSH PRIVILEGES;
+EOF
+
+# Client defaults so `mysql` connects to the metastore without arguments.
+# NOTE(review): written for user vagrant — elsewhere the image uses 'dev';
+# confirm which user actually needs this.
+cat > ~vagrant/.my.cnf << EOF
+[client]
+user=hive
+password=mypassword
+database=metastore
+EOF
+
+# Fetch the MySQL JDBC driver once (skipped when already present).
+# NOTE(review): when the jar exists the `[ ! -f .. ] &&` short-circuit makes
+# this last command — and thus the script — exit non-zero.
+mkdir -p /apps/lib
+cd /apps/lib
+[ ! -f mysql-connector-java-8.0.17.jar ] && wget -nv 
https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.17/mysql-connector-java-8.0.17.jar
diff --git a/tools/install_psql.bash b/tools/install_psql.bash
new file mode 100755
index 0000000..08eff0e
--- /dev/null
+++ b/tools/install_psql.bash
@@ -0,0 +1,16 @@
+#!/bin/bash -e
+
+# Install PostgreSQL and create a 'metastore' database owned by 'hiveuser'
+# (dev-only credentials).
+sudo apt-get install -y postgresql
+sudo -u postgres psql -c "CREATE USER hiveuser WITH PASSWORD 'mypassword'"
+sudo -u postgres createdb metastore -O hiveuser
+
+# FIXME use dev instead of vagrant
+# Store the password so psql connects without prompting (pgpass must be 0600).
+echo localhost:5432:metastore:hiveuser:mypassword > ~vagrant/.pgpass
+chown vagrant ~vagrant/.pgpass
+chmod 600 ~vagrant/.pgpass
+
+# Default connection parameters for every login shell.
+cat > /etc/profile.d/postgres_def.sh <<EOF
+export PGHOST=localhost
+export PGUSER=hiveuser
+export PGDATABASE=metastore
+EOF
diff --git a/tools/install_sdk.bash b/tools/install_sdk.bash
new file mode 100755
index 0000000..3f57e87
--- /dev/null
+++ b/tools/install_sdk.bash
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+# FIXME: partially duplicated from sokol
+
+set -e
+
+apt-get install -y zip unzip 
+export SDKMAN_DIR="/usr/local/sdkman"
+
+curl -s "https://get.sdkman.io"; | bash
+source "$SDKMAN_DIR/bin/sdkman-init.sh"
+
+#sdk install java 7.0.222-zulu
+sdk install java 8.0.222-zulu
+#sdk install maven 3.6.1
+
+cat > /etc/bashrc.d << EOF
+
+export SDKMAN_DIR="/usr/local/sdkman"
+#source "$HOME/.sdkman/bin/sdkman-init.sh"
+source "$SDKMAN_DIR/bin/sdkman-init.sh"
+
+function sw_j7() {
+        sdk use java 7.0.222-zulu
+}
+
+function sw_j8() {
+        sdk use java 8.0.212-zulu
+}
+
+EOF
diff --git a/tools/install_texturepacker b/tools/install_texturepacker
new file mode 100755
index 0000000..9ecf999
--- /dev/null
+++ b/tools/install_texturepacker
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Install TexturePacker 6.0.1 plus the GUI libraries it needs.
+# `set -e` moved to the top: originally it came after the apt-get and `cd`,
+# so failures of those steps were silently ignored.
+set -e
+sudo apt-get install -y libopengl0 sudo imagemagick wget nano  libegl1 libxkbcommon0 libglx0
+cd /tmp
+wget https://www.codeandweb.com/download/texturepacker/6.0.1/TexturePacker-6.0.1.deb
+sudo dpkg -i TexturePacker-*deb
+# drop the downloaded package to keep the image small
+rm *deb
diff --git a/tools/install_toolbox b/tools/install_toolbox
new file mode 100755
index 0000000..089170d
--- /dev/null
+++ b/tools/install_toolbox
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -e
+
+# Install the pinned release of kgyrtkirk/hive-toolbox from GitHub.
+V=0.3.8
+wget -nv -O toolbox.deb 
https://github.com/kgyrtkirk/hive-toolbox/releases/download/${V}/hive-toolbox_${V}_all.deb
+dpkg -i toolbox.deb
+rm toolbox.deb
diff --git a/tools/install_x2go b/tools/install_x2go
new file mode 100755
index 0000000..dd239b2
--- /dev/null
+++ b/tools/install_x2go
@@ -0,0 +1,7 @@
+#!/bin/bash -e
+
+# Install the x2go remote-desktop server with an XFCE session.
+apt-get update
+apt-get install -y x2goserver{,-xsession} xfce4
+apt-get upgrade -y
+
+/tools/build_cleanup
diff --git a/tools/install_xmlstarlet b/tools/install_xmlstarlet
new file mode 100755
index 0000000..39b95d3
--- /dev/null
+++ b/tools/install_xmlstarlet
@@ -0,0 +1,72 @@
+#!/bin/bash -e
+
+# Build a patched xmlstarlet from the Debian source package.
+# fixed: the original `cd tmp` used a relative path that only worked when the
+# script happened to run from /; under -e any other cwd aborted the script.
+cd /tmp
+
+# Add a deb-src entry so `apt-get source` can fetch the package sources.
+echo 'deb-src http://ftp.bme.hu/debian buster main' > /etc/apt/sources.list.d/srcs.list
+apt-get update
+apt-get install -y dpkg-dev
+
+# Fetch the xmlstarlet sources and its build dependencies.
+apt-get source xmlstarlet
+apt-get install -y debhelper dh-autoreconf autotools-dev libxml2-dev libxslt1-dev automake xsltproc gawk docbook-xsl-ns fop ghostscript
+
+cd xmlstarlet-1.6.1
+
+# Apply a local change adding XML_PARSE_HUGE to xml_edit's libxml2 reader
+# options (lifts libxml2's document-size limits).  The heredoc is itself a
+# patch: it registers a new entry in debian/patches/series and also edits
+# src/xml_edit.c directly.  Its content must stay byte-exact.
+patch -p1 << EOF
+diff --git a/debian/patches/series b/debian/patches/series
+index d727d16..97bc5f9 100644
+--- a/debian/patches/series
++++ b/debian/patches/series
+@@ -4,3 +4,4 @@
+ 70-remove-link-usr-lib.patch
+ 80-fix-unesc-dquot.patch
+ 90-fix-upstream-version.patch
++xml_parse_huge
+diff --git a/debian/patches/xml_parse_huge b/debian/patches/xml_parse_huge
+new file mode 100644
+index 0000000..b0c7dcb
+--- /dev/null
++++ b/debian/patches/xml_parse_huge
+@@ -0,0 +1,21 @@
++Description: <short summary of the patch>
++Author: Zoltan Haindrich <k...@rxd.hu>
++Bug-Debian: https://bugs.debian.org/722870
++Bug-Debian: https://bugs.debian.org/837122
++Bug-Debian: https://bugs.debian.org/850842
++
++---
++Bug: <url in upstream bugtracker>
++Forwarded: <no|not-needed|url proving that it has been forwarded>
++Last-Update: 2020-12-08
++
++--- xmlstarlet-1.6.1.orig/src/xml_edit.c
+++++ xmlstarlet-1.6.1/src/xml_edit.c
++@@ -550,6 +550,7 @@ edOutput(const char* filename, const Xml
++         (g_ops->preserveFormat? 0 : XML_SAVE_FORMAT) |
++         (g_ops->omit_decl? XML_SAVE_NO_DECL : 0);
++     int read_options =
+++       XML_PARSE_HUGE |
++         (g_ops->nonet? XML_PARSE_NONET : 0);
++     xmlSaveCtxtPtr save;
++ 
+diff --git a/src/xml_edit.c b/src/xml_edit.c
+index a9b2b33..8239cdc 100644
+--- a/src/xml_edit.c
++++ b/src/xml_edit.c
+@@ -550,6 +550,7 @@ edOutput(const char* filename, const XmlEdAction* ops, int 
ops_count,
+         (g_ops->preserveFormat? 0 : XML_SAVE_FORMAT) |
+         (g_ops->omit_decl? XML_SAVE_NO_DECL : 0);
+     int read_options =
++       XML_PARSE_HUGE |
+         (g_ops->nonet? XML_PARSE_NONET : 0);
+     xmlSaveCtxtPtr save;
+ 
+EOF
+
+# Build and install the patched package, then remove the sources.
+dpkg-buildpackage
+cd ..
+dpkg -i xmlstarlet_1.6.1-2_amd64.deb
+rm -rf xmlstarlet*
+
+
+
+/tools/build_cleanup
diff --git a/tools/python3_default b/tools/python3_default
new file mode 100755
index 0000000..fdc57ec
--- /dev/null
+++ b/tools/python3_default
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -e
+
+# Debug aid: list the interpreters present (also aborts under -e when none).
+ls /usr/bin/pyth*
+# Register python3 with higher priority (2) than python2 (1) so that
+# /usr/bin/python resolves to python3 by default.
+update-alternatives --install /usr/bin/python python /usr/bin/python3 2
+update-alternatives --install /usr/bin/python python /usr/bin/python2 1
+python --version
diff --git a/tools/y b/tools/y
new file mode 100755
index 0000000..f3fe313
--- /dev/null
+++ b/tools/y
@@ -0,0 +1,13 @@
+#!/bin/bash -e
+
+# Native build toolchain (cmake, gdb, bison/flex, meson/ninja, clang-format,
+# kerberos/readline dev headers, ...) — apparently for postgres-style C work.
+sudo apt-get install -y cmake  libssl-dev \
+       krb5-multidev libkrb5-dev gdb \
+                libreadline-dev libz-dev bison flex bear ninja-build meson 
libipc-run-perl iproute2 gitsome clang-format-13
+
+
+#export DEBUGINFOD_URLS="https://debuginfod.debian.net"
+
+#sudo mkdir /usr/local/pg
+#sudo chown dev /usr/local/pg
+/tools/build_cleanup
+#deb http://apt.llvm.org/bullseye/ llvm-toolchain-bullseye main

Reply via email to