This is an automated email from the ASF dual-hosted git repository. casion pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/linkis.git
commit 042f0cfa14039762455b6548480d8123580e5360 Author: 赵文恺 <[email protected]> AuthorDate: Sat Sep 16 12:29:49 2023 +0800 add shell script " checkAdd.sh" for optional engines check --- linkis-dist/bin/checkAdd.sh | 146 ++++++++++++++++++++++++++++++++++++++------ 1 file changed, 126 insertions(+), 20 deletions(-) diff --git a/linkis-dist/bin/checkAdd.sh b/linkis-dist/bin/checkAdd.sh index 349086471..4ce5ff8c9 100644 --- a/linkis-dist/bin/checkAdd.sh +++ b/linkis-dist/bin/checkAdd.sh @@ -20,35 +20,141 @@ source ${workDir}/bin/common.sh source ${workDir}/deploy-config/linkis-env.sh source ${workDir}/deploy-config/db.sh -# --- Begin Check service function by zwk +say() { + printf 'check command fail \n %s\n' "$1" +} + +err() { + say "$1" >&2 + exit 1 +} + +function checkPythonAndJava(){ + python --version > /dev/null 2>&1 + isSuccess "execute cmd: python --version" + java -version > /dev/null 2>&1 + isSuccess "execute cmd: java --version" +} + +function checkHdfs(){ + hadoopVersion="`hdfs version`" + defaultHadoopVersion="3.3" + checkversion "$hadoopVersion" $defaultHadoopVersion hadoop +} + +function checkHive(){ + checkversion "$(whereis hive)" "3.1" hive +} + +function checkversion(){ +versionStr=$1 +defaultVersion=$2 +module=$3 + +result=$(echo $versionStr | grep "$defaultVersion") +if [ -n "$result" ]; then + echo -e "Your [$module] version may match default support version: $defaultVersion\n" +else + echo "WARN: Your [$module] version is not match default support version: $defaultVersion, there may be compatibility issues:" + echo " 1: Continue installation, there may be compatibility issues" + echo " 2: Exit installation" + echo -e " other: exit\n" + + read -p "[Please input your choice]:" idx + if [[ '1' != "$idx" ]];then + echo -e "You chose Exit installation\n" + exit 1 + fi + echo "" +fi +} + +function checkSpark(){ + spark-submit --version > /dev/null 2>&1 + isSuccess "execute cmd: spark-submit --version " +} -function checkTrino(){ - 
# Set to true by check_service_port whenever a required port is taken.
portIsOccupy=false

# Check whether a TCP port is already bound by a listening process.
#   $1 - port to check (optional; defaults to $SERVER_PORT for backward
#        compatibility with the old global-variable calling convention)
# Side effect: prints a warning and sets portIsOccupy=true when occupied.
function check_service_port() {
  local port=${1:-$SERVER_PORT}
  local pid
  # grep -F replaces deprecated fgrep; LISTEN is a fixed string, not a pattern.
  pid=$(lsof -i TCP:"$port" | grep -F LISTEN)
  if [ -n "$pid" ]; then
    echo "$port already used"
    portIsOccupy=true
  fi
}

# True (exit 0) when command $1 exists on PATH.
check_cmd() {
  command -v "$1" > /dev/null 2>&1
}

# Abort (via err, defined earlier in this script) when command $1 is missing.
need_cmd() {
  if ! check_cmd "$1"; then
    err "need '$1' (your linux command not found)"
  fi
}


echo "<-----start to check used cmd---->"
# Commands required by the Linkis installation.
for c in yum java mysql telnet tar sed lsof hdfs; do
  echo "check $c"
  need_cmd "$c"
done

echo "check shell"
need_cmd "$SHELL"

echo "check spark-sql"
need_cmd spark-sql

echo "<-----end to check used cmd---->"

checkPythonAndJava

# Verify none of the Linkis service ports are already occupied.
for SERVER_PORT in "$EUREKA_PORT" "$GATEWAY_PORT" "$MANAGER_PORT" \
                   "$ENGINECONNMANAGER_PORT" "$ENTRANCE_PORT" "$PUBLICSERVICE_PORT"; do
  check_service_port
done

if [ "$portIsOccupy" = true ]; then
  echo "The port is already in use, please check before installing"
  exit 1
fi

# Optional engine checks, driven by switches in deploy-config/linkis-env.sh.
if [ "$ENABLE_SPARK" == "true" ]; then
  checkSpark
fi

if [ "$ENABLE_HDFS" == "true" ]; then
  checkHdfs
fi

if [ "$ENABLE_HIVE" == "true" ]; then
  checkHive
fi
