This is an automated email from the ASF dual-hosted git repository.
yasith pushed a commit to branch sdk-batch-jobs
in repository https://gitbox.apache.org/repos/asf/airavata.git
The following commit(s) were added to refs/heads/sdk-batch-jobs by this push:
new 859a7d5445 commit app scripts into airavata-agent/application
859a7d5445 is described below
commit 859a7d5445399438cf66f6e9f1f870b110471446
Author: yasithdev <[email protected]>
AuthorDate: Sat Aug 2 23:24:26 2025 -0500
commit app scripts into airavata-agent/application
---
.../airavata-agent/application/README.md | 19 ++
.../airavata-agent/application/alphafold2.sh | 91 ++++++++
.../airavata-agent/application/gaussian16.sh | 256 +++++++++++++++++++++
.../airavata-agent/application/gromacs.sh | 165 +++++++++++++
.../airavata-agent/application/namd.sh | 132 +++++++++++
.../airavata-agent/application/pmemd_cuda.sh | 4 +
.../airavata-agent/application/pmemd_mpi.sh | 4 +
.../airavata-agent/application/psi4.sh | 4 +
8 files changed, 675 insertions(+)
diff --git a/modules/agent-framework/airavata-agent/application/README.md
b/modules/agent-framework/airavata-agent/application/README.md
new file mode 100644
index 0000000000..f57bb9b204
--- /dev/null
+++ b/modules/agent-framework/airavata-agent/application/README.md
@@ -0,0 +1,19 @@
+# Application Scripts - Agent
+
+## NAMD
+
+### Expanse
+```sh
+NAMD_CPU_MODULES="cpu/0.17.3b gcc/10.2.0 openmpi/4.1.1"
+NAMD_CPU_PATH=/home/scigap/applications/NAMD_3.1alpha2_Linux-x86_64-multicore
+NAMD_GPU_MODULES="gpu/0.17.3b"
+NAMD_GPU_PATH=/home/scigap/applications/NAMD_3.0.1_Linux-x86_64-multicore-CUDA
+```
+
+### Delta
+```sh
+NAMD_CPU_MODULES="openmpi/4.1.6 fftw/3.3.10"
+NAMD_CPU_PATH=/sw/namd/NAMD_3.0b3_Linux-x86_64-multicore
+NAMD_GPU_MODULES="namd3/2024.02.mulitcore_cuda.s11"
+NAMD_GPU_PATH=/sw/namd/NAMD_3.0b3_Linux-x86_64-multicore-CUDA
+```
diff --git a/modules/agent-framework/airavata-agent/application/alphafold2.sh
b/modules/agent-framework/airavata-agent/application/alphafold2.sh
new file mode 100644
index 0000000000..2d1318ffe0
--- /dev/null
+++ b/modules/agent-framework/airavata-agent/application/alphafold2.sh
@@ -0,0 +1,91 @@
+#!/bin/bash -x
+set -euo pipefail
+
+while getopts t:p:m: option
+ do
+ case $option in
+ t ) MaxDate=$OPTARG ;;
+ p ) MODEL_PRESET=$OPTARG ;;
+ m ) Num_Multi=$OPTARG ;;
+ \? ) cat << ENDCAT1
+>! Usage: $0 [-t Maximum Template Date ] !<
+>! [-p Model Preset ] !<
+>! [-m Number of Multimers per Model ] !<
+ENDCAT1
+# exit 1 ;;
+ esac
+done
+
+if [ $Num_Multi = "" ]; then
+ export Num_Multi=1
+fi
+#set the environment PATH
+export PYTHONNOUSERSITE=True
+module reset
+module load singularitypro
+ALPHAFOLD_HOME=/expanse/projects/qstore/data/alphafold-v2.3.2
+ALPHAFOLD_MODELS=$ALPHAFOLD_HOME/params
+
+pdb70=""
+uniprot=""
+pdbseqres=""
+nummulti=""
+
+# check_flags
+ if [ "monomer" = "${MODEL_PRESET%_*}" ];then
+ export pdb70="--pdb70_database_path=/data/pdb70/pdb70"
+ else
+ export uniprot="--uniprot_database_path=/data/uniprot/uniprot.fasta"
+ export
pdbseqres="--pdb_seqres_database_path=/data/pdb_seqres/pdb_seqres.txt"
+ export nummulti="--num_multimer_predictions_per_model=$Num_Multi"
+ fi
+
+## Copy input to node local scratch
+cp input.fasta /scratch/$USER/job_$SLURM_JOBID
+#cp -r /expanse/projects/qstore/data/alphafold/uniclust30/uniclust30_2018_08
/scratch/$USER/job_$SLURM_JOBID/
+cd /scratch/$USER/job_$SLURM_JOBID
+ln -s /expanse/projects/qstore/data/alphafold/uniclust30/uniclust30_2018_08
+mkdir bfd
+cp /expanse/projects/qstore/data/alphafold/bfd/*index bfd/
+#cp
/expanse/projects/qstore/data/alphafold/bfd/bfd_metaclust_clu_complete_id30_c90_final_seq.sorted_opt_hhm.ffdata
bfd/
+#cp
/expanse/projects/qstore/data/alphafold/bfd/bfd_metaclust_clu_complete_id30_c90_final_seq.sorted_opt_cs219.ffdata
bfd/
+cd bfd
+ln -s
/expanse/projects/qstore/data/alphafold/bfd/bfd_metaclust_clu_complete_id30_c90_final_seq.sorted_opt_hhm.ffdata
+ln -s
/expanse/projects/qstore/data/alphafold/bfd/bfd_metaclust_clu_complete_id30_c90_final_seq.sorted_opt_cs219.ffdata
+ln -s
/expanse/projects/qstore/data/alphafold/bfd/bfd_metaclust_clu_complete_id30_c90_final_seq.sorted_opt_a3m.ffdata
+cd ../
+mkdir alphafold_output
+# Create soft links to rundir from submitdir
+
+ln -s /scratch/$USER/job_$SLURM_JOBID $SLURM_SUBMIT_DIR/rundir
+
+#Run the command
+singularity run --nv \
+ -B /expanse/lustre \
+ -B /expanse/projects \
+ -B /scratch \
+ -B $ALPHAFOLD_HOME:/data \
+ -B $ALPHAFOLD_MODELS \
+ /cm/shared/apps/containers/singularity/alphafold/alphafold_aria2_v2.3.2.simg \
+ --fasta_paths=/scratch/$USER/job_$SLURM_JOBID/input.fasta \
+ --uniref90_database_path=/data/uniref90/uniref90.fasta \
+ --data_dir=/data \
+ --mgnify_database_path=/data/mgnify/mgy_clusters_2022_05.fa \
+
--bfd_database_path=/scratch/$USER/job_$SLURM_JOBID/bfd/bfd_metaclust_clu_complete_id30_c90_final_seq.sorted_opt
\
+ --uniref30_database_path=/data/uniref30/UniRef30_2021_03 \
+ $pdbseqres \
+ $pdb70 \
+ $uniprot \
+ --template_mmcif_dir=/data/pdb_mmcif/mmcif_files \
+ --obsolete_pdbs_path=/data/pdb_mmcif/obsolete.dat \
+ --output_dir=/scratch/$USER/job_$SLURM_JOBID/alphafold_output \
+ --max_template_date=$MaxDate \
+ --model_preset=$MODEL_PRESET \
+ --use_gpu_relax=true \
+ --models_to_relax=best \
+ $nummulti
+
+unlink $SLURM_SUBMIT_DIR/rundir
+
+### Copy back results
+tar -cvf $SLURM_SUBMIT_DIR/alphafold_output.tar alphafold_output
diff --git a/modules/agent-framework/airavata-agent/application/gaussian16.sh
b/modules/agent-framework/airavata-agent/application/gaussian16.sh
new file mode 100644
index 0000000000..f4089d116a
--- /dev/null
+++ b/modules/agent-framework/airavata-agent/application/gaussian16.sh
@@ -0,0 +1,256 @@
+#!/bin/sh -x
+# $Id: run_g09_chk_recovery.sh,v 1.0 2017/04/22 14:15:00 Sudhakar Exp $
+
+if [ $# -lt 1 -o $# -gt 3 ]; then
+ echo 1>&2 "Usage: $0 gaussian_input_gjf [SEAGrid_UserName] [GPU] "
+ #echo 1>&2 "Usage: $0 subdir gaussian_input_gjf clobber [gcvars]"
+ exit 127
+fi
+
+# subdir depends on whether we're doing freq, water or PES. For freq and water,
+# it should be hardcoded in the Xbaya workflow. For PES, it should be an
+# additional array generated by the frontend. The contents of this array are
+# trivial, but creating an extra Xbaya service to generate it would add
+# unnecessary extra complexity. Besides, the frontend cannot avoid having to
+# pass at least one array: the array with gjf files.
+#subdir=$1
+subdir="$PWD"
+#export GAUSS_SCRDIR=/oasis/scratch/comet/$USER/temp_project/$SLURM_JOBID
+#export GAUSS_SCRDIR=/expanse/lustre/scratch/$USER/temp_project/$SLURM_JOBID
+scratch_subid=$(id -u $user | tail -c2)
+scrdir="/storage/scratch1/$scratch_subid/$USER"
+export GAUSS_SCRDIR=$scrdir/$SLURM_JOBID
+mkdir -p $GAUSS_SCRDIR
+gaussian_input_full=$1
+if [ $AIRAVATA_USERNAME ]; then
+ echo " The Airavata Gateway User is $AIRAVATA_USERNAME"
+ SG_UserName="$AIRAVATA_USERNAME"
+#elif [ $2 ]; then
+# SG_UserName=$2
+else
+ echo " The Airavata Gateway User is missing "
+exit
+fi
+
+ #export
PATH="/storage/home/hcoda1/7/spamidig6/ideas_storage/apps/g16:$PATH"
+ #export
GAUSS_EXEDIR="/storage/home/hcoda1/7/spamidig6/ideas_storage/apps/g16"
+ #. ~/.bash_profile
+#if [ "$2" = "GPU" ]; then
+ echo "Using GPU version of Gaussian 16"
+ #module reset; module load gpu/0.15.4 gaussian/16.C.01-cuda
+ #module load gaussian/16.C.02
+ #export
PATH="/storage/home/hcoda1/7/spamidig6/ideas_storage/apps/g16:$PATH"
+ #export
GAUSS_EXEDIR="/storage/home/hcoda1/7/spamidig6/ideas_storage/apps/g16"
+ #. ~/.bash_profile
+#fi
+
+if [ $AIRAVATA_ExptDataDir ]; then
+ echo "The Airavata Storage Directory for this job is $AIRAVATA_ExptDataDir"
+ echo "Preparing Cron to push log data to storage periodically"
+ # Get Slurm total time
+# flds=`squeue -j $SLURM_JOBID -l | awk 'END { print $7}' | awk -F:
'{print NF}'`
+# flds=`squeue -j $SLURM_JOBID -l | awk 'END { print $7}' | awk -F-
'{print NF}'` + $flds
+# # if flds 3 $1 is hrs $2 is min and $3 is sec
+# if [ $flds = 4 ]; then
+# jdys=`squeue -j $SLURM_JOBID -l | awk 'END { print $7}' |awk -F-
'{print $1}'`
+# hrmnse=`squeue -j $SLURM_JOBID -l | awk 'END { print $7}' |awk -F-
'{print $2}'`
+# jhrs=`echo $hrmnse | awk -F: '{print $1}'`+24*$jdys
+# jmin=`echo $hrmnse | awk -F: '{print $2}'`+60*$jhrs
+# elif [ $flds = 3 ]; then
+# jhrs=`squeue -j $SLURM_JOBID -l | awk 'END { print $7}' |awk -F:
'{print $1}'`
+# jmin=`squeue -j $SLURM_JOBID -l | awk 'END { print $7}' |awk -F:
'{print $2}'`+60*$jhrs
+# elif [ $flds = 2 ]; then
+# jmin=`squeue -j $SLURM_JOBID -l | awk 'END { print $7}' |awk -F:
'{print $1}'`
+# fi
+# if [ $jhrs .gt. 5 ]; then
+# upd=30
+# else
+# upd=$jmin/10
+# fi
+ # For 5hrs and above uperiod is 30 min and for less than 5 hrs it is 10%
of total time
+ # compute uperiod
+  # prepare or use prepared update-storage script
+ # i(crontab -l 2>/dev/null; echo "*/$uperiod * * * * /path/to/job -with
args") | crontab - */30 * * * * /path/to/command
+ # Use crontab -r to remove it after the job is finished or ended ( under
trap)
+# mycron=jcron$$
+# #write out current crontab
+# crontab -l > $mycron
+# #echo new cron into cron file
+# #echo "*/$upd * * * * /home/gridchem/bin/joblogscp.sh >/dev/null 2>&1"
>> $mycron
+# echo "*/$upd * * * * scp *.log
[email protected]:$AIRAVATA_ExptDataDir >/dev/null 2>&1" >> $mycron
+# #echo "* * * * * sleep 20; /Users/spamidig1/bin/testscp.sh >/dev/null
2>&1" >> $mycron
+##install new cron file
+# crontab $mycron
+# crontab -l
+# # delete this entry at the end of the job or trap...
+fi
+inputs=`echo $AIRAVATA_INPUTS | sed 's/[//'`
+inputs=`echo $inputs | sed 's/]//'`
+echo "Airavata Inputs: $inputs"
+cd $subdir
+dos2unix -k $gaussian_input_full
+gaussian_input=${gaussian_input_full##*/}
+gaussian_output=${gaussian_input%.*}.log
+clobber="$3" # set to "" for rerun or debug; otherwise set to 1
+gcvars=$4
+null=""
+
+# Next line will usually return "cp: `GCVARS' and `GCVARS' are the same file"
+#if [ "$gcvars" ] ; then cp -p $gcvars GCVARS 2>/dev/null ; fi
+
+#if [ ! "$LOCAL_LOCATION" ] ; then
+# if [ -s ~/.paramchemlocation ] ; then
+# read LOCAL_LOCATION < ~/.paramchemlocation
+# . $LOCAL_LOCATION/environment
+# fi
+# fi
+#if [ ! -d "$LOCAL_LOCATION" ] ; then
+# echo "Warning: no valid LOCAL_LOCATION found" >&2
+#gauss_mem=56GB
+#gauss_nproc=24
+## Escaping spaces rather than quoting because quoting prevents tilde expansion
+##charmm_location=~gridchem/workflow_script/sys_exec/local/c36a6-132-serial\
-chsize\ 25140
+#SCRIPT_LOCATION=~gridchem/workflow_script/sys_exec/tools
+#LOCAL_LOCATION=~gridchem/workflow_script/sys_exec/local-comet
+#export CONPATH=$SCRIPT_LOCATION
+localarc="$HOME/scratch"
+##globalarc="[email protected]:/home/ccguser/mss/internal/$SGUserName"
+## exit 1
+# fi
+#. $LOCAL_LOCATION/environment
+
+#read GC_UserName GC_ProjectName GC_WorkflowName TIMESTAMP < GCVARS
+usrchkdir=$localarc/${SG_UserName}/
+echo " The Airavata Gateway User Directory is $usrchkdir"
+copysubdir="./"
+mkdir -p $usrchkdir
+#copysubdir=$localarc/${GC_UserName}/${GC_ProjectName}/${GC_WorkflowName}/$subdir
+# The way a "false" boolean variable is passed seems to be unstable; it's
+# been "", "0" and "false", so we try to cover all reasonable possibilities.
+if [ ! "$clobber" ] ; then
+ clobber=0
+ fi
+if [ "$clobber" = "0" -o "$clobber" = "false" -o "$clobber" = "no" ] ; then
+ if [ -s $copysubdir/$gaussian_output ] ; then
+ echo gaussian_output_log=$copysubdir/$gaussian_output
+ exit 0
+ fi
+ gaussian_output_full=${gaussian_input_full%.*}.log
+ if [ -s $gaussian_output_full ] ; then
+ #mkdir -p $copysubdir
+ rm -f $copysubdir/$gaussian_output # clean up symlink if something went
wrong earlier
+ if [ $gaussian_input_full -nt $copysubdir/$gaussian_input ] ; then sed
's/\r$//' $gaussian_input_full > $copysubdir/$gaussian_input ; fi
+ cp -up $gaussian_output_full $copysubdir
+ echo gaussian_output_log=$gaussian_output_full
+ exit 0
+ fi
+ fi
+
+#Process inputfile for run files and other job requirements
+ # PROCESS CHECKPOINT FILE
+ # Check to see if the checkpoint file is given a name in input deck
+ # Input file to look into
+ dummy="$gaussian_input_full"
+ #dummy="$Diskinputdir/$Diskinputfile"
+ checkpt="no"
+ junk=`/usr/bin/head -5 $dummy | /bin/grep -i "%chk"`
+ if [ "$junk" != "" ]; then
+ junk=`echo $junk | /bin/sed 's/=/@/'`
+ junk=`echo $junk | /bin/sed 's/ //'`
+ #
+ # Be careful: Don't lose the subdirectory information for CHKPT file
+ # Also, add .chk if there is no extension to the Checkpoint file
+ #
+ Chkfile=`expr $junk : '.*@\(.*\)'`
+ Chkdir="$Diskinputdir"
+ Chkfile=`/bin/basename $Chkfile`
+ dummy=`expr $Chkfile : '.*\(\..*\)'`
+ Chkfile=`/bin/basename $Chkfile $dummy`
+ ChkfileWNE="$Chkfile"
+ Chkfile="$Chkfile${dummy:-.chk}"
+//"`Chkfile=`echo $Chkfile | sed "s/
+ # 2005/12/08 create name for $formated_chkfile
+ formated_chkfile="$ChkfileWNE.fchk"
+ Chkfile_with_arch="${Chkfile}_$architecture"
+ echo "DEBUG: checkfile = $Chkfile and formated_chkfile = $formated_chkfile
";
+ checkpt="yes"
+#Retrieve the checkpoint file from the user archive directory
+ if [ -f "$usrchkdir/$Chkfile" ]; then
+ cp $usrchkdir/$Chkfile .
+ fi
+//"` export PJobID=`grep -i localjobid $gaussian_input_full | awk -F=
'{print $2}' | sed "s/
+# /bin/cat >> $qsubin << HERE2
+ #export PJobID=`grep -i localjobid $Diskinputfile | awk -F= '{print $2}' |
sed "s/^M//"`
+ #cd /oasis/scratch/comet/gridchem/temp_project/$PJobID
+ #cd \$SCRATCH_BATCH
+ ##if [ ${PJobID:-null} != "$null" ]; then
+ if [ "${PJobID}" != "" ]; then
+ #cp -r /work/ccguser/batch_scratch/$PJobID*/* .
+ #cp -r /oasis/scratch/comet/gridchem/temp_project/$PJobID*/* .
+ cp -r $HOME/scratch/$PJobID*/* .
+ ls -l
+ fi
+ else
+ echo "******** NO CHECKPOINT FILE IDENTIFIED ******** "
+ fi
+
+mkdir -p $copysubdir
+mkdir -p $subdir
+cd $subdir
+cwd=`pwd`
+if [ $gaussian_input_full -nt $copysubdir/$gaussian_input ] ; then sed
's/\r$//' $gaussian_input_full > $copysubdir/$gaussian_input ; fi
+cd $copysubdir
+rm -f $gaussian_output
+if [ "$cwd" != "$subdir" ]; then
+ ln -s $cwd/$gaussian_output $subdir/$gaussian_output
+fi
+cd $cwd
+if [ $gaussian_input_full -nt $gaussian_input ] ; then sed 's/\r$//'
$gaussian_input_full > $gaussian_input ; fi
+signals_to_trap="XCPU INT TERM CHLD"
+#trap "| grep -v $AIRAVATA_ExptDataDir | crontab -; rm -rf $mycron; cp -p
$gaussian_output $copysubdir; cp -p $Chkfile $copysubdir; exit 99"
$signals_to_trap
+#trap "crontab -l | grep -v $AIRAVATA_ExptDataDir | crontab -; rm -rf $mycron;
cp -p $gaussian_output $copysubdir; cp -p $Chkfile $copysubdir; exit 99"
$signals_to_trap
+cd $HOME/scratch
+ln -s $subdir $SLURM_JOBID
+ls -l $SLURM_JOBID/
+cd $cwd
+#$LOCAL_LOCATION/run_gaussian_local.sh $gaussian_input $gaussian_output
+which g16
+g16 $gaussian_input $gaussian_output
+#BEGIN{while(getline < infile) if ($0 ~ "^ *--[lL][iI][nN][kK]1--") nlink++}
+
+#if awk -v infile=$gaussian_input '
+# BEGIN{while(getline < infile) if ($0 ~ "^ *[lL][iI][nN][kK]1") nlink++}
+# /^ *Normal termination of Gaussian/{nnormal++}
+# END{if (nnormal == nlink+1) exit 1}' $gaussian_output ; then
+# echo "Gaussian terminated abnormally." >&2
+# exit 1
+#fi
+# Remove the cron entry to periodically stage the data to storage
+#crontab -l | grep -v "$AIRAVATA_ExptDataDir"" | crontab -
+#crontab -l
+#rm $mycron
+
+#rm $copysubdir/$gaussian_output
+cp -p $gaussian_output $copysubdir
+ if [ -f "$Chkfile" ]; then
+ cp -p $Chkfile $copysubdir
+ fi
+ if [ -f "$GAUSS_SCRDIR/$Chkfile" ]; then
+ cp -p $GAUSS_SCRDIR/$Chkfile .
+ fi
+ # Save checkpoint file to usrchkdir
+ #mkdir -p $usrchkdir
+ if [ -f "$Chkfile" ]; then
+ formchk $Chkfile
+ cp -f $Chkfile $usrchkdir
+ cp -f *.fchk $usrchkdir
+ cp -f *.fchk $copysubdir
+ fi
+#remove rwf files
+rm *.rwf*
+cd $HOME/scratch
+#ln -s $subdir $PBS_JOBID
+ls -l $SLURM_JOBID
+rm $SLURM_JOBID/*.rwf*
+echo gaussian_output_log=$cwd/$gaussian_output
+cat: S: No such file or directory
diff --git a/modules/agent-framework/airavata-agent/application/gromacs.sh
b/modules/agent-framework/airavata-agent/application/gromacs.sh
new file mode 100644
index 0000000000..dacb52f24e
--- /dev/null
+++ b/modules/agent-framework/airavata-agent/application/gromacs.sh
@@ -0,0 +1,165 @@
+#!/bin/sh -x
+# $Id: run_Gromacs_data_recovery.sh,v 1.0 2017/11/23 12:15:00 Sudhakar Exp $
+
+if [ $# -lt 1 -o $# -gt 11 ]; then
+ echo 1>&2 "Usage: $0 -c coord_file -s tpr_file -g log_file -e ener_file
[SEAGrid_UserName] [Gromacs_restart_input] "
+ #echo 1>&2 "Usage: $0 subdir Gromacs_restart_input"
+ exit 127
+fi
+
+# subdir depends on whether we're doing freq, water or PES. For freq and water,
+# it should be hardcoded in the Xbaya workflow. For PES, it should be an
+# additional array generated by the frontend. The contents of this array are
+# trivial, but creating an extra Xbaya service to generate it would add
+# unnecessary extra complexity. Besides, the frontend cannot avoid having to
+# pass at least one array: the array with gjf files.
+#subdir=$1
+subdir="$PWD"
+#Gromacs_res_input_full=$1
+Coord_file=$2
+Tpr_file=$4
+Log_file=$6
+Ener_file=$8
+Rest_file=$10
+if [ $AIRAVATA_USERNAME ]; then
+ echo " The Airavata Gateway User is $AIRAVATA_USERNAME"
+ SG_UserName="$AIRAVATA_USERNAME"
+elif [ $9 ]; then
+ SG_UserName=$9
+else
+ echo " The Airavata Gateway User is missing "
+exit
+fi
+dos2unix -k $Gromacs_res_input_full
+Gromacs_input=${Gromacs_res_input_full##*/}
+Gromacs_output=${Gromacs_input%.*}.log
+#clobber="$3" # set to "" for rerun or debug; otherwise set to 1
+clobber="1" # set to "" for rerun or debug; otherwise set to 1
+#gcvars=$4
+gcvars="GCVARS"
+null=""
+localarc="$HOME/scratch"
+# Next line will usually return "cp: `GCVARS' and `GCVARS' are the same file"
+if [ "$gcvars" ] ; then cp -p $gcvars GCVARS 2>/dev/null ; fi
+##
+#if [ ! "$LOCAL_LOCATION" ] ; then
+# if [ -s ~/.paramchemlocation ] ; then
+# read LOCAL_LOCATION < ~/.paramchemlocation
+# fi
+# fi
+#if [ ! -d "$LOCAL_LOCATION" ] ; then
+# echo "Error: no valid LOCAL_LOCATION found" >&2
+# exit 1
+# fi
+#. $LOCAL_LOCATION/environment
+
+#read GC_UserName GC_ProjectName GC_WorkflowName TIMESTAMP < GCVARS
+usrchkdir=$localarc/${SG_UserName}/
+echo " The Airavata Gateway User Directory is $usrchkdir"
+copysubdir="./"
+#copysubdir=$localarc/${GC_UserName}/${GC_ProjectName}/${GC_WorkflowName}/$subdir
+# The way a "false" boolean variable is passed seems to be unstable; it's
+# been "", "0" and "false", so we try to cover all reasonable possibilities.
+if [ ! "$clobber" ] ; then
+ clobber=0
+ fi
+if [ "$clobber" = "0" -o "$clobber" = "false" -o "$clobber" = "no" ] ; then
+ if [ -s $copysubdir/$Gromacs_output ] ; then
+ echo Gromacs_output_log=$copysubdir/$Gromacs_output
+ exit 0
+ fi
+ Gromacs_output_full=${Gromacs_input_full%.*}.log
+ if [ -s $Gromacs_output_full ] ; then
+ mkdir -p $copysubdir
+ rm -f $copysubdir/$Gromacs_output # clean up symlink if something went
wrong earlier
+ if [ $Gromacs_input_full -nt $copysubdir/$Gromacs_input ] ; then sed
's/\r$//' $Gromacs_input_full > $copysubdir/$Gromacs_input ; fi
+ cp -up $Gromacs_output_full $copysubdir
+ echo Gromacs_output_log=$Gromacs_output_full
+ exit 0
+ fi
+ fi
+
+#Process inputfile for run files and other job requirements
+//"`export PJobID=`grep -i localjobid restart.txt| awk -F= '{print $2}' | sed
"s/
+ if [ ${PJobID:-null} != "$null" ]; then
+ cp -r /home/scigap/scratch/${SG_UserName}/$PJobID*/* .
+ fi
+# # PROCESS CHECKPOINT FILE
+# # Check to see if the checkpoint file is given a name in input deck
+# # Input file to look into
+# dummy="$Gromacs_input_full"
+# #dummy="$Diskinputdir/$Diskinputfile"
+# checkpt="no"
+# junk=`/usr/bin/head -5 $dummy | /bin/grep -i "%chk"`
+# if [ "$junk" != "" ]; then
+# junk=`echo $junk | /bin/sed 's/=/@/'`
+# junk=`echo $junk | /bin/sed 's/ //'`
+# #
+# # Be careful: Don't lose the subdirectory information for CHKPT file
+# # Also, add .chk if there is no extension to the Checkpoint file
+# #
+# Chkfile=`expr $junk : '.*@\(.*\)'`
+# Chkdir="$Diskinputdir"
+# Chkfile=`/bin/basename $Chkfile`
+# dummy=`expr $Chkfile : '.*\(\..*\)'`
+# Chkfile=`/bin/basename $Chkfile $dummy`
+# ChkfileWNE="$Chkfile"
+# Chkfile="$Chkfile${dummy:-.chk}"
+//"` Chkfile=`echo $Chkfile | sed "s/
+# # 2005/12/08 create name for $formated_chkfile
+# formated_chkfile="$ChkfileWNE.fchk"
+# Chkfile_with_arch="${Chkfile}_$architecture"
+# echo "DEBUG: checkfile = $Chkfile and formated_chkfile =
$formated_chkfile ";
+# checkpt="yes"
+##Retrieve the checkpoint file from the user archive directory
+# if [ -f "$usrchkdir/$Chkfile" ]; then
+# cp $usrchkdir/$Chkfile .
+# fi
+# else
+# echo "******** NO CHECKPOINT FILE IDENTIFIED ******** "
+# fi
+
+mkdir -p $copysubdir
+mkdir -p $subdir
+cd $subdir
+cwd=`pwd`
+if [ $Gromacs_input_full -nt $copysubdir/$Gromacs_input ] ; then sed 's/\r$//'
$Gromacs_input_full > $copysubdir/$Gromacs_input ; fi
+cd $copysubdir
+rm -f $Gromacs_output
+if [ "$cwd" != "$subdir" ]; then
+ ln -s $cwd/$Gromacs_output $subdir/$Gromacs_output
+fi
+cd $cwd
+if [ $Gromacs_input_full -nt $Gromacs_input ] ; then sed 's/\r$//'
$Gromacs_input_full > $Gromacs_input ; fi
+module unload intel; module load gromacs
+if [ ${PJobID:-null} != "$null" ]; then
+ cp -r /home/scigap/scratch/${SG_UserName}/$PJobID*/* .
+ #mpiexec -genv I_MPI_FABRICS shm:ofa gmx_mpi mdrun -s $Tpr_file -cpi
state.cpt
+ mpirun -np $SLURM_NTASKS -genv I_MPI_FABRICS shm:ofa gmx_mpi mdrun -v
-deffnm em -s $Tpr_file -cpi state.cpt
+else
+ mpirun -np $SLURM_NTASKS -genv I_MPI_FABRICS shm:ofa gmx_mpi mdrun -v
-deffnm em -s $Tpr_file -c $Coord_file -g $Log_file -e $Ener_file
+fi
+
+mpiexec gmx_mpi mdrun -s $Tpr_file -cpi state.cpt
+##$LOCAL_LOCATION/run_Gromacs_local.sh $Gromacs_input $Gromacs_output
+##$LOCAL_LOCATION/run_Gromacs_local.sh $Gromacs_input $Gromacs_output
+#BEGIN{while(getline < infile) if ($0 ~ "^ *--[lL][iI][nN][kK]1--") nlink++}
+
+#if awk -v infile=$Gromacs_input '
+# BEGIN{while(getline < infile) if ($0 ~ "^ *[lL][iI][nN][kK]1") nlink++}
+# /^ *Normal termination of Gaussian/{nnormal++}
+# END{if (nnormal == nlink+1) exit 1}' $Gromacs_output ; then
+# echo "Gaussian terminated abnormally." >&2
+# exit 1
+#fi
+#rm $copysubdir/$Gromacs_output
+cp -p $Gromacs_output $copysubdir
+ # Save checkpoint file to usrchkdir
+# mkdir -p $usrchkdir
+# if [ -f "$Chkfile" ]; then
+# cp -f $Chkfile $usrchkdir
+# fi
+# # Create a link directory for this job with jobID in user scratch
+ ln -s $PWD ~/scratch/${SG_USERNAME}/$PBS_JOBID
+echo Gromacs_output_log=$cwd/$Gromacs_output
+#rm *.wfc*
diff --git a/modules/agent-framework/airavata-agent/application/namd.sh
b/modules/agent-framework/airavata-agent/application/namd.sh
new file mode 100644
index 0000000000..fd1297b23b
--- /dev/null
+++ b/modules/agent-framework/airavata-agent/application/namd.sh
@@ -0,0 +1,132 @@
+#!/bin/bash -x
+set -euo pipefail
+
+# ----------------------------------------------------------------------
+# SETUP
+# ----------------------------------------------------------------------
+export PATH=$PWD:$PATH
+export WORKDIR=$PWD
+export CS_HOME=$HOME/cybershuttle
+export MAMBA_ROOT_PREFIX=$CS_HOME/scratch
+export TMPDIR=$CS_HOME/scratch/tmp
+
+
+# ----------------------------------------------------------------------
+# PARSE COMMAND LINE ARGUMENTS
+# ----------------------------------------------------------------------
+
+required_vars=("NAMD_CPU_PATH" "NAMD_GPU_PATH" "NAMD_CPU_MODULES"
"NAMD_GPU_MODULES")
+for var in "${required_vars[@]}"; do
+ if [ -z "${!var}" ]; then
+ echo "$var is not set"
+ exit 2
+ fi
+done
+
+while getopts t:n:i:a:s: option; do
+ case $option in
+ t)
+ if [[ "$OPTARG" != "CPU" && "$OPTARG" != "GPU" ]]; then
+ echo "invalid argument -t $OPTARG: must be CPU|GPU."
+ exit 2
+ fi
+ EXECUTION_TYPE=$OPTARG
+ echo "EXECUTION_TYPE=$EXECUTION_TYPE"
+ module reset
+ if [ $EXECUTION_TYPE = "CPU" ]; then
+ # one replica at a time
+ echo 0 > $FIFO
+ NAMD_PATH=$NAMD_CPU_PATH
+ module load $NAMD_CPU_MODULES
+ elif [ $EXECUTION_TYPE = "GPU" ]; then
+ # one replica per GPU
+ for ((i=0; i<SLURM_GPUS_ON_NODE; i++)); do echo "$i" > $FIFO; done
+ NAMD_PATH=$NAMD_GPU_PATH
+ NAMD_EXTRA_ARGS+=("--CUDASOAintegrate" "on")
+ module load $NAMD_GPU_MODULES
+ fi
+ module list
+ ;;
+ n)
+ NUM_REPLICAS=$OPTARG
+ echo "NUM_REPLICAS=$NUM_REPLICAS"
+ ;;
+ i)
+ NAMD_INPUT_FILES=$(find $WORKDIR -maxdepth 1 -type f ! -name "*slurm*")
+ NAMD_CONF_FILE=$OPTARG
+ echo "NAMD_INPUT_FILES=$NAMD_INPUT_FILES"
+ echo "NAMD_CONF_FILE=$NAMD_CONF_FILE"
+ ;;
+ a)
+ AGENT_ID=$OPTARG
+ echo "AGENT_ID=$AGENT_ID"
+ ;;
+ s)
+ AGENT_SERVER=$OPTARG
+ echo "AGENT_SERVER=$AGENT_SERVER"
+ ;;
+ \?)
+ echo 1>&2 "Usage: $0"
+ echo 1>&2 " -t [CPU|GPU]"
+ echo 1>&2 " -n [NUM_REPLICAS]"
+ echo 1>&2 " -i [NAMD_CONF_FILE]"
+ echo 1>&2 " -a [AGENT_ID]"
+ echo 1>&2 " -s [AGENT_SERVER]"
+ exit 2
+ ;;
+ esac
+done
+shift $((OPTIND - 1))
+
+# ----------------------------------------------------------------------
+# RUN AGENT
+# ----------------------------------------------------------------------
+
+# initialize scratch/tmp and scratch/envs (node-local)
+CS_TEMP=$(readlink $CS_HOME/scratch/tmp)
+CS_ENVS=$(readlink $CS_HOME/scratch/envs)
+[ -n "$CS_TEMP" ] && mkdir -p $CS_TEMP
+[ -n "$CS_ENVS" ] && mkdir -p $CS_ENVS
+
+NAMD_EXTRA_ARGS=()
+FIFO=$(mktemp -u)
+mkfifo $FIFO
+
+wget -q
https://github.com/cyber-shuttle/binaries/releases/download/1.0.1/airavata-agent-linux-amd64
-O $WORKDIR/airavata-agent
+wget -q
https://github.com/cyber-shuttle/binaries/releases/download/1.0.1/kernel.py -O
$WORKDIR/kernel.py
+wget -q
https://github.com/mamba-org/micromamba-releases/releases/download/2.3.0-1/micromamba-linux-64
-O $WORKDIR/micromamba
+chmod +x $WORKDIR/airavata-agent $WORKDIR/micromamba
+$WORKDIR/airavata-agent --server "$AGENT_SERVER:19900" --agent "$AGENT_ID"
--environ base --lib "" --pip "" &
+AGENT_PID=$!
+trap 'kill -TERM $AGENT_PID' EXIT
+echo "Agent started with PID $AGENT_PID"
+
+
+# ----------------------------------------------------------------------
+# RUN NAMD3
+# ----------------------------------------------------------------------
+PIDS=()
+for REPLICA_ID in $(seq 1 $NUM_REPLICAS); do
+ (
+ read TOKEN <$FIFO
+
+ REPLICA_DIR=$WORKDIR/$REPLICA_ID
+ mkdir $REPLICA_DIR
+ cp $NAMD_INPUT_FILES $REPLICA_DIR/
+
+ [[ $EXECUTION_TYPE == "GPU" ]] && export CUDA_VISIBLE_DEVICES=$TOKEN
+ $NAMD_PATH/namd3 +setcpuaffinity +p $SLURM_CPUS_ON_NODE --cwd $REPLICA_DIR
"${NAMD_EXTRA_ARGS[@]}" \
+ $REPLICA_DIR/$NAMD_CONF_FILE >$REPLICA_DIR/$NAMD_CONF_FILE.out
2>$REPLICA_DIR/$NAMD_CONF_FILE.err
+ [[ $EXECUTION_TYPE == "GPU" ]] && unset CUDA_VISIBLE_DEVICES
+
+ echo $TOKEN > $FIFO
+
+ for FILE in $(ls $REPLICA_DIR/*.*); do
+ mv $FILE $REPLICA_ID"_"$(basename $FILE)
+ done
+ rm -rf $REPLICA_DIR/
+
+ ) &
+ PIDS+=($!)
+done
+wait "${PIDS[@]}"
diff --git a/modules/agent-framework/airavata-agent/application/pmemd_cuda.sh
b/modules/agent-framework/airavata-agent/application/pmemd_cuda.sh
new file mode 100644
index 0000000000..59378da8bb
--- /dev/null
+++ b/modules/agent-framework/airavata-agent/application/pmemd_cuda.sh
@@ -0,0 +1,4 @@
+#!/bin/bash -x
+set -euo pipefail
+
+srun pmemd.cuda "$@"
diff --git a/modules/agent-framework/airavata-agent/application/pmemd_mpi.sh
b/modules/agent-framework/airavata-agent/application/pmemd_mpi.sh
new file mode 100644
index 0000000000..b00bd75162
--- /dev/null
+++ b/modules/agent-framework/airavata-agent/application/pmemd_mpi.sh
@@ -0,0 +1,4 @@
+#!/bin/bash -x
+set -euo pipefail
+
+srun pmemd.MPI "$@"
diff --git a/modules/agent-framework/airavata-agent/application/psi4.sh
b/modules/agent-framework/airavata-agent/application/psi4.sh
new file mode 100644
index 0000000000..404c713d33
--- /dev/null
+++ b/modules/agent-framework/airavata-agent/application/psi4.sh
@@ -0,0 +1,4 @@
+#!/bin/bash -x
+set -euo pipefail
+
+psi4 "$@"