#!/bin/bash
set -euo pipefail

# Create CryoSPARC scheduler "lanes": for every Slurm account the user belongs
# to (except course accounts), generate a cluster_info.json/cluster_script.sh
# pair per wall-time limit and register it with `cryosparcm cluster connect`.
# Script made from the page
# https://docs.ycrc.yale.edu/clusters-at-yale/guides/cryosparc/

script_path=$(dirname "$0")                # relative
script_path=$(cd "${script_path}" && pwd)  # absolutized and normalized
install_path="$HOME"/cryosparc

# Usage
usage () {
    echo "Usage:"
    echo " -p install path : prefix for installation [${install_path}] "
    echo " -v              : be verbose"
    echo " -h              : print this notice"
    echo ""
}

VERBOSE=false

# Parse options
while getopts ":p:vh" opt; do
    case $opt in
        p)
            install_path="${OPTARG}"
            ;;
        v)
            VERBOSE=true
            ;;
        h)
            usage
            OPTIND=1
            exit 0
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            usage
            OPTIND=1
            exit 1
            ;;
        :)
            echo "Option -$OPTARG requires an argument." >&2
            usage
            OPTIND=1
            exit 1
            ;;
    esac
done
# Reset OPTIND to allow the next invocation to work
OPTIND=1

message() {
    if $VERBOSE; then
        echo "${1}"
    fi
}

echo "[Info] Starting the master if needed"
if [ "$("${script_path}"/cryosparcm.sh status | grep -c "CryoSPARC is not running")" -eq 1 ]; then
    "${script_path}"/cryosparcm.sh start
fi

user_name=$(whoami)
user_accounts=$(sacctmgr show assoc where user="${user_name}" format=Account%100 -P | grep -v Account)

mkdir -p "${install_path}/site_configs" && cd "${install_path}/site_configs"

# is_course_account ACCOUNT — true when the account's parent is "courses";
# course accounts must not get lanes.
is_course_account() {
    [ "$(sacctmgr show assoc where account="$1" format=ParentName%100 -P \
          | grep -v "Par Name" | grep -c courses)" -ne 0 ]
}

# create_lane WORKER_NAME ACCOUNT TIME_H GRES EXTRA
#   WORKER_NAME  lane name prefix (without the _<time>h suffix)
#   ACCOUNT      Slurm account to bill (#SBATCH -A)
#   TIME_H       wall time in hours
#   GRES         gpu count for --gres=gpu:  (a literal "{{ num_gpu }}" template
#                token, or a fixed number)
#   EXTRA        extra #SBATCH directives, each prefixed with a newline
# Writes the lane config + job template and registers the lane with the master.
create_lane() {
    local _worker_name=$1 _account=$2 _time=$3 _gres=$4 _extra=$5
    local _dir="${install_path}/site_configs/${_worker_name}_${_time}h"
    mkdir -p "${_dir}" && cd "${_dir}"
    cat << EOF > cluster_info.json
{
    "name": "${_worker_name}_${_time}h",
    "worker_bin_path": "${install_path}/cryosparc_worker/bin/cryosparcw",
    "cache_path": "/tmp/{{ cryosparc_username }}/cryosparc_cache",
    "cache_reserve_mb": 10000,
    "cache_quota_mb": 1000000,
    "send_cmd_tpl": "{{ command }}",
    "qsub_cmd_tpl": "sbatch {{ script_path_abs }}",
    "qstat_cmd_tpl": "squeue -j {{ cluster_job_id }}",
    "qdel_cmd_tpl": "scancel {{ cluster_job_id }}",
    "qinfo_cmd_tpl": "sinfo"
}
EOF
    # \${TMPDIR} stays escaped: it must be expanded when the job runs,
    # while ${USER} and the lane parameters are expanded now.
    cat << EOF > cluster_script.sh
#!/bin/bash -l
#SBATCH --job-name cryosparc_{{ project_uid }}_{{ job_uid }}
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task={{ num_cpu }}
#SBATCH --qos=gpu
#SBATCH --gres=gpu:${_gres}
#SBATCH --mem 90GB
#SBATCH --time ${_time}:00:00${_extra}
#SBATCH -o {{ job_dir_abs }}/slurm.out
#SBATCH -e {{ job_dir_abs }}/slurm.err
#SBATCH -A ${_account}

module load gcc cuda python

mkdir -p /tmp/${USER}
ln -sf \${TMPDIR} /tmp/${USER}/cryosparc_cache

{{ run_cmd }}
EOF
    "${script_path}"/cryosparcm.sh cluster connect
}

for _account in $user_accounts; do
    is_course_account "${_account}" && continue
    for time in 1 6 12 24 48 72; do
        # standard GPU lane
        create_lane "${HOSTNAME}-${_account}" "${_account}" "${time}" \
            '{{ num_gpu }}' ''
        # "CPU only" lane: still requests 1 GPU because the gpu QOS demands it
        create_lane "${HOSTNAME}-${_account}-CPUOnly-Max20" "${_account}" "${time}" \
            '1' ''
        # exclusive-node lane for SSD-cache-heavy jobs
        # (fixes the former `module load gcc cuda/ python` typo by sharing one
        # template with the other lane types)
        create_lane "${HOSTNAME}-${_account}-SSDExclusive" "${_account}" "${time}" \
            '{{ num_gpu }}' $'\n#SBATCH --exclusive'
    done
done
#!/bin/bash
set -euo pipefail

# Create CryoSPARC lanes bound to a Slurm reservation: one GPU lane and one
# "CPU only" lane per group account, registered with `cryosparcm cluster connect`.
# Script made from the page
# https://docs.ycrc.yale.edu/clusters-at-yale/guides/cryosparc/

script_path=$(dirname "$0")                # relative
script_path=$(cd "${script_path}" && pwd)  # absolutized and normalized
install_path="$HOME"/cryosparc

# Usage
usage () {
    echo "Usage:"
    echo " -p install path : prefix for installation [${install_path}] "
    echo " -v              : be verbose"
    echo " -h              : print this notice"
    echo ""
}

VERBOSE=false

read -p "Enter group account at SCITAS: " hpc_account
read -p "Enter reservation name: " reservation_name

# Parse options
while getopts ":p:vh" opt; do
    case $opt in
        p)
            install_path="${OPTARG}"
            ;;
        v)
            VERBOSE=true
            ;;
        h)
            usage
            OPTIND=1
            exit 0
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            usage
            OPTIND=1
            exit 1
            ;;
        :)
            echo "Option -$OPTARG requires an argument." >&2
            usage
            OPTIND=1
            exit 1
            ;;
    esac
done
# Reset OPTIND to allow the next invocation to work
OPTIND=1

message() {
    if $VERBOSE; then
        echo "${1}"
    fi
}

echo "[Info] Starting the master if needed"
if [ "$("${script_path}"/cryosparcm.sh status | grep -c "CryoSPARC is not running")" -eq 1 ]; then
    "${script_path}"/cryosparcm.sh start
fi

cd "${install_path}/site_configs"

# is_course_account ACCOUNT — true when the account's parent is "courses".
is_course_account() {
    [ "$(sacctmgr show assoc where account="$1" format=ParentName%100 -P \
          | grep -v "Par Name" | grep -c courses)" -ne 0 ]
}

# create_reserved_lane WORKER_NAME ACCOUNT MEM_GB GRES
# Writes the lane config + job template (3-day wall time, pinned to the
# reservation entered above) and registers the lane.
# NOTE: the original script also queried the account's MaxWall into an unused
# `max_wall` variable; that dead code was dropped.
create_reserved_lane() {
    local _worker_name=$1 _account=$2 _mem=$3 _gres=$4
    local _dir="${install_path}/site_configs/${_worker_name}_${_mem}gb"
    mkdir -p "${_dir}" && cd "${_dir}"
    cat << EOF > cluster_info.json
{
    "name": "${_worker_name}_${_mem}gb",
    "worker_bin_path": "${install_path}/cryosparc_worker/bin/cryosparcw",
    "cache_path": "/tmp/{{ cryosparc_username }}/cryosparc_cache",
    "cache_reserve_mb": 10000,
    "cache_quota_mb": 1000000,
    "send_cmd_tpl": "{{ command }}",
    "qsub_cmd_tpl": "sbatch {{ script_path_abs }}",
    "qstat_cmd_tpl": "squeue -j {{ cluster_job_id }}",
    "qdel_cmd_tpl": "scancel {{ cluster_job_id }}",
    "qinfo_cmd_tpl": "sinfo"
}
EOF
    # \${TMPDIR} stays escaped: expanded when the job runs, not now.
    cat << EOF > cluster_script.sh
#!/bin/bash -l
#SBATCH --job-name cryosparc_{{ project_uid }}_{{ job_uid }}
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task={{ num_cpu }}
#SBATCH --qos=gpu
#SBATCH --gres=gpu:${_gres}
#SBATCH --mem ${_mem}GB
#SBATCH --time 3-00:00:00
#SBATCH -o {{ job_dir_abs }}/slurm.out
#SBATCH -e {{ job_dir_abs }}/slurm.err
#SBATCH -A ${_account}
#SBATCH --reservation ${reservation_name}

module load gcc cuda python

mkdir -p /tmp/${USER}
ln -sf \${TMPDIR} /tmp/${USER}/cryosparc_cache

{{ run_cmd }}
EOF
    "${script_path}"/cryosparcm.sh cluster connect
}

for _account in $hpc_account; do
    is_course_account "${_account}" && continue
    for mem in 90; do
        # standard reserved GPU lane
        create_reserved_lane "${HOSTNAME}-${_account}-reserved" \
            "${_account}" "${mem}" '{{ num_gpu }}'
        # "CPU only" lane: still requests 1 GPU because the gpu QOS demands it
        create_reserved_lane "${HOSTNAME}-${_account}-reserved-CPUOnly-Max20" \
            "${_account}" "${mem}" '1'
    done
done
#!/bin/bash
set -euo pipefail

# Create a CryoSPARC web-UI user account, prompting interactively for the
# user's details.
# Script made from the page
# https://docs.ycrc.yale.edu/clusters-at-yale/guides/cryosparc/

script_path=$(dirname "$0")                # relative
script_path=$(cd "${script_path}" && pwd)  # absolutized and normalized
install_path="$HOME"/cryosparc

# Usage
usage () {
    echo "Usage:"
    echo " -p install path : prefix for installation [${install_path}] "
    echo " -v              : be verbose"
    echo " -h              : print this notice"
    echo ""
}

VERBOSE=false

read -p "Enter your first name: " firstname
read -p "Enter your last name: " lastname
read -p "Enter your email address: " mail
read -p "Enter user name for the account: " user_name
# -s: don't echo the password to the terminal (it was previously shown in
# clear text); the extra echo restores the newline read -s swallows.
read -s -p "Enter password for the account: " password
echo

# Parse options
while getopts ":p:vh" opt; do
    case $opt in
        p)
            install_path="${OPTARG}"
            ;;
        v)
            VERBOSE=true
            ;;
        h)
            usage
            OPTIND=1
            exit 0
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            usage
            OPTIND=1
            exit 1
            ;;
        :)
            echo "Option -$OPTARG requires an argument." >&2
            usage
            OPTIND=1
            exit 1
            ;;
    esac
done
# Reset OPTIND to allow the next invocation to work
OPTIND=1

message() {
    if $VERBOSE; then
        echo "${1}"
    fi
}

echo "[Info] Starting the master if needed"
if [ "$("${script_path}"/cryosparcm.sh status | grep -c "CryoSPARC is not running")" -eq 1 ]; then
    "${script_path}"/cryosparcm.sh start
fi

# Alternative: derive the identity from LDAP instead of prompting.
#user_name=$(whoami)
#mail=$(ldapsearch -x -LLL -h scoldap.epfl.ch -b "o=epfl,c=ch" uid=${user_name} mail | grep mail | awk '{ print $2 }')
#firstname=$(ldapsearch -x -LLL -h scoldap.epfl.ch -b "o=epfl,c=ch" uid=${user_name} givenName | grep givenName | awk '{ print $2 }')
#lastname=$(ldapsearch -x -LLL -h scoldap.epfl.ch -b "o=epfl,c=ch" uid=${user_name} sn | grep sn | awk '{ print $2 }')

if [[ -n "${password}" ]]; then
    # NOTE(review): the password is passed on argv and is briefly visible in
    # `ps` output; cryosparcm offers no stdin alternative here.
    "${script_path}"/cryosparcm.sh createuser --email "${mail}" --firstname "${firstname}" --lastname "${lastname}" --username "${user_name}" --password "${password}"
fi
#!/usr/bin/env bash
set -euo pipefail

# Wrapper around the real `cryosparcm`: patches config.sh for the current
# login node (master hostname / hostname check, optional base port), then
# forwards the given command with its remaining arguments.
install_path="$HOME"/cryosparc
cryosparcm_port=""   # initialised so set -u holds without set +u/-u dances

# Usage
usage () {
    echo "$0 -p -P command"
    echo "Usage:"
    echo " command         : command to pass to cryosparcm (start|stop)"
    echo " -p install path : prefix for installation [${install_path}] "
    echo " -P base_port    : base port [39000] "
    echo " -v              : be verbose"
    echo " -h              : print this notice"
    echo ""
}

# ${1:-} keeps set -u happy when no argument is given (replaces the former
# set +u / set -u bracketing).
if [ -z "${1:-}" ]; then
    usage
    exit 1
fi

# Parse options
while getopts ":P:p:h" opt; do
    case $opt in
        p)
            install_path="${OPTARG}"
            ;;
        P)
            cryosparcm_port="${OPTARG}"
            ;;
        h)
            usage
            OPTIND=1
            exit 0
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            usage
            OPTIND=1
            exit 1
            ;;
        :)
            echo "Option -$OPTARG requires an argument." >&2
            usage
            OPTIND=1
            exit 1
            ;;
    esac
done
shift $((OPTIND-1))
OPTIND=1

cmd=$1
shift

module load gcc cuda python
export PATH=${install_path}/cryosparc_master/bin:$PATH
# command -v is the portable replacement for `which`
cryosparcm_path="$(command -v cryosparcm 2> /dev/null)"
base_dir=$(dirname "$(dirname "${cryosparcm_path}")")

# izar3 lives in the hpc.epfl.ch subdomain; every other node in epfl.ch
master_host=$(hostname)
if [ "$master_host" = "izar3" ]; then
    master_host=$master_host.hpc.epfl.ch
else
    master_host=$master_host.epfl.ch
fi

cp "$base_dir"/config.sh "$base_dir"/config.sh.bak
sed -i -e 's/export CRYOSPARC_MASTER_HOSTNAME.*$/export CRYOSPARC_MASTER_HOSTNAME=\"'"$master_host"'\"/g' "$base_dir"/config.sh
sed -i -e 's/export CRYOSPARC_HOSTNAME_CHECK.*$/export CRYOSPARC_HOSTNAME_CHECK=\"'"$master_host"'\"/g' "$base_dir"/config.sh
if [ -n "${cryosparcm_port}" ]; then
    sed -i -e 's/export CRYOSPARC_BASE_PORT.*$/export CRYOSPARC_BASE_PORT='"$cryosparcm_port"'/g' "$base_dir"/config.sh
fi

source "$base_dir"/config.sh

# "$@" (was $*) keeps multi-word arguments intact
${cryosparcm_path} "$cmd" "$@"

# BUG FIX: `[[ $command =~ '.*start' ]]` quoted the regex, turning it into a
# literal-string comparison that could never match "start"/"restart", so the
# follow-up status report was dead code. A glob match does what was intended.
if [[ $cmd == *start ]]; then
    ${cryosparcm_path} status
fi
#!/bin/bash
set -euo pipefail

# Remove every previously registered izar* scheduler lane from the CryoSPARC
# master (used before re-creating lanes with create_lanes.sh).
# Script made from the page
# https://docs.ycrc.yale.edu/clusters-at-yale/guides/cryosparc/

script_path=$(dirname "$0")                # relative
script_path=$(cd "${script_path}" && pwd)  # absolutized and normalized
install_path="$HOME"/cryosparc
# (the stale db_path/worker_path variables the sibling scripts dropped are
# removed here too — nothing read them)

echo "[Info] Starting the master if needed"
if [ "$("${script_path}"/cryosparcm.sh status | grep -c "CryoSPARC is not running")" -eq 1 ]; then
    "${script_path}"/cryosparcm.sh start
fi

# `|| true`: with set -e/pipefail, an empty lane list (grep exits 1) must not
# abort the script — it should simply remove nothing.
old_lanes=$("${script_path}"/cryosparcm.sh cli 'get_scheduler_lanes()' \
    | grep -oP "(?<=')izar.*?(?=')" || true)

for _lane in $old_lanes; do
    "${script_path}"/cryosparcm.sh cli "remove_scheduler_lane('${_lane}')"
done

#${install_path}/cryosparc_master/bin/cryosparcm.sh stop