Page Menu — Home — c4science

study_sugama_J_K_params.py
No OneTemporary

File Metadata

Created
Tue, Apr 22, 14:58

study_sugama_J_K_params.py

import os
import sys
import f90nml
import numpy as np
import h5py
import time
# custom tools
import tools
# Toggle between gyrokinetic (True) and drift-kinetic (False) operator runs.
GK = False

# Directory layout: all scan results live in a subfolder of the launch directory.
cwd_directory = os.getcwd()
scandir_name = 'sugama.J.K.' + ('gk' if GK else 'dk')
scandir_path = os.path.join(cwd_directory, scandir_name)

# Read the model Fortran namelist that every job configuration starts from.
input_file = os.path.join(cwd_directory, 'sugama.J.K.fort.90.model')
with open(input_file) as fh:
    inputs = f90nml.read(fh)

# Scanned parameter ranges: J = 0..10, K = 0..10.
J_list = np.arange(0, 11)
K_list = np.arange(0, 11)

# Naming used for batch submissions.
job_name_base = 'sugama.J.K'
job_file_name = 'job.submit.cmd'

# Supported program modes; the mode is the first CLI argument (case-insensitive).
MODE_SCAN = 'scan'
MODE_ANALYSE = 'analyse'
MODE_RUN_CLUSTER = 'run_cluster'
selected_mode = sys.argv[1].lower() if len(sys.argv) >= 2 else None
def print_preamble():
    """Emit the standard study header and the directory the script runs from."""
    banner = '=== Study Sugama J,K parameters ==='
    print(banner)
    print('- Run from: %s' % cwd_directory)
def get_job_name(J, K):
    """Build the zero-padded job folder name for one (J, K) scan point."""
    fmt = 'job_J=%02i_K=%02i'
    return fmt % (J, K)
def get_job_directory(J, K):
    """Return the absolute path of the job directory for a (J, K) scan point."""
    job_name = get_job_name(J, K)
    return os.path.join(scandir_path, job_name)
def get_output_path(job_directory):
    """Return the path of the captured-stdout file inside *job_directory*."""
    output_name = 'output.txt'
    return os.path.join(job_directory, output_name)
# MODE SCAN: run the (J, K) scan locally, one serial execution per point,
# capturing each run's stdout into its job directory.
if selected_mode == MODE_SCAN:
    print_preamble()
    if tools.mkdir(scandir_path):
        print('- Generating directory: %s' % scandir_name)
    timestamp_start = time.time()
    print('', end='')
    for iJ, J in enumerate(J_list):
        for iK, K in enumerate(K_list):
            job_directory = get_job_directory(J, K)
            tools.mkdir(job_directory)
            timestamp_now = time.time()
            # In-place progress line (\r rewrites the same terminal line).
            print('\r- Scan J=%02i / K=%02i [%.2f%%] \t %.2fs elapsed...' % (J, K, 100*(len(K_list)*iJ+iK+1)/(len(J_list)*len(K_list)), (timestamp_now - timestamp_start)), end='')
            # Copy the T4.in input file into the job directory.
            os.system("cp "+tools.T4_in_path+" " + os.path.join(job_directory, "T4.in") )
            # Set this point's scan parameters in the namelist and write it out.
            inputs['basic']['impsugamajmax'] = J
            inputs['basic']['impsugamakmax'] = K
            inputs['operator_model']['gke'] = 1 if GK else 0
            inputs['operator_model']['gki'] = 1 if GK else 0
            inputs.write(os.path.join(job_directory, "fort.90"), 'w')
            # Run the executable from inside the job directory and capture stdout.
            os.chdir(job_directory)
            script_output = os.popen(tools.exec_path).read()
            # BUGFIX: context manager guarantees the output file is closed
            # even if the write raises (original used bare open/close).
            with open(get_output_path(job_directory), 'w') as output_file:
                output_file.write(script_output)
    print()
    print('- Total time: %.2fs' % (time.time() - timestamp_start))
    # Go back to the launch directory.
    os.chdir(cwd_directory)
# MODE RUN_CLUSTER run scan on cluster
elif selected_mode == MODE_RUN_CLUSTER:
print_preamble()
if tools.mkdir(scandir_path):
print('- Generating directory: %s' % scandir_name)
job_ids = []
timestamp_start = time.time()
for iJ, J in enumerate(J_list):
for iK, K in enumerate(K_list):
job_directory = get_job_directory(J,K)
tools.mkdir(job_directory)
# copy T4.in file
os.system("cp "+tools.T4_in_path+" " + os.path.join(job_directory, "T4.in") )
# Create symbolic link to exec
if os.path.exists(os.path.join(job_directory, tools.exec_name)):
os.remove(os.path.join(job_directory, tools.exec_name))
os.system('ln -s %s %s' % (tools.exec_path, os.path.join(job_directory, tools.exec_name)))
# We set parameters
inputs['basic']['impsugamajmax'] = J
inputs['basic']['impsugamakmax'] = K
inputs['operator_model']['gke'] = 1 if GK else 0
inputs['operator_model']['gki'] = 1 if GK else 0
inputs.write(os.path.join(job_directory, "fort.90"), 'w')
# Create job batch script
job_file_path = os.path.join(job_directory, job_file_name)
job_config_file = open(job_file_path, 'w')
job_config_file.write('#!/bin/bash\n')
job_config_file.write('#SBATCH --job-name=%s\n' % (job_name_base + "/" + get_job_name(J,K)))
job_config_file.write('#SBATCH --time=00:10:00\n')
job_config_file.write('#SBATCH --nodes=1\n')
job_config_file.write('#SBATCH --ntasks=8\n')
job_config_file.write('#SBATCH --cpus-per-task=1\n')
job_config_file.write('#SBATCH --output=output.txt\n')
job_config_file.write('#SBATCH --error=errors.txt\n')
job_config_file.write('module purge\n')
job_config_file.write('module load PrgEnv-intel/17.0\n')
job_config_file.write('srun ./CO 2 2 4\n')
job_config_file.close()
# Run
os.chdir(job_directory)
# os.system('rm *.o *.e')
cmd_out = os.popen('sbatch %s' % job_file_name).read()
job_id = int(cmd_out.split('job')[-1])
job_ids.append(job_id)
tools.wait_untile_all_jobs_done(job_ids)
print('- Total time: %.2fs' % (time.time() - timestamp_start))
# Go back to script_dir
os.chdir(cwd_directory)
# MODE ANALYSE read generated data and perform analysis
elif selected_mode == MODE_ANALYSE:
print_preamble()
# Define vars
CeiT_volume = []
CeiF_volume = []
CieT_volume = []
CieF_volume = []
Cii_volume = []
Cee_volume = []
for iJ, J in enumerate(J_list):
for iK, K in enumerate(K_list):
job_directory = get_job_directory(J,K)
if not os.path.exists(job_directory):
print('- ERROR J=%i / K=%i: job_directory does not exists!')
continue
# Get matrices
h5_ei = h5py.File(os.path.join(job_directory, 'ei.h5'), 'r')
CeiT = np.array(h5_ei['Ceipj']['CeipjT'])
CeiF = np.array(h5_ei['Ceipj']['CeipjF'])
CeiT_volume.append(np.copy(CeiT))
CeiF_volume.append(np.copy(CeiF))
h5_ie = h5py.File(os.path.join(job_directory, 'ie.h5'), 'r')
CieT = h5_ei['Ceipj']['CeipjT']
CieF = h5_ei['Ceipj']['CeipjF']
CieT_volume.append(CieT)
CieF_volume.append(CieF)
h5_self = h5py.File(os.path.join(job_directory, 'self.h5'), 'r')
Cee = h5_self['Caapj']['Ceepj']
Cii = h5_self['Caapj']['Ciipj']
Cee_volume.append(Cee)
Cii_volume.append(Cii)
#break # TODO(Sam): Remove...
#break # TODO(Sam): Remove...
# Wrap up volumes
CeiT_volume = np.array(CeiT_volume)
CeiF_volume = np.array(CeiF_volume)
CieT_volume = np.array(CieT_volume)
CieF_volume = np.array(CieF_volume)
Cii_volume = np.array(Cii_volume)
Cee_volume = np.array(Cee_volume)
# TODO(Sam): Check pk on a pas d'évolution sur les éléments de matrices...
matrix_size = CeiT_volume.shape[1]
NJ, NK = len(J_list), len(K_list)
CeiT_volume = CeiT_volume.reshape([NJ, NK, matrix_size, matrix_size])
CeiF_volume = CeiF_volume.reshape([NJ, NK, matrix_size, matrix_size])
CieT_volume = CieT_volume.reshape([NJ, NK, matrix_size, matrix_size])
CieF_volume = CieF_volume.reshape([NJ, NK, matrix_size, matrix_size])
Cee_volume = Cee_volume.reshape([NJ, NK, matrix_size, matrix_size])
Cii_volume = Cii_volume.reshape([NJ, NK, matrix_size, matrix_size])
print('\n- Diff eiT:')
dK = np.diff(CeiT_volume, axis=1)
print(dK[NJ-1, :, :, :].shape)
print(np.max(np.diff(CeiT_volume, axis=0)))
print(np.max(np.diff(CeiT_volume, axis=1)))
# print('\n- Diff eiF:')
# print(np.max(np.diff(CeiF_volume, axis=0)))
# print(np.max(np.diff(CeiF_volume, axis=1)))
# print('\n- Diff ii:')
# print(np.max(np.diff(Cii_volume, axis=0)))
# print(np.max(np.diff(Cii_volume, axis=1)))
# print(CeiT_volume[:, :, 2,1])
# NO MODE or WRONG MODE error
else:
if selected_mode:
print('MODE "%s" does not exist!' % selected_mode)
else:
print('You must specify the program MODE:')
print('> python study_sugama_J_K_params.py [MODE]...')
print('- %8s\t Start a scan of parameters J,K' % MODE_SCAN)
print('- %8s\t Start a scan on cluster' % MODE_RUN_CLUSTER)
print('- %8s\t Run analysis of results' % MODE_ANALYSE)

Event Timeline