diff --git a/BlackDynamite/base.py b/BlackDynamite/base.py
index b756d76..87c386e 100755
--- a/BlackDynamite/base.py
+++ b/BlackDynamite/base.py
@@ -1,449 +1,463 @@
#!/usr/bin/env python3
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
################################################################
from __future__ import print_function
################################################################
from . import job
from . import bdparser
from . import run
from . import bdlogging
from . import jobselector
import os
import psycopg2
import re
import sys
import getpass
import datetime
-import logging
+import atexit
################################################################
__all__ = ["Base"]
print = bdlogging.invalidPrint
-logger = logging.getLogger(__name__)
+logger = bdlogging.getLogger(__name__)
################################################################
class Base(object):
"""
"""
def getRunFromID(self, run_id):
myrun = run.Run(self)
myrun["id"] = run_id
myrun.id = run_id
run_list = myrun.getMatchedObjectList()
if len(run_list) != 1:
raise Exception('Unknown run {0}'.format(run_id))
return run_list[0]
def getJobFromID(self, job_id):
myjob = job.Job(self)
myjob["id"] = job_id
myjob.id = job_id
job_list = myjob.getMatchedObjectList()
if len(job_list) != 1:
raise Exception('Unknown run {0}'.format(job_id))
return job_list[0]
def createBase(self, job_desc, run_desc, quantities={}, **kwargs):
# logger.debug (quantities)
self.createSchema(kwargs)
self.createTable(job_desc)
self.createTable(run_desc)
self.createGenericTables()
for qname, type in quantities.items():
self.pushQuantity(qname, type)
if self.truerun:
self.commit()
def getObject(self, sqlobject):
curs = self.connection.cursor()
curs.execute("SELECT * FROM {0}.{1} WHERE id = {2}".format(
self.schema, sqlobject.table_name, sqlobject.id))
col_info = self.getColumnProperties(sqlobject)
line = curs.fetchone()
for i in range(0, len(col_info)):
col_name = col_info[i][0]
sqlobject[col_name] = line[i]
def createSchema(self, params={"yes": False}):
# create the schema of the simulation
curs = self.connection.cursor()
curs.execute(("SELECT schema_name FROM information_schema.schemata"
" WHERE schema_name = '{0}'").format(
self.schema).lower())
if curs.rowcount:
validated = bdparser.validate_question(
"Are you sure you want to drop the schema named '" +
self.schema + "'", params, False)
if validated is True:
curs.execute("DROP SCHEMA {0} cascade".format(self.schema))
else:
logger.debug("creation canceled: exit program")
sys.exit(-1)
curs.execute("CREATE SCHEMA {0}".format(self.schema))
def createTypeCodes(self):
curs = self.connection.cursor()
curs.execute("SELECT typname,oid from pg_type;")
self.type_code = {}
for i in curs:
if i[0] == 'float8':
self.type_code[i[1]] = float
if i[0] == 'text':
self.type_code[i[1]] = str
if i[0] == 'int8':
self.type_code[i[1]] = int
if i[0] == 'int4':
self.type_code[i[1]] = int
if i[0] == 'bool':
self.type_code[i[1]] = bool
if i[0] == 'timestamp':
self.type_code[i[1]] = datetime.datetime
def createTable(self, obj):
request = obj.createTableRequest()
curs = self.connection.cursor()
logger.debug(request)
curs.execute(request)
def createGenericTables(self,):
sql_script_name = os.path.join(os.path.dirname(__file__),
"build_tables.sql")
curs = self.connection.cursor()
# create generic tables
query_list = list()
with open(sql_script_name, "r") as fh:
for line in fh:
query_list.append(re.sub("SCHEMAS_IDENTIFIER",
self.schema, line))
curs.execute("\n".join(query_list))
def getColumnProperties(self, sqlobject):
curs = self.connection.cursor()
curs.execute("SELECT * FROM {0}.{1} LIMIT 0".format(
self.schema, sqlobject.table_name))
column_names = [desc[0] for desc in curs.description]
column_type = [desc[1] for desc in curs.description]
return list(zip(column_names, column_type))
def setObjectItemTypes(self, sqlobject):
col_info = self.getColumnProperties(sqlobject)
for i, j in col_info:
sqlobject.types[i] = self.type_code[j]
# logger.debug (str(i) + " " + str(self.type_code[j]))
def insert(self, sqlobject):
curs = self.performRequest(*(sqlobject.insert()))
sqlobject.id = curs.fetchone()[0]
def performRequest(self, request, params=[]):
curs = self.connection.cursor()
# logger.debug (request)
# logger.debug (params)
try:
curs.execute(request, params)
except psycopg2.ProgrammingError as err:
raise psycopg2.ProgrammingError(
("While trying to execute the query '{0}' with parameters " +
"'{1}', I caught this: '{2}'").format(request, params, err))
return curs
def createParameterSpace(self, myjob, entry_nb=0,
tmp_job=None, nb_inserted=0):
"""
This function is a recursive call to generate the points
in the parametric space
The entries of the jobs are treated one by one
in a recursive manner
"""
- keys = myjob.entries.keys()
+ # keys() gives a non-indexable view
+ keys = list(myjob.entries.keys())
nparam = len(keys)
# if this is the case I have done all the
# entries of the job
# it is time to insert it (after some checks)
if entry_nb == nparam:
if tmp_job is None:
raise RuntimeError("internal error")
# check if already inserted
jselect = jobselector.JobSelector(self)
jobs = jselect.selectJobs(tmp_job, quiet=True)
if len(jobs) > 0:
return nb_inserted
# insert it
nb_inserted += 1
logger.info("insert job #{0}".format(nb_inserted) +
': ' + str(tmp_job.entries))
self.insert(tmp_job)
return nb_inserted
if tmp_job is None:
tmp_job = job.Job(self)
# the key that I am currently treating
key = keys[entry_nb]
e = myjob[key]
# if this is a list I have to create several parametric points
if not isinstance(e, list):
e = [e]
for value in e:
tmp_job[key.lower()] = value
nb_inserted = self.createParameterSpace(
myjob, entry_nb+1, tmp_job, nb_inserted)
if self.truerun:
self.commit()
return nb_inserted
def pushQuantity(self, name, type_code, description=None):
""" implemented type_codes: "int" "float" "int.vector" "float.vector"
"""
if ((type_code == "int") or (type_code == int)):
is_integer = True
is_vector = False
elif (type_code == "int.vector"):
is_integer = True
is_vector = True
elif ((type_code == "float") or (type_code == float)):
is_integer = False
is_vector = False
elif (type_code == "float.vector"):
is_integer = False
is_vector = True
else:
raise Exception(
"invalid type '{0}' for a quantity".format(type_code))
curs = self.connection.cursor()
curs.execute("""
INSERT INTO {0}.quantities (name, is_integer, is_vector, description)
VALUES (%s , %s , %s, %s) RETURNING id
""".format(self.schema), (name, is_integer, is_vector, description))
item = curs.fetchone()
if (item is None):
raise Exception("Counld not create quantity \"" + name + "\"")
return item[0]
def commit(self):
logger.debug("commiting changes to base")
self.connection.commit()
def getUserList(self):
curs = self.connection.cursor()
curs.execute("""
select tableowner from pg_tables where tablename = 'runs';
""")
users = [desc[0] for desc in curs]
return users
def getStudySize(self, study):
curs = self.connection.cursor()
try:
logger.info(study)
curs.execute("""
select sz from (SELECT SUM(pg_total_relation_size(quote_ident(schemaname)
|| '.' || quote_ident(tablename)))::BIGINT
FROM pg_tables WHERE schemaname = '{0}') as sz
""".format(study))
size = curs.fetchone()[0]
curs.execute("""
select pg_size_pretty(cast({0} as bigint))
""".format(size))
size = curs.fetchone()[0]
curs.execute("""
select count({0}.runs.id) from {0}.runs
""".format(study))
nruns = curs.fetchone()[0]
curs.execute("""
select count({0}.jobs.id) from {0}.jobs
""".format(study))
njobs = curs.fetchone()[0]
except psycopg2.ProgrammingError:
self.connection.rollback()
size = '????'
return {'size': size, 'nruns': nruns, 'njobs': njobs}
def grantAccess(self, study, user):
curs = self.connection.cursor()
curs.execute("""
grant SELECT on ALL tables in schema {0} to {1};
grant USAGE on SCHEMA {0} to {1};
""".format(study, user))
self.commit()
def revokeAccess(self, study, user):
curs = self.connection.cursor()
curs.execute("""
revoke SELECT on ALL tables in schema {0} from {1};
revoke USAGE on SCHEMA {0} from {1};
""".format(study, user))
self.commit()
def getStudyOwner(self, schema):
curs = self.connection.cursor()
curs.execute("""
select grantor from information_schema.table_privileges
where (table_name,table_schema,privilege_type)
= ('runs','{0}','SELECT');
""".format(schema))
owners = [desc[0] for desc in curs]
return owners[0]
def getGrantedUsers(self, schema):
curs = self.connection.cursor()
curs.execute("""
select grantee from information_schema.table_privileges
where (table_name,table_schema,privilege_type)
= ('runs','{0}','SELECT');
""".format(schema))
granted_users = [desc[0] for desc in curs]
return granted_users
def getSchemaList(self, filter_names=True):
curs = self.connection.cursor()
curs.execute("""
SELECT distinct(table_schema) from information_schema.tables
where table_name='runs'
""")
schemas = [desc[0] for desc in curs]
filtered_schemas = []
if filter_names is True:
for s in schemas:
m = re.match('{0}_(.+)'.format(self.user), s)
if m:
s = m.group(1)
filtered_schemas.append(s)
else:
filtered_schemas = schemas
return filtered_schemas
def checkStudy(self, dico):
if "study" not in dico:
message = "\n" + "*"*30 + "\n"
message += "Parameter 'study' must be provided at command line\n"
message += "possibilities are:\n"
schemas = self.getSchemaList()
for s in schemas:
message += "\t" + s + "\n"
message += "\n"
message += "FATAL => ABORT\n"
message += "*"*30 + "\n"
logger.error(message)
sys.exit(-1)
def close(self):
if 'connection' in self.__dict__:
logger.debug('closing database session')
self.connection.close()
del (self.__dict__['connection'])
- def __del__(self):
- self.close()
-
- def __init__(self, truerun=False, **kwargs):
+ def __init__(self, truerun=False, creation=False, **kwargs):
psycopg2_params = ["host", "user", "port", "password"]
connection_params = bdparser.filterParams(psycopg2_params, kwargs)
connection_params['dbname'] = 'blackdynamite'
if ("password" in connection_params and
connection_params["password"] == 'ask'):
connection_params["password"] = getpass.getpass()
logger.debug('connection arguments: {0}'.format(connection_params))
try:
self.connection = psycopg2.connect(**connection_params)
logger.debug('connected to base')
except Exception as e:
logger.error(
"Connection failed: check your connection settings:\n" +
str(e))
sys.exit(-1)
assert(isinstance(self.connection, psycopg2._psycopg.connection))
self.dbhost = (kwargs["host"]
if "host" in kwargs.keys()
else "localhost")
if 'user' in kwargs:
self.user = kwargs["user"]
else:
self.user = os.getlogin()
if ("should_not_check_study" not in kwargs):
self.checkStudy(kwargs)
if 'study' in kwargs:
- if re.match('(.+)_(.+)', kwargs["study"]):
+ # Need this because getSchemaList strips prefix
+ match = re.match('(.+)_(.+)', kwargs["study"])
+ if match:
self.schema = kwargs["study"]
+ study_name = match.group(2)
else:
self.schema = kwargs["user"] + '_' + kwargs["study"]
+ study_name = kwargs["study"]
+
+ if (
+ (creation is not True) and
+ (study_name not in self.getSchemaList())):
+ raise RuntimeError("Study name '{}' invalid".format(
+ study_name))
self.createTypeCodes()
self.truerun = truerun
if("list_parameters" in kwargs and kwargs["list_parameters"] is True):
message = self.getPossibleParameters()
logger.info("\n{0}".format(message))
sys.exit(0)
+ # We should avoid using __del__ to close DB
+ def close_db():
+ self.close()
+
+ atexit.register(close_db)
+
def getPossibleParameters(self):
myjob = job.Job(self)
message = ""
message += ("*"*65 + "\n")
message += ("Job parameters:\n")
message += ("*"*65 + "\n")
params = [str(j[0]) + ": " + str(j[1])
for j in myjob.types.items()]
message += ("\n".join(params)+"\n")
myrun = run.Run(self)
message += ("*"*65 + "\n")
message += ("Run parameters:\n")
message += ("*"*65 + "\n")
params = [str(j[0]) + ": " + str(j[1])
for j in myrun.types.items()]
message += ("\n".join(params))
return message
################################################################
if __name__ == "__main__":
connection = psycopg2.connect(host="localhost")
job_description = job.Job(dict(hono=int, lulu=float, toto=str))
base = Base("honoluluSchema", connection, job_description)
base.create()
connection.commit()
base.pushJob(dict(hono=12, lulu=24.2, toto="toto"))
base.pushQuantity("ekin", "float")
connection.commit()
diff --git a/BlackDynamite/bdlogging.py b/BlackDynamite/bdlogging.py
index 997054b..d00616e 100644
--- a/BlackDynamite/bdlogging.py
+++ b/BlackDynamite/bdlogging.py
@@ -1,111 +1,84 @@
#!/usr/bin/env python3
from __future__ import print_function
import logging
import traceback
import os
+import sys
-################################################################
+from . import __name__ as global_name
+# Base level logger
+root_logger = logging.getLogger(global_name)
+root_logger.setLevel(logging.DEBUG) # Avoid hard-filtering
-def invalidPrint(x):
- raise Exception('print should not be used in that class: '
- 'use the logging system instead: "{0}"'.format(x))
-
-################################################################
-
-
-file_handler = None
-
-
-def setFileHandler(logger, streamformatter):
- if globals()['file_handler'] is None:
- # print("resetting the file for {0}".format(logger.name))
- f = open('bd.log', 'w')
- f.close()
- globals()['file_handler'] = logging.FileHandler('bd.log')
- file_handler = globals()['file_handler']
- file_handler.setFormatter(streamformatter)
- if file_handler not in logger.handlers:
- logger.addHandler(file_handler)
-################################################################
-
+# Logging format
+BD_FORMAT = "%(levelname)s:%(funcName)s [%(filename)s:%(lineno)d]: %(message)s"
-Parent = logging.getLoggerClass()
+sh = logging.StreamHandler(sys.stderr)
+sh.setLevel(logging.WARNING) # Only show warnings to screen
+sh.setFormatter(logging.Formatter(BD_FORMAT))
+root_logger.addHandler(sh)
-class BDlogger(Parent):
- def __init__(self, name):
- super().__init__(name)
- self.name = name
- self.logger_screen = Parent(name + 'screen')
- self.logger_file = Parent(name + 'file')
- self.streamformatter = logging.Formatter(
- fmt='%(levelname)s:%(foo)50s:%(f)15s:%(l)s:' +
- ' '*10 + '%(message)s')
+class ExtraContext:
+ """Adds some context to logging"""
+ _wire = {
+ 'foo': lambda x: x.foo(),
+ 'l': lambda x: x.lololo(),
+ 'f': lambda x: x.fname()
+ }
- self.screen_handler = logging.StreamHandler()
- self.screen_handler.setFormatter(self.streamformatter)
- self.logger_screen.addHandler(self.screen_handler)
- self.enable_file = False
- self.setScreenLevel(logging.INFO)
+ @staticmethod
+ def getTr():
+ return traceback.extract_stack(limit=20)[11]
- def setLogFileLevel(self, level):
- self.logger_file.setLevel(level)
+ def fname(self):
+ return os.path.basename(self.getTr()[0])
- def setScreenLevel(self, level):
- self.logger_screen.setLevel(level)
+ def lololo(self):
+ return self.getTr()[1]
- def activateFileLog(self, level=logging.DEBUG):
- setFileHandler(self.logger_file, self.streamformatter)
- self.enable_file = True
- self.setLogFileLevel(level)
+ def foo(self):
+ return self.getTr()[2]
- def getExtra(self):
- extra = {}
- tr = traceback.extract_stack(limit=3)
- tr = tr[0]
- fname = os.path.basename(tr[0])
+ def __getitem__(self, name):
+ return self._wire[name](self)
- extra['foo'] = tr[2]
- extra['f'] = fname
- extra['l'] = tr[1]
- return extra
+ def __iter__(self):
+ d = {k: self._wire[k](self) for k in self._wire}
+ return iter(d)
- def debug(self, x):
- self.logger_screen.debug(x, extra=self.getExtra())
- if self.enable_file is True:
- self.logger_file.debug(x, extra=self.getExtra())
- def warning(self, x):
- self.logger_screen.warning(x, extra=self.getExtra())
- if self.enable_file is True:
- self.logger_file.warning(x, extra=self.getExtra())
-
- def info(self, x):
- self.logger_screen.info(x, extra=self.getExtra())
- if self.enable_file is True:
- self.logger_file.info(x, extra=self.getExtra())
-
- def error(self, x):
- self.logger_screen.error(x, extra=self.getExtra())
- if self.enable_file is True:
- self.logger_file.error(x, extra=self.getExtra())
-
-
-################################################################
-logging.setLoggerClass(BDlogger)
-################################################################
-logger = logging.getLogger(__name__)
-################################################################
+def invalidPrint(x):
+ raise Exception('print should not be used in that class: '
+ 'use the logging system instead: "{0}"'.format(x))
def activateFileLogging():
- for name, log in logging.Logger.manager.loggerDict.items():
- if isinstance(log, BDlogger):
- log.activateFileLog()
-
-
-__all__ = ["bdlogging"]
+ """Activate logging to file (if not already enabled)"""
+ # formatter = logging.Formatter(fmt='%(levelname)s:%(foo)50s:%(f)15s:%(l)s:'
+ # + ' '*10 + '%(message)s')
+ formatter = logging.Formatter(BD_FORMAT)
+
+ # Handler for file
+ bd_file_handler = logging.FileHandler('bd.log', mode='a+')
+ bd_file_handler.setFormatter(formatter)
+ bd_file_handler.setLevel(logging.DEBUG) # Log everything to file
+ if '_has_file_handler' not in globals() \
+ or not globals()['_has_file_handler']:
+ logger = logging.getLogger(global_name)
+ logger.debug("Activating logging to file")
+ logger.addHandler(bd_file_handler)
+
+ # This should be the first line logged in file
+ logger.debug("Activated logging to file")
+ globals()['_has_file_handler'] = True
+
+
+def getLogger(name):
+ logger = logging.getLogger(name)
+ logger.propagate = True
+ return logger
diff --git a/BlackDynamite/bdparser.py b/BlackDynamite/bdparser.py
index bd91e9b..f7d8753 100755
--- a/BlackDynamite/bdparser.py
+++ b/BlackDynamite/bdparser.py
@@ -1,659 +1,658 @@
#!/usr/bin/env python3
from __future__ import print_function
from . import __path__ as BD_path
from . import base
from . import run
from . import bdlogging
import sys
import re
import os
import stat
import pwd
import argcomplete
import argparse
from argcomplete.completers import EnvironCompleter
import traceback
from types import ModuleType
import imp
################################################################
-import logging
print = bdlogging.invalidPrint
-logger = logging.getLogger(__name__)
+logger = bdlogging.getLogger(__name__)
################################################################
class BDParser(object):
def listPossibleHosts(self):
logger.debug("in")
bd_dir = os.path.expanduser("~/.blackdynamite")
bd_hosts = os.path.join(bd_dir, 'hosts')
hosts = []
try:
hosts += [h.strip() for h in open(bd_hosts)]
except Exception:
pass
return hosts
def listPossibleModules(self, pre_args):
logger.debug("in")
paths = []
if ("PYTHONPATH" in os.environ):
paths = os.environ["PYTHONPATH"].split(':')
if ("module_path" in pre_args):
paths += pre_args["module_path"].split(':')
paths += BD_path
paths += [path + "/coating" for path in BD_path]
module_list = []
paths = [p.strip() for p in paths if not p.strip() == '']
for p in paths:
files = os.listdir(p)
files = [f for f in files if os.path.splitext(f)[1] == '.py']
files = [f for f in files if not f[0] == '_']
matching_string = ".*blackdynamite.*"
files = [os.path.splitext(f)[0] for
f in files if re.match(
matching_string,
open(os.path.join(p, f)).read().replace('\n', ' '),
flags=re.IGNORECASE)]
module_list += files
logger.debug("found these files " + str(module_list))
return module_list
def updatePossibleHosts(self, new_host):
logger.debug("in")
bd_dir = os.path.expanduser("~/.blackdynamite")
bd_hosts = os.path.join(bd_dir, 'hosts')
hosts = set(self.listPossibleHosts())
hosts.add(new_host)
f = open(bd_hosts, 'w')
for h in hosts:
f.write(h+'\n')
def completer(self, prefix, **kwargs):
# bdlogging.activateFileLogging()
try:
params = vars(kwargs["parsed_args"])
if params['logging'] is True:
bdlogging.activateFileLogging()
logger.debug("in")
logger.debug("BDparser prefix " + str(prefix) + "\n")
for k, v in kwargs.items():
logger.debug("kwargs[" + str(k) + "] = " + str(v) + "\n")
logger.debug("dest " + str(vars(kwargs["action"])["dest"]) + "\n")
for k in params.keys():
if params[k] is None:
del params[k]
key = vars(kwargs["action"])["dest"]
logger.debug("key " + str(key) + "\n")
if (key == "BDconf"):
return self.listPossibleConf()
if 'BDconf' in params:
self.readConfFiles(params, params['BDconf'])
if 'host' in params:
self.readConfFile(params, params['host']+'.bd')
logger.debug("key " + str(key) + "\n")
for k in params.keys():
logger.debug("params = " + str(k))
if (key == "host"):
return self.listPossibleHosts()
if (key == "study"):
params["should_not_check_study"] = True
mybase = base.Base(**params)
return mybase.getSchemaList()
if (key == 'quantity'):
mybase = base.Base(**params)
myrun = run.Run(mybase)
return myrun.listQuantities()
if (key in self.admissible_params and
self.admissible_params[key] == ModuleType):
logger.debug(
"trying to complete module list for '{}'".format(key))
return self.listPossibleModules(params)
except Exception as e:
logger.debug(traceback.format_exc())
logger.debug(str(e))
return []
def listPossibleConf(self):
logger.debug("in")
files = []
for dir in ["./", os.path.expanduser("~/.blackdynamite")]:
for filename in os.listdir(dir):
fileName, fileExtension = os.path.splitext(filename)
if (fileExtension == ".bd"):
files.append(filename)
return files
return files
def readConfFiles(self, read_params, fnames):
logger.debug("in")
for f in fnames:
self.readConfFile(read_params, f)
def readConfFile(self, read_params, fname):
logger.debug("in")
logger.debug("readConfFileList {0}".format(self.readConfFileList))
if fname in self.readConfFileList:
return
self.readConfFileList.append(fname)
if type(fname) == list:
raise Exception(
'cannot use list in that function: ' + str(type(fname)))
pre_args = {}
for dir in ["./", os.path.expanduser("~/.blackdynamite")]:
fullpath = os.path.join(dir, fname)
if (os.path.isfile(fullpath)):
fname = fullpath
break
try:
with open(fname) as fh:
logger.debug("loading file '{0}'".format(fname))
os.chmod(fname, stat.S_IREAD | stat.S_IWRITE)
lines = [line.strip() for line in fh]
regex = "(.*)=(.*)"
for line in lines:
match = re.match(regex, line)
if (not match):
print("malformed line:" + line)
sys.exit(-1)
param = match.group(1).strip()
val = match.group(2).strip()
pre_args[param] = val
self.argv.append("--"+param)
self.argv.append(val)
logger.debug("read parameters: '{0}'".format(self.argv))
logger.debug("pre args : '{0}'".format(pre_args))
read_params.update(self.createParamsMap(pre_args))
except Exception as e:
logger.debug("cannot open file " + fname + '\n' +
str(e) + '\n' + str(traceback.format_exc()))
logger.debug("out")
def checkParam(self, p, dico):
logger.debug("in")
print("****************")
print("Obsolete: should use the mandatory argument"
" for the declare_params function")
print("It was used by object " + str(self) + " for keyword " + p)
print("FATAL => ABORT")
print("****************")
sys.exit(-1)
def loadModule(self, read_params, myscript, pre_args):
logger.debug("in")
paths = []
if ("PYTHONPATH" in os.environ):
paths = os.environ["PYTHONPATH"].split(':')
if ("module_path" in pre_args):
paths += pre_args["module_path"].split(':')
paths += BD_path
paths += [path + "/coating" for path in BD_path]
mymod = None
for p in paths:
try:
modfile = os.path.join(p, myscript+".py")
# print("loading file " + modfile)
mymod = imp.load_source(myscript, modfile)
break
except IOError as io_err:
logger.debug("loadModule " + str(io_err))
logger.debug("loadModule " + str(mymod))
if (mymod is None):
logger.debug("cannot find module '" + myscript +
"' from paths " + str(paths))
logger.debug("trace :" + traceback.format_exc() + '\n')
raise Exception("cannot find module '" +
myscript + "' from paths " + str(paths))
return mymod
def createParamsMap(self, pre_args):
logger.debug("in")
read_params = {}
if ('logging' in pre_args) and (pre_args['logging'] is True):
bdlogging.activateFileLogging()
for opt, args in pre_args.items():
logger.debug("createParamsMap1 " + str(opt) + " : " + str(args))
if (args is None):
continue
if (not type(args) == list):
args = [args]
if (type(args) == str):
args = [args]
logger.debug("createParamsMap2 " + str(opt) + " : " + str(args))
for arg in args:
if (arg is None):
continue
if (opt == 'BDconf'):
if (not type(arg) == list):
arg = [arg]
self.readConfFiles(read_params, arg)
continue
if (opt == 'host'):
self.updatePossibleHosts(arg)
self.readConfFile(read_params, arg+'.bd')
for param, typ in self.admissible_params.items():
if opt == param:
logger.debug("createParamsMap3 " +
str(param) + " : " + str(typ))
if (typ == ModuleType):
read_params[param] = self.loadModule(
read_params, arg, pre_args)
logger.debug("createParamsMap4 " +
str(param) + " : " + str(typ))
elif (type(typ) == list):
args = arg.split(",")
if (param not in read_params):
read_params[param] = []
if (not len(typ) == 1):
subtype = str
else:
subtype = typ[0]
for i in range(0, len(args)):
read_params[param].append(subtype(args[i]))
else:
read_params[param] = typ(arg)
break
return read_params
def addModulesAdmissibleParameters(self, read_params):
logger.debug("in")
for k, v in read_params.items():
if self.admissible_params[k] == ModuleType:
mymod = read_params[k]
modname = mymod.__name__
if "admissible_params" in mymod.__dict__:
self.admissible_params.update(
mymod.__dict__["admissible_params"])
self.group_params["'" + modname + "' module options"] = (
mymod.__dict__["admissible_params"].keys())
if "default_params" in mymod.__dict__:
self.default_params.update(
mymod.__dict__["default_params"])
if "help" in mymod.__dict__:
self.help.update(mymod.__dict__["help"])
if "mandatory" in mymod.__dict__:
self.mandatory.update(mymod.__dict__["mandatory"])
logger.debug(str(self.admissible_params))
def addModulesAdmissibleParametersForComplete(self, read_params):
logger.debug("in")
if "_ARGCOMPLETE" not in os.environ:
return
logger.debug("arg complete ? " + os.environ["_ARGCOMPLETE"])
# breaks = os.environ["COMP_WORDBREAKS"]
tmp_read_params = {}
breaks = " |=|&|<|>|;"
logger.debug("break line " + os.environ["COMP_LINE"])
all_args = re.split(breaks, os.environ["COMP_LINE"])
logger.debug("break line " + str(all_args))
for i in range(0, len(all_args)):
a = all_args[i]
res = re.match("--(.*)", a)
if res is None:
continue
a = res.group(1)
logger.debug("treating a " + str(a))
if a in self.admissible_params:
if self.admissible_params[a] == ModuleType:
logger.debug("here treating a " + str(a))
if i+1 >= len(all_args):
continue
b = all_args[i+1]
logger.debug("treating b " + str(b))
res = re.match("--(.*)", b)
if res is not None:
continue
if not b.strip() == '':
tmp_read_params[a] = b
if ("module_path" in read_params):
tmp_read_params["module_path"] = read_params["module_path"]
logger.debug("tmp_read_params " + str(tmp_read_params))
try:
tmp_read_params = self.createParamsMap(tmp_read_params)
except Exception as e:
logger.debug("trace :" + traceback.format_exc() + '\n' + str(e))
logger.debug("AAAAAAAAA " + str(tmp_read_params))
try:
self.addModulesAdmissibleParameters(tmp_read_params)
except Exception as e:
logger.debug("trace :" + traceback.format_exc() + '\n' + str(e))
logger.debug("CCCCCCCCCC" + str(self.admissible_params))
def constructArgParser(self, add_help=True, add_mandatory=True):
logger.debug("in")
parser = argparse.ArgumentParser(
description="BlackDynamite option parser",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
add_help=add_help)
self.params_group = {}
group = parser.add_argument_group("General")
self.params_group["General"] = group
for g, param_list in self.group_params.items():
group = parser.add_argument_group(g)
for p in param_list:
self.params_group[p] = group
for param, typ in self.admissible_params.items():
# print("param {0}: {1}".format(param,typ) )
p_help = "help TODO"
is_mandatory = (param in self.mandatory.keys() and
self.mandatory[param] is True and
add_mandatory)
if (param in self.help):
p_help = self.help[param]
if (param in self.params_group):
grp = self.params_group[param]
else:
grp = self.params_group["General"]
if (typ is None):
raise Exception("Deprectated option type for " +
param + " : should be changed to 'bool'")
if (typ is bool):
if (param in self.default_params and
self.default_params[param] is True):
grp.add_argument("--" + param,
help=p_help,
dest=param,
action='store_false',
required=is_mandatory)
else:
grp.add_argument("--" + param,
help=p_help,
dest=param,
action='store_true',
required=is_mandatory)
elif (typ is list or typ == [str]):
grp.add_argument(
"--" + param,
action='append',
dest=param,
help=p_help,
required=is_mandatory).completer = self.completer
else:
grp.add_argument(
"--" + param,
dest=param,
help=p_help,
required=is_mandatory).completer = self.completer
parser.set_defaults(**self.default_params)
return parser
def register_params(self, group="General", params=None, defaults=None,
help=None, mandatory=None):
logger.debug("in")
if (params is not None):
self.admissible_params.update(params)
if group not in self.group_params:
self.group_params[group] = []
self.group_params[group] += params.keys()
if (defaults is not None):
self.default_params.update(defaults)
for key in defaults.keys():
self.mandatory[key] = False
if (help is not None):
self.help.update(help)
if (mandatory is not None):
self.mandatory.update(mandatory)
for param, typ in self.admissible_params.items():
if typ == bool and param not in self.default_params:
self.default_params[param] = False
def addEnvBDArguments(self, parser):
logger.debug("in")
parser = self.constructArgParser(add_help=False, add_mandatory=False)
pre_args = vars(parser.parse_known_args(args=self.argv)[0])
for name, value in os.environ.items():
m = re.match("BLACKDYNAMITE_(.*)", name)
if (m):
var = m.group(1).lower()
if (var not in pre_args or pre_args[var] is None):
if (var in self.admissible_params):
self.argv.append("--" + var)
self.argv.append(value)
def parseBDParameters(self, argv=None):
logger.debug("in")
if argv is None:
self.argv = list(sys.argv[1:])
else:
self.argv = list(argv)
logger.debug("program called with " +
str(len(self.argv)) +
" args " + str(self.argv) + "\n")
logger.debug("env is\n\n")
for k, v in os.environ.items():
logger.debug("export " + k + "='" + v + "'\n")
logger.debug("constructArgParser\n")
parser = self.constructArgParser(add_help=False, add_mandatory=False)
self.addEnvBDArguments(parser)
logger.debug("parse_known_args\n")
pre_args = parser.parse_known_args(args=self.argv)[0]
logger.debug("createParamsMap\n")
read_params = self.createParamsMap(vars(pre_args))
logger.debug("addModuleAdmissibleParameters\n")
self.addModulesAdmissibleParameters(read_params)
logger.debug("addModulesAdmissibleParametersForComplete\n")
try:
self.addModulesAdmissibleParametersForComplete(read_params)
except KeyError as e:
logger.debug("trace :" + traceback.format_exc())
logger.debug("constructArgParser\n")
parser = self.constructArgParser()
argcomplete.autocomplete(parser)
pre_args = parser.parse_args(args=self.argv)
read_params = self.createParamsMap(vars(pre_args))
if "user" not in read_params:
read_params["user"] = pwd.getpwuid(os.getuid())[0]
return read_params
def __init__(self):
    """Declare the core BlackDynamite options: parameter types, help
    strings, defaults, mandatory flags and the option group layout."""
    logger.debug("in")
    self.admissible_params = {}
    self.help = {}
    self.default_params = {}
    self.group_params = {}
    self.mandatory = {}
    # --- database connection / study selection options ---
    self.admissible_params["study"] = str
    self.help["study"] = (
        "Specify the study from the BlackDynamite database"
        ". This refers to the schemas in PostgreSQL language")
    self.admissible_params["host"] = str
    self.help["host"] = "Specify data base server address"
    self.admissible_params["port"] = int
    self.help["port"] = "Specify data base server port"
    self.admissible_params["user"] = str
    self.help["user"] = "Specify user name to connect to data base server"
    self.admissible_params["password"] = str
    self.help["password"] = "Provides the password"
    self.admissible_params["BDconf"] = list
    self.help["BDconf"] = (
        "Path to a BlackDynamite file (*.bd) configuring current optons")
    # --- behavior flags ---
    self.admissible_params["truerun"] = bool
    self.help["truerun"] = (
        "Set this flag if you want to truly perform the"
        " action on base. If not set all action are mainly dryrun")
    self.default_params["truerun"] = False
    # [str] marks a repeatable option
    self.admissible_params["constraints"] = [str]
    self.help["constraints"] = (
        "This allows to constraint run/job selections by properties")
    self.default_params["constraints"] = None
    self.admissible_params["binary_operator"] = str
    self.default_params["binary_operator"] = 'and'
    self.help["binary_operator"] = (
        'Set the default binary operator to make requests to database')
    self.admissible_params["list_parameters"] = bool
    self.help["list_parameters"] = (
        "Request to list the possible job/run parameters")
    self.admissible_params["yes"] = bool
    self.default_params["yes"] = False
    self.help["yes"] = "Answer all questions to yes."
    self.admissible_params["logging"] = bool
    self.help["logging"] = "Activate the file logging system"
    # option grouping (used when rendering help)
    # NOTE(review): job_constraints/run_constraints appear here but are
    # not declared in admissible_params above — presumably added by
    # modules/subclasses; confirm.
    self.group_params["BDParser"] = ["study",
                                    "host",
                                    "port",
                                    "user",
                                    "password",
                                    "BDconf",
                                    "truerun",
                                    "constraints",
                                    "job_constraints",
                                    "run_constraints",
                                    "list_parameters"]
    self.readConfFileList = []
################################################################
def validate_question(question, params, default_validated=True):
    """Ask a yes/no *question* on stdin and return the answer as bool.

    When params['yes'] is set the prompt is skipped and True is
    returned. An empty answer yields *default_validated*; only
    'Y'/'y' count as an explicit yes, anything else is a no.
    """
    logger.debug("in")
    default_str = "(Y/n)" if default_validated else "(y/N)"
    if params["yes"] is not False:
        # --yes given: do not prompt, force acceptance
        logger.info("{0}? {1} Forced Y".format(question, default_str))
        return True
    answer = input("{0}? {1} ".format(question, default_str))
    if answer in ("", "\n"):
        return default_validated
    return answer in ("Y", "y")
################################################################
def filterParams(sub_list, total_list):
    """Project *total_list* onto the keys of *sub_list*, dropping
    entries whose value is exactly False."""
    logger.debug("in")
    return {
        key: total_list[key]
        for key in sub_list
        if key in total_list and total_list[key] is not False
    }
################################################################
class RunParser(BDParser):
    """BDParser specialized for run creation: requires machine_name,
    nproc and run_name, and sanitizes the run name."""

    def parseBDParameters(self):
        """Parse parameters, replacing any whitespace in run_name by '_'."""
        logger.debug("in")
        params = BDParser.parseBDParameters(self)
        # bug fix: raw string — '\s' in a normal literal is an invalid
        # escape sequence (DeprecationWarning, SyntaxError in future Python)
        params['run_name'], nb_subs = re.subn(r'\s', '_', params['run_name'])
        return params

    def __init__(self):
        logger.debug("in")
        BDParser.__init__(self)
        self.mandatory["machine_name"] = True
        self.mandatory["nproc"] = True
        self.mandatory["run_name"] = True
        self.admissible_params["machine_name"] = str
        self.help["machine_name"] = ("Specify the name of the machine where"
                                     " the job is to be launched")
        self.admissible_params["nproc"] = int
        self.help["nproc"] = ("Specify the number of processors onto which"
                              " this run is supposed to be launched")
        self.admissible_params["run_name"] = str
        self.help["run_name"] = (
            'User friendly name given to this run.'
            ' This is usually helpful to recall a run kind')
        # NOTE(review): this reset discards the defaults installed by
        # BDParser.__init__ (truerun, yes, constraints, ...) — looks
        # intentional but verify against callers before changing.
        self.default_params = {}
        self.default_params["job_constraints"] = None
        self.group_params["RunParser"] = [
            "machine_name",
            "nproc",
            "run_name"]
################################################################
__all__ = ["BDParser", "RunParser"]
diff --git a/BlackDynamite/coating/slurmCoat.py b/BlackDynamite/coating/slurmCoat.py
index 2c3124e..7abfe84 100755
--- a/BlackDynamite/coating/slurmCoat.py
+++ b/BlackDynamite/coating/slurmCoat.py
@@ -1,122 +1,127 @@
#!/usr/bin/env python
from __future__ import print_function
from BlackDynamite import *
import os
import stat
import subprocess
import re
import socket
# Declaration of the SLURM "coating" plugin options, consumed by the
# BlackDynamite parser: option name -> expected type ([str] = repeatable).
admissible_params = {"walltime": str,
                     "email": str,
                     "nodes": int,
                     "module": [str],
                     "cpus-per-task": int,
                     "cpus-per-node": int,
                     "cwd": bool,
                     "slurm_option": [str]}
# Defaults applied when the option is not given on the command line.
default_params = {"walltime": "48:00:00",
                  "cwd": True,
                  "nodes": 1,
                  "cpus-per-task": 1}
# Help strings shown by the parser.
# NOTE(review): 'help' shadows the builtin of the same name; kept as-is
# because the plugin loader presumably looks up this exact module-level
# name — confirm before renaming.
help = {"walltime": "Specify the wall time for the runs",
        "email": "Specify the email to notify",
        "nodes": "Number of nodes for the job",
        "slurm_option": "Allow to provide additional SLURM options",
        "module": "List of modules to load",
        "cpus-per-node": "Number of CPU per nodes",
        "cpus-per-task": "Number of thread per MPI process",
        "cwd": "Run by default in the run folder"}
# Options the user must provide.
mandatory = {"cpus-per-node": True}
def launch(run, params):
_exec = run.getExecFile()
head = "#!/bin/bash\n\n"
head += "#SBATCH --time={0}\n".format(params["walltime"])
if ("email" in params):
head += "#SBATCH --mail-type=ALL\n"
head += "#SBATCH --mail-user={0}\n".format(params["email"])
slurm_head_name = "#SBATCH --job-name={0}_{1}\n".format(
run["run_name"], run.id)
head += slurm_head_name
run["state"] = "SLURM submit"
if ("nproc" in params):
run["nproc"] = params["nproc"]
nproc = run["nproc"]
try:
- nodes = max(nproc * params["cpus-per-task"] /
+ nodes = max(nproc * params["cpus-per-task"] //
params["cpus-per-node"], 1)
except Exception as e:
print(params.keys())
print(e)
raise e
head += "#SBATCH --nodes={0}\n".format(nodes)
head += "#SBATCH --ntasks={0}\n".format(nproc)
head += "#SBATCH --cpus-per-task={0}\n".format(params["cpus-per-task"])
- if ("slurm_option" in params):
- for i in params["slurm_option"]:
- head += "#SBATCH {0}\n".format(i)
+ if "slurm_option" in params:
+ for option in params["slurm_option"]:
+ # To get consistent behavior between --slurm_option=""
+ # and --slurm_option ""
+ m = re.match(r'^--(\S+)$', option)
+ if m:
+ option = m.group(1)
+ head += "#SBATCH --{}\n".format(option)
if (params["cwd"]):
head += "#SBATCH --workdir=__BLACKDYNAMITE__run_path__\n"
if ("module" in params):
head += "\nmodule purge\n"
for i in params["module"]:
head += "module load {0}\n".format(i)
run.update()
head += """
export BLACKDYNAMITE_HOST=__BLACKDYNAMITE__dbhost__
export BLACKDYNAMITE_SCHEMA=__BLACKDYNAMITE__study__
export BLACKDYNAMITE_STUDY=__BLACKDYNAMITE__study__
export BLACKDYNAMITE_RUN_ID=__BLACKDYNAMITE__run_id__
export BLACKDYNAMITE_USER=""" + params["user"] + """
on_kill()
{
updateRuns.py --updates \"state = SLURM killed\" --truerun
exit 0
}
on_stop()
{
updateRuns.py --updates \"state = SLURM stopped\" --truerun
exit 0
}
# Execute function on_die() receiving TERM signal
#
trap on_stop SIGUSR1
trap on_stop SIGTERM
trap on_kill SIGUSR2
trap on_kill SIGKILL
"""
_exec["file"] = run.replaceBlackDynamiteVariables(head) + _exec["file"]
f = open(_exec["filename"], 'w')
f.write(_exec["file"])
f.close()
# os.chmod(_exec["filename"], stat.S_IRWXU)
print("execute sbatch ./" + _exec["filename"])
print("in dir ")
subprocess.call("pwd")
if params["truerun"] is True:
ret = subprocess.call("sbatch " + _exec["filename"], shell=True)
diff --git a/BlackDynamite/constraints.py b/BlackDynamite/constraints.py
index db9a273..f56e1d8 100644
--- a/BlackDynamite/constraints.py
+++ b/BlackDynamite/constraints.py
@@ -1,219 +1,220 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
################################################################
from __future__ import print_function
################################################################
from . import sqlobject
from . import job
from . import run
from . import bdlogging
import pyparsing as pp
-import logging
################################################################
print = bdlogging.invalidPrint
-logger = logging.getLogger(__name__)
+logger = bdlogging.getLogger(__name__)
################################################################
class BDconstraints(object):
    """Normalize user-supplied constraints and turn them into an SQL
    condition string plus a parameter list."""

    def __iadd__(self, constraints):
        # Accumulate constraints; coerce plain values into BDconstraints.
        if not isinstance(constraints, BDconstraints):
            constraints = BDconstraints(self.base, constraints)
        self.constraints += constraints.constraints
        return self

    def __init__(self, base, constraints):
        """Accept BDconstraints, a dict (keys 'constraints', 'run_id' or
        'job_id'), a single value or a list; store it as a list."""
        self.constraints = constraints
        self.base = base
        self.conditions = None
        if isinstance(constraints, BDconstraints):
            self.constraints = constraints.constraints
        elif isinstance(constraints, dict):
            # dict form: explicit constraint list or an id shortcut
            if "constraints" in constraints:
                self.constraints = constraints["constraints"]
            elif "run_id" in constraints:
                self.constraints = [
                    'runs.id = {0}'.format(constraints['run_id'])]
            elif "job_id" in constraints:
                self.constraints = [
                    'jobs.id = {0}'.format(constraints['job_id'])]
            else:
                self.constraints = []
        if not isinstance(self.constraints, list):
            self.constraints = [self.constraints]
        self.constraint_parser = BDconstraintsParser(self.base)

    def _pushCondition(self, _cond, _params):
        # AND-join successive conditions into self.conditions
        if self.conditions != '':
            self.conditions += ' and '
        self.conditions += _cond
        self.params += _params

    def pushConditionFromSQLObject(self, _cstr):
        """Turn the entries of an SQLObject into equality conditions."""
        _cond = []
        _params = []
        sql_obj = _cstr
        for k, v in sql_obj.entries.items():
            _cond.append('({0}.{1} = %s)'.format(
                sql_obj.table_name, k))
            _params.append(v)
        _cond = ' and '.join(_cond)
        self._pushCondition(_cond, _params)

    def pushConditionFromString(self, _cstr):
        """Parse a textual constraint through BDconstraintsParser."""
        _cond, _params = self.constraint_parser.parse(_cstr)
        self._pushCondition(_cond, _params)

    def getMatchingCondition(self):
        """Return (condition string, parameter list) for all constraints."""
        self.conditions = ""
        self.params = []
        for _cstr in self.constraints:
            if isinstance(_cstr, str):
                self.pushConditionFromString(_cstr)
            if isinstance(_cstr, sqlobject.SQLObject):
                self.pushConditionFromSQLObject(_cstr)
        logger.debug(self.conditions)
        logger.debug(self.params)
        return self.conditions, self.params
################################################################
class BDconstraintsParser(object):
def __init__(self, base):
self.base = base
self._params = []
self.ref_run = run.Run(self.base)
self.ref_job = job.Job(self.base)
# rule for entry in the sqlobject
var = pp.Word(pp.alphanums+'_')
prefix = (pp.Literal('runs') | pp.Literal('jobs')) + pp.Literal('.')
entry = pp.Optional(prefix) + var
def check_varname(tokens):
- # print (tokens)
+ # print(tokens)
res = pp.ParseResults(''.join(tokens))
+ logger.debug(res)
if len(tokens) == 3:
obj_type = tokens[0]
var_name = tokens[2].lower()
else:
obj_type = None
var_name = tokens[0].lower()
if obj_type is None:
job_var = var_name in self.ref_job.types
run_var = var_name in self.ref_run.types
if job_var and run_var:
raise RuntimeError(
'ambiguous variable: {0}\n{1}'.format(
res, self.base.getPossibleParameters()))
if job_var:
res.type = self.ref_job.types[var_name]
elif run_var:
res.type = self.ref_run.types[var_name]
else:
raise RuntimeError(
'unknown variable: {0}\n{1}'.format(
res[0], self.base.getPossibleParameters()))
else:
if obj_type == 'runs':
ref_obj = self.ref_run
elif obj_type == 'jobs':
ref_obj = self.ref_job
if var_name not in ref_obj.types:
raise RuntimeError(
'unknown variable: "{0}"\n{1}'.format(
var_name, ref_obj.types))
res.type = ref_obj.types[var_name]
return res
entry = entry.setParseAction(check_varname)
# rule to parse the operators
operators = [
- '+', # addition 2 + 3 5
- '-', # subtraction 2 - 3 -1
- '*', # multiplication 2 * 3 6
- '/', # division (integer division truncates the result)
- '%', # modulo (remainder) 5 % 4 1
- '^', # exponentiation 2.0 ^ 3.0 8
+ # '+', # addition 2 + 3 5
+ # '-', # subtraction 2 - 3 -1
+ # '*', # multiplication 2 * 3 6
+ # '/', # division (integer division truncates the result)
+ # '%', # modulo (remainder) 5 % 4 1
+ # '^', # exponentiation 2.0 ^ 3.0 8
'<', # less than
'>', # greater than
'<=', # less than or equal to
'>=', # greater than or equal to
'=', # equal
'!=', # not equal
'~', # Matches regular expression, case sensitive
'~*', # Matches regular expression, case insensitive
'!~', # Does not match regular expression, case sensitive
'!~*' # Does not match regular expression, case insensitive
]
ops = pp.Literal(operators[0])
for o in operators[1:]:
ops |= pp.Literal(o)
# parse a constraint of the form 'var operator value' and flatten it
- constraint = pp.Group(entry + ops + pp.Word(pp.alphanums+'.'))
+ constraint = pp.Group(entry + ops + pp.Word(pp.alphanums+'._'))
def regroup_constraints(tokens):
expected_type = tokens[0].type
key = tokens[0][0]
op = tokens[0][1]
val = tokens[0][2]
try:
parse_res = entry.parseString(val)
if parse_res.type != expected_type:
raise RuntimeError('no the correct type')
val = parse_res[0]
- except:
+ except Exception:
self._params.append(val)
val = '%s'
res = ('(' +
' '.join([str(key), str(op), str(val)]) + ')')
return res
constraint = constraint.setParseAction(regroup_constraints)
separator = (pp.Literal(',').setParseAction(
lambda tokens: 'and') | pp.Literal('and'))
self.constraints = (constraint + pp.Optional(
pp.OneOrMore(separator + constraint))).setParseAction(
lambda tokens: ' '.join(tokens))
def parse(self, _str):
self._params = []
+ logger.debug(_str)
res = self.constraints.parseString(_str)
res = ' '.join(res)
return res, self._params
diff --git a/BlackDynamite/run.py b/BlackDynamite/run.py
index deaebc8..3df54d2 100755
--- a/BlackDynamite/run.py
+++ b/BlackDynamite/run.py
@@ -1,461 +1,464 @@
#!/usr/bin/env python3
from __future__ import print_function
################################################################
from . import job
from . import runconfig
from . import conffile
from . import sqlobject
from . import bdparser
from . import base
from . import runselector
from . import bdlogging
import sys
import re
import numpy as np
import datetime
-import logging
import subprocess
import socket
+import os
################################################################
__all__ = ['Run', 'getRunFromScript']
print = bdlogging.invalidPrint
-logger = logging.getLogger(__name__)
+logger = bdlogging.getLogger(__name__)
################################################################
class Run(sqlobject.SQLObject):
"""
"""
table_name = 'runs'
def getJob(self):
    """Return the Job object this run is attached to (via job_id)."""
    return self.base.getJobFromID(self.entries["job_id"])
def start(self):
    """Flag the run as started ('START') and commit the state change."""
    logger.debug('starting run')
    self["state"] = 'START'
    self.update()
    logger.debug('update done')
    self.base.commit()
    logger.debug('commited')
def finish(self):
    """Flag the run as finished ('FINISHED') and commit the state change."""
    logger.debug('finish run')
    self["state"] = 'FINISHED'
    self.update()
    logger.debug('update done')
    self.base.commit()
    logger.debug('commited')
def attachToJob(self, job):
    """Insert this run in the database, bound to *job*, then register
    the exec file and every attached config file."""
    self["job_id"] = job.id
    self.base.insert(self)
    self.addConfigFile(self.execfile)
    for cnffile in self.configfiles:
        self.addConfigFile(cnffile)
def getExecFile(self):
    """Return the exec config file, BD variables substituted."""
    return self.getUpdatedConfigFile(self.entries["exec"])
def setExecFile(self, file_name, **kwargs):
    """Register *file_name* as this run's exec script and return its
    config-file id, reusing an already attached file when possible."""
    # already attached? then just point 'exec' at it
    for attached in self.configfiles:
        if attached.entries["filename"] == file_name:
            self.execfile = attached
            self.entries["exec"] = attached.id
            return attached.id
    # otherwise register the file and attach it
    new_conf = conffile.addFile(file_name, self.base, **kwargs)
    self.configfiles.append(new_conf)
    self.execfile = new_conf
    self.entries["exec"] = new_conf.id
    return new_conf.id
- def listFiles(self):
- command = 'ls {0}'.format(self['run_path'])
+ def listFiles(self, subdir=""):
+ """List files in run directory / specified sub-directory"""
+ command = 'ls {0}'.format(os.path.join(self['run_path'], subdir))
if not self['machine_name'] == socket.gethostname():
command = 'ssh {0} "{1}"'.format(self['machine_name'], command)
- logger.warning(command)
+ logger.info(command)
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
out = p.stdout.readlines()
out = [o.strip() for o in out]
return out
def getFile(self, filename, outpath='/tmp'):
- import subprocess
- import os
-
dest_path = os.path.join(
outpath, "BD-" + self.base.schema + "-cache",
"run-{0}".format(self.id))
dest_file = os.path.join(dest_path, filename)
if self['machine_name'] == socket.gethostname():
return self.getFullFileName(filename)
- # logger.warning(dest_path)
- # logger.warning(dest_file)
+
+ # In case filename contains sub-directories
+ dest_path = os.path.dirname(dest_file)
+
+ logger.info(dest_path)
+ logger.info(dest_file)
+
+ # Making directories
try:
- os.makedirs(dest_path)
+ os.makedirs(dest_path, exist_ok=True)
except Exception as e:
- # logger.error(e)
+ logger.error(e)
pass
if os.path.isfile(dest_file):
return dest_file
cmd = 'scp {0}:{1} {2}'.format(self['machine_name'],
self.getFullFileName(filename),
dest_file)
- logger.warning(cmd)
+ logger.info(cmd)
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
- logger.warning(p.stdout.read())
+ logger.info(p.stdout.read())
return dest_file
def getFullFileName(self, filename):
- import os
return os.path.join(self['run_path'], filename)
def addConfigFiles(self, file_list, regex_params=None):
if not type(file_list) == list:
file_list = [file_list]
- params_list = self.types.keys()
+ params_list = list(self.types.keys())
myjob = job.Job(self.base)
- params_list += myjob.types.keys()
+ params_list += list(myjob.types.keys())
# logger.debug (regex_params)
file_ids = [f.id for f in self.configfiles]
files_to_add = [
conffile.addFile(
fname, self.base,
regex_params=regex_params,
params=params_list)
for fname in file_list]
for f in files_to_add:
if (f.id not in file_ids):
self.configfiles.append(f)
return self.configfiles
def addConfigFile(self, configfile):
    """Attach *configfile* to this run through a RunConfig link row."""
    myrun_config = runconfig.RunConfig(self.base)
    myrun_config.attachToRun(self)
    myrun_config.addConfigFile(configfile)
    self.base.insert(myrun_config)
def getConfigFiles(self):
    """Return the config files attached to this run, with
    BlackDynamite variables substituted in their contents."""
    link = runconfig.RunConfig(self.base)
    link["run_id"] = self.id
    matches = link.getMatchedObjectList()
    logger.info(matches)
    return [self.getUpdatedConfigFile(m["configfile_id"])
            for m in matches]
def getConfigFile(self, file_id):
    """Fetch a ConfFile object by database id (first match)."""
    handle = conffile.ConfFile(self.base)
    handle["id"] = file_id
    return handle.getMatchedObjectList()[0]
def replaceBlackDynamiteVariables(self, text):
    """Substitute __BLACKDYNAMITE__<key>__ placeholders in *text*.

    Job entries, run entries, and the special dbhost/study/run_id
    markers are replaced. Raises when a placeholder matched but the
    corresponding value is unset (None).
    """
    myjob = job.Job(self.base)
    myjob["id"] = self.entries["job_id"]
    myjob = myjob.getMatchedObjectList()[0]
    for key, val in myjob.entries.items():
        tmp = text.replace("__BLACKDYNAMITE__" + key + "__",
                           str(val))
        # a replacement happened while the value is unset -> error
        if ((not tmp == text) and val is None):
            raise Exception("unset job parameter " + key)
        text = tmp
    for key, val in self.entries.items():
        tmp = text.replace("__BLACKDYNAMITE__" + key + "__",
                           str(val))
        if ((not tmp == text) and val is None):
            logger.debug(self.entries)
            raise Exception("unset run parameter " + key)
        text = tmp
    text = text.replace("__BLACKDYNAMITE__dbhost__",
                        self.base.dbhost)
    text = text.replace("__BLACKDYNAMITE__study__",
                        self.base.schema)
    text = text.replace("__BLACKDYNAMITE__run_id__",
                        str(self.id))
    return text
def getUpdatedConfigFile(self, file_id):
    """Fetch config file *file_id*, substituting BD variables in its content."""
    conf = self.getConfigFile(file_id)
    conf["file"] = self.replaceBlackDynamiteVariables(conf["file"])
    return conf
def listQuantities(self):
    """Return the names of all quantities declared in the study schema."""
    request = "SELECT id,name FROM {0}.quantities".format(self.base.schema)
    cursor = self.base.performRequest(request)
    return [row[1] for row in cursor]
def getScalarQuantities(self, names, additional_request=None):
    """Fetch every scalar quantity matching *names* for this run.

    names: a name, a list of names, or a falsy value for "all scalar
    quantities". A leading '~' in a name requests a regex match
    instead of strict equality. Exits the process when nothing
    matches. Returns a list of [name, ndarray-of-(step, value)] pairs.
    """
    request = ("SELECT id,name FROM {0}.quantities "
               "WHERE (is_vector) = (false)").format(
        self.base.schema)
    params = []
    if (names):
        if (not type(names) == list):
            names = [names]
        request += " and ("
        for name in names:
            # '~ foo' selects by regular expression
            similar_op_match = re.match(r"\s*(~)\s*(.*)", name)
            op = " = "
            if (similar_op_match):
                op = " ~ "
                name = similar_op_match.group(2)
            request += " name " + op + "%s or"
            params.append(str(name))
        # strip the trailing ' or'
        request = request[:len(request)-3] + ")"
    curs = self.base.performRequest(request, params)
    quantities = [res[1] for res in curs]
    if (len(quantities) == 0):
        logger.debug("No quantity matches " + str(names))
        logger.debug("Quantities declared in the database are \n" +
                     "\n".join(
                         [res for res in self.listQuantities()]))
        sys.exit(-1)
        # (unreachable 'return None' removed: sys.exit raises SystemExit)
    try:
        # keep the user-given ordering when every name matched exactly
        quant_indexes = [quantities.index(n) for n in names]
        logger.debug(quant_indexes)
        quantities = names
    except (ValueError, TypeError):
        # bug fix: was a bare 'except:'. ValueError: a regex-matched
        # name is absent from 'names'; TypeError: names was falsy
        # (None) and is not iterable. Keep database ordering then.
        pass
    results = []
    for key in quantities:
        q = self.getScalarQuantity(key, additional_request)
        if (q is not None):
            results.append([key, q])
            logger.debug("got Quantity " + str(key) + " : " +
                         str(q.shape[0]) + " values")
    return results
def getLastStep(self):
    """Return (max step, latest computed_at) over the four measurement
    tables for this run, or None when the request yields no row."""
    request = """SELECT max(b.max),max(b.time) from (
SELECT max(step) as max,max(computed_at) as time
from {0}.scalar_integer where run_id = {1}
union
SELECT max(step) as max,max(computed_at)
as time from {0}.scalar_real where run_id = {1}
union
SELECT max(step) as max,max(computed_at)
as time from {0}.vector_integer where run_id = {1}
union
SELECT max(step) as max,max(computed_at) as time
from {0}.vector_real where run_id = {1}) as b
""".format(self.base.schema, self.id)
    curs = self.base.performRequest(request, [])
    item = curs.fetchone()
    if (item is not None):
        return item[0], item[1]
def getQuantityID(self, name, is_integer=None, is_vector=None):
    """Look up quantity *name*; return (id, is_integer, is_vector).

    Raises when the quantity is unknown, or when the stored flags
    contradict the expected is_integer/is_vector values (if given).
    """
    request = """
SELECT id,is_integer,is_vector FROM {0}.quantities WHERE (name) = (%s)
""".format(self.base.schema)
    row = self.base.performRequest(request, [name]).fetchone()
    if row is None:
        raise Exception("unknown quantity \"" + name + "\"")
    if is_integer is not None and is_integer != row[1]:
        raise Exception("quantity \"" + name + "\" has is_integer = " +
                        str(row[1]))
    if is_vector is not None and is_vector != row[2]:
        raise Exception("quantity \"" + name + "\" has is_vector = " +
                        str(row[2]))
    return row[0], row[1], row[2]
def getScalarQuantity(self, name, additional_request=None):
    """Return an ndarray of (step, measurement) rows for scalar *name*.

    additional_request: optional list of extra SQL conditions (ANDed).
    Returns None when no measurement exists; raises for vector quantities.
    """
    quantity_id, is_integer, is_vector = self.getQuantityID(name)
    if is_vector is True:
        raise Exception("Quantity " + name + " is not scalar")
    request = """
SELECT step,measurement from {0}.{1} WHERE (run_id,quantity_id) = ({2},{3})
""".format(self.base.schema,
           "scalar_real" if (is_integer is False)
           else "scalar_integer",
           self.id,
           quantity_id)
    if (additional_request):
        request += " and " + " and ".join(additional_request)
    request += " ORDER BY step"
    # NOTE(review): [name] is passed although the query carries no %s
    # placeholder — presumably ignored by performRequest; confirm.
    curs = self.base.performRequest(request, [name])
    fetch = curs.fetchall()
    if (not fetch):
        return None
    return (np.array([(val[0], val[1]) for val in fetch]))
def getVectorQuantity(self, name, step):
    """Return the vector measurement of *name* at *step* as an ndarray,
    or None when absent. Raises for scalar quantities."""
    quantity_id, is_integer, is_vector = self.getQuantityID(name)
    if (is_vector is False):
        raise Exception("Quantity " + name + " is not vectorial")
    request = """
SELECT measurement from {0}.{1} WHERE (run_id,quantity_id,step) = ({2},{3},{4})
""".format(self.base.schema, "vector_real"
           if (is_integer is False)
           else "vector_integer",
           self.id, quantity_id, step)
    # NOTE(review): [name] passed without a matching placeholder — confirm
    curs = self.base.performRequest(request, [name])
    fetch = curs.fetchone()
    if (fetch):
        return np.array(fetch[0])
    return None
def pushVectorQuantity(self, vec, step, name,
                       is_integer, description=None):
    """Store vector *vec* as quantity *name* at *step* and commit.

    Creates the quantity on the fly when it is not declared yet;
    is_integer selects the integer/real storage table.
    """
    logger.debug('pushing {0}'.format(name))
    try:
        quantity_id, is_integer, is_vector = self.getQuantityID(
            name, is_integer=is_integer, is_vector=True)
    except Exception as e:
        # quantity unknown: declare it now
        # NOTE(review): this branch also fires on a flag mismatch
        # (wrong is_integer/is_vector) — confirm that is intended.
        typecode = "int"
        if is_integer is False:
            typecode = "float"
        typecode += ".vector"
        quantity_id = self.base.pushQuantity(name, typecode, description)
    array = [i for i in vec]
    # if is_integer is True:
    #     array_format = ",".join(["{:d}".format(i) for i in vec])
    request = """
INSERT INTO {0}.{1} (run_id,quantity_id,measurement,step) VALUES (%s,%s,%s,%s)
""".format(self.base.schema, "vector_real"
           if is_integer is False
           else "vector_integer")
    curs = self.base.performRequest(request, [self.id, quantity_id,
                                              array, step])
    logger.debug(curs)
    logger.debug('ready to commit')
    self.base.commit()
    logger.debug('commited')
def pushScalarQuantity(self, val, step, name,
                       is_integer, description=None):
    """Store scalar *val* as quantity *name* at *step* and commit.

    Creates the quantity on the fly when it is not declared yet;
    is_integer selects the integer/real storage table.
    """
    logger.debug('pushing {0}'.format(name))
    try:
        quantity_id, is_integer, is_vector = self.getQuantityID(
            name, is_integer=is_integer, is_vector=False)
    except Exception as e:
        # quantity unknown: declare it now (see pushVectorQuantity note)
        typecode = "int"
        if is_integer is False:
            typecode = "float"
        quantity_id = self.base.pushQuantity(name, typecode, description)
    request = """
INSERT INTO {0}.{1} (run_id,quantity_id,measurement,step) VALUES (%s,%s,%s,%s)
""".format(self.base.schema, "scalar_real"
           if is_integer is False
           else "scalar_integer")
    curs = self.base.performRequest(request,
                                    [self.id, quantity_id, val, step])
    logger.debug(curs)
    logger.debug('ready to commit')
    self.base.commit()
    logger.debug('commited')
def getAllVectorQuantity(self, name):
    """Return (steps, values) arrays for every stored step of vector
    quantity *name*, or [None, None] when nothing is stored."""
    quantity_id, is_integer, is_vector = self.getQuantityID(
        name, is_vector=True)
    table = "vector_real" if is_integer is False else "vector_integer"
    request = """
SELECT step,measurement from {0}.{1}
WHERE (run_id,quantity_id) = ({2},{3}) order by step
""".format(self.base.schema, table, self.id, quantity_id)
    rows = self.base.performRequest(request, [name]).fetchall()
    if not rows:
        return [None, None]
    steps = np.array([row[0] for row in rows])
    values = np.array([row[1] for row in rows])
    return (steps, values)
def deleteData(self):
    """Remove every measurement of this run from the four data tables."""
    tables = ("scalar_real", "scalar_integer",
              "vector_real", "vector_integer")
    for table in tables:
        request = "DELETE FROM {0}.{1} WHERE run_id={2}".format(
            self.base.schema, table, self.id)
        self.base.performRequest(request, [])
def __init__(self, base):
    """Declare the 'runs' table schema: column types and NULLability."""
    sqlobject.SQLObject.__init__(self, base)
    self.foreign_keys["job_id"] = "jobs"
    self.types["machine_name"] = str
    self.types["run_path"] = str
    self.allowNull["run_path"] = True
    self.types["job_id"] = int
    self.types["nproc"] = int
    self.types["run_name"] = str
    self.types["wait_id"] = int
    self.allowNull["wait_id"] = True
    self.types["start_time"] = datetime.datetime
    self.allowNull["start_time"] = True
    self.types["state"] = str
    self.allowNull["state"] = True
    # in-memory only: exec script handle and attached config files
    self.execfile = None
    self.configfiles = []
    self.types["exec"] = str
################################################################
def getRunFromScript():
parser = bdparser.BDParser()
parser.register_params(params={"run_id": int})
params = parser.parseBDParameters(argv=[])
mybase = base.Base(**params)
runSelector = runselector.RunSelector(mybase)
- run_list = runSelector.selectRuns(params, params)
+ run_list = runSelector.selectRuns(params)
if len(run_list) > 1:
raise Exception('internal error')
if len(run_list) == 0:
raise Exception('internal error')
myrun, myjob = run_list[0]
# myrun.setEntries(params)
return myrun, myjob
diff --git a/BlackDynamite/selector.py b/BlackDynamite/selector.py
index bcd92fc..9132d09 100644
--- a/BlackDynamite/selector.py
+++ b/BlackDynamite/selector.py
@@ -1,91 +1,90 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# -*- py-which-shell: "python"; -*-
################################################################
from __future__ import print_function
################################################################
import copy
from . import constraints as BDcons
################################################################
-import logging
from . import bdlogging
################################################################
print = bdlogging.invalidPrint
-logger = logging.getLogger(__name__)
+logger = bdlogging.getLogger(__name__)
################################################################
class Selector(object):
    """Generic SELECT helper: builds requests from constraints and
    materializes result rows back into SQLObject instances."""

    def __init__(self, base):
        # base: Base object holding the connection and schema name
        self.base = base

    def buildList(self, curs, sqlobjs):
        """Convert cursor rows into (tuples of) SQLObject clones.

        sqlobjs: a template object/class or a list of them; classes are
        instantiated first. Each row is split among the templates
        according to their column metadata.
        """
        logger.debug(sqlobjs)
        if not isinstance(sqlobjs, list):
            sqlobjs = [sqlobjs]
        col_infos = []
        sqlobjs2 = []
        for sqlobj in sqlobjs:
            if isinstance(sqlobj, type):
                # a class was passed: build a template instance
                sqlobj = sqlobj(self.base)
            sqlobjs2.append(sqlobj)
            col_infos.append(self.base.getColumnProperties(sqlobj))
        sqlobjs = sqlobjs2
        list_objects = []
        for entries in curs:
            objs = []
            offset = 0
            logger.debug(sqlobjs)
            for index, sqlobj in enumerate(sqlobjs):
                # one deepcopy per row and per template
                obj = copy.deepcopy(sqlobj)
                for col_name, size in col_infos[index]:
                    logger.debug((col_name, entries[offset]))
                    obj[col_name] = entries[offset]
                    offset += 1
                objs.append(obj)
            if len(objs) == 1:
                list_objects.append(objs[0])
            else:
                list_objects.append(tuple(objs))
        return list_objects

    def select(self, _types, constraints=None, sort_by=None):
        """SELECT * from the tables of *_types* under *constraints*.

        sort_by: optional ORDER BY clause (string).
        NOTE(review): sort_by is concatenated into the SQL unescaped —
        acceptable for a trusted CLI, not for untrusted input.
        """
        if (sort_by is not None) and (not isinstance(sort_by, str)):
            raise RuntimeError(
                'sort_by argument is not correct: {0}'.format(sort_by))
        const = BDcons.BDconstraints(self.base, constraints)
        condition, params = const.getMatchingCondition()
        if not isinstance(_types, list):
            _types = [_types]
        selected_tables = ['{0}.{1}'.format(self.base.schema, t.table_name)
                           for t in _types]
        selected_tables = ','.join(selected_tables)
        request = "SELECT * FROM {0}".format(selected_tables)
        if condition:
            request += " WHERE " + condition
        if sort_by:
            request += " ORDER BY " + sort_by
        logger.debug(request)
        logger.debug(params)
        curs = self.base.performRequest(request, params)
        obj_list = self.buildList(curs, _types)
        return obj_list
diff --git a/BlackDynamite/sqlobject.py b/BlackDynamite/sqlobject.py
index b679b35..6644879 100644
--- a/BlackDynamite/sqlobject.py
+++ b/BlackDynamite/sqlobject.py
@@ -1,232 +1,232 @@
#!/usr/bin/env python3
################################################################
from __future__ import print_function
################################################################
import copy
import datetime
import psycopg2
import re
import sys
################################################################
class LowerCaseDict(object):
    """Mapping with case-insensitive string keys.

    Keys are lowercased on every access. Attribute reads/writes are
    routed to the entries dict when the lowercased name is a known
    entry; otherwise they behave as plain attributes.
    """

    def __init__(self):
        self.entries = {}

    def __getattr__(self, attr):
        # Only called when normal attribute lookup fails.
        if 'entries' not in self.__dict__:
            raise AttributeError(attr)
        try:
            return self.entries[attr.lower()]
        except KeyError:
            raise AttributeError(attr)

    def __setattr__(self, attr, value):
        key = attr.lower()
        if key == 'entries':
            object.__setattr__(self, key, value)
        known = self.entries
        if key in known:
            # known entry: route through __setitem__ (lowercasing)
            self[attr] = value
        else:
            object.__setattr__(self, attr, value)

    def __getitem__(self, index):
        return self.entries[index.lower()]

    def __setitem__(self, index, value):
        self.entries[index.lower()] = value

    def __iter__(self):
        return iter(self.entries)

    def keys(self):
        return self.entries.keys()

    def items(self):
        return self.entries.items()

    def copy(self):
        # NOTE: the clone SHARES the entries dict (original behavior).
        clone = LowerCaseDict()
        clone.entries = self.entries
        return clone

    def setEntries(self, params):
        # Relies on self.types, provided by subclasses (e.g. SQLObject).
        for name, val in params.items():
            if name in self.types:
                self.entries[name] = val
################################################################
class SQLObject(LowerCaseDict):
" The generic object related to entries in the database "
def __str__(self):
    """Human-readable dump of entries, 'id' (when present) listed first."""
    remaining = set(self.entries.keys())
    has_id = 'id' in self.entries
    if has_id:
        remaining.remove('id')
    ordered = list(remaining)
    if has_id:
        ordered = ['id'] + ordered
    lines = [k + ": " + str(self.entries[k]) for k in ordered]
    return "\n".join(lines)
def commit(self):
    """Commit the underlying database connection."""
    self.base.connection.commit()
def setFields(self, constraints):
    """Assign entries from 'key = value' assignment strings.

    Each value is cast through the declared column type. Prints a
    message and exits the process on a malformed assignment or an
    unknown key (CLI behavior, kept as-is).
    """
    for cons in constraints:
        # bug fix: raw string — '\w'/'\s' in a normal literal are
        # invalid escape sequences (DeprecationWarning)
        _regex = r"(\w*)\s*=\s*(.*)"
        match = re.match(_regex, cons)
        if (not match or (not len(match.groups()) == 2)):
            print("malformed assignment: " + cons)
            sys.exit(-1)
        key = match.group(1).lower().strip()
        val = match.group(2)
        if key not in self.types:
            print("unknown key '{0}'".format(key))
            print("possible keys are:")
            for k in self.types.keys():
                print("\t" + k)
            sys.exit(-1)
        # cast to the declared column type before storing
        val = self.types[key](val)
        self.entries[key] = val
def __init__(self, base):
    """Bind to *base* and initialize the column metadata containers."""
    LowerCaseDict.__init__(self)
    self.foreign_keys = {}
    self.allowNull = {}
    # column name -> python type, case-insensitive
    self.types = LowerCaseDict()
    self.base = base
    self.operators = {}
    # fetch the column types from the database schema
    self._prepare()
def __deepcopy__(self, memo):
_cp = type(self)(self.base)
_cp.types = copy.deepcopy(self.types.copy(), memo)
_cp.entries = copy.deepcopy(self.entries.copy(), memo)
# _cp.id = self.id
_cp.foreign_keys = copy.deepcopy(self.foreign_keys, memo)
_cp.allowNull = copy.deepcopy(self.foreign_keys, memo)
_cp.connection = self.base
return _cp
def _prepare(self):
try:
self.base.setObjectItemTypes(self)
except psycopg2.ProgrammingError:
self.base.connection.rollback()
def insert(self):
params = list()
# print (self.types)
ex_msg = ""
for key, value in self.types.items():
if key == "id":
continue
if ((key not in self.entries) and (key not in self.allowNull)):
ex_msg += (
"key '" + key +
"' must be given a value before proceeding insertion\n")
if (not ex_msg == ""):
raise Exception("\n****************\n"+ex_msg+"****************\n")
for key, value in self.entries.items():
# print (key)
# print (self.types[key])
# print (value)
params.append(self.types[key.lower()](value))
request = """
INSERT INTO {0}.{1} ({2}) VALUES ({3}) RETURNING id
""".format(self.base.schema, self.table_name,
','.join(self.entries.keys()),
','.join(["%s" for item in params])), params
return request
def delete(self):
request, params = "DELETE FROM {0}.{1} WHERE id={2}".format(
self.base.schema,
self.table_name,
self.id), []
self.base.performRequest(request, params)
def update(self):
params = list()
keys = list()
for key, value in self.entries.items():
if (value is None):
continue
_type = self.types[key]
# print (_type)
# print (key)
# print (type(value))
if (_type == datetime.datetime):
continue
# _type = str
keys.append(key)
params.append(_type(value))
request = "UPDATE {0}.{1} SET ({2}) = ({3}) WHERE id = {4}".format(
self.base.schema,
self.table_name,
','.join(keys),
','.join(["%s" for item in params]), self.id)
self.base.performRequest(request, params)
def getquoted(self):
raise RuntimeError('code needs review')
# objs = [sql_adapt(member) for member in self._sql_members()]
# for obj in objs:
# if hasattr(obj, 'prepare'):
# obj.prepare(self._conn)
# quoted_objs = [obj.getquoted() for obj in objs]
# return '(' + ', '.join(quoted_objs) + ')'
def createTableRequest(self):
query_string = "CREATE TABLE {0}.{1} ( id SERIAL PRIMARY KEY,".format(
self.base.schema, self.table_name)
for key, value in self.types.items():
if key == 'id':
continue
if (value == float):
type_string = "DOUBLE PRECISION"
elif (value == int):
type_string = "INTEGER"
elif (value == str):
type_string = "TEXT"
elif (value == bool):
type_string = "BOOLEAN"
elif (value == datetime.datetime):
type_string = "TIMESTAMP"
else:
print(value)
raise Exception("type '{0}' not handled".format(value))
query_string += "{0} {1} ".format(key, type_string)
if (key not in self.allowNull):
query_string += " NOT NULL"
query_string += ","
for key, value in self.foreign_keys.items():
query_string += "FOREIGN KEY ({0}) REFERENCES {1}.{2},".format(
key, self.base.schema, value)
return query_string[:-1] + ");"
def getMatchedObjectList(self):
- import selector
+ from . import selector
sel = selector.Selector(self.base)
return sel.select(self, self)
diff --git a/example/createDB.py b/example/createDB.py
index ec4670a..12edbe6 100755
--- a/example/createDB.py
+++ b/example/createDB.py
@@ -1,26 +1,26 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# First we need to set the python headers and
# to import the blackdynamite modules
import BlackDynamite as BD
# Then you have to create a generic black dynamite parser
# and parse the system (including the connection parameters and credentials)
parser = BD.bdparser.BDParser()
params = parser.parseBDParameters()
# Then we can connect to the black dynamite database
-base = BD.base.Base(**params)
+base = BD.base.Base(**params, creation=True)
# Then you have to define the parametric space (the job pattern)
myjob_desc = BD.job.Job(base)
myjob_desc.types["param1"] = int
myjob_desc.types["param2"] = float
myjob_desc.types["param3"] = str
# Then you have to define the run pattern
myruns_desc = BD.run.Run(base)
myruns_desc.types["compiler"] = str
# Then we request for the creation of the database
base.createBase(myjob_desc, myruns_desc, **params)
diff --git a/example/createJobs.py b/example/createJobs.py
index 782de2a..7cb84a7 100755
--- a/example/createJobs.py
+++ b/example/createJobs.py
@@ -1,24 +1,24 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# First we need to set the python headers
# and to import the \blackdynamite modules
import BlackDynamite as BD
# Then you have to create a generic black dynamite parser
# and parse the system (including the connection parameters and credentials)
parser = BD.bdparser.BDParser()
params = parser.parseBDParameters()
# Then we can connect to the black dynamite database
base = BD.base.Base(**params)
# create of job object
job = BD.job.Job(base)
# specify a range of jobs
job["param1"] = 10
job["param2"] = [3.14, 1., 2.]
job["param3"] = 'toto'
# creation of the jobs on the database
base.createParameterSpace(job)
diff --git a/example/createRuns.py b/example/createRuns.py
index b30223d..9e89a73 100755
--- a/example/createRuns.py
+++ b/example/createRuns.py
@@ -1,39 +1,39 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# First we need to set the python headers
# and to import the blackdynamite modules
import BlackDynamite as BD
# import a runparser (instead of a generic BD parser)
parser = BD.RunParser()
params = parser.parseBDParameters()
# Then we can connect to the black dynamite database
base = BD.Base(**params)
# create a run object
myrun = BD.Run(base)
# set the run parameters from the parsed entries
myrun.setEntries(params)
# add a configuration file
myrun.addConfigFiles("doIt.py")
# set the entry point (executable) file
myrun.setExecFile("launch.sh")
# create a job selector
jobSelector = BD.JobSelector(base)
# select the jobs that should be associated with the runs about to be created
job_list = jobSelector.selectJobs(params)
# create the runs
for j in job_list:
myrun['compiler'] = 'gcc'
myrun.attachToJob(j)
# if truerun, commit the changes to the base
if (params["truerun"] is True):
base.commit()
diff --git a/example/doIt.py b/example/doIt.py
index cdc849a..6939cf1 100644
--- a/example/doIt.py
+++ b/example/doIt.py
@@ -1,16 +1,16 @@
-#!/bin/env python
+#!/bin/env python3
import BlackDynamite as BD
myrun, myjob = BD.getRunFromScript()
print(myjob)
myrun.start()
for step in range(0, 10):
_quantity = myrun.id*step
myrun.pushScalarQuantity(_quantity, step, "ekin", is_integer=False)
myrun.pushScalarQuantity(_quantity*2, step, "epot", is_integer=False)
myrun.finish()
diff --git a/example/launch.sh b/example/launch.sh
index 828a305..f3f0b57 100644
--- a/example/launch.sh
+++ b/example/launch.sh
@@ -1,9 +1,9 @@
#!/bin/bash
echo 'here is the job'
echo __BLACKDYNAMITE__id__
echo __BLACKDYNAMITE__param1__
echo __BLACKDYNAMITE__param2__
echo __BLACKDYNAMITE__param3__
-python ./doIt.py
+python3 ./doIt.py
diff --git a/example/post_treatment.py b/example/post_treatment.py
index 7b86a08..99c882b 100644
--- a/example/post_treatment.py
+++ b/example/post_treatment.py
@@ -1,44 +1,44 @@
-#!/bin/env python
+#!/bin/env python3
################################################################
import BlackDynamite as BD
import matplotlib.pyplot as plt
################################################################
# basic connection
parser = BD.BDParser()
params = parser.parseBDParameters(
'--host lsmssrv1.epfl.ch --study mystudy'.split())
mybase = BD.Base(**params)
################################################################
# function to plot things (user's job)
def plot(run_list):
for r, j in run_list:
ekin = r.getScalarQuantity('ekin')
if ekin is None:
continue
print(j)
list_files = r.listFiles()
print(list_files)
fname = r.getFile(list_files[3])
print(fname + ':')
_file = open(fname)
print(_file.read())
plt.plot(ekin[:, 0], ekin[:, 1], 'o-',
label='$p_2 = {0}$'.format(j['param2']))
plt.legend(loc='best')
plt.show()
################################################################
# selecting some runs
runSelector = BD.RunSelector(mybase)
run_list = runSelector.selectRuns(params, params)
plot(run_list)
# selecting some other runs
params['run_constraints'] = ['run_name =~ test', 'state = FINISHED']
params['job_constraints'] = ['param2 > 1']
run_list = runSelector.selectRuns(params, params)
plot(run_list)
diff --git a/scripts/enterRun.py b/scripts/enterRun.py
index 7ec9b55..2cc58a6 100755
--- a/scripts/enterRun.py
+++ b/scripts/enterRun.py
@@ -1,74 +1,74 @@
#!/usr/bin/env python3
################################################################
import BlackDynamite as BD
import subprocess
import os
import sys
import socket
################################################################
parser = BD.BDParser()
parser.register_params(group="getRunInfo",
params={"run_id": int, "order": str},
help={"run_id": "Select a run_id for switching to it"})
params = parser.parseBDParameters()
mybase = BD.Base(**params)
if 'run_id' in params:
params['run_constraints'] = ['id = {0}'.format(params['run_id'])]
try:
del params['job_constraints']
except:
pass
runSelector = BD.RunSelector(mybase)
-run_list = runSelector.selectRuns(params, params, quiet=True)
+run_list = runSelector.selectRuns(params, quiet=True)
mybase.close()
if (len(run_list) == 0):
print("no run found")
sys.exit(1)
run, job = run_list[0]
run_id = run['id']
separator = '-'*30
print(separator)
print("JOB INFO")
print(separator)
print(job)
print(separator)
print("RUN INFO")
print(separator)
print(run)
print(separator)
print("LOGGING TO '{0}'".format(run['machine_name']))
print(separator)
if run['state'] == 'CREATED':
print("Cannot enter run: not yet started")
sys.exit(-1)
bashrc_filename = os.path.join(
'/tmp', 'bashrc.user{0}.study{1}.run{2}'.format(params['user'],
params['study'],
run_id))
bashrc = open(bashrc_filename, 'w')
bashrc.write('export PS1="\\u@\\h:<{0}|RUN-{1}> $ "\n'.format(
params['study'], run_id))
bashrc.write('cd {0}\n'.format(run['run_path']))
bashrc.write('echo ' + separator)
bashrc.close()
command_login = 'bash --rcfile {0} -i'.format(bashrc_filename)
if not run['machine_name'] == socket.gethostname():
command1 = 'scp -q {0} {1}:{0}'.format(bashrc_filename,
run['machine_name'])
subprocess.call(command1, shell=True)
command_login = 'ssh -X -A -t {0} "{1}"'.format(
run['machine_name'], command_login)
# print command_login
subprocess.call(command_login, shell=True)
diff --git a/scripts/updateRuns.py b/scripts/updateRuns.py
index 31306a1..1a0c40f 100755
--- a/scripts/updateRuns.py
+++ b/scripts/updateRuns.py
@@ -1,44 +1,42 @@
#!/usr/bin/env python3
import BlackDynamite as BD
import os
import sys
import socket
parser = BD.BDParser()
parser.register_params(
group="updateRuns",
params={"run_id": int,
"updates": [str]},
defaults={"machine_name": socket.gethostname()},
help={"run_id": "The id of the run to update",
"updates": "The updates to perform. Syntax should be 'key = newval'"}
)
params = parser.parseBDParameters()
if "user" not in params.keys():
params["user"] = os.getlogin()
base = BD.Base(**params)
if "run_id" in params:
if "constraints" not in params:
params["constraints"] = []
params["constraints"].append("runs.id = " + str(params["run_id"]))
-print(params)
runSelector = BD.RunSelector(base)
run_list = runSelector.selectRuns(params)
-if (len(run_list) == 0):
- print "No runs to be updated"
+if len(run_list) == 0:
+ print("No runs to be updated")
if "updates" not in params:
- print "No update to be performed: use --updates option"
+ print("No update to be performed: use --updates option")
sys.exit(-1)
for r, j in run_list:
-
r.setFields(params["updates"])
- if (params["truerun"] is True):
+ if params["truerun"] is True:
r.update()
base.commit()
diff --git a/setup.py b/setup.py
index cb9b176..5f63dda 100644
--- a/setup.py
+++ b/setup.py
@@ -1,37 +1,39 @@
from setuptools import setup
setup(name="blackdynamite",
packages=['BlackDynamite', 'BlackDynamite.coating'],
version="0.0.1",
author="Guillaume Anciaux",
author_email="guillaume.anciaux@epfl.ch",
description=("Compliant parametric study tool"),
package_data={
'BlackDynamite': ['build_tables.sql']},
scripts=['scripts/canYouDigIt.py',
'scripts/cleanRuns.py',
'scripts/createUser.py',
'scripts/enterRun.py',
'scripts/getRunInfo.py',
'scripts/launchRuns.py',
'scripts/mvRuns.py',
'scripts/pushQuantity.py',
'scripts/saveBDStudy.py',
'scripts/updateRuns.py',
'scripts/studyInfo.py',
'scripts/bash_completion.sh'],
+ install_requires=["psycopg2-binary", "numpy",
+ "argcomplete", "pyparsing"],
test_suite="pythontests",
license="""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see .
""")