eval_logs_to_dict.py (attached to R8800 solar_potential)
import pandas as pd
import os
import xarray as xr
import pickle
import sys

# command-line arguments: base path, dataset name, and which run type to merge,
# with an optional file name prefix as a fourth argument
path = sys.argv[1]
dataset = sys.argv[2]
type_to_merge = sys.argv[3]
if len(sys.argv) > 4:
    prefix = str(sys.argv[4]) + '_'
else:
    prefix = ''

# map each file label to the corresponding log file name
file_link = {'train': prefix + 'log_train.csv',
             'train_pred': prefix + 'log_pred_tr.csv',
             'val_pred': prefix + 'log_pred_val.csv',
             'test_pred': prefix + 'log_pred_te.csv'}

dirpath = os.path.join(path, 'datasets', dataset)
subdirs = os.listdir(dirpath)

# declare merged log
merged_log = {}

for directory in subdirs:
    # only consider ELM run directories
    if directory[:3] != 'ELM':
        continue
    log_list = []

    # load all files:
    for file_label in file_link:
        try:
            logfile_name = file_link[file_label]
            logfile_dir = os.path.join(dirpath, directory, type_to_merge, logfile_name)

            # load file and drop the cputime column (not relevant for ELM)
            current_log = pd.read_csv(logfile_dir, index_col=0).drop('cputime', axis=1)

            # rename all columns by appending the file label (for later identification)
            for column_label in current_log.columns:
                current_log = current_log.rename({column_label: column_label + '_' + file_label}, axis=1)

            # go through the list of collected variables again and rename them according to the model
            for column_label in current_log.columns:
                entry = current_log[column_label].to_frame().rename({column_label: directory}, axis=1)

                # merge with the merged_log (and set if the variable does not yet exist)
                if column_label in merged_log:
                    merged_log[column_label] = merged_log[column_label].merge(
                        entry, left_index=True, right_index=True, how='outer')
                else:
                    merged_log[column_label] = entry

        except Exception as e:
            print(e)

# write the merged log dictionary to a pickle file next to the run directories
logfile_out = os.path.join(dirpath, type_to_merge + '_merged_log.pickle')
with open(logfile_out, 'wb') as handle:
    pickle.dump(merged_log, handle, protocol=pickle.HIGHEST_PROTOCOL)

print('Created file %s' % logfile_out)
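For reference, a minimal sketch of how the script might be invoked and how the resulting pickle can be read back. The base path 'results', dataset name 'my_dataset', run type 'CV' and prefix 'elm' are illustrative placeholders, not values taken from the repository:

# Hypothetical invocation (all path and name arguments are placeholders):
#   python eval_logs_to_dict.py results my_dataset CV elm
#
# This would read results/datasets/my_dataset/ELM*/CV/elm_log_*.csv and write
# results/datasets/my_dataset/CV_merged_log.pickle.

import pickle

# Load the merged log back: each key is a log column suffixed with its file
# label (e.g. '<column>_train_pred'); each value is a DataFrame with one
# column per ELM run directory, outer-joined on the log index.
with open('results/datasets/my_dataset/CV_merged_log.pickle', 'rb') as handle:
    merged_log = pickle.load(handle)

for variable, frame in merged_log.items():
    print(variable, frame.shape)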