model_main.py
Attached to: R8797 solarPV
import os
import tensorflow
import numpy as np
import scipy
import data_loaders
import datetime
import losses
import sys
#import cv2
from scipy import ndimage
#from matplotlib import pyplot as plt
from PIL import Image
from keras.models import *
from keras.layers import Reshape
from keras.layers.core import *
from keras.layers import *
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import *
from keras.layers.advanced_activations import LeakyReLU
from keras.optimizers import Adam, SGD
from keras.utils import np_utils
from keras import backend as K

directory_path = '/home/rcastell/solar_deployment/segmentation/CNN/models/'
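
# NOTE: `data_loaders` and `losses` above are project-local modules that are not part
# of this file. As an illustration only (the actual implementations may differ), a
# Dice coefficient metric of the kind referenced below as losses.dice_coef is
# commonly written with the Keras backend as:
#
#   def dice_coef(y_true, y_pred, smooth=1.0):
#       y_true_f = K.flatten(y_true)
#       y_pred_f = K.flatten(y_pred)
#       intersection = K.sum(y_true_f * y_pred_f)
#       return (2.0 * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
#
#   def dice_coef_loss(y_true, y_pred):
#       return 1.0 - dice_coef(y_true, y_pred)
#
# losses.weighted_categorical_crossentropy is assumed to take a list of per-class
# weights and return a Keras-compatible loss function.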

def unet(nClasses, loss, optimizer=None, input_width=360, input_height=480, nChannels=1):
    ker = 3
    pad = 3
    dropout_rate = 0.2

    inputs = Input((input_height, input_width, nChannels))
    # Zero-pad the input so the spatial dimensions survive the pooling/upsampling stages
    inputs_padded = ZeroPadding2D(padding=pad, input_shape=(input_width, input_height, 3))(inputs)

    # Encoder: two conv/dropout/conv blocks, each followed by max-pooling
    conv1 = Conv2D(32, kernel_size=ker, activation='relu', padding='same')(inputs_padded)
    conv1 = Dropout(dropout_rate)(conv1)
    conv1 = Conv2D(32, kernel_size=ker, activation='relu', padding='same')(conv1)
    pool1 = MaxPooling2D(padding='same')(conv1)

    conv2 = Conv2D(64, kernel_size=ker, activation='relu', padding='same')(pool1)
    conv2 = Dropout(dropout_rate)(conv2)
    conv2 = Conv2D(64, kernel_size=ker, activation='relu', padding='same')(conv2)
    pool2 = MaxPooling2D(padding='same')(conv2)

    # Bottleneck
    conv3 = Conv2D(128, kernel_size=ker, activation='relu', padding='same')(pool2)
    conv3 = Dropout(dropout_rate)(conv3)
    conv3 = Conv2D(128, kernel_size=ker, activation='relu', padding='same')(conv3)

    # Decoder: upsample and concatenate with the corresponding encoder features (skip connections)
    up1 = concatenate([UpSampling2D(size=(2, 2))(conv3), conv2], axis=3)
    conv4 = Conv2D(64, kernel_size=ker, activation='relu', padding='same')(up1)
    conv4 = Dropout(dropout_rate)(conv4)
    conv4 = Conv2D(64, kernel_size=ker, activation='relu', padding='same')(conv4)

    up2 = concatenate([UpSampling2D(size=(2, 2))(conv4), conv1], axis=3)
    conv5 = Conv2D(32, kernel_size=ker, activation='relu', padding='same')(up2)
    conv5 = Dropout(dropout_rate)(conv5)
    conv5 = Conv2D(32, kernel_size=ker, activation='relu', padding='same')(conv5)

    # Per-pixel class scores, crop the initial padding away, then flatten to (H*W, nClasses) and apply softmax
    conv6 = Conv2D(nClasses, kernel_size=1, activation='relu', padding='same')(conv5)
    cropped_conv6 = Cropping2D(cropping=pad)(conv6)
    conv6 = core.Reshape((input_height * input_width, nClasses))(cropped_conv6)
    conv7 = core.Activation('softmax')(conv6)

    model = Model(input=inputs, output=conv7)

    if optimizer is not None:
        model.compile(loss=loss, optimizer=optimizer,
                      metrics=[losses.dice_coef])  # metric: Dice coefficient from the local losses module
        print("Model was compiled.")

    return model
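
# Example (illustrative only): unet() can also be used without an optimizer, in which
# case the model is returned uncompiled and can be compiled by the caller, e.g.:
#   m = unet(2, loss=None, optimizer=None, input_width=250, input_height=250, nChannels=3)
#   m.compile(loss='categorical_crossentropy', optimizer='adam')
#   m.summary()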

def run(rotations, lights, dice, weight1, weight2, epochs):
    original_images, labeled_images = data_loaders.load_images(lights=lights, rotations=rotations)
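    # NOTE (assumption, not stated in this file): data_loaders is a project-local
    # module; load_images is expected to return two equally long lists, the input
    # tiles and their per-pixel labels (0 = non-PV, 1 = PV), matching the
    # 250x250x3 input shape the network is built with further below.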
    X_train = np.array(original_images)
    Y_train_flat = np.array(labeled_images).reshape(len(labeled_images), -1)
    # One dimension for each channel (one channel PV, one channel non-PV)
    Y_train_2d = np.stack((np.where(Y_train_flat == 0, 1, 0), Y_train_flat),
                          axis=-1).reshape(len(X_train), -1, 2)
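
    # Illustrative example (not from the original file): on a tiny flat label
    # vector the stacking above produces a two-channel one-hot encoding,
    # channel 0 = non-PV, channel 1 = PV:
    #   >>> y = np.array([[0, 1, 1, 0]])
    #   >>> np.stack((np.where(y == 0, 1, 0), y), axis=-1).reshape(1, -1, 2)
    #   array([[[1, 0],
    #           [0, 1],
    #           [0, 1],
    #           [1, 0]]])
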
    inp_shape = X_train[0].shape
    out_shape_flat = Y_train_flat[0].shape
    out_shape_2d = Y_train_2d[0].shape

    if not dice:
        model_u = unet(2, loss=losses.weighted_categorical_crossentropy([weight1, weight2]),
                       optimizer='adam', input_width=250, input_height=250, nChannels=3)
    else:
        model_u = unet(2, loss=losses.dice_coef_loss,
                       optimizer='adam', input_width=250, input_height=250, nChannels=3)

    model_u.summary()

    model_name = str(datetime.date.today()) + \
        "dice_{:}_{:}_epochs_{:}_rotlight:{:}".format(dice, (weight1, weight2), epochs, (rotations, lights))
    print(model_name)

    model_u.fit(X_train, Y_train_2d, validation_split=0.1, epochs=epochs)

    # Save the full model (architecture + weights)
    model_u.save(directory_path + '{:}.h5'.format(model_name))
    # Save the weights only
    model_u.save_weights(directory_path + '{:}weights.h5'.format(model_name))
    # Save the model architecture as JSON
    with open(directory_path + '{:}architecture.json'.format(model_name), 'w') as f:
        f.write(model_u.to_json())

if __name__ == "__main__":
    # Positional CLI arguments: rotations, lights, dice, weight1, weight2, epochs
    rotations = int(sys.argv[1])
    lights = int(sys.argv[2])
    dice = int(sys.argv[3])
    weight1 = float(sys.argv[4])
    weight2 = float(sys.argv[5])
    epochs = int(sys.argv[6])
    run(rotations, lights, dice, weight1, weight2, epochs)
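
# Usage sketch (illustrative only; the values below are placeholders, not values used
# by the author):
#   python model_main.py 1 1 0 1.0 5.0 50
# is equivalent to
#   run(rotations=1, lights=1, dice=0, weight1=1.0, weight2=5.0, epochs=50)
# where rotations and lights are forwarded to data_loaders.load_images, dice selects
# the Dice loss (non-zero) versus the weighted categorical cross-entropy (0), weight1
# and weight2 are the class weights for that cross-entropy, and epochs is the number
# of training epochs.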