final_run.py (text/x-python, 2 KB), attached to R8797 solarPV
import data_loaders
import datetime
import losses
import sys
# import cv2
from scipy import ndimage
from matplotlib import pyplot as plt
from PIL import Image
from keras.models import *
from keras.layers import Reshape
from keras.layers.core import *
from keras.layers import *
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import *
from keras.layers.advanced_activations import LeakyReLU
from keras.optimizers import Adam, SGD
from keras.utils import np_utils
from keras import backend as K


def unet(nClasses, loss, optimizer=None, input_width=360, input_height=480, nChannels=1):
    ker = 3
    pad = 3
    dropout_rate = 0.2

    inputs = Input((input_height, input_width, nChannels))
    # Pad the input before downsampling.
    # Note: (input_height + 2*pad) and (input_width + 2*pad) should be divisible
    # by 4 so the upsampled feature maps align with the skip connections below.
    inputs_padded = ZeroPadding2D(padding=pad)(inputs)

    # Contracting path
    conv1 = Conv2D(32, kernel_size=ker, activation='relu', padding='same')(inputs_padded)
    conv1 = Dropout(dropout_rate)(conv1)
    conv1 = Conv2D(32, kernel_size=ker, activation='relu', padding='same')(conv1)
    pool1 = MaxPooling2D(padding='same')(conv1)

    conv2 = Conv2D(64, kernel_size=ker, activation='relu', padding='same')(pool1)
    conv2 = Dropout(dropout_rate)(conv2)
    conv2 = Conv2D(64, kernel_size=ker, activation='relu', padding='same')(conv2)
    pool2 = MaxPooling2D(padding='same')(conv2)

    # Bottleneck
    conv3 = Conv2D(128, kernel_size=ker, activation='relu', padding='same')(pool2)
    conv3 = Dropout(dropout_rate)(conv3)
    conv3 = Conv2D(128, kernel_size=ker, activation='relu', padding='same')(conv3)

    # Expanding path with skip connections
    up1 = concatenate([UpSampling2D(size=(2, 2))(conv3), conv2], axis=3)
    conv4 = Conv2D(64, kernel_size=ker, activation='relu', padding='same')(up1)
    conv4 = Dropout(dropout_rate)(conv4)
    conv4 = Conv2D(64, kernel_size=ker, activation='relu', padding='same')(conv4)

    up2 = concatenate([UpSampling2D(size=(2, 2))(conv4), conv1], axis=3)
    conv5 = Conv2D(32, kernel_size=ker, activation='relu', padding='same')(up2)
    conv5 = Dropout(dropout_rate)(conv5)
    conv5 = Conv2D(32, kernel_size=ker, activation='relu', padding='same')(conv5)

    # Per-pixel class scores, cropped back to the original spatial size,
    # then flattened to (H * W, nClasses) for the softmax.
    conv6 = Conv2D(nClasses, kernel_size=1, activation='relu', padding='same')(conv5)
    cropped_conv6 = Cropping2D(cropping=pad)(conv6)
    conv6 = Reshape((input_height * input_width, nClasses))(cropped_conv6)
    conv7 = Activation('softmax')(conv6)

    model = Model(inputs=inputs, outputs=conv7)

    if optimizer is not None:
        model.compile(loss=loss, optimizer=optimizer, metrics=[losses.dice_coef])
        print("Model was compiled.")

    return model
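
For reference, a minimal usage sketch follows. The class count, loss, Adam learning rate, and the 250x250 single-channel input size are assumptions for illustration, not values taken from this repository; 250 is chosen so that the padded size (256x256) is divisible by 4 and the skip connections concatenate cleanly.

from keras.optimizers import Adam

# Build and compile a 2-class model (assumed settings, not from this file).
model = unet(nClasses=2,
             loss='categorical_crossentropy',
             optimizer=Adam(lr=1e-4),
             input_width=250,
             input_height=250,
             nChannels=1)
model.summary()

Because the final layers reshape the output to (input_height * input_width, nClasses) followed by a softmax, the training labels must be flattened to the same per-pixel one-hot layout before calling fit.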