losses.py (R8797 solarPV)
import os
import tensorflow as tf
import numpy as np
import scipy
import data_loaders
import datetime
import sys
# import cv2
from scipy import ndimage
#from matplotlib import pyplot as plt
from PIL import Image
from keras.models import *
from keras.layers import Reshape
from keras.layers.core import *
from keras.layers import *
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import *
from keras.layers.advanced_activations import LeakyReLU
from keras.optimizers import Adam, SGD
from keras.utils import np_utils
from keras import backend as K

def weighted_categorical_crossentropy(weights):
    """
    A weighted version of keras.objectives.categorical_crossentropy

    Variables:
        weights: numpy array of shape (C,) where C is the number of classes

    Usage:
        weights = np.array([0.5, 2, 10])  # class 1 weighted 0.5x, class 2 weighted 2x, class 3 weighted 10x
        loss = weighted_categorical_crossentropy(weights)
        model.compile(loss=loss, optimizer='adam')
    """
    weights = K.variable(weights)

    def loss(y_true, y_pred):
        # scale predictions so that the class probabilities of each sample sum to 1
        y_pred /= K.sum(y_pred, axis=-1, keepdims=True)
        # clip to prevent NaN's and Inf's
        y_pred = K.clip(y_pred, K.epsilon(), 1 - K.epsilon())
        # per-class cross-entropy, scaled by the class weights and summed over the class axis
        loss = y_true * K.log(y_pred) * weights
        loss = -K.sum(loss, -1)
        return loss

    return loss

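# Quick numeric check (sketch): the helper below and its toy values are
# illustrative only. With class weights [0.5, 2, 10] and a prediction of 0.8
# for the true class (class 1), the loss should be about -2.0 * log(0.8).
def _demo_weighted_cce():
    w = np.array([0.5, 2.0, 10.0])
    wcce = weighted_categorical_crossentropy(w)
    y_true = K.constant(np.array([[0., 1., 0.]]))
    y_pred = K.constant(np.array([[0.1, 0.8, 0.1]]))
    print(K.eval(wcce(y_true, y_pred)))  # -> [~0.446]
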
def jaccard_distance_loss(y_true, y_pred, smooth=100):
    """
    Jaccard = (|X & Y|) / (|X| + |Y| - |X & Y|)
            = sum(|A*B|) / (sum(|A|) + sum(|B|) - sum(|A*B|))

    The Jaccard distance loss is useful for unbalanced datasets. It has been
    shifted so it converges on 0 and is smoothed to avoid exploding or
    vanishing gradients.

    Ref: https://en.wikipedia.org/wiki/Jaccard_index
    @url: https://gist.github.com/wassname/f1452b748efcbeb4cb9b1d059dce6f96
    @author: wassname
    """
    intersection = K.sum(K.abs(y_true * y_pred), axis=-1)
    sum_ = K.sum(K.abs(y_true) + K.abs(y_pred), axis=-1)
    jac = (intersection + smooth) / (sum_ - intersection + smooth)
    return (1 - jac) * smooth

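# Quick numeric check (sketch): toy values, illustrative only. A perfect
# one-hot prediction gives a loss of ~0; a completely wrong prediction gives
# a small positive value scaled by `smooth`.
def _demo_jaccard():
    y_true = K.constant(np.array([[0., 1., 0.]]))
    y_pred_good = K.constant(np.array([[0., 1., 0.]]))
    y_pred_bad = K.constant(np.array([[1., 0., 0.]]))
    print(K.eval(jaccard_distance_loss(y_true, y_pred_good)))  # -> [0.]
    print(K.eval(jaccard_distance_loss(y_true, y_pred_bad)))   # -> [~1.96]
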
def dice_coef(y_true, y_pred, smooth=0.01):
    """
    ~Intersection over Union (IoU)
    Dice = (2*|X & Y|) / (|X| + |Y|)
         = 2*sum(|A*B|) / (sum(A^2) + sum(B^2))

    ref: https://github.com/keras-team/keras/issues/3611
    """
    # only the foreground channel (index 1) of the one-hot masks is scored
    y_true_f = K.flatten(y_true[..., 1])
    y_pred_f = K.flatten(y_pred[..., 1])
    intersection = tf.to_float(K.sum(y_true_f * y_pred_f))
    union = tf.to_float(K.sum(y_true_f) + K.sum(y_pred_f)) - intersection
    return (2. * intersection + smooth) / (union + smooth)

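# Quick numeric check (sketch): toy values, illustrative only. Masks are
# assumed one-hot over two channels (background, foreground); dice_coef scores
# channel 1. Note that the denominator subtracts the intersection (an IoU-style
# union), so a perfect match evaluates to ~2 rather than the classical Dice
# value of 1.
def _demo_dice_coef():
    mask = np.array([[[[1., 0.], [1., 0.]],
                      [[1., 0.], [0., 1.]]]])  # shape (1, 2, 2, 2), one foreground pixel
    y_true = K.constant(mask)
    y_pred = K.constant(mask)
    print(K.eval(dice_coef(y_true, y_pred)))  # -> ~1.99
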
def dice_coef_loss(y_true, y_pred):
    return 1 - dice_coef(y_true, y_pred)

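# Usage sketch: plug the losses into model.compile(). The tiny model below is
# an illustrative stand-in for the project's actual segmentation network.
def _demo_compile():
    inputs = Input(shape=(16, 16, 1))
    outputs = Conv2D(2, (1, 1), activation='softmax')(inputs)
    model = Model(inputs, outputs)
    model.compile(optimizer=Adam(), loss=dice_coef_loss, metrics=[dice_coef])
    return model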