models.py

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Dense
from tensorflow.keras.activations import relu, sigmoid, tanh

# Run eagerly under the TF1 compatibility API
tf.compat.v1.enable_eager_execution()
class RNN(keras.layers.Layer):
"""Recurrent Neural Network class
Class which allows to create an instance of a recurrent neural network with hidden state. Each layer are doubled.
Attributes:
fc_x2h_1 : Dense layer processing the input to an intermediate recurrent space.
fc_x2h_2 : Dense layer processing the input in an intermediate recurrent space to the recurrent space.
fc_h2h_1 : Dense layer processing the recurrent state to an intermediate recurrent space.
fc_h2h_2 : Dense layer processing the recurrent state in an intermediate recurrent space to the recurrent space.
fc_h2y_1 : Dense layer processing the recurrent state to an intermediate recurrent space.
fc_h2y_2 : Dense layer processing the recurrent state in an intermediate recurrent space to the output space.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(RNN, self).__init__()
self.fc_x2h_1 = Dense(2*dim_input, input_shape=(dim_input,))
self.fc_x2h_2 = Dense(dim_recurrent, input_shape=(2*dim_input,))
self.fc_h2h_1 = Dense(2*dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2h_2 = Dense(dim_recurrent, input_shape=(2*dim_recurrent,))
self.fc_h2y_1 = Dense(2*dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2y_2 = Dense(dim_output, input_shape=(2*dim_recurrent,))
def __call__(self, input_data, state):
"""
Implement the forward pass of the network
:param input_data: Input tensor, e.g. input data at current time
        :param state: List of per-sample recurrent state vectors, usually initialized with zeros.
:return: Output of the network
"""
# Convert list of recurrent state into tensor
state = tf.stack(state)
x2h_1 = relu(self.fc_x2h_1(input_data))
h2h_1 = relu(self.fc_h2h_1(state))
# Update of the recurrent state
h = relu(self.fc_x2h_2(x2h_1) + self.fc_h2h_2(h2h_1))
return sigmoid(self.fc_h2y_2(self.fc_h2y_1(h))), tf.unstack(h)
def predict(self, test_input, time_steps, dim_recurrent):
"""
Make a prediction tensor, given a test input tensor
        :param test_input: Input test tensor of shape (nb_samples, time_steps, dim_input)
        :param time_steps: Number of time steps
        :param dim_recurrent: Recurrent dimension of the network. Used to initialize the recurrent state.
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
        """
        # Extract the number of samples; int(shape[0]) works with both tf.Dimension and plain ints
        nb_samples = int(test_input.shape[0])
# Initialize states
states = list(tf.zeros(shape=[dim_recurrent]) for _ in range(nb_samples))
predictions = []
# Iterate over time
for t in range(time_steps):
# Make prediction at current time t
prediction, states = self.__call__(test_input[:, t, :], states)
predictions.append(prediction)
# Convert list of tensors into a single tensor
predictions = tf.stack(predictions, axis=1)
return predictions
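
# Usage sketch (illustrative only; the dimensions and random data below are hypothetical):
#
#   rnn = RNN(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))  # (nb_samples, time_steps, dim_input)
#   predictions = rnn.predict(test_input, time_steps=10, dim_recurrent=32)
#   # predictions has shape (4, 10, 1)
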
class RNN2Cells(keras.layers.Layer):
""" Two cells Recurrent Neural Network class
Class which allows to create an instance of a two consecutive cells of recurrent neural network with hidden state.
Each layer are doubled.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(RNN2Cells, self).__init__()
self.fc_x2h_11 = Dense(2*dim_input, input_shape=(dim_input,))
self.fc_x2h_21 = Dense(dim_recurrent, input_shape=(2*dim_input,))
self.fc_x2h_12 = Dense(2 * dim_input, input_shape=(dim_input,))
self.fc_x2h_22 = Dense(dim_recurrent, input_shape=(2 * dim_input,))
self.fc_h2h_11 = Dense(2*dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2h_21 = Dense(dim_recurrent, input_shape=(2*dim_recurrent,))
self.fc_h2h_12 = Dense(2 * dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2h_22 = Dense(dim_recurrent, input_shape=(2 * dim_recurrent,))
self.fc_h2y_1 = Dense(2*dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2y_2 = Dense(dim_output, input_shape=(2*dim_recurrent,))
    def __call__(self, input_data, state):
"""
Implement the forward pass of the network
:param input_data: Input tensor, e.g. input data at current time
        :param state: List of per-sample recurrent state vectors, usually initialized with zeros.
:return: Output of the network
"""
# Convert list of recurrent state into tensor
state = tf.stack(state)
x2h_11 = relu(self.fc_x2h_11(input_data[0]))
h2h_11 = relu(self.fc_h2h_11(state))
# Update of the recurrent state in the first cell
h = relu(self.fc_x2h_21(x2h_11) + self.fc_h2h_21(h2h_11))
x2h_12 = relu(self.fc_x2h_12(input_data[1]))
h2h_12 = relu(self.fc_h2h_12(h))
# Update of the recurrent state in the second cell
h = relu(self.fc_x2h_22(x2h_12) + self.fc_h2h_22(h2h_12))
return sigmoid(self.fc_h2y_2(self.fc_h2y_1(h))), tf.unstack(h)
def predict(self, test_input, time_steps, dim_recurrent):
"""
Make a prediction tensor, given a test input tensor
        :param test_input: Input test tensor of shape (nb_samples, time_steps + 1, dim_input);
                           the extra step is needed because each cell reads the data at t and t+1
        :param time_steps: Number of time steps
        :param dim_recurrent: Recurrent dimension of the network. Used to initialize the recurrent state.
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
        """
        # Extract the number of samples
        nb_samples = int(test_input.shape[0])
# Initialize states
states = list(tf.zeros(shape=[dim_recurrent]) for _ in range(nb_samples))
predictions = []
# Iterate over time
for t in range(time_steps):
            # Get input data at times t and t+1 (hence the time_steps + 1 requirement on test_input)
            input_data = [test_input[:, i, :] for i in range(t, t + 2)]
# Make prediction at current time t
prediction, states = self.__call__(input_data, states)
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
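
# Usage sketch (illustrative; values are hypothetical). Because the two-cell forward pass
# reads the input at both t and t+1, test_input must cover time_steps + 1 steps:
#
#   rnn2 = RNN2Cells(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 11, 1))  # 11 = time_steps + 1
#   predictions = rnn2.predict(test_input, time_steps=10, dim_recurrent=32)
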
class GatedRNN(keras.layers.Layer):
"""Gated recurrent Neural Network class
Class which allows to create an instance of a gated recurrent neural network with hidden state.
Attributes:
fc_x2h : Dense layer processing the input to the recurrent space.
fc_h2h : Dense layer processing the recurrent state to the recurrent space.
fc_x2z : Dense layer processing the input to the recurrent space. Used in the computation of the gate.
fc_h2z : Dense layer processing the recurrent state to the recurrent space. Used in the computation of the gate.
fc_h2y : Dense layer processing the recurrent state to the output state.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(GatedRNN, self).__init__()
self.fc_x2h = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_h2h = Dense(dim_recurrent, input_shape=(dim_recurrent,))
self.fc_x2z = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_h2z = Dense(dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2y = Dense(dim_output, input_shape=(dim_recurrent,))
def __call__(self, input_data, state):
"""
Implement the forward pass of the network
:param input_data: Input tensor, e.g. input data at current time
        :param state: List of per-sample recurrent state vectors, usually initialized with zeros.
:return: Output of the network
"""
# Convert list of recurrent state into tensor
state = tf.stack(state)
# Computation of the gate
z = sigmoid(self.fc_x2z(input_data) + self.fc_h2z(state))
# Intermediate state
hb = relu(self.fc_x2h(input_data) + self.fc_h2h(state))
# Update of recurrent state with gating
h = z * state + (1 - z) * hb
return self.fc_h2y(h), tf.unstack(h)
def predict(self, test_input, time_steps, dim_recurrent):
"""
Make a prediction tensor, given a test input tensor
        :param test_input: Input test tensor of shape (nb_samples, time_steps, dim_input)
        :param time_steps: Number of time steps
        :param dim_recurrent: Recurrent dimension of the network. Used to initialize the recurrent state.
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
        """
        # Extract the number of samples
        nb_samples = int(test_input.shape[0])
# Initialize states
states = list(tf.zeros(shape=[dim_recurrent]) for _ in range(nb_samples))
predictions = []
# Iterate over time
for t in range(time_steps):
# Make prediction at current time t
prediction, states = self.__call__(test_input[:, t, :], states)
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
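
# Usage sketch (illustrative; dimensions are hypothetical). The gate z interpolates between
# the previous state and the candidate hb, i.e. h = z * state + (1 - z) * hb:
#
#   grnn = GatedRNN(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   predictions = grnn.predict(test_input, time_steps=10, dim_recurrent=32)
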
class GatedRNNComplex(keras.layers.Layer):
"""Gated recurrent Neural Network class
Class which allows to create an instance of a gated recurrent neural network with hidden state.
Each layer are doubled compared to the GatedRNN class.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(GatedRNNComplex, self).__init__()
        self.fc_x2h_1 = Dense(dim_recurrent // 2, input_shape=(dim_input,))
        self.fc_x2h_2 = Dense(dim_recurrent, input_shape=(dim_recurrent // 2,))
        self.fc_h2h_1 = Dense(2 * dim_recurrent, input_shape=(dim_recurrent,))
        self.fc_h2h_2 = Dense(dim_recurrent, input_shape=(2 * dim_recurrent,))
        self.fc_x2z_1 = Dense(dim_recurrent // 2, input_shape=(dim_input,))
        self.fc_x2z_2 = Dense(dim_recurrent, input_shape=(dim_recurrent // 2,))
self.fc_h2z_1 = Dense(2 * dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2z_2 = Dense(dim_recurrent, input_shape=(2 * dim_recurrent,))
self.fc_h2y_1 = Dense(2 * dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2y_2 = Dense(dim_output, input_shape=(2 * dim_recurrent,))
def __call__(self, input_data, state):
"""
Implement the forward pass of the network
:param input_data: Input tensor, e.g. input data at current time
        :param state: List of per-sample recurrent state vectors, usually initialized with zeros.
:return: Output of the network
"""
state = tf.stack(state)
x2z_1 = relu(self.fc_x2z_1(input_data))
h2z_1 = relu(self.fc_h2z_1(state))
z = sigmoid(self.fc_x2z_2(x2z_1) + self.fc_h2z_2(h2z_1))
x2h_1 = relu(self.fc_x2h_1(input_data))
h2h_1 = relu(self.fc_h2h_1(state))
hb = relu(self.fc_x2h_2(x2h_1) + self.fc_h2h_2(h2h_1))
h = z * state + (1 - z) * hb
h2y_1 = relu(self.fc_h2y_1(h))
return self.fc_h2y_2(h2y_1), tf.unstack(h)
def predict(self, test_input, time_steps, dim_recurrent):
"""
Make a prediction tensor, given a test input tensor
        :param test_input: Input test tensor of shape (nb_samples, time_steps, dim_input)
        :param time_steps: Number of time steps
        :param dim_recurrent: Recurrent dimension of the network. Used to initialize the recurrent state.
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
        """
        nb_samples = int(test_input.shape[0])
states = list(tf.zeros(shape=[dim_recurrent]) for _ in range(nb_samples))
predictions = []
# Iterate over time
for t in range(time_steps):
# Make prediction at current time
prediction, states = self.__call__(test_input[:, t, :], states)
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
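
# Usage sketch (illustrative). The input branches use dim_recurrent // 2 units, so an even
# dim_recurrent keeps the halving exact:
#
#   grnn_c = GatedRNNComplex(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   predictions = grnn_c.predict(test_input, time_steps=10, dim_recurrent=32)
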
class LSTM(keras.layers.Layer):
"""Long Short Term Memory Network
Class which allows to create an instance of a lstm neural network with hidden and candidate state.
Attributes:
fc_x2f : Dense layer processing the input to the recurrent space. Used in the forget gate.
fc_h2f : Dense layer processing the recurrent state to the recurrent space. Used in the forget gate.
fc_x2i : Dense layer processing the input to the recurrent space. Used in the input gate.
fc_h2i : Dense layer processing the recurrent state in the recurrent space. Used in the input gate.
fc_x2o : Dense layer processing the input to the recurrent space. Used in the output gate.
fc_h2o : Dense layer processing the recurrent state in the recurrent space. Used in the output gate.
fc_x2c : Dense layer processing the input to the recurrent space. Used in the candidate computation.
fc_h2c : Dense layer processing the recurrent state in the recurrent space. Used in the candidate computation.
fc_h2y : Dense layer processing the recurrent state to the output state.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(LSTM, self).__init__()
self.fc_x2f = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_h2f = Dense(dim_recurrent, input_shape=(dim_recurrent,))
self.fc_x2i = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_h2i = Dense(dim_recurrent, input_shape=(dim_recurrent,))
self.fc_x2o = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_h2o = Dense(dim_recurrent, input_shape=(dim_recurrent,))
self.fc_x2c = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_h2c = Dense(dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2y = Dense(dim_output, input_shape=(dim_recurrent,))
def __call__(self, input_data, state, candidate):
"""
Implement the forward pass of the network
:param input_data: Input tensor, e.g. input data at current time
        :param state: List of per-sample recurrent state vectors, usually initialized with zeros.
        :param candidate: List of per-sample candidate (cell) state vectors, usually initialized with zeros.
:return: Output of the network
"""
# Convert list of recurrent state and candidates into tensor
state = tf.stack(state)
candidate = tf.stack(candidate)
# Forget gate
f = sigmoid(self.fc_x2f(input_data) + self.fc_h2f(state))
# Input gate
inp = sigmoid(self.fc_x2i(input_data) + self.fc_h2i(state))
# Output gate
o = sigmoid(self.fc_x2o(input_data) + self.fc_h2o(state))
g = tanh(self.fc_x2c(input_data) + self.fc_h2c(state))
# Update of the candidate
c = f * candidate + inp * g
# Update of the recurrent state
h = o * tanh(c)
        return self.fc_h2y(relu(h)), tf.unstack(h), tf.unstack(c)
def predict(self, test_input, time_steps, dim_recurrent):
"""
Make a prediction tensor, given a test input tensor
        :param test_input: Input test tensor of shape (nb_samples, time_steps, dim_input)
        :param time_steps: Number of time steps
        :param dim_recurrent: Recurrent dimension of the network. Used to initialize the recurrent and candidate states.
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
        """
        nb_samples = int(test_input.shape[0])
states = list(tf.zeros(shape=[dim_recurrent]) for _ in range(nb_samples))
candidates = list(tf.zeros(shape=[dim_recurrent]) for _ in range(nb_samples))
predictions = []
# Iterate over time
for t in range(time_steps):
# Make prediction at current time
prediction, states, candidates = self.__call__(test_input[:, t, :], states, candidates)
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
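
# Usage sketch (illustrative; dimensions are hypothetical). predict initializes both the
# recurrent state h and the candidate (cell) state c with zeros:
#
#   lstm = LSTM(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   predictions = lstm.predict(test_input, time_steps=10, dim_recurrent=32)
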
class GRU(keras.layers.Layer):
"""
Gated Recurrent Unit (GRU)
Class which allows to create an instance of a GRU using recurrent state
Attributes:
fc_x2r : Dense layer processing the input to an intermediate recurrent space.
fc_h2r : Dense layer processing the input to the recurrent space.
fc_x2z : Dense layer processing the recurrent state to an intermediate recurrent space.
fc_h2z : Dense layer processing the recurrent state to the recurrent space.
fc_x2h : Dense layer processing the output to an intermediate recurrent space.
fc_h2h : Dense layer processing the output to the recurrent space.
fc_h2y : Dense layer processing the output to the recurrent space.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(GRU, self).__init__()
self.fc_x2r = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_h2r = Dense(dim_recurrent, input_shape=(dim_recurrent,))
self.fc_x2z = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_h2z = Dense(dim_recurrent, input_shape=(dim_recurrent,))
self.fc_x2h = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_h2h = Dense(dim_recurrent, input_shape=(dim_recurrent,))
self.fc_h2y = Dense(dim_output, input_shape=(dim_recurrent,))
def __call__(self, input_data, state):
"""
Implement the forward pass of the network
:param input_data: Input tensor, e.g. input data at current time
        :param state: List of per-sample recurrent state vectors, usually initialized with zeros.
:return: Output of the network
"""
        # Convert the list of states into a tensor
state = tf.stack(state)
# Reset gate
r = sigmoid(self.fc_x2r(input_data) + self.fc_h2r(state))
# Update gate
z = sigmoid(self.fc_x2z(input_data) + self.fc_h2z(state))
# Intermediate state
hb = tanh(self.fc_x2h(input_data) + self.fc_h2h(r * state))
# Update of the recurrent state
h = z * state + (1 - z) * hb
        return self.fc_h2y(relu(h)), tf.unstack(h)
def predict(self, test_input, time_steps, dim_recurrent):
"""
Make a prediction tensor, given a test input tensor
        :param test_input: Input test tensor of shape (nb_samples, time_steps, dim_input)
        :param time_steps: Number of time steps
        :param dim_recurrent: Recurrent dimension of the network. Used to initialize the recurrent state.
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
        """
        nb_samples = int(test_input.shape[0])
states = list(tf.zeros(shape=[dim_recurrent]) for _ in range(nb_samples))
predictions = []
# Iterate over time
for t in range(time_steps):
# Make prediction at current time
prediction, states = self.__call__(test_input[:, t, :], states)
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
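
# Usage sketch (illustrative). The reset gate r rescales the state inside the candidate
# computation, and the update gate z blends the old state with the candidate hb:
#
#   gru = GRU(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   predictions = gru.predict(test_input, time_steps=10, dim_recurrent=32)
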
class GRU_multisteps(keras.layers.Layer):
"""Multi-steps Recurrent Neural Network class based on GRU architecture
Class which allows to create an instance of a multi-steps recurrent neural network using prediction at previous
time step as well as data at previous time step.
Attributes:
fc_x2r : Dense layer processing the input to the recurrent space. Used in the reset gate.
fc_xprev2r : Dense layer processing the input at previous time step in the recurrent space.
Used in the reset gate.
fc_yprev2r : Dense layer processing the output at previous time step in the recurrent space.
Used in the reset gate.
fc_xprev2r_hb : Dense layer processing the input at previous time step in the recurrent space.
Used in the computation of the intermediate state with the reset gating.
fc_yprev2r_hb : Dense layer processing the output at previous time step in the recurrent space.
Used in the computation of the intermediate state with the reset gating.
fc_x2z : Dense layer processing the input to the recurrent space. Used in the update gate.
fc_xprev2z : Dense layer processing the input at previous time step in the recurrent space.
Used in the update gate.
fc_yprev2z : Dense layer processing the output at previous time step in the recurrent space.
Used in the update gate.
fc_x2hb : Dense layer processing the input to the recurrent space. Used in the computation of
the intermediate state.
fc_xprev2hb : Dense layer processing the input at previous time step in the recurrent space.
Used in the computation of the intermediate state.
fc_yprev2hb : Dense layer processing the output at previous time step in the recurrent space.
Used in the computation of the intermediate state.
        fc_xprev2h : Dense layer processing the input at the previous time step in the recurrent space.
                     Used in the computation of the full updated state.
        fc_yprev2h : Dense layer processing the output at the previous time step in the recurrent space.
                     Used in the computation of the full updated state.
        fc_h2y : Dense layer processing the full updated state into the output space.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(GRU_multisteps, self).__init__()
self.fc_x2r = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xprev2r = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2r = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_xprev2r_hb = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2r_hb = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_x2z = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xprev2z = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2z = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_x2hb = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xprev2hb = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2hb = Dense(dim_recurrent, input_shape=(dim_output,))
        self.fc_xprev2h = Dense(dim_recurrent, input_shape=(dim_input,))
        self.fc_yprev2h = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_h2y = Dense(dim_output, input_shape=(dim_recurrent,))
def __call__(self, input_data, previous_data, previous_target):
"""
Implement the forward pass of the network
:param input_data: Input tensor, e.g. input data at current time
:param previous_data: Tensor of data at previous time step
:param previous_target: Tensor of targets at previous time step
:return: Output of the network
"""
r = sigmoid(self.fc_x2r(input_data) + self.fc_xprev2r(previous_data) + self.fc_yprev2r(previous_target))
z = sigmoid(self.fc_x2z(input_data) + self.fc_xprev2z(previous_data) + self.fc_yprev2z(previous_target))
hb = tanh(self.fc_x2hb(input_data) + self.fc_xprev2hb(r * self.fc_xprev2r_hb(previous_data))
+ self.fc_yprev2hb(r * self.fc_yprev2r_hb(previous_target)))
h = z * (self.fc_yprev2h(previous_target) + self.fc_xprev2h(previous_data)) + (1 - z) * hb
return self.fc_h2y(h)
def predict(self, test_input, test_target, time_steps):
"""
Make a prediction tensor, given a test input tensor
        :param test_input: Input test tensor of shape (nb_samples, time_steps, dim_input)
        :param test_target: Target test tensor of shape (nb_samples, time_steps, dim_output)
        :param time_steps: Number of time steps
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
"""
        # Seed with the true target at t = 0
        predictions = [test_target[:, 0, :]]
# Iterate over time
for t in range(1, time_steps):
if t == 1:
# Make prediction at current time using input data and targets at previous time step
prediction = self.__call__(test_input[:, t, :], test_input[:, t-1, :], test_target[:, t-1, :])
predictions.append(prediction)
else:
prediction = self.__call__(test_input[:, t, :], test_input[:, t - 1, :], predictions[t - 1])
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
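
# Usage sketch (illustrative; data is hypothetical). The rollout is seeded with the true
# target at t = 0; at t = 1 it conditions on the true target, and from t = 2 on it feeds
# its own previous prediction back in:
#
#   gru_ms = GRU_multisteps(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   test_target = tf.random.uniform(shape=(4, 10, 1))
#   predictions = gru_ms.predict(test_input, test_target, time_steps=10)
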
class GRUMultisteps2(keras.layers.Layer):
"""Multi-steps Recurrent Neural Network class based on GRU architecture
Class which allows to create an instance of a multi-steps recurrent neural network using prediction at two
previous time steps as well as data at two previous time steps.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(GRUMultisteps2, self).__init__()
self.fc_x2r_1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xprev2r_1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xpreprev2r_1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2r_1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_ypreprev2r_1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_xprev2r__1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xpreprev2r__1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2r__1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_ypreprev2r__1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_x2z_1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xprev2z_1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xpreprev2z_1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2z_1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_ypreprev2z_1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_x2h_1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xprev2h_1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xpreprev2h_1 = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2h_1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_ypreprev2h_1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_yprev2y_1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_ypreprev2y_1 = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_h2y_1 = Dense(dim_output, input_shape=(dim_recurrent,))
def __call__(self, input_data, previous_data, preprevious_data, previous_target, preprevious_target):
"""
Implement the forward pass of the network
:param input_data: Input tensor, e.g. input data at current time
:param previous_data: Tensor of data at previous time step
:param preprevious_data: Tensor of data at two time steps backward in time
:param previous_target: Tensor of targets at previous time step
        :param preprevious_target: Tensor of targets at two time steps backward in time
:return: Output of the network
"""
r = sigmoid(self.fc_x2r_1(input_data) + self.fc_xprev2r_1(previous_data) + self.fc_xpreprev2r_1(preprevious_data)
+ self.fc_yprev2r_1(previous_target) + self.fc_ypreprev2r_1(preprevious_target))
z = sigmoid(self.fc_x2z_1(input_data) + self.fc_xprev2z_1(previous_data) + self.fc_xpreprev2z_1(preprevious_data)
+ self.fc_yprev2z_1(previous_target) + self.fc_ypreprev2z_1(preprevious_target))
hb = tanh(self.fc_x2h_1(input_data) + self.fc_xprev2h_1(r * self.fc_xprev2r__1(previous_data))
+ self.fc_xpreprev2h_1(r * self.fc_xpreprev2r__1(preprevious_data))
+ self.fc_yprev2h_1(r * self.fc_yprev2r__1(previous_target))
+ self.fc_ypreprev2h_1(r * self.fc_ypreprev2r__1(preprevious_target)))
h = z * (self.fc_yprev2y_1(previous_target) + self.fc_ypreprev2y_1(preprevious_target)) + (1 - z) * hb
return self.fc_h2y_1(relu(h))
def predict(self, test_input, test_target, time_steps):
"""
Make a prediction tensor, given a test input tensor
        :param test_input: Input test tensor of shape (nb_samples, time_steps, dim_input)
        :param test_target: Target test tensor of shape (nb_samples, time_steps, dim_output)
        :param time_steps: Number of time steps
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
        """
        # Seed the first two predictions with the true targets
        predictions = [test_target[:, 0, :], test_target[:, 1, :]]
# Iterate over time
for t in range(2, time_steps):
            # Make the prediction at the current time using the data and targets at the two previous time steps
prediction = self.__call__(test_input[:, t, :], test_input[:, t-1, :], test_input[:, t-2, :],
test_target[:, t-1, :], test_target[:, t-2, :])
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
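
# Usage sketch (illustrative). The first two predictions are seeded with the true targets,
# and every later step conditions on the true targets at t-1 and t-2 (teacher forcing),
# not on the model's own predictions:
#
#   gru_ms2 = GRUMultisteps2(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   test_target = tf.random.uniform(shape=(4, 10, 1))
#   predictions = gru_ms2.predict(test_input, test_target, time_steps=10)
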
class GatedRNNMultistep(keras.layers.Layer):
"""Gated recurrent Neural Network class
Class which allows to create an instance of a gated recurrent neural network with hidden state.
Attributes:
fc_x2h : Dense layer processing the input to the recurrent space.
fc_h2h : Dense layer processing the recurrent state to the recurrent space.
fc_x2z : Dense layer processing the input to the recurrent space. Used in the computation of the gate.
fc_h2z : Dense layer processing the recurrent state to the recurrent space. Used in the computation of the gate.
fc_h2y : Dense layer processing the recurrent state to the output state.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(GatedRNNMultistep, self).__init__()
self.fc_x2hb = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xprev2hb = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2hb = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_xprev2h = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2h = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_x2z = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_xprev2z = Dense(dim_recurrent, input_shape=(dim_input,))
self.fc_yprev2z = Dense(dim_recurrent, input_shape=(dim_output,))
self.fc_h2y = Dense(dim_output, input_shape=(dim_recurrent,))
def __call__(self, input_data, previous_data, previous_target):
"""
Implement the forward pass of the network
:param input_data: Input tensor, e.g. input data at current time
:param previous_data: Tensor of data at previous time step
:param previous_target: Tensor of targets at previous time step
:return: Output of the network
"""
# Computation of the gate
z = sigmoid(self.fc_x2z(input_data) + self.fc_xprev2z(previous_data) + self.fc_yprev2z(previous_target))
# Intermediate state
hb = relu(self.fc_x2hb(input_data) + self.fc_xprev2hb(previous_data) + self.fc_yprev2hb(previous_target))
# Update of recurrent state with gating
h = z * (self.fc_xprev2h(previous_data) + self.fc_yprev2h(previous_target)) + (1 - z) * hb
return self.fc_h2y(h)
def predict(self, test_input, test_target, time_steps):
"""
Make a prediction tensor, given a test input tensor
        :param test_input: Input test tensor of shape (nb_samples, time_steps, dim_input)
        :param test_target: Target test tensor of shape (nb_samples, time_steps, dim_output)
        :param time_steps: Number of time steps
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
"""
        # Seed with the true target at t = 0
        predictions = [test_target[:, 0, :]]
# Iterate over time
for t in range(1, time_steps):
if t == 1:
# Make prediction at current time using input data and targets at previous time step
prediction = self.__call__(test_input[:, t, :], test_input[:, t - 1, :], test_target[:, t - 1, :])
predictions.append(prediction)
else:
prediction = self.__call__(test_input[:, t, :], test_input[:, t - 1, :], predictions[t - 1])
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
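
# Usage sketch (illustrative; the dimensions and random data are hypothetical). As in
# GRU_multisteps, the rollout is seeded with the true target at t = 0 and then feeds its
# own predictions back in from t = 2 on:
#
#   grnn_ms = GatedRNNMultistep(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   test_target = tf.random.uniform(shape=(4, 10, 1))
#   predictions = grnn_ms.predict(test_input, test_target, time_steps=10)
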
class MLP_1(keras.layers.Layer):
"""Recursive Neural Network
Class which allows to create an instance of a recursive neural network.
Attributes:
fc_1 : Dense layer processing the input to an intermediate state in the hidden space.
fc_2 : Dense layer processing the intermediate state in the hidden space to the output.
"""
def __init__(self, dim_input, dim_hidden, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
        :param dim_hidden: Dimension of the hidden space
:param dim_output: Dimension of the output tensor
"""
        super(MLP_1, self).__init__()
        # fc_1 consumes the concatenation of input_data, previous_target and the broadcast time index
        self.fc_1 = Dense(dim_hidden, input_shape=(2 * dim_input + dim_output,))
self.fc_2 = Dense(dim_output, input_shape=(dim_hidden,))
def __call__(self, input_data, previous_data, previous_target, t):
"""
Implement the forward pass of the network
        :param input_data: Input tensor, e.g. input data at the current time
        :param previous_data: Tensor of data at the previous time step (unused in this variant)
        :param previous_target: Tensor of targets at the previous time step
        :param t: Current time index, broadcast and concatenated to the input
        :return: Output of the network
"""
t_tensor = t * tf.ones(input_data.shape)
x = tf.reshape(tf.concat([input_data, previous_target, t_tensor], axis=1), shape=[input_data.shape[0], -1])
h = sigmoid(self.fc_1(x))
y = sigmoid(self.fc_2(h))
return y
def predict(self, test_input, test_target, time_steps, time_step_start):
"""
Make a prediction tensor, given a test input tensor
:param test_input: Input test tensor of shape (n_samples, len(time_steps), dim_input)
:param test_target: Target test tensor of shape (n_samples, len(time_steps), dim_output)
        :param time_steps: Number of time steps
        :param time_step_start: Starting time index of the prediction
        :return: Tensor of predictions of shape (nb_samples, time_steps - time_step_start, dim_output)
"""
predictions = [test_target[:, time_step_start, :]]
# Iterate over time
for t in range(time_step_start+1, time_steps):
# Use predictions at previous time step to make a new prediction
prediction = self.__call__(test_input[:, t, :], test_input[:, t - 1, :], predictions[t - time_step_start - 1], t)
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
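
# Usage sketch (illustrative; values are hypothetical). The rollout is seeded with the true
# target at time_step_start and then feeds its own predictions back in, so the returned
# tensor covers time_steps - time_step_start steps:
#
#   mlp1 = MLP_1(dim_input=1, dim_hidden=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   test_target = tf.random.uniform(shape=(4, 10, 1))
#   predictions = mlp1.predict(test_input, test_target, time_steps=10, time_step_start=0)
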
class MLP_2(keras.layers.Layer):
"""Recursive Neural Network
Class which allows to create an instance of a recursive neural network.
Attributes:
fc_1 : Dense layer processing the input to an intermediate state in the hidden space.
fc_2 : Dense layer processing the intermediate state in the hidden space to the output.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(MLP_2, self).__init__()
self.fc_1 = Dense(dim_recurrent, input_shape=(dim_input + 3*dim_output,))
self.fc_2 = Dense(dim_output, input_shape=(dim_recurrent,))
def __call__(self, input_data, previous_target_1, previous_target_2, previous_target_3, t):
"""
Implement the forward pass of the network
        :param input_data: Input tensor, e.g. input data at the current time
        :param previous_target_1: Tensor of targets one time step back
        :param previous_target_2: Tensor of targets two time steps back
        :param previous_target_3: Tensor of targets three time steps back
        :param t: Current time index (only used by the commented-out variant below)
        :return: Output of the network
"""
t_tensor = t * tf.ones(input_data.shape)
#x = tf.reshape(tf.concat([input_data, previous_target_1, previous_target_2, previous_target_3, t_tensor],
# axis=1), shape=[input_data.shape[0], -1])
x = tf.reshape(tf.concat([input_data, previous_target_1, previous_target_2, previous_target_3],
axis=1), shape=[input_data.shape[0], -1])
h = sigmoid(self.fc_1(x))
y = sigmoid(self.fc_2(h))
return y
def predict(self, test_input, test_target, time_steps, time_step_start):
"""
Make a prediction tensor, given a test input tensor
:param test_input: Input test tensor of shape (n_samples, len(time_steps), dim_input)
:param test_target: Target test tensor of shape (n_samples, len(time_steps), dim_output)
        :param time_steps: Number of time steps
        :param time_step_start: Starting time index of the prediction (clamped to at least 2)
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
"""
if time_step_start <= 1:
time_step_start = 2
predictions = []
for i in range(time_step_start+1):
predictions.append(test_target[:, i, :])
# Iterate over time
for t in range(time_step_start+1, time_steps):
            # Use the predictions at the three previous time steps (the predictions list is indexed by time)
            prediction = self.__call__(test_input[:, t, :], predictions[t - 1], predictions[t - 2],
                                       predictions[t - 3], t)
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
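
# Usage sketch (illustrative). time_step_start is clamped to at least 2 so that three
# previous predictions are available for the first autoregressive step:
#
#   mlp2 = MLP_2(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   test_target = tf.random.uniform(shape=(4, 10, 1))
#   predictions = mlp2.predict(test_input, test_target, time_steps=10, time_step_start=2)
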
class MLP_3(keras.layers.Layer):
"""Recursive Neural Network
Class which allows to create an instance of a recursive neural network.
Attributes:
fc_1 : Dense layer processing the input to an intermediate state in the hidden space.
fc_2 : Dense layer processing the intermediate state in the hidden space to the output.
"""
def __init__(self, dim_input, dim_recurrent, dim_output):
"""
Initialize the attributes of the class
:param dim_input: Dimension of the input tensor
:param dim_recurrent: Dimension of the recurrent space
:param dim_output: Dimension of the output tensor
"""
super(MLP_3, self).__init__()
self.fc_1 = Dense(dim_recurrent, input_shape=(2*dim_input + 2*dim_output,))
self.fc_2 = Dense(dim_output, input_shape=(dim_recurrent,))
def __call__(self, input_data, previous_data, previous_target_1, previous_target_2, t):
"""
Implement the forward pass of the network
        :param input_data: Input tensor, e.g. input data at the current time
        :param previous_data: Tensor of data at the previous time step
        :param previous_target_1: Tensor of targets one time step back
        :param previous_target_2: Tensor of targets two time steps back
        :param t: Current time index (only used by the commented-out variant below)
        :return: Output of the network
"""
t_tensor = t * tf.ones(input_data.shape)
        # x = tf.reshape(tf.concat([input_data, previous_data, previous_target_1, previous_target_2, t_tensor],
        #                axis=1), shape=[input_data.shape[0], -1])
x = tf.reshape(tf.concat([input_data, previous_data, previous_target_1, previous_target_2],
axis=1), shape=[input_data.shape[0], -1])
h = sigmoid(self.fc_1(x))
y = sigmoid(self.fc_2(h))
return y
def predict(self, test_input, test_target, time_steps, time_step_start):
"""
Make a prediction tensor, given a test input tensor
:param test_input: Input test tensor of shape (n_samples, len(time_steps), dim_input)
:param test_target: Target test tensor of shape (n_samples, len(time_steps), dim_output)
        :param time_steps: Number of time steps
        :param time_step_start: Starting time index of the prediction (clamped to at least 1)
        :return: Tensor of predictions of shape (nb_samples, time_steps, dim_output)
"""
if time_step_start <= 0:
time_step_start = 1
predictions = []
for i in range(time_step_start+1):
predictions.append(test_target[:, i, :])
# Iterate over time
for t in range(time_step_start+1, time_steps):
            # Use the predictions at the two previous time steps (the predictions list is indexed by time)
            prediction = self.__call__(test_input[:, t, :], test_input[:, t - 1, :], predictions[t - 1],
                                       predictions[t - 2], t)
predictions.append(prediction)
predictions = tf.stack(predictions, axis=1)
return predictions
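
# Usage sketch (illustrative). time_step_start is clamped to at least 1 so that two
# previous predictions are available for the first autoregressive step:
#
#   mlp3 = MLP_3(dim_input=1, dim_recurrent=32, dim_output=1)
#   test_input = tf.random.uniform(shape=(4, 10, 1))
#   test_target = tf.random.uniform(shape=(4, 10, 1))
#   predictions = mlp3.predict(test_input, test_target, time_steps=10, time_step_start=1)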
