Skip to content

Commit

Permalink
Adding GaussianReluRBM training and moving it into the gaussian module.
Browse files Browse the repository at this point in the history
  • Loading branch information
gugarosa committed May 27, 2020
1 parent c82bb6d commit e60ff14
Show file tree
Hide file tree
Showing 5 changed files with 90 additions and 111 deletions.
14 changes: 6 additions & 8 deletions examples/applications/gaussian_relu_rbm_training.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import torch
import torchvision

from learnergy.models.gauss_relu_rbm import GReluRBM
from learnergy.models import GaussianReluRBM

# Creating training and testing dataset
train = torchvision.datasets.MNIST(
Expand All @@ -13,14 +13,12 @@
torchvision.transforms.ToTensor()
]))

print("Max pixel value:", train.data.max())
# Creating a GaussianReluRBM
model = GaussianReluRBM(n_visible=784, n_hidden=256, steps=1, learning_rate=0.001,
momentum=0.9, decay=0, temperature=1, use_gpu=False)

# Creating a Gaussian-ReLU RBM
model = GReluRBM(n_visible=784, n_hidden=256, steps=1, learning_rate=0.001,
momentum=0.9, decay=0, temperature=1, use_gpu=False)

# Training a GaussianRBM
mse, pl = model.fit(train, batch_size=100, epochs=5)
# Training a GaussianReluRBM
mse, pl = model.fit(train, batch_size=128, epochs=5)

# Reconstructing test set
rec_mse, v = model.reconstruct(test)
Expand Down
2 changes: 1 addition & 1 deletion learnergy/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from learnergy.models.discriminative_rbm import DiscriminativeRBM, HybridDiscriminativeRBM
from learnergy.models.dropout_rbm import DropoutRBM
from learnergy.models.e_dropout_rbm import EDropoutRBM
from learnergy.models.gaussian_rbm import GaussianRBM, VarianceGaussianRBM
from learnergy.models.gaussian_rbm import GaussianRBM, GaussianReluRBM, VarianceGaussianRBM
from learnergy.models.sigmoid_rbm import SigmoidRBM
from learnergy.models.dbn import DBN
from learnergy.models.residual_dbn import ResidualDBN
102 changes: 0 additions & 102 deletions learnergy/models/gauss_relu_rbm.py

This file was deleted.

67 changes: 67 additions & 0 deletions learnergy/models/gaussian_rbm.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,73 @@ def visible_sampling(self, h, scale=False):
return states, activations


class GaussianReluRBM(GaussianRBM):
    """Gaussian-ReLU Restricted Boltzmann Machine for raw (integer-valued) pixel data.

    Extends GaussianRBM by replacing the sigmoid hidden activation with a
    rectified-linear one, so the image covariance can be modeled by a latent
    ReLU layer.

    References:
        G. Hinton. A practical guide to training restricted Boltzmann machines.
        Neural networks: Tricks of the trade (2012).
    """

    def __init__(self, n_visible=128, n_hidden=128, steps=1, learning_rate=0.001,
                 momentum=0, decay=0, temperature=1, use_gpu=False):
        """Initialization method.

        Args:
            n_visible (int): Amount of visible units.
            n_hidden (int): Amount of hidden units.
            steps (int): Number of Gibbs' sampling steps.
            learning_rate (float): Learning rate.
            momentum (float): Momentum parameter.
            decay (float): Weight decay used for penalization.
            temperature (float): Temperature factor.
            use_gpu (boolean): Whether GPU should be used or not.
        """

        logger.info('Overriding class: GaussianRBM -> GaussianReluRBM.')

        # Everything but the hidden activation is inherited from the parent
        super(GaussianReluRBM, self).__init__(n_visible, n_hidden, steps, learning_rate,
                                              momentum, decay, temperature, use_gpu)

        logger.info('Class overrided.')

    def hidden_sampling(self, v, scale=False):
        """Performs the hidden layer sampling, i.e., P(h|v).

        Args:
            v (torch.Tensor): A tensor incoming from the visible layer.
            scale (bool): A boolean to decide whether temperature should be used or not.

        Returns:
            The probabilities and states of the hidden layer sampling.
        """

        # Pre-activation of every hidden unit
        pre_activation = F.linear(v, self.W.t(), self.b)

        # Optionally soften the pre-activation by the temperature factor
        if scale:
            pre_activation = torch.div(pre_activation, self.T)

        # Rectified probabilities; the states are taken to be the
        # probabilities themselves (deterministic ReLU units)
        probs = F.relu(pre_activation)

        return probs, probs


class VarianceGaussianRBM(RBM):
"""A VarianceGaussianRBM class provides the basic implementation for Gaussian-Bernoulli Restricted Boltzmann Machines (without standardization).
Expand Down
16 changes: 16 additions & 0 deletions tests/learnergy/models/test_gaussian_rbm.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,22 @@ def test_gaussian_rbm_visible_sampling():
assert states.size(1) == 128


def test_gaussian_relu_rbm_hidden_sampling():
    """Checks hidden-sampling output shapes with and without temperature scaling."""

    model = gaussian_rbm.GaussianReluRBM()

    visible = torch.ones(1, 128)

    # Same shape contract must hold in both sampling modes
    for use_scale in (True, False):
        probs, states = model.hidden_sampling(visible, scale=use_scale)

        assert probs.size(1) == 128
        assert states.size(1) == 128


def test_variance_gaussian_rbm_sigma():
new_variance_gaussian_rbm = gaussian_rbm.VarianceGaussianRBM()

Expand Down

0 comments on commit e60ff14

Please sign in to comment.