mixae.R
############ Mixture of Autoencoders for clustering ##########
### Author: Chrysanthi Ainali
### Date: 14.04.2018 (last change: 03.08.2018)
### Version: 1.1
### Source: developed following https://arxiv.org/pdf/1712.07788.pdf
### The script uses Keras with the TensorFlow backend.
### The code is adapted from the Keras variational autoencoder example.
### Network parameters are updated via backpropagation to minimize the reconstruction error.
##########################################################
library(keras)
K <- keras::backend()
# Parameters --------------------------------------------------------------
batch_size <- 100L
original_dim <- 800L
latent_dim <- 4L
intermediate_dim <- 256L
epochs <- 50L
epsilon_std <- 1.0
# Model definition --------------------------------------------------------
x <- layer_input(shape = c(original_dim))
h <- layer_dense(x, intermediate_dim, activation = "tanh")
z_mean <- layer_dense(h, latent_dim)
z_log_var <- layer_dense(h, latent_dim)
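# z_mean and z_log_var are linear dense layers (no activation) that
# parameterize a diagonal Gaussian posterior over the latent_dim-dimensional code.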
sampling <- function(arg){
  z_mean <- arg[, 1:(latent_dim)]
  z_log_var <- arg[, (latent_dim + 1):(2 * latent_dim)]
  epsilon <- k_random_normal(
    shape = c(k_shape(z_mean)[[1]]),
    mean = 0.,
    stddev = epsilon_std
  )
  z_mean + k_exp(z_log_var/2)*epsilon
}
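# The lambda layer below applies the reparameterization trick:
# z = mu + exp(log_var / 2) * eps with eps ~ N(0, 1), which keeps sampling
# differentiable w.r.t. z_mean and z_log_var. Illustrative check (not part
# of the model): with mu = 0.5, log_var = log(4) (so sd = 2) and eps = 0.1,
# z = 0.5 + 2 * 0.1 = 0.7.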
# note that "output_shape" isn't necessary with the TensorFlow backend
z <- layer_concatenate(list(z_mean, z_log_var)) %>%
layer_lambda(sampling)
# we instantiate these layers separately so as to reuse them later
decoder_h <- layer_dense(units = intermediate_dim, activation = "relu")
decoder_mean <- layer_dense(units = original_dim, activation = "sigmoid")
h_decoded <- decoder_h(z)
x_decoded_mean <- decoder_mean(h_decoded)
# end-to-end autoencoder
mixae <- keras_model(x, x_decoded_mean)
# encoder, from inputs to latent space
encoder <- keras_model(x, z_mean)
# generator, from latent space to reconstructed inputs
decoder_input <- layer_input(shape = latent_dim)
h_decoded_2 <- decoder_h(decoder_input)
x_decoded_mean_2 <- decoder_mean(h_decoded_2)
generator <- keras_model(decoder_input, x_decoded_mean_2)
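# Generation sketch (illustrative, not part of the original script; the
# names n_samples, z_sample and x_generated are assumptions). After
# training, points drawn from the standard-normal latent prior can be
# decoded back into data space:
#   n_samples <- 10L
#   z_sample <- matrix(rnorm(n_samples * latent_dim), ncol = latent_dim)
#   x_generated <- predict(generator, z_sample)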
mixae_loss <- function(x, x_decoded_mean){
  xent_loss <- (original_dim/1.0)*loss_binary_crossentropy(x, x_decoded_mean)
  kl_loss <- -0.5*k_mean(1 + z_log_var - k_square(z_mean) - k_exp(z_log_var), axis = -1L)
  xent_loss + kl_loss
}
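# The kl_loss term is the closed form of KL( N(mu, sigma^2) || N(0, 1) ),
# i.e. -0.5 * (1 + log(sigma^2) - mu^2 - sigma^2), averaged here over the
# latent dimensions. loss_binary_crossentropy averages over the original_dim
# features, so the original_dim factor converts it back to a per-sample sum,
# keeping the reconstruction and KL terms on comparable scales.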
mixae %>% compile(optimizer = "rmsprop", loss = mixae_loss)
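# Training sketch (assumption: x_train is a numeric matrix with original_dim
# columns scaled to [0, 1]; it is not defined in this script). The
# autoencoder is trained to reconstruct its own input:
#   mixae %>% fit(
#     x_train, x_train,
#     shuffle = TRUE,
#     epochs = epochs,
#     batch_size = batch_size,
#     validation_split = 0.1
#   )
# For clustering, embed the data with the encoder and cluster the latent
# codes, e.g. with k-means (the number of centers below is an assumption):
#   latent <- predict(encoder, x_train, batch_size = batch_size)
#   clusters <- kmeans(latent, centers = 4L)$cluster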