Code for Variational AutoEncoder Encoder
```python
# Imports assume standalone Keras (the book's setup); with TF 2.x the same
# names are available under tensorflow.keras.
from keras.layers import (Input, Conv2D, BatchNormalization, LeakyReLU,
                          Dropout, Flatten, Dense, Lambda)
from keras.models import Model
from keras import backend as K

encoder_input = Input(shape=self.input_dim, name='encoder_input')
x = encoder_input

# Stack the convolutional layers, optionally with batch norm and dropout.
for i in range(self.n_layers_encoder):
    conv_layer = Conv2D(
        filters=self.encoder_conv_filters[i],
        kernel_size=self.encoder_conv_kernel_size[i],
        strides=self.encoder_conv_strides[i],
        padding='same',
        name='encoder_conv_' + str(i)
    )
    x = conv_layer(x)

    if self.use_batch_norm:
        x = BatchNormalization()(x)

    x = LeakyReLU()(x)

    if self.use_dropout:
        x = Dropout(rate=0.25)(x)

# Remember the pre-flatten shape so the decoder can mirror it.
shape_before_flattening = K.int_shape(x)[1:]

x = Flatten()(x)

# <1> Instead of connecting the flattened layer directly to the 2D latent
# space, we connect it to layers mu and log_var.
self.mu = Dense(self.z_dim, name='mu')(x)
self.log_var = Dense(self.z_dim, name='log_var')(x)

# <2> The Keras model that outputs the values of mu and log_var
# for a given input image.
self.encoder_mu_log_var = Model(encoder_input, (self.mu, self.log_var))

# Reparameterization trick: z = mu + exp(log_var / 2) * epsilon,
# with epsilon drawn from a standard normal distribution.
def sampling(args):
    mu, log_var = args
    epsilon = K.random_normal(shape=K.shape(mu), mean=0., stddev=1.)
    return mu + K.exp(log_var / 2) * epsilon

# This Lambda layer samples a point z in the latent space from the
# normal distribution defined by the parameters mu and log_var.
encoder_output = Lambda(sampling, name='encoder_output')([self.mu, self.log_var])

self.encoder = Model(encoder_input, encoder_output)
```
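The snippet above is a fragment of a larger VAE class (it reads hyperparameters off `self`). As a minimal, self-contained sketch of the same encoder, the version below binds concrete values instead; the specific choices here (28x28x1 inputs, four conv layers, a 2-dimensional latent space) are illustrative assumptions, not part of the original snippet.

```python
import numpy as np
from keras.layers import Input, Conv2D, LeakyReLU, Flatten, Dense, Lambda
from keras.models import Model
from keras import backend as K

Z_DIM = 2  # assumed 2-D latent space for illustration

encoder_input = Input(shape=(28, 28, 1), name='encoder_input')
x = encoder_input
# Illustrative filter/stride choices; the strides of 2 downsample the image.
for i, (f, s) in enumerate(zip([32, 64, 64, 64], [1, 2, 2, 1])):
    x = Conv2D(f, kernel_size=3, strides=s, padding='same',
               name='encoder_conv_' + str(i))(x)
    x = LeakyReLU()(x)
x = Flatten()(x)

mu = Dense(Z_DIM, name='mu')(x)
log_var = Dense(Z_DIM, name='log_var')(x)

def sampling(args):
    mu, log_var = args
    epsilon = K.random_normal(shape=K.shape(mu), mean=0., stddev=1.)
    return mu + K.exp(log_var / 2) * epsilon

encoder_output = Lambda(sampling, name='encoder_output')([mu, log_var])
encoder = Model(encoder_input, encoder_output)

# Each image is encoded to a *sampled* point in latent space, so two calls
# on the same batch generally return different latent vectors.
batch = np.random.rand(8, 28, 28, 1).astype('float32')
print(encoder.predict(batch).shape)  # (8, 2)
```

Because the Lambda layer draws a fresh epsilon on every forward pass, encoding is stochastic; the KL-divergence term of the VAE loss (not shown here) is what keeps the distributions defined by mu and log_var close to a standard normal.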
Updated 2020-10-17
Tags: Data Science