In the NeuralSpectralKernel, the names of the hidden-layer parameters do not depend on the component index q (there are Q components), while the code that would make them component-specific has been commented out.
Why is that? Is it a deliberate way for the components to share hidden-layer weights, and is sharing better?
def create_nn_params(self, prefix, hidden_sizes, final_size):
    """Create the neural-network parameters (weights and biases) for the
    Q mixture components and attach them to ``self`` via ``setattr``.

    The hidden-layer parameters are shared across all Q components: their
    attribute names omit the component index ``q``, and the ``hasattr``
    guard ensures each hidden layer is created only once (on the first
    pass, q == 0).  Only the final projection layer gets its own
    parameters per component (its names include ``q``).

    :param prefix: string prepended to every parameter attribute name.
    :param hidden_sizes: iterable of hidden-layer widths, one per layer.
    :param final_size: output dimension of the per-component final layer.
    """
    for q in range(self.Q):
        input_dim = self.input_dim
        for level, hidden_size in enumerate(hidden_sizes):
            # Shared across components: no q in the name, so every q>0
            # iteration finds the attribute already present and skips
            # creation.  (An earlier per-component variant that included
            # q in the names was removed.)
            name_W = '{prefix}W{level}'.format(prefix=prefix, level=level)
            name_b = '{prefix}b{level}'.format(prefix=prefix, level=level)
            if not hasattr(self, name_W):
                params = _create_params(input_dim, hidden_size)
                setattr(self, name_W, params[0])
                setattr(self, name_b, params[1])
            # input dim for next layer
            input_dim = hidden_size
        # Per-component final layer.
        # BUG FIX: the original called undefined `create_params`; the
        # helper actually used above is `_create_params`.
        params = _create_params(input_dim, final_size)
        # NOTE(review): the bias name uses '{q}_b_final' while the weight
        # uses '{q}W_final' (inconsistent underscore) — kept byte-identical
        # because other code presumably looks these attributes up by name.
        setattr(self, '{prefix}{q}W_final'.format(prefix=prefix, q=q), params[0])
        setattr(self, '{prefix}{q}_b_final'.format(prefix=prefix, q=q), params[1])