我在保存和加载由自定义图层组成的模型时遇到问题。
tensorflow 官方文档中的 VAE 示例中有此类模型的一个很好的示例:
其实我保存的时候并没有报错:
vae.save("vae211.keras")
但是当我加载它时:
vae_ = tf.keras.models.load_model("vae211.keras")
我有以下错误:
我尝试添加 get_config() 方法,但没有任何效果。完整的报错信息为:TypeError: Cannot find class 'VariationalAutoEncoder'. 请确保自定义类使用 @keras.saving.register_keras_serializable() 装饰。完整的对象配置为:{'module': None, 'class_name': 'VariationalAutoEncoder', 'config': {'name': 'autoencoder', 'trainable': True, 'dtype': 'float32', 'img_d': 784, 'hidden_d': 128, 'latent_d': 32}, 'registered_name': 'Custom>VariationalAutoEncoder', 'build_config': {'input_shape': [64, 784]}}
如果您能提供帮助,请找到下面的完整代码:
import tensorflow as tf
# Hyperparameters for the VAE (MNIST: 28 * 28 = 784 pixels per image).
img_d = 784     # flattened input/output image dimension
hidden_d = 128  # width of the intermediate dense layers
latent_d = 32   # dimensionality of the latent space
epochs = 2      # number of training epochs
"""
Dataset
"""
# Load MNIST training images, flatten each 28x28 image to a 784-vector,
# and scale pixel values to [0, 1].
(x_tra, _), _ = tf.keras.datasets.mnist.load_data()
x_tra = x_tra.reshape(60000, 784).astype("float32") / 255
# Build a shuffled, batched tf.data pipeline (batch size 64).
tra_ds = tf.data.Dataset.from_tensor_slices(x_tra)
tra_ds = tra_ds.shuffle(buffer_size=1024).batch(64)
"""
Model
"""
@tf.keras.saving.register_keras_serializable()
class Sampling(tf.keras.layers.Layer):
    """Reparameterization trick: sample z = mean + exp(log_var / 2) * eps."""

    def call(self, z_mean, z_log_var):
        # Draw Gaussian noise with the same (batch, latent) shape as the
        # inputs so the sampling broadcasts element-wise.
        shape = tf.shape(z_mean)
        epsilon = tf.random.normal(shape=(shape[0], shape[1]))
        return z_mean + tf.exp(0.5 * z_log_var) * epsilon
@tf.keras.saving.register_keras_serializable()
class Encoder(tf.keras.layers.Layer):
    """Maps an input image to (z_mean, z_log_var, z) in latent space.

    Args:
        latent_d: dimensionality of the latent space.
        hidden_d: width of the intermediate dense layer.
        name: layer name.
    """

    def __init__(self, latent_d=32, hidden_d=64, name="encoder", **kwargs):
        super().__init__(name=name, **kwargs)
        # Keep the constructor args so get_config() can round-trip them.
        # Without this, load_model() rebuilds the layer with the defaults
        # (latent_d=32, hidden_d=64) and the saved weights no longer fit.
        self.latent_d = latent_d
        self.hidden_d = hidden_d
        self.dense1 = tf.keras.layers.Dense(hidden_d, activation="relu")
        self.dense_mean = tf.keras.layers.Dense(latent_d)
        self.dense_log_var = tf.keras.layers.Dense(latent_d)
        self.sampling = Sampling()

    def call(self, inputs):
        x = self.dense1(inputs)
        z_mean = self.dense_mean(x)
        z_log_var = self.dense_log_var(x)
        z = self.sampling(z_mean, z_log_var)
        return z_mean, z_log_var, z

    def get_config(self):
        # Serialize constructor args so the layer can be re-instantiated
        # by tf.keras.models.load_model().
        config = super().get_config()
        config.update({"latent_d": self.latent_d, "hidden_d": self.hidden_d})
        return config
@tf.keras.saving.register_keras_serializable()
class Decoder(tf.keras.layers.Layer):
    """Maps a latent vector back to a reconstructed image in [0, 1].

    Args:
        img_d: dimensionality of the reconstructed (flattened) image.
        hidden_d: width of the intermediate dense layer.
        name: layer name.
    """

    def __init__(self, img_d, hidden_d=64, name="decoder", **kwargs):
        super().__init__(name=name, **kwargs)
        # img_d is a required positional arg: without get_config() below,
        # deserialization would fail because load_model() could not supply it.
        self.img_d = img_d
        self.hidden_d = hidden_d
        self.dense1 = tf.keras.layers.Dense(hidden_d, activation="relu")
        self.dense2 = tf.keras.layers.Dense(img_d, activation="sigmoid")

    def call(self, inputs):
        x = self.dense1(inputs)
        return self.dense2(x)

    def get_config(self):
        # Serialize constructor args so the layer can be re-instantiated
        # by tf.keras.models.load_model().
        config = super().get_config()
        config.update({"img_d": self.img_d, "hidden_d": self.hidden_d})
        return config
@tf.keras.saving.register_keras_serializable()
class VariationalAutoEncoder(tf.keras.Model):
    """Encoder + Decoder VAE; adds the KL divergence via add_loss().

    Args:
        img_d: dimensionality of the (flattened) input/output image.
        hidden_d: width of the intermediate dense layers.
        latent_d: dimensionality of the latent space.
        name: model name.
    """

    def __init__(self, img_d, hidden_d=64, latent_d=32,
                 name="autoencoder", **kwargs):
        super().__init__(name=name, **kwargs)
        # Keep all constructor args so get_config() can round-trip them;
        # the original only kept img_d, so load_model() could not rebuild
        # the model with the trained hidden/latent sizes.
        self.img_d = img_d
        self.hidden_d = hidden_d
        self.latent_d = latent_d
        self.encoder = Encoder(latent_d=latent_d, hidden_d=hidden_d)
        self.decoder = Decoder(img_d=img_d, hidden_d=hidden_d)

    def call(self, inputs):
        z_mean, z_log_var, z = self.encoder(inputs)
        reconstructed = self.decoder(z)
        # KL divergence between q(z|x) and the unit-Gaussian prior,
        # averaged over batch and latent dimensions.
        kl_loss = -0.5 * tf.reduce_mean(
            z_log_var - tf.square(z_mean) - tf.exp(z_log_var) + 1
        )
        self.add_loss(kl_loss)
        return reconstructed

    def get_config(self):
        # Serialize constructor args so tf.keras.models.load_model() can
        # call __init__ with the saved sizes.
        config = super().get_config()
        config.update({
            "img_d": self.img_d,
            "hidden_d": self.hidden_d,
            "latent_d": self.latent_d,
        })
        return config
# Instantiate the VAE and the training utilities.
vae = VariationalAutoEncoder(
    img_d=img_d, hidden_d=hidden_d, latent_d=latent_d
)
optimizer = tf.keras.optimizers.Adam(1e-3)
mse_loss_fn = tf.keras.losses.MeanSquaredError()
"""
Fitting
"""
# Custom training loop: reconstruction (MSE) loss plus the KL term that
# the model registers via add_loss() inside call().
loss_metric = tf.keras.metrics.Mean()
for epoch in range(epochs):
    print(f"Start of {epoch = }")
    for step, x in enumerate(tra_ds):
        with tf.GradientTape() as tape:
            reconstructed = vae(x)
            # Both loss terms must be computed inside the tape context so
            # the gradients are recorded.
            loss = mse_loss_fn(x, reconstructed)
            loss += sum(vae.losses)
        grads = tape.gradient(loss, vae.trainable_weights)
        optimizer.apply_gradients(zip(grads, vae.trainable_weights))
        loss_metric(loss)
        if step % 100 == 0:
            print(f"{step=}: mean loss = {loss_metric.result():.4f}")
"""
Save & Load
"""
# NOTE(review): load_model() requires the custom classes to be defined and
# registered (@tf.keras.saving.register_keras_serializable) in the loading
# process, and each custom layer/model to implement get_config() for its
# constructor args — presumably the cause of the reported "Cannot find
# class 'VariationalAutoEncoder'" error; verify in the loading environment.
vae.save("vae211.keras")
vae_ = tf.keras.models.load_model("vae211.keras") # <===== Error
decoder_ = vae.decoder
...