From 63ae72b83eed53dfec5d014c34e1c21352ef332c Mon Sep 17 00:00:00 2001
From: vince62s
Date: Wed, 18 Dec 2024 15:02:11 +0100
Subject: [PATCH] fix encoder layer and embeddings

---
 python/ctranslate2/converters/eole_ct2.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/python/ctranslate2/converters/eole_ct2.py b/python/ctranslate2/converters/eole_ct2.py
index 9b4c2fb35..49a3a47c7 100644
--- a/python/ctranslate2/converters/eole_ct2.py
+++ b/python/ctranslate2/converters/eole_ct2.py
@@ -205,7 +205,7 @@ def set_transformer_encoder(spec, variables):
     set_input_layers(spec, variables, "src_emb")
     set_layer_norm(spec.layer_norm, variables, "encoder.layer_norm")
     for i, layer in enumerate(spec.layer):
-        set_transformer_encoder_layer(layer, variables, "encoder.transformer.%d" % i)
+        set_transformer_encoder_layer(layer, variables, "encoder.transformer_layers.%d" % i)


 def set_transformer_decoder(spec, variables, with_encoder_attention=True):
@@ -232,7 +232,11 @@ def set_input_layers(spec, variables, scope):
     else:
         spec.scale_embeddings = False

-    set_embeddings(spec.embeddings, variables, "%s.embeddings" % scope)
+    embeddings_specs = spec.embeddings
+    # encoder embeddings are stored in a list (onmt/ct2 legacy with features)
+    if isinstance(embeddings_specs, list):
+        embeddings_specs = embeddings_specs[0]
+    set_embeddings(embeddings_specs, variables, "%s.embeddings" % scope)


 def set_transformer_encoder_layer(spec, variables, scope):
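
For context, below is a minimal standalone sketch of the normalization the
second hunk performs, assuming only that a spec's embeddings attribute is
either a bare embeddings spec or a one-element list (the onmt/ct2 legacy
layout used when source-side feature embeddings existed). The
resolve_embeddings_spec helper is hypothetical, for illustration only, and
is not part of the converter.

    def resolve_embeddings_spec(embeddings_spec):
        """Return a single embeddings spec.

        Encoder embeddings may arrive wrapped in a one-element list (an
        onmt/ct2 legacy from feature embeddings); decoder embeddings are
        stored directly. Both layouts resolve to the same bare spec.
        """
        if isinstance(embeddings_spec, list):
            # Legacy layout: take the word embeddings, the first entry.
            return embeddings_spec[0]
        return embeddings_spec


    # Usage: both layouts yield the same object.
    word_emb = object()
    assert resolve_embeddings_spec([word_emb]) is word_emb  # encoder (list)
    assert resolve_embeddings_spec(word_emb) is word_emb    # decoder (bare)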