fix encoder layer and embeddings
vince62s committed Dec 18, 2024
1 parent 7929f8b commit 63ae72b
Showing 1 changed file with 6 additions and 2 deletions.
python/ctranslate2/converters/eole_ct2.py (8 changes: 6 additions & 2 deletions)
@@ -205,7 +205,7 @@ def set_transformer_encoder(spec, variables):
     set_input_layers(spec, variables, "src_emb")
     set_layer_norm(spec.layer_norm, variables, "encoder.layer_norm")
     for i, layer in enumerate(spec.layer):
-        set_transformer_encoder_layer(layer, variables, "encoder.transformer.%d" % i)
+        set_transformer_encoder_layer(layer, variables, "encoder.transformer_layers.%d" % i)
 
 
 def set_transformer_decoder(spec, variables, with_encoder_attention=True):
@@ -232,7 +232,11 @@ def set_input_layers(spec, variables, scope):
     else:
         spec.scale_embeddings = False
 
-    set_embeddings(spec.embeddings, variables, "%s.embeddings" % scope)
+    embeddings_specs = spec.embeddings
+    # encoder embeddings are stored in a list (onmt/ct2 legacy with features)
+    if isinstance(embeddings_specs, list):
+        embeddings_specs = embeddings_specs[0]
+    set_embeddings(embeddings_specs, variables, "%s.embeddings" % scope)
 
 
 def set_transformer_encoder_layer(spec, variables, scope):
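For context, the first hunk is a pure scope-prefix fix: Eole checkpoints name their encoder blocks "encoder.transformer_layers.<i>", not "encoder.transformer.<i>". Below is a minimal sketch of why the prefix matters, assuming (as is typical for these converters) that the checkpoint is a flat dict keyed by dotted variable names; only the "encoder.transformer_layers.%d" prefix comes from the diff, the rest of the key is illustrative.

    # Hypothetical flat checkpoint dict; the full key is illustrative,
    # only the "encoder.transformer_layers.%d" prefix is from the diff.
    variables = {
        "encoder.transformer_layers.0.self_attn.linear_query.weight": object(),
    }

    scope = "encoder.transformer_layers.%d" % 0
    key = scope + ".self_attn.linear_query.weight"
    # With the old "encoder.transformer.0" prefix this lookup would fail.
    assert key in variables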

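The second hunk handles the legacy layout called out in the new comment: in older OpenNMT-py/CTranslate2 specs with source features, the encoder embeddings attribute is a list of specs, while Eole supplies a single one. A self-contained sketch of the same guard, with a hypothetical helper name:

    def first_embeddings_spec(embeddings):
        # Mirrors the guard above: unwrap the legacy list form (one spec
        # per source feature) and pass a plain spec through unchanged.
        if isinstance(embeddings, list):
            return embeddings[0]
        return embeddings

    assert first_embeddings_spec(["word_emb"]) == "word_emb"  # legacy list form
    assert first_embeddings_spec("word_emb") == "word_emb"    # single spec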