1 parent 52ca075 commit 292afa1
open_flamingo/src/vlm.py
@@ -393,6 +393,7 @@ def __init__(
             gradient_checkpointing=gradient_checkpointing,
         )
         self.lang_model.set_decoder_layers_attr_name(decoder_layers_attr_name)
+        self.decoder_layers_attr_name = decoder_layers_attr_name
         self.lang_model.init_cross_attention_layers(
             lang_hidden_size=self.lang_hidden_dim,
             vis_hidden_size=self.vis_embedding_dim,
@@ -491,7 +492,7 @@ def lambda_fn(module: nn.Module):
                 return True
             if isinstance(module, GatedCrossAttentionBlock):
                 return True
-            if isinstance(module, original_decoder_block_class):
+            if isinstance(module, decoder_block_class):
                 return True

         return lambda_fn
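
For context, lambda_fn is an FSDP auto-wrap predicate: it returns True for modules that should become their own FSDP units, and the renamed decoder_block_class is the class it checks for the language model's decoder layers (which the attribute stored in the first hunk, self.decoder_layers_attr_name, makes resolvable later). The sketch below is a minimal, hypothetical illustration of that wrapping pattern, not the actual open_flamingo code: the stand-in block classes and the direct assignment of decoder_block_class are assumptions for illustration.

import functools

import torch.nn as nn
from torch.distributed.fsdp.wrap import lambda_auto_wrap_policy


# Stand-ins for illustration only: in open_flamingo these would be the real
# GatedCrossAttentionBlock and the decoder block class looked up from the
# language model via the stored decoder_layers_attr_name.
class GatedCrossAttentionBlock(nn.Module):
    def __init__(self):
        super().__init__()
        self.proj = nn.Linear(8, 8)


class DecoderBlock(nn.Module):
    def __init__(self):
        super().__init__()
        self.proj = nn.Linear(8, 8)


# Assumed here; in the real code this is resolved via decoder_layers_attr_name.
decoder_block_class = DecoderBlock


def lambda_fn(module: nn.Module) -> bool:
    # Wrap every gated cross-attention layer and every decoder block as its
    # own FSDP unit; all other modules stay inside their parent unit.
    if isinstance(module, GatedCrossAttentionBlock):
        return True
    if isinstance(module, decoder_block_class):
        return True
    return False


# Passed as FSDP(model, auto_wrap_policy=wrap_policy, ...) when sharding.
wrap_policy = functools.partial(lambda_auto_wrap_policy, lambda_fn=lambda_fn)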