Debugging and removing redundant lines
I think these changes are needed and fix a bug, but I don't know what testing process was done.
Please see [this issue](https://huggingface.co/baidu/ERNIE-4.5-VL-28B-A3B-PT/discussions/7#68a699b08258e261e1f1042f) and if deemed valid, I will move the explanations here.
Changed file: `modeling_ernie4_5_vl.py` (+1 line, −8 lines)
```diff
@@ -2882,11 +2882,6 @@ class Ernie4_5_Model(Ernie4_5_PretrainedModel):
         if past_key_values is None:
             past_key_values = tuple([None] * len(self.layers))

-        seq_length_with_past = seq_length
-        cache_length = 0
-        if past_key_values[0] is not None:
-            cache_length = past_key_values[0][0].shape[1]
-            seq_length_with_past += cache_length
         if inputs_embeds is None:
             inputs_embeds = self.embed_tokens(input_ids)

@@ -2908,9 +2903,7 @@ class Ernie4_5_Model(Ernie4_5_PretrainedModel):
             if output_hidden_states:
                 all_hidden_states += (hidden_states,)

-            past_key_value = (
-                past_key_values[idx] if past_key_values is not None else None
-            )
+            past_key_value = past_key_values[idx]
             layer_outputs = decoder_layer(
                 hidden_states,
                 attention_mask,
```

Note: the removed `seq_length_with_past` / `cache_length` computation is dead code (its results are never used after this point), and the simplified `past_key_value` assignment is safe because the preceding `if past_key_values is None:` block guarantees `past_key_values` is a tuple by the time the loop runs.