maciej.karasek
committed on
Commit
·
556fe40
1
Parent(s):
16bb627
issue #205 bugfix
Browse files
src/axolotl/utils/models.py
CHANGED
@@ -252,11 +252,11 @@ def load_model(
|
|
252 |
)
|
253 |
# Shouldn't be a problem most of the time. will obviously error if the model doesn't support this
|
254 |
# when training starts
|
255 |
-
if hasattr(config, "max_seq_len") and cfg.sequence_len > config.max_seq_len:
|
256 |
config.max_seq_len = cfg.sequence_len
|
257 |
logging.warning(f"increasing context length to {cfg.sequence_len}")
|
258 |
elif (
|
259 |
-
hasattr(config, "max_sequence_length")
|
260 |
and cfg.sequence_len > config.max_sequence_length
|
261 |
):
|
262 |
config.max_sequence_length = cfg.sequence_len
|
@@ -289,7 +289,7 @@ def load_model(
|
|
289 |
model.resize_token_embeddings(embeddings_len)
|
290 |
|
291 |
if (
|
292 |
-
hasattr(model.config, "max_position_embeddings")
|
293 |
and cfg.sequence_len >= model.config.max_position_embeddings
|
294 |
):
|
295 |
logging.warning(
|
|
|
252 |
)
|
253 |
# Shouldn't be a problem most of the time. will obviously error if the model doesn't support this
|
254 |
# when training starts
|
255 |
+
if hasattr(config, "max_seq_len") and config.max_seq_len and cfg.sequence_len > config.max_seq_len:
|
256 |
config.max_seq_len = cfg.sequence_len
|
257 |
logging.warning(f"increasing context length to {cfg.sequence_len}")
|
258 |
elif (
|
259 |
+
hasattr(config, "max_sequence_length") and config.max_sequence_length
|
260 |
and cfg.sequence_len > config.max_sequence_length
|
261 |
):
|
262 |
config.max_sequence_length = cfg.sequence_len
|
|
|
289 |
model.resize_token_embeddings(embeddings_len)
|
290 |
|
291 |
if (
|
292 |
+
hasattr(model.config, "max_position_embeddings") and model.config.max_position_embeddings
|
293 |
and cfg.sequence_len >= model.config.max_position_embeddings
|
294 |
):
|
295 |
logging.warning(
|