Commit f4157be

Fix import error in deepspeed_to_megatron.py (#455)
Previously, running `deepspeed_to_megatron.py` would raise an ImportError because of its relative import of `deepspeed_checkpoint`. This commit fixes the issue by switching to an absolute import, as is already done in `deepspeed_to_transformers.py`.
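For context, a relative import is only resolvable when the module is loaded as part of a package; run directly as a script, Python has no parent package to resolve it against. A minimal sketch of the difference, assuming a sibling module deepspeed_checkpoint.py in the same directory (invocation paths are illustrative):

# Relative form: works via `python -m tools.convert_checkpoint.deepspeed_to_megatron`,
# but running the file directly (`python deepspeed_to_megatron.py`) raises:
#   ImportError: attempted relative import with no known parent package
# from .deepspeed_checkpoint import ARGS_KEY, DeepSpeedCheckpoint

# Absolute form: also works when the file is run directly, because Python
# places the script's own directory on sys.path at startup.
from deepspeed_checkpoint import ARGS_KEY, DeepSpeedCheckpoint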
1 parent: c3df187

1 file changed: +3 additions, −3 deletions

tools/convert_checkpoint/deepspeed_to_megatron.py

@@ -4,7 +4,7 @@
 import os
 import torch
 from collections import OrderedDict
-from .deepspeed_checkpoint import ARGS_KEY, DeepSpeedCheckpoint
+from deepspeed_checkpoint import ARGS_KEY, DeepSpeedCheckpoint

 MODEL_KEY = 'model'
 ARGS_KEY = 'args'
@@ -92,7 +92,7 @@ def _create_rank_checkpoint(ds_checkpoint, tp_index, pp_index, for_release=False
     if pp_index == 0:
         meg_embedding_sd.update(nested_embedding_sd)

-    if pp_index == ds_checkpoint.pp_degree -1:
+    if pp_index == ds_checkpoint.pp_degree - 1:
         for key, value in embedding_sd.items():
             if key.startswith(WORD_EMBEDDINGS_KEY):
                 fields = key.split('.')
@@ -111,7 +111,7 @@ def _create_rank_checkpoint(ds_checkpoint, tp_index, pp_index, for_release=False
     if pp_index == 0:
         checkpoint_sd[MODEL_KEY][LANGUGAGE_MODEL_KEY][EMBEDDING_KEY] = meg_embedding_sd
     checkpoint_sd[MODEL_KEY][LANGUGAGE_MODEL_KEY][ENCODER_KEY] = meg_encoder_sd
-    if pp_index == ds_checkpoint.pp_degree -1:
+    if pp_index == ds_checkpoint.pp_degree - 1:
         checkpoint_sd[MODEL_KEY][WORD_EMBEDDINGS_FOR_HEAD_KEY] = meg_embedding_for_head_sd

     checkpoint_sd[ARGS_KEY] = ds_checkpoint.get_args()
