diff --git a/llmfoundry/models/utils/config_moe_args.py b/llmfoundry/models/utils/config_moe_args.py
index 3386204e26..1f7132c281 100644
--- a/llmfoundry/models/utils/config_moe_args.py
+++ b/llmfoundry/models/utils/config_moe_args.py
@@ -18,12 +18,6 @@ def create_process_group_ranks(ranks: tuple[int]):
     Used in create_set_process_group and create_mod_process_group methods below.
 
     This function is an alternative to `distributed.new_group(ranks)`.
-    When working with FSDP in torch1.13.1, using `distributed.new_group(ranks)`
-    resulted in an error but this method worked.
-
-    TODO(GRT-2416): When composer no longer has support for torch1.13.1, we should
-    consider using `distributed.new_group(ranks)` here and in composer's FSDP
-    custom process group init.
 
     Args:
         ranks (tuple[int]): Tuple of ranks of group members.
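
For context, the helper whose docstring is trimmed here builds a process group for a rank subset without calling `distributed.new_group(ranks)` directly. Below is a minimal sketch of that pattern, assuming `torch.distributed` is already initialized on every rank; the function name `create_process_group_ranks_sketch` and its body are illustrative, built on the public `new_subgroups_by_enumeration` API rather than the repository's exact implementation.

    # Minimal sketch, not the repository's exact code: create this rank's
    # subgroup without calling `distributed.new_group(ranks)` directly.
    import torch.distributed as dist


    def create_process_group_ranks_sketch(ranks: tuple[int, ...]):  # hypothetical name
        """Create a subgroup for `ranks`; must be called by every rank."""
        # Every rank contributes its rank tuple so all ranks learn the full
        # set of subgroups being created.
        ranks_gather_list = [None] * dist.get_world_size()
        dist.all_gather_object(ranks_gather_list, ranks)
        # Deduplicate and order identically on all ranks, since group
        # construction is a collective and must enumerate the same groups
        # everywhere.
        ranks_per_subgroup = sorted(set(ranks_gather_list))
        # `new_subgroups_by_enumeration` creates one group per rank list and
        # returns the group that contains the calling rank.
        group, _ = dist.new_subgroups_by_enumeration(
            [list(r) for r in ranks_per_subgroup],
        )
        return group

The all-gather step is what makes this a drop-in alternative: collective group constructors in `torch.distributed` require every process to participate with an identical view of the groups, so each rank first learns every other rank's tuple before enumerating the subgroups.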