diff --git a/colossalai/tensor/d_tensor/layout_converter.py b/colossalai/tensor/d_tensor/layout_converter.py
index 546fb5706810..e2298103134f 100644
--- a/colossalai/tensor/d_tensor/layout_converter.py
+++ b/colossalai/tensor/d_tensor/layout_converter.py
@@ -446,7 +446,7 @@ def layout_converting(
             # Cache hit

             def _group_alive_check(cached_comm_action_sequence):
-                """
+                r"""
                 Check if the process groups required for sharding have been deleted by torch.distributed.destroy_process_group method.
                 If not deleted, return True; otherwise, return False.
