[misc] update compatibility #6008

Merged · 6 commits · Aug 16, 2024
Changes from all commits
1 change: 1 addition & 0 deletions .compatibility
@@ -1,3 +1,4 @@
 2.1.0-12.1.0
 2.2.2-12.1.0
 2.3.0-12.1.0
+2.4.0-12.4.1
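Each line of .compatibility pairs a PyTorch release with the CUDA toolkit version it is built and tested against; the added entry extends coverage to torch 2.4.0 with CUDA 12.4.1. A minimal sketch of how such a file could be read, assuming one "TORCH-CUDA" pair per line (the `parse_compatibility` helper is hypothetical, not part of this PR):

```python
from pathlib import Path


def parse_compatibility(path: str = ".compatibility") -> list[tuple[str, str]]:
    """Hypothetical helper: split each 'TORCH-CUDA' line into a (torch, cuda) pair."""
    pairs = []
    for line in Path(path).read_text().splitlines():
        line = line.strip()
        if not line:
            continue
        # e.g. "2.4.0-12.4.1" -> ("2.4.0", "12.4.1")
        torch_ver, cuda_ver = line.split("-", 1)
        pairs.append((torch_ver, cuda_ver))
    return pairs


# Expected: [("2.1.0", "12.1.0"), ("2.2.2", "12.1.0"), ("2.3.0", "12.1.0"), ("2.4.0", "12.4.1")]
print(parse_compatibility())
```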
4 changes: 2 additions & 2 deletions .cuda_ext.json
@@ -5,8 +5,8 @@
"cuda_image": "hpcaitech/cuda-conda:12.1"
},
{
"torch_command": "pip install torch==2.1.0 torchvision==0.16.0 torchaudio==2.1.0 --index-url https://download.pytorch.org/whl/cu118",
"cuda_image": "hpcaitech/cuda-conda:11.8"
"torch_command": "pip install torch==2.4.0 torchvision==0.19.0 torchaudio==2.4.0 --index-url https://download.pytorch.org/whl/cu124",
"cuda_image": "hpcaitech/cuda-conda:12.4"
}
]
}
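Each entry in .cuda_ext.json describes one build combination: a full `pip` command for installing torch and the matching CUDA base image. This hunk swaps the torch 2.1.0 / CUDA 11.8 combination for torch 2.4.0 / CUDA 12.4. A minimal sketch of iterating the matrix, assuming the entries live under a top-level "build" list (that key name is an assumption; it is not visible in this hunk):

```python
import json
import subprocess

with open(".cuda_ext.json") as f:
    matrix = json.load(f)

# "build" as the top-level key is assumed for illustration only.
for entry in matrix.get("build", []):
    print(f'image: {entry["cuda_image"]}')
    # Each torch_command is a complete pip invocation, e.g. installing
    # torch 2.4.0 / torchvision 0.19.0 / torchaudio 2.4.0 from the cu124 index.
    subprocess.run(entry["torch_command"], shell=True, check=True)
```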
2 changes: 1 addition & 1 deletion .github/workflows/build_on_pr.yml
@@ -141,7 +141,7 @@ jobs:
       - name: Install Colossal-AI
         run: |
           BUILD_EXT=1 pip install -v -e .
-          pip install -r requirements/requirements-test.txt
+          pip install --no-cache-dir -r requirements/requirements-test.txt

       - name: Store Colossal-AI Cache
         run: |
2 changes: 1 addition & 1 deletion .github/workflows/build_on_schedule.yml
@@ -57,7 +57,7 @@ jobs:
           [ ! -z "$(ls -A /github/home/cuda_ext_cache/)" ] && cp -r /github/home/cuda_ext_cache/* /__w/ColossalAI/ColossalAI/
           BUILD_EXT=1 pip install -v -e .
           cp -r /__w/ColossalAI/ColossalAI/build /github/home/cuda_ext_cache/
-          pip install -r requirements/requirements-test.txt
+          pip install --no-cache-dir -r requirements/requirements-test.txt

       - name: Unit Testing
         if: steps.check-avai.outputs.avai == 'true'
2 changes: 1 addition & 1 deletion colossalai/testing/utils.py
@@ -176,7 +176,7 @@ def test_something():
     else:
         exception = Exception

-    func_wrapper = rerun_on_exception(exception_type=exception, pattern=".*Address already in use.*")
+    func_wrapper = rerun_on_exception(exception_type=exception, pattern=".*(A|a)ddress already in use.*")
     return func_wrapper


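The broadened pattern makes the rerun check tolerant of the error message's capitalization, so both "Address already in use" and "address already in use" trigger a retry. A minimal sketch of the difference, using only the two patterns from the diff:

```python
import re

old_pattern = ".*Address already in use.*"
new_pattern = ".*(A|a)ddress already in use.*"

messages = [
    "RuntimeError: Address already in use",
    "OSError: [Errno 98] address already in use",
]

for msg in messages:
    # The old pattern misses the lowercase variant; the new one matches both.
    print(msg, bool(re.match(old_pattern, msg)), bool(re.match(new_pattern, msg)))
```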
2 changes: 1 addition & 1 deletion requirements/requirements.txt
@@ -8,7 +8,7 @@ click
 fabric
 contexttimer
 ninja
-torch>=2.1.0,<=2.3.0
+torch>=2.1.0,<=2.4.0
 safetensors
 einops
 pydantic
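The requirement now accepts any torch release from 2.1.0 up to and including 2.4.0. A minimal sketch of checking a candidate version against that range with the `packaging` library (not part of this PR):

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# The specifier string is taken verbatim from requirements.txt.
spec = SpecifierSet(">=2.1.0,<=2.4.0")

for candidate in ["2.0.1", "2.3.0", "2.4.0", "2.5.0"]:
    print(candidate, Version(candidate) in spec)  # False, True, True, False
```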
2 changes: 1 addition & 1 deletion tests/test_booster/test_plugin/test_torch_ddp_plugin.py
@@ -47,7 +47,7 @@ def check_torch_ddp_plugin():
     registry = model_zoo

     for name, (model_fn, data_gen_fn, output_transform_fn, _, _) in registry.items():
-        if name == "dlrm_interactionarch" or name.startswith("simple_"):
+        if name in ("dlrm_interactionarch", "transformers_mixtral") or name.startswith("simple_"):
             continue
         run_fn(model_fn, data_gen_fn, output_transform_fn)
     torch.cuda.empty_cache()
14 changes: 11 additions & 3 deletions tests/test_lazy/test_models.py
@@ -18,9 +18,17 @@ def test_models_lazy_init(subset, default_device):
     sub_model_zoo = model_zoo.get_sub_registry(subset, allow_empty=True)
     for name, entry in sub_model_zoo.items():
         # TODO(ver217): lazy init does not support weight norm, skip these models
-        if name in ("torchaudio_wav2vec2_base", "torchaudio_hubert_base") or name.startswith(
-            ("transformers_vit", "transformers_blip2", "transformers_whisper")
-        ):
+        if name in (
+            "torchaudio_wav2vec2_base",
+            "torchaudio_hubert_base",
+            "timm_beit",
+            "timm_vision_transformer",
+            "timm_deit",
+            "timm_beitv2",
+            "timm_deit3",
+            "timm_convit",
+            "timm_tnt_b_patch16_224",
+        ) or name.startswith(("transformers_vit", "transformers_blip2", "transformers_whisper")):
             continue
         check_lazy_init(entry, verbose=True, default_device=default_device)

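The skip condition now combines an exact-name tuple (the torchaudio and newly added timm models) with a prefix check (the transformers families). A minimal sketch of that filter in isolation, using a few example registry names to show which models are skipped:

```python
SKIP_EXACT = (
    "torchaudio_wav2vec2_base",
    "torchaudio_hubert_base",
    "timm_beit",
    "timm_vision_transformer",
    "timm_deit",
    "timm_beitv2",
    "timm_deit3",
    "timm_convit",
    "timm_tnt_b_patch16_224",
)
SKIP_PREFIXES = ("transformers_vit", "transformers_blip2", "transformers_whisper")


def should_skip(name: str) -> bool:
    # Mirrors the condition in test_models_lazy_init: exact match or known prefix.
    return name in SKIP_EXACT or name.startswith(SKIP_PREFIXES)


for name in ["timm_beit", "transformers_vit_base", "transformers_gpt2", "torchaudio_hubert_base"]:
    print(name, should_skip(name))  # True, True, False, True
```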