3 files changed, +43 −5 lines changed

File 1:

       - "third_party/*"
       - .gitignore
       - "*.md"
+  workflow_dispatch:

 jobs:
   build_test:
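The only change in this file is the bare workflow_dispatch trigger, which makes the workflow startable by hand from the Actions tab or the GitHub CLI without altering its existing triggers. A minimal sketch of a manual run; <workflow-file>.yml is a placeholder, since the diff does not show the workflow file paths:

  # List the repo's workflows, then start one manually on main.
  gh workflow list
  gh workflow run <workflow-file>.yml --ref main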
File 2:

       - .gitignore
       - "*.md"
   workflow_dispatch:
+    inputs:
+      channel:
+        description: "Channel to use (release, nightly, test)"
+        required: true
+        type: choice
+        options:
+          - release
+          - nightly
+          - test

 jobs:
   build_test:
@@ -76,15 +85,22 @@ jobs:
           conda info
           python --version
           conda run -n build_binary python --version
+          if [[ "${{ inputs.channel }}" = "release" ]]; then
+            index_url=https://download.pytorch.org/whl/${{ matrix.cuda-tag }}
+          elif [ -z "${{ inputs.channel }}" ]; then
+            index_url=https://download.pytorch.org/whl/nightly/${{ matrix.cuda-tag }}
+          else
+            index_url=https://download.pytorch.org/whl/${{ inputs.channel }}/${{ matrix.cuda-tag }}
+          fi
           conda run -n build_binary \
-            pip install torch --index-url https://download.pytorch.org/whl/nightly/${{ matrix.cuda-tag }}
+            pip install torch --index-url $index_url
           conda run -n build_binary \
-            python -c "import torch"
+            python -c "import torch; print(torch.__version__)"
           echo "torch succeeded"
           conda run -n build_binary \
             python -c "import torch.distributed"
           conda run -n build_binary \
-            pip install fbgemm-gpu --index-url https://download.pytorch.org/whl/nightly/${{ matrix.cuda-tag }}
+            pip install fbgemm-gpu --index-url $index_url
           conda run -n build_binary \
             python -c "import fbgemm_gpu"
           echo "fbgemm_gpu succeeded"
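The new shell step derives the pip index URL from the channel input: "release" maps to the stable wheel index, an empty value (push or pull_request runs, where inputs.channel is unset) keeps the previous nightly default, and any other value such as "test" is spliced into the URL directly. A standalone sketch of the same selection that can be run locally; CHANNEL and CUDA_TAG stand in for the ${{ inputs.channel }} and ${{ matrix.cuda-tag }} expressions, and cu121 is only an example tag:

  #!/usr/bin/env bash
  # Pick the PyTorch wheel index for a given channel and CUDA tag.
  CHANNEL="${1:-}"          # "", "release", "nightly", or "test"
  CUDA_TAG="${2:-cu121}"    # example tag; the workflow takes this from its job matrix

  if [[ "$CHANNEL" = "release" ]]; then
    index_url="https://download.pytorch.org/whl/$CUDA_TAG"
  elif [ -z "$CHANNEL" ]; then
    # No channel given, e.g. a push-triggered run: fall back to nightly.
    index_url="https://download.pytorch.org/whl/nightly/$CUDA_TAG"
  else
    index_url="https://download.pytorch.org/whl/$CHANNEL/$CUDA_TAG"
  fi
  echo "$index_url"

Running it with the arguments "release cu121" prints https://download.pytorch.org/whl/cu121, while running it with no arguments reproduces the old nightly URL.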
File 3:

@@ -5,6 +5,10 @@ name: CPU Unit Test CI

 on:
   push:
+    branches:
+      # only run tests on main branch & nightly; release should be triggered manually
+      - nightly
+      - main
     paths-ignore:
       - "docs/*"
       - "third_party/*"
...
       - "third_party/*"
       - .gitignore
       - "*.md"
+  workflow_dispatch:
+    inputs:
+      channel:
+        description: "Channel to use for torch and fbgemm"
+        required: true
+        type: choice
+        options:
+          - release
+          - nightly
+          - test

 jobs:
   build_test:
@@ -60,15 +74,22 @@ jobs:
           conda info
           python --version
           conda run -n build_binary python --version
+          if [[ "${{ inputs.channel }}" = "release" ]]; then
+            index_url=https://download.pytorch.org/whl/cpu
+          elif [ -z "${{ inputs.channel }}" ]; then
+            index_url=https://download.pytorch.org/whl/nightly/cpu
+          else
+            index_url=https://download.pytorch.org/whl/${{ inputs.channel }}/cpu
+          fi
           conda run -n build_binary \
-            pip install torch --index-url https://download.pytorch.org/whl/nightly/cpu
+            pip install torch --index-url $index_url
           conda run -n build_binary \
             python -c "import torch"
           echo "torch succeeded"
           conda run -n build_binary \
             python -c "import torch.distributed"
           conda run -n build_binary \
-            pip install fbgemm-gpu --index-url https://download.pytorch.org/whl/nightly/cpu
+            pip install fbgemm-gpu --index-url $index_url
           conda run -n build_binary \
             python -c "import fbgemm_gpu"
           echo "fbgemm_gpu succeeded"
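With the channel input in place, a release or test validation run can be started by hand instead of editing the workflow. A sketch using the GitHub CLI; <cpu-workflow-file>.yml is again a placeholder for the file path the diff does not show:

  # Start the CPU unit-test workflow against the release wheels, then follow it.
  gh workflow run <cpu-workflow-file>.yml --ref main -f channel=release
  gh run watch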