diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml
index b6a4649a30..2bde4d7ccb 100644
--- a/.github/workflows/conda-package.yml
+++ b/.github/workflows/conda-package.yml
@@ -48,8 +48,9 @@ jobs:
           echo "WHEELS_OUTPUT_FOLDER=$GITHUB_WORKSPACE${{ runner.os == 'Linux' && '/' || '\\' }}" >> $GITHUB_ENV
       - name: Build conda package
         run: |
-          CHANNELS="-c intel -c conda-forge --override-channels"
-          VERSIONS="--python ${{ matrix.python }}"
+          # use bootstrap channel to pull NumPy linked with OpenBLAS
+          CHANNELS="-c dppy/label/bootstrap -c intel -c conda-forge --override-channels"
+          VERSIONS="--python ${{ matrix.python }} --numpy 1.23"
           TEST="--no-test"
           conda build \
             $TEST \
@@ -104,7 +105,7 @@ jobs:
       - name: Build conda package
         env:
           OVERRIDE_INTEL_IPO: 1 # IPO requires more resources that GH actions VM provides
-        run: conda build --no-test --python ${{ matrix.python }} -c intel -c conda-forge --override-channels conda-recipe
+        run: conda build --no-test --python ${{ matrix.python }} -c intel -c conda-forge --override-channels conda-recipe
       - name: Upload artifact
         uses: actions/upload-artifact@v3
         with:
@@ -392,36 +393,6 @@ jobs:
           ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
         run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.whl --version ${{ env.PACKAGE_VERSION }}
 
-  cleanup_packages:
-    name: Clean up anaconda packages
-    needs: [upload_linux, upload_windows]
-    runs-on: 'ubuntu-latest'
-    defaults:
-      run:
-        shell: bash -el {0}
-    steps:
-      - uses: conda-incubator/setup-miniconda@v2
-        with:
-          run-post: false
-          channel-priority: "disabled"
-          channels: conda-forge
-          python-version: '3.11'
-
-      - name: Install anaconda-client
-        run: conda install anaconda-client
-
-      - name: Checkout repo
-        uses: actions/checkout@v2
-        with:
-          repository: IntelPython/devops-tools
-          fetch-depth: 0
-
-      - name: Cleanup old packages
-        run: |
-          python scripts/cleanup-old-packages.py \
-            --verbose --force --token ${{ secrets.ANACONDA_TOKEN }} \
-            --package dppy/${{ env.PACKAGE_NAME }} --label dev
-
   test_examples_linux:
     needs: build_linux
     runs-on: ${{ matrix.runner }}
@@ -699,3 +670,33 @@ jobs:
           allow-repeats: true
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           repo-token-user-login: 'github-actions[bot]'
+
+  cleanup_packages:
+    name: Clean up anaconda packages
+    needs: [upload_linux, upload_windows]
+    runs-on: 'ubuntu-latest'
+    defaults:
+      run:
+        shell: bash -el {0}
+    steps:
+      - uses: conda-incubator/setup-miniconda@v2
+        with:
+          run-post: false
+          channel-priority: "disabled"
+          channels: conda-forge
+          python-version: '3.11'
+
+      - name: Install anaconda-client
+        run: conda install anaconda-client
+
+      - name: Checkout repo
+        uses: actions/checkout@v3
+        with:
+          repository: IntelPython/devops-tools
+          fetch-depth: 0
+
+      - name: Cleanup old packages
+        run: |
+          python scripts/cleanup-old-packages.py \
+            --verbose --force --token ${{ secrets.ANACONDA_TOKEN }} \
+            --package dppy/${{ env.PACKAGE_NAME }} --label dev
diff --git a/.github/workflows/generate-coverage.yaml b/.github/workflows/generate-coverage.yaml
index 7e0c8b8667..d739e32ad7 100644
--- a/.github/workflows/generate-coverage.yaml
+++ b/.github/workflows/generate-coverage.yaml
@@ -12,6 +12,8 @@ jobs:
     env:
       ONEAPI_ROOT: /opt/intel/oneapi
       GTEST_ROOT: /home/runner/work/googletest-1.13.0/install
+      # Use oneAPI compiler 2023 to work around an issue
+      USE_2023: 1
 
     steps:
       - name: Cancel Previous Runs
@@ -27,7 +29,14 @@ jobs:
           sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"
          sudo apt-get update
 
-      - name: Install Intel OneAPI
+      - name: Install Intel OneAPI 2023
+        if: env.USE_2023 == '1'
+        run: |
+          sudo apt-get install intel-oneapi-compiler-dpcpp-cpp-2023.2.1
+          sudo apt-get install intel-oneapi-tbb-2021.10.0
+
+      - name: Install latest Intel OneAPI
+        if: env.USE_2023 != '1'
         run: |
           sudo apt-get install intel-oneapi-compiler-dpcpp-cpp
           sudo apt-get install intel-oneapi-tbb
diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml
index 2806fb9262..9e96e43135 100644
--- a/conda-recipe/meta.yaml
+++ b/conda-recipe/meta.yaml
@@ -1,3 +1,5 @@
+{% set required_compiler_version = "2024.0" %}
+
 package:
     name: dpctl
     version: {{ GIT_DESCRIBE_TAG }}
@@ -14,7 +16,7 @@ build:
 requirements:
     build:
         - {{ compiler('cxx') }}
-        - {{ compiler('dpcpp') }} >=2023.2 # [not osx]
+        - {{ compiler('dpcpp') }} >={{ required_compiler_version }} # [not osx]
         - sysroot_linux-64 >=2.28 # [linux]
     host:
         - setuptools
@@ -29,7 +31,7 @@ requirements:
     run:
         - python
         - {{ pin_compatible('numpy', min_pin='x.x', upper_bound='1.26') }}
-        - dpcpp-cpp-rt >=2023.2
+        - dpcpp-cpp-rt >={{ required_compiler_version }}
         - level-zero # [linux]
 
 test:
diff --git a/scripts/gen_coverage.py b/scripts/gen_coverage.py
index 93328f1b85..538b93adbb 100644
--- a/scripts/gen_coverage.py
+++ b/scripts/gen_coverage.py
@@ -195,8 +195,14 @@ def find_objects():
         args.cxx_compiler = "icpx"
         args.compiler_root = None
         icx_path = subprocess.check_output(["which", "icx"])
-        bin_dir = os.path.dirname(os.path.dirname(icx_path))
-        args.bin_llvm = os.path.join(bin_dir.decode("utf-8"), "bin-llvm")
+        bin_dir = os.path.dirname(icx_path)
+        compiler_dir = os.path.join(bin_dir.decode("utf-8"), "compiler")
+        if os.path.exists(compiler_dir):
+            args.bin_llvm = os.path.join(bin_dir.decode("utf-8"), "compiler")
+        else:
+            bin_dir = os.path.dirname(bin_dir)
+            args.bin_llvm = os.path.join(bin_dir.decode("utf-8"), "bin-llvm")
+        assert os.path.exists(args.bin_llvm)
     else:
         args_to_validate = [
             "c_compiler",
diff --git a/scripts/gen_docs.py b/scripts/gen_docs.py
index 9e2285a477..e99dc74a52 100644
--- a/scripts/gen_docs.py
+++ b/scripts/gen_docs.py
@@ -134,8 +134,8 @@ def run(
         args.cxx_compiler = "icpx"
         args.compiler_root = None
         icx_path = subprocess.check_output(["which", "icx"])
-        bin_dir = os.path.dirname(os.path.dirname(icx_path))
-        args.bin_llvm = os.path.join(bin_dir.decode("utf-8"), "bin-llvm")
+        bin_dir = os.path.dirname(icx_path)
+        args.bin_llvm = os.path.join(bin_dir.decode("utf-8"), "compiler")
     else:
         args_to_validate = [
             "c_compiler",