Skip to content

Commit 0f5825f

Browse files
authored
Merge branch 'master' into use-windows-latest
2 parents 5c6e043 + 5ba16f4 commit 0f5825f

File tree

239 files changed

+19142
-11890
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the searchbox below for content that may be hidden.

239 files changed

+19142
-11890
lines changed

.github/CODEOWNERS

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
* @antonwolfy @AlexanderKalistratov @vlad-perevezentsev @vtavana
1+
* @antonwolfy @AlexanderKalistratov @vlad-perevezentsev @vtavana @ndgrigorian

.github/pull_request_template.md

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
- [ ] Have you provided a meaningful PR description?
2-
- [ ] Have you added a test, reproducer or referred to issue with a reproducer?
2+
- [ ] Have you added a test, reproducer or referred to an issue with a reproducer?
33
- [ ] Have you tested your changes locally for CPU and GPU devices?
44
- [ ] Have you made sure that new changes do not introduce compiler warnings?
55
- [ ] Have you checked performance impact of proposed changes?
6-
- [ ] If this PR is a work in progress, are you filing the PR as a draft?
6+
- [ ] Have you added documentation for your changes, if necessary?
7+
- [ ] Have you added your changes to the changelog?

.github/workflows/array-api-skips.txt

Lines changed: 1 addition & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -1,42 +1,5 @@
11
# array API tests to be skipped
22

3-
# no 'uint8' dtype
4-
array_api_tests/test_array_object.py::test_getitem_masking
5-
6-
# missing unique-like functions
7-
array_api_tests/test_has_names.py::test_has_names[set-unique_all]
8-
array_api_tests/test_has_names.py::test_has_names[set-unique_counts]
9-
array_api_tests/test_has_names.py::test_has_names[set-unique_inverse]
10-
array_api_tests/test_has_names.py::test_has_names[set-unique_values]
11-
array_api_tests/test_set_functions.py::test_unique_all
12-
array_api_tests/test_set_functions.py::test_unique_counts
13-
array_api_tests/test_set_functions.py::test_unique_inverse
14-
array_api_tests/test_set_functions.py::test_unique_values
15-
array_api_tests/test_signatures.py::test_func_signature[unique_all]
16-
array_api_tests/test_signatures.py::test_func_signature[unique_counts]
17-
array_api_tests/test_signatures.py::test_func_signature[unique_inverse]
18-
array_api_tests/test_signatures.py::test_func_signature[unique_values]
19-
20-
# do not return a namedtuple
21-
array_api_tests/test_linalg.py::test_eigh
22-
array_api_tests/test_linalg.py::test_slogdet
23-
array_api_tests/test_linalg.py::test_svd
24-
25-
# hypothesis found failures
26-
array_api_tests/test_linalg.py::test_qr
27-
array_api_tests/test_operators_and_elementwise_functions.py::test_clip
28-
29-
# unexpected result is returned
3+
# unexpected result is returned - unmute when dpctl-1986 is resolved
304
array_api_tests/test_operators_and_elementwise_functions.py::test_asin
315
array_api_tests/test_operators_and_elementwise_functions.py::test_asinh
32-
33-
# missing 'correction' keyword argument
34-
array_api_tests/test_signatures.py::test_func_signature[std]
35-
array_api_tests/test_signatures.py::test_func_signature[var]
36-
37-
# wrong shape is returned
38-
array_api_tests/test_linalg.py::test_vecdot
39-
array_api_tests/test_linalg.py::test_linalg_vecdot
40-
41-
# arrays have different values
42-
array_api_tests/test_linalg.py::test_linalg_tensordot

.github/workflows/build-sphinx.yml

Lines changed: 38 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -36,11 +36,13 @@ jobs:
3636
pull-requests: write
3737

3838
env:
39-
python-ver: '3.12'
40-
CHANNELS: '-c dppy/label/dev -c intel -c conda-forge --override-channels'
41-
NO_INTEL_CHANNELS: '-c dppy/label/dev -c conda-forge --override-channels'
42-
# Install the latest oneAPI compiler to work around an issue
43-
INSTALL_ONE_API: 'yes'
39+
environment-file: 'environments/environment.yml'
40+
build-with-oneapi-env: 'environments/build_with_oneapi.yml'
41+
building-docs-env: 'environments/building_docs.yml'
42+
oneapi-pkgs-env: ''
43+
# Enable env when it's required to use only conda packages without OneAPI installation
44+
# oneapi-pkgs-env: '${{ github.workspace }}/environments/oneapi_pkgs.yml'
45+
dpctl-pkg-txt: 'environments/dpctl_pkg.txt'
4446

4547
steps:
4648
- name: Cancel Previous Runs
@@ -60,6 +62,7 @@ jobs:
6062
docker-images: false
6163

6264
- name: Add Intel repository
65+
if: env.oneapi-pkgs-env == ''
6366
run: |
6467
wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
6568
cat GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | sudo tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null
@@ -75,6 +78,7 @@ jobs:
7578
sudo apt install --reinstall -y gcc-9 g++-9 libstdc++-9-dev
7679
7780
- name: Install Intel OneAPI
81+
if: env.oneapi-pkgs-env == ''
7882
run: |
7983
sudo apt install hwloc \
8084
intel-oneapi-mkl \
@@ -101,16 +105,25 @@ jobs:
101105
with:
102106
fetch-depth: 0
103107

108+
- name: Install conda-merge tool
109+
uses: BSFishy/pip-action@8f2d471d809dc20b6ada98c91910b6ae6243f318 # v1
110+
with:
111+
packages: conda-merge
112+
113+
- name: Merge conda env files
114+
run: |
115+
conda-merge ${{ env.build-with-oneapi-env }} ${{ env.building-docs-env }} ${{ env.oneapi-pkgs-env }} > ${{ env.environment-file }}
116+
cat ${{ env.environment-file }}
117+
104118
- name: Setup miniconda
105119
id: setup_miniconda
106120
continue-on-error: true
107121
uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1
108122
with:
109123
miniforge-version: latest
110124
use-mamba: 'true'
111-
channels: conda-forge
112125
conda-remove-defaults: 'true'
113-
python-version: ${{ env.python-ver }}
126+
environment-file: ${{ env.environment-file }}
114127
activate-environment: 'docs'
115128

116129
- name: ReSetup miniconda
@@ -119,40 +132,30 @@ jobs:
119132
with:
120133
miniforge-version: latest
121134
use-mamba: 'true'
122-
channels: conda-forge
123135
conda-remove-defaults: 'true'
124-
python-version: ${{ env.python-ver }}
136+
environment-file: ${{ env.environment-file }}
125137
activate-environment: 'docs'
126138

127-
# Sometimes `mamba install ...` fails due to slow download speed rate, so disable the check in mamba
128-
- name: Disable speed limit check in mamba
129-
run: echo "MAMBA_NO_LOW_SPEED_LIMIT=1" >> $GITHUB_ENV
130-
131-
- name: Install sphinx dependencies
132-
run: |
133-
mamba install sphinx sphinx_rtd_theme
134-
pip install sphinxcontrib-googleanalytics==0.4 \
135-
pyenchant sphinxcontrib-spelling
136-
137-
- name: Install dpnp dependencies
138-
if: env.INSTALL_ONE_API == 'yes'
139-
run: |
140-
mamba install numpy dpctl">=0.18.0dev0" cmake cython pytest ninja scikit-build ${{ env.NO_INTEL_CHANNELS }}
141-
142-
- name: Install dpnp dependencies
143-
if: env.INSTALL_ONE_API != 'yes'
139+
# We can't install dpctl as a conda package when the environment is created through
140+
# installing of Intel OneAPI packages because the dpctl conda package has a runtime
141+
# dependency on DPC++ RT one. Whereas the DPC++ RT package has been already installed
142+
# by the apt command above and its version has been matched with the DPC++ compiler.
143+
# In case where we install the DPC++ compiler with the apt (including DPC++ RT) and
144+
# install the DPC++ RT conda package while resolving dependencies, this can lead
145+
# to a versioning error, i.e. compatibility issue as the DPC++ compiler only guarantees
146+
# backwards compatibility, not forward compatibility (DPC++ RT may not run a binary built
147+
# with a newer version of the DPC++ compiler).
148+
# Installing dpctl via the pip manager has no such limitation, as the package has no
149+
# run dependency on the DPC++ RT pip package, so this is why the step is necessary here.
150+
- name: Install dpctl
151+
if: env.oneapi-pkgs-env == ''
144152
run: |
145-
mamba install numpy dpctl">=0.18.0dev0" mkl-devel-dpcpp onedpl-devel tbb-devel dpcpp_linux-64 \
146-
cmake cython pytest ninja scikit-build ${{ env.CHANNELS }}
147-
148-
- name: Install cuPy dependencies
149-
run: mamba install cupy
153+
pip install -r ${{ env.dpctl-pkg-txt }}
150154
151155
- name: Conda info
152-
run: mamba info
153-
154-
- name: Conda list
155-
run: mamba list
156+
run: |
157+
mamba info
158+
mamba list
156159
157160
- name: Build library
158161
run: |
@@ -178,7 +181,6 @@ jobs:
178181
echo PROJECT_NUMBER=${PROJECT_NUMBER}
179182
echo "PROJECT_NUMBER=$PROJECT_NUMBER" >> $GITHUB_ENV
180183
181-
# https://github.com/marketplace/actions/doxygen-action
182184
- name: Build backend docs
183185
uses: mattnotmitt/doxygen-action@b84fe17600245bb5db3d6c247cc274ea98c15a3b # v1.12
184186
with:
@@ -187,7 +189,6 @@ jobs:
187189
- name: Copy backend docs
188190
run: cp -r dpnp/backend/doc/html ${{ env.PUBLISH_DIR }}/backend_doc
189191

190-
# https://github.com/marketplace/actions/github-pages-action
191192
# The step is only used to build docs while pushing a PR to "master"
192193
- name: Deploy docs
193194
if: env.GH_EVENT_PUSH_UPSTREAM == 'true'

0 commit comments

Comments
 (0)