Skip to content

Commit 69b1133

Browse files
authored
Add Coverage Checks and Badge to CI (#57)
* Add Coverage Checks and Badge to CI
* Add coverage installs in workflows
* Add ray[default] to tests
* Fix pytest/mock versions and fix typo
* Added coverage log in test
* Add missing newline
* Moved ray[default] to requirements
1 parent ac43ea0 commit 69b1133

File tree

11 files changed

+604
-23
lines changed

11 files changed

+604
-23
lines changed

.github/workflows/coverage-badge.yaml

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
---
# This workflow generates and pushes an updated coverage badge.
# On every push to main it runs the unit tests under coverage,
# regenerates coverage.svg, and commits the badge back to the
# branch only when the file actually changed.

name: Coverage Badge

on:
  push:
    branches: [ main ]

jobs:
  report:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          # Quoted so YAML does not parse the version as a float
          # (an unquoted 3.10 would silently become 3.1).
          python-version: "3.9"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pytest==6.2.4
          pip install pytest-mock==3.6.1
          pip install coverage
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
      - name: Generate coverage report
        run: |
          # Canonical coverage invocation: coverage's own options come
          # before `-m`; everything after the module name is handed to
          # pytest itself (matches `python -m` semantics in coverage >= 6).
          coverage run --source=src -m pytest -v tests/unit_test.py

      - name: Coverage Badge
        uses: tj-actions/coverage-badge-py@v1.8

      - name: Verify Changed files
        uses: tj-actions/verify-changed-files@v12
        id: changed_files
        with:
          files: coverage.svg

      # Commit/push only when the badge changed, so pushes to main that
      # leave coverage unchanged do not create empty badge commits.
      - name: Commit files
        if: steps.changed_files.outputs.files_changed == 'true'
        run: |
          git config --local user.email "github-actions[bot]@users.noreply.github.com"
          git config --local user.name "github-actions[bot]"
          git add coverage.svg
          git commit -m "Updated coverage.svg"

      - name: Push changes
        if: steps.changed_files.outputs.files_changed == 'true'
        uses: ad-m/github-push-action@master
        with:
          # CI_PUSH_TOKEN (not GITHUB_TOKEN) so the badge commit can be
          # pushed back to the protected branch that triggered the run.
          github_token: ${{ secrets.CI_PUSH_TOKEN }}
          branch: ${{ github.ref }}

.github/workflows/python-app.yml

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,13 +23,16 @@ jobs:
2323
- name: Install dependencies
2424
run: |
2525
python -m pip install --upgrade pip
26-
pip install pytest
27-
pip install pytest-mock
26+
pip install pytest==6.2.4
27+
pip install pytest-mock==3.6.1
28+
pip install coverage
2829
pip install black==22.3.0
2930
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
3031
- name: Check formatting with black
3132
run: |
3233
black --check .
33-
- name: Test with pytest
34+
- name: Test with pytest and check coverage
3435
run: |
35-
pytest -v tests/unit_test.py
36+
coverage run -m --source=src pytest -v tests/unit_test.py
37+
coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2)
38+
if (( $coverage < 90 )); then exit 1; else echo "Coverage passed, ${coverage}%"; fi

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
dist/
22
.python-version
33
__pycache__/
4+
.coverage

README.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,11 @@ For testing, make sure to have installed:
2020

2121
NOTE: Functional tests coming soon, will live in `tests/func_test.py`
2222

23+
For checking code coverage while testing:
24+
- Start by installing `coverage` (can be done via `pip`)
25+
- Now instead when testing run `coverage run -m --source=src pytest tests/unit_test.py`
26+
- To then view a code coverage report w/ missing lines, run `coverage report -m`
27+
2328
For formatting:
2429
- Currently using black v22.3.0 for format checking
2530
- To install, run `pip install black==22.3.0`

requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
openshift-client==1.0.18
22
rich==12.5.1
3-
ray==2.1.0
3+
ray[default]==2.1.0

src/codeflare_sdk/cluster/cluster.py

Lines changed: 17 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ def down(self):
110110
oc.invoke("delete", ["AppWrapper", self.app_wrapper_name])
111111
self.config.auth.logout()
112112

113-
def status(self, print_to_console: bool = True):
113+
def status(self, print_to_console: bool = True): # pragma: no cover
114114
"""
115115
TO BE UPDATED: Will soon return (and print by default) the cluster's
116116
status, from AppWrapper submission to setup completion. All resource
@@ -151,7 +151,7 @@ def cluster_dashboard_uri(self, namespace: str = "default") -> str:
151151
return "Dashboard route not available yet. Did you run cluster.up()?"
152152

153153
# checks whether the ray cluster is ready
154-
def is_ready(self, print_to_console: bool = True):
154+
def is_ready(self, print_to_console: bool = True): # pragma: no cover
155155
"""
156156
TO BE DEPRECATED: functionality will be added into cluster.status().
157157
"""
@@ -228,15 +228,17 @@ def job_logs(self, job_id: str) -> str:
228228
return client.get_job_logs(job_id)
229229

230230

231-
def get_current_namespace() -> str:
231+
def get_current_namespace() -> str: # pragma: no cover
232232
"""
233233
Returns the user's current working namespace.
234234
"""
235235
namespace = oc.invoke("project", ["-q"]).actions()[0].out.strip()
236236
return namespace
237237

238238

239-
def list_all_clusters(namespace: str, print_to_console: bool = True):
239+
def list_all_clusters(
240+
namespace: str, print_to_console: bool = True
241+
): # pragma: no cover
240242
"""
241243
Returns (and prints by default) a list of all clusters in a given namespace.
242244
"""
@@ -246,7 +248,7 @@ def list_all_clusters(namespace: str, print_to_console: bool = True):
246248
return clusters
247249

248250

249-
def list_all_queued(namespace: str, print_to_console: bool = True):
251+
def list_all_queued(namespace: str, print_to_console: bool = True): # pragma: no cover
250252
"""
251253
Returns (and prints by default) a list of all currently queued-up AppWrappers
252254
in a given namespace.
@@ -262,14 +264,18 @@ def list_all_queued(namespace: str, print_to_console: bool = True):
262264
# private methods
263265

264266

265-
def _app_wrapper_status(name, namespace="default") -> Optional[AppWrapper]:
267+
def _app_wrapper_status(
268+
name, namespace="default"
269+
) -> Optional[AppWrapper]: # pragma: no cover
266270
with oc.project(namespace), oc.timeout(10 * 60):
267271
cluster = oc.selector(f"appwrapper/{name}").object()
268272
if cluster:
269273
return _map_to_app_wrapper(cluster)
270274

271275

272-
def _ray_cluster_status(name, namespace="default") -> Optional[RayCluster]:
276+
def _ray_cluster_status(
277+
name, namespace="default"
278+
) -> Optional[RayCluster]: # pragma: no cover
273279
# FIXME should we check the appwrapper first
274280
cluster = None
275281
try:
@@ -283,7 +289,7 @@ def _ray_cluster_status(name, namespace="default") -> Optional[RayCluster]:
283289
return cluster
284290

285291

286-
def _get_ray_clusters(namespace="default") -> List[RayCluster]:
292+
def _get_ray_clusters(namespace="default") -> List[RayCluster]: # pragma: no cover
287293
list_of_clusters = []
288294

289295
with oc.project(namespace), oc.timeout(10 * 60):
@@ -296,7 +302,7 @@ def _get_ray_clusters(namespace="default") -> List[RayCluster]:
296302

297303
def _get_app_wrappers(
298304
namespace="default", filter=List[AppWrapperStatus]
299-
) -> List[AppWrapper]:
305+
) -> List[AppWrapper]: # pragma: no cover
300306
list_of_app_wrappers = []
301307

302308
with oc.project(namespace), oc.timeout(10 * 60):
@@ -311,7 +317,7 @@ def _get_app_wrappers(
311317
return list_of_app_wrappers
312318

313319

314-
def _map_to_ray_cluster(cluster) -> RayCluster:
320+
def _map_to_ray_cluster(cluster) -> RayCluster: # pragma: no cover
315321
cluster_model = cluster.model
316322

317323
with oc.project(cluster.namespace()), oc.timeout(10 * 60):
@@ -342,7 +348,7 @@ def _map_to_ray_cluster(cluster) -> RayCluster:
342348
)
343349

344350

345-
def _map_to_app_wrapper(cluster) -> AppWrapper:
351+
def _map_to_app_wrapper(cluster) -> AppWrapper: # pragma: no cover
346352
cluster_model = cluster.model
347353
return AppWrapper(
348354
name=cluster.name(),

src/codeflare_sdk/utils/generate_yaml.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -240,7 +240,7 @@ def generate_appwrapper(
240240
return outfile
241241

242242

243-
def main():
243+
def main(): # pragma: no cover
244244
parser = argparse.ArgumentParser(description="Generate user AppWrapper")
245245
parser.add_argument(
246246
"--name",
@@ -348,5 +348,5 @@ def main():
348348
return outfile
349349

350350

351-
if __name__ == "__main__":
351+
if __name__ == "__main__": # pragma: no cover
352352
main()

src/codeflare_sdk/utils/pretty_print.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -27,12 +27,12 @@
2727
from ..cluster.model import RayCluster, AppWrapper, RayClusterStatus
2828

2929

30-
def print_no_resources_found():
30+
def print_no_resources_found(): # pragma: no cover
3131
console = Console()
3232
console.print(Panel("[red]No resources found"))
3333

3434

35-
def print_app_wrappers_status(app_wrappers: List[AppWrapper]):
35+
def print_app_wrappers_status(app_wrappers: List[AppWrapper]): # pragma: no cover
3636
if not app_wrappers:
3737
print_no_resources_found()
3838
return # shortcircuit
@@ -53,7 +53,7 @@ def print_app_wrappers_status(app_wrappers: List[AppWrapper]):
5353
console.print(Panel.fit(table))
5454

5555

56-
def print_clusters(clusters: List[RayCluster], verbose=True):
56+
def print_clusters(clusters: List[RayCluster], verbose=True): # pragma: no cover
5757
if not clusters:
5858
print_no_resources_found()
5959
return # shortcircuit

tests/test-case-cmd.yaml

Lines changed: 150 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,150 @@
# Test fixture: the expected MCAD AppWrapper ("unit-cmd-cluster") wrapping a
# RayCluster (one 2-CPU/8G head, two 1-CPU/2G/1-GPU workers) plus an
# OpenShift Route exposing the Ray dashboard service.
# NOTE(review): indentation was reconstructed from a mangled capture —
# confirm against the YAML emitted by the generator before relying on it,
# and keep this file byte-stable if tests compare it textually.
apiVersion: mcad.ibm.com/v1beta1
kind: AppWrapper
metadata:
  name: unit-cmd-cluster
  namespace: default
spec:
  priority: 9
  resources:
    GenericItems:
    - custompodresources:
      - limits:
          cpu: 2
          memory: 8G
          nvidia.com/gpu: 0
        replicas: 1
        requests:
          cpu: 2
          memory: 8G
          nvidia.com/gpu: 0
      - limits:
          cpu: 1
          memory: 2G
          nvidia.com/gpu: 1
        replicas: 2
        requests:
          cpu: 1
          memory: 2G
          nvidia.com/gpu: 1
      generictemplate:
        apiVersion: ray.io/v1alpha1
        kind: RayCluster
        metadata:
          labels:
            appwrapper.mcad.ibm.com: unit-cmd-cluster
            controller-tools.k8s.io: '1.0'
          name: unit-cmd-cluster
          namespace: default
        spec:
          autoscalerOptions:
            idleTimeoutSeconds: 60
            imagePullPolicy: Always
            resources:
              limits:
                cpu: 500m
                memory: 512Mi
              requests:
                cpu: 500m
                memory: 512Mi
            upscalingMode: Default
          enableInTreeAutoscaling: false
          headGroupSpec:
            rayStartParams:
              block: 'true'
              dashboard-host: 0.0.0.0
              num-gpus: '0'
            serviceType: ClusterIP
            template:
              spec:
                containers:
                - image: rayproject/ray:latest
                  imagePullPolicy: Always
                  lifecycle:
                    preStop:
                      exec:
                        command:
                        - /bin/sh
                        - -c
                        - ray stop
                  name: ray-head
                  ports:
                  - containerPort: 6379
                    name: gcs
                  - containerPort: 8265
                    name: dashboard
                  - containerPort: 10001
                    name: client
                  resources:
                    limits:
                      cpu: 2
                      memory: 8G
                      nvidia.com/gpu: 0
                    requests:
                      cpu: 2
                      memory: 8G
                      nvidia.com/gpu: 0
          rayVersion: 1.12.0
          workerGroupSpecs:
          - groupName: small-group-unit-cmd-cluster
            maxReplicas: 2
            minReplicas: 2
            rayStartParams:
              block: 'true'
              num-gpus: '1'
            replicas: 2
            template:
              metadata:
                annotations:
                  key: value
                labels:
                  key: value
              spec:
                containers:
                - env:
                  - name: MY_POD_IP
                    valueFrom:
                      fieldRef:
                        fieldPath: status.podIP
                  image: rayproject/ray:latest
                  lifecycle:
                    preStop:
                      exec:
                        command:
                        - /bin/sh
                        - -c
                        - ray stop
                  name: machine-learning
                  resources:
                    limits:
                      cpu: 1
                      memory: 2G
                      nvidia.com/gpu: 1
                    requests:
                      cpu: 1
                      memory: 2G
                      nvidia.com/gpu: 1
                # Worker pods wait for the head service DNS entry before
                # starting Ray.
                initContainers:
                - command:
                  - sh
                  - -c
                  - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local;
                    do echo waiting for myservice; sleep 2; done
                  image: busybox:1.28
                  name: init-myservice
      replicas: 1
    - generictemplate:
        apiVersion: route.openshift.io/v1
        kind: Route
        metadata:
          labels:
            odh-ray-cluster-service: unit-cmd-cluster-head-svc
          name: ray-dashboard-unit-cmd-cluster
          namespace: default
        spec:
          port:
            targetPort: dashboard
          to:
            kind: Service
            name: unit-cmd-cluster-head-svc
      replica: 1
    Items: []

0 commit comments

Comments (0)