9
9
import yaml
10
10
from jinja2 import select_autoescape
11
11
12
# Workflow-level job config applied to every PR build: run on all branches
# EXCEPT the trunk branches (master/main).
WORKFLOWS_JOBS_PR = {
    "filters": {"branches": {"ignore": ["master", "main"]}},
}

# Workflow-level job config for trunk builds: restricted to master/main and
# run with the "org-member" context (presumably so org-level credentials are
# available to the job — confirm against the CircleCI project settings).
WORKFLOWS_JOBS_TRUNK = {
    "context": "org-member",
    "filters": {"branches": {"only": ["master", "main"]}},
}
19
19
@@ -23,39 +23,39 @@ def indent(indentation, data_list):
23
23
)
24
24
25
25
26
def jobs(pr_or_trunk, num_workers=20, indentation=2):
    """Render the build-job definitions for one workflow flavor.

    Builds one ``..._build_manager`` entry plus ``num_workers``
    ``..._build_worker_<i>`` entries, each expanding a shared YAML anchor,
    then returns them serialized via ``indent`` with single quotes stripped
    (so the ``*anchor`` references survive as YAML aliases).

    Args:
        pr_or_trunk: workflow flavor tag ("pr" or "trunk") embedded in the
            generated job names.
        num_workers: number of worker jobs to emit.
        indentation: indentation level forwarded to ``indent``.
    """
    # all tutorials that need gpu.nvidia.small.multi machines will be routed
    # by get_files_to_run.py to 0th worker
    gpu_multi_workers = {0}

    job_map = {
        f"pytorch_tutorial_{pr_or_trunk}_build_manager": {
            "<<": "*pytorch_tutorial_build_manager_defaults"
        }
    }
    for worker_id in range(num_workers):
        worker_job = {"<<": "*pytorch_tutorial_build_worker_defaults"}
        if worker_id in gpu_multi_workers:
            worker_job["resource_class"] = "gpu.nvidia.small.multi"
        job_map[f"pytorch_tutorial_{pr_or_trunk}_build_worker_{worker_id}"] = worker_job

    # Strip quotes so the "*..." anchor references are emitted as YAML aliases.
    return indent(indentation, job_map).replace("'", "")
42
42
43
43
44
def workflows_jobs(pr_or_trunk, indentation=6, num_workers=20):
    """Render the workflow job list for one workflow flavor.

    Emits ``num_workers`` worker entries followed by one manager entry that
    ``requires`` all of the workers, each carrying a deep copy of the
    branch-filter config (``WORKFLOWS_JOBS_PR`` for "pr", otherwise
    ``WORKFLOWS_JOBS_TRUNK``), serialized via ``indent``.

    Args:
        pr_or_trunk: workflow flavor tag ("pr" or "trunk") embedded in the
            generated job names and used to pick the filter config.
        indentation: indentation level forwarded to ``indent``.
        num_workers: number of worker entries to emit.
    """
    base_config = deepcopy(
        WORKFLOWS_JOBS_PR if pr_or_trunk == "pr" else WORKFLOWS_JOBS_TRUNK
    )
    worker_names = [
        f"pytorch_tutorial_{pr_or_trunk}_build_worker_{i}"
        for i in range(num_workers)
    ]

    # One entry per worker; deep-copied so later mutation of base_config
    # cannot leak into the already-built entries.
    entries = [{name: deepcopy(base_config)} for name in worker_names]

    # The manager runs only after every worker has finished.
    base_config["requires"] = worker_names
    entries.append(
        {f"pytorch_tutorial_{pr_or_trunk}_build_manager": deepcopy(base_config)}
    )
    return indent(indentation, entries)
60
60
61
61
@@ -65,7 +65,7 @@ def windows_jobs(indentation=2, num_workers=4):
65
65
jobs [f"pytorch_tutorial_windows_pr_build_worker_{ i } " ] = {
66
66
"<<" : "*pytorch_windows_build_worker"
67
67
}
68
- jobs [f"pytorch_tutorial_windows_master_build_worker_ { i } " ] = {
68
+ jobs [f"pytorch_tutorial_windows_trunk_build_worker_ { i } " ] = {
69
69
"<<" : "*pytorch_windows_build_worker"
70
70
}
71
71
return indent (indentation , jobs ).replace ("'" , "" )
@@ -79,10 +79,10 @@ def windows_workflows_jobs(indentation=6, num_workers=4):
79
79
{f"pytorch_tutorial_windows_pr_build_worker_{ i } " : deepcopy (job_info )}
80
80
)
81
81
82
- job_info = WORKFLOWS_JOBS_MASTER
82
+ job_info = WORKFLOWS_JOBS_TRUNK
83
83
for i in range (num_workers ):
84
84
jobs .append (
85
- {f"pytorch_tutorial_windows_master_build_worker_ { i } " : deepcopy (job_info )}
85
+ {f"pytorch_tutorial_windows_trunk_build_worker_ { i } " : deepcopy (job_info )}
86
86
)
87
87
88
88
return ("\n #" ).join (indent (indentation , jobs ).splitlines ())
0 commit comments