Skip to content

Commit b697b0e

Browse files
committed
Migrated the UNetR app
Signed-off-by: M Q <mingmelvinq@nvidia.com>
1 parent 3e4b839 commit b697b0e

File tree

3 files changed

+141
-161
lines changed

3 files changed

+141
-161
lines changed
Lines changed: 6 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -1,55 +1,11 @@
11
import logging
2-
import shutil
3-
import traceback
4-
from pathlib import Path
5-
from typing import List
62

73
from app import AIUnetrSegApp
84

9-
if __name__ == "__main__":
10-
logging.basicConfig(level=logging.DEBUG)
11-
# This main function is an example to show how a batch of input can be processed.
12-
# It assumes that in the app input folder there are a number of subfolders, each
13-
# containing a discrete input to be processed. Each discrete payload can have
14-
# multiple DICOM instance files, optionally organized in its own folder structure.
15-
# The application object is first created, and on its init the model network is
16-
# loaded as well as pre and post processing transforms. This app object is then
17-
# run multiple times, each time with a single discrete payload.
18-
19-
app = AIUnetrSegApp(do_run=False)
20-
21-
# Preserve the application top level input and output folder path, as the path
22-
# in the context may change on each run if the I/O arguments are passed in.
23-
app_input_path = Path(app.context.input_path)
24-
app_output_path = Path(app.context.output_path)
25-
26-
# Get subfolders in the input path, assume each one contains a discrete payload
27-
input_dirs = [path for path in app_input_path.iterdir() if path.is_dir()]
28-
29-
# Set the output path for each run under the app's output path, and do run
30-
work_dirs: List[str] = [] # strings representing folder path
31-
for idx, dir in enumerate(input_dirs):
32-
try:
33-
output_path = app_output_path / f"{dir.name}_output"
34-
# Note: the work_dir should be mapped to the host drive when used in
35-
# a container for better performance.
36-
work_dir = f".unetr_app_workdir{idx}"
37-
work_dirs.extend(work_dir)
5+
from monai.deploy.logger import load_env_log_level
386

39-
logging.info(f"Start processing input in: {dir} with results in: {output_path}")
40-
41-
# Run app with specific input and output path.
42-
# Passing in the input and output does have the side effect of changing
43-
# app context. This side effect will likely be eliminated in later releases.
44-
app.run(input=dir, output=output_path, workdir=work_dir)
45-
46-
logging.info(f"Completed processing input in: {dir} with results in: {output_path}")
47-
except Exception as ex:
48-
logging.error(f"Failed processing input in {dir}, due to: {ex}\n")
49-
traceback.print_exc()
50-
finally:
51-
# Remove the workdir; alternatively do this later, if storage space is not a concern.
52-
shutil.rmtree(work_dir, ignore_errors=True)
53-
54-
# Alternative. Explicitly remove the working dirs at the end of main.
55-
# [shutil.rmtree(work_dir, ignore_errors=True) for work_dir in work_dirs]
7+
if __name__ == "__main__":
8+
load_env_log_level()
9+
logging.info(f"Begin {__name__}")
10+
AIUnetrSegApp().run()
11+
logging.info(f"End {__name__}")

examples/apps/ai_unetr_seg_app/app.py

Lines changed: 65 additions & 80 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# Copyright 2021 MONAI Consortium
1+
# Copyright 2021-2023 MONAI Consortium
22
# Licensed under the Apache License, Version 2.0 (the "License");
33
# you may not use this file except in compliance with the License.
44
# You may obtain a copy of the License at
@@ -10,24 +10,45 @@
1010
# limitations under the License.
1111

1212
import logging
13+
from pathlib import Path
1314
from typing import List
1415

15-
# Required for setting SegmentDescription attributes. Direct import as this is not part of App SDK package.
16-
from pydicom.sr.codedict import codes
16+
from pydicom.sr.codedict import codes # Required for setting SegmentDescription attributes.
1717
from unetr_seg_operator import UnetrSegOperator
1818

19-
from monai.deploy.core import Application, resource
19+
from monai.deploy.conditions import CountCondition
20+
from monai.deploy.core import AppContext, Application
21+
from monai.deploy.logger import load_env_log_level
2022
from monai.deploy.operators.dicom_data_loader_operator import DICOMDataLoaderOperator
2123
from monai.deploy.operators.dicom_seg_writer_operator import DICOMSegmentationWriterOperator, SegmentDescription
2224
from monai.deploy.operators.dicom_series_selector_operator import DICOMSeriesSelectorOperator
2325
from monai.deploy.operators.dicom_series_to_volume_operator import DICOMSeriesToVolumeOperator
2426
from monai.deploy.operators.publisher_operator import PublisherOperator
2527
from monai.deploy.operators.stl_conversion_operator import STLConversionOperator
2628

27-
28-
@resource(cpu=1, gpu=1, memory="7Gi")
29-
# pip_packages can be a string that is a path(str) to requirements.txt file or a list of packages.
30-
# The MONAI pkg is not required by this class, instead by the included operators.
29+
# This is a sample series selection rule in JSON, simply selecting CT series.
30+
# If the study has more than 1 CT series, then all of them will be selected.
31+
# Please see more detail in DICOMSeriesSelectorOperator.
32+
# For list of string values, e.g. "ImageType": ["PRIMARY", "ORIGINAL"], it is a match if all elements
33+
# are all in the multi-value attribute of the DICOM series.
34+
35+
Sample_Rules_Text = """
36+
{
37+
"selections": [
38+
{
39+
"name": "CT Series",
40+
"conditions": {
41+
"Modality": "(?i)CT",
42+
"ImageType": ["PRIMARY", "ORIGINAL"],
43+
"PhotometricInterpretation": "MONOCHROME2"
44+
}
45+
}
46+
]
47+
}
48+
"""
49+
50+
51+
# @resource(cpu=1, gpu=1, memory="7Gi")
3152
class AIUnetrSegApp(Application):
3253
def __init__(self, *args, **kwargs):
3354
"""Creates an application instance."""
@@ -37,25 +58,36 @@ def __init__(self, *args, **kwargs):
3758

3859
def run(self, *args, **kwargs):
3960
# This method calls the base class to run. Can be omitted if simply calling through.
40-
self._logger.debug(f"Begin {self.run.__name__}")
61+
self._logger.info(f"Begin {self.run.__name__}")
4162
super().run(*args, **kwargs)
42-
self._logger.debug(f"End {self.run.__name__}")
63+
self._logger.info(f"End {self.run.__name__}")
4364

4465
def compose(self):
4566
"""Creates the app specific operators and chain them up in the processing DAG."""
4667

47-
self._logger.debug(f"Begin {self.compose.__name__}")
68+
self._logger.info(f"Begin {self.compose.__name__}")
69+
app_context = AppContext({}) # Let it figure out all the attributes without overriding
70+
app_input_path = Path(app_context.input_path)
71+
app_output_path = Path(app_context.output_path)
72+
model_path = Path(app_context.model_path)
73+
74+
self._logger.info(f"App input and output path: {app_input_path}, {app_output_path}")
75+
4876
# Creates the custom operator(s) as well as SDK built-in operator(s).
49-
study_loader_op = DICOMDataLoaderOperator()
50-
series_selector_op = DICOMSeriesSelectorOperator()
51-
series_to_vol_op = DICOMSeriesToVolumeOperator()
77+
study_loader_op = DICOMDataLoaderOperator(
78+
self, CountCondition(self, 1), input_folder=app_input_path, name="dcm_loader_op"
79+
)
80+
series_selector_op = DICOMSeriesSelectorOperator(self, rules=Sample_Rules_Text, name="series_selector_op")
81+
series_to_vol_op = DICOMSeriesToVolumeOperator(self, name="series_to_vol_op")
5282
# Model specific inference operator, supporting MONAI transforms.
53-
unetr_seg_op = UnetrSegOperator()
54-
# Create the publisher operator
55-
publisher_op = PublisherOperator()
83+
seg_op = UnetrSegOperator(self, model_path=model_path, name="seg_op")
84+
5685
# Create the surface mesh STL conversion operator, for all segments
5786
stl_conversion_op = STLConversionOperator(
58-
output_file="stl/multi-organs.stl", keep_largest_connected_component=False
87+
self,
88+
output_file=app_output_path.joinpath("stl/mesh.stl"),
89+
keep_largest_connected_component=False,
90+
name="stl_op",
5991
)
6092

6193
# Create DICOM Seg writer providing the required segment description for each segment with
@@ -100,28 +132,26 @@ def compose(self):
100132
for organ in organs
101133
]
102134

103-
dicom_seg_writer = DICOMSegmentationWriterOperator(segment_descriptions)
135+
dicom_seg_writer = DICOMSegmentationWriterOperator(
136+
self, segment_descriptions=segment_descriptions, output_folder=app_output_path, name="dcm_seg_writer_op"
137+
)
104138

105139
# Create the processing pipeline, by specifying the source and destination operators, and
106140
# ensuring the output from the former matches the input of the latter, in both name and type.
107-
self.add_flow(study_loader_op, series_selector_op, {"dicom_study_list": "dicom_study_list"})
141+
self.add_flow(study_loader_op, series_selector_op, {("dicom_study_list", "dicom_study_list")})
108142
self.add_flow(
109-
series_selector_op, series_to_vol_op, {"study_selected_series_list": "study_selected_series_list"}
143+
series_selector_op, series_to_vol_op, {("study_selected_series_list", "study_selected_series_list")}
110144
)
111-
self.add_flow(series_to_vol_op, unetr_seg_op, {"image": "image"})
112-
self.add_flow(unetr_seg_op, stl_conversion_op, {"seg_image": "image"})
113-
114-
# Add the publishing operator to save the input and seg images for Render Server.
115-
# Note the PublisherOperator has temp impl till a proper rendering module is created.
116-
self.add_flow(unetr_seg_op, publisher_op, {"saved_images_folder": "saved_images_folder"})
145+
self.add_flow(series_to_vol_op, seg_op, {("image", "image")})
146+
self.add_flow(seg_op, stl_conversion_op, {("seg_image", "image")})
117147

118148
# Note below the dicom_seg_writer requires two inputs, each coming from a source operator.
119149
self.add_flow(
120-
series_selector_op, dicom_seg_writer, {"study_selected_series_list": "study_selected_series_list"}
150+
series_selector_op, dicom_seg_writer, {("study_selected_series_list", "study_selected_series_list")}
121151
)
122-
self.add_flow(unetr_seg_op, dicom_seg_writer, {"seg_image": "seg_image"})
152+
self.add_flow(seg_op, dicom_seg_writer, {("seg_image", "seg_image")})
123153

124-
self._logger.debug(f"End {self.compose.__name__}")
154+
self._logger.info(f"End {self.compose.__name__}")
125155

126156

127157
if __name__ == "__main__":
@@ -132,53 +162,8 @@ def compose(self):
132162
# e.g.
133163
# python3 app.py -i input -m model/model.ts
134164
#
135-
import shutil
136-
import traceback
137-
from pathlib import Path
138-
139-
logging.basicConfig(level=logging.DEBUG)
140-
# This main function is an example to show how a batch of input can be processed.
141-
# It assumes that in the app input folder there are a number of subfolders, each
142-
# containing a discrete input to be processed. Each discrete payload can have
143-
# multiple DICOM instance files, optionally organized in its own folder structure.
144-
# The application object is first created, and on its init the model network is
145-
# loaded as well as pre and post processing transforms. This app object is then
146-
# run multiple times, each time with a single discrete payload.
147-
148-
app = AIUnetrSegApp(do_run=False)
149-
150-
# Preserve the application top level input and output folder path, as the path
151-
# in the context may change on each run if the I/O arguments are passed in.
152-
app_input_path = Path(app.context.input_path)
153-
app_output_path = Path(app.context.output_path)
154-
155-
# Get subfolders in the input path, assume each one contains a discrete payload
156-
input_dirs = [path for path in app_input_path.iterdir() if path.is_dir()]
157-
158-
# Set the output path for each run under the app's output path, and do run
159-
work_dirs: List[str] = [] # strings representing folder path
160-
for idx, dir in enumerate(input_dirs):
161-
try:
162-
output_path = app_output_path / f"{dir.name}_output"
163-
# Note: the work_dir should be mapped to the host drive when used in
164-
# a container for better performance.
165-
work_dir = f".unetr_app_workdir{idx}"
166-
work_dirs.extend(work_dir)
167-
168-
logging.info(f"Start processing input in: {dir} with results in: {output_path}")
169-
170-
# Run app with specific input and output path.
171-
# Passing in the input and output does have the side effect of changing
172-
# app context. This side effect will likely be eliminated in later releases.
173-
app.run(input=dir, output=output_path, workdir=work_dir)
174-
175-
logging.info(f"Completed processing input in: {dir} with results in: {output_path}")
176-
except Exception as ex:
177-
logging.error(f"Failed processing input in {dir}, due to: {ex}\n")
178-
traceback.print_exc()
179-
finally:
180-
# Remove the workdir; alternatively do this later, if storage space is not a concern.
181-
shutil.rmtree(work_dir, ignore_errors=True)
182-
183-
# Alternative. Explicitly remove the working dirs at the end of main.
184-
# [shutil.rmtree(work_dir, ignore_errors=True) for work_dir in work_dirs]
165+
166+
load_env_log_level()
167+
logging.info(f"Begin {__name__}")
168+
AIUnetrSegApp().run()
169+
logging.info(f"End {__name__}")

0 commit comments

Comments
 (0)