diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py
index 31ae82be52..f202864bf9 100644
--- a/nipype/interfaces/base/core.py
+++ b/nipype/interfaces/base/core.py
@@ -32,6 +32,7 @@
 import simplejson as json
 from dateutil.parser import parse as parseutc
 
+from ...utils.telemetry import prepare_execution_stats, submit_telemetry
 from ... import config, logging, LooseVersion
 from ...utils.provenance import write_provenance
 from ...utils.misc import trim, str2bool, rgetcwd
@@ -583,6 +584,13 @@ def run(self, cwd=None, ignore_exception=None, **inputs):
                     'rss_GiB': (vals[:, 2] / 1024).tolist(),
                     'vms_GiB': (vals[:, 3] / 1024).tolist(),
                 }
+
+            runtime.interface_class_name = '{}.{}'.format(self.__module__,
+                                                          self.__class__.__name__)
+            if config.getboolean('monitoring', 'telemetry'):
+                # it only makes sense to send execution stats if profiler is enabled
+                payload = prepare_execution_stats(results)
+                submit_telemetry(payload)
         os.chdir(syscwd)
 
         return results
diff --git a/nipype/utils/config.py b/nipype/utils/config.py
index e4e518960c..caf76268ef 100644
--- a/nipype/utils/config.py
+++ b/nipype/utils/config.py
@@ -71,6 +71,7 @@
 
 [monitoring]
 enabled = false
+telemetry = false
sample_frequency = 1
 summary_append = true
 
diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py
index f73443b348..dc0ed0405f 100644
--- a/nipype/utils/misc.py
+++ b/nipype/utils/misc.py
@@ -18,6 +18,7 @@
 import numpy as np
 from future.utils import raise_from
 from future import standard_library
+
 try:
     from textwrap import indent as textwrap_indent
 except ImportError:
diff --git a/nipype/utils/telemetry.py b/nipype/utils/telemetry.py
new file mode 100644
index 0000000000..6ef17faad4
--- /dev/null
+++ b/nipype/utils/telemetry.py
@@ -0,0 +1,65 @@
+import os
+import collections
+import requests
+import nibabel as nb
+from json import dumps
+
+from .filemanip import split_filename
+
+def submit_telemetry(payload):
+    headers = {'Authorization': '', "Content-Type": "application/json"}
+    webapi_url = "http://127.0.0.1/api/v1/nipype_telemetry"
+
+    response = requests.post(webapi_url, headers=headers, data=dumps(payload))
+
+
+def prepare_execution_stats(interface_results):
+    payload = {}
+    payload['interface_class_name'] = str(interface_results.runtime.interface_class_name)
+    payload['version'] = str(interface_results.runtime.version)
+    payload['duration_sec'] = float(interface_results.runtime.duration)
+    payload['mem_peak_gb'] = float(interface_results.runtime.mem_peak_gb)
+    payload['inputs'] = extract_meta_from_filenames(interface_results.inputs)
+    print(dumps(payload))
+    return payload
+
+
+def extract_meta_from_filenames(inputs_dict):
+
+    def parse_item(item):
+        if isinstance(item, str) and os.path.exists(item) and os.path.isfile(item):
+            try:
+                nii = nb.load(item)
+            except nb.filebasedimages.ImageFileError:
+                return item
+            stat_dict = {}
+            stat_dict['shape'] = list(nii.shape)
+            stat_dict['dtype'] = str(nii.get_data_dtype())
+            statinfo = os.stat(item)
+            stat_dict['st_size'] = statinfo.st_size
+            _, _, stat_dict['ext'] = split_filename(item)
+            return stat_dict
+        else:
+            return item
+
+    def crawl_dict(d, u):
+        for k, v in u.items():
+            if isinstance(v, collections.Mapping):
+                d[k] = crawl_dict({}, v)
+            elif isinstance(v, list):
+                d[k] = crawl_list([], v)
+            else:
+                d[k] = parse_item(v)
+        return d
+
+    def crawl_list(l, u):
+        for v in u:
+            if isinstance(v, list):
+                l.append(crawl_list([], v))
+            elif isinstance(v, collections.Mapping):
+                l.append(crawl_dict({}, v))
+            else:
+                l.append(parse_item(v))
+        return l
+
+    return crawl_dict({}, inputs_dict)
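
For reference, a minimal usage sketch of the new option (the interface and input filename are placeholders; `submit_telemetry` POSTs to the hardcoded local endpoint in `telemetry.py` above):

```python
from nipype import config
from nipype.interfaces import fsl

# Telemetry is only triggered from inside the resource-monitor branch of
# run(), so both switches need to be on.
config.enable_resource_monitor()
config.set('monitoring', 'telemetry', 'true')

# Placeholder interface and input; any monitored interface run works
# the same way.
res = fsl.BET(in_file='structural.nii.gz').run()

# After run() returns, prepare_execution_stats(res) has assembled the class
# name, version, duration, peak memory, and per-file input metadata, and
# submit_telemetry() has POSTed the payload as JSON.
```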