diff --git a/multimodal/openi_multilabel_classification_transchex/README.md b/multimodal/openi_multilabel_classification_transchex/README.md new file mode 100644 index 0000000000..970e3a5969 --- /dev/null +++ b/multimodal/openi_multilabel_classification_transchex/README.md @@ -0,0 +1,23 @@ +# Preprocessing Open-I Dataset + +The Open-I dataset provides a collection of 3,996 radiology reports +with 8,121 associated images in PA, AP and lateral views. In this tutorial, we utilize the images from the frontal view with their corresponding reports for training and +evaluation of the TransChex model. The chest X-ray images and reports are originally from the Indiana University hospital (see the licensing information below). +The 14 finding categories in this work include Atelectasis, Cardiomegaly, Consolidation, Edema, Enlarged-Cardiomediastinum, Fracture, Lung-Lesion, Lung-Opacity, No-Finding, Pleural-Effusion, Pleural-Other, Pneumonia, Pneumothorax and Support-Devices. More information can be found at the following link: +https://openi.nlm.nih.gov/faq + +License: Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0) + +In this section, we provide the steps needed for preprocessing the Open-I dataset for +the multi-label disease classification tutorial using the TransChex model. Once the following steps are +completed, the dataset can be readily used for the tutorial. + +### Preprocessing Steps +1) Create a new folder named 'monai_data' for downloading the raw data and preprocessing. +2) Download the chest X-ray images in PNG format from this [link](https://openi.nlm.nih.gov/imgs/collections/NLMCXR_png.tgz). Copy the downloaded file (NLMCXR_png.tgz) +to the 'monai_data' directory and extract it. +3) Download the reports in XML format from this [link](https://openi.nlm.nih.gov/imgs/collections/NLMCXR_reports.tgz). Copy the downloaded file (NLMCXR_reports.tgz) +to the 'monai_data' directory and extract it. +4) Download the train, validation and test dataset splits from this [link](https://drive.google.com/u/1/uc?id=1jvT0jVl9mgtWy4cS7LYbF43bQE4mrXAY&export=download). Copy the downloaded file (TransChex_openi.zip) +to the 'monai_data' directory and extract it. +5) Run 'preprocess_openi.py' to process the images and reports. diff --git a/multimodal/openi_multilabel_classification_transchex/preprocess_openi.py b/multimodal/openi_multilabel_classification_transchex/preprocess_openi.py new file mode 100644 index 0000000000..dcba4b0d05 --- /dev/null +++ b/multimodal/openi_multilabel_classification_transchex/preprocess_openi.py @@ -0,0 +1,119 @@ +# Copyright 2020 - 2021 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
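+# Overview of this script (descriptive summary of the code below): it resizes the
+# frontal chest X-ray images to 512 x 512, pairs each image with the FINDINGS and
+# IMPRESSION sections of its report, and writes train.csv, validation.csv and test.csv
+# with one ground-truth column per finding category under ./monai_data/dataset_proc/.
+# The input paths defined below assume the archives from the README have been extracted
+# so that the PNG images and XML reports live under ./monai_data/dataset_orig/ and the
+# split files (train.npy, validation.npy, test.npy) sit next to this script.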
+ +import os +from os import listdir +from os.path import isfile, join +import numpy as np +from xml.dom import minidom +from PIL import Image +import pandas as pd +import xml.etree.ElementTree as ET + +def create_report(img_names_list_, report_list_, gt_list_, save_add): + pd.DataFrame({'id': img_names_list_, 'report': report_list_, 'Atelectasis': gt_list_[:, 0], + 'Cardiomegaly': gt_list_[:, 1], 'Consolidation': gt_list_[:, 2],'Edema': gt_list_[:, 3], + 'Enlarged-Cardiomediastinum': gt_list_[:, 4], 'Fracture': gt_list_[:, 5], 'Lung-Lesion': gt_list_[:, 6], + 'Lung-Opacity': gt_list_[:, 7], 'No-Finding': gt_list_[:, 8], 'Pleural-Effusion': gt_list_[:, 9], + 'Pleural_Other': gt_list_[:, 10], 'Pneumonia': gt_list_[:, 11], 'Pneumothorax': gt_list_[:, 12], + 'Support-Devices': gt_list_[:, 13]}).to_csv(save_add, index=False) + +report_file_add= './monai_data/dataset_orig/NLMCXR_reports/ecgen-radiology' +img_file_add= './monai_data/dataset_orig/NLMCXR_png' +img_save_add = './monai_data/dataset_proc/images' +report_train_save_add = './monai_data/dataset_proc/train.csv' +report_val_save_add = './monai_data/dataset_proc/validation.csv' +report_test_save_add = './monai_data/dataset_proc/test.csv' + +if not os.path.isdir(img_save_add): + os.makedirs(img_save_add) +report_files = [f for f in listdir(report_file_add) if isfile(join(report_file_add, f))] + +train_data = np.load('./train.npy', allow_pickle=True).item() +train_data_id = train_data['id_GT'] +train_data_gt = train_data['label_GT'] + +val_data = np.load('./validation.npy', allow_pickle=True).item() +val_data_id = val_data['id_GT'] +val_data_gt = val_data['label_GT'] + +test_data = np.load('./test.npy', allow_pickle=True).item() +test_data_id = test_data['id_GT'] +test_data_gt = test_data['label_GT'] + +all_cases = np.union1d(np.union1d(train_data_id, val_data_id), test_data_id) + +img_names_list_train = [] +img_names_list_val = [] +img_names_list_test = [] + +report_list_train = [] +report_list_val = [] +report_list_test = [] + +gt_list_train = [] +gt_list_val = [] +gt_list_test = [] + +for file in report_files: + print('Processing {}'.format(file)) + add_xml = os.path.join(report_file_add, file) + docs = minidom.parse(add_xml) + tree = ET.parse(add_xml) + for node in tree.iter('AbstractText'): + i = 0 + for elem in node.iter(): + if elem.attrib['Label'] == "FINDINGS": + if elem.text == None: + report = "FINDINGS : " + else: + report = "FINDINGS : " + elem.text + elif elem.attrib['Label'] == "IMPRESSION": + if elem.text == None: + report = report + " IMPRESSION : " + else: + report = report + " IMPRESSION : " + elem.text + images = docs.getElementsByTagName("parentImage") + for i in images: + img_name = i.getAttribute("id") + '.png' + if img_name in all_cases: + Image.open(os.path.join(img_file_add, img_name)).resize((512, 512)).save( + os.path.join(img_save_add, img_name)) + if img_name in train_data_id: + img_names_list_train.append(img_name) + report_list_train.append(report) + gt_list_train.append(train_data_gt[np.where(train_data_id==img_name)[0][0]]) + elif img_name in val_data_id: + img_names_list_val.append(img_name) + report_list_val.append(report) + gt_list_val.append(val_data_gt[np.where(val_data_id == img_name)[0][0]]) + elif img_name in test_data_id: + img_names_list_test.append(img_name) + report_list_test.append(report) + gt_list_test.append(test_data_gt[np.where(test_data_id == img_name)[0][0]]) + +datasets = [{"save_add": report_train_save_add, + "img_name": np.array(img_names_list_train), + "report": 
np.array(report_list_train), + "gt": np.array(gt_list_train)}, + {"save_add": report_val_save_add, + "img_name": np.array(img_names_list_val), + "report": np.array(report_list_val), + "gt": np.array(gt_list_val)}, + {"save_add": report_test_save_add, + "img_name": np.array(img_names_list_test), + "report": np.array(report_list_test), + "gt": np.array(gt_list_test)} + ] +for dataset in datasets: + create_report(dataset["img_name"], dataset["report"], dataset["gt"], dataset["save_add"]) + +print('Processed Dataset Files Are Saved !') diff --git a/multimodal/openi_multilabel_classification_transchex/transchex_openi_multilabel_classification.ipynb b/multimodal/openi_multilabel_classification_transchex/transchex_openi_multilabel_classification.ipynb new file mode 100644 index 0000000000..3a68226137 --- /dev/null +++ b/multimodal/openi_multilabel_classification_transchex/transchex_openi_multilabel_classification.ipynb @@ -0,0 +1,593 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Chest X-ray Multi-Label Disease Classification With TransCheX \n", + "\n", + "This tutorial demonstrates how to construct a training workflow of TransCheX model [1] for chest X-ray multi-label disease classification using Open-I dataset. The TransCheX is multi-modal transformer-based model consisting of vision, language and mixed modality encoder that is designed for chest X-ray image classification.\n", + "\n", + "The Open-I dataset provides a collection of 3,996 radiology reports with 8,121 associated images in PA, AP and lateral views. In this tutorial, we utilize the images from fronal view with their corresponding reports for training and evaluation of the TransChex model.The 14 finding categories in this work include Atelectasis, Cardiomegaly, Consolidation, Edema, Enlarged-Cardiomediastinum, Fracture, Lung-Lesion, Lung-Opacity, No-Finding, Pleural-Effusion, Pleural-Other, Pneumonia, Pneumothorax and Support-Devices. More information can be found in the following link: \n", + "https://openi.nlm.nih.gov/faq\n", + "\n", + "License: Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)\n", + "\n", + "An example of images and corresponding reports in Open-I dataset is presented as follows [2]:\n", + "![image](https://lh3.googleusercontent.com/bmqrTg0oKbfuwced1SiNdZruqbex_srBbJ9p4nmceAfZaf0FFkl9pzc9UUFP3-6AxxxWDaWbLXfmev5E6_0RmEzd0rLQ1NciF7PTzOkbUcRTJIUKgcpxKZsYnw3L17ATvIFBD47xSIWWiCD28vWBVN1k72P2UPorK1GQJUFEbmDAfGn0XRM2rzwB29SXB2hEtQmbWbe4u4msvcX4spx2rEH-6Qrd-iQRMyDAhq0lstRYBvxtu7ZLRrwtj_P5FQRKeW0hEFqTCQZvKmC75FKoUiltHDfsAl2mig2nsUH0KDBc3atPn9lSBGBFOXsHZdsqw4Q86sXz0roz1vKQWJWcSG7l5YqmPoz5KGrspIs5OJ7QxVvVSmmbe8ctk-T7eBoz3juZ3ux5QhYT2C1BYxGVutLh017FAskyZ1on4BkDTlkLrKSUpbU5la9IrugKM_lAso_cM2ALWb07n-yjsYUJL55oyJBMLCRXyIIutrQSGJW0RwM5LBIgwyklV9P_bRF3_w36hoqtHFNbzN5zrW-RAeJS2nCTYOElmRhzbdl4CwbgVUuStEm66vfUhwtWBMgybyQKb3WVTx69FcgnNC7tuDiPHpU3UuDlNXjKkuh35kxNcbJGYh8ZTY3jmoiVd_nrN9Yh5scCaxxdMtNRgxMWaGFoj7Dl3enBM2wR2FNotZ10smre6F7acOfKSYceAvQXWCzSnZ_C5PJ1szrEFa6v3wn4=w805-h556-no?authuser=0)\n", + "\n", + "In this tutorial, we use the TransCheX model with 2 layers for each of vision, language mixed modality encoders respectively. As an input to the TransCheX, we use the patient **report** and corresponding **chest X-ray image**. The image itself will be divided into non-overlapping patches with a specified patch resolution and projected into an embedding space. Similarly the reports are tokenized and projected into their respective embedding space. 
The language and vision encoders separately encode their respective features from the projected embeddings in each modality. Furthermore, the outputs of the vision and language encoders are fed into a mixed modality encoder which extracts mutual information. The output of the mixed modality encoder is then utilized for the classification application. \n", + "\n", + "[1] : \"Hatamizadeh et al., TransCheX: Self-Supervised Pretraining of Vision-Language Transformers for Chest X-ray Analysis\"\n", + "\n", + "[2] : \"Shin et al., Learning to Read Chest X-Rays: Recurrent Neural Cascade Model for Automated Image Annotation\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup environment" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install -q \"monai[transformers, pandas]\"\n", + "!pip install -q scikit-learn==0.20.3\n", + "!python -c \"import matplotlib\" || pip install -q matplotlib\n", + "%matplotlib inline" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Import dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import torch\n", + "import numpy as np\n", + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "from PIL import Image\n", + "from torchvision import transforms\n", + "from sklearn.metrics.ranking import roc_auc_score\n", + "from monai.optimizers.lr_scheduler import WarmupCosineSchedule\n", + "from monai.networks.nets import Transchex\n", + "from monai.config import print_config\n", + "from monai.utils import set_determinism\n", + "from torch.utils.data import Dataset, DataLoader\n", + "from transformers import BertTokenizer" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download and pre-process the dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Download the Open-I dataset from the following link, for both the chest X-ray images and corresponding reports, and pre-process the dataset using the provided script:\n", + "\n", + "https://openi.nlm.nih.gov/faq\n", + "\n", + "Please refer to the pre-processing guide (the README in this tutorial's folder) for more details. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "datadir = \"./monai_data\"\n", + "if not os.path.exists(datadir):\n", + " os.makedirs(datadir)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Print Configurations " + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MONAI version: 0.8.0\n", + "Numpy version: 1.21.0\n", + "Pytorch version: 1.6.0\n", + "MONAI flags: HAS_EXT = False, USE_COMPILED = False\n", + "MONAI rev id: 714d00dffe6653e21260160666c4c201ab66511b\n", + "\n", + "Optional dependencies:\n", + "Pytorch Ignite version: 0.4.4\n", + "Nibabel version: 3.1.1\n", + "scikit-image version: 0.14.2\n", + "Pillow version: 8.3.1\n", + "Tensorboard version: 2.2.0\n", + "gdown version: 3.13.0\n", + "TorchVision version: 0.7.0\n", + "tqdm version: 4.59.0\n", + "lmdb version: 1.2.1\n", + "psutil version: 5.6.1\n", + "pandas version: 0.24.2\n", + "einops version: 0.3.0\n", + "transformers version: 4.10.2\n", + "mlflow version: NOT INSTALLED or UNKNOWN VERSION.\n", + "\n", + "For details about installing the optional dependencies, please visit:\n", + " https://docs.monai.io/en/latest/installation.html#installing-the-recommended-dependencies\n", + "\n" + ] + } + ], + "source": [ + "torch.backends.cudnn.benchmark = True\n", + "\n", + "print_config()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set deterministic training for reproducibility\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "set_determinism(seed=0)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup dataloaders and transforms for training/validation/testomg" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "class MultiModalDataset(Dataset):\n", + " def __init__(self, dataframe, tokenizer, parent_dir, max_seq_length=512):\n", + " self.max_seq_length = max_seq_length\n", + " self.tokenizer = tokenizer\n", + " self.data = dataframe\n", + " self.report_summary = self.data.report\n", + " self.img_name = self.data.id\n", + " self.targets = self.data.list\n", + "\n", + " self.preprocess = transforms.Compose(\n", + " [\n", + " transforms.Resize(256),\n", + " transforms.ToTensor(),\n", + " transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5]),\n", + " ]\n", + " )\n", + " self.parent_dir = parent_dir\n", + "\n", + " def __len__(self):\n", + " return len(self.report_summary)\n", + "\n", + " def encode_features(self, sent, max_seq_length, tokenizer):\n", + " tokens = tokenizer.tokenize(sent.strip())\n", + " if len(tokens) > max_seq_length - 2:\n", + " tokens = tokens[: (max_seq_length - 2)]\n", + " tokens = [\"[CLS]\"] + tokens + [\"[SEP]\"]\n", + " input_ids = tokenizer.convert_tokens_to_ids(tokens)\n", + " segment_ids = [0] * len(input_ids)\n", + " while len(input_ids) < max_seq_length:\n", + " input_ids.append(0)\n", + " segment_ids.append(0)\n", + " assert len(input_ids) == max_seq_length\n", + " assert len(segment_ids) == max_seq_length\n", + " return input_ids, segment_ids\n", + "\n", + " def __getitem__(self, index):\n", + " name = self.img_name[index].split(\".\")[0]\n", + " img_address = os.path.join(self.parent_dir, self.img_name[index])\n", + " image = Image.open(img_address)\n", + " images = self.preprocess(image)\n", + " report = 
str(self.report_summary[index])\n", + " report = \" \".join(report.split())\n", + " input_ids, segment_ids = self.encode_features(\n", + " report, self.max_seq_length, self.tokenizer\n", + " )\n", + " input_ids = torch.tensor(input_ids, dtype=torch.long)\n", + " segment_ids = torch.tensor(segment_ids, dtype=torch.long)\n", + " targets = torch.tensor(self.targets[index], dtype=torch.float)\n", + " return {\n", + " \"ids\": input_ids,\n", + " \"segment_ids\": segment_ids,\n", + " \"name\": name,\n", + " \"targets\": targets,\n", + " \"images\": images,\n", + " }" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + " ## Setup the model directory, tokenizer and dataloaders\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "def load_txt_gt(add):\n", + " txt_gt = pd.read_csv(add)\n", + " txt_gt[\"list\"] = txt_gt[txt_gt.columns[2:]].values.tolist()\n", + " txt_gt = txt_gt[[\"id\", \"report\", \"list\"]].copy()\n", + " return txt_gt\n", + "\n", + "\n", + "logdir = \"./logdir\"\n", + "if not os.path.exists(logdir):\n", + " os.makedirs(logdir)\n", + "\n", + "parent_dir = \"./monai_data/dataset_proc/images/\"\n", + "train_txt_gt = load_txt_gt(\"./monai_data/dataset_proc/train.csv\")\n", + "val_txt_gt = load_txt_gt(\"./monai_data/dataset_proc/validation.csv\")\n", + "test_txt_gt = load_txt_gt(\"./monai_data/dataset_proc/test.csv\")\n", + "batch_size = 32\n", + "num_workers = 8\n", + "tokenizer = BertTokenizer.from_pretrained(\"bert-base-uncased\", do_lower_case=False)\n", + "training_set = MultiModalDataset(train_txt_gt, tokenizer, parent_dir)\n", + "train_params = {\n", + " \"batch_size\": batch_size,\n", + " \"shuffle\": True,\n", + " \"num_workers\": num_workers,\n", + " \"pin_memory\": True,\n", + "}\n", + "training_loader = DataLoader(training_set, **train_params)\n", + "valid_set = MultiModalDataset(val_txt_gt, tokenizer, parent_dir)\n", + "test_set = MultiModalDataset(test_txt_gt, tokenizer, parent_dir)\n", + "valid_params = {\"batch_size\": 1, \"shuffle\": False, \"num_workers\": 1, \"pin_memory\": True}\n", + "val_loader = DataLoader(valid_set, **valid_params)\n", + "test_loader = DataLoader(test_set, **valid_params)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create Model, Loss, Optimizer\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", + "total_epochs = 15\n", + "eval_num = 1\n", + "lr = 1e-4\n", + "weight_decay = 1e-5\n", + "\n", + "model = Transchex(\n", + " in_channels=3,\n", + " img_size=(256, 256),\n", + " num_classes=14,\n", + " patch_size=(32, 32),\n", + " num_language_layers=2,\n", + " num_vision_layers=2,\n", + " num_mixed_layers=2,\n", + ").to(device)\n", + "\n", + "loss_bce = torch.nn.BCELoss().cuda()\n", + "optimizer = torch.optim.Adam(\n", + " params=model.parameters(), lr=lr, weight_decay=weight_decay\n", + ")\n", + "scheduler = WarmupCosineSchedule(optimizer, warmup_steps=5, t_total=total_epochs)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Execute a typical PyTorch training process" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "def save_ckp(state, checkpoint_dir):\n", + " torch.save(state, checkpoint_dir)\n", + "\n", + "\n", + "def compute_AUCs(gt, pred, num_classes=14):\n", + " with 
torch.no_grad():\n", + " AUROCs = []\n", + " gt_np = gt\n", + " pred_np = pred\n", + " for i in range(num_classes):\n", + " AUROCs.append(roc_auc_score(gt_np[:, i].tolist(), pred_np[:, i].tolist()))\n", + " return AUROCs\n", + "\n", + "\n", + "def train(epoch):\n", + " model.train()\n", + " for i, data in enumerate(training_loader, 0):\n", + " input_ids = data[\"ids\"].cuda()\n", + " segment_ids = data[\"segment_ids\"].cuda()\n", + " img = data[\"images\"].cuda()\n", + " targets = data[\"targets\"].cuda()\n", + " logits_lang = model(\n", + " input_ids=input_ids, vision_feats=img, token_type_ids=segment_ids\n", + " )\n", + " loss = loss_bce(torch.sigmoid(logits_lang), targets)\n", + " optimizer.zero_grad()\n", + " loss.backward()\n", + " optimizer.step()\n", + " print(f\"Epoch: {epoch}, Iteration: {i}, Loss_Tot: {loss}\")\n", + "\n", + "\n", + "def validation(testing_loader):\n", + " model.eval()\n", + " targets_in = np.zeros((len(testing_loader), 14))\n", + " preds_cls = np.zeros((len(testing_loader), 14))\n", + " val_loss = []\n", + " with torch.no_grad():\n", + " for _, data in enumerate(testing_loader, 0):\n", + " input_ids = data[\"ids\"].cuda()\n", + " segment_ids = data[\"segment_ids\"].cuda()\n", + " img = data[\"images\"].cuda()\n", + " targets = data[\"targets\"].cuda()\n", + " logits_lang = model(\n", + " input_ids=input_ids, vision_feats=img, token_type_ids=segment_ids\n", + " )\n", + " prob = torch.sigmoid(logits_lang)\n", + " loss = loss_bce(prob, targets).item()\n", + " targets_in[_, :] = targets.detach().cpu().numpy()\n", + " preds_cls[_, :] = prob.detach().cpu().numpy()\n", + " val_loss.append(loss)\n", + " auc = compute_AUCs(targets_in, preds_cls, 14)\n", + " mean_auc = np.mean(auc)\n", + " mean_loss = np.mean(val_loss)\n", + " print(\n", + " \"Evaluation Statistics: Mean AUC : {}, Mean Loss : {}\".format(\n", + " mean_auc, mean_loss\n", + " )\n", + " )\n", + " return mean_auc, mean_loss, auc\n", + "\n", + "\n", + "auc_val_best = 0.0\n", + "epoch_loss_values = []\n", + "metric_values = []\n", + "for epoch in range(total_epochs):\n", + " train(epoch)\n", + " auc_val, loss_val, _ = validation(val_loader)\n", + " epoch_loss_values.append(loss_val)\n", + " metric_values.append(auc_val)\n", + " if auc_val > auc_val_best:\n", + " checkpoint = {\n", + " \"epoch\": epoch,\n", + " \"state_dict\": model.state_dict(),\n", + " \"optimizer\": optimizer.state_dict(),\n", + " }\n", + " save_ckp(checkpoint, logdir + \"/transchex.pt\")\n", + " auc_val_best = auc_val\n", + " print(\n", + " \"Model Was Saved ! Current Best Validation AUC: {} Current AUC: {}\".format(\n", + " auc_val_best, auc_val\n", + " )\n", + " )\n", + " else:\n", + " print(\n", + " \"Model Was NOT Saved ! Current Best Validation AUC: {} Current AUC: {}\".format(\n", + " auc_val_best, auc_val\n", + " )\n", + " )\n", + " scheduler.step()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training Finished ! Best Validation AUC: 0.9533 \n" + ] + } + ], + "source": [ + "print(f\"Training Finished ! 
Best Validation AUC: {auc_val_best:.4f} \")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Plot the loss and metric" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAsYAAAGDCAYAAAAoFdb3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xl8XOV59//PNSONlpEXbd5XjG28sTomQLaGQgxJISu1yUKeXxq6hDTN0l9I24ekpGmWZmnS0KakpVnBJdAmbuqEkK0hgKnN6gXLG9iWZBlJliVZkq3tev44R2YQsi3Zks7Mme/79dLLc86cmbmGxKOv77nu+zZ3R0REREQk3yWiLkBEREREJBsoGIuIiIiIoGAsIiIiIgIoGIuIiIiIAArGIiIiIiKAgrGIiIiICKBgLCIiIidhZvPMzM2sIOpaRMaDgrGcNTP7tZm1mFlR1LWMhvD9/EHUdYiInC0ze8DMbh/i/PVm1nC2gdfMnjezbjOrGnT+qTBQzzub5z+LuuabWb+Z/eOg80MGfTP7lpn9TcbxdDP7VzM7aGbtZrbDzP7azNLj9R4kGgrGclbCD71XAw5cN0avoZEKEZEz8y3g3WZmg86/G/i+u/eOwms8B6wdODCzFUDJKDzv2XgP0AKsGemgjZlVAI8SvIfL3H0CcBUwGVgw2oVKdlEwlrP1HmAjwYfvTQMnzeyV4WhEMuPcW8zsmfB2wsxuNbM9ZtZsZveGH0aZ/6J/n5ntB34Znv9B+JytZvYbM1uW8dyVZvZfZtZmZpvM7G/M7LcZ959nZg+a2WEzqzGzG87kzZrZdWa2zcyOhCPLSzLu+7iZ1YWjCzVmdmV4fpWZbQ5rO2RmXz6T1xYROQM/BCoIBjAAMLNy4E3Ad8LjN5rZk+Fn1AEz+9QIX+O7BL8LBtw08NwZr1lkZl80s/3h5+A3zKxkoB4z+7GZNYbfPv7YzGZlPPbXZvZpM3s4/Hz92eAR6iG8B/groAf4vRG+n48A7cC73P15AHc/4O4fcvdnRvhckmMUjOVsvQf4fvjzBjObCuDuG4EO4PUZ194I3B3e/lPgzcBrgRkE/7K/Y9BzvxZYArwhPP4JsBCYAjwRvuaAO8LXm0bwoZwZ0tPAg+FrTyEY2fjHzGA9HGa2CLgH+DOgGtgA/JeZpcxsMXAL8IpwdOENwPPhQ78KfNXdJxKMNtw7ktcVETlT7t5F8JmTGVxvAHa4+9PhcUd4/2TgjcAfm9mbR/AyG4GJZrYkHAz5feB7g675PLAIuBA4F5gJ3BbelwD+DZgLzAG6gK8PevyNwP8h+AxPAR87WTFm9mpgFrCOl7/34fhd4D/cvX+Ej5MYUDCWM2ZmryL4ILvX3R8H9hB8eA24h/DrNTObAFwbngP4Q+Av3b3W3Y8DnwLePqht4lPu3hF+sOPud7l7e8b1F5jZpPCD+G3AJ9290923A9/OeJ43Ac+7+7+5e6+7PwHcD7x9hG/594H/dvcH3b0H+CLBV22XA31AEbDUzArd/Xl33xM+rgc418yq3P1o+I8GEZHx8m3gHQMjtARB8cRnpLv/2t23uHt/OCJ6D8HAxEgMjBpfBewA6gbuCNs43g982N0Pu3s78LfAmvD1m939/vDzux34zBCv/2/uvjMj6F94ilpuAn7i7i0EAyLXmNmUEbyXSuDgCK6XGFEwlrNxE/Azd28Kj+8mY6Q2PH5r2N/1VuAJd98X3jcX+M+wJeEI8CxBuJya8fgDAzfMLGlmnwtbL9p4cTS2imD0tiDz+kG35wKXDrxW+HrvJBhdHokZwED9hKMJB4CZ7r6bYCT5U8ALZrbOzGaEl76PYKRkR9jm8aYRvq6IyBlz998CjcD1ZnYO8Ape/PYOM7vUzH4VtjK0An9E8Nk6Et8lGBh5L4PaKAg+o0uBxzM+g38ansfMSs3sn81sX/j5/htgcmYrHtCQcbsTKBuqiDD8v4PwG0V3fxTYz4uDNgM91YWDHlpIMIgB0AxMP90blnhSMJYzEn743AC8Nuz7bQA+TDCKewFAOHK7D7iGl7ZRQBAor3H3yRk/xe5el3GNZ9y+Ebie4CuuScC8gVIIPvB7Cb46GzB70Gv9z6DXKnP3Px7h264nCNkD/w0sfJ268P3e7e4Do+hO8NUh7r7L3dcSfAX4eeA+08xmERlf3yEY0X03wYDGoYz77gbWA7PdfRLwDYLP1mELBz2eI/hm8D8G3d1E0B6xLOMzeJK7D4TbjwKLgUvDlrPXhOdHVEPoLcBEgna5gd9NM3mxneIgQQCeN+hx83lx4OPnwFvMTBkpD+l/dDlTbyYY4V1K8JXWhQT9wA/x0n6uuwn6iV8D/CDj/DeAz5jZXAAzqzaz60/xehOA4wT/ki8l+BoOAHfvI/gg/lQ48nDeoBp+DCwys3ebWWH484rMiXNDKDCz4oyfQoKv795oZleGxx8Na3rEzBab2evD0fFjBL8E+sL39i4zqw5HmI+Ez993itcWERlt3yEYWHg/L201g+Dz9bC7HzOzVby0JW4k3ge83t07Mk+Gn33fBL4y0NJgZjPNbGD+yASCz8wjFkzC/uQZvj4E31reBazgxd9NVwAXmtmK8PfF/QS/fyrD3wdrCX6X/SR8ji8ThOtvZ/yOmmlmXzaz88+iNskBCsZypm4i6Pna7+4NAz8EEybemdErfA/wOuCXGS0XEExIWw/8zMzaCSZvXHqK1/sOwb/m64Dt4fWZbiEYSW4g+ErvHoLQStizdjVBP1t9eM3nCXqCT+afCD6oB37+zd1rgHcB/0AwAvJ7wO+5e3f4XJ8LzzcQjA7/Rfhcq4FtZnY0fN9r3P3YKV5bRGRUhasrPAKkCT57M/0JcHv4WXwbZzhB2N33uPvmk9z9cWA3sDFsl/g5wSgxwN8TzNdoIvhs/+mZvL6ZzQSuBP4+8/dSOAfmp7zY6vcnwGHgGeAFgt8fbxwYRXf3wwRzR3qAx8L/Lr8AWsP3IDFm7n76q0RyjJl9Hpjm7jed9mIRERERNGIsMWHBOsXnW2AVwVd6/xl1XSIiIpI7tKOYxMUEgvaJGQRfjX0J+FGkFYmIiEhOUSuFiIiIiAhqpRARERERARSMRURERESACHuMq6qqfN68eVG9vIjIGXv88ceb3L066jrGkz6zRSSXDfdzO7JgPG/ePDZvPtlyhyIi2cvM9p3+qnjRZ7aI5LLhfm6rlUJEREREBAVjERERERFAwVhEREREBFAwFhEREREBFIxFRERERAAFYxERERE
RQMFYRERERARQMBYRERERARSMRUREREQABWMREREREUDBWEREREQEgIKoCxiJTc8fprggyYpZk6IuRUQkcma2GvgqkAT+xd0/N+j+ucBdQDVwGHiXu9eG9/UBW8JL97v7deNWuIjEhrtzvLefYz19dPX00dUd/Hmsp4+u7n4SBuXpFOWlKSaXFlJcmIy65FPKqWD88fufYcm0idzxzoujLkVEJFJmlgTuAK4CaoFNZrbe3bdnXPZF4Dvu/m0zez3wWeDd4X1d7n7huBYtkoP2N3dS29LJilmTmFBcGHU59Pc7zza08eieZh7d08xTB47Q505hMkEqmaAgaRQmExQkjFRB8GdhMvGS28FPeF0yQSppFITnE0ZGsA3Dbk8/x07cDs6fCMI9fbgPv/7SVPJESC4vTYWhuZDJpSkqSgspT6fC2+E16RTpVBIzG7v/qBlyKhhXpYtoOno86jJERLLBKmC3u+8FMLN1wPVAZjBeCnw4vP0r4IfjWqFIjvvRU3V8/P5nONYTjHwunjaRS+ZO5pK55Vwyp4LZFSVjHtjcnd0vHOXRvc08sruZjc81c6SzB4BzqtJcuWQKxYVJevqcnr5+evv6T9zuybjdcbyX3n6nuzc439vv9PT209MfXhve7u93SgqTFKeSlBQmM24nqCpLUZoqoLgwSUkqMej+8CeVDO4Pb/f2OUc6u2np7KGls5uWjuD2kc5uDnd2U3eki5bOblq7ek4asFPJxIkgff1FM/iT1507Zv+9cyoYV5al2PXC0ajLEBHJBjOBAxnHtcClg655GngbQbvFW4AJZlbp7s1AsZltBnqBz7m7QrNIqLevn8//dAfffOg5Vs2r4A9few5b6lp5fF8LP3yynu9t3A9AVVnRi0F5bjnLZkw661YBd2f/4U4eCUeEH9nTfGJQcObkEq5aMpXLz63ksnOqmDap+Kzfa7bo63dau14anoM/X3q7rGhso+uwnn0YfWxfAX4nPCwFprj75NEsFIJgvHGvRoxFRIChhqkGj7d8DPi6mb0X+A1QRxCEAea4e72ZnQP80sy2uPuel7yA2c3AzQBz5swZzdpFslZLRzcfvOdJfru7ifdcNpf/+6alFCYTXLlkKhAEuF0vtPP4vhYe39fCE/taeGDbISAY2Vw+c+KJoHzxnHKmTDx9eK0/0nUiBD+6p4n61mMATJlQxKvOreSyBZVcvqCK2RWlY/fGI5ZMGBXpFBXpVDArIiKnDcbD6WNz9w9nXP9B4KIxqJXKdBEtnT309vVTkNSCGiKS12qB2RnHs4D6zAvcvR54K4CZlQFvc/fWjPtw971m9muCz+09gx5/J3AnwMqVK0fQRSiSm5492MbN393ModbjfP5tK/j9V7z8H4TJhHHetImcN20i77x0LgBNR4/zxL4WHt8fBOVvP7qPbz70HACzK0q4ZE4YlOeWs3jqBFo6e3h0bxCCH93TzPPNnQBUpFO88pwK/nhBFZedU8mC6vS49dZKYDgjxsPpY8u0Fvjk6JT3UlVlKQAOd3YzZUJ8vj4QETkDm4CFZjafYCR4DXBj5gVmVgUcdvd+4BMEK1RgZuVAp7sfD6+5AvjCeBYvkm3++5mDfOwHTzOxpIB1f/hKLp5TPuzHVpUVcfWyaVy9bBoA3b39bKsPWi+e2N/CI3ua+eFTwb9biwsTHOvpB2BCUQGXnlPJuy+bx+ULKlk8dQKJhIJwlIYTjIfTxwacWBpoPvDLk9x/Vl/LVZYVAdB8VMFYRPKbu/ea2S3AAwRtbne5+zYzux3Y7O7rgdcBnzUzJ2il+ED48CXAP5tZP8F69p8btJqFSN7o63e+9LMa/vHXe7h4zmS+8a5LhtX+cCqpggQXzSnnojBcuzt1R7p4fF8LTx9oZcrEIi5fUMmyGZNIKghnleEE4+H0sQ1YA9zn7n1D3Xm2X8tVpoMR4+aj3SN9qIhI7Lj7BmDDoHO3Zdy+D7hviMc9AqwY8wJFslxrVw8fWvckv65pZO2q2XzqumUUFYz+OrtmxqzyUmaVl3L9hTNH/fll9AwnGJ+2jy3DGl4ckRh1J0aMOzQBT0RERM7crkPt3Pzdx6lt6eQzb1l+ol9Y8ttwgvFp+9gAzGwxUA48OqoVZhjoMW7SiLGIiIicoQe2NfCRf3+KklQBd7//lbxiXkXUJUmWOG0wHmYfGwST7ta5j2T/k5GZWFxIQcJo1iYfIiIiMkL9/c7f/2IXX/vFLi6YNYlvvPsSpk8qibosySLDWsf4dH1s4fGnRq+soSXCNe7UYywiIiIj0X6shw//+9P8/NlDvP2SWfzNm5ef9WYcEj85tfMdBH3GzR0KxiIiIjI8exqPcvN3NvN8cyd/fd0y3nPZXK0PLEPKuWBcVZbS5DsREREZll/uOMSH7nmKwoIE33vfpVy2oDLqkiSL5Vwwrkin2BfuECMiIiIyFHfnjl/t5ksP7mTp9In887svYVZ5fLdUltGRc8G4Ml2kyXciIiJyUh3He/nYD57mJ1sbePOFM/jsW8+nJKV+Yjm93AvGZSk6uvvo6u7T/8lFRETkhK7uPh57rpm/3fAsu184yl+9cQnve9V89RPLsOVcMB5Yy7i54zizUvpKREREJF+5OzsPHeU3Oxv5za5GHnvuMN29/ZSXFvKd/+9SXrWwKuoSJcfkXDCuTIe73x3tVq+QiIhInmnp6Oa3u5v4zc5GHtrVREPbMQAWTinj3a+cy2sWVXPp/AotxSZnJPeCccaIsYiIiMRbb18/Tx04wm92NvI/u5p4pvYI7jCppJBXnVvFaxZV8eqF1cyYrI065OzlXDCuKgtGjLUttIiISDzVtnTym53BqPDDe5poP9ZLwuDC2ZP50JULec2iai6YNZlkQr3DMrpyLhifGDFWMBYREYmFru4+Nu5t5n/CXuG9jR0ATJ9UzLXLp/OaRdW86twqJpUWRlypxF3OBePSVAElhUkt2SYiIpKjevv62VLXyiN7mvntriYe39dCd18/RQUJLj2nkhtXzeG1i6o5d0qZVpSQcZVzwRiCUWNtCy0iIpIb3J09jUd5eHczv93dxMa9zbQf6wVgyfSJvOeyYNLcKk2ak4jlaDAuokkjxiIiIlmrofUYD+9u4uE9TTy8u4lDbcHv7dkVJbzp/OlcvqCKyxdUUhnOHRLJBjkZjKvSKQ62Hou6DBEREQm1dvWwcW8zj+xu4re7m9gT9glXpFNctqCSV51bxRULqphTqaVWJXvlZDCuLEuxtb416jJERETy1rGePp7Y3xKMCu9u5pnaI/Q7lBQmWTW/gjWvmMPl51ayZNpEElo9QnJEjgbjIpqPduPuasoXEREZY23Heth1qJ2dh45S09BOTUM7T+xv4XhvP8mEceHsydzy+oVcsaCSi+aUkypIRF2yyBnJzWCcTtHb77R19WrpFhERkVFy9Hgvuw61s+vQUWoOtbMzvD2wuxxAaSrJwill3HjpHF51bhWr5lcwoVi/iyUecjIYD2zy0dxxXMFYRERkhDq7e9n9wlF2HjrKrkPt1IQBuO5I14lrigsTnDuljMsXVLJw6gQWTS
1j0dQJzJxcotYIia2cDMYvbgvdzTnVERcjIiKShdydls4enmvq4PmmDnY3vhiCa1u6cA+uSyUTLJhSxsp55dw4dQ4LpwQBeHZFqXaWk7yTm8E4HY4Ya8k2ERHJc62dPTzXHITf55o6eD7jdlu4VjBAYdKYX5XmglmTeccls1k0tYyFUycwt6KUgqR6gkUgV4NxOGLcpG2hRUQkD7Qf6+H5ps4TAfj5po4Tt1s6e05cZwYzJpUwvyrNdRfOYF5lmvlVaeZVpZlTUUqhArDIKeVkMC4vDVspFIxFRCSGdjS08a8PPXdiBHjwQND0ScXMq0yzevl05leVngjAsytKtXOcyFnIyWCcKkgwqaSQ5g61UoiISPz8y0PP8aOn6rhoTjlXnjeVeVXpIABXpZlbkaYkpfArMhZyMhhD0E6hEWMREYmjmoZ2Vs2v4Pt/8MqoSxHJKznbbFSVLqJJk+9ERCRm+vqdnYfaWTx1YtSliOSdnA3GlWUpmjs0Yiwi+cvMVptZjZntNrNbh7h/rpn9wsyeMbNfm9msjPtuMrNd4c9N41u5nMq+5g6O9/Zz3rQJUZcikndyOxhrxFhE8pSZJYE7gGuApcBaM1s66LIvAt9x9/OB24HPho+tAD4JXAqsAj5pZuXjVbucWk1DOwCLFYxFxl3uBuN0ES2dPfT29UddiohIFFYBu919r7t3A+uA6wddsxT4RXj7Vxn3vwF40N0Pu3sL8CCwehxqlmHY0dCOGSyaqmAsMt5yNhhXhWsZH+5UO4WI5KWZwIGM49rwXKangbeFt98CTDCzymE+ViJS09DO3IpSrTwhEoGcDcaVZQO73ykYi0heGmqvXh90/DHgtWb2JPBaoA7oHeZjMbObzWyzmW1ubGw823plmGoOtauNQiQiuRuM09rkQ0TyWi0wO+N4FlCfeYG717v7W939IuAvw3Otw3lseO2d7r7S3VdWV1ePdv0yhK7uPp5v7mDxNK1IIRKF3A3GAyPG2uRDRPLTJmChmc03sxSwBlifeYGZVZnZwOf8J4C7wtsPAFebWXk46e7q8JxEbNcL7bijFSlEIpKzwXigx3jwNpkiIvnA3XuBWwgC7bPAve6+zcxuN7PrwsteB9SY2U5gKvCZ8LGHgU8ThOtNwO3hOYnYDq1IIRKpnN35bmJxIQUJ05JtIpK33H0DsGHQudsybt8H3HeSx97FiyPIkiVqGtopKkgwrzIddSkieSlnR4wTCaMirW2hRUQkPnYeamfh1DKSiaHmR4rIWMvZYAxBn7F2vxMRkbjY0aCtoEWilNPBuKospcl3IiISC4c7umlsP66JdyIRyulgXKlWChERiYkdDW2AJt6JRGlYwdjMVptZjZntNrNbT3LNDWa23cy2mdndo1vm0CrSRZp8JyIisVATrkihEWOR6Jx2VQozSwJ3AFcRLAq/yczWu/v2jGsWEqyReYW7t5jZlLEqOFNlWYqO7j66uvu0daaIiOS0moZ2yksLqZ5QFHUpInlrOCPGq4Dd7r7X3buBdcD1g655P3CHu7cAuPsLo1vm0AbWMlafsYiI5LodDcFW0GZakUIkKsMJxjOBAxnHteG5TIuARWb2sJltNLPVQz2Rmd1sZpvNbHNjY+OZVZyhMh3ufqc+YxERyWH9/c7OQ+2cp62gRSI1nGA81D9dfdBxAbCQYJeltcC/mNnklz3I/U53X+nuK6urq0da68tUasRYRERioLali87uPk28E4nYcIJxLTA743gWUD/ENT9y9x53fw6oIQjKY6qqLBgx1rbQIiKSy7QihUh2GE4w3gQsNLP5ZpYC1gDrB13zQ+B3AMysiqC1Yu9oFjqUEyPGCsYiIpLDBlakWDRVwVgkSqcNxu7eC9wCPAA8C9zr7tvM7HYzuy687AGg2cy2A78C/tzdm8eq6AGlqQJKCpNask1ERHLajkPtzCovoazotItFicgYGtbfQHffAGwYdO62jNsOfCT8GVeVZSltCy0iIjmtpqFd6xeLZIGc3vkOoLKsiCaNGIuISI463tvHc00d6i8WyQI5H4yrtC20iIjksN0vHKWv31mspdpEIpfzwThopdCIsYiI5CZtBS2SPWIQjItoPtpN0OYsIiKSW2oa2ilMGvOr0lGXIpL3cj8Yp1P09jttXb1RlyIiIjJiOxraWVBdRmEy538li+S8nP9bOLDJh9opREQkF2lFCpHskfPB+MVtoTUBT0REcktrZw8Nbcc08U4kS+R+ME6HI8Zask1ERHLMwFbQGjEWyQ45H4yrwhHjJi3ZJiIiOabmULAihdYwFskOOR+My9NhK4WCsYiI5JgdDe1MKC5g+qTiqEsREWIQjAuTCSaVFGrynYiI5Jyd4cQ7M4u6FBEhBsEYwk0+NGIsIiI5xN2pOdSuNgqRLBKLYFyVLqJJk+9ERCSH1Lceo/1Yr1akEMkisQjGwbbQGjEWEZHcUaMVKUSyTnyCsUaMRUQkh+xoCFakWDRVwVgkW8QjGKeLaOnsobevP+pSREREhqWmoZ0Zk4qZVFIYdSkiEopFMB5Yy/hwp9opRCR/mNlqM6sxs91mdusQ988xs1+Z2ZNm9oyZXRuen2dmXWb2VPjzjfGvXmoaNPFOJNsURF3AaKgsG9j9rpspE7QWpIjEn5klgTuAq4BaYJOZrXf37RmX/RVwr7v/k5ktBTYA88L79rj7heNZs7yop6+fPY1Hed3iKVGXIiIZYjFiXKlNPkQk/6wCdrv7XnfvBtYB1w+6xoGBJQ8mAfXjWJ+cwt7GDnr6XBPvRLJMPILxwIixNvkQkfwxEziQcVwbnsv0KeBdZlZLMFr8wYz75octFv9jZq8e6gXM7GYz22xmmxsbG0exdNkRrkihVgqR7BKLYDzQY9ykEWMRyR9DbZXmg47XAt9y91nAtcB3zSwBHATmuPtFwEeAu83sZYvpuvud7r7S3VdWV1ePcvn5raahnYKEsaC6LOpSRCRDLILxxOJCChKmJdtEJJ/UArMzjmfx8laJ9wH3Arj7o0AxUOXux929OTz/OLAHWDTmFcsJNQ3tnFOdJlUQi1/DIrERi7+RiYRRkda20CKSVzYBC81svpmlgDXA+kHX7AeuBDCzJQTBuNHMqsPJe5jZOcBCYO+4VS7saGjX+sUiWSgWwRiCPmPtfici+cLde4FbgAeAZwlWn9hmZreb2XXhZR8F3m9mTwP3AO91dwdeAzwTnr8P+CN3Pzz+7yI/tR/roe5IlybeiWShWCzXBkGfsSbfiUg+cfcNBJPqMs/dlnF7O3DFEI+7H7h/zAuUIe08FOx4t3jay9q6RSRi8RkxViuFiIjkgIGtoDViLJJ94hOMy4o0+U5ERLJeTUM76VSSmZNLoi5FRAaJUTBO0dHdR1d3X9SliIiInNSOhnYWTZtAIjHUinsiEqX4BOOB3e/UZywiIlnK3alpaFcbhUiWilEwDne/U5+xiIhkqUNtx2nt6mGxlmoTyUrxCcZlGjEWEZHs9uJW0FqRQiQbxSYYV5UFI8baFlpERLJVjVakEMlqsQnGJ0aMFYxFRCRL1TS0M2VCEeXhv
BgRyS6xCcalqQJKCpNask1ERLJWzaF2Fmu0WCRrxSYYQzBqrG2hRUQkG/X29bPrhaNqoxDJYjELxkU0acRYRESy0PPNnXT39mvinUgWi1UwrtK20CIikqU08U4k+8UqGAetFBoxFhGR7FPT0EbC4NwpZVGXIiInMaxgbGarzazGzHab2a1D3P9eM2s0s6fCnz8Y/VJPr7KsiOaj3bh7FC8vIiJyUjsa2plXlaa4MBl1KSJyEgWnu8DMksAdwFVALbDJzNa7+/ZBl/67u98yBjUOW2U6RW+/09bVy6TSwihLEREReYmaQ+0sm6H+YpFsNpwR41XAbnff6+7dwDrg+rEt68yc2ORD7RQiIpJFOrt72X+4k8VTFYxFstlwgvFM4EDGcW14brC3mdkzZnafmc0elepGaGCTj8Nask1ERLLIzkNHcUdrGItkueEEYxvi3OAm3v8C5rn7+cDPgW8P+URmN5vZZjPb3NjYOLJKh6EyHYwYa5MPERHJJjUNbYBWpBDJdsMJxrVA5gjwLKA+8wJ3b3b3gTT6TeCSoZ7I3e9095XuvrK6uvpM6j2lqnDEuElLtomISBbZ0dBOSWGSORWlUZciIqcwnGC8CVhoZvPNLAWsAdZnXmBm0zMOrwOeHb0Sh29g73mtZSwiItmkpqGdRVPLSCSG+hJWRLLFaVelcPdeM7sFeABIAne5+zYzux3Y7O7rgT81s+uAXuAw8N4xrPnKkQ9qAAAgAElEQVSkCpMJJpcWai1jERHJKjUN7Vy5ZErUZYjIaZw2GAO4+wZgw6Bzt2Xc/gTwidEt7cxUaPc7ERHJIo3tx2nu6NZW0CI5IFY73wFUpYto0uQ7ERHJEgNbQS+eqol3ItkudsE42BZaI8YiIpIddoQrUmipNpHsF89grBFjERHJEjUN7VSmU1RPKIq6FBE5jfgF43QRLZ099Pb1R12KiIgINYfaNVoskiNiF4wH1jI+3Kl2ChERiVZfv7NTwVgkZ8QuGFeWDex+p2AsIiLR2n+4k2M9/drxTiRHxC8Ya5MPEckTZrbazGrMbLeZ3TrE/XPM7Fdm9qSZPWNm12bc94nwcTVm9obxrTx/1JyYeKel2kRywbDWMc4lJ0aMtcmHiMSYmSWBO4CrgFpgk5mtd/ftGZf9FXCvu/+TmS0lWI9+Xnh7DbAMmAH83MwWuXvf+L6L+NvR0I4ZLJpaFnUpIjIMsRsxHugxbtKIsYjE2ypgt7vvdfduYB1w/aBrHBgYqpwE1Ie3rwfWuftxd38O2B0+n4yymoZ25lSUUpqK3TiUSCzFLhhPLC6kIGFask1E4m4mcCDjuDY8l+lTwLvMrJZgtPiDI3isjIKaQ+3a2EMkh8QuGCcSpm2hRSQf2BDnfNDxWuBb7j4LuBb4rpklhvlYzOxmM9tsZpsbGxvPuuB8c6ynj+ebOjTxTiSHxC4YQ9BnrN3vRCTmaoHZGcezeLFVYsD7gHsB3P1RoBioGuZjcfc73X2lu6+srq4exdLzw+4XjtLvmngnkktiGYyrylKafCcicbcJWGhm880sRTCZbv2ga/YDVwKY2RKCYNwYXrfGzIrMbD6wEPjfcas8T+xoaAe0FbRILonlbIDKdIp9zZ1RlyEiMmbcvdfMbgEeAJLAXe6+zcxuBza7+3rgo8A3zezDBK0S73V3B7aZ2b3AdqAX+IBWpBh9NQ1tpAoSzKssjboUERmmeAbjsiJNvhOR2HP3DQST6jLP3ZZxeztwxUke+xngM2NaYJ7b0dDOwillFCRj+eWsSCzF8m9rZVmKju4+uro1ACIiItGoadBW0CK5JpbBuCqtTT5ERCQ6LR3dvNB+XCtSiOSYWAbjCm0LLSIiEXpx4p1WpBDJJbEMxpXh7ncaMRYRkSjUNLQBaMRYJMfEMhhXlQWtFNoWWkREolBzqJ3JpYVMmVAUdSkiMgKxDMYnRowVjEVEJAI7GoKtoM2G2mRQRLJVLINxaaqAksKklmwTEZFx19/v7GxoVxuFSA6KZTCGYNRY20KLiMh4qzvSRUd3nybeieSgGAfjIpo0YiwiIuNMW0GL5K7YBuOqdEo9xiIiMu4GVqRYNLUs4kpEZKRiG4yDVgqNGIuIyPja0dDOzMklTCgujLoUERmhGAfjIpqPduPuUZciIiJ5pEYT70RyVnyDcTpFb7/T1tUbdSkiIpInjvf2sbepQ/3FIjkqtsH4xCYfaqcQEZFxsueFDvr6XcFYJEfFNhgPbPJxWEu2iYjIOKk5NLAVtJZqE8lF8Q3G6WDEWJt8iIjIeNnR0E5h0jinOh11KSJyBmIbjKvCEeMmLdkmIiLjpKahnQXVZRQmY/vrVSTWYvs3tzwdBGOtZSwiIuNlZ0O7+otFclhsg3FhMsHk0kKtZSwiIuOitauH+tZjCsYiOSy2wRiCJds0YiwiIuNh56FgK2itYSySu+IdjMuKaNLkOxERGQc7GoJgvFgrUojkrHgH43SKZi3XJiIi46CmoY0JxQXMmFQcdSkicobiHYzLUlquTURExkVNQzuLp07AzKIuRUTOULyDcbqIls4eevv6oy5FRERizN3ZoRUpRHLesIKxma02sxoz221mt57iurebmZvZytEr8cwNrGV8uFPtFCIiMnYOth6j/VivJt6J5LjTBmMzSwJ3ANcAS4G1ZrZ0iOsmAH8KPDbaRZ6pyrKB3e8UjEVEZOzUaOKdSCwMZ8R4FbDb3fe6ezewDrh+iOs+DXwBODaK9Z2VSm3yISIi42BrXSsAS6ZrxFgklw0nGM8EDmQc14bnTjCzi4DZ7v7jUz2Rmd1sZpvNbHNjY+OIix2pEyPG2uRDRETG0Nb6VuZXpZlQXBh1KSJyFoYTjIeaXusn7jRLAF8BPnq6J3L3O919pbuvrK6uHn6VZ2igx7hJI8YiIjKGtta1sWyG2ihEct1wgnEtMDvjeBZQn3E8AVgO/NrMngdeCazPhgl4E4sLKUiYlmwTkVg63cRoM/uKmT0V/uw0syMZ9/Vl3Ld+fCuPl5aObuqOdLF85qSoSxGRs1QwjGs2AQvNbD5QB6wBbhy4091bgaqBYzP7NfAxd988uqWOXCJhVGhbaBGJoYyJ0VcRDGBsMrP17r594Bp3/3DG9R8ELsp4ii53v3C86o2zrfVBf/EKBWORnHfaEWN37wVuAR4AngXudfdtZna7mV031gWercqyIu1+JyJxNNyJ0QPWAveMS2V5ZmtdG4BaKURiYDgjxrj7BmDDoHO3neTa1519WaOnqiylyXciEkdDTYy+dKgLzWwuMB/4ZcbpYjPbDPQCn3P3Hw7xuJuBmwHmzJkzSmXHz9b6VmaVlzC5NBV1KSJylmK98x0ES7aplUJEYuiUE6MHWQPc5+59GefmuPtKgta4vzezBS97snGeMJ2rttW1snyG2ihE4iD+wbisSJPvRCSOTjcxOtMaBrVRuHt9+Ode4Ne8tP9YhqntWA/PN3eyYpaCsUgc5EEwTtHR3UdXd9/pLxYRyR0nJkabWYog/L5sdQkzWwyUA49mnCs3s6LwdhVwBbB98GPl9Lapv1gkVmIfjKvS
2uRDROJnBBOj1wLr3D2zzWIJsNnMngZ+RdBjrGB8BraFK1IsUyuFSCwMa/JdLqsse3Fb6FnlpRFXIyIyeoYzMdrdPzXE4x4BVoxpcXlia10r0yYWUz2hKOpSRGQUxH7EWNtCi4jIWNlS16qNPURiJP7BOK1toUVEZPR1HO9lb1MHy2eqv1gkLuIfjDNaKUREREbLswfbcEdLtYnESOyDcWmqgJLCpJZsExGRUbW1Lph4p1YKkfiIfTCGYNRY20KLiMho2lLXRlVZEVMnauKdSFzkSTAuokkjxiIiMoq21beyfOZEzIbahFBEclFeBOMqbQstIiKj6FhPH7teOKr+YpGYyYtgHLRSaMRYRERGx46Gdvr6XStSiMRMngTjIpqPdvPSjZ9ERETOzBZNvBOJpfwIxukUvf1OW1dv1KWIiEgMbKtrZXJpITMnl0RdioiMorwIxlXh7ndNaqcQEZFRsLW+leUzJmninUjM5EUwHtjk47CWbBMRkbPU3dtPTUM7y9RfLBI7+RGM08GIsTb5EBGRs7XzUDs9fc4K9ReLxE5eBOOqcMS4SUu2iYjIWTqx452WahOJnbwIxuXpIBhrLWMRETlbW+tbmVBUwJyK0qhLEZFRlhfBuDCZYHJpodYyFhGRs7a1ro2lMyaSSGjinUjc5EUwhmDJNo0Yi4jI2ejt6+fZg23qLxaJqfwJxmVFNGnynYiInIXdjUc53tuvjT1EYipvgnFVWYpmLdcmIiJnYWtdG4C2ghaJqbwJxhXplJZrExGRs7K1rpXSVJL5VWVRlyIiYyBvgnFluoiWzh56+/qjLkVERHLU1rpWlk6fSFIT70RiKW+C8cBaxoc71U4hIiIj19fvbD/Ypv5ikRjLm2BcWTaw+52CsYiIjNxzTR10dvexbIb6i0XiKn+CsTb5EBGRs7CtPtzxTiPGIrGVP8F4YMRYm3yIiMgZ2FLbSlFBgoVTNPFOJK7yJhgP9Bg3acRYRETOwNb6Vs6bPpGCZN786hTJO3nzt3ticSEFCdOSbSIiMmL9/c62ujaWq79YJNbyJhgnEhauZawRYxERGZkDLZ20H+9Vf7FIzOVNMIagz1g9xiISF2a22sxqzGy3md06xP1fMbOnwp+dZnYk476bzGxX+HPT+Faee7bUBRPvVigYi8RaQdQFjCdtCy0icWFmSeAO4CqgFthkZuvdffvANe7+4YzrPwhcFN6uAD4JrAQceDx8bMs4voWcsrWujcKksXCqJt6JxFl+jRirlUJE4mMVsNvd97p7N7AOuP4U168F7glvvwF40N0Ph2H4QWD1mFab47bVt7Jo6gSKCpJRlyIiYyi/gnFZkSbfiUhczAQOZBzXhudexszmAvOBX470sQLuzta6VpbPUBuFSNzlWTBO0dHdR1d3X9SliIicLRvinJ/k2jXAfe4+8OE3rMea2c1mttnMNjc2Np5hmbmv7kgXLZ09LJ+lYCwSd8MKxsOY4PFHZrYlnODxWzNbOvqlnr2qtDb5EJHYqAVmZxzPAupPcu0aXmyjGPZj3f1Od1/p7iurq6vPstzctbWuDUBLtYnkgdMG44wJHtcAS4G1QwTfu919hbtfCHwB+PKoVzoKKsu0LbSIxMYmYKGZzTezFEH4XT/4IjNbDJQDj2acfgC42szKzawcuDo8J0PYVt9KMmEsma5gLBJ3wxkxPu0ED3dvyzhMc/Kv8yKlbaFFJC7cvRe4hSDQPgvc6+7bzOx2M7su49K1wDp394zHHgY+TRCuNwG3h+dkCFvrWjm3uoziQk28E4m74SzXNtQkjUsHX2RmHwA+AqSA1w/1RGZ2M3AzwJw5c0Za61mrTGtbaBGJD3ffAGwYdO62QcefOslj7wLuGrPiYsLd2VLXxmsX5W8riUg+Gc6I8bAmabj7He6+APg48FdDPVHU/WpqpRARkZF4of04TUePs3ym2ihE8sFwgvFIJnhA0Grx5rMpaqyUpgooKUxqyTYRERmWreGOd9oKWiQ/DCcYn3aCh5ktzDh8I7Br9EocXZXa/U5ERIZpa10bZmjinUieOG2Psbv3mtnABI8kcNfABA9gs7uvB24xs98FeoAW4KaxLPpsVJYV0aQRYxERGYYtda3Mr0pTVjScKTkikuuG9Tf9dBM83P1Do1zXmKlKpzjYeizqMkREJAdsq29l1fyKqMsQkXGSVzvfwUArhUaMRUTk1JqOHudg6zFtBS2SR/IwGBfRfLSbjCU9RUREXmZbfbBE/zKtSCGSN/IvGKdT9PY7bV29UZciIiJZbGBFimUaMRbJG3kXjKvC3e+a1E4hIiKnsLWulbmVpUwqKYy6FBEZJ3kXjAc2+TisJdtEROQUtta3qr9YJM/kXzBOByPG2uRDREROprWzhwOHu9RfLJJn8i4YV4Ujxk3aFlpERE5ia324451GjEXySt4F4/J0EIybFYxFROQktBW0SH7Ku2BcmEwwubRQaxmLiMhJba1vY+bkEirCwRQRyQ95F4whWLJNI8YiInIy2+paWTZD/cUi+SY/g3FZEU2afCciIkNoP9bD3qYOtVGI5KG8DMZVZSmatVybiIgMYXu4490KBWORvJOXwbgyXaTl2kREZEhbtRW0SN7Ky2BckU7R0tlDb19/1KWIiEiW2VbXypQJRUyZUBx1KSIyzvIyGA+sZXy4U+0UIiLyUlvqWtVfLJKn8jIYV5YN7H6nYCwiIi/q7O5lT+NRBWORPJWfwVibfIiIyBCePdhOv8NyLdUmkpfyMxgPjBhrkw8REcmwrV473onks7wMxgM9xk0aMRYRkQxbalupSKeYPkkT70TyUV4G44nFhRQkTEu2iYjIS2ytb2P5zEmYWdSliEgE8jIYJxJGhbaFFhGRDMd6+th1qF39xSJ5LC+DMQR9xuoxFhGRATsPtdPb7+ovFsljeRuMtS20iIhk2lIXTryboWAskq/yNhhXqpVCREQybK1rY2JxAbMrSqIuRUQikr/BuKxIk+9EJKeZ2WozqzGz3WZ260muucHMtpvZNjO7O+N8n5k9Ff6sH7+qs9e2+lZNvBPJcwVRFxCVyrIUHd19dHX3UZJKRl2OiMiImFkSuAO4CqgFNpnZenffnnHNQuATwBXu3mJmUzKeosvdLxzXorNYT18/Ow62894r5kVdiohEKG9HjKvS2uRDRHLaKmC3u+91925gHXD9oGveD9zh7i0A7v7CONeYM3Yeaqe7r59lWpFCJK/lbTCuLNO20CKS02YCBzKOa8NzmRYBi8zsYTPbaGarM+4rNrPN4fk3j3Wx2W5bXRsAK7QihUhey+NWCo0Yi0hOG6oR1gcdFwALgdcBs4CHzGy5ux8B5rh7vZmdA/zSzLa4+56XvIDZzcDNAHPmzBnt+rPK1vpW0qkk8yrTUZciIhHK32CcDkaMv/Sznfz7pgOkCpKkkglSBQmKChIUJo1UQYJUMhn8Gf4UJV+8ncq4XZhMMGVCEbMrSiN+ZyKSJ2qB2RnHs4D6Ia7Z6O49wHNmVkMQlDe5ez2Au+81s18DFwEvCcbufidwJ8DKlSsHh+5Y2VrXyrIZk0g
kNPFOJJ/lbTCePqmYa1dM48DhLp5v6qS7r5/u3v4X/wxv9/UP/3eBGdxx48Vcu2L6GFYuIgLAJmChmc0H6oA1wI2DrvkhsBb4lplVEbRW7DWzcqDT3Y+H568AvjB+pWeX3r5+th9sY+2qeI+Ki8jp5W0wLkgm+Md3XnLa6/r6/URQPt7X95LQ3NPrdPf1cTw895Wf7+Lj9z/DipmTNHIsImPK3XvN7BbgASAJ3OXu28zsdmCzu68P77vazLYDfcCfu3uzmV0O/LOZ9RPMNflc5moW+WZvUwfHevrVXywi+RuMhyuZMEpSyXBJt8JTXruguoxrv/YQt9zzJD/4w8tIFeTt3EYRGQfuvgHYMOjcbRm3HfhI+JN5zSPAivGoMRdsHdjxTsFYJO8puY2i2RWlfOFt5/P0gSN88Wc1UZcjIiLDsLWujeLCBOdUaeKdSL5TMB5l16yYzrtfOZc7f7OXX+3QkqEiItlua10rS6ZPpCCpX4ki+U6fAmPgL9+4hCXTJ/KRe5+iofVY1OWIiMhJ9Pc72+pb1V8sIoCC8ZgoLkzy9Rsv4nhvPx9a9+SIVrYQEZHx83xzBx3dfSyfoWAsIgrGY2ZBdRmfvn45jz13mK/9YlfU5YiIyBC21gc73i2bqa2gRWSYwdjMVptZjZntNrNbh7j/I2a23cyeMbNfmNnc0S8197ztklm87eJZfO2Xu3hkT1PU5YiIyCBb61pJJRMsnDIh6lJEJAucNhibWRK4A7gGWAqsNbOlgy57Eljp7ucD95HHC8UPdvv1y5hflebP1j1F01FtPy0ikk221rVy3vQJWl5TRIDhjRivAna7+1537wbWAddnXuDuv3L3zvBwI8HWpAKkiwq448aLOdLVw0fvfZp+9RuLiGQFdz+xFbSICAwvGM8EDmQc14bnTuZ9wE/Opqi4WTJ9Ire9aSn/s7OROx/aG3U5IiIC1LZ00Xasl+XqLxaR0HCCsQ1xbshhTzN7F7AS+LuT3H+zmW02s82NjY3DrzIG3nnpHK5dMY0vPlDDE/tboi5HRCTvbRnY8U4jxiISGk4wrgVmZxzPAuoHX2Rmvwv8JXCduw/ZTOvud7r7SndfWV1dfSb15iwz47NvPZ9pk4r54N1P0trZE3VJIiJ5bWtdKwUJY/E0TbwTkcBwgvEmYKGZzTezFLAGWJ95gZldBPwzQSjWdm8nMamkkK/feDGH2o7x8fufwV39xiIiUdla38bCqRMoLkxGXYqIZInTBmN37wVuAR4AngXudfdtZna7mV0XXvZ3QBnwAzN7yszWn+Tp8t6Fsyfz8dXn8dNtDXx3476oyxERyQv9/U7bsR4OHO5ka10rD+9uYkvtEZbPUH+xiLyoYDgXufsGYMOgc7dl3P7dUa4r1t73qvk8sqeJv/nxs1wyt1wzokVEhsHd6erpo7WrhyOdPbR2hT8Zt490ddPa1cuRzm7aTpzroa2rh6EWBVo5r3z834iIZK1hBWMZXYmE8aUbLuSar/6GD979JOs/+CrKivQ/hYjIybg77/yXx3hkT/NJr0lY0LI2uTTFxJJCJpWmmFuZDs8VMqmkkIklhUwuCW5XpFOcO6VsHN+FiGQ7pbGIVKRTfG3NRaz95kb+7w+38uUbLsBsqAVARETk6dpWHtnTzFsvnskr5lUEYTcMugPBt6yoQJ+jInJWFIwjdOk5lXzoykV85ec7uXxBJe9YOfv0DxIRyUPf27iP0lSSv75uGROKC6MuR0RiSntgRuyW15/LZedUctuPtrH7hfaoyxERyTpHOrv5r6frefNFMxWKRWRMKRhHLJkwvrrmQkpTST7w/Sc51tMXdUkiIlnlvsdrOd7bz7sunRt1KSIScwrGWWDKxGK+dMMF1Bxq5/Yfb4+6HBGRrOHu3P3Yfi6eM5mlWlpNRMaYgnGWeN3iKfzRaxdw92P7+fEzL9tYUEQkLz2yp5m9TR2865UaLRaRsadgnEU+evUiLp4zmU/cv4X9zZ1RlyMiErnvbdxHeWkh166YHnUpIpIHFIyzSGEywdfWXoQZ3HLPE3T39kddkohIZA61HeNn2w/xjpWztW2ziIwLBeMsM6u8lC+8/XyeqW3l8z/dEXU5IiKRWfe/B+jrd25cNSfqUkQkTygYZ6HVy6dz02Vz+dffPseD2w9FXY6IyLjr7etn3ab9vHphFfOq0lGXIyJ5QsE4S/3FG5ewfOZEPnrvUxw4rH5jEckvv9zxAgdbj2nSnYiMKwXjLFVUkOSOGy/GHW6550n1G4tIXvneY/uZNrGYK8+bEnUpIpJHFIyz2NzKNF94+/k8feCI+o1FJG/sa+7gNzsbWbNqNgVJ/ZoSkfGjT5wsd82K6bz38nn862+f44FtDVGXIyIy5u5+bD/JhLHmFZp0JyLjS8E4B3zi2vM4f9Yk/vwHT6vfWEROMLPVZlZjZrvN7NaTXHODmW03s21mdnfG+ZvMbFf4c9P4VX1qx3r6uHfzAa5aMpVpk4qjLkdE8oyCcQ4oKkjy9bUX48Atd2t9YxEBM0sCdwDXAEuBtWa2dNA1C4FPAFe4+zLgz8LzFcAngUuBVcAnzax8HMs/qZ9sPUhLZ48m3YlIJBSMc8ScylL+7u0X8HRtK5/9ybNRlyMi0VsF7Hb3ve7eDawDrh90zfuBO9y9BcDdXwjPvwF40N0Ph/c9CKwep7pP6Xsb9zO/Ks3lCyqjLkVE8pCCcQ5ZvXwa/+eKefzbw8/z060Hoy5HRKI1EziQcVwbnsu0CFhkZg+b2UYzWz2Cx2JmN5vZZjPb3NjYOIqlD+3Zg208vq+Fd146h0TCxvz1REQGUzDOMZ+4ZgkXzJrEn9/3DPub1W8skseGSo4+6LgAWAi8DlgL/IuZTR7mY3H3O919pbuvrK6uPstyT+97G/dRVJDg7ZfMGvPXEhEZioJxjkkVJPj6jRdjwAfufoLjvX1RlyQi0agFZmcczwLqh7jmR+7e4+7PATUEQXk4jx1XR4/38sMn63jT+TOYXJqKshQRyWMKxjlodkUpf/eOC9hS18pnN2h9Y5E8tQlYaGbzzSwFrAHWD7rmh8DvAJhZFUFrxV7gAeBqMysPJ91dHZ6LzH8+WUdHdx/veqWWaBOR6CgY56g3LJvG+141n2898jwbtqjfWCTfuHsvcAtBoH0WuNfdt5nZ7WZ2XXjZA0CzmW0HfgX8ubs3u/th4NME4XoTcHt4LhLuzvc37mPZjIlcOHtyVGWIiFAQdQFy5j6++jw272vh4/c9w7IZE5lbmY66JBEZR+6+Adgw6NxtGbcd+Ej4M/ixdwF3jXWNw/HE/hZ2NLTz2beuwEyT7kQkOhoxzmGpggR33HgRZkG/8bEe9RuLSO753sb9lBUVcN0FM6IuRUTynIJxjptVXsqXbriQrXVt/O0GrW8sIrnlcEc3//3MQd568UzSRfoSU0SipWAcA1ctncr7Xz2f7zy6jx8/E+nEchGREfnB5gN09/VrpzsRyQoKxjHx/68+j4vmTObW+7fwfFNH1OWIiJxWf79z9//uZ9W8ChZNnRB1OSIiCsZxUZgM1jdOJow/+b
76jUUk+z20u4l9zZ28U0u0iUiWUDCOkZmTS/jyDRew/WAbf/Pf26MuR0TklL63cR+V6RSrl0+LuhQREUDBOHauXDKVP3zNOXxv437+62n1G4tIdqo/0sUvnj3EDa+YTVFBMupyREQABeNY+tgbFnPJ3HJuvf8ZnlO/sYhkoXX/ux8HblylNgoRyR4KxjFUmEzwD2svorAgoX5jEck6PX39rNt0gNctqmZ2RWnU5YiInKBgHFMzJpfwlRsu5NmDbdz+Y/Ubi0j2eHD7IV5oP64l2kQk6ygYx9jvnDeFP3rtAu5+bD8/eqou6nJERAD4/mP7mDm5hNctnhJ1KSIiL6FgHHMfvXoRK+eW8xf/sYXt9W1RlyMieW5v41Ee3t3MjZfOIZmwqMsREXkJ7b8Zc4XJBP9w40Vc+9WHuPZrD1FVVsTSGRNZOn3iiT/nV6X1C0pExsX3H9tPQcK4YeXsqEsREXkZBeM8MH1SCT/6wKv4+bOH2H6wje31bfzrnr30/L/27j04rvo84/j33dVKWlkSErZl2ZYtX7G42cZ2MbehQMJA2mAyDTNxIRkoaZnJlEI6TZswmaFtptOhl0khA9MUCIUOFNJxYeIwlOACoUkJDrJrGwM2doxlC8tIBtnyRdfdt3+co/VKlm1hXc5Z6/nM7Ow5v11pn73o1bu/PXtOxgEoTSVoqK0c0DA31FZQVqyXh4iMnq7eDGs2NHPDRbVMrSiJOo6IyAnU+UwQsyeXcedVc3PrPX1ZdrYeyTXK77Uc4sXN+/j39XsAMIO5UyYNmFm+YEYlNRWlUd0FESlwP928j0OdvXx1pb50JyLxNKzG2MxuBB4CksDj7v7AoMuvBh4EFgOr3X3NaAeV0VVclAga3hmVsDwYc3f2HeoKGuWwWd7cfJAXt7Tkfi5/U4yV887l8nmTKU1p5/wicnpPr9/DgppyLpt3btRRRESGdNrG2MySwCPA9UAz8LaZrXX3/H2A7QHuAL41FiFlfJgZM+9Xh7cAAA4oSURBVKvSzKxKc/0F03Ljhzp72dbSkTe73MGPfrmLH77xG0pTCa6cP4VrG2q4tqGGmVXpCO+BiMTV1o8OsXnvQf7ypgsw03caRCSehjNjfCmw0913AZjZc8DNQK4xdvfd4WXZMcgoETsnnWLlvMmsnDc5N9bVm+GtXZ/w+rZWXtveyqvbWgFYNK2CaxtquK6hhmWzqyhKascnIgJPv9VEOpXk95bVRR1FROSkhtMYzwT25q03AyvP5MbM7C7gLoDZs3UY0EJWmkpyzaIarllUw1+585u2o0GTvK2Vx38RzCZXlhZx9XlTua6hht8+byqTy/VlG5GJqKOrl59s2seqJTM4J52KOo6IyEkNpzEe6jMvP5Mbc/dHgUcBVqxYcUa/Q+LHzFhQU86CmnL+6Op5dHT18r87DvDatlZe397Gi1taMIMldVVcF84mXzijMnYfp2ayzrGePjp7MnT2ZjjWE5w6ezJUlaVoqK3QDLjIGXh+QzOdvRkd6U5EYm84jXEzkL/DyTpg39jEkbNBZWmKL1w8nS9cPJ1s1tm671DQJG9r5fvrPuD76z6gpqKEaxfVcG3DVK5aOJXyklO/FDNZp6s3aFg7ezJ09Wbo6s0G673965lcU9t/vc6eDMdOWO4LGt5wvL/57cmcekugsuIkS2dVsaK+mmX11Vwyuzry2a8DR7rZfeAoiYRRWpQkXZykNJUgnUpSmkpSUpQY8zcg2azT1Xf8sQ+eh+PPTcKguqyYqrIU1WXFlBUnY/emSMaOu/P0+j0sqTuHi+vOiTqOiMgpDacxfhtYaGZzgY+A1cCtY5pKzhqJhLG4rorFdVV88/Pn0Xa4mzc+aOP1ba289E4LP27cSyppLJ1VRTJhdPVmcw1wV64Jzp62aR2KGaRTScqKgyaxrDhJuriIslSSaZUp0sVJylJBMxksFwXXDcfLwvF0Ksn+ji42NrXT2NTOw6/vJOvB7z+vpoJl9dWsqK9meX019ZPLxqTp6+zJsKP1MNv2H2Zby2G2f9zB9v2HOXCk57SPQUnR8UY5nUpSkkqSTiVy66W5U3C9omTihDcZJ6wPuOyzPTfFyUSuSe4/r56UoqqsmOqyFFXpcHxSuF5WTFU6NWqz9e5O1iHrTibrFCVMnwSMofUffsrO1iP8/S2Lo44iInJap22M3b3PzO4Gfkawu7Yn3P1dM/se0Ojua83st4AXgGrgJjP7a3e/cEyTS0GaWlHCLcvruGV5Hb2ZLI272/n59lYam9qxLFSmU9RUlOQa0tK8hi5dnBiwfrLxdF6jN5pN6s1LZwJwtLuPzXsP0tjUzoamdl7cso9nfx3s/3lKeTHLZgdN8vL6ai6aec5n2p1dJus0fXKU7fuDJnj7/sNs//gwuz85iocbH5UUJThvWgXXLKqhobaC+TXlGAyYRe/Ka1rzZ9O7+rJ09mToDmd4O7p6c28++q/Xm3FKUwnKiovCGeigkU4XJ6kqS+U97nnPQ/Hxxz63HP5s1p32oz0cPNZL+7Ee2o/1cvBYT25514EjtO8JxvoPOjOUitIiqsuKSSVtQGObzQaNbsYd7x9zwnEn4wPXs4Nu4qHVS3PPrYy+Z9bvobK0iJsWz4g6iojIaQ1rP8bu/hLw0qCx+/OW3ybYxEJk2FLJBJfPn8zl8yef/soxMqmkiCsWTOGKBVOAoOHa0XqEDU3tNDZ9ysamdl5572MgmB29aGZl2Cify/L66twRv9oOd7Ntf8eAJnhH6+HcDKwZzJk8iUXTKli1ZAYNtRUsqq2gfvLZeQhvd+doT2ZQE318uf+8L+skzEha8IlEsGwkEpCwcD1hmBGOWzhOOB5cP5kIto8/f3pl1Hf9rNV2uJuXt7bwtcvmkC7W/s5FJP505DuREUokjEVh03rrymBvKweOdLOhqZ2N4azyU2828dgvPgRg1rlpjnVn+OTo8c0gppSX0FBbwW0r61lUW0FDbQULayomVDNhZpSXFFFeUsQsHf/hrPAfjXvpzTi3Xaa9EIlIYVBjLDIGppSXcMOFtdxwYS0A3X0Ztn7UwcamdjbtPcikkiQNtZW5WWDtyk7ORulUkpuWzGD+1PKoo4iIDIsaY5FxUFKUzG13LDJR3HnV3KgjiIh8JvoqtoiIiIgIaoxFRAqWmd1oZtvNbKeZfWeIy+8wszYz2xSe/jDvskze+NrxTS4iEk/alEJEpACZWRJ4BLie4EBMb5vZWnd/b9BVf+zudw/xKzrdfelY5xQRKSSaMRYRKUyXAjvdfZe79wDPATdHnElEpKCpMRYRKUwzgb15683h2GBfNrMtZrbGzGbljZeaWaOZvWVmXxrTpCIiBUKNsYhIYRrqKC+DDx34U2COuy8G/ht4Ku+y2e6+ArgVeNDM5p9wA2Z3hc1zY1tb22jlFhGJLTXGIiKFqRnInwGuA/blX8HdP3H37nD1MWB53mX7wvNdwM+BSwbfgLs/6u4r3H3F1KlTRze9iEgMqTEWESlMbwMLzWyumRUDq4EBe5cws+l5q6uA98PxajMrCZenAFcCg7+0JyIy4WivF
CIiBcjd+8zsbuBnQBJ4wt3fNbPvAY3uvha4x8xWAX3Ap8Ad4Y+fD/yLmWUJJkgeGGJvFiIiE44aYxGRAuXuLwEvDRq7P2/5PuC+IX7uTeDiMQ8oIlJgtCmFiIiIiAhqjEVEREREADXGIiIiIiIAmPvg3V6O0w2btQFNkdz4QFOAA1GHOIk4Z4N454tzNoh3vjhng3jkq3f3CbX/MtXsYYlzNoh3vjhnA+UbibhkG1bdjqwxjgszawx3ch87cc4G8c4X52wQ73xxzgbxzydjK87Pf5yzQbzzxTkbKN9IxDnbULQphYiIiIgIaoxFRERERAA1xgCPRh3gFOKcDeKdL87ZIN754pwN4p9Pxlacn/84Z4N454tzNlC+kYhzthNM+G2MRURERERAM8YiIiIiIsAEbYzNbJaZvW5m75vZu2Z2b9SZhmJmSTP7PzN7Meos+cysyszWmNm28DG8POpM+czsT8PndauZPWtmpRHnecLMWs1sa97YuWa2zsx2hOfVMcr2D+Fzu8XMXjCzqiiynSxf3mXfMjM3sylRZJPxo5o9cnGu26rZI86mmj2KJmRjDPQBf+bu5wOXAX9sZhdEnGko9wLvRx1iCA8BL7t7A7CEGGU0s5nAPcAKd78ISAKro03Fk8CNg8a+A7zq7guBV8P1KDzJidnWARe5+2LgA+C+8Q6V50lOzIeZzQKuB/aMdyCJhGr2yMWybqtmf2ZPopo9piZkY+zuLe6+MVw+TFAgZkabaiAzqwN+F3g86iz5zKwSuBr4EYC797j7wWhTnaAISJtZEVAG7IsyjLv/D/DpoOGbgafC5aeAL41rqNBQ2dz9FXfvC1ffAurGPdjxLEM9dgD/BPwFoC9JTACq2SNTAHVbNXuYVLPH3oRsjPOZ2RzgEmB9tElO8CDBiygbdZBB5gFtwL+GHxk+bmaTog7Vz90/Av6R4F1pC3DI3V+JNtWQprl7CwT/9IGaiPOczJ3Af0UdIp+ZrQI+cvfNUWeR8aeafUZiW7dVs0edavYITejG2MzKgf8EvunuHVHn6WdmXwRa3X1D1FmGUAQsA/7Z3S8BjhLdR0onCLf7uhmYC8wAJpnZV6NNVZjM7LsEH2E/E3WWfmZWBnwXuD/qLDL+VLPPWGzrtmr26FHNHh0TtjE2sxRBgX3G3Z+POs8gVwKrzGw38BxwnZk9HW2knGag2d37Z2vWEBTcuPg88KG7t7l7L/A8cEXEmYbysZlNBwjPWyPOM4CZ3Q58EbjN47VPx/kE/0A3h38fdcBGM6uNNJWMOdXsEYlz3VbNHgWq2aNnQjbGZmYE21q97+7fjzrPYO5+n7vXufscgi8hvObusXgH7e77gb1mtigc+hzwXoSRBtsDXGZmZeHz/Dli8iWTQdYCt4fLtwM/iTDLAGZ2I/BtYJW7H4s6Tz53f8fda9x9Tvj30QwsC1+XcpZSzR6ZmNdt1ewRUs0eXROyMSZ4d/81gnf1m8LT70QdqoD8CfCMmW0BlgJ/G3GenHBGZA2wEXiH4DUe6VF3zOxZ4FfAIjNrNrOvAw8A15vZDoJv6j4Qo2wPAxXAuvBv44dRZDtFPpl4VLNHLpZ1WzV7VLKpZo8iHflORERERISJO2MsIiIiIjKAGmMREREREdQYi4iIiIgAaoxFRERERAA1xiIiIiIigBpjiTkzy+TtnmmTmY3a0ZrMbI6ZbR2t3yciMtGpZkuhK4o6gMhpdLr70qhDiIjIsKhmS0HTjLEUJDPbbWZ/Z2a/Dk8LwvF6M3vVzLaE57PD8Wlm9oKZbQ5P/YccTZrZY2b2rpm9YmbpyO6UiMhZSjVbCoUaY4m79KCP5b6Sd1mHu19KcNSfB8Oxh4F/c/fFwDPAD8LxHwBvuPsSYBnwbji+EHjE3S8EDgJfHuP7IyJyNlPNloKmI99JrJnZEXcvH2J8N3Cdu+8ysxSw390nm9kBYLq794bjLe4+xczagDp37877HXOAde6+MFz/NpBy978Z+3smInL2Uc2WQqcZYylkfpLlk11nKN15yxm03b2IyFhRzZbYU2Mshewreee/CpffBFaHy7cBvwyXXwW+AWBmSTOrHK+QIiICqGZLAdA7LYm7tJltylt/2d37d/9TYmbrCd7g/X44dg/whJn9OdAG/EE4fi/wqJl9nWCW4RtAy5inFxGZWFSzpaBpG2MpSOH2aivc/UDUWURE5NRUs6VQaFMKERERERE0YywiIiIiAmjGWEREREQEUGMsIiIiIgKoMRYRERERAdQYi4iIiIgAaoxFRERERAA1xiIiIiIiAPw/bNdj6opq0k8AAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure(\"train\", (12, 6))\n", + "plt.subplot(1, 2, 1)\n", + "plt.title(\"Average Loss\")\n", + "x = [eval_num * (i + 1) for i in range(len(epoch_loss_values))]\n", + "y = epoch_loss_values\n", + "plt.xlabel(\"Epoch\")\n", + "plt.plot(x, y)\n", + "plt.subplot(1, 2, 2)\n", + "plt.title(\"Val Mean AUC\")\n", + "x = [eval_num * (i + 1) for i in range(len(metric_values))]\n", + "y = metric_values\n", + "plt.xlabel(\"Epoch\")\n", + "plt.plot(x, y)\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Check best model output with the input image and label" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "After training is completed, we use the best validation checkpoint to test the model performance on the Open-I testing set. " + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Evaluation Statistics: Mean AUC : 0.9629915902329793, Mean Loss : 0.06937971694082447\n", + "\n", + "Mean test AUC for each class in 14 disease categories :\n", + "\n", + "Atelectasis: 0.9933158010081088\n", + "Cardiomegaly: 0.974534284581847\n", + "Consolidation: 0.9532794249775381\n", + "Edema: 0.9901960784313726\n", + "Enlarged-Cardiomediastinum: 0.9449934738019765\n", + "Fracture: 0.9911196911196911\n", + "Lung-Lesion: 0.9471389645776568\n", + "Lung-Opacity: 0.986452330401375\n", + "No-Finding: 0.9574158854734394\n", + "Pleural-Effusion: 0.8975490196078432\n", + "Pleural_Other: 0.9973118279569892\n", + "Pneumonia: 0.9714795008912656\n", + "Pneumothorax: 0.9787234042553191\n", + "Support-Devices: 0.8983725761772853\n" + ] + } + ], + "source": [ + "model.load_state_dict(torch.load(os.path.join(logdir, \"transchex.pt\"))[\"state_dict\"])\n", + "model.eval()\n", + "with torch.no_grad():\n", + " auc_val, loss_val, auc = validation(test_loader)\n", + "\n", + "print(\n", + " \"\\nMean test AUC for each class in 14 disease categories\\\n", + " :\\n\\nAtelectasis: {}\\nCardiomegaly: {}\\nConsolidation: {}\\nEdema: \\\n", + " {}\\nEnlarged-Cardiomediastinum: {}\\nFracture: {}\\nLung-Lesion: {}\\nLung-Opacity: \\\n", + " {}\\nNo-Finding: {}\\nPleural-Effusion: {}\\nPleural_Other: {}\\nPneumonia: \\\n", + " {}\\nPneumothorax: {}\\nSupport-Devices: {}\".format(\n", + " auc[0],\n", + " auc[1],\n", + " auc[2],\n", + " auc[3],\n", + " auc[4],\n", + " auc[5],\n", + " auc[6],\n", + " auc[7],\n", + " auc[8],\n", + " auc[9],\n", + " auc[10],\n", + " auc[11],\n", + " auc[12],\n", + " auc[13],\n", + " )\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This tutorial demonstrates the effectiveness of TransCheX model for multi-modal training using chest X-ray images and corrersponding reports. By using the Open-I dataset, we demonstrate how TransCheX model can be leveraged for multi-label classification problems involing 2 different modalities of data. \n", + "\n", + "As seen above, the mean AUC for the test dataset is 0.9629 which is 1.007% better than the best validation mean AUC." 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/runner.sh b/runner.sh index 46981768aa..b021cc4099 100755 --- a/runner.sh +++ b/runner.sh @@ -59,6 +59,7 @@ doRun=true autofix=false failfast=false pattern="-and -name '*' -and ! -wholename '*federated_learning*'\ + -and ! -wholename '*transchex_openi*'\ -and ! -wholename '*unetr_btcv*'\ -and ! -wholename '*profiling_camelyon*'\ -and ! -wholename '*profiling_train_base_nvtx*'\