MVA-2021 / object_recognition / hw3_bird_classification.ipynb
{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "A3.ipynb",
      "provenance": [],
      "collapsed_sections": [],
      "toc_visible": true
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "OMKfmIeSvpae"
      },
      "source": [
        "## Download the datasets and the models"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "QkDAyHwrBSPw",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "b42b491c-871d-4c27-b97c-3227287cdad2"
      },
      "source": [
        "print('downloading dataset...')\n",
        "!wget -nc https://www.di.ens.fr/willow/teaching/recvis18orig/assignment3/bird_dataset.zip\n",
        "print('done!')\n",
        "print('uncompressing...')\n",
        "#q quiet o overwrite\n",
        "!unzip -qo bird_dataset.zip \n",
        "print('done!')"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "downloading dataset...\n",
            "--2020-11-23 14:17:22--  https://www.di.ens.fr/willow/teaching/recvis18orig/assignment3/bird_dataset.zip\n",
            "Resolving www.di.ens.fr (www.di.ens.fr)... 129.199.99.14\n",
            "Connecting to www.di.ens.fr (www.di.ens.fr)|129.199.99.14|:443... connected.\n",
            "HTTP request sent, awaiting response... 200 OK\n",
            "Length: unspecified [application/zip]\n",
            "Saving to: ‘bird_dataset.zip’\n",
            "\n",
            "bird_dataset.zip        [              <=>   ] 183.48M  17.3MB/s    in 11s     \n",
            "\n",
            "2020-11-23 14:17:34 (16.0 MB/s) - ‘bird_dataset.zip’ saved [192388716]\n",
            "\n",
            "done!\n",
            "uncompressing...\n",
            "done!\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "V-pg-RKPw0Pb",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "ffbcd62d-d82f-4118-ef3e-a71d3d72c29a"
      },
      "source": [
        "# nc to not download if it is already there, -P to indicate the direction folder \n",
        "!wget -nc https://github.com/OlafenwaMoses/ImageAI/releases/download/1.0/resnet50_coco_best_v2.0.1.h5"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "File ‘resnet50_coco_best_v2.0.1.h5’ already there; not retrieving.\n",
            "\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "OqokyHSBy9vF"
      },
      "source": [
        "# tensorflow and keras version compatible with imageAI\n",
        "!pip install -U tensorflow==1.15.0 keras==2.3.1\n",
        "!pip install -U imageai"
      ],
      "execution_count": null,
      "outputs": []
    },
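    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "*Illustrative check, not part of the original notebook:* after downgrading TensorFlow/Keras in Colab, the runtime typically has to be restarted before the new versions are the ones that get imported; the cell below simply prints the installed versions so the ImageAI-compatible pins (1.15.0 / 2.3.1) can be confirmed."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {},
      "source": [
        "# Sanity check of the pinned versions (assumption: a Colab runtime restart may be\n",
        "# needed before the downgraded TensorFlow/Keras are picked up by the kernel).\n",
        "import tensorflow\n",
        "import keras\n",
        "print('tensorflow:', tensorflow.__version__)  # expected: 1.15.0\n",
        "print('keras:', keras.__version__)            # expected: 2.3.1"
      ],
      "execution_count": null,
      "outputs": []
    },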
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "aLZ9mWr3Jgyl"
      },
      "source": [
        "### Imports"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "0UmzPJkMzWhq"
      },
      "source": [
        "import time\n",
        "import csv\n",
        "import pandas as pd\n",
        "import numpy as np\n",
        "from PIL import Image \n",
        "import copy\n",
        "\n",
        "from google.colab import files\n",
        "\n",
        "import torch\n",
        "import torch.nn as nn\n",
        "import torch.optim as optim\n",
        "from torch.optim import lr_scheduler\n",
        "from torchvision import datasets, models\n",
        "import torchvision.transforms as transforms\n",
        "from torch.autograd import Variable\n",
        "from tqdm import tqdm\n",
        "import shutil"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "qtYNx4OXan_0"
      },
      "source": [
        "import tensorflow as tf \n",
        "\n",
        "# Detect if we have a GPU available\n",
        "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")"
      ],
      "execution_count": null,
      "outputs": []
    },
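    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "*Illustrative sketch, not from the original notebook:* `device` is consumed later by moving the model and each batch onto it with `.to(device)`; the toy layer and batch below only exist to show that pattern."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {},
      "source": [
        "# Minimal sketch of the .to(device) pattern (the layer and batch here are placeholders).\n",
        "example_layer = nn.Linear(4, 2).to(device)    # parameters live on the GPU when one is available\n",
        "example_batch = torch.randn(8, 4).to(device)  # batches are moved the same way before the forward pass\n",
        "print(example_layer(example_batch).shape, device)"
      ],
      "execution_count": null,
      "outputs": []
    },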
    {
      "cell_type": "code",
      "metadata": {
        "id": "OVFPUQ30wlbm",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "f854b497-e156-4b9c-b869-f9a13a196c6f"
      },
      "source": [
        "import os\n",
        "execution_path = os.getcwd()\n",
        "print(execution_path)"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "/content\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "-0lmSIQ5wdE7"
      },
      "source": [
        "## Preprocess and create a cropped dataset"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "1KrGrryFG5QO"
      },
      "source": [
        "# to create the output folder directories of imageAI crop\n",
        "try:\n",
        "  os.mkdir(\"output_retina\")\n",
        "except OSError:\n",
        "  _ = 1\n",
        "try:\n",
        "  os.mkdir(\"output_crop\")\n",
        "except OSError:\n",
        "  _ = 1\n",
        "\n",
        "for path, dirs, files in os.walk(\"bird_dataset\"):\n",
        "  path1 = \"output_retina/\"+path\n",
        "  try:\n",
        "    os.mkdir(path1)\n",
        "  except OSError:\n",
        "    _ = 1\n",
        "  path2 = \"output_crop/\"+path\n",
        "  try:\n",
        "    os.mkdir(path2)\n",
        "  except OSError:\n",
        "    _ = 1"
      ],
      "execution_count": null,
      "outputs": []
    },
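    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "*Illustrative sketch, not from the original notebook:* the mirrored `output_retina/` and `output_crop/` folders are meant to receive the detector visualizations and the bird crops. Written against the ImageAI 2.x RetinaNet API, a single image could be processed roughly as below; the example path and the 30% confidence threshold are placeholders."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {},
      "source": [
        "# Sketch only (assumption: ImageAI 2.x RetinaNet API; example path and threshold are placeholders).\n",
        "from imageai.Detection import ObjectDetection\n",
        "\n",
        "detector = ObjectDetection()\n",
        "detector.setModelTypeAsRetinaNet()\n",
        "detector.setModelPath(os.path.join(execution_path, 'resnet50_coco_best_v2.0.1.h5'))\n",
        "detector.loadModel()\n",
        "\n",
        "example_in = 'bird_dataset/train_images/013.Bobolink/Bobolink_0001_9261.jpg'\n",
        "detections = detector.detectObjectsFromImage(\n",
        "    input_image=example_in,\n",
        "    output_image_path=os.path.join('output_retina', example_in),  # annotated copy\n",
        "    minimum_percentage_probability=30)\n",
        "\n",
        "# Keep the most confident 'bird' box and save the corresponding crop.\n",
        "birds = [d for d in detections if d['name'] == 'bird']\n",
        "if birds:\n",
        "  best = max(birds, key=lambda d: d['percentage_probability'])\n",
        "  x1, y1, x2, y2 = best['box_points']\n",
        "  Image.open(example_in).crop((x1, y1, x2, y2)).save(os.path.join('output_crop', example_in))"
      ],
      "execution_count": null,
      "outputs": []
    },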
    {
      "cell_type": "code",
      "metadata": {
        "id": "68OhblAlBe5J",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "a2f96da8-cea1-4870-8a48-0149c5492f04"
      },
      "source": [
        "data_transforms = None\n",
        "train_loader = torch.utils.data.DataLoader(datasets.ImageFolder('bird_dataset/train_images'))\n",
        "val_loader = torch.utils.data.DataLoader(datasets.ImageFolder('bird_dataset/val_images'))\n",
        "test_loader = torch.utils.data.DataLoader(datasets.ImageFolder('bird_dataset/test_images'))\n",
        "print(train_loader.dataset.imgs)"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "[('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0002_1670.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0004_1528.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0005_1750.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0006_1763.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0007_1615.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0009_1522.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0010_1704.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0012_1784.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0014_1755.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0015_1653.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0017_1561.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0018_1613.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0019_1585.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0023_1485.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0027_1754.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0029_1620.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0031_1588.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0032_1776.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0033_1494.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0035_1591.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0036_1604.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0037_1560.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0040_1715.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0044_1731.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0046_1663.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0047_1706.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0051_1650.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0053_1672.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0055_1501.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0056_1493.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0058_1751.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0059_1480.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0060_1505.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0061_1510.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0062_1767.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0065_1502.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0068_1538.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0069_1546.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0071_1559.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0072_1696.jpg', 0), 
('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0074_1730.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0075_1617.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0076_1661.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0077_1724.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0078_1780.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0080_1549.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0082_1697.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0087_1765.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0088_1678.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0090_1567.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0091_1728.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0092_1516.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0094_1540.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0100_1646.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0101_1700.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0107_1590.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0108_1639.jpg', 0), ('bird_dataset/train_images/004.Groove_billed_Ani/Groove_Billed_Ani_0109_1592.jpg', 0), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0002_2278.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0004_2345.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0010_2269.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0012_2691.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0014_2679.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0016_2225.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0017_2668.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0018_2261.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0025_2231.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0026_2625.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0027_2329.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0028_2682.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0030_2268.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0032_2214.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0035_2611.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0038_2294.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0041_2653.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0042_2302.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0045_2303.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0046_2688.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0049_2258.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0054_2631.jpg', 1), 
('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0061_2270.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0064_2290.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0065_2310.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0066_2693.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0070_2325.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0074_2277.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0078_2659.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0079_2343.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0080_2234.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0082_2593.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0087_2622.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0090_2658.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0095_2610.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0096_2634.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0097_2322.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0099_2560.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0101_2630.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0102_2620.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0103_2273.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0106_2608.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0109_2232.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0111_2613.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0112_2340.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0115_2279.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0116_2327.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0131_2289.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0132_2293.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0133_2324.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0135_2607.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0137_2680.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0139_2567.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0140_2586.jpg', 1), ('bird_dataset/train_images/009.Brewer_Blackbird/Brewer_Blackbird_0142_2636.jpg', 1), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0001_3695.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0006_6005.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0007_3706.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0009_5841.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0010_6386.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0011_5845.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0012_6015.jpg', 2), 
('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0013_5762.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0014_3761.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0017_4116.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0020_4050.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0022_4483.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0023_5257.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0024_4180.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0025_5342.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0027_4123.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0028_4709.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0029_4804.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0032_4004.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0039_4285.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0040_4522.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0042_3635.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0044_5621.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0045_4526.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0046_4242.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0047_3802.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0049_5598.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0052_5575.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0053_4072.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0055_4345.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0058_4141.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0060_4688.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0061_4196.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0062_4233.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0064_4936.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0065_4026.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0066_5070.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0071_3988.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0072_4338.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0074_4146.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0075_4953.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0078_5372.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0079_4527.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0081_6081.jpg', 2), 
('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0085_5846.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0089_4188.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0091_4096.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0093_5948.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0096_5019.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0099_3985.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0101_6244.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0104_3918.jpg', 2), ('bird_dataset/train_images/010.Red_winged_Blackbird/Red_Winged_Blackbird_0109_4454.jpg', 2), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0001_6548.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0003_6749.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0006_6633.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0009_6853.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0011_7028.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0013_6902.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0015_6885.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0016_6684.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0017_6755.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0019_6704.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0020_6679.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0022_6808.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0023_6752.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0026_6768.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0027_6593.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0031_6699.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0032_6611.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0033_6879.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0036_6550.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0043_2597.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0047_7009.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0048_6632.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0051_6715.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0052_7035.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0054_6676.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0055_6923.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0056_6856.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0057_6935.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0073_6744.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0074_6585.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0075_6717.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0076_6716.jpg', 3), 
('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0080_6877.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0081_6967.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0085_6713.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0086_6658.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0087_6727.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0091_6695.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0092_2727.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0093_6628.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0094_6582.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0096_6846.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0100_6597.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0101_6880.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0102_6590.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0104_6685.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0105_6937.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0107_6839.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0108_6867.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0109_6698.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0111_3220.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0112_3415.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0113_6664.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0114_6760.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0120_6762.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0121_6637.jpg', 3), ('bird_dataset/train_images/011.Rusty_Blackbird/Rusty_Blackbird_0122_6736.jpg', 3), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0003_8337.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0008_8756.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0009_8248.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0012_8443.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0013_8362.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0015_8207.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0017_8511.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0018_8588.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0020_8549.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0023_7325.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0024_8586.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0025_8262.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0026_8545.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0031_8456.jpg', 4), 
('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0034_7736.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0035_8447.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0038_8689.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0040_7514.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0041_8264.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0042_8574.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0047_7929.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0049_8548.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0051_8387.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0053_8410.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0055_8357.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0056_8455.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0057_8236.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0058_8350.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0059_8079.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0061_8208.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0062_8310.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0065_8481.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0070_8583.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0072_8606.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0073_8442.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0074_8452.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0077_8332.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0079_8535.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0080_8601.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0082_8577.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0083_8300.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0084_8435.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0085_8363.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0086_8487.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0087_8358.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0088_8257.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0089_8326.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0091_8555.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0095_8458.jpg', 4), 
('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0098_8367.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0100_8407.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0102_8441.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0108_7937.jpg', 4), ('bird_dataset/train_images/012.Yellow_headed_Blackbird/Yellow_Headed_Blackbird_0109_8271.jpg', 4), ('bird_dataset/train_images/013.Bobolink/Bobolink_0001_9261.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0002_11085.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0007_9246.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0008_9289.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0013_9367.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0014_11055.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0018_9402.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0019_10552.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0020_9194.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0021_10623.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0026_11057.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0027_10569.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0032_10217.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0033_10809.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0035_11117.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0039_9779.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0040_9681.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0043_10607.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0044_9990.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0048_9988.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0049_9540.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0050_9821.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0052_9423.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0053_10166.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0056_9080.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0057_10051.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0059_10041.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0064_10092.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0065_9375.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0067_11533.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0069_9085.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0070_10624.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0071_9503.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0074_9311.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0076_11093.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0079_10736.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0081_9439.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0092_10026.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0094_9823.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0097_10861.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0099_9314.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0102_10807.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0104_10273.jpg', 5), 
('bird_dataset/train_images/013.Bobolink/Bobolink_0106_9126.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0107_10252.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0109_9869.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0110_9496.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0112_11073.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0114_10627.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0115_9265.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0117_10215.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0119_10430.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0120_10859.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0124_10182.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0126_11458.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0128_9947.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0131_9578.jpg', 5), ('bird_dataset/train_images/013.Bobolink/Bobolink_0133_9618.jpg', 5), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0001_12469.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0002_12163.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0003_13049.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0004_13195.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0006_14317.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0010_13000.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0013_12949.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0015_12632.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0016_13661.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0017_11574.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0018_11883.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0021_13979.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0022_12781.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0024_13523.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0025_12532.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0026_11964.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0027_11579.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0028_12335.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0029_13761.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0031_13300.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0032_12215.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0033_12777.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0034_12464.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0036_13716.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0037_14128.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0039_12756.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0040_11805.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0041_13987.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0044_14389.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0047_12966.jpg', 6), 
('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0049_13641.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0050_11811.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0051_12837.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0052_11893.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0053_13391.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0054_12213.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0055_13473.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0056_12637.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0058_12207.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0059_11596.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0060_14495.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0061_13259.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0063_11820.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0065_14558.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0066_12869.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0068_13081.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0071_11639.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0072_14197.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0073_13933.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0074_12829.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0075_12835.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0077_14060.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0078_11852.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0080_13416.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0082_11907.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0084_11848.jpg', 6), ('bird_dataset/train_images/014.Indigo_Bunting/Indigo_Bunting_0085_11991.jpg', 6), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0001_14916.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0004_14887.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0008_15195.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0009_15163.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0010_14915.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0015_14690.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0020_14837.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0021_14686.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0025_15079.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0026_14669.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0027_14895.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0028_14950.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0030_14986.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0031_15018.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0032_14778.jpg', 7), 
('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0034_14864.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0035_14920.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0037_15021.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0039_15081.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0040_14923.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0041_15152.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0042_14820.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0045_14954.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0046_14787.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0047_14863.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0048_14844.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0052_14618.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0054_14714.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0056_15032.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0057_14775.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0059_14749.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0061_15155.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0067_14672.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0070_14665.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0073_14594.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0074_14854.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0076_14662.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0078_15164.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0080_14893.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0081_14709.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0082_15047.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0084_14815.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0085_14627.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0086_14992.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0087_15096.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0089_14598.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0092_14656.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0093_15030.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0094_11894.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0095_14919.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0097_14617.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0102_14605.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0105_15017.jpg', 7), ('bird_dataset/train_images/015.Lazuli_Bunting/Lazuli_Bunting_0107_14705.jpg', 7), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0001_16585.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0005_15202.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0006_15249.jpg', 8), 
('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0009_16674.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0010_16948.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0011_16690.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0013_15294.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0016_15200.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0019_15231.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0021_15295.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0025_16722.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0027_16536.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0028_15205.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0029_16530.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0032_16605.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0036_16563.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0039_15235.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0040_16691.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0044_16557.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0046_16535.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0050_16670.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0053_16404.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0054_16711.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0055_15208.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0056_16599.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0058_16719.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0060_15224.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0061_16930.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0066_15241.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0069_16462.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0070_16515.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0071_15209.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0072_16697.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0073_16737.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0076_16765.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0077_16819.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0078_16565.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0079_15197.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0080_16534.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0081_15230.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0083_16587.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0084_16531.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0085_15282.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0086_16540.jpg', 8), 
('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0087_15232.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0091_15198.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0093_15212.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0094_16467.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0096_15233.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0098_15226.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0099_16525.jpg', 8), ('bird_dataset/train_images/016.Painted_Bunting/Painted_Bunting_0102_16642.jpg', 8), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0001_20695.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0002_21395.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0006_20867.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0007_20186.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0008_20430.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0013_20562.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0015_21230.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0019_20567.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0022_19585.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0023_20668.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0024_20739.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0027_20968.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0028_20598.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0031_21635.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0032_21551.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0039_21040.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0043_21008.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0045_20950.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0048_20558.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0049_21311.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0050_20763.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0053_20694.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0055_20671.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0057_20979.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0060_20656.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0063_20707.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0067_21043.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0069_21065.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0074_19601.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0075_21125.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0080_20139.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0091_20416.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0094_21303.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0100_20674.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0101_21178.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0102_20644.jpg', 9), 
('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0103_20930.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0104_20716.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0105_20864.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0107_20513.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0111_19550.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0113_21270.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0117_21333.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0118_20476.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0125_19833.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0126_19446.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0127_20034.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0129_20987.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0130_20328.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0131_19633.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0134_20596.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0138_20945.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0139_21281.jpg', 9), ('bird_dataset/train_images/019.Gray_Catbird/Gray_Catbird_0141_21174.jpg', 9), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0001_21928.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0002_21819.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0005_21828.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0008_21856.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0010_21777.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0011_21820.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0012_21961.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0013_22008.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0014_21970.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0022_21944.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0026_21845.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0029_22017.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0032_21823.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0033_21873.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0034_21955.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0035_21870.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0039_21654.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0041_21683.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0044_22106.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0048_21797.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0052_21866.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0058_21864.jpg', 10), 
('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0061_21967.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0062_21673.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0063_21783.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0065_22137.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0066_21839.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0071_22129.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0072_21830.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0073_21932.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0077_21986.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0079_21978.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0081_21829.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0084_22082.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0086_21877.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0089_21804.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0090_21931.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0091_22111.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0094_21693.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0095_21832.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0097_21748.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0098_21987.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0100_21913.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0101_21677.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0102_21696.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0103_21670.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0105_21714.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0106_22032.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0107_21698.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0109_21796.jpg', 10), ('bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0110_21871.jpg', 10), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0001_22314.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0002_22318.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0007_22172.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0013_22336.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0014_22367.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0015_22275.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0017_22138.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0018_22546.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0020_22141.jpg', 11), 
('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0022_22279.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0024_22382.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0027_22372.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0030_22693.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0031_22233.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0035_22223.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0038_22399.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0040_22341.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0042_22155.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0048_22557.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0049_22357.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0052_22558.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0053_22623.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0054_22147.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0062_22418.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0064_22649.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0067_22142.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0068_22194.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0073_22247.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0074_22620.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0075_22588.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0079_22690.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0080_22303.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0082_22330.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0085_22674.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0086_22611.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0091_22629.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0093_22621.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0095_22594.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0097_22580.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0098_22676.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0099_22566.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0101_22559.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0105_22675.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0110_22549.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0111_22168.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0115_22304.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0117_22741.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0120_22189.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0121_22319.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0124_22585.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0125_22220.jpg', 11), 
('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0126_22639.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0129_22358.jpg', 11), ('bird_dataset/train_images/021.Eastern_Towhee/Eastern_Towhee_0134_22624.jpg', 11), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0001_23398.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0002_23072.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0003_22922.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0006_22925.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0007_22934.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0013_23391.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0014_23050.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0015_23198.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0016_23077.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0017_23141.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0018_23090.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0019_23058.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0021_23097.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0022_23157.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0023_23254.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0025_22820.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0026_22913.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0028_22892.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0029_23043.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0030_22926.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0032_22886.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0033_22975.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0035_23000.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0036_22937.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0038_23110.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0040_23144.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0042_23151.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0044_22884.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0045_22916.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0049_22924.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0052_23356.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0053_22957.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0061_22902.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0062_23038.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0063_22865.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0064_22849.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0065_23118.jpg', 12), 
('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0067_23352.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0068_23019.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0071_23007.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0072_23069.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0073_23259.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0074_22881.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0075_22970.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0076_23021.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0078_23203.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0079_22874.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0080_23002.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0082_22978.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0084_23265.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0087_23126.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0088_22948.jpg', 12), ('bird_dataset/train_images/023.Brandt_Cormorant/Brandt_Cormorant_0091_22825.jpg', 12), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0001_796219.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0003_796246.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0005_24173.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0008_796250.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0009_24033.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0011_24138.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0012_796247.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0013_24131.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0014_24030.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0016_796245.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0017_24019.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0018_24140.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0019_796242.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0020_796237.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0021_24189.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0022_796221.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0023_24058.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0024_24167.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0025_796213.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0026_796229.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0027_24022.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0029_796256.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0030_24103.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0031_24139.jpg', 13), 
('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0032_796233.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0034_796209.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0037_24032.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0039_24026.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0040_24134.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0043_796224.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0044_24145.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0046_796218.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0049_24147.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0051_24083.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0053_24170.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0054_24159.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0055_24076.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0057_24074.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0060_24082.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0061_796232.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0064_24199.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0073_796226.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0074_24045.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0076_796235.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0082_24175.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0085_24152.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0086_796259.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0087_796223.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0089_796220.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0090_24179.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0091_796212.jpg', 13), ('bird_dataset/train_images/026.Bronzed_Cowbird/Bronzed_Cowbird_0092_796215.jpg', 13), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0001_24449.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0002_24838.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0004_24851.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0005_24659.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0006_25034.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0007_24902.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0012_24956.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0023_24940.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0027_24729.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0031_24999.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0032_24800.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0042_24578.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0046_24463.jpg', 14), 
('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0047_24984.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0048_24976.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0049_24911.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0051_24468.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0053_24451.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0056_24452.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0057_24529.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0058_24933.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0061_24601.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0063_24724.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0064_24840.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0065_24464.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0069_24618.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0072_24977.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0073_24546.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0074_24789.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0075_24947.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0079_24647.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0085_24938.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0088_24731.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0093_24581.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0100_24502.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0101_24987.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0102_25000.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0103_24632.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0104_24698.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0106_24617.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0107_24827.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0110_24866.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0111_24590.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0114_24649.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0115_24488.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0118_24500.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0120_24955.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0121_24574.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0123_24589.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0124_24963.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0125_24995.jpg', 14), ('bird_dataset/train_images/028.Brown_Creeper/Brown_Creeper_0127_24656.jpg', 14), ('bird_dataset/train_images/029.American_Crow/American_Crow_0001_25053.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0002_25122.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0003_25130.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0004_25819.jpg', 15), 
('bird_dataset/train_images/029.American_Crow/American_Crow_0011_25151.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0012_25305.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0014_25287.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0016_25112.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0020_25618.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0021_25137.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0025_25522.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0027_25146.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0030_25092.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0036_25313.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0043_25666.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0047_25397.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0048_25062.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0050_25255.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0051_25505.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0053_25203.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0059_25599.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0067_25443.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0068_25198.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0069_25506.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0074_25350.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0079_25463.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0081_25837.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0085_25260.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0088_25303.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0093_25694.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0094_25576.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0099_25717.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0101_25118.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0102_25066.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0104_25086.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0107_25353.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0109_25123.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0110_25541.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0111_25127.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0113_25149.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0116_25199.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0117_25090.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0121_25720.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0122_25200.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0124_25356.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0127_25412.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0130_25163.jpg', 15), 
('bird_dataset/train_images/029.American_Crow/American_Crow_0131_25706.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0132_25704.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0134_25206.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0136_25117.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0137_25221.jpg', 15), ('bird_dataset/train_images/029.American_Crow/American_Crow_0139_25186.jpg', 15), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0001_26031.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0002_26072.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0003_25970.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0004_25936.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0007_26023.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0010_25836.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0011_25866.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0012_25946.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0013_25939.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0014_26041.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0016_25854.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0017_26127.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0018_25879.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0020_26027.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0022_26062.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0023_26037.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0024_26064.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0025_25893.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0028_25968.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0031_25909.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0032_26014.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0033_25915.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0034_25891.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0035_26081.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0037_26071.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0038_26000.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0040_25158.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0041_25887.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0042_26148.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0043_25847.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0044_25964.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0047_26070.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0049_26040.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0051_25934.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0053_26067.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0055_26077.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0056_25851.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0058_25999.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0059_25864.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0060_26016.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0063_26094.jpg', 16), 
('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0065_25942.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0067_26124.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0068_25859.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0072_25945.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0073_25977.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0076_25971.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0078_26144.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0079_26030.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0080_25861.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0081_25908.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0082_26012.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0083_25949.jpg', 16), ('bird_dataset/train_images/030.Fish_Crow/Fish_Crow_0085_25919.jpg', 16), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0001_26242.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0005_26161.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0006_26233.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0007_26320.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0008_795305.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0015_26208.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0017_26221.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0018_26218.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0022_795319.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0023_26258.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0024_795331.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0027_26319.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0030_26240.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0031_26318.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0032_26292.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0034_795320.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0042_795308.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0043_795324.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0044_26243.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0045_26194.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0046_795328.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0047_26176.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0051_795318.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0052_26232.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0053_795321.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0054_26313.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0055_26223.jpg', 17), 
('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0057_795323.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0058_795292.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0061_795327.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0062_795309.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0065_26203.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0066_26303.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0070_795310.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0071_26288.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0073_795304.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0074_795286.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0075_795298.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0077_26222.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0079_26180.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0081_26209.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0082_26241.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0083_795315.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0084_26175.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0085_795294.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0086_26188.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0087_795300.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0088_26217.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0089_795322.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0090_26311.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0091_26246.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0092_795313.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0093_795316.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0095_795307.jpg', 17), ('bird_dataset/train_images/031.Black_billed_Cuckoo/Black_Billed_Cuckoo_0096_26204.jpg', 17), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0002_26715.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0003_26797.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0004_26790.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0005_26684.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0006_26578.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0007_26687.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0009_26656.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0010_26795.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0012_26712.jpg', 18), 
('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0014_26754.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0018_26535.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0019_26803.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0022_26423.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0023_26637.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0024_26832.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0026_26794.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0027_26844.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0028_26446.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0029_26865.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0032_26616.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0034_26694.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0036_26682.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0038_26912.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0039_26510.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0042_26479.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0043_26492.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0045_26685.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0048_26632.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0049_26766.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0050_26424.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0053_26738.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0059_26828.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0060_26686.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0066_26600.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0067_26878.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0069_26597.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0073_26744.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0074_26466.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0077_26431.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0078_26888.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0081_26429.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0084_26761.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0088_26812.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0090_26714.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0091_26428.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0092_26859.jpg', 18), 
('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0093_26432.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0094_26643.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0097_26713.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0098_26501.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0104_26814.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0116_26544.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0117_26651.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0119_26550.jpg', 18), ('bird_dataset/train_images/033.Yellow_billed_Cuckoo/Yellow_Billed_Cuckoo_0121_26807.jpg', 18), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0001_27211.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0009_26977.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0010_27039.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0012_27062.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0013_27110.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0015_797291.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0016_27181.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0018_26978.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0019_27192.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0021_797286.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0022_27028.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0023_797288.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0024_27057.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0025_797274.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0026_27160.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0028_27114.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0029_797300.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0030_27068.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0031_797299.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0034_797305.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0036_797287.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0038_797309.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0040_26985.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0041_27105.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0042_27143.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0043_26990.jpg', 19), 
('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0044_26976.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0046_797295.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0047_797303.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0048_27236.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0051_26988.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0053_797276.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0055_27112.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0056_797293.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0057_27107.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0061_26979.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0063_27123.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0064_27007.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0066_797298.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0067_797289.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0068_27196.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0071_797285.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0072_26993.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0073_27104.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0074_27156.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0075_27165.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0076_27200.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0078_27136.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0079_797294.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0080_27108.jpg', 19), ('bird_dataset/train_images/034.Gray_crowned_Rosy_Finch/Gray_Crowned_Rosy_Finch_0084_27034.jpg', 19)]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "iVHSeEGjEf_Z",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "b42958d1-0abb-42d7-ca5a-f63a918f3ed8"
      },
      "source": [
        "import imageai\n",
        "from imageai.Detection import ObjectDetection\n",
        "\n",
        "detector = ObjectDetection()\n",
        "detector.setModelTypeAsRetinaNet()\n",
        "detector.setModelPath( os.path.join(execution_path , \"resnet50_coco_best_v2.0.1.h5\"))\n",
        "detector.loadModel(\"normal\")\n",
        "custom_objects = detector.CustomObjects(bird=True)"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n",
            "Instructions for updating:\n",
            "If using Keras pass *_constraint arguments to layers.\n",
            "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:4070: The name tf.nn.max_pool is deprecated. Please use tf.nn.max_pool2d instead.\n",
            "\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "Using TensorFlow backend.\n"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/imageai/Detection/keras_retinanet/backend/tensorflow_backend.py:22: The name tf.image.resize_images is deprecated. Please use tf.image.resize instead.\n",
            "\n",
            "tracking <tf.Variable 'Variable:0' shape=(9, 4) dtype=float32> anchors\n",
            "tracking <tf.Variable 'Variable_1:0' shape=(9, 4) dtype=float32> anchors\n",
            "tracking <tf.Variable 'Variable_2:0' shape=(9, 4) dtype=float32> anchors\n",
            "tracking <tf.Variable 'Variable_3:0' shape=(9, 4) dtype=float32> anchors\n",
            "tracking <tf.Variable 'Variable_4:0' shape=(9, 4) dtype=float32> anchors\n",
            "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/imageai/Detection/keras_retinanet/backend/tensorflow_backend.py:46: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n",
            "Instructions for updating:\n",
            "Use tf.where in 2.0, which has the same broadcast rule as np.where\n"
          ],
          "name": "stdout"
        }
      ]
    },
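    {
      "cell_type": "markdown",
      "metadata": {
        "id": "retina-sanity-note"
      },
      "source": [
        "Before looping over the whole dataset, a quick sanity check can be run on a single image. The cell below is a sketch added for clarity (not part of the original run); it assumes the `detector`, `custom_objects`, `execution_path` and `train_loader` defined earlier, and relies on imageai returning one dictionary per detection with `name`, `percentage_probability` and `box_points` keys."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "retina-sanity-check"
      },
      "source": [
        "# Sketch: detect birds in one training image and inspect the raw detections\n",
        "sample_path = train_loader.dataset.imgs[0][0]  # imgs holds (path, class_index) pairs\n",
        "sample_detections = detector.detectCustomObjectsFromImage(\n",
        "    custom_objects=custom_objects,\n",
        "    input_image=os.path.join(execution_path, sample_path),\n",
        "    output_image_path=os.path.join(execution_path, 'output_retina', 'sample.jpg'),  # hypothetical output name\n",
        "    minimum_percentage_probability=10)\n",
        "for obj in sample_detections:\n",
        "  print(obj['name'], obj['percentage_probability'], obj['box_points'])"
      ],
      "execution_count": null,
      "outputs": []
    },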
    {
      "cell_type": "code",
      "metadata": {
        "id": "-SyU6LeuPqQh"
      },
      "source": [
        "with open('output_retina/train_bounding_boxes.csv', mode='w') as csv_file:\n",
        "  csv_writer = csv.writer(csv_file, delimiter=';', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n",
        "  for img in train_loader.dataset.imgs:\n",
        "    img_path = img[0]\n",
        "    detections = detector.detectCustomObjectsFromImage(custom_objects=custom_objects, \n",
        "                                                    input_image=os.path.join(execution_path , img_path), \n",
        "                                                    output_image_path=os.path.join(execution_path, \"output_retina\", img_path), \n",
        "                                                    minimum_percentage_probability=10)\n",
        "    if len(detections) == 0:\n",
        "      csv_writer.writerow([img_path])\n",
        "    for eachObject in detections:\n",
        "      csv_writer.writerow([img_path, eachObject[\"box_points\"], eachObject[\"percentage_probability\"]])"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "8g4T_BiMCeP0"
      },
      "source": [
        "with open('output_retina/val_bounding_boxes.csv', mode='w') as csv_file:\n",
        "  csv_writer = csv.writer(csv_file, delimiter=';', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n",
        "  for img in val_loader.dataset.imgs:\n",
        "    img_path = img[0]\n",
        "    detections = detector.detectCustomObjectsFromImage(custom_objects=custom_objects, \n",
        "                                                    input_image=os.path.join(execution_path , img_path), \n",
        "                                                    output_image_path=os.path.join(execution_path, \"output_retina\", img_path), \n",
        "                                                    minimum_percentage_probability=10)\n",
        "    if len(detections) == 0:\n",
        "      csv_writer.writerow([img_path])\n",
        "    for eachObject in detections:\n",
        "      csv_writer.writerow([img_path, eachObject[\"box_points\"], eachObject[\"percentage_probability\"]])"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "KW9CrcAcSLjd"
      },
      "source": [
        "with open('output_retina/test_bounding_boxes.csv', mode='w') as csv_file:\n",
        "  csv_writer = csv.writer(csv_file, delimiter=';', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n",
        "  for img in test_loader.dataset.imgs:\n",
        "    img_path = img[0]\n",
        "    detections = detector.detectCustomObjectsFromImage(custom_objects=custom_objects, \n",
        "                                                    input_image=os.path.join(execution_path , img_path), \n",
        "                                                    output_image_path=os.path.join(execution_path, \"output_retina\", img_path), \n",
        "                                                    minimum_percentage_probability=10)\n",
        "    if len(detections) == 0:\n",
        "      csv_writer.writerow([img_path])\n",
        "    for eachObject in detections:\n",
        "      csv_writer.writerow([img_path, eachObject[\"box_points\"], eachObject[\"percentage_probability\"]])"
      ],
      "execution_count": null,
      "outputs": []
    },
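    {
      "cell_type": "markdown",
      "metadata": {
        "id": "retina-refactor-note"
      },
      "source": [
        "The three cells above repeat the same loop for the train, validation and test splits. A small helper like the sketch below could factor them out (it assumes the `detector`, `custom_objects`, `execution_path` and data loaders defined earlier); it is only a possible refactoring, not the code that produced the CSVs above."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "retina-refactor-sketch"
      },
      "source": [
        "def export_bird_boxes(loader, csv_path):\n",
        "  \"\"\"Detect birds in every image of `loader` and log one CSV row per detection.\"\"\"\n",
        "  with open(csv_path, mode='w') as csv_file:\n",
        "    csv_writer = csv.writer(csv_file, delimiter=';', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n",
        "    for img_path, _ in loader.dataset.imgs:  # imgs holds (path, class_index) pairs\n",
        "      detections = detector.detectCustomObjectsFromImage(\n",
        "          custom_objects=custom_objects,\n",
        "          input_image=os.path.join(execution_path, img_path),\n",
        "          output_image_path=os.path.join(execution_path, 'output_retina', img_path),\n",
        "          minimum_percentage_probability=10)\n",
        "      if len(detections) == 0:\n",
        "        csv_writer.writerow([img_path])  # keep a trace of images with no detected bird\n",
        "      for obj in detections:\n",
        "        csv_writer.writerow([img_path, obj['box_points'], obj['percentage_probability']])\n",
        "\n",
        "# example: export_bird_boxes(train_loader, 'output_retina/train_bounding_boxes.csv')"
      ],
      "execution_count": null,
      "outputs": []
    },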
    {
      "cell_type": "code",
      "metadata": {
        "id": "Ld4TlvKGg4qX",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 34
        },
        "outputId": "aac2481d-6d72-4e77-cd19-caf4d6497c53"
      },
      "source": [
        "!zip -qr output_retina/train_crop output_retina/bird_dataset/train_images\n",
        "files.download(\"output_retina/train_bounding_boxes.csv\")\n",
        "files.download(\"output_retina/train_crop.zip\")"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "\n",
              "    async function download(id, filename, size) {\n",
              "      if (!google.colab.kernel.accessAllowed) {\n",
              "        return;\n",
              "      }\n",
              "      const div = document.createElement('div');\n",
              "      const label = document.createElement('label');\n",
              "      label.textContent = `Downloading \"${filename}\": `;\n",
              "      div.appendChild(label);\n",
              "      const progress = document.createElement('progress');\n",
              "      progress.max = size;\n",
              "      div.appendChild(progress);\n",
              "      document.body.appendChild(div);\n",
              "\n",
              "      const buffers = [];\n",
              "      let downloaded = 0;\n",
              "\n",
              "      const channel = await google.colab.kernel.comms.open(id);\n",
              "      // Send a message to notify the kernel that we're ready.\n",
              "      channel.send({})\n",
              "\n",
              "      for await (const message of channel.messages) {\n",
              "        // Send a message to notify the kernel that we're ready.\n",
              "        channel.send({})\n",
              "        if (message.buffers) {\n",
              "          for (const buffer of message.buffers) {\n",
              "            buffers.push(buffer);\n",
              "            downloaded += buffer.byteLength;\n",
              "            progress.value = downloaded;\n",
              "          }\n",
              "        }\n",
              "      }\n",
              "      const blob = new Blob(buffers, {type: 'application/binary'});\n",
              "      const a = document.createElement('a');\n",
              "      a.href = window.URL.createObjectURL(blob);\n",
              "      a.download = filename;\n",
              "      div.appendChild(a);\n",
              "      a.click();\n",
              "      div.remove();\n",
              "    }\n",
              "  "
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "download(\"download_6314d46a-d2d9-44af-bd25-81c6084098b5\", \"train_bounding_boxes.csv\", 204764)"
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "\n",
              "    async function download(id, filename, size) {\n",
              "      if (!google.colab.kernel.accessAllowed) {\n",
              "        return;\n",
              "      }\n",
              "      const div = document.createElement('div');\n",
              "      const label = document.createElement('label');\n",
              "      label.textContent = `Downloading \"${filename}\": `;\n",
              "      div.appendChild(label);\n",
              "      const progress = document.createElement('progress');\n",
              "      progress.max = size;\n",
              "      div.appendChild(progress);\n",
              "      document.body.appendChild(div);\n",
              "\n",
              "      const buffers = [];\n",
              "      let downloaded = 0;\n",
              "\n",
              "      const channel = await google.colab.kernel.comms.open(id);\n",
              "      // Send a message to notify the kernel that we're ready.\n",
              "      channel.send({})\n",
              "\n",
              "      for await (const message of channel.messages) {\n",
              "        // Send a message to notify the kernel that we're ready.\n",
              "        channel.send({})\n",
              "        if (message.buffers) {\n",
              "          for (const buffer of message.buffers) {\n",
              "            buffers.push(buffer);\n",
              "            downloaded += buffer.byteLength;\n",
              "            progress.value = downloaded;\n",
              "          }\n",
              "        }\n",
              "      }\n",
              "      const blob = new Blob(buffers, {type: 'application/binary'});\n",
              "      const a = document.createElement('a');\n",
              "      a.href = window.URL.createObjectURL(blob);\n",
              "      a.download = filename;\n",
              "      div.appendChild(a);\n",
              "      a.click();\n",
              "      div.remove();\n",
              "    }\n",
              "  "
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "download(\"download_6cadb841-de4a-4287-b4fc-9c99e43f9384\", \"train_crop.zip\", 36836408)"
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "5SeGa8veUUR9",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 17
        },
        "outputId": "4f4dc305-b2a5-43ba-bb50-900a1ac2e102"
      },
      "source": [
        "!zip -qr output_retina/val_crop output_retina/bird_dataset/val_images\n",
        "files.download(\"output_retina/val_bounding_boxes.csv\")\n",
        "files.download(\"output_retina/val_crop.zip\")"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "\n",
              "    async function download(id, filename, size) {\n",
              "      if (!google.colab.kernel.accessAllowed) {\n",
              "        return;\n",
              "      }\n",
              "      const div = document.createElement('div');\n",
              "      const label = document.createElement('label');\n",
              "      label.textContent = `Downloading \"${filename}\": `;\n",
              "      div.appendChild(label);\n",
              "      const progress = document.createElement('progress');\n",
              "      progress.max = size;\n",
              "      div.appendChild(progress);\n",
              "      document.body.appendChild(div);\n",
              "\n",
              "      const buffers = [];\n",
              "      let downloaded = 0;\n",
              "\n",
              "      const channel = await google.colab.kernel.comms.open(id);\n",
              "      // Send a message to notify the kernel that we're ready.\n",
              "      channel.send({})\n",
              "\n",
              "      for await (const message of channel.messages) {\n",
              "        // Send a message to notify the kernel that we're ready.\n",
              "        channel.send({})\n",
              "        if (message.buffers) {\n",
              "          for (const buffer of message.buffers) {\n",
              "            buffers.push(buffer);\n",
              "            downloaded += buffer.byteLength;\n",
              "            progress.value = downloaded;\n",
              "          }\n",
              "        }\n",
              "      }\n",
              "      const blob = new Blob(buffers, {type: 'application/binary'});\n",
              "      const a = document.createElement('a');\n",
              "      a.href = window.URL.createObjectURL(blob);\n",
              "      a.download = filename;\n",
              "      div.appendChild(a);\n",
              "      a.click();\n",
              "      div.remove();\n",
              "    }\n",
              "  "
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "download(\"download_c88861f4-06d7-4732-b1ad-6057c64cbe1a\", \"val_bounding_boxes.csv\", 20599)"
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "\n",
              "    async function download(id, filename, size) {\n",
              "      if (!google.colab.kernel.accessAllowed) {\n",
              "        return;\n",
              "      }\n",
              "      const div = document.createElement('div');\n",
              "      const label = document.createElement('label');\n",
              "      label.textContent = `Downloading \"${filename}\": `;\n",
              "      div.appendChild(label);\n",
              "      const progress = document.createElement('progress');\n",
              "      progress.max = size;\n",
              "      div.appendChild(progress);\n",
              "      document.body.appendChild(div);\n",
              "\n",
              "      const buffers = [];\n",
              "      let downloaded = 0;\n",
              "\n",
              "      const channel = await google.colab.kernel.comms.open(id);\n",
              "      // Send a message to notify the kernel that we're ready.\n",
              "      channel.send({})\n",
              "\n",
              "      for await (const message of channel.messages) {\n",
              "        // Send a message to notify the kernel that we're ready.\n",
              "        channel.send({})\n",
              "        if (message.buffers) {\n",
              "          for (const buffer of message.buffers) {\n",
              "            buffers.push(buffer);\n",
              "            downloaded += buffer.byteLength;\n",
              "            progress.value = downloaded;\n",
              "          }\n",
              "        }\n",
              "      }\n",
              "      const blob = new Blob(buffers, {type: 'application/binary'});\n",
              "      const a = document.createElement('a');\n",
              "      a.href = window.URL.createObjectURL(blob);\n",
              "      a.download = filename;\n",
              "      div.appendChild(a);\n",
              "      a.click();\n",
              "      div.remove();\n",
              "    }\n",
              "  "
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "download(\"download_0bd5956a-b9ed-4409-98a9-6bac92f6dfce\", \"val_crop.zip\", 3497079)"
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "rWLwfRxBEoPb",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 34
        },
        "outputId": "fdcf67ba-6974-4796-dfc5-2f13bc6422a3"
      },
      "source": [
        "!zip -qr output_retina/test_crop output_retina/bird_dataset/test_images\n",
        "files.download(\"output_retina/test_bounding_boxes.csv\")\n",
        "files.download(\"output_retina/test_crop.zip\")"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "\n",
              "    async function download(id, filename, size) {\n",
              "      if (!google.colab.kernel.accessAllowed) {\n",
              "        return;\n",
              "      }\n",
              "      const div = document.createElement('div');\n",
              "      const label = document.createElement('label');\n",
              "      label.textContent = `Downloading \"${filename}\": `;\n",
              "      div.appendChild(label);\n",
              "      const progress = document.createElement('progress');\n",
              "      progress.max = size;\n",
              "      div.appendChild(progress);\n",
              "      document.body.appendChild(div);\n",
              "\n",
              "      const buffers = [];\n",
              "      let downloaded = 0;\n",
              "\n",
              "      const channel = await google.colab.kernel.comms.open(id);\n",
              "      // Send a message to notify the kernel that we're ready.\n",
              "      channel.send({})\n",
              "\n",
              "      for await (const message of channel.messages) {\n",
              "        // Send a message to notify the kernel that we're ready.\n",
              "        channel.send({})\n",
              "        if (message.buffers) {\n",
              "          for (const buffer of message.buffers) {\n",
              "            buffers.push(buffer);\n",
              "            downloaded += buffer.byteLength;\n",
              "            progress.value = downloaded;\n",
              "          }\n",
              "        }\n",
              "      }\n",
              "      const blob = new Blob(buffers, {type: 'application/binary'});\n",
              "      const a = document.createElement('a');\n",
              "      a.href = window.URL.createObjectURL(blob);\n",
              "      a.download = filename;\n",
              "      div.appendChild(a);\n",
              "      a.click();\n",
              "      div.remove();\n",
              "    }\n",
              "  "
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "download(\"download_268b7880-f3f1-474e-9020-cce1f719d142\", \"test_bounding_boxes.csv\", 103175)"
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "\n",
              "    async function download(id, filename, size) {\n",
              "      if (!google.colab.kernel.accessAllowed) {\n",
              "        return;\n",
              "      }\n",
              "      const div = document.createElement('div');\n",
              "      const label = document.createElement('label');\n",
              "      label.textContent = `Downloading \"${filename}\": `;\n",
              "      div.appendChild(label);\n",
              "      const progress = document.createElement('progress');\n",
              "      progress.max = size;\n",
              "      div.appendChild(progress);\n",
              "      document.body.appendChild(div);\n",
              "\n",
              "      const buffers = [];\n",
              "      let downloaded = 0;\n",
              "\n",
              "      const channel = await google.colab.kernel.comms.open(id);\n",
              "      // Send a message to notify the kernel that we're ready.\n",
              "      channel.send({})\n",
              "\n",
              "      for await (const message of channel.messages) {\n",
              "        // Send a message to notify the kernel that we're ready.\n",
              "        channel.send({})\n",
              "        if (message.buffers) {\n",
              "          for (const buffer of message.buffers) {\n",
              "            buffers.push(buffer);\n",
              "            downloaded += buffer.byteLength;\n",
              "            progress.value = downloaded;\n",
              "          }\n",
              "        }\n",
              "      }\n",
              "      const blob = new Blob(buffers, {type: 'application/binary'});\n",
              "      const a = document.createElement('a');\n",
              "      a.href = window.URL.createObjectURL(blob);\n",
              "      a.download = filename;\n",
              "      div.appendChild(a);\n",
              "      a.click();\n",
              "      div.remove();\n",
              "    }\n",
              "  "
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "download(\"download_a0620680-2089-444a-8d6f-422a6d4d9a06\", \"test_crop.zip\", 18923659)"
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "PWJhkbG6l_N0"
      },
      "source": [
        "From the bounding boxes we create a new cropped dataset."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "tnn--DX8UjEH",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "985a26fe-00e4-44bd-f3e3-5fc72a6a000c"
      },
      "source": [
        "import re #regex\n",
        "\n",
        "train_bb = pd.read_csv(\"output_retina/train_bounding_boxes.csv\", sep=\";\")\n",
        "val_bb = pd.read_csv(\"output_retina/val_bounding_boxes.csv\", sep=\";\")\n",
        "test_bb = pd.read_csv(\"output_retina/test_bounding_boxes.csv\", sep=\";\")\n",
        "\n",
        "for set_bb in [train_bb, val_bb, test_bb]:\n",
        "  prev_row = '___'\n",
        "  prob_max = 0.0\n",
        "  for row in set_bb.values:\n",
        "    if row[0] != prev_row:\n",
        "      if prev_row != '___':\n",
        "        im = Image.open(prev_row)\n",
        "        if bb_string != '[0,0,0,0]':\n",
        "          bb = [int(s) for s in re.findall(r'\\d+', bb_string)]\n",
        "          im = im.crop(bb)\n",
        "        else:\n",
        "          print('no bounding-box found for '+ prev_row +' ...')\n",
        "        im.save('output_crop/' + prev_row)\n",
        "      prob_max = 0.0\n",
        "      bb_string = '[0,0,0,0]'\n",
        "    if row[2] > prob_max:\n",
        "      prob_max = row[2]\n",
        "      bb_string = row[1]\n",
        "    prev_row = row[0]"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "no bounding-box found for bird_dataset/train_images/020.Yellow_breasted_Chat/Yellow_Breasted_Chat_0073_21932.jpg ...\n",
            "no bounding-box found for bird_dataset/test_images/mistery_category/1b62fffcbf47a4f9e32b400edc662f1f.jpg ...\n",
            "no bounding-box found for bird_dataset/test_images/mistery_category/64f3fa85502e9cff91d6dc88f54be7cb.jpg ...\n",
            "no bounding-box found for bird_dataset/test_images/mistery_category/8ede0bc5a4976385dcfe6e38feaf90c2.jpg ...\n",
            "no bounding-box found for bird_dataset/test_images/mistery_category/a05ed5dd6cbd3097e81e3c76ac690465.jpg ...\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "dYiN-m83s8wF",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 17
        },
        "outputId": "e97b5290-0118-4699-cdcc-784af522d365"
      },
      "source": [
        "!zip -qr output_crop/cropped_dataset output_crop/bird_dataset/\n",
        "files.download(\"output_crop/cropped_dataset.zip\")"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "\n",
              "    async function download(id, filename, size) {\n",
              "      if (!google.colab.kernel.accessAllowed) {\n",
              "        return;\n",
              "      }\n",
              "      const div = document.createElement('div');\n",
              "      const label = document.createElement('label');\n",
              "      label.textContent = `Downloading \"${filename}\": `;\n",
              "      div.appendChild(label);\n",
              "      const progress = document.createElement('progress');\n",
              "      progress.max = size;\n",
              "      div.appendChild(progress);\n",
              "      document.body.appendChild(div);\n",
              "\n",
              "      const buffers = [];\n",
              "      let downloaded = 0;\n",
              "\n",
              "      const channel = await google.colab.kernel.comms.open(id);\n",
              "      // Send a message to notify the kernel that we're ready.\n",
              "      channel.send({})\n",
              "\n",
              "      for await (const message of channel.messages) {\n",
              "        // Send a message to notify the kernel that we're ready.\n",
              "        channel.send({})\n",
              "        if (message.buffers) {\n",
              "          for (const buffer of message.buffers) {\n",
              "            buffers.push(buffer);\n",
              "            downloaded += buffer.byteLength;\n",
              "            progress.value = downloaded;\n",
              "          }\n",
              "        }\n",
              "      }\n",
              "      const blob = new Blob(buffers, {type: 'application/binary'});\n",
              "      const a = document.createElement('a');\n",
              "      a.href = window.URL.createObjectURL(blob);\n",
              "      a.download = filename;\n",
              "      div.appendChild(a);\n",
              "      a.click();\n",
              "      div.remove();\n",
              "    }\n",
              "  "
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "application/javascript": [
              "download(\"download_59ef54d1-c704-4c36-98b5-b5a2906df6f1\", \"cropped_dataset.zip\", 18602163)"
            ],
            "text/plain": [
              "<IPython.core.display.Javascript object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "bVL2MJbltQKE"
      },
      "source": [
        "We can then manually inspect the cropped dataset and see that it behaves very well. We also manually cropped the 4 images of the test set where no boundind boxes were found. The new dataset can be stored and used from now on."
      ]
    },
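    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "A minimal sketch of such a visual check (added for illustration, not part of the original pipeline): it assumes the crops were written under `output_crop/bird_dataset/` mirroring the original folder layout, and simply displays a few of them side by side."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {},
      "source": [
        "import glob\n",
        "import matplotlib.pyplot as plt\n",
        "from PIL import Image\n",
        "\n",
        "# Display a handful of cropped training images to eyeball the bounding boxes.\n",
        "# The glob pattern is an assumption about where the crops were saved.\n",
        "sample_paths = sorted(glob.glob('output_crop/bird_dataset/train_images/*/*.jpg'))[:6]\n",
        "fig, axes = plt.subplots(1, len(sample_paths), figsize=(3 * len(sample_paths), 3), squeeze=False)\n",
        "for ax, path in zip(axes[0], sample_paths):\n",
        "    ax.imshow(Image.open(path))\n",
        "    ax.set_title(path.split('/')[-2][:15], fontsize=8)\n",
        "    ax.axis('off')\n",
        "plt.show()"
      ],
      "execution_count": null,
      "outputs": []
    },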
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "IFqbZeJL34k1"
      },
      "source": [
        "# Feature extraction and classification"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "GQchbVoY4AMz"
      },
      "source": [
        "Upload the cropped dataset zip previously downloaded and unzip it. "
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "2V9dJeDhX6hW",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "e21c5233-4844-4c3c-cced-9796795cec78"
      },
      "source": [
        "print('uncompressing...')\n",
        "#q quiet o overwrite\n",
        "!unzip -qo cropped_dataset.zip\n",
        "print('done!')"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "uncompressing...\n",
            "done!\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "xX4Cl7ocAFXE"
      },
      "source": [
        "From the dataset, several methods can be tried out:"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "wJp7lWkRGZ5P"
      },
      "source": [
        "## Simple finetuning of different models\n",
        "https://pytorch.org/tutorials/beginner/finetuning_torchvision_models_tutorial.html\n",
        "(only last layer parameters are optimized)"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "-t5oJXtbeoj0"
      },
      "source": [
        "def set_parameter_requires_grad(model, feature_extracting):\n",
        "    if feature_extracting:\n",
        "        for param in model.parameters():\n",
        "            param.requires_grad = False"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "mLdeJKM5XSHa"
      },
      "source": [
        "model_name = 'resnext101'\n",
        "num_classes = 20\n",
        "batch_size = 64\n",
        "num_epochs = 50\n",
        "feature_extract = True\n",
        "use_pretrained = True\n",
        "\n",
        "def initialize_model(model_name, num_classes, feature_extract, use_pretrained=True):\n",
        "    # Initialize these variables which will be set in this if statement. Each of these\n",
        "    #   variables is model specific.\n",
        "    model_ft = None\n",
        "    input_size = 0\n",
        "\n",
        "    if model_name == \"resnet\":\n",
        "        \"\"\" Resnet18\n",
        "        \"\"\"\n",
        "        model_ft = models.resnet18(pretrained=use_pretrained)\n",
        "        set_parameter_requires_grad(model_ft, feature_extract)\n",
        "        num_ftrs = model_ft.fc.in_features\n",
        "        model_ft.fc = nn.Linear(num_ftrs, num_classes)\n",
        "        input_size = 224\n",
        "\n",
        "    if model_name == \"resnet101\":\n",
        "        \"\"\" Resnet101\n",
        "        \"\"\"\n",
        "        model_ft = models.resnet101(pretrained=use_pretrained)\n",
        "        set_parameter_requires_grad(model_ft, feature_extract)\n",
        "        num_ftrs = model_ft.fc.in_features\n",
        "        model_ft.fc = nn.Linear(num_ftrs, num_classes)\n",
        "        input_size = 224\n",
        "\n",
        "    if model_name == \"resnet152\":\n",
        "        \"\"\" ResNet-152\n",
        "        \"\"\"\n",
        "        model_ft = models.resnet152(pretrained=use_pretrained)\n",
        "        set_parameter_requires_grad(model_ft, feature_extract)\n",
        "        num_ftrs = model_ft.fc.in_features\n",
        "        model_ft.fc = nn.Linear(num_ftrs, num_classes)\n",
        "        input_size = 224\n",
        "\n",
        "    elif model_name == \"resnext101\":\n",
        "        \"\"\" ResNeXt-101\n",
        "        \"\"\"\n",
        "        model_ft = models.resnext101_32x8d(pretrained=use_pretrained)\n",
        "        set_parameter_requires_grad(model_ft, feature_extract)\n",
        "        num_ftrs = model_ft.fc.in_features\n",
        "        model_ft.fc = nn.Linear(num_ftrs,num_classes)\n",
        "        input_size = 299\n",
        "\n",
        "    elif model_name == \"inception\":\n",
        "        \"\"\" Inception v3\n",
        "        Be careful, expects (299,299) sized images and has auxiliary output\n",
        "        \"\"\"\n",
        "        model_ft = models.inception_v3(pretrained=use_pretrained)\n",
        "        set_parameter_requires_grad(model_ft, feature_extract)\n",
        "        # Handle the auxilary net\n",
        "        num_ftrs = model_ft.AuxLogits.fc.in_features\n",
        "        model_ft.AuxLogits.fc = nn.Linear(num_ftrs, num_classes)\n",
        "        # Handle the primary net\n",
        "        num_ftrs = model_ft.fc.in_features\n",
        "        model_ft.fc = nn.Linear(num_ftrs,num_classes)\n",
        "        input_size = 299\n",
        "\n",
        "    else:\n",
        "        print(\"Invalid model name, exiting...\")\n",
        "        exit()\n",
        "\n",
        "    return model_ft, input_size"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "dim7DT0FXgXa"
      },
      "source": [
        "model_ft, input_size = initialize_model(model_name, num_classes, feature_extract, use_pretrained=True)"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "ck9hpXW6Amk7"
      },
      "source": [
        "To finetune a little better the network, the last module (layer 4) is also trained"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "2izd-3cOGQrE"
      },
      "source": [
        "# Does not work for inception\n",
        "def set_parameter_requires_grad(layer):\n",
        "    for param in layer.parameters():\n",
        "        param.requires_grad = True\n",
        "\n",
        "set_parameter_requires_grad(model_ft.layer4)"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "WAyUMB7FCIBe"
      },
      "source": [
        "data_transforms = {\n",
        "    'train_images': transforms.Compose([\n",
        "        transforms.Resize((input_size,input_size)),\n",
        "        transforms.RandomHorizontalFlip(),\n",
        "        transforms.RandomRotation(180),\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
        "    ]),\n",
        "    'val_images': transforms.Compose([\n",
        "        transforms.Resize((input_size,input_size)),\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
        "    ]),\n",
        "}\n",
        "\n",
        "data_dir = 'cropped_dataset'\n",
        "# Create training and validation datasets\n",
        "image_datasets = {x: datasets.ImageFolder(os.path.join(data_dir, x), data_transforms[x]) for x in ['train_images', 'val_images']}\n",
        "# Create training and validation dataloaders\n",
        "dataloaders_dict = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=batch_size, shuffle=True, num_workers=4) for x in ['train_images', 'val_images']}\n",
        "\n",
        "# Detect if we have a GPU available\n",
        "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "5I6X9V8tgrBY",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "d2089e6f-3271-4e8a-dcc9-ab98acf55aa5"
      },
      "source": [
        "# Send the model to GPU\n",
        "model_ft = model_ft.to(device)\n",
        "\n",
        "# Gather the parameters to be optimized/updated in this run. If we are\n",
        "#  finetuning we will be updating all parameters. However, if we are\n",
        "#  doing feature extract method, we will only update the parameters\n",
        "#  that we have just initialized, i.e. the parameters with requires_grad\n",
        "#  is True.\n",
        "params_to_update = model_ft.parameters()\n",
        "print(\"Params to learn:\")\n",
        "if feature_extract:\n",
        "    params_to_update = []\n",
        "    for name,param in model_ft.named_parameters():\n",
        "        if param.requires_grad == True:\n",
        "            params_to_update.append(param)\n",
        "            print(\"\\t\",name)\n",
        "else:\n",
        "    for name,param in model_ft.named_parameters():\n",
        "        if param.requires_grad == True:\n",
        "            print(\"\\t\",name)"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Params to learn:\n",
            "\t layer4.0.conv1.weight\n",
            "\t layer4.0.bn1.weight\n",
            "\t layer4.0.bn1.bias\n",
            "\t layer4.0.conv2.weight\n",
            "\t layer4.0.bn2.weight\n",
            "\t layer4.0.bn2.bias\n",
            "\t layer4.0.conv3.weight\n",
            "\t layer4.0.bn3.weight\n",
            "\t layer4.0.bn3.bias\n",
            "\t layer4.0.downsample.0.weight\n",
            "\t layer4.0.downsample.1.weight\n",
            "\t layer4.0.downsample.1.bias\n",
            "\t layer4.1.conv1.weight\n",
            "\t layer4.1.bn1.weight\n",
            "\t layer4.1.bn1.bias\n",
            "\t layer4.1.conv2.weight\n",
            "\t layer4.1.bn2.weight\n",
            "\t layer4.1.bn2.bias\n",
            "\t layer4.1.conv3.weight\n",
            "\t layer4.1.bn3.weight\n",
            "\t layer4.1.bn3.bias\n",
            "\t layer4.2.conv1.weight\n",
            "\t layer4.2.bn1.weight\n",
            "\t layer4.2.bn1.bias\n",
            "\t layer4.2.conv2.weight\n",
            "\t layer4.2.bn2.weight\n",
            "\t layer4.2.bn2.bias\n",
            "\t layer4.2.conv3.weight\n",
            "\t layer4.2.bn3.weight\n",
            "\t layer4.2.bn3.bias\n",
            "\t fc.weight\n",
            "\t fc.bias\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "emrh__DtKJvE"
      },
      "source": [
        "# Observe that all parameters are being optimized\n",
        "# optimizer_ft = optim.Adam(params_to_update, lr = 0.001)\n",
        "optimizer_ft = optim.SGD(params_to_update, lr = 0.01 , momentum=0.9, weight_decay=3.0e-4)\n",
        "# Setup the loss fxn\n",
        "criterion = nn.CrossEntropyLoss()\n",
        "# # Cosine annealing\n",
        "# lr_scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer_ft, num_epochs)"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "GA4G8g2dIMg0"
      },
      "source": [
        "def train_model(model, dataloaders, criterion, optimizer, num_epochs=25, is_inception=False):\n",
        "    val_acc_history = []\n",
        "\n",
        "    best_model_wts = copy.deepcopy(model.state_dict())\n",
        "    best_acc = 0.0\n",
        "\n",
        "    for epoch in range(num_epochs):\n",
        "        print('Epoch {}/{}'.format(epoch, num_epochs - 1))\n",
        "        print('-' * 10)\n",
        "\n",
        "        for phase in ['train_images', 'val_images']:\n",
        "            if phase == 'train_images':\n",
        "                model.train()  # Set model to training mode\n",
        "            else:\n",
        "                model.eval()   # Set model to evaluate mode\n",
        "            running_loss = 0.0\n",
        "            running_corrects = 0\n",
        "            # Iterate over data.\n",
        "            for inputs, labels in dataloaders[phase]:\n",
        "                inputs = inputs.to(device)\n",
        "                labels = labels.to(device)\n",
        "                # zero the parameter gradients\n",
        "                optimizer.zero_grad()\n",
        "                # forward\n",
        "                # track history if only in train\n",
        "                with torch.set_grad_enabled(phase == 'train_images'):\n",
        "                    # Get model outputs and calculate loss\n",
        "                    if is_inception and phase == 'train_images':\n",
        "                        # From https://discuss.pytorch.org/t/how-to-optimize-inception-model-with-auxiliary-classifiers/7958\n",
        "                        outputs, aux_outputs = model(inputs)\n",
        "                        loss1 = criterion(outputs, labels)\n",
        "                        loss2 = criterion(aux_outputs, labels)\n",
        "                        loss = loss1 + 0.4*loss2\n",
        "                    else:\n",
        "                        outputs = model(inputs)\n",
        "                        loss = criterion(outputs, labels)\n",
        "                    _, preds = torch.max(outputs, 1)\n",
        "                    # backward + optimize only if in training phase\n",
        "                    if phase == 'train_images':\n",
        "                        loss.backward()\n",
        "                        optimizer.step()\n",
        "                # statistics\n",
        "                running_loss += loss.item() * inputs.size(0)\n",
        "                running_corrects += torch.sum(preds == labels.data)\n",
        "\n",
        "            epoch_loss = running_loss / len(dataloaders[phase].dataset)\n",
        "            epoch_acc = running_corrects.double() / len(dataloaders[phase].dataset)\n",
        "            print('{} Loss: {:.4f} Acc: {:.4f}'.format(phase, epoch_loss, epoch_acc))\n",
        "\n",
        "            # deep copy the model\n",
        "            if phase == 'val_images' and epoch_acc > best_acc:\n",
        "                best_acc = epoch_acc\n",
        "                best_model_wts = copy.deepcopy(model.state_dict())\n",
        "            if phase == 'val_images':\n",
        "                val_acc_history.append(epoch_acc)\n",
        "\n",
        "        print()\n",
        "\n",
        "    print('Best val Acc: {:4f}'.format(best_acc))\n",
        "\n",
        "    # load best model weights\n",
        "    model.load_state_dict(best_model_wts)\n",
        "    return model, val_acc_history"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "IiKR4F-YIogX",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1000
        },
        "outputId": "34aff03b-65c9-4bbc-a9a3-3b15293f49dd"
      },
      "source": [
        "# Train and evaluate\n",
        "model_ft, hist = train_model(model_ft, dataloaders_dict, criterion, optimizer_ft, num_epochs=num_epochs, is_inception=(model_name==\"inception\"))"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Epoch 0/49\n",
            "----------\n",
            "train_images Loss: 2.8804 Acc: 0.1340\n",
            "val_images Loss: 2.5311 Acc: 0.3398\n",
            "\n",
            "Epoch 1/49\n",
            "----------\n",
            "train_images Loss: 1.9332 Acc: 0.5416\n",
            "val_images Loss: 1.2251 Acc: 0.6796\n",
            "\n",
            "Epoch 2/49\n",
            "----------\n",
            "train_images Loss: 0.9923 Acc: 0.7338\n",
            "val_images Loss: 0.6763 Acc: 0.7476\n",
            "\n",
            "Epoch 3/49\n",
            "----------\n",
            "train_images Loss: 0.6329 Acc: 0.7967\n",
            "val_images Loss: 0.5519 Acc: 0.8544\n",
            "\n",
            "Epoch 4/49\n",
            "----------\n",
            "train_images Loss: 0.4497 Acc: 0.8457\n",
            "val_images Loss: 0.4836 Acc: 0.8252\n",
            "\n",
            "Epoch 5/49\n",
            "----------\n",
            "train_images Loss: 0.3337 Acc: 0.8909\n",
            "val_images Loss: 0.4323 Acc: 0.8350\n",
            "\n",
            "Epoch 6/49\n",
            "----------\n",
            "train_images Loss: 0.2632 Acc: 0.9131\n",
            "val_images Loss: 0.4347 Acc: 0.8835\n",
            "\n",
            "Epoch 7/49\n",
            "----------\n",
            "train_images Loss: 0.2041 Acc: 0.9372\n",
            "val_images Loss: 0.4713 Acc: 0.8350\n",
            "\n",
            "Epoch 8/49\n",
            "----------\n",
            "train_images Loss: 0.1751 Acc: 0.9529\n",
            "val_images Loss: 0.4074 Acc: 0.8447\n",
            "\n",
            "Epoch 9/49\n",
            "----------\n",
            "train_images Loss: 0.1451 Acc: 0.9584\n",
            "val_images Loss: 0.4297 Acc: 0.8544\n",
            "\n",
            "Epoch 10/49\n",
            "----------\n",
            "train_images Loss: 0.1042 Acc: 0.9723\n",
            "val_images Loss: 0.4069 Acc: 0.8350\n",
            "\n",
            "Epoch 11/49\n",
            "----------\n",
            "train_images Loss: 0.0864 Acc: 0.9815\n",
            "val_images Loss: 0.4084 Acc: 0.8544\n",
            "\n",
            "Epoch 12/49\n",
            "----------\n",
            "train_images Loss: 0.0714 Acc: 0.9852\n",
            "val_images Loss: 0.5412 Acc: 0.8350\n",
            "\n",
            "Epoch 13/49\n",
            "----------\n",
            "train_images Loss: 0.0595 Acc: 0.9908\n",
            "val_images Loss: 0.3865 Acc: 0.8738\n",
            "\n",
            "Epoch 14/49\n",
            "----------\n",
            "train_images Loss: 0.0472 Acc: 0.9917\n",
            "val_images Loss: 0.3994 Acc: 0.8738\n",
            "\n",
            "Epoch 15/49\n",
            "----------\n",
            "train_images Loss: 0.0403 Acc: 0.9935\n",
            "val_images Loss: 0.4175 Acc: 0.8447\n",
            "\n",
            "Epoch 16/49\n",
            "----------\n",
            "train_images Loss: 0.0345 Acc: 0.9945\n",
            "val_images Loss: 0.4139 Acc: 0.8544\n",
            "\n",
            "Epoch 17/49\n",
            "----------\n",
            "train_images Loss: 0.0354 Acc: 0.9880\n",
            "val_images Loss: 0.4263 Acc: 0.8932\n",
            "\n",
            "Epoch 18/49\n",
            "----------\n",
            "train_images Loss: 0.0271 Acc: 0.9945\n",
            "val_images Loss: 0.3973 Acc: 0.8835\n",
            "\n",
            "Epoch 19/49\n",
            "----------\n",
            "train_images Loss: 0.0264 Acc: 0.9963\n",
            "val_images Loss: 0.3964 Acc: 0.8835\n",
            "\n",
            "Epoch 20/49\n",
            "----------\n",
            "train_images Loss: 0.0231 Acc: 0.9991\n",
            "val_images Loss: 0.3829 Acc: 0.8835\n",
            "\n",
            "Epoch 21/49\n",
            "----------\n",
            "train_images Loss: 0.0240 Acc: 0.9963\n",
            "val_images Loss: 0.4126 Acc: 0.8738\n",
            "\n",
            "Epoch 22/49\n",
            "----------\n",
            "train_images Loss: 0.0180 Acc: 0.9982\n",
            "val_images Loss: 0.4145 Acc: 0.8738\n",
            "\n",
            "Epoch 23/49\n",
            "----------\n",
            "train_images Loss: 0.0257 Acc: 0.9954\n",
            "val_images Loss: 0.3796 Acc: 0.8738\n",
            "\n",
            "Epoch 24/49\n",
            "----------\n",
            "train_images Loss: 0.0178 Acc: 0.9972\n",
            "val_images Loss: 0.4228 Acc: 0.8641\n",
            "\n",
            "Epoch 25/49\n",
            "----------\n",
            "train_images Loss: 0.0163 Acc: 0.9972\n",
            "val_images Loss: 0.4256 Acc: 0.8641\n",
            "\n",
            "Epoch 26/49\n",
            "----------\n",
            "train_images Loss: 0.0125 Acc: 0.9982\n",
            "val_images Loss: 0.4175 Acc: 0.8641\n",
            "\n",
            "Epoch 27/49\n",
            "----------\n",
            "train_images Loss: 0.0215 Acc: 0.9935\n",
            "val_images Loss: 0.5210 Acc: 0.8835\n",
            "\n",
            "Epoch 28/49\n",
            "----------\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "error",
          "ename": "KeyboardInterrupt",
          "evalue": "ignored",
          "traceback": [
            "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
            "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
            "\u001b[0;32m<ipython-input-193-7da8a309b22f>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[0;31m# Train and evaluate\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mmodel_ft\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhist\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrain_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel_ft\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdataloaders_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcriterion\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moptimizer_ft\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_epochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnum_epochs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mis_inception\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel_name\u001b[0m\u001b[0;34m==\u001b[0m\u001b[0;34m\"inception\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
            "\u001b[0;32m<ipython-input-192-3d20d3481211>\u001b[0m in \u001b[0;36mtrain_model\u001b[0;34m(model, dataloaders, criterion, optimizer, num_epochs, is_inception)\u001b[0m\n\u001b[1;32m     17\u001b[0m             \u001b[0mrunning_corrects\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     18\u001b[0m             \u001b[0;31m# Iterate over data.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 19\u001b[0;31m             \u001b[0;32mfor\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mdataloaders\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mphase\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     20\u001b[0m                 \u001b[0minputs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     21\u001b[0m                 \u001b[0mlabels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlabels\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    433\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_sampler_iter\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    434\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_reset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 435\u001b[0;31m         \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_next_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    436\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_num_yielded\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    437\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dataset_kind\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0m_DatasetKind\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mIterable\u001b[0m \u001b[0;32mand\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m\\\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m_next_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1066\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1067\u001b[0m             \u001b[0;32massert\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_shutdown\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_tasks_outstanding\u001b[0m \u001b[0;34m>\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1068\u001b[0;31m             \u001b[0midx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1069\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_tasks_outstanding\u001b[0m \u001b[0;34m-=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1070\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_dataset_kind\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0m_DatasetKind\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mIterable\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m_get_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1032\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1033\u001b[0m             \u001b[0;32mwhile\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1034\u001b[0;31m                 \u001b[0msuccess\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_try_get_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1035\u001b[0m                 \u001b[0;32mif\u001b[0m \u001b[0msuccess\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1036\u001b[0m                     \u001b[0;32mreturn\u001b[0m \u001b[0mdata\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/torch/utils/data/dataloader.py\u001b[0m in \u001b[0;36m_try_get_data\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    870\u001b[0m         \u001b[0;31m#   (bool: whether successfully get data, any: data if successful else None)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    871\u001b[0m         \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 872\u001b[0;31m             \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_data_queue\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    873\u001b[0m             \u001b[0;32mreturn\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdata\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    874\u001b[0m         \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/lib/python3.6/multiprocessing/queues.py\u001b[0m in \u001b[0;36mget\u001b[0;34m(self, block, timeout)\u001b[0m\n\u001b[1;32m    102\u001b[0m                 \u001b[0;32mif\u001b[0m \u001b[0mblock\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    103\u001b[0m                     \u001b[0mtimeout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdeadline\u001b[0m \u001b[0;34m-\u001b[0m \u001b[0mtime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmonotonic\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 104\u001b[0;31m                     \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_poll\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    105\u001b[0m                         \u001b[0;32mraise\u001b[0m \u001b[0mEmpty\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    106\u001b[0m                 \u001b[0;32melif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_poll\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/lib/python3.6/multiprocessing/connection.py\u001b[0m in \u001b[0;36mpoll\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    255\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_check_closed\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    256\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_check_readable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 257\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_poll\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    258\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    259\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m__enter__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/lib/python3.6/multiprocessing/connection.py\u001b[0m in \u001b[0;36m_poll\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    412\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    413\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m_poll\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 414\u001b[0;31m         \u001b[0mr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mwait\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    415\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mbool\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    416\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/lib/python3.6/multiprocessing/connection.py\u001b[0m in \u001b[0;36mwait\u001b[0;34m(object_list, timeout)\u001b[0m\n\u001b[1;32m    909\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    910\u001b[0m             \u001b[0;32mwhile\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 911\u001b[0;31m                 \u001b[0mready\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mselector\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mselect\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    912\u001b[0m                 \u001b[0;32mif\u001b[0m \u001b[0mready\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    913\u001b[0m                     \u001b[0;32mreturn\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfileobj\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mevents\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mready\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/lib/python3.6/selectors.py\u001b[0m in \u001b[0;36mselect\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    374\u001b[0m             \u001b[0mready\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    375\u001b[0m             \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 376\u001b[0;31m                 \u001b[0mfd_event_list\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_poll\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpoll\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    377\u001b[0m             \u001b[0;32mexcept\u001b[0m \u001b[0mInterruptedError\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    378\u001b[0m                 \u001b[0;32mreturn\u001b[0m \u001b[0mready\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
          ]
        }
      ]
    },
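    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "`train_model` returns the per-epoch validation accuracies in `hist`, which makes it easy to look at the training curve. A minimal sketch, added for illustration: it assumes the training cell above ran to completion (in the run captured above it was interrupted, so `hist` would not have been assigned)."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {},
      "source": [
        "import matplotlib.pyplot as plt\n",
        "\n",
        "# hist holds one accuracy value (a tensor) per epoch; convert to plain floats.\n",
        "val_acc = [a.item() if hasattr(a, 'item') else float(a) for a in hist]\n",
        "plt.plot(range(len(val_acc)), val_acc, marker='o')\n",
        "plt.xlabel('epoch')\n",
        "plt.ylabel('validation accuracy')\n",
        "plt.title('Validation accuracy per epoch')\n",
        "plt.show()"
      ],
      "execution_count": null,
      "outputs": []
    },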
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "VppaUICLG1KB"
      },
      "source": [
        "## More sophisticated finetuning \n",
        "(defining a new classifier model at the end of the previous network)"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "2hxDm_O3Iuqw"
      },
      "source": [
        "def set_parameter_requires_grad(model, feature_extracting):\n",
        "    if feature_extracting:\n",
        "        for param in model.parameters():\n",
        "            param.requires_grad = False"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "y7s8G5_fIuqx"
      },
      "source": [
        "model_name = \"resnet152\"\n",
        "num_classes = 20\n",
        "batch_size = 64\n",
        "num_epochs = 30\n",
        "feature_extract = True\n",
        "use_pretrained = True\n",
        "\n",
        "def initialize_model(model_name, num_classes, feature_extract, use_pretrained=True):\n",
        "    # Initialize these variables which will be set in this if statement. Each of these\n",
        "    #   variables is model specific.\n",
        "    model_ft = None\n",
        "    input_size = 0\n",
        "\n",
        "    if model_name == \"resnet101\":\n",
        "        \"\"\" Resnet101\n",
        "        \"\"\"\n",
        "        model_ft = models.resnet101(pretrained=use_pretrained)\n",
        "        set_parameter_requires_grad(model_ft, feature_extract)\n",
        "        num_ftrs = model_ft.fc.in_features\n",
        "        model_ft.fc = nn.Sequential(\n",
        "            nn.BatchNorm1d(num_ftrs),\n",
        "            nn.Dropout(p=0.25),\n",
        "            nn.Linear(in_features=2048, out_features=4096),\n",
        "            nn.ReLU(),\n",
        "            nn.BatchNorm1d(4096, eps=1e-05, momentum=0.1),\n",
        "            nn.Dropout(p=0.5),\n",
        "            nn.Linear(in_features=4096, out_features=num_classes),\n",
        "        )\n",
        "        input_size = 224\n",
        "\n",
        "    elif model_name == \"resnet152\":\n",
        "        \"\"\" ResNet-152\n",
        "        \"\"\"\n",
        "        model_ft = models.resnet152(pretrained=use_pretrained)\n",
        "        set_parameter_requires_grad(model_ft, feature_extract)\n",
        "        num_ftrs = model_ft.fc.in_features\n",
        "        model_ft.fc = nn.Sequential(\n",
        "            nn.BatchNorm1d(num_ftrs),\n",
        "            nn.Dropout(p=0.25),\n",
        "            nn.Linear(in_features=2048, out_features=2048),\n",
        "            nn.ReLU(),\n",
        "            nn.BatchNorm1d(2048, eps=1e-05, momentum=0.1),\n",
        "            nn.Dropout(p=0.5),\n",
        "            nn.Linear(in_features=2048, out_features=num_classes),\n",
        "        )\n",
        "        input_size = 224\n",
        "\n",
        "    elif model_name == \"resnext101\":\n",
        "        \"\"\" ResNeXt-101\n",
        "        \"\"\"\n",
        "        model_ft = models.resnext101_32x8d(pretrained=use_pretrained)\n",
        "        set_parameter_requires_grad(model_ft, feature_extract)\n",
        "        num_ftrs = model_ft.fc.in_features\n",
        "        model_ft.fc = nn.Sequential(\n",
        "            nn.BatchNorm1d(num_ftrs),\n",
        "            nn.Dropout(p=0.25),\n",
        "            nn.Linear(in_features=2048, out_features=4096),\n",
        "            nn.ReLU(),\n",
        "            nn.BatchNorm1d(4096, eps=1e-05, momentum=0.1),\n",
        "            nn.Dropout(p=0.5),\n",
        "            nn.Linear(in_features=4096, out_features=num_classes),\n",
        "        )\n",
        "        input_size = 299\n",
        "        \n",
        "    else:\n",
        "        print(\"Invalid model name, exiting...\")\n",
        "        exit()\n",
        "\n",
        "    return model_ft, input_size"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "0JG9xLYrIuqy"
      },
      "source": [
        "model_ft, input_size = initialize_model(model_name, num_classes, feature_extract, use_pretrained=True)"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "QhpIyErBIuqy"
      },
      "source": [
        "data_transforms = {\n",
        "    'train_images': transforms.Compose([\n",
        "        transforms.RandomRotation(90),\n",
        "        transforms.RandomHorizontalFlip(),\n",
        "        transforms.Resize(input_size),\n",
        "        transforms.CenterCrop(input_size),\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
        "    ]),\n",
        "    'val_images': transforms.Compose([\n",
        "        transforms.Resize(input_size),\n",
        "        transforms.CenterCrop(input_size),\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
        "    ]),\n",
        "}\n",
        "\n",
        "\n",
        "# data_dir = 'cropped_dataset'\n",
        "data_dir = 'cropped_dataset'\n",
        "# Create training and validation datasets\n",
        "image_datasets = {x: datasets.ImageFolder(os.path.join(data_dir, x), data_transforms[x]) for x in ['train_images', 'val_images']}\n",
        "# Create training and validation dataloaders\n",
        "dataloaders_dict = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=batch_size, shuffle=True, num_workers=4) for x in ['train_images', 'val_images']}\n",
        "\n",
        "# Detect if we have a GPU available\n",
        "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "39EOO-5-k_qA"
      },
      "source": [
        "def set_parameter_requires_grad(layer):\n",
        "    for param in layer.parameters():\n",
        "        param.requires_grad = True\n",
        "\n",
        "set_parameter_requires_grad(model_ft.layer4)"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "Cg8fq3c-Iuqy",
        "outputId": "04156fb2-e833-4cf9-f59e-1b628920179a"
      },
      "source": [
        "# Send the model to GPU\n",
        "model_ft = model_ft.to(device)\n",
        "\n",
        "# Gather the parameters to be optimized/updated in this run. If we are\n",
        "#  finetuning we will be updating all parameters. However, if we are\n",
        "#  doing feature extract method, we will only update the parameters\n",
        "#  that we have just initialized, i.e. the parameters with requires_grad\n",
        "#  is True.\n",
        "params_to_update = model_ft.parameters()\n",
        "print(\"Params to learn:\")\n",
        "if feature_extract:\n",
        "    params_to_update = []\n",
        "    for name,param in model_ft.named_parameters():\n",
        "        if param.requires_grad == True:\n",
        "            params_to_update.append(param)\n",
        "            print(\"\\t\",name)\n",
        "else:\n",
        "    for name,param in model_ft.named_parameters():\n",
        "        if param.requires_grad == True:\n",
        "            print(\"\\t\",name)"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Params to learn:\n",
            "\t layer4.0.conv1.weight\n",
            "\t layer4.0.bn1.weight\n",
            "\t layer4.0.bn1.bias\n",
            "\t layer4.0.conv2.weight\n",
            "\t layer4.0.bn2.weight\n",
            "\t layer4.0.bn2.bias\n",
            "\t layer4.0.conv3.weight\n",
            "\t layer4.0.bn3.weight\n",
            "\t layer4.0.bn3.bias\n",
            "\t layer4.0.downsample.0.weight\n",
            "\t layer4.0.downsample.1.weight\n",
            "\t layer4.0.downsample.1.bias\n",
            "\t layer4.1.conv1.weight\n",
            "\t layer4.1.bn1.weight\n",
            "\t layer4.1.bn1.bias\n",
            "\t layer4.1.conv2.weight\n",
            "\t layer4.1.bn2.weight\n",
            "\t layer4.1.bn2.bias\n",
            "\t layer4.1.conv3.weight\n",
            "\t layer4.1.bn3.weight\n",
            "\t layer4.1.bn3.bias\n",
            "\t layer4.2.conv1.weight\n",
            "\t layer4.2.bn1.weight\n",
            "\t layer4.2.bn1.bias\n",
            "\t layer4.2.conv2.weight\n",
            "\t layer4.2.bn2.weight\n",
            "\t layer4.2.bn2.bias\n",
            "\t layer4.2.conv3.weight\n",
            "\t layer4.2.bn3.weight\n",
            "\t layer4.2.bn3.bias\n",
            "\t fc.0.weight\n",
            "\t fc.0.bias\n",
            "\t fc.2.weight\n",
            "\t fc.2.bias\n",
            "\t fc.4.weight\n",
            "\t fc.4.bias\n",
            "\t fc.6.weight\n",
            "\t fc.6.bias\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "8xWlBPaQIuq4"
      },
      "source": [
        "# optimizer_ft = optim.Adam(params_to_update)\n",
        "optimizer_ft = optim.SGD(params_to_update, lr = 0.001 , momentum=0.9, weight_decay=3.0e-4)\n",
        "# Setup the loss fxn\n",
        "criterion = nn.CrossEntropyLoss()\n",
        "# Scheduler\n",
        "scheduler = lr_scheduler.CosineAnnealingLR(optimizer_ft, num_epochs)\n",
        "# lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=10, gamma=0.1)"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "xxbdx7ROIuq5"
      },
      "source": [
        "def train_model(model, dataloaders, criterion, optimizer, scheduler=None, num_epochs=25, is_inception=False):\n",
        "    val_acc_history = []\n",
        "\n",
        "    best_model_wts = copy.deepcopy(model.state_dict())\n",
        "    best_acc = 0.0\n",
        "\n",
        "    for epoch in range(num_epochs):\n",
        "        # scheduler.step()\n",
        "        print('Epoch {}/{}'.format(epoch, num_epochs - 1))\n",
        "        print('-' * 10)\n",
        "\n",
        "        for phase in ['train_images', 'val_images']:\n",
        "            if phase == 'train_images':\n",
        "                model.train()  # Set model to training mode\n",
        "            else:\n",
        "                model.eval()   # Set model to evaluate mode\n",
        "            running_loss = 0.0\n",
        "            running_corrects = 0\n",
        "            # Iterate over data.\n",
        "            for inputs, labels in dataloaders[phase]:\n",
        "                inputs = inputs.to(device)\n",
        "                labels = labels.to(device)\n",
        "                # zero the parameter gradients\n",
        "                optimizer.zero_grad()\n",
        "                # forward\n",
        "                # track history if only in train\n",
        "                with torch.set_grad_enabled(phase == 'train_images'):\n",
        "                    # Get model outputs and calculate loss\n",
        "                    if is_inception and phase == 'train_images':\n",
        "                        # From https://discuss.pytorch.org/t/how-to-optimize-inception-model-with-auxiliary-classifiers/7958\n",
        "                        outputs, aux_outputs = model(inputs)\n",
        "                        loss1 = criterion(outputs, labels)\n",
        "                        loss2 = criterion(aux_outputs, labels)\n",
        "                        loss = loss1 + 0.4*loss2\n",
        "                    else:\n",
        "                        outputs = model(inputs)\n",
        "                        loss = criterion(outputs, labels)\n",
        "                    _, preds = torch.max(outputs, 1)\n",
        "                    # backward + optimize only if in training phase\n",
        "                    if phase == 'train_images':\n",
        "                        loss.backward()\n",
        "                        optimizer.step()\n",
        "                # statistics\n",
        "                running_loss += loss.item() * inputs.size(0)\n",
        "                running_corrects += torch.sum(preds == labels.data)\n",
        "\n",
        "            epoch_loss = running_loss / len(dataloaders[phase].dataset)\n",
        "            epoch_acc = running_corrects.double() / len(dataloaders[phase].dataset)\n",
        "            print('{} Loss: {:.4f} Acc: {:.4f}'.format(phase, epoch_loss, epoch_acc))\n",
        "\n",
        "            # deep copy the model\n",
        "            if phase == 'val_images' and epoch_acc > best_acc:\n",
        "                best_acc = epoch_acc\n",
        "                best_model_wts = copy.deepcopy(model.state_dict())\n",
        "            if phase == 'val_images':\n",
        "                val_acc_history.append(epoch_acc)\n",
        "\n",
        "        print()\n",
        "\n",
        "    print('Best val Acc: {:4f}'.format(best_acc))\n",
        "\n",
        "    # load best model weights\n",
        "    model.load_state_dict(best_model_wts)\n",
        "    return model, val_acc_history"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "QpT_MnuiIuq6",
        "outputId": "439e5e7d-7460-4187-93a4-16dea20526f3"
      },
      "source": [
        "# Train and evaluate\n",
        "model_ft, hist = train_model(model_ft, dataloaders_dict, criterion, optimizer_ft, scheduler, num_epochs=num_epochs, is_inception=(model_name==\"inception\"))"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Epoch 0/29\n",
            "----------\n",
            "train_images Loss: 2.9212 Acc: 0.1349\n",
            "val_images Loss: 2.3965 Acc: 0.5922\n",
            "\n",
            "Epoch 1/29\n",
            "----------\n",
            "train_images Loss: 1.6613 Acc: 0.5397\n",
            "val_images Loss: 1.2476 Acc: 0.7087\n",
            "\n",
            "Epoch 2/29\n",
            "----------\n",
            "train_images Loss: 1.1066 Acc: 0.6682\n",
            "val_images Loss: 0.8609 Acc: 0.7379\n",
            "\n",
            "Epoch 3/29\n",
            "----------\n",
            "train_images Loss: 0.8247 Acc: 0.7588\n",
            "val_images Loss: 0.7568 Acc: 0.8058\n",
            "\n",
            "Epoch 4/29\n",
            "----------\n",
            "train_images Loss: 0.7258 Acc: 0.7837\n",
            "val_images Loss: 0.7142 Acc: 0.8058\n",
            "\n",
            "Epoch 5/29\n",
            "----------\n",
            "train_images Loss: 0.6212 Acc: 0.8133\n",
            "val_images Loss: 0.6787 Acc: 0.7961\n",
            "\n",
            "Epoch 6/29\n",
            "----------\n",
            "train_images Loss: 0.5359 Acc: 0.8383\n",
            "val_images Loss: 0.6376 Acc: 0.8447\n",
            "\n",
            "Epoch 7/29\n",
            "----------\n",
            "train_images Loss: 0.4952 Acc: 0.8429\n",
            "val_images Loss: 0.6191 Acc: 0.8155\n",
            "\n",
            "Epoch 8/29\n",
            "----------\n",
            "train_images Loss: 0.4419 Acc: 0.8577\n",
            "val_images Loss: 0.5967 Acc: 0.8350\n",
            "\n",
            "Epoch 9/29\n",
            "----------\n",
            "train_images Loss: 0.4137 Acc: 0.8641\n",
            "val_images Loss: 0.6017 Acc: 0.8058\n",
            "\n",
            "Epoch 10/29\n",
            "----------\n",
            "train_images Loss: 0.3659 Acc: 0.8983\n",
            "val_images Loss: 0.5693 Acc: 0.8350\n",
            "\n",
            "Epoch 11/29\n",
            "----------\n",
            "train_images Loss: 0.3421 Acc: 0.8993\n",
            "val_images Loss: 0.5592 Acc: 0.8544\n",
            "\n",
            "Epoch 12/29\n",
            "----------\n",
            "train_images Loss: 0.2943 Acc: 0.9002\n",
            "val_images Loss: 0.5607 Acc: 0.8447\n",
            "\n",
            "Epoch 13/29\n",
            "----------\n",
            "train_images Loss: 0.3136 Acc: 0.8993\n",
            "val_images Loss: 0.5338 Acc: 0.8641\n",
            "\n",
            "Epoch 14/29\n",
            "----------\n",
            "train_images Loss: 0.2890 Acc: 0.9057\n",
            "val_images Loss: 0.5695 Acc: 0.8641\n",
            "\n",
            "Epoch 15/29\n",
            "----------\n",
            "train_images Loss: 0.2575 Acc: 0.9131\n",
            "val_images Loss: 0.5508 Acc: 0.8641\n",
            "\n",
            "Epoch 16/29\n",
            "----------\n",
            "train_images Loss: 0.2322 Acc: 0.9279\n",
            "val_images Loss: 0.5567 Acc: 0.8350\n",
            "\n",
            "Epoch 17/29\n",
            "----------\n",
            "train_images Loss: 0.2311 Acc: 0.9316\n",
            "val_images Loss: 0.5399 Acc: 0.8350\n",
            "\n",
            "Epoch 18/29\n",
            "----------\n",
            "train_images Loss: 0.2204 Acc: 0.9372\n",
            "val_images Loss: 0.5491 Acc: 0.8350\n",
            "\n",
            "Epoch 19/29\n",
            "----------\n",
            "train_images Loss: 0.1915 Acc: 0.9436\n",
            "val_images Loss: 0.5209 Acc: 0.8544\n",
            "\n",
            "Epoch 20/29\n",
            "----------\n",
            "train_images Loss: 0.1748 Acc: 0.9473\n",
            "val_images Loss: 0.5375 Acc: 0.8447\n",
            "\n",
            "Epoch 21/29\n",
            "----------\n",
            "train_images Loss: 0.1527 Acc: 0.9566\n",
            "val_images Loss: 0.5281 Acc: 0.8544\n",
            "\n",
            "Epoch 22/29\n",
            "----------\n",
            "train_images Loss: 0.1670 Acc: 0.9482\n",
            "val_images Loss: 0.5161 Acc: 0.8544\n",
            "\n",
            "Epoch 23/29\n",
            "----------\n",
            "train_images Loss: 0.1390 Acc: 0.9575\n",
            "val_images Loss: 0.5259 Acc: 0.8544\n",
            "\n",
            "Epoch 24/29\n",
            "----------\n",
            "train_images Loss: 0.1418 Acc: 0.9658\n",
            "val_images Loss: 0.5305 Acc: 0.8544\n",
            "\n",
            "Epoch 25/29\n",
            "----------\n",
            "train_images Loss: 0.1546 Acc: 0.9575\n",
            "val_images Loss: 0.5264 Acc: 0.8641\n",
            "\n",
            "Epoch 26/29\n",
            "----------\n",
            "train_images Loss: 0.1320 Acc: 0.9630\n",
            "val_images Loss: 0.5318 Acc: 0.8544\n",
            "\n",
            "Epoch 27/29\n",
            "----------\n",
            "train_images Loss: 0.1245 Acc: 0.9649\n",
            "val_images Loss: 0.5311 Acc: 0.8641\n",
            "\n",
            "Epoch 28/29\n",
            "----------\n",
            "train_images Loss: 0.1183 Acc: 0.9713\n",
            "val_images Loss: 0.5478 Acc: 0.8544\n",
            "\n",
            "Epoch 29/29\n",
            "----------\n",
            "train_images Loss: 0.1046 Acc: 0.9750\n",
            "val_images Loss: 0.5545 Acc: 0.8641\n",
            "\n",
            "Best val Acc: 0.864078\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "zalBY6f3sQ-5"
      },
      "source": [
        "# to save the model that we want\n",
        "torch.save(model_ft, 'best_model')"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "_T1GnOiCHahg"
      },
      "source": [
        "## Extract features, then create new clasifier models\n",
        "Large Scale Fine-Grained Categorization and Domain-Specific Transfer Learning, 2018 paper\n",
        "\n",
        "Code from https://github.com/richardaecn/cvpr18-inaturalist-transfer (several modifications have been used to make it compatible with colab)"
      ]
    },
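    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "The idea of this section is to run every image once through a pretrained backbone, keep the pooled features, and then fit a light classifier on top of them. As a rough, hypothetical sketch of the same idea written with torchvision and scikit-learn (the cells below instead extract Inception-v3 features with the TF-slim code from the repository above):\n",
        "\n",
        "```python\n",
        "import torch\n",
        "import torch.nn as nn\n",
        "from torchvision import datasets, models, transforms\n",
        "from sklearn.linear_model import LogisticRegression\n",
        "\n",
        "device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n",
        "\n",
        "# pretrained backbone with the classification head replaced by an identity,\n",
        "# so a forward pass returns the 2048-d pooled feature vector\n",
        "backbone = models.resnet50(pretrained=True)\n",
        "backbone.fc = nn.Identity()\n",
        "backbone = backbone.to(device).eval()\n",
        "\n",
        "tfm = transforms.Compose([\n",
        "    transforms.Resize(256),\n",
        "    transforms.CenterCrop(224),\n",
        "    transforms.ToTensor(),\n",
        "    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),\n",
        "])\n",
        "\n",
        "def extract(split):\n",
        "    ds = datasets.ImageFolder('cropped_dataset/' + split, tfm)\n",
        "    dl = torch.utils.data.DataLoader(ds, batch_size=64, num_workers=2)\n",
        "    feats, labels = [], []\n",
        "    with torch.no_grad():\n",
        "        for x, y in dl:\n",
        "            feats.append(backbone(x.to(device)).cpu())\n",
        "            labels.append(y)\n",
        "    return torch.cat(feats).numpy(), torch.cat(labels).numpy()\n",
        "\n",
        "X_train, y_train = extract('train_images')\n",
        "X_val, y_val = extract('val_images')\n",
        "clf = LogisticRegression(max_iter=500, multi_class='multinomial').fit(X_train, y_train)\n",
        "print('val accuracy:', clf.score(X_val, y_val))\n",
        "```"
      ]
    },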
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "kqASedaz9fnZ"
      },
      "source": [
        "First, we have to download a model from https://github.com/richardaecn/cvpr18-inaturalist-transfer, there is only one that has not been trained on iNat so we use this one https://drive.google.com/file/d/1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy that has been trained on ImageNet. "
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "DL1l_RLE_n3l"
      },
      "source": [
        "# tensorflow version compatible with the code\n",
        "!pip install tensorflow==1.11"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "_KiaJjaCAmW9",
        "outputId": "34698f06-6915-483f-e99b-996f9cfaf80f"
      },
      "source": [
        "!git clone --recursive https://github.com/richardaecn/cvpr18-inaturalist-transfer.git"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Cloning into 'cvpr18-inaturalist-transfer'...\n",
            "remote: Enumerating objects: 137, done.\u001b[K\n",
            "remote: Total 137 (delta 0), reused 0 (delta 0), pack-reused 137\u001b[K\n",
            "Receiving objects: 100% (137/137), 1.97 MiB | 22.39 MiB/s, done.\n",
            "Resolving deltas: 100% (38/38), done.\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "2SqGtd-uHhhz",
        "outputId": "70bad82e-280c-4819-c981-94bcaa9e529c"
      },
      "source": [
        "# download inceptionv3 pretrained on imageNet\n",
        "!wget --load-cookies /tmp/cookies.txt \"https://drive.google.com/u/0/uc?export=download&confirm=$(wget --quiet --save-cookies /tmp/cookies.txt --keep-session-cookies --no-check-certificate 'https://drive.google.com/u/0/uc?export=download&id=1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy' -O- | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/\\1\\n/p')&id=1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy\" -O inception_v3_ILSVRC_299.ckpt && rm -rf /tmp/cookies.txt"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "--2020-11-23 17:27:17--  https://drive.google.com/u/0/uc?export=download&confirm=Xnvu&id=1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy\n",
            "Resolving drive.google.com (drive.google.com)... 142.250.99.138, 142.250.99.100, 142.250.99.139, ...\n",
            "Connecting to drive.google.com (drive.google.com)|142.250.99.138|:443... connected.\n",
            "HTTP request sent, awaiting response... 302 Moved Temporarily\n",
            "Location: https://doc-0c-8o-docs.googleusercontent.com/docs/securesc/6st36fn75vr8ggsrck3acfllano80h0v/407sd4hh5rt4732cfj4d2fcvcfoe87ec/1606152375000/13117058851018264575/11478932493189232070Z/1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy?e=download [following]\n",
            "--2020-11-23 17:27:17--  https://doc-0c-8o-docs.googleusercontent.com/docs/securesc/6st36fn75vr8ggsrck3acfllano80h0v/407sd4hh5rt4732cfj4d2fcvcfoe87ec/1606152375000/13117058851018264575/11478932493189232070Z/1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy?e=download\n",
            "Resolving doc-0c-8o-docs.googleusercontent.com (doc-0c-8o-docs.googleusercontent.com)... 74.125.199.132, 2607:f8b0:400e:c02::84\n",
            "Connecting to doc-0c-8o-docs.googleusercontent.com (doc-0c-8o-docs.googleusercontent.com)|74.125.199.132|:443... connected.\n",
            "HTTP request sent, awaiting response... 302 Found\n",
            "Location: https://docs.google.com/nonceSigner?nonce=11org9rj78a00&continue=https://doc-0c-8o-docs.googleusercontent.com/docs/securesc/6st36fn75vr8ggsrck3acfllano80h0v/407sd4hh5rt4732cfj4d2fcvcfoe87ec/1606152375000/13117058851018264575/11478932493189232070Z/1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy?e%3Ddownload&hash=uo9984c5usr2b9h4c241k2f7ijeimlv2 [following]\n",
            "--2020-11-23 17:27:17--  https://docs.google.com/nonceSigner?nonce=11org9rj78a00&continue=https://doc-0c-8o-docs.googleusercontent.com/docs/securesc/6st36fn75vr8ggsrck3acfllano80h0v/407sd4hh5rt4732cfj4d2fcvcfoe87ec/1606152375000/13117058851018264575/11478932493189232070Z/1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy?e%3Ddownload&hash=uo9984c5usr2b9h4c241k2f7ijeimlv2\n",
            "Resolving docs.google.com (docs.google.com)... 74.125.142.102, 74.125.142.113, 74.125.142.138, ...\n",
            "Connecting to docs.google.com (docs.google.com)|74.125.142.102|:443... connected.\n",
            "HTTP request sent, awaiting response... 302 Found\n",
            "Location: https://doc-0c-8o-docs.googleusercontent.com/docs/securesc/6st36fn75vr8ggsrck3acfllano80h0v/407sd4hh5rt4732cfj4d2fcvcfoe87ec/1606152375000/13117058851018264575/11478932493189232070Z/1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy?e=download&nonce=11org9rj78a00&user=11478932493189232070Z&hash=50a1m6rmaph86d4vjpov2f8ld8tlcr8m [following]\n",
            "--2020-11-23 17:27:17--  https://doc-0c-8o-docs.googleusercontent.com/docs/securesc/6st36fn75vr8ggsrck3acfllano80h0v/407sd4hh5rt4732cfj4d2fcvcfoe87ec/1606152375000/13117058851018264575/11478932493189232070Z/1Djydji-QnJOQ93dWYw-4yVLSP4-TAXHy?e=download&nonce=11org9rj78a00&user=11478932493189232070Z&hash=50a1m6rmaph86d4vjpov2f8ld8tlcr8m\n",
            "Connecting to doc-0c-8o-docs.googleusercontent.com (doc-0c-8o-docs.googleusercontent.com)|74.125.199.132|:443... connected.\n",
            "HTTP request sent, awaiting response... 200 OK\n",
            "Length: unspecified [application/octet-stream]\n",
            "Saving to: ‘inception_v3_ILSVRC_299.ckpt’\n",
            "\n",
            "inception_v3_ILSVRC     [             <=>    ] 414.57M  92.4MB/s    in 4.7s    \n",
            "\n",
            "2020-11-23 17:27:22 (89.1 MB/s) - ‘inception_v3_ILSVRC_299.ckpt’ saved [434712740]\n",
            "\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "2rRD_DKOubbT"
      },
      "source": [
        "# the following code takes time to process (around 10min)\n",
        "from __future__ import absolute_import\n",
        "from __future__ import division\n",
        "from __future__ import print_function\n",
        "\n",
        "import numpy as np\n",
        "import os\n",
        "import sys\n",
        "import time\n",
        "import tensorflow as tf\n",
        "\n",
        "slim = tf.contrib.slim\n",
        "sys.path.insert(0, '/content/cvpr18-inaturalist-transfer/slim/')\n",
        "from nets import inception, resnet_v2\n",
        "from preprocessing import inception_preprocessing\n",
        "\n",
        "data_dir = './cropped_dataset'\n",
        "\n",
        "base_network = 'InceptionV3'\n",
        "checkpoints_path = 'inception_v3_ILSVRC_299.ckpt'\n",
        "\n",
        "image_size = 299\n",
        "moving_average_decay = 0.9999\n",
        "fea_dim = 2048\n",
        "\n",
        "# Read train and val list.\n",
        "train_list = []\n",
        "val_list = []\n",
        "test_list = []\n",
        "\n",
        "data_transforms = None\n",
        "train_loader = torch.utils.data.DataLoader(datasets.ImageFolder('cropped_dataset/train_images'))\n",
        "val_loader = torch.utils.data.DataLoader(datasets.ImageFolder('cropped_dataset/val_images'))\n",
        "test_loader = torch.utils.data.DataLoader(datasets.ImageFolder('cropped_dataset/test_images'))\n",
        "train_list = train_loader.dataset.imgs\n",
        "val_list = val_loader.dataset.imgs\n",
        "test_list = test_loader.dataset.imgs\n",
        "\n",
        "# Base network architecture\n",
        "if base_network == 'InceptionV3':\n",
        "    endpoint = 'Mixed_7c'\n",
        "    arg_scope = inception.inception_v3_arg_scope()\n",
        "\n",
        "# Feature extraction.\n",
        "fea_train = np.zeros((len(train_list), fea_dim), dtype=np.float32)\n",
        "label_train = np.zeros((len(train_list), ), dtype=np.int32)\n",
        "fea_val = np.zeros((len(val_list), fea_dim), dtype=np.float32)\n",
        "label_val = np.zeros((len(val_list), ), dtype=np.int32)\n",
        "fea_test = np.zeros((len(test_list), fea_dim), dtype=np.float32)\n",
        "\n",
        "with tf.Graph().as_default():\n",
        "    tf_global_step = tf.train.get_or_create_global_step()\n",
        "    image_path = tf.placeholder(tf.string)\n",
        "    image = tf.image.decode_jpeg(tf.read_file(image_path), channels=3)\n",
        "    image = tf.image.convert_image_dtype(image, tf.float32)\n",
        "    image = inception_preprocessing.preprocess_image(image,\n",
        "                                                     image_size,\n",
        "                                                     image_size,\n",
        "                                                     is_training=False)\n",
        "    images  = tf.expand_dims(image, 0)\n",
        "\n",
        "    with slim.arg_scope(arg_scope):\n",
        "        slim_args = [slim.batch_norm, slim.dropout]\n",
        "        with slim.arg_scope(slim_args, is_training=False):\n",
        "            with tf.variable_scope(base_network, reuse=None) as scope:\n",
        "                if base_network == 'InceptionV3':\n",
        "                    net, _ = inception.inception_v3_base(\n",
        "                        images, final_endpoint=endpoint, scope=scope)\n",
        "    net = tf.reduce_mean(net, [0,1,2])\n",
        "\n",
        "    variable_averages = tf.train.ExponentialMovingAverage(\n",
        "        moving_average_decay, tf_global_step)\n",
        "    variables_to_restore = variable_averages.variables_to_restore()\n",
        "    init_fn = slim.assign_from_checkpoint_fn(\n",
        "        checkpoints_path, variables_to_restore)\n",
        "\n",
        "    config_sess = tf.ConfigProto(allow_soft_placement=True)\n",
        "    config_sess.gpu_options.allow_growth = True\n",
        "    with tf.Session(config=config_sess) as sess:\n",
        "        init_fn(sess)\n",
        "        start = time.time()\n",
        "        for i in range(len(fea_train)):\n",
        "            if i%1000 == 0:\n",
        "            fea = sess.run(net, feed_dict={image_path:train_list[i][0]})\n",
        "            fea_train[i, :] = fea\n",
        "            label_train[i] = train_list[i][1]\n",
        "        for i in range(len(fea_val)):\n",
        "            if i%1000 == 0:\n",
        "            fea_val[i, :] = fea\n",
        "            label_val[i] = val_list[i][1]\n",
        "        for i in range(len(fea_test)):\n",
        "            fea = sess.run(net, feed_dict={image_path:test_list[i][0]})\n",
        "            fea_test[i, :] = fea\n",
        "\n",
        "model_name = checkpoints_path.split('/')[-1].split('.ckpt')[0]\n",
        "if not os.path.exists(os.path.join('./feature', model_name)):\n",
        "    os.makedirs(os.path.join('./feature', model_name))\n",
        "\n",
        "save_dir = os.path.join('./feature', model_name)\n",
        "np.save(os.path.join(save_dir + '_feature_train.npy'), fea_train)\n",
        "np.save(os.path.join(save_dir + '_label_train.npy'), label_train)\n",
        "np.save(os.path.join(save_dir + '_feature_val.npy'), fea_val)\n",
        "np.save(os.path.join(save_dir + '_label_val.npy'), label_val)\n",
        "np.save(os.path.join(save_dir + '_feature_test.npy'), fea_test)"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "22mKdNLFQR9r"
      },
      "source": [
        "We can then use the extracted feature like in a classical classification task."
      ]
    },
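    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "As a side note, before settling on one classifier it can help to compare a few classical models on the training features with cross-validation. The sketch below is only illustrative (it is not part of the submission pipeline) and assumes `fea_train` / `label_train` from the extraction cell are still in memory:\n",
        "\n",
        "```python\n",
        "from sklearn.model_selection import cross_val_score\n",
        "from sklearn.svm import LinearSVC\n",
        "from sklearn.neighbors import KNeighborsClassifier\n",
        "from sklearn.linear_model import LogisticRegression\n",
        "\n",
        "candidates = {\n",
        "    'logistic regression': LogisticRegression(max_iter=500, multi_class='multinomial'),\n",
        "    'linear SVM': LinearSVC(C=1.0, max_iter=5000),\n",
        "    'k-NN (k=5)': KNeighborsClassifier(n_neighbors=5),\n",
        "}\n",
        "\n",
        "# 5-fold cross-validated accuracy on the extracted training features\n",
        "for name, clf in candidates.items():\n",
        "    scores = cross_val_score(clf, fea_train, label_train, cv=5)\n",
        "    print('%s: %.3f +/- %.3f' % (name, scores.mean(), scores.std()))\n",
        "```"
      ]
    },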
    {
      "cell_type": "code",
      "metadata": {
        "id": "jTrtiIOjQRkg"
      },
      "source": [
        "# Gradient boosting\n",
        "from sklearn.datasets import make_classification\n",
        "from sklearn.ensemble import GradientBoostingClassifier\n",
        "from sklearn.model_selection import train_test_split\n",
        "\n",
        "clf = GradientBoostingClassifier(random_state=0)\n",
        "clf.fit(fea_train, label_train)"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "hCBDYQd6XE5K",
        "outputId": "a430fd6b-0527-4560-d017-d92bf28659c1"
      },
      "source": [
        "clf.score(fea_val, label_val)"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "0.8543689320388349"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 27
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "X8LDjiJ0Xta0",
        "outputId": "2370981f-302b-4220-f10d-06c39ff79948"
      },
      "source": [
        "# Logistic Regression\n",
        "from sklearn.linear_model import LogisticRegression\n",
        "\n",
        "LR = LogisticRegression(solver='lbfgs', multi_class='multinomial', max_iter=500)\n",
        "LR.fit(fea_train, label_train)"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,\n",
              "                   intercept_scaling=1, l1_ratio=None, max_iter=500,\n",
              "                   multi_class='multinomial', n_jobs=None, penalty='l2',\n",
              "                   random_state=None, solver='lbfgs', tol=0.0001, verbose=0,\n",
              "                   warm_start=False)"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 52
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "4fZWWeK7X7oR",
        "outputId": "9e819aa3-2dcf-4e36-d694-6bcae52b9ada"
      },
      "source": [
        "LR.score(fea_val, label_val)"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "0.941747572815534"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 53
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "MddI84QICRTk"
      },
      "source": [
        "The Logistic Regression gives good results, we decide to create a csv submission file for this classification method. "
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "FcFFEtnDZMNF",
        "outputId": "c205656c-cdd5-4841-fc7d-e6abf9f8b7cb"
      },
      "source": [
        "test_dir = '/content/cropped_dataset/test_images/mistery_category'\n",
        "\n",
        "output_file = open('kaggle_LR.csv', \"w\")\n",
        "output_file.write(\"Id,Category\\n\")\n",
        "for i,f in enumerate(test_list):\n",
        "    name = f[0].split('/')[3].split('.')[0]\n",
        "    pred = LR.predict([fea_test[i]])[0]\n",
        "    output_file.write(\"%s,%d\\n\" % (name, pred))\n",
        "\n",
        "output_file.close()\n",
        "\n",
        "print(\"Succesfully wrote, you can upload this file to the kaggle competition website\")"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Succesfully wrote, you can upload this file to the kaggle competition website\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "BS6yC_dyUC3h"
      },
      "source": [
        "## Implementation of API Net\n",
        "Learning Attentive Pairwise Interaction for Fine-Grained Classification (API-Net), 2020 paper\n",
        "\n",
        "Code from https://github.com/PeiqinZhuang/API-Net (several modifications have been used to make it compatible with colab)"
      ]
    },
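    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "In brief, following the code below: for each image in a batch, `get_pairs` finds its nearest same-class and nearest different-class neighbour in feature space. For every such pair (x1, x2), a mutual vector m = map2(dropout(map1([x1; x2]))) is computed, then two gates g1 = sigmoid(m * x1) and g2 = sigmoid(m * x2), where * is the elementwise product. Each image is classified four times through a shared fully-connected layer, from its self-gated and other-gated features (e.g. x1 + g1 * x1 and x1 + g2 * x1). The training loss is the cross-entropy over all four sets of logits plus a margin ranking loss (margin 0.05) that pushes each image's self-gated score above its other-gated score."
      ]
    },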
    {
      "cell_type": "code",
      "metadata": {
        "id": "4cBDjEgxV58q"
      },
      "source": [
        "# Utils\n",
        "def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):\n",
        "    torch.save(state, filename)\n",
        "    if is_best:\n",
        "        shutil.copyfile(filename, 'model_best.pth.tar')\n",
        "\n",
        "\n",
        "class AverageMeter(object):\n",
        "    \"\"\"\n",
        "    Keeps track of most recent, average, sum, and count of a metric.\n",
        "    \"\"\"\n",
        "\n",
        "    def __init__(self):\n",
        "        self.reset()\n",
        "\n",
        "    def reset(self):\n",
        "        self.val = 0\n",
        "        self.avg = 0\n",
        "        self.sum = 0\n",
        "        self.count = 0\n",
        "\n",
        "    def update(self, val, n=1):\n",
        "        self.val = val\n",
        "        self.sum += val * n\n",
        "        self.count += n\n",
        "        self.avg = self.sum / self.count\n",
        "\n",
        "def accuracy(scores, targets, k):\n",
        "    \"\"\"\n",
        "    Computes top-k accuracy, from predicted and true labels.\n",
        "    :param scores: scores from the model\n",
        "    :param targets: true labels\n",
        "    :param k: k in top-k accuracy\n",
        "    :return: top-k accuracy\n",
        "    \"\"\"\n",
        "\n",
        "    batch_size = targets.size(0)\n",
        "    _, ind = scores.topk(k, 1, True, True)\n",
        "    correct = ind.eq(targets.view(-1, 1).expand_as(ind))\n",
        "    correct_total = correct.view(-1).float().sum()  # 0D tensor\n",
        "    return correct_total.item() * (100.0 / batch_size)\n"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "A3tJC9WtKxck"
      },
      "source": [
        "# Model Definition\n",
        "def pdist(vectors):\n",
        "    distance_matrix = -2 * vectors.mm(torch.t(vectors)) + vectors.pow(2).sum(dim=1).view(1, -1) + vectors.pow(2).sum(\n",
        "        dim=1).view(-1, 1)\n",
        "    return distance_matrix\n",
        "\n",
        "class API_Net(nn.Module):\n",
        "    def __init__(self):\n",
        "        super(API_Net, self).__init__()\n",
        "\n",
        "        resnet101 = models.resnet101(pretrained=True)\n",
        "        layers = list(resnet101.children())[:-2]\n",
        "\n",
        "        self.conv = nn.Sequential(*layers)\n",
        "        self.avg = nn.AvgPool2d(kernel_size=7 ,stride=1)\n",
        "        self.map1 = nn.Linear(2048 * 2, 512)\n",
        "        self.map2 = nn.Linear(512, 2048)\n",
        "        self.fc = nn.Linear(2048, 20)\n",
        "        self.drop = nn.Dropout(p=0.5)\n",
        "        self.sigmoid = nn.Sigmoid()\n",
        "\n",
        "\n",
        "    def forward(self, images, targets=None, flag='train'):\n",
        "        conv_out = self.conv(images)\n",
        "        pool_out = self.avg(conv_out).squeeze()\n",
        "\n",
        "        if flag == 'train':\n",
        "            intra_pairs, inter_pairs, \\\n",
        "                    intra_labels, inter_labels = self.get_pairs(pool_out, targets)\n",
        "\n",
        "            features1 = torch.cat([pool_out[intra_pairs[:, 0]], pool_out[inter_pairs[:, 0]]], dim=0)\n",
        "            features2 = torch.cat([pool_out[intra_pairs[:, 1]], pool_out[inter_pairs[:, 1]]], dim=0)\n",
        "            labels1 = torch.cat([intra_labels[:, 0], inter_labels[:, 0]], dim=0)\n",
        "            labels2 = torch.cat([intra_labels[:, 1], inter_labels[:, 1]], dim=0)\n",
        "\n",
        "\n",
        "            mutual_features = torch.cat([features1, features2], dim=1)\n",
        "            map1_out = self.map1(mutual_features)\n",
        "            map2_out = self.drop(map1_out)\n",
        "            map2_out = self.map2(map2_out)\n",
        "\n",
        "\n",
        "            gate1 = torch.mul(map2_out, features1)\n",
        "            gate1 = self.sigmoid(gate1)\n",
        "\n",
        "            gate2 = torch.mul(map2_out, features2)\n",
        "            gate2 = self.sigmoid(gate2)\n",
        "\n",
        "            features1_self = torch.mul(gate1, features1) + features1\n",
        "            features1_other = torch.mul(gate2, features1) + features1\n",
        "\n",
        "            features2_self = torch.mul(gate2, features2) + features2\n",
        "            features2_other = torch.mul(gate1, features2) + features2\n",
        "\n",
        "            logit1_self = self.fc(self.drop(features1_self))\n",
        "            logit1_other = self.fc(self.drop(features1_other))\n",
        "            logit2_self = self.fc(self.drop(features2_self))\n",
        "            logit2_other = self.fc(self.drop(features2_other))\n",
        "\n",
        "            return logit1_self, logit1_other, logit2_self, logit2_other, labels1, labels2\n",
        "\n",
        "        elif flag == 'val':\n",
        "            return self.fc(pool_out)\n",
        "\n",
        "\n",
        "    def get_pairs(self, embeddings, labels):\n",
        "        distance_matrix = pdist(embeddings).detach().cpu().numpy()\n",
        "\n",
        "        labels = labels.detach().cpu().numpy().reshape(-1,1)\n",
        "        num = labels.shape[0]\n",
        "        dia_inds = np.diag_indices(num)\n",
        "        lb_eqs = (labels == labels.T)\n",
        "        lb_eqs[dia_inds] = False\n",
        "        dist_same = distance_matrix.copy()\n",
        "        dist_same[lb_eqs == False] = np.inf\n",
        "        intra_idxs = np.argmin(dist_same, axis=1)\n",
        "\n",
        "        dist_diff = distance_matrix.copy()\n",
        "        lb_eqs[dia_inds] = True\n",
        "        dist_diff[lb_eqs == True] = np.inf\n",
        "        inter_idxs = np.argmin(dist_diff, axis=1)\n",
        "\n",
        "        intra_pairs = np.zeros([embeddings.shape[0], 2])\n",
        "        inter_pairs  = np.zeros([embeddings.shape[0], 2])\n",
        "        intra_labels = np.zeros([embeddings.shape[0], 2])\n",
        "        inter_labels = np.zeros([embeddings.shape[0], 2])\n",
        "        for i in range(embeddings.shape[0]):\n",
        "            intra_labels[i, 0] = labels[i]\n",
        "            intra_labels[i, 1] = labels[intra_idxs[i]]\n",
        "            intra_pairs[i, 0] = i\n",
        "            intra_pairs[i, 1] = intra_idxs[i]\n",
        "\n",
        "            inter_labels[i, 0] = labels[i]\n",
        "            inter_labels[i, 1] = labels[inter_idxs[i]]\n",
        "            inter_pairs[i, 0] = i\n",
        "            inter_pairs[i, 1] = inter_idxs[i]\n",
        "\n",
        "        intra_labels = torch.from_numpy(intra_labels).long().to(device)\n",
        "        intra_pairs = torch.from_numpy(intra_pairs).long().to(device)\n",
        "        inter_labels = torch.from_numpy(inter_labels).long().to(device)\n",
        "        inter_pairs = torch.from_numpy(inter_pairs).long().to(device)\n",
        "\n",
        "        return intra_pairs, inter_pairs, intra_labels, inter_labels"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "foXkEYF_UnTr"
      },
      "source": [
        "best_prec1 = 0\n",
        "\n",
        "def train(train_loader, val_loader, model, criterion, optimizer_conv,scheduler_conv, optimizer_fc, scheduler_fc, epoch, step, print_freq = 5):\n",
        "    global best_prec1\n",
        "\n",
        "    batch_time = AverageMeter()\n",
        "    data_time = AverageMeter()\n",
        "    softmax_losses = AverageMeter()\n",
        "    rank_losses = AverageMeter()\n",
        "    losses = AverageMeter()\n",
        "    top1 = AverageMeter()\n",
        "    top5 = AverageMeter()\n",
        "\n",
        "    # switch to train mode\n",
        "    end = time.time()\n",
        "    rank_criterion = nn.MarginRankingLoss(margin=0.05)\n",
        "    softmax_layer = nn.Softmax(dim=1).to(device)\n",
        "\n",
        "    for i, (input, target) in enumerate(train_loader):\n",
        "        model.train()\n",
        "\n",
        "        # measure data loading time\n",
        "        data_time.update(time.time() - end)\n",
        "        input_var = input.to(device)\n",
        "        target_var = target.to(device).squeeze()\n",
        "\n",
        "\n",
        "        # compute output\n",
        "        logit1_self, logit1_other, logit2_self, logit2_other, labels1, labels2 = model(input_var, target_var, flag='train')\n",
        "        batch_size = logit1_self.shape[0]\n",
        "        labels1 = labels1.to(device)\n",
        "        labels2 = labels2.to(device)\n",
        "\n",
        "        self_logits = torch.zeros(2*batch_size, 20).to(device)\n",
        "        other_logits= torch.zeros(2*batch_size, 20).to(device)\n",
        "        self_logits[:batch_size] = logit1_self\n",
        "        self_logits[batch_size:] = logit2_self\n",
        "        other_logits[:batch_size] = logit1_other\n",
        "        other_logits[batch_size:] = logit2_other\n",
        "\n",
        "        # compute loss\n",
        "        logits = torch.cat([self_logits, other_logits], dim=0)\n",
        "        targets = torch.cat([labels1, labels2, labels1, labels2], dim=0)\n",
        "        softmax_loss = criterion(logits, targets)\n",
        "\n",
        "        self_scores = softmax_layer(self_logits)[torch.arange(2*batch_size).to(device).long(),\n",
        "                                                         torch.cat([labels1, labels2], dim=0)]\n",
        "        other_scores = softmax_layer(other_logits)[torch.arange(2*batch_size).to(device).long(),\n",
        "                                                         torch.cat([labels1, labels2], dim=0)]\n",
        "        flag = torch.ones([2*batch_size, ]).to(device)\n",
        "        rank_loss = rank_criterion(self_scores, other_scores, flag)\n",
        "\n",
        "        loss = softmax_loss + rank_loss\n",
        "\n",
        "        # measure accuracy and record loss\n",
        "        prec1 = accuracy(logits, targets, 1)\n",
        "        prec5 = accuracy(logits, targets, 5)\n",
        "        losses.update(loss.item(), 2*batch_size)\n",
        "        softmax_losses.update(softmax_loss.item(), 4*batch_size)\n",
        "        rank_losses.update(rank_loss.item(), 2*batch_size)\n",
        "        top1.update(prec1, 4*batch_size)\n",
        "        top5.update(prec5, 4*batch_size)\n",
        "\n",
        "        # compute gradient and do SGD step\n",
        "        optimizer_conv.zero_grad()\n",
        "        optimizer_fc.zero_grad()\n",
        "        loss.backward()\n",
        "        if epoch >= 8:\n",
        "            optimizer_conv.step()\n",
        "        optimizer_fc.step()\n",
        "        scheduler_conv.step()\n",
        "        scheduler_fc.step()\n",
        "\n",
        "\n",
        "        # measure elapsed time\n",
        "        batch_time.update(time.time() - end)\n",
        "        end = time.time()\n",
        "\n",
        "        if i % print_freq == 0:\n",
        "            print('Time: {time}\\nStep: {step}\\t Epoch: [{0}][{1}/{2}]\\t'\n",
        "                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\\t'\n",
        "                  'Data {data_time.val:.3f} ({data_time.avg:.3f})\\t'\n",
        "                  'Loss {loss.val:.4f} ({loss.avg:.4f})\\t'\n",
        "                  'SoftmaxLoss {softmax_loss.val:.4f} ({softmax_loss.avg:.4f})\\t'\n",
        "                  'RankLoss {rank_loss.val:.4f} ({rank_loss.avg:.4f})\\t'\n",
        "                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\\t'\n",
        "                  'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(\n",
        "                   epoch, i, len(train_loader), batch_time=batch_time,\n",
        "                   data_time=data_time, loss=losses, softmax_loss=softmax_losses, rank_loss=rank_losses,\n",
        "                   top1=top1, top5=top5, step=step, time= time.asctime(time.localtime(time.time()))))\n",
        "\n",
        "        if i== len(train_loader) - 1:\n",
        "            prec1 = validate(val_loader, model, criterion)\n",
        "\n",
        "            # remember best prec@1 and save checkpoint\n",
        "            is_best = prec1 > best_prec1\n",
        "            best_prec1 = max(prec1, best_prec1)\n",
        "            save_checkpoint({\n",
        "                'epoch': epoch + 1,\n",
        "                'state_dict': model.state_dict(),\n",
        "                'best_prec1': best_prec1,\n",
        "                'optimizer_conv': optimizer_conv.state_dict(),\n",
        "                'optimizer_fc': optimizer_fc.state_dict(),\n",
        "            }, is_best)\n",
        "\n",
        "        step = step +1\n",
        "    return step\n",
        "\n",
        "\n",
        "def validate(val_loader, model, criterion, print_freq = 5):\n",
        "    batch_time = AverageMeter()\n",
        "    softmax_losses = AverageMeter()\n",
        "    top1 = AverageMeter()\n",
        "    top5 = AverageMeter()\n",
        "\n",
        "    # switch to evaluate mode\n",
        "    model.eval()\n",
        "    end = time.time()\n",
        "\n",
        "    with torch.no_grad():\n",
        "        for i, (input, target) in enumerate(val_loader):\n",
        "\n",
        "            input_var = input.to(device)\n",
        "            target_var = target.to(device).squeeze()\n",
        "\n",
        "            # compute output\n",
        "            logits = model(input_var, targets=None, flag='val')\n",
        "            softmax_loss = criterion(logits, target_var)\n",
        "\n",
        "\n",
        "            prec1= accuracy(logits, target_var, 1)\n",
        "            prec5 = accuracy(logits, target_var, 5)\n",
        "            softmax_losses.update(softmax_loss.item(), logits.size(0))\n",
        "            top1.update(prec1, logits.size(0))\n",
        "            top5.update(prec5, logits.size(0))\n",
        "\n",
        "            # measure elapsed time\n",
        "            batch_time.update(time.time() - end)\n",
        "            end = time.time()\n",
        "\n",
        "\n",
        "\n",
        "            if i % print_freq == 0:\n",
        "                print('Time: {time}\\nTest: [{0}/{1}]\\t'\n",
        "                        'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\\t'\n",
        "                        'SoftmaxLoss {softmax_loss.val:.4f} ({softmax_loss.avg:.4f})\\t'\n",
        "                        'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\\t'\n",
        "                        'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(\n",
        "                        i, len(val_loader), batch_time=batch_time, softmax_loss=softmax_losses,\n",
        "                        top1=top1, top5=top5, time=time.asctime(time.localtime(time.time()))))\n",
        "        print(' * Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f}'.format(top1=top1, top5=top5))\n",
        "\n",
        "    return top1.avg"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "d46tXMR0VJpu"
      },
      "source": [
        "input_size = 224\n",
        "batch_size = 64\n",
        "num_epochs = 20\n",
        "\n",
        "data_transforms = {\n",
        "    'train_images': transforms.Compose([\n",
        "        transforms.Resize(input_size),\n",
        "        transforms.RandomHorizontalFlip(),\n",
        "        transforms.RandomRotation(90),\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
        "    ]),\n",
        "    'val_images': transforms.Compose([\n",
        "        transforms.Resize((input_size,input_size)),\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
        "    ]),\n",
        "}\n",
        "\n",
        "data_dir = 'cropped_dataset'\n",
        "# Create training and validation datasets\n",
        "image_datasets = {x: datasets.ImageFolder(os.path.join(data_dir, x), data_transforms[x]) for x in ['train_images', 'val_images']}\n",
        "# Create training and validation dataloaders\n",
        "# dataloaders_dict = {x: torch.utils.data.DataLoader(image_datasets[x], shuffle=True, num_workers=4) for x in ['train_images', 'val_images']}\n",
        "dataloaders_dict = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=batch_size, shuffle=True, num_workers=4) for x in ['train_images', 'val_images']}\n",
        "train_loader = dataloaders_dict[\"train_images\"]\n",
        "val_loader = dataloaders_dict[\"val_images\"]\n",
        "\n",
        "model = API_Net()\n",
        "model = model.to(device)\n",
        "model.conv = nn.DataParallel(model.conv)\n",
        "\n",
        "# define loss function (criterion) and optimizer\n",
        "criterion = nn.CrossEntropyLoss().to(device)\n",
        "optimizer_conv = torch.optim.SGD(model.conv.parameters(), 0.001,momentum=0.9,weight_decay=3.0e-4)\n",
        "\n",
        "fc_parameters = [value for name, value in model.named_parameters() if 'conv' not in name]\n",
        "optimizer_fc = torch.optim.SGD(fc_parameters, 0.001, momentum=0.9, weight_decay=3.0e-4)\n",
        "\n",
        "scheduler_conv = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer_conv, 100*len(train_loader))\n",
        "scheduler_fc = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer_fc, 100*len(train_loader))"
      ],
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "h5Q2JFP5W_WM",
        "outputId": "379006a7-6d2f-42db-8059-01cc86cdfaa6"
      },
      "source": [
        "step = 0\n",
        "print('START TIME:', time.asctime(time.localtime(time.time())))\n",
        "for epoch in range(num_epochs):\n",
        "    step = train(train_loader, val_loader,model, criterion, optimizer_conv, scheduler_conv, optimizer_fc, scheduler_fc, epoch, step)"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "START TIME: Mon Nov 23 15:48:29 2020\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/torch/optim/lr_scheduler.py:136: UserWarning: Detected call of `lr_scheduler.step()` before `optimizer.step()`. In PyTorch 1.1.0 and later, you should call them in the opposite order: `optimizer.step()` before `lr_scheduler.step()`.  Failure to do this will result in PyTorch skipping the first value of the learning rate schedule. See more details at https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate\n",
            "  \"https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate\", UserWarning)\n"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "Time: Mon Nov 23 15:48:31 2020\n",
            "Step: 0\t Epoch: [0][0/17]\tTime 2.441 (2.441)\tData 1.904 (1.904)\tLoss 0.5862 (0.5862)\tSoftmaxLoss 0.5096 (0.5096)\tRankLoss 0.0766 (0.0766)\tPrec@1 83.984 (83.984)\tPrec@5 98.047 (98.047)\n",
            "Time: Mon Nov 23 15:48:37 2020\n",
            "Step: 5\t Epoch: [0][5/17]\tTime 1.090 (1.336)\tData 0.017 (0.336)\tLoss 0.5871 (0.5795)\tSoftmaxLoss 0.5261 (0.5141)\tRankLoss 0.0610 (0.0654)\tPrec@1 81.836 (83.594)\tPrec@5 98.242 (96.680)\n",
            "Time: Mon Nov 23 15:48:42 2020\n",
            "Step: 10\t Epoch: [0][10/17]\tTime 1.101 (1.227)\tData 0.005 (0.187)\tLoss 0.5290 (0.6274)\tSoftmaxLoss 0.4653 (0.5595)\tRankLoss 0.0636 (0.0679)\tPrec@1 82.812 (82.280)\tPrec@5 97.852 (96.040)\n",
            "Time: Mon Nov 23 15:48:48 2020\n",
            "Step: 15\t Epoch: [0][15/17]\tTime 1.117 (1.191)\tData 0.007 (0.131)\tLoss 0.5993 (0.6403)\tSoftmaxLoss 0.5329 (0.5724)\tRankLoss 0.0664 (0.0679)\tPrec@1 85.352 (82.324)\tPrec@5 93.945 (95.557)\n",
            "Time: Mon Nov 23 15:48:50 2020\n",
            "Test: [0/2]\tTime 1.156 (1.156)\tSoftmaxLoss 0.4356 (0.4356)\tPrec@1 87.500 (87.500)\tPrec@5 96.875 (96.875)\n",
            " * Prec@1 88.350 Prec@5 98.058\n",
            "Time: Mon Nov 23 15:48:53 2020\n",
            "Step: 17\t Epoch: [1][0/17]\tTime 2.308 (2.308)\tData 1.727 (1.727)\tLoss 0.5986 (0.5986)\tSoftmaxLoss 0.5319 (0.5319)\tRankLoss 0.0667 (0.0667)\tPrec@1 81.641 (81.641)\tPrec@5 99.219 (99.219)\n",
            "Time: Mon Nov 23 15:48:59 2020\n",
            "Step: 22\t Epoch: [1][5/17]\tTime 1.143 (1.357)\tData 0.021 (0.306)\tLoss 0.7264 (0.5872)\tSoftmaxLoss 0.6523 (0.5183)\tRankLoss 0.0741 (0.0689)\tPrec@1 77.344 (83.984)\tPrec@5 97.461 (96.712)\n",
            "Time: Mon Nov 23 15:49:05 2020\n",
            "Step: 27\t Epoch: [1][10/17]\tTime 1.154 (1.265)\tData 0.008 (0.170)\tLoss 0.4048 (0.5671)\tSoftmaxLoss 0.3375 (0.5008)\tRankLoss 0.0673 (0.0663)\tPrec@1 88.867 (84.730)\tPrec@5 100.000 (96.449)\n",
            "Time: Mon Nov 23 15:49:11 2020\n",
            "Step: 32\t Epoch: [1][15/17]\tTime 1.164 (1.232)\tData 0.007 (0.119)\tLoss 0.5703 (0.5810)\tSoftmaxLoss 0.4860 (0.5136)\tRankLoss 0.0843 (0.0674)\tPrec@1 86.328 (84.277)\tPrec@5 98.438 (96.301)\n",
            "Time: Mon Nov 23 15:49:13 2020\n",
            "Test: [0/2]\tTime 1.184 (1.184)\tSoftmaxLoss 0.5065 (0.5065)\tPrec@1 85.938 (85.938)\tPrec@5 98.438 (98.438)\n",
            " * Prec@1 89.320 Prec@5 98.058\n",
            "Time: Mon Nov 23 15:49:17 2020\n",
            "Step: 34\t Epoch: [2][0/17]\tTime 2.731 (2.731)\tData 2.078 (2.078)\tLoss 0.5127 (0.5127)\tSoftmaxLoss 0.4503 (0.4503)\tRankLoss 0.0624 (0.0624)\tPrec@1 86.914 (86.914)\tPrec@5 96.875 (96.875)\n",
            "Time: Mon Nov 23 15:49:23 2020\n",
            "Step: 39\t Epoch: [2][5/17]\tTime 1.125 (1.414)\tData 0.005 (0.361)\tLoss 0.3968 (0.5894)\tSoftmaxLoss 0.3286 (0.5222)\tRankLoss 0.0682 (0.0671)\tPrec@1 88.281 (83.952)\tPrec@5 98.242 (95.605)\n",
            "Time: Mon Nov 23 15:49:28 2020\n",
            "Step: 44\t Epoch: [2][10/17]\tTime 1.121 (1.282)\tData 0.007 (0.200)\tLoss 0.5661 (0.5935)\tSoftmaxLoss 0.4930 (0.5264)\tRankLoss 0.0731 (0.0671)\tPrec@1 86.328 (83.079)\tPrec@5 97.656 (96.076)\n",
            "Time: Mon Nov 23 15:49:34 2020\n",
            "Step: 49\t Epoch: [2][15/17]\tTime 1.108 (1.228)\tData 0.007 (0.140)\tLoss 0.5069 (0.6108)\tSoftmaxLoss 0.4374 (0.5433)\tRankLoss 0.0695 (0.0674)\tPrec@1 85.742 (82.507)\tPrec@5 99.023 (95.947)\n",
            "Time: Mon Nov 23 15:49:36 2020\n",
            "Test: [0/2]\tTime 1.155 (1.155)\tSoftmaxLoss 0.5034 (0.5034)\tPrec@1 85.938 (85.938)\tPrec@5 96.875 (96.875)\n",
            " * Prec@1 89.320 Prec@5 98.058\n",
            "Time: Mon Nov 23 15:49:40 2020\n",
            "Step: 51\t Epoch: [3][0/17]\tTime 2.407 (2.407)\tData 1.808 (1.808)\tLoss 0.4969 (0.4969)\tSoftmaxLoss 0.4381 (0.4381)\tRankLoss 0.0588 (0.0588)\tPrec@1 86.523 (86.523)\tPrec@5 97.266 (97.266)\n",
            "Time: Mon Nov 23 15:49:45 2020\n",
            "Step: 56\t Epoch: [3][5/17]\tTime 1.095 (1.342)\tData 0.008 (0.317)\tLoss 0.4699 (0.5138)\tSoftmaxLoss 0.3962 (0.4466)\tRankLoss 0.0737 (0.0672)\tPrec@1 88.672 (87.402)\tPrec@5 98.828 (97.982)\n",
            "Time: Mon Nov 23 15:49:51 2020\n",
            "Step: 61\t Epoch: [3][10/17]\tTime 1.094 (1.229)\tData 0.006 (0.176)\tLoss 0.6653 (0.5548)\tSoftmaxLoss 0.5971 (0.4850)\tRankLoss 0.0682 (0.0698)\tPrec@1 80.469 (85.298)\tPrec@5 97.852 (97.727)\n",
            "Time: Mon Nov 23 15:49:56 2020\n",
            "Step: 66\t Epoch: [3][15/17]\tTime 1.103 (1.187)\tData 0.007 (0.123)\tLoss 0.5264 (0.5550)\tSoftmaxLoss 0.4492 (0.4846)\tRankLoss 0.0772 (0.0704)\tPrec@1 85.938 (84.998)\tPrec@5 95.117 (97.437)\n",
            "Time: Mon Nov 23 15:49:58 2020\n",
            "Test: [0/2]\tTime 1.147 (1.147)\tSoftmaxLoss 0.4328 (0.4328)\tPrec@1 87.500 (87.500)\tPrec@5 98.438 (98.438)\n",
            " * Prec@1 88.350 Prec@5 98.058\n",
            "Time: Mon Nov 23 15:50:02 2020\n",
            "Step: 68\t Epoch: [4][0/17]\tTime 2.430 (2.430)\tData 1.860 (1.860)\tLoss 0.5764 (0.5764)\tSoftmaxLoss 0.5054 (0.5054)\tRankLoss 0.0710 (0.0710)\tPrec@1 83.594 (83.594)\tPrec@5 98.047 (98.047)\n",
            "Time: Mon Nov 23 15:50:08 2020\n",
            "Step: 73\t Epoch: [4][5/17]\tTime 1.116 (1.356)\tData 0.008 (0.324)\tLoss 0.5473 (0.5205)\tSoftmaxLoss 0.4859 (0.4524)\tRankLoss 0.0614 (0.0681)\tPrec@1 83.398 (85.677)\tPrec@5 97.852 (97.982)\n",
            "Time: Mon Nov 23 15:50:13 2020\n",
            "Step: 78\t Epoch: [4][10/17]\tTime 1.136 (1.252)\tData 0.007 (0.180)\tLoss 0.6014 (0.5660)\tSoftmaxLoss 0.5351 (0.4980)\tRankLoss 0.0663 (0.0679)\tPrec@1 81.836 (84.091)\tPrec@5 99.609 (97.301)\n",
            "Time: Mon Nov 23 15:50:19 2020\n",
            "Step: 83\t Epoch: [4][15/17]\tTime 1.144 (1.218)\tData 0.007 (0.126)\tLoss 0.7604 (0.5896)\tSoftmaxLoss 0.6916 (0.5227)\tRankLoss 0.0688 (0.0670)\tPrec@1 79.297 (83.545)\tPrec@5 91.016 (96.387)\n",
            "Time: Mon Nov 23 15:50:21 2020\n",
            "Test: [0/2]\tTime 1.169 (1.169)\tSoftmaxLoss 0.4763 (0.4763)\tPrec@1 84.375 (84.375)\tPrec@5 96.875 (96.875)\n",
            " * Prec@1 87.379 Prec@5 98.058\n",
            "Time: Mon Nov 23 15:50:25 2020\n",
            "Step: 85\t Epoch: [5][0/17]\tTime 2.600 (2.600)\tData 2.017 (2.017)\tLoss 0.7167 (0.7167)\tSoftmaxLoss 0.6540 (0.6540)\tRankLoss 0.0626 (0.0626)\tPrec@1 77.344 (77.344)\tPrec@5 94.141 (94.141)\n",
            "Time: Mon Nov 23 15:50:31 2020\n",
            "Step: 90\t Epoch: [5][5/17]\tTime 1.123 (1.395)\tData 0.008 (0.351)\tLoss 0.6494 (0.6185)\tSoftmaxLoss 0.5845 (0.5503)\tRankLoss 0.0649 (0.0682)\tPrec@1 80.664 (82.715)\tPrec@5 95.703 (96.094)\n",
            "Time: Mon Nov 23 15:50:36 2020\n",
            "Step: 95\t Epoch: [5][10/17]\tTime 1.120 (1.270)\tData 0.007 (0.194)\tLoss 0.3800 (0.6081)\tSoftmaxLoss 0.3117 (0.5388)\tRankLoss 0.0684 (0.0693)\tPrec@1 91.797 (83.185)\tPrec@5 99.023 (96.112)\n",
            "Time: Mon Nov 23 15:50:42 2020\n",
            "Step: 100\t Epoch: [5][15/17]\tTime 1.122 (1.222)\tData 0.007 (0.136)\tLoss 0.5685 (0.6042)\tSoftmaxLoss 0.5073 (0.5349)\tRankLoss 0.0612 (0.0694)\tPrec@1 83.594 (83.789)\tPrec@5 96.484 (96.118)\n",
            "Time: Mon Nov 23 15:50:44 2020\n",
            "Test: [0/2]\tTime 1.175 (1.175)\tSoftmaxLoss 0.4078 (0.4078)\tPrec@1 84.375 (84.375)\tPrec@5 98.438 (98.438)\n",
            " * Prec@1 86.408 Prec@5 98.058\n",
            "Time: Mon Nov 23 15:50:48 2020\n",
            "Step: 102\t Epoch: [6][0/17]\tTime 2.676 (2.676)\tData 2.078 (2.078)\tLoss 0.6625 (0.6625)\tSoftmaxLoss 0.6074 (0.6074)\tRankLoss 0.0550 (0.0550)\tPrec@1 80.859 (80.859)\tPrec@5 93.945 (93.945)\n",
            "Time: Mon Nov 23 15:50:54 2020\n",
            "Step: 107\t Epoch: [6][5/17]\tTime 1.119 (1.397)\tData 0.005 (0.365)\tLoss 0.5618 (0.5526)\tSoftmaxLoss 0.4983 (0.4910)\tRankLoss 0.0636 (0.0616)\tPrec@1 83.594 (84.440)\tPrec@5 96.875 (96.354)\n",
            "Time: Mon Nov 23 15:50:59 2020\n",
            "Step: 112\t Epoch: [6][10/17]\tTime 1.122 (1.271)\tData 0.007 (0.202)\tLoss 0.6594 (0.5568)\tSoftmaxLoss 0.5922 (0.4906)\tRankLoss 0.0672 (0.0662)\tPrec@1 82.617 (85.174)\tPrec@5 95.508 (96.396)\n",
            "Time: Mon Nov 23 15:51:05 2020\n",
            "Step: 117\t Epoch: [6][15/17]\tTime 1.116 (1.222)\tData 0.007 (0.141)\tLoss 0.5382 (0.5843)\tSoftmaxLoss 0.4613 (0.5171)\tRankLoss 0.0769 (0.0672)\tPrec@1 83.008 (84.167)\tPrec@5 99.805 (96.423)\n",
            "Time: Mon Nov 23 15:51:07 2020\n",
            "Test: [0/2]\tTime 1.162 (1.162)\tSoftmaxLoss 0.3891 (0.3891)\tPrec@1 87.500 (87.500)\tPrec@5 98.438 (98.438)\n",
            " * Prec@1 88.350 Prec@5 98.058\n",
            "Time: Mon Nov 23 15:51:11 2020\n",
            "Step: 119\t Epoch: [7][0/17]\tTime 2.610 (2.610)\tData 2.046 (2.046)\tLoss 0.5519 (0.5519)\tSoftmaxLoss 0.4867 (0.4867)\tRankLoss 0.0652 (0.0652)\tPrec@1 85.938 (85.938)\tPrec@5 96.289 (96.289)\n",
            "Time: Mon Nov 23 15:51:16 2020\n",
            "Step: 124\t Epoch: [7][5/17]\tTime 1.121 (1.381)\tData 0.006 (0.354)\tLoss 0.7966 (0.5779)\tSoftmaxLoss 0.7205 (0.5099)\tRankLoss 0.0761 (0.0679)\tPrec@1 75.781 (85.417)\tPrec@5 94.727 (96.061)\n",
            "Time: Mon Nov 23 15:51:22 2020\n",
            "Step: 129\t Epoch: [7][10/17]\tTime 1.124 (1.263)\tData 0.007 (0.196)\tLoss 0.6543 (0.5864)\tSoftmaxLoss 0.5941 (0.5182)\tRankLoss 0.0602 (0.0682)\tPrec@1 80.078 (84.162)\tPrec@5 96.094 (95.952)\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "4BorW6rkC0Bm"
      },
      "source": [
        "The best model gives an accuracy of 89.320 on the validation dataset (and a top-5 accuracy of 98.058, which means that 98% of the times the good label is on the labels corresponding to the top 5 scores)"
      ]
    },
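    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "As a side note on the metric above, the next cell is a minimal, hypothetical sketch of how top-k accuracy can be computed with `torch.topk`. The names `topk_accuracy`, `outputs`, `targets`, `scores` and `labels` are placeholders and are not variables used elsewhere in this notebook."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {},
      "source": [
        "# Minimal sketch (assumption: `outputs` is an (N, num_classes) tensor of scores\n",
        "# and `targets` an (N,) tensor of ground-truth labels; both are made up here).\n",
        "import torch\n",
        "\n",
        "def topk_accuracy(outputs, targets, k=5):\n",
        "    # indices of the k highest scores per sample, shape (N, k)\n",
        "    _, topk_idx = outputs.topk(k, dim=1)\n",
        "    # a prediction counts as correct if the true label appears among those k indices\n",
        "    correct = (topk_idx == targets.view(-1, 1)).any(dim=1)\n",
        "    return 100.0 * correct.float().mean().item()\n",
        "\n",
        "# toy example: random scores for 8 samples over 20 classes\n",
        "scores = torch.randn(8, 20)\n",
        "labels = torch.randint(0, 20, (8,))\n",
        "print(topk_accuracy(scores, labels, k=1), topk_accuracy(scores, labels, k=5))"
      ],
      "execution_count": null,
      "outputs": []
    },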
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "0SBdapOmuHJN"
      },
      "source": [
        "# Create kaggle submission"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "L-FDzHbcYK9Z",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "d8e4eeec-f642-41e1-b9c9-945ff122d520"
      },
      "source": [
        "use_cuda = torch.cuda.is_available()\n",
        "\n",
        "model = torch.load('/content/restnet152_0.8835')\n",
        "\n",
        "model.eval()\n",
        "if use_cuda:\n",
        "    print('Using GPU')\n",
        "    model.cuda()\n",
        "else:\n",
        "    print('Using CPU')\n",
        "\n",
        "test_dir = '/content/cropped_dataset/test_images/mistery_category'\n",
        "\n",
        "def pil_loader(path):\n",
        "    # open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835)\n",
        "    with open(path, 'rb') as f:\n",
        "        with Image.open(f) as img:\n",
        "            return img.convert('RGB')\n",
        "\n",
        "\n",
        "output_file = open('kaggle.csv', \"w\")\n",
        "output_file.write(\"Id,Category\\n\")\n",
        "for f in tqdm(os.listdir(test_dir)):\n",
        "    if 'jpg' in f:\n",
        "        data = data_transforms['val_images'](pil_loader(test_dir + '/' + f))\n",
        "        data = data.view(1, data.size(0), data.size(1), data.size(2))\n",
        "        if use_cuda:\n",
        "            data = data.cuda()\n",
        "        # Use flag='val' and the pred = ...data.max(0)... if the model used comes from the api-net\n",
        "        # output = model(data, flag='val')\n",
        "        # pred = output.data.max(0)[1]\n",
        "        output = model(data)\n",
        "        pred = output.data.max(1, keepdim = True)[1]\n",
        "        output_file.write(\"%s,%d\\n\" % (f[:-4], pred))\n",
        "\n",
        "output_file.close()\n",
        "\n",
        "print(\"Succesfully wrote, you can upload this file to the kaggle competition website\")\n",
        "        "
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "  1%|          | 3/516 [00:00<00:19, 25.73it/s]"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "Using GPU\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "100%|██████████| 516/516 [00:12<00:00, 40.02it/s]"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "Succesfully wrote, you can upload this file to the kaggle competition website\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "2HfezS9lDmvP"
      },
      "source": [
        "Thanks to the code above, we created submission files for 3 models\n",
        "- a ResNet-152 that gave accuracy of 0.93 on the validation dataset\n",
        "- the LR classifier that gave 0.94\n",
        "- the previous API-Net that gave 0.89\n",
        "\n",
        "We then mixed these submissions using a majority vote scheme were the LR (corresponding to the higher accuracy) counts for 1.5 vote and the other 2 to 1 vote."
      ]
    },
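    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "Purely as an illustration of that weighting, the sketch below (a hypothetical `weighted_vote` helper with made-up labels, not used by the submission code that follows) shows why 1.5 / 1 / 1 votes reduce to the rule implemented in the next cell: the LR prediction is kept unless the two other models agree on a different label."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {},
      "source": [
        "# Minimal sketch of the weighted vote; the label values below are made up.\n",
        "from collections import Counter\n",
        "\n",
        "def weighted_vote(pred_lr, pred_api, pred_resnet):\n",
        "    votes = Counter()\n",
        "    votes[pred_lr] += 1.5      # LR has the best validation accuracy, so it gets a heavier vote\n",
        "    votes[pred_api] += 1.0\n",
        "    votes[pred_resnet] += 1.0\n",
        "    return votes.most_common(1)[0][0]\n",
        "\n",
        "print(weighted_vote(5, 8, 5))   # LR (1.5) + ResNet (1.0) beat API-Net (1.0) -> 5\n",
        "print(weighted_vote(5, 8, 8))   # API-Net + ResNet (2.0) beat LR (1.5) -> 8"
      ],
      "execution_count": null,
      "outputs": []
    },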
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "a7oYBjWzW9B7",
        "outputId": "9823cda8-a378-4a10-b52a-0bbcf8f7c064"
      },
      "source": [
        "# from the logistic regression : 0.94 on the validation dataset\n",
        "df_lr = pd.read_csv('kaggle_LR.csv')\n",
        "# from the api net : 0.89 on the validation dataset\n",
        "df_api = pd.read_csv('kaggle_api_1.csv')\n",
        "# from the resnet152 model : 0.93 on the validation dataset\n",
        "df_resnet = pd.read_csv('kaggle_resnet.csv')\n",
        "\n",
        "print(df_lr)\n",
        "print(df_api)\n",
        "print(df_resnet)\n",
        "df = pd.merge(pd.merge(df_lr,df_api,on='Id'),df_resnet,on='Id')\n",
        "print(df.head())\n",
        "    \n",
        "def majority_vote():\n",
        "    output_file = open('kaggle_mv.csv', \"w\")\n",
        "    output_file.write(\"Id,Category\\n\")\n",
        "    for i in range(len(df)):\n",
        "        name, pred_1, pred_2, pred_3 = df.values[i]\n",
        "        pred = pred_1\n",
        "        if pred_2 == pred_3:\n",
        "            pred = pred_2\n",
        "        output_file.write(\"%s,%d\\n\" % (name, pred))\n",
        "\n",
        "    output_file.close()\n",
        "\n",
        "majority_vote()"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "                                   Id  Category\n",
            "0    002f61512a368e4c1434eedacf609957         5\n",
            "1    0247efd7b9d47d036bb4390202a13e69         8\n",
            "2    0267548c2aac82fe3d7e37ae98b00bd7        18\n",
            "3    030c7d18b20ee586db3b74d9966c0348        18\n",
            "4    034abbbb69336b0de7c7c0f2aa1267a6        18\n",
            "..                                ...       ...\n",
            "512  fe95bce0791a7015500d4b9f1d3d32c9         1\n",
            "513  fee2e52c250a812d0e299eb8d0ce558d        15\n",
            "514  fef53a1dada4a77de35c609180d41936         2\n",
            "515  ff5bdc3866e4fda1396030b9a146c19d        15\n",
            "516  ffe6dc708419b819ea897d666e986ec6         8\n",
            "\n",
            "[517 rows x 2 columns]\n",
            "                                   Id  Category\n",
            "0    002f61512a368e4c1434eedacf609957         5\n",
            "1    0247efd7b9d47d036bb4390202a13e69        17\n",
            "2    0267548c2aac82fe3d7e37ae98b00bd7        18\n",
            "3    030c7d18b20ee586db3b74d9966c0348        17\n",
            "4    034abbbb69336b0de7c7c0f2aa1267a6        17\n",
            "..                                ...       ...\n",
            "512  fe95bce0791a7015500d4b9f1d3d32c9         1\n",
            "513  fee2e52c250a812d0e299eb8d0ce558d        15\n",
            "514  fef53a1dada4a77de35c609180d41936         2\n",
            "515  ff5bdc3866e4fda1396030b9a146c19d        15\n",
            "516  ffe6dc708419b819ea897d666e986ec6         8\n",
            "\n",
            "[517 rows x 2 columns]\n",
            "                                   Id  Category\n",
            "0    002f61512a368e4c1434eedacf609957         5\n",
            "1    0247efd7b9d47d036bb4390202a13e69         8\n",
            "2    0267548c2aac82fe3d7e37ae98b00bd7        18\n",
            "3    030c7d18b20ee586db3b74d9966c0348        18\n",
            "4    034abbbb69336b0de7c7c0f2aa1267a6        17\n",
            "..                                ...       ...\n",
            "512  fe95bce0791a7015500d4b9f1d3d32c9         1\n",
            "513  fee2e52c250a812d0e299eb8d0ce558d        16\n",
            "514  fef53a1dada4a77de35c609180d41936         2\n",
            "515  ff5bdc3866e4fda1396030b9a146c19d        16\n",
            "516  ffe6dc708419b819ea897d666e986ec6         8\n",
            "\n",
            "[517 rows x 2 columns]\n",
            "                                 Id  Category_x  Category_y  Category\n",
            "0  002f61512a368e4c1434eedacf609957           5           5         5\n",
            "1  0247efd7b9d47d036bb4390202a13e69           8          17         8\n",
            "2  0267548c2aac82fe3d7e37ae98b00bd7          18          18        18\n",
            "3  030c7d18b20ee586db3b74d9966c0348          18          17        18\n",
            "4  034abbbb69336b0de7c7c0f2aa1267a6          18          17        17\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "r2uMM4RvEqWE"
      },
      "source": [
        "The submission gave a result of 0.81290 on the public leaderboard of the Kaggle challenge."
      ]
    }
  ]
}