{ "cells": [ { "cell_type": "markdown", "metadata": { "id": "tNSNsNcfz126" }, "source": [ "# Generating Text with an RNN" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 6764, "status": "ok", "timestamp": 1606238124957, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "ryGrBgefz126", "outputId": "d6e8727a-c3db-4e13-d59a-e1a6f7d2eb9f" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Collecting Unidecode\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/d0/42/d9edfed04228bacea2d824904cae367ee9efd05e6cce7ceaaedd0b0ad964/Unidecode-1.1.1-py2.py3-none-any.whl (238kB)\n", "\r", "\u001b[K |█▍ | 10kB 28.3MB/s eta 0:00:01\r", "\u001b[K |██▊ | 20kB 31.2MB/s eta 0:00:01\r", "\u001b[K |████▏ | 30kB 20.3MB/s eta 0:00:01\r", "\u001b[K |█████▌ | 40kB 24.0MB/s eta 0:00:01\r", "\u001b[K |██████▉ | 51kB 23.1MB/s eta 0:00:01\r", "\u001b[K |████████▎ | 61kB 25.8MB/s eta 0:00:01\r", "\u001b[K |█████████▋ | 71kB 17.2MB/s eta 0:00:01\r", "\u001b[K |███████████ | 81kB 17.7MB/s eta 0:00:01\r", "\u001b[K |████████████▍ | 92kB 17.1MB/s eta 0:00:01\r", "\u001b[K |█████████████▊ | 102kB 17.1MB/s eta 0:00:01\r", "\u001b[K |███████████████▏ | 112kB 17.1MB/s eta 0:00:01\r", "\u001b[K |████████████████▌ | 122kB 17.1MB/s eta 0:00:01\r", "\u001b[K |█████████████████▉ | 133kB 17.1MB/s eta 0:00:01\r", "\u001b[K |███████████████████▎ | 143kB 17.1MB/s eta 0:00:01\r", "\u001b[K |████████████████████▋ | 153kB 17.1MB/s eta 0:00:01\r", "\u001b[K |██████████████████████ | 163kB 17.1MB/s eta 0:00:01\r", "\u001b[K |███████████████████████▍ | 174kB 17.1MB/s eta 0:00:01\r", "\u001b[K |████████████████████████▊ | 184kB 17.1MB/s eta 0:00:01\r", "\u001b[K |██████████████████████████▏ | 194kB 17.1MB/s eta 0:00:01\r", "\u001b[K |███████████████████████████▌ | 204kB 17.1MB/s eta 0:00:01\r", "\u001b[K |████████████████████████████▉ | 215kB 17.1MB/s eta 0:00:01\r", "\u001b[K |██████████████████████████████▎ | 225kB 17.1MB/s eta 0:00:01\r", "\u001b[K |███████████████████████████████▋| 235kB 17.1MB/s eta 0:00:01\r", "\u001b[K |████████████████████████████████| 245kB 17.1MB/s \n", "\u001b[?25hInstalling collected packages: Unidecode\n", "Successfully installed Unidecode-1.1.1\n" ] } ], "source": [ "!pip install Unidecode\n", "\n", "import unidecode\n", "import string\n", "import random\n", "import re\n", "import time\n", "\n", "import torch\n", "import torch.nn as nn\n", "\n", "%matplotlib inline\n", "\n", "%load_ext autoreload\n", "%autoreload 2" ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 23974, "status": "ok", "timestamp": 1606238107555, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "s9CBbG-Lz8jO", "outputId": "810c3c23-ffca-4c82-89b9-33a01680077c" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Mounted at /content/gdrive\n" ] } ], "source": [ "from google.colab import drive\n", "drive.mount('/content/gdrive')" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 625, "status": "ok", "timestamp": 1606238115471, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "tWZZ0LtOz-q_", "outputId": 
"a4f42e28-a027-4c73-b358-b9c92e73f238" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "/content/gdrive/My Drive/DL_stuff/assignment_4_part_2\n" ] } ], "source": [ "import os\n", "os.chdir(\"gdrive/My Drive/DL_stuff/assignment_4_part_2\")\n", "#os.chdir(\"./assignment1\")\n", "!pwd" ] }, { "cell_type": "code", "execution_count": 4, "metadata": { "executionInfo": { "elapsed": 1134, "status": "ok", "timestamp": 1606238129397, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "UGF2OePpz127" }, "outputs": [], "source": [ "from rnn.model import RNN\n", "from rnn.helpers import time_since\n", "from rnn.generate import generate" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 284, "status": "ok", "timestamp": 1606238131352, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "ivnXVh-9z128", "outputId": "cac8379f-a300-412e-b92b-490b5408a91c", "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "cuda:0\n" ] } ], "source": [ "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") \n", "print(device)" ] }, { "cell_type": "markdown", "metadata": { "id": "4Yn2thRqz128" }, "source": [ "## Data Processing\n", "\n", "The file we are using is a plain text file. We turn any potential unicode characters into plain ASCII by using the `unidecode` package (which you can install via `pip` or `conda`)." ] }, { "cell_type": "code", "execution_count": 6, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 917, "status": "ok", "timestamp": 1606238135558, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "DyoO0kR7z129", "outputId": "eb07f773-e8dc-4d96-c8fb-e7b729643070" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "file_len = 4573338\n", "train len: 4116004\n", "test len: 457334\n" ] } ], "source": [ "all_characters = string.printable\n", "#print(all_characters)\n", "n_characters = len(all_characters)\n", "#print(n_characters)\n", "\n", "file_path = './shakespeare.txt'\n", "file = unidecode.unidecode(open(file_path).read())\n", "file_len = len(file)\n", "print('file_len =', file_len)\n", "\n", "# we will leave the last 1/10th of text as test\n", "split = int(0.9*file_len)\n", "train_text = file[:split]\n", "test_text = file[split:]\n", "\n", "print('train len: ', len(train_text))\n", "print('test len: ', len(test_text))\n", "#print(train_text[0:100])" ] }, { "cell_type": "code", "execution_count": 7, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 296, "status": "ok", "timestamp": 1606238137359, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "zOh2NDhAz129", "outputId": "9ec9b027-0988-47fd-9e52-aa2477220488" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ " sharpens the stomach. 
Come,\n", "Leonine, take her by the arm, walk with her.\n", "\n", "MARINA:\n", "No, I pray you;\n", "I'll not bereave you of your servant.\n", "\n", "DIONYZA:\n", "Come, come;\n", "I love the king your father, and yourself,\n" ] } ], "source": [ "chunk_len = 200\n", "\n", "def random_chunk(text):\n", " start_index = random.randint(0, len(text) - chunk_len)\n", " end_index = start_index + chunk_len + 1\n", " return text[start_index:end_index]\n", "\n", "print(random_chunk(train_text))" ] }, { "cell_type": "markdown", "metadata": { "id": "N8_75JkOz129" }, "source": [ "### Input and Target data" ] }, { "cell_type": "markdown", "metadata": { "id": "kBNSF4goz12-" }, "source": [ "To make training samples out of the large string of text data, we will be splitting the text into chunks.\n", "\n", "Each chunk will be turned into a tensor, specifically a `LongTensor` (used for integer values), by looping through the characters of the string and looking up the index of each character in `all_characters`." ] }, { "cell_type": "code", "execution_count": 8, "metadata": { "executionInfo": { "elapsed": 290, "status": "ok", "timestamp": 1606238146654, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "Zq-7g92gz12-" }, "outputs": [], "source": [ "# Turn string into list of longs\n", "def char_tensor(string):\n", " tensor = torch.zeros(len(string), requires_grad=True).long()\n", " for c in range(len(string)):\n", " #print(\"this is string c\",string[c])\n", " tensor[c] = all_characters.index(string[c])\n", " #print(tensor)\n", " return tensor" ] }, { "cell_type": "markdown", "metadata": { "id": "Ye0zfwoxz12-" }, "source": [ "The following function loads a batch of input and target tensors for training. Each sample comes from a random chunk of text. A sample input will consist of all characters *except the last*, while the target wil contain all characters *following the first*. For example: if random_chunk='abc', then input='ab' and target='bc'" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "executionInfo": { "elapsed": 1004, "status": "ok", "timestamp": 1606238148933, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "HYf9fU5uz12_" }, "outputs": [], "source": [ "def load_random_batch(text, chunk_len, batch_size):\n", " input_data = torch.zeros(batch_size, chunk_len).long().to(device)\n", " target = torch.zeros(batch_size, chunk_len).long().to(device)\n", " for i in range(batch_size):\n", " start_index = random.randint(0, len(text) - chunk_len - 1)\n", " end_index = start_index + chunk_len + 1\n", " chunk = text[start_index:end_index]\n", " #print(chunk)\n", " input_data[i] = char_tensor(chunk[:-1])\n", " target[i] = char_tensor(chunk[1:])\n", " #print(input_data.size(),target.size())\n", " #print(input_data[:,1])\n", " return input_data, target" ] }, { "cell_type": "markdown", "metadata": { "id": "ueM_L0t9z12_" }, "source": [ "# Implement model\n", "\n", "Your RNN model will take as input the character for step $t_{-1}$ and output a prediction for the next character $t$. The model should consiste of three layers - a linear layer that encodes the input character into an embedded state, an RNN layer (which may itself have multiple layers) that operates on that embedded state and a hidden state, and a decoder layer that outputs the predicted character scores distribution.\n", "\n", "\n", "You must implement your model in the `rnn/model.py` file. 
You should use a `nn.Embedding` object for the encoding layer, a RNN model like `nn.RNN` or `nn.LSTM`, and a `nn.Linear` layer for the final a predicted character score decoding layer.\n", "\n", "\n", "**TODO:** Implement the model in RNN `rnn/model.py`" ] }, { "cell_type": "markdown", "metadata": { "id": "7ghSMnj2z12_" }, "source": [ "# Evaluating\n", "\n", "To evaluate the network we will feed one character at a time, use the outputs of the network as a probability distribution for the next character, and repeat. To start generation we pass a priming string to start building up the hidden state, from which we then generate one character at a time.\n", "\n", "\n", "Note that in the `evaluate` function, every time a prediction is made the outputs are divided by the \"temperature\" argument. Higher temperature values make actions more equally likely giving more \"random\" outputs. Lower temperature values (less than 1) high likelihood options contribute more. A temperature near 0 outputs only the most likely outputs.\n", "\n", "You may check different temperature values yourself, but we have provided a default which should work well." ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "executionInfo": { "elapsed": 213, "status": "ok", "timestamp": 1606238151708, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "mKSVxrP9z12_" }, "outputs": [], "source": [ "def evaluate(rnn, prime_str='A', predict_len=100, temperature=0.8):\n", " hidden = rnn.init_hidden(1, device=device)\n", " prime_input = char_tensor(prime_str)\n", " predicted = prime_str\n", "\n", " # Use priming string to \"build up\" hidden state\n", " for p in range(len(prime_str) - 1):\n", " _, hidden = rnn(prime_input[p].unsqueeze(0).to(device), hidden)\n", " #print(\"hidden in evaluate\", hidden)\n", " inp = prime_input[-1]\n", " \n", " for p in range(predict_len):\n", " output, hidden = rnn(inp.unsqueeze(0).to(device), hidden)\n", " #print(\"output in evaluate\",output )\n", " # Sample from the network as a multinomial distribution\n", " output_dist = output.data.view(-1).div(temperature).exp()\n", " top_i = torch.multinomial(output_dist, 1)[0]\n", " \n", " # Add predicted character to string and use as next input\n", " predicted_char = all_characters[top_i]\n", " predicted += predicted_char\n", " inp = char_tensor(predicted_char)\n", "\n", " return predicted" ] }, { "cell_type": "markdown", "metadata": { "id": "N0FTD7fGz12_" }, "source": [ "# Train RNN" ] }, { "cell_type": "code", "execution_count": 11, "metadata": { "executionInfo": { "elapsed": 217, "status": "ok", "timestamp": 1606238153997, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "YFfxLtPdz12_" }, "outputs": [], "source": [ "batch_size = 100\n", "n_epochs = 750\n", "hidden_size = 200\n", "n_layers = 1\n", "learning_rate = 0.01\n", "model_type = 'gru'\n", "# model_type = 'rnn'\n", "# model_type = 'lstm'\n", "print_every = 50\n", "plot_every = 50\n" ] }, { "cell_type": "code", "execution_count": 12, "metadata": { "executionInfo": { "elapsed": 489, "status": "ok", "timestamp": 1606238156331, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "BwD65P4lz12_" }, "outputs": [], "source": [ "def eval_test(rnn, inp, target):\n", " with torch.no_grad():\n", " hidden = rnn.init_hidden(batch_size, device=device)\n", " loss = 0\n", " for c in range(chunk_len):\n", " output, 
hidden = rnn(inp[:,c], hidden)\n", " loss += criterion(output.view(batch_size, -1), target[:,c])\n", " \n", " return loss.data.item() / chunk_len" ] }, { "cell_type": "markdown", "metadata": { "id": "Vie3D97sz12_" }, "source": [ "### Train function\n", "\n", "**TODO**: Fill in the train function. You should initialize a hidden layer representation using your RNN's `init_hidden` function, set the model gradients to zero, and loop over each time step (character) in the input tensor. For each time step compute the output of the of the RNN and compute the loss over the output and the corresponding ground truth time step in `target`. The loss should be averaged over all time steps. Lastly, call backward on the averaged loss and take an optimizer step.\n" ] }, { "cell_type": "code", "execution_count": 13, "metadata": { "executionInfo": { "elapsed": 208, "status": "ok", "timestamp": 1606238160465, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "3o4Vbaxjz12_" }, "outputs": [], "source": [ "def train(rnn, input, target, optimizer, criterion):\n", " \"\"\"\n", " Inputs:\n", " - rnn: model\n", " - input: input character data tensor of shape (batch_size, chunk_len)\n", " - target: target character data tensor of shape (batch_size, chunk_len)\n", " - optimizer: rnn model optimizer\n", " - criterion: loss function\n", " \n", " Returns:\n", " - loss: computed loss value as python float\n", " \"\"\"\n", " loss =0\n", " \n", " ####################################\n", " # YOUR CODE HERE #\n", " ####################################\n", " batch_size=input.size()[0] \n", " chunk_size=input.size()[1] \n", " #print(chunk_size)\n", " rnn_hidden = rnn.init_hidden(batch_size, device=device) # initialize a hidden layer representation using your RNN's init_hidden function\n", " rnn.zero_grad() # set the model gradients to zero\n", " \n", " #this should be similar to the eval_test(rnn, inp, target) function above.\n", " for x in range(chunk_size): # loop over each time step (character) in the input tensor.\n", " #print(input[:,x])\n", " rnn_out,rnn_hidden=rnn(input[:,x], rnn_hidden ) #each time step compute the output of the of the RNN\n", " #print(\"size of rnn output\", rnn_out.size())\n", " #print(\"size of traget\", target[:,x].size())\n", " #rnn_out_new=rnn_out.view(batch_size, -1)\n", " #print(\"size of rnn output\", rnn_out.size())\n", " #print(\"size of traget\", target[:,x].size())\n", " loss+=criterion(rnn_out,target[:,x]) # compute the loss over the output and the corresponding ground truth time step in target.\n", " \n", " loss=loss/chunk_size#loss should be averaged over all time steps.\n", " \n", " loss.backward() #call backward on the averaged loss \n", " optimizer.step() # take an optimizer step.\n", "\n", " return loss" ] }, { "cell_type": "code", "execution_count": 17, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 619001, "status": "ok", "timestamp": 1606201661640, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "qgX0yOW5z12_", "outputId": "658157e1-7054-4211-91a2-74b36c9bce3c" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Training for 750 epochs...\n", "[0m 41s (50 6%) train loss: 1.9273, test_loss: 1.9712]\n", "Whe the wecth you beath the died it the and serted great hiss\n", "and do say irs it so he me coblow.\n", "\n", "SAFD \n", "\n", "[1m 21s (100 13%) train loss: 1.7240, test_loss: 
1.7752]\n", "What him; who the king\n", "the but the take my lords,\n", "Thuch me contaito,\n", "I light you rence your to as is c \n", "\n", "[2m 0s (150 20%) train loss: 1.6449, test_loss: 1.7337]\n", "What, you must thou be not the lord:\n", "And do not the poor younger,\n", "And the wosed grean of his his loody \n", "\n", "[2m 41s (200 26%) train loss: 1.6108, test_loss: 1.6566]\n", "What where, I am say with my some vroiser well.\n", "\n", "MACHIO:\n", "Nay, if I have not now that course: I stake t \n", "\n", "[3m 22s (250 33%) train loss: 1.5616, test_loss: 1.6215]\n", "Which than say thee other a hearts,\n", "And come in a prayers and pice.\n", "\n", "HERMIO:\n", "Go thirm. There the cause \n", "\n", "[4m 3s (300 40%) train loss: 1.5902, test_loss: 1.6298]\n", "Where on your hade, sischarty;\n", "For thou hasses of my word, if What and fone.\n", "\n", "BOTIPHOLUS OF EPHESUS:\n", "W \n", "\n", "[4m 43s (350 46%) train loss: 1.5108, test_loss: 1.6155]\n", "Whorminess your received so, good grace\n", "A trose me report your in past done crudy,\n", "Now all the fire th \n", "\n", "[5m 24s (400 53%) train loss: 1.5358, test_loss: 1.5887]\n", "Whe a great for him to keep thee.\n", "\n", "ORTING:\n", "Whiles, and none else the barge and out the laws,\n", "These tro \n", "\n", "[6m 4s (450 60%) train loss: 1.5214, test_loss: 1.5724]\n", "Where I say, the pardon.\n", "\n", "LONGAVILLE:\n", "O Duke the blows? an not be of my most king,\n", "Thou should should \n", "\n", "[6m 44s (500 66%) train loss: 1.5071, test_loss: 1.5824]\n", "Where! my lardon semons, feath\n", "That so rother, I'll resemn me.\n", "\n", "KING HENRY V:\n", "The son I am a speak to \n", "\n", "[7m 25s (550 73%) train loss: 1.4938, test_loss: 1.5677]\n", "What stay her, by my freast:\n", "The bearest as man say the fine ever ships,\n", "And her words my body, and mu \n", "\n", "[8m 5s (600 80%) train loss: 1.4951, test_loss: 1.5960]\n", "Where is the great two tempt\n", "Or good distress' all thou earth to hords.\n", "\n", "LEONTES:\n", "I will were you thou \n", "\n", "[8m 45s (650 86%) train loss: 1.4402, test_loss: 1.5782]\n", "Where then, so measure.\n", "\n", "IAGO:\n", "I'll for your dweelight of this great one fear\n", "To will not be majesty, \n", "\n", "[9m 26s (700 93%) train loss: 1.4637, test_loss: 1.5957]\n", "Whe even! 
Have stands, how they won and the\n", "peads yourself, here give me and this hungro's peace:\n", "That \n", "\n", "[10m 8s (750 100%) train loss: 1.4795, test_loss: 1.5409]\n", "Whee you him.\n", "\n", "JENTIO:\n", "You have find: to this my time we three bear\n", "That renderfeation to thy restore \n", "\n" ] } ], "source": [ "rnn = RNN(n_characters, hidden_size, n_characters, model_type=model_type, n_layers=n_layers).to(device)\n", "rnn_optimizer = torch.optim.Adam(rnn.parameters(), lr=learning_rate)\n", "criterion = nn.CrossEntropyLoss()\n", "\n", "start = time.time()\n", "all_losses = []\n", "test_losses = []\n", "loss_avg = 0\n", "test_loss_avg = 0\n", "\n", "#n_epochs=1000\n", "print(\"Training for %d epochs...\" % n_epochs)\n", "for epoch in range(1, n_epochs + 1):\n", " loss = train(rnn, *load_random_batch(train_text, chunk_len, batch_size), rnn_optimizer, criterion)\n", " loss_avg += loss\n", " \n", " test_loss = eval_test(rnn, *load_random_batch(test_text, chunk_len, batch_size))\n", " test_loss_avg += test_loss\n", "\n", " if epoch % print_every == 0:\n", " print('[%s (%d %d%%) train loss: %.4f, test_loss: %.4f]' % (time_since(start), epoch, epoch / n_epochs * 100, loss, test_loss))\n", " print(generate(rnn, 'Wh', 100, device=device), '\\n')\n", "\n", " if epoch % plot_every == 0:\n", " all_losses.append(loss_avg / plot_every)\n", " test_losses.append(test_loss_avg / plot_every)\n", " loss_avg = 0\n", " test_loss_avg = 0" ] }, { "cell_type": "code", "execution_count": 20, "metadata": { "executionInfo": { "elapsed": 234, "status": "ok", "timestamp": 1606201719786, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "UsbBglR8z12_" }, "outputs": [], "source": [ "#save network\n", "torch.save(rnn.state_dict(), './rnn_generator.pth')\n", "#torch.save(rnn.state_dict(), \"./classification_model_final.pth\")\n", "torch.save(rnn_optimizer.state_dict(), \"./rnn_generator_optimizer.pth\")\n", "\n", "# rnn.load_state_dict(torch.load(\"./rnn_generator.pth\"))\n", "# optimizer.load_state_dict(torch.load(\"./rnn_generator_optimizer.pth\"))" ] }, { "cell_type": "markdown", "metadata": { "id": "0Q7jFQzSz12_" }, "source": [ "# Plot the Training and Test Losses" ] }, { "cell_type": "code", "execution_count": 21, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 284 }, "executionInfo": { "elapsed": 421, "status": "ok", "timestamp": 1606201727491, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "kTP_C5eoz12_", "outputId": "552293de-b028-4c9f-8539-3da6f6d35394" }, "outputs": [ { "data": { "text/plain": [ "[<matplotlib.lines.Line2D at 0x7fd39023aba8>]" ] }, "execution_count": 21, "metadata": { "tags": [] }, "output_type": "execute_result" }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3deXxc5X3v8c9vtO+LJS+SZcsbNrbxggXGImyBYiCASRfICiVJ3dzQNrmhKU3ScpumNzc3SYG0KaVkaZoEEgjLLUsSSAjgODaLbLzJxjveZEuydln7zHP/OCNLtrXZHuloZr7v1+u85mya87Nf0nfOPOc5zzHnHCIiEv0CfhcgIiKRoUAXEYkRCnQRkRihQBcRiREKdBGRGJHo14ELCgpcaWmpX4cXEYlKGzZsOO6cKxxom2+BXlpaSkVFhV+HFxGJSmZ2YLBtanIREYkRCnQRkRihQBcRiREKdBGRGKFAFxGJEQp0EZEYoUAXEYkRURfoO4+18H9+sYMTnT1+lyIiMq5EXaAfqm/jP9bsY8fRZr9LEREZV6Iu0BcW5wBQWaVAFxHpL+oCfVJ2ChMyktl2pMnvUkRExpWoC3QzY35Rts7QRUROE3WBDl6zy67qFjp7gn6XIiIybkRloC8oyqYn5Nhd3ep3KSIi40ZUBvrCot4Lo2pHFxHpFZWBPi0/ncyURLYdUTu6iEivqAz0QMCYPyVbZ+giIv1EX6C/+CLMnMml6V3sONpCMOT8rkhEZFyIvkDPz4f9+ymv2UV7d5D9x3VhVEQEojHQL74YkpOZu28roDtGRUR6RV+gp6TAsmXkbdpAcmJAd4yKiIRFX6ADlJcT2FDBRRNSdIYuIhIWtYFOVxfXdVSx7UgTzunCqIhIdAb6ihUAXHpsJ80dPRxuaPe5IBER/0VnoE+ZAjNmMGv3FkAXRkVEIFoDHaC8nJx33iLBNASAiAhEeaDbsWNcntiqM3QREaI80AGub9ynrosiIkRzoC9cCJmZLD2yg5qWTmpbOv2uSETEV9Eb6ImJcOmllO7aBKgdXUQkegMdoLyc9B2VpHe1qx1dROLesIFuZiVm9qqZbTezSjP77AD7fNTMtpjZVjNbZ2aLR6fc05SXY6EQ1584qDN0EYl7IzlD7wHudc7NBy4D7jGz+aftsx+4yjl3EfBV4NHIljmIyy4D4P0Ne/SwCxGJe8MGunPuqHNuY3i+BdgBFJ+2zzrnXEN48Q1gaqQLHVBeHsyfz6KD2zlY30ZzR/eYHFZEZDw6qzZ0MysFlgJvDrHbJ4FfDvLzq82swswqamtrz+bQgysvp/jdzZgLsV3t6CISx0Yc6GaWCTwNfM45N2Bymtk1eIF+30DbnXOPOufKnHNlhYWF51LvmcrLSWpqZGbdEfVHF5G4NqJAN7MkvDB/zDn3zCD7LAK+B6xyztVFrsRhhG8wuqZ+t87QRSSujaSXiwHfB3Y45x4YZJ9pwDPAx51zuyJb4jAuuADy87nq+G62qaeLiMSxxBHscznwcWCrmW0Kr/sSMA3AOfcIcD8wAXjYy396nHNlkS93AGawYgXzt1Syt/YEHd1BUpMSxuTQIiLjybCB7pxbC9gw+3wK+FSkijpr5eVMePFFMk808+6xFpaU5PpWioiIX6L7TtFe4Xb0pVXv6sKoiMSt2Aj0Sy7BJSRQXr1LQwCISNyKjUDPyMCWLOHyml0aAkBE4lZsBDpAeTlz3tvO7qpGuoMhv6sRERlzMRXoyZ3tzKzay97aVr+rEREZczEV6ADLjuzQQF0iEpdiJ9BLSnBFRVx6dKfa0UUkLsVOoJth5eVccmynerqISFyKnUAHKC9nUt1RanfuJxRyflcjIjKmYi7QAebu38bB+jafixERGVuxFehLlxJKSfEujKodXUTiTGwFenIylJVRVvWu2tFFJO7EVqADgcsvZ+GxPew6EKEnIomIRImYC3TKy0kK9uDe3oBzujAqIvEj9gJ9xQoAZu/dQnVzp8/FiIiMndgL9IkT6Zg+g4s1lK6IxJnYC3Qg4fJylh3ZQaUCXUTiSEwGetIV76PwRCPVm7f7XYqIyJiJyUDvvcEo5e03fS5ERGTsxGagL1hAV3oGM3dvoeFEl9/ViIiMidgM9IQETiy9xGtH1w1GIhInYjPQgdQr38fc2gPs2n3Y71JERMZEzAZ62tVXkOBCtK19w+9SRETGRMwGOsuXEzIjfeNbflciIjImYjfQc3Konz6Hmbu3cKKzx+9qRERGXewGOtBxyXIuPvIuO440+l2KiMioi+lAz7zmSrI7T3Bk3Ua/SxERGXUxHeg5114JQPfatT5XIiIy+mI60G3OHJozc8l5p8LvUkRERl1MBzpmVC+8mFl7ttDZE/S7GhGRURXbgQ70LL+MmfVH2LfjgN+liIiMqpgP9NxrrwKg9tev+1yJiMjoivlAn/T+99EdSIB16/wuRURkVMV8oAcy0jlQcgH5W3RhVERiW8wHOsDxRcuYtX8HwU4NpSsisSsuAp3yctJ6Oql6fb3flYiIjJq4CPSC668GoPE3ujAqIrErLgJ9+uK5VGUXkvCmhtIVkdg1bKCbWYmZvWpm282s0sw+O8A+Zmb/YmZ7zGyLmV08OuWem6SEAHtmX8TErRrTRURi10jO0HuAe51z84HLgHvMbP5p+9wIzAlPq4F/j2iVEdC05BIKGqpxhw75XYqIyKgYNtCdc0edcxvD8y3ADqD4tN1WAT9ynjeAXDObEvFqz0PC+y4HoE7t6CISo86qDd3MSoGlwJunbSoG+p/6HubM0MfMVptZhZlV1NbWnl2l56noqstoT0yh9bdrxvS4IiJjZcSBbmaZwNPA55xzzedyMOfco865MudcWWFh4bm8xTmbV5LPlilzSHn79M8iEZHYMKJAN7MkvDB/zDn3zAC7HAFK+i1PDa8bN1KTEtg/ZxGFeyqhvd3vckREIm4kvVwM+D6wwzn3wCC7PQfcGe7tchnQ5Jw7GsE6I6J12aUkBoNQoWEARCT2jOQM/XLg48D7zWxTeLrJzD5tZp8O7/MLYB+wB/gu8JnRKff8pFzhXRhtfVXt6CISexKH28E5txawYfZxwD2RKmq0zFkwg735xeS8/jsy/S5GRCTC4uJO0V7zi7LZWHQhGRveAuf8LkdEJKLiKtCzU5PYf8Ei0poaYO9ev8sREYmouAp0gM5LLvVm9MALEYkxcRfo+ZcspTk5na7frfW7FBGRiIq7QF8wNZd3iufR/bvf+12KiEhExV+gF+WwofhC0nftgKYmv8sREYmYuAv0wqwU9s1ZhDkHb2oYABGJHXEX6AChSy4haAFdGBWRmBKXgT5rVhE7C6cT/L3a0UUkdsRloPe2o/PmmxAM+l2OiEhExGmgZ7Oh+EISWlpg+3a/yxERiYi4DPSpeWnsnrnQW1A7uojEiLgMdDMjZ8FcGrPyFOgiEjPiMtABFhTn8PaUeTgFuojEiLgN9IXFObxdNA/bswdqavwuR0TkvMVtoPdeGAVg/Xp/ixERiYC4DfQZBZnsKZlLMDFR7e
giEhPiNtATAsaskgnsLZmrM3QRiQlxG+jg3WC0btJc3NtvQ1eX3+WIiJyXuA70hcXZvDF5LtbRAZs2+V2OiMh5ietAX1CUw8aied6C2tFFJMrFdaDPmZRJQ24BTROLFOgiEvXiOtBTEhOYMzGL7TMWwu9/D875XZKIyDmL60AHrz/6moI5UFUFhw75XY6IyDmL+0BfWJzjBTqo2UVEolrcB/qComzenTiDYFqa+qOLSFSL+0C/cEo2oYQEquYuhtdfVzu6iEStuA/0jJREZhRksGbRlbB5M3z7236XJCJyTuI+0MHrj/7wvOvhttvgC1+AtWv9LklE5Kwp0IGFRdkcaeqg4d8ehRkz4Pbb4dgxv8sSETkrCnS8M3SA7W0GTz8NjY1wxx3Q0+NzZSIiI6dAx+vpArDtSBNcdBE8+iisWQNf/KLPlYmIjJwCHcjLSKY4N43KqmZvxcc+Bp/5DHzrW94Zu4hIFFCgh80vymbjwQZ6giFvxQMPwPLlcPfdsHOnv8WJiIyAAj3sg0uLOdzQziOv7/VWpKTAz3/uvf7hH0Jrq78FiogMQ4EedtNFU7hlcREP/Wa315YOUFICP/sZvPsu/Nmf6aYjERnXFOj9fHXVAgoyU/ifT2yiozvorbz2Wvinf/KC/V//1d8CRUSGoEDvJzc9mW/+ySJ217TyjV/1aze/7z645Ra4915vmF0RkXFo2EA3sx+YWY2ZbRtke46ZPW9mm82s0szujnyZY+eKOYXctWI6P/j9ftbtOe6tDATgRz+C6dO9m46qq/0tUkRkACM5Q/8hcMMQ2+8BtjvnFgNXA/9sZsnnX5p//vbGC5lZmMFf/3wzTe3d3srcXK8LY0MDfOhDuulIRMadYQPdObcGqB9qFyDLzAzIDO8b1WmXlpzAg7cvobqlk688V9m3YfFieOQReO01+PKXfatPRGQgkWhD/w5wIVAFbAU+65wLDbSjma02swozq6itrY3AoUfP4pJc/uKa2TzzzhF+ufVo34Y774RPfxq+8Q149ln/ChQROU0kAn0lsAkoApYA3zGz7IF2dM496pwrc86VFRYWRuDQo+sv3j+bRVNz+NKzW6lp7ujb8NBDcMklcNddsGuXfwWKiPQTiUC/G3jGefYA+4F5EXhf3yUlBHjg9iW0dQW57+ktuN5+6Ckp8NRTkJwMf/RHcOKEv4WKiBCZQD8IXAtgZpOAucC+CLzvuDB7YiZfvHEer+6s5adv9XuI9LRp8PjjUFkJq1frpiMR8d1Iui3+FFgPzDWzw2b2STP7tJl9OrzLV4FyM9sKvALc55w7Pnolj707V5TyvtkF/NOL2zlQ1+9s/Prr4R//0Qv2hx/2r0AREcCcT2eWZWVlrqKiwpdjn4ujTe2sfHANcyZl8eSfryAhYN6GUAhWrYKXXvKG3L3sMn8LFZGYZmYbnHNlA23TnaIjNCUnja/etpANBxr6BvCCvpuOSkrgj/8Yamr8K1JE4poC/SzcuriIDyyawkO/2UVlVVPfhrw876ajujr48Id105GI+EKBfhbMjP9920Ly0pNPHcALYMkSrx39t7+Fv/97/4oUkbilQD9LuenJfOOPF7GrupV/fvm0B1/cfbc3zO7Xvw7//d/+FCgicUuBfg6unjuRj102je+t3c8b++pO3fgv/wLLlnl3lO7e7U+BIhKXFOjn6Es3XUjphAzufXIzzR3dfRtSU7329MRE3XQkImNKgX6O0pMT+efbF3O0qZ2vPLf91I3Tp3t907dt88Z90U1HIjIGFOjn4eJpedxzzWye3niYX207durGlSvhH/4BfvITuP9+6OrypUYRiR8K9PP0V9fOYWFxtjeAV0vHqRv/7u/gIx/xHmF30UXwi1/4U6SIxAUF+nlKSgjw4O1LaO3s4YtPb+WUO28DAXjssb4g/8AH4OabdbFUREaFAj0C5kzK4r4b5vHKuzU88fahM3e48UbYuhW++U1veIAFC7znlLa0jH2xIhKzFOgRcnd5KeWzJvDVF7ZzsK7tzB2Sk+Gv/9obP/2jH/UekHHBBd6wAaEBnwciInJWFOgREggY3/yTxQTM+PyTmwiGBunZMnky/Od/whtveEPw3nUXXH45vP322BYsIjFHgR5BxblpfGXVAioONPDommGGhF++HNav98J9/35v+ZOfhOrqsSlWRGKOAj3CPri0mJsumswDv97J9qrmoXcOBOBP/9Rrhrn3Xvjxj71mmAcfhO7uoX9WROQ0CvQI8wbwuojc9GQ+/+QmOnuCw/9QdrZ3wXTrVigvh89/HhYtgpdfHv2CRSRmKNBHQV6GN4DXu8daeODls3iI9Ny5XhfH55/3huBdudJ7eMbevcP/rIjEPQX6KLlm7kQ+snwaj/5uH+v31g3/A73MvL7q27Z5ozb+9rcwfz58+cvQ2jp6BYtI1FOgj6Ivhwfw+sQP3+bFLUfP7odTUry+6jt3wh13wNe+5p3BP/64xoYRkQEp0EdRRkoiT/75ChYUZXPP4xv51ks7CQ3WnXEwRUVeX/V162DKFK8P+xVXwMaNo1O0iEQtBfooK8xK4fE/u4wPX1rCd17dw+ofV9DScQ49WFasgLfegu9/3+sVU1YG110H3/0uHD8e+cJFJOoo0MdAcmKAr33wIr66agGv7azlgw+vY1/tObSHBwLwiU94Y8Hcfz8cPAirV3s3K91wg9envbEx8v8AEYkK5nxqjy0rK3MVFRW+HNtP6/fWcc/jG+kOhvjXDy/l6rkTz/3NnINNm+CJJ7zpvfcgKcnrHXPHHXDrrV6XSBGJGWa2wTlXNuA2BfrYO1Tfxuofb2DnsWbuu2Eeq6+ciZmd35s65w0f8MQT8OSTcPiwd2H1ppu8cL/5ZsjIiMw/QER8o0Afh9q6evjCz7fw4tajrFpSxP/9o0WkJiVE5s1DIW+smCeegJ//HI4ehbQ0L9TvuMML+bS0yBxLRMaUAn2ccs7x8Gt7+dbLO1lYlMN/fHwZRbkRDtpgENau9cL9qaegthYyM73mmDvu8JpnUlIie0wRGTUK9HHuN9ur+dwTm0hNCvDIx5ZRVpo/Ogfq6YHXXvPC/ZlnoL4ecnLgttu8cL/uOq8NXkTGLQV6FNhT08Kn/quCI43t/OOqhXz40mmje8DubnjlFS/cn30WmpogP98bauC66+Cqq6C4eHRrEJGzpkCPEk1t3fzlz95hza5a7lwxnb+/eT5JCWPQs7Sz0xsI7Ikn4IUXvHAHmD3bC/arr/ZeS0pGvxYRGZICPYoEQ45v/Opd/mPNPpbPyOfhj17MhMwxbOMOBmHzZq9p5vXXvUfm9fZtnzGjL9yvugpKS8euLhEBFOhR6dl3DnPf01spzEzh0TuXsaAox59CgkFvWN/XX++b6uu9bdOnn3oGP2OGN7iYiIwaBXqU2nK4kdU/2kBTezff/JNF3LyoyO+SvC6RlZVesPeexfcOPTB1al+4X301zJqlgBeJMAV6FKtp6eB//GQjGw40cM81s7j3D+YSCIyjkHQOduzoC/fXXoOaGm9bUVFf88wVV3ijRSZEqK+9SJxSoEe5z
p4g9/+/Sp6oOMS18yby4IeWkJ06TrsXOucN+dv/DP5oeOjgzExYutQbWKx3mj3bG6NGREZEgR4DnHP8+I0DfOX57ZROSOe7d5YxszDT77KG5xzs2eM9ELuiwpveeQc6Orzt2dmwbNmpIa+2eJFBKdBjyPq9dXzmsQ30hByfuHwGty4pYlY0BHt/PT2wfXtfwFdUeD1rurq87Xl5XrD3D/pp0xTyIijQY86h+ja+9OxW1u45jnMwf0o2ty4p4pbFRRRHeuiAsdLV5V1s7R/yW7Z44Q9QUHDqWXxZmddGr5CXOKNAj1HVzR28sOUoz22uYvMhr6942fQ8bl1SxI0Lp1CYFeVjtHR0eF0m+4d8ZaXXlRK8ceBLS70z+t4pN3fo5awsfQhIVDuvQDezHwA3AzXOuYWD7HM18BCQBBx3zl01XFEK9Mg6WNfG81uqeG5TFTurWwgYXD67gFsWF7FywWRy0sbpRdSz1dbmNc/0BnxVFTQ09E2NjUM/czUQGD708/K8YRAKCmDChL5Jg5jJOHC+gX4l0Ar8aKBAN7NcYB1wg3PuoJlNdM7VDFeUAn307DzWwvObq3hucxUH69tITghw1dxCbl1cxHUXTiItOYa7DoZC0NJyasCfHvhDLXcP8XjAjAwv2E8P+qHWZWbqG4FE1Hk3uZhZKfDCIIH+GaDIOfd3Z1OUAn30OefYfLiJ5zdX8cKWKqqbO0lPTuAP5k/ilkVFXHlBIcmJ6jJ4knPQ3u7dCVtfD3V13k1TdXVnTv3XNzQM/p5JSaeGfG6uN+XkDDzffzknR6NfyhlGO9B7m1oWAFnAt51zPxrkfVYDqwGmTZu27MCBAyP8J8j5CoYcb+2v57nNVfxy21Ea27rJSUvixoWTuXVxEctnTiBhPN2wFE2CQS/URxL+TU3e1NjovQ7395eePnDY95+fONEbhqG01BtATR8CMW20A/07QBlwLZAGrAc+4JzbNdR76gzdP93BEGt3H+e5zVW8XHmME11BCrNS+MBFU7hlcRFLS3LH192osSoUgtZWL9x7p96wH2q+//LpTUSBgDfscWlpX8j3n0pKIDl5zP+pEjlDBXpiBN7/MFDnnDsBnDCzNcBiYMhAF/8kJQS4Zt5Erpk3kfauIK/urOG5TVU8/tZBfrjuPSZlp3D9/MmsXDCZ5TPzx2YI33gUCHg3VmVne/3sz1ZvE9GxY3DggPeQ8Pfe65v/3e/g8ce9D45eZl53z/4h3z/4p03Txd8oFokz9AuB7wArgWTgLeBDzrltQ72nztDHn+aObl7ZUc1L26p5bVcNHd0hctKSuPbCiaxcMJkr5xTG9gXVWNTdDUeOnBr0/adDh/q6gfYqKvJCPi/Pe/Zsaur5Tae/R3q6LhSfh/Pt5fJT4GqgAKgG/hdemznOuUfC+3wBuBsIAd9zzj00XFEK9PGtvSvImt21vFR5jN9sr6a5o4e0pASuuqCQlQsn8f55k2KnK2Q86+nxun6eHvQHDkBzs3cvQEeH902g/3z/s/6z1fvNJCen7/X0+aG29c4nRqKBIfroxiI5L93BEG/uq+elymO8vP0Y1c2dJAaMFbMmsHLBZK6fP4mJ2al+lyljqaenL+D7T/2Df6Cprc3rVtrU5H1g9F4kPn15qO6jvdLTT/1gyMrqmzIzh14+fV1aWtR8a1CgS8SEQo5Nhxt5qfIYL207xnt1bZjB0pJcbljotbtPn5Dhd5kSzZzzHos4UNAPNd/S4k2trX3znZ0jO2YgMLLgH+qDov/8KDYrKdBlVDjn2FXd6oV75TEqq5oBmDc5i+sXTGblgknMn5KNRcmZj8Sg7u5TA36g0B9s3UDLvaOEDsds6A+CVavg9tvP6Z+kQJcxcai+zWuWqazm7QP1OAcl+WmsnD+Z6+ZPYum0XFISdVFVolj/D4j+gT/Uh8FA21avhr/5m3MqQYEuY662pZPf7Kjmpcpj/H7PcbqDjtSkAGXT81kxawLlsyZwUXEOieoSKXJWFOjiq+aObt7YW8e6vXWs31vHzuoWADJTElk+wwv4FbMmcOHkbN3QJDKM0b6xSGRI2alJXL9gMtcvmAzA8dZO3tjXF/CvvOuN5ZabnsSKmd7Z+4pZE5hVmKn2d5GzoDN08d3RpnbWh8/g1+05TlWTd+GpMCuF8nDzTPmsAkry032uVMR/anKRqOGc42B928mz93V76zje6nU9K85N88J99gRWzCxgco76vkv8UaBL1HLOsaem9WTAr99XR1O7d9PJzIIMFhTnMC0/jZK8dKblp1OSn86UnFRdbJWYpUCXmBEKObYfbWb93jre2FfH7ppWqhrb6Qn1/R4nBoyi3LRwwKdRku+F/bT8dEry0slNT1LbvEQtBbrEtJ5giKNNHRyqb+NQQxsH69s4WN/uLde3UXei65T9s1ISKQmHfW/QTw2/FuemkZqkvvIyfqmXi8S0xIRAOKAHvmja2tnD4YY2DtZ5Ye8Ffzt7a0/w2s5aOnv6Bpoyg8nZqcwqzGTOpEwumJTFBZMymTMpi+xUDUYm45sCXWJeZkoi8yZnM29y9hnbQiHH8dbO8Fl9G4fq2zlQd4LdNa387K1DtHf3DS07OTv1jJCfMzGTLAW9jBMKdIlrgYAxMTuVidmplJXmn7ItFHIcbmhnV3ULu2pa2F3dyq7qFn7yxoFTzuqLclKZ0y/kLwgHfUaK/rxkbOk3TmQQgYAxbUI60yakc938SSfXB0OOQ/Vt7KpuYXeNF/K7qltZv6+Orn5BX5ybxgXhM/o5k7KYPTGTqXlpTMhI1kVZGRUKdJGzlBAwSgsyKC3I4PoFfet7giEO1rexq7qV3dUt7KrxXteGx7LplZIYoDg3jaLcNIpyU8OvaUwNv07OSdWFWTknCnSRCElMCDCzMJOZhZncsHDyyfU9wRDv1bWxt9brYulNHRxpbOe1nbXUtJw5ZndBZgrF4bDvC//e+VTydZYvA1Cgi4yyxIQAsydmMnti5oDbO3uCHGvyAr6qsYOqxnaONLRT1dTOzuoWXt3pPd+1v9SkwMmAn5iVSl56EnkZyeSlJ5OXnkRuejJ5GUnkpSeTm56kYYvjhAJdxGcpiQlMn5Ax6JOenHM0tHVT1djO4Yb2vrP8pnaONHawp+Y4DW1dZ4R+fxnJCaeEfP/gz8/wQv/k+vA+6ckJ+hYQZRToIuOcmZGf4QXvwuKcQffr6A7S0NZFw4luGtu6qG/roqGtm8YT3mtDW1d46uZgfRsNJ7po7ugZ9P1SEgMUZKZQkJVCYWYyBZkpTAi/9k6FWd5yTpruvh0PFOgiMSI1KYEpOWlMyUkb8c/0BEM0tnsfAA1t3dSf6Do5X9fayfHWLo63dnKksYPNh5uoP9FFMHTm3eWJATsj7Auykik87UOgMCuF/PRkjXs/ShToInEsMSFwMmxHIhRyNLR1nQz64/1C/3iLt1x3oovd1S0cb+2iK3hmM1ByQoApuakU5fRe6O3r6dPb8yc9WdF0LvS/JiIjFggYEzJTmJCZwlyyhtzXOUdzR8/JsK870UVtSydVTX0Xf9ftPU51cwenn/Tn
picNGvjFuWkUZqWQoLP8MyjQRWRUmBk5aUnkpCUxq3DgHj4A3cEQ1c0dHG0K9/Dp17XzcEMbb+6vo+W0tv7EgDEpO/VkN86CzBTMwDlw9L46escedM6dsb53md7lAbYlBCAtKYHUpARSkhLC8wFS+833rfeWT84nJpCaHCA5ITBm1xcU6CLiq6SEAFPz0pmaN/gTqZo7ujna2Bf4R5v6+vJXHGigrtUbUdMMDO/DxAD6L5+2zcI79K0H49T9ekIhOrpDdHQHTxnu4WyYQWpiAmnJCaQmeh8GH1k+jU9dMfOc3m8oCnQRGfeyU5PInpzE3MlDN/OMplDI0RUM0d4VpKMnSEd3//neKXTytT28rrM7GJ73trV3B0d8zeJsKdBFREYgEDBSAwnjelgGPadLRCRGKNBFRGKEAl1EJEYo0EVEYoQCXUQkRijQRURihAJdRCRGKJ/ik3QAAARmSURBVNBFRGKEOXfmUJhjcmCzWuDAOf54AXA8guWMtmiqN5pqheiqN5pqheiqN5pqhfOrd7pzrnCgDb4F+vkwswrnXJnfdYxUNNUbTbVCdNUbTbVCdNUbTbXC6NWrJhcRkRihQBcRiRHRGuiP+l3AWYqmeqOpVoiueqOpVoiueqOpVhileqOyDV1ERM4UrWfoIiJyGgW6iEiMiLpAN7MbzGynme0xs7/1u57BmFmJmb1qZtvNrNLMPut3TSNhZglm9o6ZveB3LUMxs1wze8rM3jWzHWa2wu+ahmJm/zP8e7DNzH5qZql+19Sfmf3AzGrMbFu/dflm9msz2x1+zfOzxl6D1PrN8O/CFjN71sxy/ayxv4Hq7bftXjNzZlYQiWNFVaCbWQLwb8CNwHzgw2Y239+qBtUD3Oucmw9cBtwzjmvt77PADr+LGIFvA79yzs0DFjOOazazYuCvgDLn3EIgAfiQv1Wd4YfADaet+1vgFefcHOCV8PJ48EPOrPXXwELn3CJgF/DFsS5qCD/kzHoxsxLgeuBgpA4UVYEOXArscc7tc851AT8DVvlc04Ccc0edcxvD8y14gVPsb1VDM7OpwAeA7/ldy1DMLAe4Evg+gHOuyznX6G9Vw0oE0swsEUgHqnyu5xTOuTVA/WmrVwH/FZ7/L+C2MS1qEAPV6px72TnXE158A5g65oUNYpD/W4AHgb8BItYzJdoCvRg41G/5MOM8JAHMrBRYCrzpbyXDegjvF+zcHm8+dmYAtcB/hpuHvmdmGX4XNRjn3BHgW3hnYkeBJufcy/5WNSKTnHNHw/PHgEl+FnMWPgH80u8ihmJmq4AjzrnNkXzfaAv0qGNmmcDTwOecc81+1zMYM7sZqHHObfC7lhFIBC4G/t05txQ4wfhpDjhDuO15Fd4HURGQYWYf87eqs+O8/s3jvo+zmX0Zr7nzMb9rGYyZpQNfAu6P9HtHW6AfAUr6LU8NrxuXzCwJL8wfc84943c9w7gcuNXM3sNrynq/mf3E35IGdRg47Jzr/cbzFF7Aj1fXAfudc7XOuW7gGaDc55pGotrMpgCEX2t8rmdIZvanwM3AR934vsFmFt6H++bw39tUYKOZTT7fN462QH8bmGNmM8wsGe/C0nM+1zQgMzO8Nt4dzrkH/K5nOM65LzrnpjrnSvH+X3/rnBuXZ5HOuWPAITObG151LbDdx5KGcxC4zMzSw78X1zKOL+L28xxwV3j+LuC/faxlSGZ2A15z4a3OuTa/6xmKc26rc26ic640/Pd2GLg4/Ht9XqIq0MMXPf4CeAnvD+JJ51ylv1UN6nLg43hnupvC001+FxVD/hJ4zMy2AEuAr/lcz6DC3ySeAjYCW/H+7sbVrepm9lNgPTDXzA6b2SeBrwN/YGa78b5lfN3PGnsNUut3gCzg1+G/tUd8LbKfQeodnWON728mIiIyUlF1hi4iIoNToIuIxAgFuohIjFCgi4jECAW6iEiMUKCLiMQIBbqISIz4/2Tvb569jPFKAAAAAElFTkSuQmCC\n", "text/plain": [ "<Figure size 432x288 with 1 Axes>" ] }, "metadata": { "needs_background": "light", "tags": [] }, "output_type": "display_data" } ], "source": [ "import matplotlib.pyplot as plt\n", "import matplotlib.ticker as ticker\n", "\n", "plt.figure()\n", "plt.plot(all_losses)\n", "plt.plot(test_losses, color='r')" ] }, { "cell_type": "markdown", "metadata": { "id": "v_aPt1LMz12_" }, "source": [ "# Evaluate text generation\n", "\n", "Check what the outputted text looks like" ] }, { "cell_type": "code", "execution_count": 22, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 1279, "status": "ok", "timestamp": 1606201746205, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "-gjbCojvz12_", "outputId": "03f5f558-ebea-4cf9-9613-4f15552bb322" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "The will benest his mans me,\n", "Hame that will not be a good the provity.\n", "\n", "TRIA:\n", "He dew that with merry a man for the strange.\n", "I then to the rash, so must came of the chamuness, and that I'll treason dost\n", "the heaven! how there. The run of these thou instress\n", "Which wast true come come on my tongue.\n", "\n", "KATHARINE:\n", "My lord, the crown English am a thanks, and I\n", "have you weep you galls. 
O, I wast thy change;\n", "And go turn of my love to the master.'\n", "\n", "ARCHBISHOP OF YORK:\n", "I'll find by dogs, noble.\n", "\n", "SAMLET:\n", "The matter were be true and treason\n", "Free supples'd best the soldiered.\n", "\n", "TITUS ANDRONICUS:\n", "I ever a bood;\n", "But one a stand have a court in thee: which man as thy break on my bed\n", "'As oath a women; there and shake me; and whencul, comes the house\n", "For them he wall; and no live away. Fies, sir.\n", "\n", "TRANIO:\n", "The congrain upon a dream, for he did pluck'd\n", "That they should cheen gate you gate forth in dicess the\n", "proclay read sative the more and like me weld my life.\n", "\n", "PLETEY:\n", "There that I never my reme, bay hath be \n" ] } ], "source": [ "print(evaluate(rnn, prime_str='Th', predict_len=1000))" ] }, { "cell_type": "markdown", "metadata": { "id": "6mwFrJahz12_" }, "source": [ "# Hyperparameter Tuning\n", "\n", "Some things you should try to improve your network performance are:\n", "- Different RNN types. Switch the basic RNN network in your model to a GRU and LSTM to compare all three.\n", "- Try adding 1 or two more layers\n", "- Increase the hidden layer size\n", "- Changing the learning rate\n", "\n", "**TODO:** Try changing the RNN type and hyperparameters. Record your results." ] }, { "cell_type": "code", "execution_count": 26, "metadata": { "executionInfo": { "elapsed": 240, "status": "ok", "timestamp": 1606202277796, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "iWWxKfmez12_" }, "outputs": [], "source": [ "batch_size = 100\n", "n_epochs = 2000\n", "hidden_size = 150\n", "n_layers =2\n", "learning_rate = 0.01 #0.1 #0.01 #0.005\n", "#model_type = 'gru'\n", "# model_type = 'rnn'\n", "model_type = 'lstm'\n", "print_every = 50\n", "plot_every = 50" ] }, { "cell_type": "code", "execution_count": 27, "metadata": { "executionInfo": { "elapsed": 235, "status": "ok", "timestamp": 1606202280270, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "Bze1BX_Iz13A" }, "outputs": [], "source": [ "def train(rnn, input, target, optimizer, criterion):\n", " \"\"\"\n", " Inputs:\n", " - rnn: model\n", " - input: input character data tensor of shape (batch_size, chunk_len)\n", " - target: target character data tensor of shape (batch_size, chunk_len)\n", " - optimizer: rnn model optimizer\n", " - criterion: loss function\n", " \n", " Returns:\n", " - loss: computed loss value as python float\n", " \"\"\"\n", " loss =0\n", " \n", " ####################################\n", " # YOUR CODE HERE #\n", " ####################################\n", " batch_size=input.size()[0] \n", " chunk_size=input.size()[1] \n", " #print(chunk_size)\n", " rnn_hidden = rnn.init_hidden(batch_size, device=device) # initialize a hidden layer representation using your RNN's init_hidden function\n", " rnn.zero_grad() # set the model gradients to zero\n", " \n", " for x in range(chunk_size): # loop over each time step (character) in the input tensor.\n", " #print(input[:,x])\n", " rnn_out,rnn_hidden=rnn(input[:,x], rnn_hidden ) #each time step compute the output of the of the RNN\n", " #print(\"size of rnn output\", rnn_out.size())\n", " #print(\"size of traget\", target[:,x].size())\n", " #rnn_out_new=rnn_out.view(batch_size, -1)\n", " #print(\"size of rnn output\", rnn_out.size())\n", " #print(\"size of traget\", target[:,x].size())\n", " loss+=criterion(rnn_out,target[:,x]) # compute the loss over 
the output and the corresponding ground truth time step in target.\n", " \n", " loss=loss/chunk_size#loss should be averaged over all time steps.\n", " \n", " loss.backward() #call backward on the averaged loss \n", " optimizer.step() # take an optimizer step.\n", "\n", " return loss" ] }, { "cell_type": "code", "execution_count": 28, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 1725154, "status": "ok", "timestamp": 1606204008444, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "NddmE2mrz13A", "outputId": "af913c78-7ab3-4355-e642-5420179fbf6e" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "RNN(\n", " (embedding_object): Embedding(100, 150)\n", " (dn_object): LSTM(150, 150, num_layers=2)\n", " (linear): Linear(in_features=150, out_features=100, bias=True)\n", ")\n", "Training for 2000 epochs...\n", "[0m 44s (50 2%) train loss: 2.2568, test_loss: 2.2478]\n", "Whar unthe dor\n", "mistass the some the! Tarler I moovoby rofod. Whear are citinotarlon me,\n", "My tnest hom.\n", " \n", "\n", "[1m 28s (100 5%) train loss: 1.9382, test_loss: 1.9670]\n", "Whund to a frovey for ox sock withich and for hife, lown,\n", "On Worcham, her word?\n", "\n", "ROT:\n", "Ay of hersince s \n", "\n", "[2m 11s (150 7%) train loss: 1.7706, test_loss: 1.8336]\n", "What herely evere your are to\n", "lathe this marring, as will mad,\n", "Nor do with from mift\n", "strave tale thing \n", "\n", "[2m 54s (200 10%) train loss: 1.6878, test_loss: 1.7490]\n", "What be use as the grace of the base can prince,\n", "His that I once\n", "grow, do the enemperion, she more to \n", "\n", "[3m 37s (250 12%) train loss: 1.6231, test_loss: 1.7052]\n", "Where him are you canst\n", "And she wront do great yey and eathers\n", "Your abdess it thousand out and not the \n", "\n", "[4m 21s (300 15%) train loss: 1.5808, test_loss: 1.6612]\n", "Who to loves, not.\n", "\n", "TITUS ANDRONICUS:\n", "There's it hered to me lovesion now,\n", "He well farewell, for so th \n", "\n", "[5m 4s (350 17%) train loss: 1.5867, test_loss: 1.6273]\n", "Wh's guard threate she\n", "have ever will be love of the seeedness.\n", "Nay, and my send you are treason!\n", "What \n", "\n", "[5m 49s (400 20%) train loss: 1.5436, test_loss: 1.6458]\n", "Where is did some atten in it shall too.\n", "They find a man now not his failly\n", "to be shupan?\n", "Or free succ \n", "\n", "[6m 33s (450 22%) train loss: 1.5653, test_loss: 1.6532]\n", "When was this before, and in his both is a; her.\n", "\n", "WARWICK:\n", "That with my heart, Rome and a fure to your \n", "\n", "[7m 16s (500 25%) train loss: 1.4941, test_loss: 1.5937]\n", "What is dead. 
O virtue to doing\n", "I say and it to the saulty,\n", "If like allation go to it, no supple\n", "As my \n", "\n", "[8m 1s (550 27%) train loss: 1.4899, test_loss: 1.5902]\n", "Where you'ld find so eniin enemy his\n", "served than heart of the lady: and make so, thousand thine\n", "And tr \n", "\n", "[8m 43s (600 30%) train loss: 1.4549, test_loss: 1.5888]\n", "Who we are it is my good tarried.\n", "\n", "TRANIO:\n", "Where, if she satirely hand that he vining being-drawgatien \n", "\n", "[9m 25s (650 32%) train loss: 1.4372, test_loss: 1.5399]\n", "What, the king too lost,--and this trift you of them:\n", "Where she is an honest by thy will will boy and \n", "\n", "[10m 8s (700 35%) train loss: 1.4671, test_loss: 1.5415]\n", "When I such ere not be feuse on taking knot\n", "the tontune\n", "The see again, gentle whereoven his set them l \n", "\n", "[10m 51s (750 37%) train loss: 1.4236, test_loss: 1.5733]\n", "Why which is a good thrush our sent your bloody should\n", "caby, sir: I am where with all us.\n", "\n", "LADY MALCUL \n", "\n", "[11m 32s (800 40%) train loss: 1.4346, test_loss: 1.5570]\n", "What will be bring to him.'\n", "\n", "IAGO:\n", "Degan! Master, to beat dishdient\n", "for thine take of the tatures.\n", "\n", "CO \n", "\n", "[12m 15s (850 42%) train loss: 1.4318, test_loss: 1.5636]\n", "What can find it yet come:\n", "Then leave the sear is and of law:\n", "There as the Forth! I might above and do \n", "\n", "[12m 59s (900 45%) train loss: 1.4155, test_loss: 1.5372]\n", "Why, to eat his upon this honour,\n", "They day, not to be--\n", "What, for you welcome, I strench but the forme \n", "\n", "[13m 43s (950 47%) train loss: 1.4170, test_loss: 1.5296]\n", "Who stand singly lord to Talbot?\n", "\n", "HAMLET:\n", "Well, my lord, granding shall I think well as their tender o \n", "\n", "[14m 27s (1000 50%) train loss: 1.4236, test_loss: 1.5353]\n", "Who say her presently fair to you did deed\n", "Befear my kind of gentlemen, I muddige,\n", "be my hands?\n", "\n", "DAUPH \n", "\n", "[15m 9s (1050 52%) train loss: 1.4151, test_loss: 1.5077]\n", "Who the keep; that we were I go to makes me home and done\n", "more danger well.\n", "\n", "TIMON:\n", "And I have dint yo \n", "\n", "[15m 52s (1100 55%) train loss: 1.3968, test_loss: 1.5172]\n", "What cannot look'st with wars such a prison's\n", "hang me like a thing like a very night at mad?\n", "\n", "OTHELLO: \n", "\n", "[16m 35s (1150 57%) train loss: 1.4115, test_loss: 1.5110]\n", "Who like at me a man hell; shes in the mother:\n", "If you say I have and the plead.\n", "\n", "ANTIPHOLUS OF SYRACUS \n", "\n", "[17m 19s (1200 60%) train loss: 1.3564, test_loss: 1.5019]\n", "What conclude as thou hadst strong,\n", "And tatfore should make the good; therefore I oditing the place\n", "I \n", "\n", "[18m 2s (1250 62%) train loss: 1.4141, test_loss: 1.5343]\n", "What find Caesar'd accused stains, become the great that serves\n", "and my father's faith, and all begun a \n", "\n", "[18m 45s (1300 65%) train loss: 1.3970, test_loss: 1.4959]\n", "What we is not in Christian!\n", "\n", "MISTRESS OF AUMERLE:\n", "Good my lord, my better too littlemen,\n", "By surmen's \n", "\n", "[19m 29s (1350 67%) train loss: 1.3997, test_loss: 1.5177]\n", "Where is he counselling man. 
If I would not news\n", "Of our head and a moon in the crown, on the gods\n", "With \n", "\n", "[20m 12s (1400 70%) train loss: 1.4090, test_loss: 1.4994]\n", "Who show when nothing have hears and myself to attain\n", "and the speakers given our head, that you, sir,\n", " \n", "\n", "[20m 54s (1450 72%) train loss: 1.3759, test_loss: 1.5362]\n", "When he is stand, well.\n", "\n", "SICINIUS:\n", "I do fear for the spirits from his likes on thy service,\n", "And to be \n", "\n", "[21m 38s (1500 75%) train loss: 1.3725, test_loss: 1.5049]\n", "What watch that come the fair way to the action\n", "To happose ancient make a courage\n", "Before this most ans \n", "\n", "[22m 20s (1550 77%) train loss: 1.3601, test_loss: 1.5156]\n", "Which seeks should seem follow'd to be all to draw in\n", "that the match'd of the best in my private.\n", "\n", "BOT \n", "\n", "[23m 2s (1600 80%) train loss: 1.3634, test_loss: 1.4994]\n", "Which whose mask it were breatness: I will see him practise.\n", "But we have dead, unpose for me.\n", "\n", "Clown:\n", " \n", "\n", "[23m 45s (1650 82%) train loss: 1.3509, test_loss: 1.5462]\n", "What a pardon me with our thalp,\n", "And he shall not needs have wives to so honey; and\n", "you may not threat \n", "\n", "[24m 28s (1700 85%) train loss: 1.3869, test_loss: 1.5105]\n", "What are gives it will not could warrant man;\n", "My bones honest hand, sir, to did I plague your own valo \n", "\n", "[25m 11s (1750 87%) train loss: 1.3838, test_loss: 1.5011]\n", "What we have you no more, if you\n", "through your life.\n", "\n", "EMILIA:\n", "Well, I pray him, and the purposes by the \n", "\n", "[25m 54s (1800 90%) train loss: 1.3506, test_loss: 1.4743]\n", "When he grace I see your\n", "good and offend it.\n", "\n", "POLIXENES:\n", "Is it would ask! you were.\n", "\n", "First Lord:\n", "But w \n", "\n", "[26m 36s (1850 92%) train loss: 1.3648, test_loss: 1.4971]\n", "Who is not smiles!\n", "\n", "SICINIUS:\n", "Yea, so your rain?\n", "\n", "FALSTAFF:\n", "My great loss in her, must not they are my \n", "\n", "[27m 19s (1900 95%) train loss: 1.3581, test_loss: 1.4802]\n", "What we thirty? 
not what I am that the spirits and\n", "let me found them we may hearts,\n", "But 'Helice hath w \n", "\n", "[28m 2s (1950 97%) train loss: 1.3483, test_loss: 1.4819]\n", "Why spirit the gove a shepherd.\n", "\n", "CLIFFORD:\n", "What men but I say the hand and my blood,\n", "But tell me alrea \n", "\n", "[28m 44s (2000 100%) train loss: 1.3203, test_loss: 1.4649]\n", "What's, madam; stand is like a courtier:\n", "This valiant charm and interpression, sir,\n", "The mark'd his bor \n", "\n" ] } ], "source": [ "rnn = RNN(n_characters, hidden_size, n_characters, model_type=model_type, n_layers=n_layers).to(device)\n", "rnn_optimizer = torch.optim.Adam(rnn.parameters(), lr=learning_rate)\n", "criterion = nn.CrossEntropyLoss()\n", "\n", "start = time.time()\n", "all_losses = []\n", "test_losses = []\n", "loss_avg = 0\n", "test_loss_avg = 0\n", "print(rnn)\n", "\n", "#n_epochs=1000\n", "print(\"Training for %d epochs...\" % n_epochs)\n", "for epoch in range(1, n_epochs + 1):\n", " loss = train(rnn, *load_random_batch(train_text, chunk_len, batch_size), rnn_optimizer, criterion)\n", " loss_avg += loss\n", " \n", " test_loss = eval_test(rnn, *load_random_batch(test_text, chunk_len, batch_size))\n", " test_loss_avg += test_loss\n", "\n", " if epoch % print_every == 0:\n", " print('[%s (%d %d%%) train loss: %.4f, test_loss: %.4f]' % (time_since(start), epoch, epoch / n_epochs * 100, loss, test_loss))\n", " print(generate(rnn, 'Wh', 100, device=device), '\\n')\n", "\n", " if epoch % plot_every == 0:\n", " all_losses.append(loss_avg / plot_every)\n", " test_losses.append(test_loss_avg / plot_every)\n", " loss_avg = 0\n", " test_loss_avg = 0" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "K_pzABtVz13A" }, "outputs": [], "source": [ "# save network\n", "#torch.save(rnn.state_dict(), './rnn_generator.pth')" ] }, { "cell_type": "code", "execution_count": 29, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 284 }, "executionInfo": { "elapsed": 559, "status": "ok", "timestamp": 1606204009019, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "lZoONrXPz13A", "outputId": "8909afea-ec15-4f19-edac-609438cbf3eb" }, "outputs": [ { "data": { "text/plain": [ "[<matplotlib.lines.Line2D at 0x7fd388607828>]" ] }, "execution_count": 29, "metadata": { "tags": [] }, "output_type": "execute_result" }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3deXxc5X3v8c9vFu37ZhvL8orBrDaILRAgkLKmzdK+bvZma0hS2hc0uWlCmtz03vSmzW1LSEIS4kBKFkI2yAZJCmEJIYCJ7Bi8YTDesC1bkmVJ1q6Zee4fz4wlGcmS5ZGOZub7fr3O65yZeTTz47zwd8485znPMeccIiKS+UJBFyAiIumhQBcRyRIKdBGRLKFAFxHJEgp0EZEsEQnqg2tqatyiRYuC+ngRkYy0du3aNudc7VivBRboixYtoqmpKaiPFxHJSGa2a7zX1OUiIpIlFOgiIllCgS4ikiUU6CIiWUKBLiKSJRToIiJZQoEuIpIlMi7QX9jfxX/891YO9QwGXYqIyKyScYG+s62H2x/bxr7OvqBLERGZVTIu0CuL8gDo6B0KuBIRkdllwkA3swVm9piZbTazTWZ20xhtys3sl2b2XLLN+6anXKgs9oHeri4XEZFRJjOXSwz4mHNunZmVAmvN7GHn3OYRbW4ENjvn/tzMaoGtZnaPcy7tqVtRFAWgo1eBLiIy0oRH6M65ZufcuuT2YWALMP/oZkCpmRlQArTjvwjSrqLQH6EfUpeLiMgox9WHbmaLgFXAmqNeuh1YAewDNgA3OecSY/z9DWbWZGZNra2tUyo4r62F63c20d3eMaW/FxHJVpMOdDMrAe4DbnbOdR318tXAeuAkYCVwu5mVHf0ezrnVzrlG51xjbe2Y0/lO7Ikn+OoP/5nwrnFnkBQRyUmTCnQzi+LD/B7n3P1jNHkfcL/ztgE7gFPTV+YIyS+CREvLtLy9iEimmswoFwPuArY4524dp9lu4Mpk+znAKcD2dBU5SjLQw21t0/L2IiKZajKjXC4G3g1sMLP1yec+BTQAOOfuAD4H3G1mGwADPuGcm57ErakBINp+cFreXkQkU00Y6M65J/Ehfaw2+4Cr0lXUMVVXA5DX0T4jHycikiky7kpRolH6S8spOXyIofirBtKIiOSszAt0YKCyiureLl3+LyIyQkYGeqyqhqreTl0tKiIyQkYGuquupqq3U1eLioiMkJGBHqqro6qvi0M6QhcROWIywxZnncjcOkr7uujoGQi6FBGRWSMjj9AL5s0hmojTfUBj0UVEUjIy0KNz6gCI7T8QcCUiIrNHRga61SUDXfO5iIgckZGBnprPxbVqPhcRkZSMDvRw29TmVBcRyUaZGeiaoEtE5FUyM9CLihjML6Cg81DQlYiIzBqZGehAX3kVxV2HcM4FXYqIyKyQsYE+WFVNZU8nXf3Tci9qEZGMk7GBHq+qpqpPE3SJiKRkbKC7mhqqerto71Ggi4hABge6n6CrU3Oii4gkTeYm0QvM7DEz22xmm8zspnHaXW5m65Ntfpf+UkeLzp1D0dAAXQc7p/ujREQywmRmW4wBH3POrTOzUmCtmT3snNucamBmFcDXgGucc7vNrG6a6j2iYJ7/iP7mA8DJ0/1xIiKz3oRH6M65ZufcuuT2YWALMP+oZu8A7nfO7U62m/ZJVgrmzQVg8IAm6BIRgePsQzezRcAqYM1RLy0HKs3scTNba2Z/Pc7f32BmTWbW1Np6Ypfth5IzLiYOaIIuERE4jkA3sxLgPuBm51zXUS9HgHOB64Grgc+Y2fKj38M5t9o51+ica6xNzscyZam/1wRdIiLAJO9YZGZRfJjf45y7f4wme4CDzrkeoMfMngDOBl5MW6VHS87nEj6oQBcRgcmNcjHgLmCLc+7WcZr9HLjEzCJmVgRcgO9rnz4VFcTCYSKH2qf1Y0REMsVkjtAvBt4NbDCz9cnnPgU0ADjn7nDObTGz3wDPAwngTufcxuko+AgzeksrKezQjIsiIjCJQHfOPQnYJNr9O/Dv6Shqsvoq/ARdIiKSwVeKAgxVVlHR00nfYDzoUkREApfRgR6vrqGyr4tDmqBLRCSzA52aGqp7OxXoIiJkeKCH5tRR0d9NZ2dv0KWIiAQuowM9b+4cAHqadfm/iEhGB/qRCbr2KdBFRDI60IvmzwNgSPO5iIhkdqBHk10u8RYFuohIRgd6aoIu0wRdIiIZHuhVVQCE23X5v4hIZgd6NEp3URl5CnQRkQwPdKCnvJLCTs24KCKS8YE+oAm6RESALAj0ocoqyno6icUTQZciIhKojA/0eLWfz6WjbyjoUkREApXxgU5trZ9xsXsg6EpERAKV8YEenlNHNBGna7/GootIbsv4QM+f4+dz6d23P+BKRESCNZmbRC8ws8fMbLOZbTKzm47R9jwzi5nZX6W3zPHln+Qv/x9oVqCLSG6bzE2iY8DHnHPrzKwUWGtmDzvnNo9sZGZh4AvAQ9NQ57hK6v0EXbH9ms9FRHLbhEfozrlm59y65PZhYAswf4ymfw/cB8xoshacNBeAREvrTH6siMisc1x96Ga2CFgFrDnq+fnAm4GvT/D3N5hZk5k1tbamJ4AtOUEXbTopKiK5bdKBbmYl+CPwm51zXUe9fBvwCefcMa/ucc6tds41Oucaa1NBfKKKiujPKyDSrkAXkdw2mT50zCyKD/N7nHP3j9GkEfiBmQHUANeZWcw597O0VXoMh0sryD+k+VxEJLdNGOjmU/ouYItz7tax2jjnFo9ofzfwwEyFOUBPWZUm6BKRnDeZI/SLgXcDG8xsffK5TwENAM65O6aptkkbqKik+IBOiopIbpsw0J1zTwI22Td0zr33RAqailhVNeXbX8I5R7LbR0Qk52T8laIAiepaqno76eqPBV2KiEhgsiLQqaulaGiAzraOoCsREQlMVgR6pM4PgTy8pzngSkREgpMVgR6d6+dz6d13IOBKRESCkxWBXjTfX/4/qAm6RCSHZUWgl9T7QI/t19BFEcldWRLoJwHgWjXjoojkrqwI9FBlBUOhMHZQ87mISO7KikDHjK7iciIHDwZdiYhIYLIj0IHu0kpN0CUiOS1rAr23vJLCLgW6iOSurAn0gYoqSg7rSlERyV1ZE+ixqmrKuxXoIpK7sibQEzW1VPR309fTH3QpIiKByJpAT91btFPzuYhIjsqaQI/O8YHevVeX/4tIbsqaQM+b5yfo6tcEXSKSoyYMdDNbYGaPmdlmM9tkZjeN0eadZva8mW0ws6fM7OzpKXd8hfPnATCgCbpEJEdN5p6iMeBjzrl1ZlYKrDWzh51zm0e02QFc5pw7ZGbXAquBC6ah3nEdmaBL9xYVkRw1mXuKNgPNye3DZrYFmA9sHtHmqRF/8gxQn+Y6J1SenELXtSrQRSQ3HVcfupktAlYBa47R7APAr8f5+xvMrMnMmlrTHLx5hfl0FpQQatMEXSKSmyYd6GZWAtwH3Oyc6xqnzevwgf6JsV53zq12zjU65xprk8MM06mzpIJouyboEpHcNJk+dMwsig
FDLyQ2FqS8PUluZzxvzyMdslEo69HX1sb+sZNSTzme0H+emf9k7qs6qK82ioKmJhdRELq4poSPb5n1RRSEl+hKK8sC7CkkBNGOjOuSfMbNEEzf4euA84Lw01iaRdKGQsqCpiQVURly0fPQ64fyhO90CM/qE4A7HE6PVQgu6BGHsO9bG7vYfd7b2s3XWIXz63j7F6e/LCIYrywxRFwxTlRyjOC5MfDRMNG+FQiGjIiISNSDhEJGREQiHyoyEKo2GK8sIURMMURsMU5g0/XlxTzNLaEp0TkAmdcB+6mc0H3gy8jgkC3cxuAG4AaGhoONGPFkmLgmj4uEfTDMYS7O3oY3d7L80dffQMxukdiNEzGKdv0K97B2P0DMSPfDEMJeLE4gniCcfQkbU78uXROxgb80sCoDAa5rSTyjhzfjlnzC/njPllLKstIaJfBDJCOk6K3gZ8wlskXl0AAAeASURBVDmXmOgkk3NuNbAa/KX/afhskUDkRYZP7KaLc/5kb/9ggt6hGH2D/pfDSwe62bC3k417O/lR0yvc/dROAAqiIU6ZU0pxfoRIOERe2JIjgIZ/CRRGw8wpL2BeeQFzygqYV17I3LICCvM0HDQbpSPQG4EfJMO8BrjOzGLOuZ+l4b1FcoaZkR/xJ2XLGT4Be1Z9BX95bj3gx/fvaEsFfBcvHjhM/1CcnkF/9B+L+6P/oUSCoZijdzA2aihoSkVRlLllBVQURf2ssakXHDjckeecc8l1ss2Ix+C/2AqSXUap7qKCZJdRQSREfjRMfiSUXMLkR4e38yK+28l/AQ0PQY0eWYeOtM8LhzQqaRJOONCdc4tT22Z2N/CAwlxkeoRDxrK6UpbVlfLmVZP7m97BmB/xkxz105zcbu7sp6tvCAyM5E2FDIzQ8Iyy2JGbDZnZkXapaB2KO/qG4nT0DtE3FKd/ME5/LEHfYJy+ofjYBU3RkS+GqB99VJwXoawgSllhlPLkUlYYObJdVZxHTUk+1SV5VBfn58SNXSYzbPFe4HKgxsz2AJ8Ff/jgnLtjWqsTkRNWlBdJjsEvmdHPdS51jsCfZB6IJRgYGr0dS55PGIo7YvGEv74g7oglEgym2h1Z/LmI1N92D8To7Btib0cfW5q76Owbontg/LsxlRVEqCnJp6Ykn8riKNHkUX8o+QUVSn5ThcwImxGNGHlh/0siPxIatS6Ihv0XSMHoL5KS/EigvyQmM8rl7ZN9M+fce0+oGhHJGmZGXsTIi4QonaHPjMUTdPX7oG/vGaSte4CD3an1AG09g7QdHmBHWw+xRLJryTkSya6mRPJShVjCf8kMxvwXy8ipKo4lZFBWGKUgEvZfFGaEQv5LImR25NfN289v4G9euyTt//26UlREskYkHKKqOI+q4rxpOWGdCvjewThd/UN09g3R1TdEV5//EkktA7G4v5vdkS8M/6WRcP5LZLruDqZAFxGZwMgT1gDVE7QPSvafJRARyREKdBGRLKFAFxHJEgp0EZEsoUAXEckSCnQRkSyhQBcRyRIKdBGRLGHOBTOLrZm1Arum+Oc1QFsay0kn1TY1s7k2mN31qbapydTaFjrnasd6IbBAPxFm1uScawy6jrGotqmZzbXB7K5PtU1NNtamLhcRkSyhQBcRyRKZGuirgy7gGFTb1Mzm2mB216fapibrasvIPnQREXm1TD1CFxGRoyjQRUSyRMYFupldY2ZbzWybmX0y6HpGMrOdZrbBzNabWVPAtXzLzFrMbOOI56rM7GEzeym5rpxFtf2zme1N7rv1ZnZdQLUtMLPHzGyzmW0ys5uSzwe+745RW+D7zswKzOxZM3suWdv/Tj6/2MzWJP+9/tDM8mZRbXeb2Y4R+23lTNc2osawmf3JzB5IPp7afnPOZcwChIGXgSVAHvAccFrQdY2obydQE3QdyVouBc4BNo547v8Bn0xufxL4wiyq7Z+B/zkL9ts84JzkdinwInDabNh3x6gt8H2Hv1VmSXI7CqwBLgR+BLwt+fwdwEdmUW13A38V9P9zybo+CnwfeCD5eEr7LdOO0M8HtjnntjvnBoEfAG8MuKZZyTn3BNB+1NNvBL6d3P428KYZLSppnNpmBedcs3NuXXL7MLAFmM8s2HfHqC1wzutOPowmFwdcAfwk+XxQ+2282mYFM6sHrgfuTD42prjfMi3Q5wOvjHi8h1nyP3SSAx4ys7VmdkPQxYxhjnOuObm9H5gTZDFj+Dszez7ZJRNId9BIZrYIWIU/optV++6o2mAW7Ltkt8F6oAV4GP9rusM5F0s2Cezf69G1OedS++3/JvfbF81seu7cPLHbgH8EEsnH1Uxxv2VaoM92lzjnzgGuBW40s0uDLmg8zv+WmzVHKcDXgaXASqAZ+M8gizGzEuA+4GbnXNfI14Led2PUNiv2nXMu7pxbCdTjf02fGkQdYzm6NjM7A7gFX+N5QBXwiZmuy8zeALQ459am4/0yLdD3AgtGPK5PPjcrOOf2JtctwE/x/1PPJgfMbB5Act0ScD1HOOcOJP/RJYBvEuC+M7MoPjDvcc7dn3x6Vuy7sWqbTfsuWU8H8BhwEVBhZpHkS4H/ex1R2zXJLiznnBsA/otg9tvFwF+Y2U58F/IVwJeY4n7LtED/I3By8gxwHvA24BcB1wSAmRWbWWlqG7gK2Hjsv5pxvwDek9x+D/DzAGsZJRWWSW8moH2X7L+8C9jinLt1xEuB77vxapsN+87Mas2sIrldCPwZvo//MeCvks2C2m9j1fbCiC9ow/dRz/h+c87d4pyrd84twufZo865dzLV/Rb02d0pnA2+Dn92/2Xgn4KuZ0RdS/Cjbp4DNgVdG3Av/uf3EL4P7gP4vrlHgJeA3wJVs6i27wIbgOfx4TkvoNouwXenPA+sTy7XzYZ9d4zaAt93wFnAn5I1bAT+V/L5JcCzwDbgx0D+LKrt0eR+2wh8j+RImKAW4HKGR7lMab/p0n8RkSyRaV0uIiIyDgW6iEiWUKCLiGQJBbqISJZQoIuIZAkFuohIllCgi4hkif8PRdw9xmyMAZoAAAAASUVORK5CYII=\n", "text/plain": [ "<Figure size 432x288 with 1 Axes>" ] }, "metadata": { "needs_background": "light", "tags": [] }, "output_type": "display_data" } ], "source": [ "import matplotlib.pyplot as plt\n", "import matplotlib.ticker as ticker\n", "\n", "plt.figure()\n", "plt.plot(all_losses)\n", "plt.plot(test_losses, color='r')" ] }, { "cell_type": "code", "execution_count": 30, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 1404, "status": "ok", 
"timestamp": 1606204009870, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "aNAbp2Aqz13A", "outputId": "fbe70816-edc4-4ede-83f7-03cffe154298" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "The man wake him not hearing of his brother,\n", "Even it but the great sorrow and have them: and\n", "countrymen stands ill to seeming, the boy,\n", "I misa man cave his heads: therefore hath restamed\n", "thee my shadief in things and trust thou, for the faith\n", "She is very eater in here.\n", "\n", "ROSALINE:\n", "Give this made it in the lander: you\n", "see the day will necre 'gainst the heaps of heaven.\n", "\n", "MARGARET:\n", "I will take up the easy?\n", "\n", "CARDINAL WOLSEY:\n", "If your queen of the charess weep,\n", "And supportune shook all the wait with him, he's\n", "mine honest white high at all, which he small strength\n", "them to hear a prier get descends and the good that,\n", "With this place of my soul to secrets\n", "And shall seen mine reputation, I'll answer he\n", "made expled in our meaning strange plain.\n", "\n", "HOLOFERNES:\n", "Now, yet is not my husband on his admirally received\n", "And must be how Perchain, is ever masters to them as\n", "adversaries; which is to my sir, if you should\n", "here a villain the better to be directed and confess of\n", "the Frenchmen three suit; he is there\n" ] } ], "source": [ "print(evaluate(rnn, prime_str='Th', predict_len=1000))" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "1cSYaORWz13A" }, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": 14, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 1474, "status": "ok", "timestamp": 1606238175208, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "qjIv9vg1z13A", "outputId": "942456cb-0ee3-4cff-c974-f225ab8a0eff" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "file_len = 4728857\n", "train len: 4255971\n", "test len: 472886\n" ] } ], "source": [ "#######################################################\n", "# Extra credit\n", "#######################################################\n", "all_characters = string.printable\n", "#print(all_characters)\n", "n_characters = len(all_characters)\n", "#print(n_characters)\n", "\n", "file_path = './charles_dickens.txt'\n", "file = unidecode.unidecode(open(file_path).read())\n", "file_len = len(file)\n", "print('file_len =', file_len)\n", "\n", "# we will leave the last 1/10th of text as test\n", "split = int(0.9*file_len)\n", "train_text = file[:split]\n", "test_text = file[split:]\n", "\n", "print('train len: ', len(train_text))\n", "print('test len: ', len(test_text))\n", "#print(train_text[0:100])\n" ] }, { "cell_type": "code", "execution_count": 15, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 201, "status": "ok", "timestamp": 1606238184249, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "ubMm3pkoOiKu", "outputId": "496c2779-0a27-4b2a-f951-3cc089fb0d42" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "t on how you may.\"\n", "\n", "A ghost-seeing effect in Joe's own countenance informed me that Herbert\n", "had entered the room. 
So, I presented Joe to Herbert, who held out his\n", "hand; but Joe backed from it, and held\n" ] } ], "source": [ "chunk_len = 200\n", "\n", "def random_chunk(text):\n", " start_index = random.randint(0, len(text) - chunk_len)\n", " end_index = start_index + chunk_len + 1\n", " return text[start_index:end_index]\n", "\n", "print(random_chunk(train_text))" ] }, { "cell_type": "code", "execution_count": 16, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 535045, "status": "ok", "timestamp": 1606238728365, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "ubJW_mQ7z13A", "outputId": "0b2e4e34-f548-46c4-d291-b53ed2be1223" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "RNN(\n", " (embedding_object): Embedding(100, 200)\n", " (dn_object): GRU(200, 200)\n", " (linear): Linear(in_features=200, out_features=100, bias=True)\n", ")\n", "Training for 750 epochs...\n", "[0m 34s (50 6%) train loss: 1.8457, test_loss: 1.9206]\n", "Whe bet to made sable and muscleser, ajing the gre to moth a mill heave in on oy no kning uppoo no to \n", "\n", "[1m 8s (100 13%) train loss: 1.6645, test_loss: 1.7037]\n", "Which as\n", "towed thom all still a\n", "Teman, I such an of so the at Strunce would had ge no one words, and t \n", "\n", "[1m 43s (150 20%) train loss: 1.5787, test_loss: 1.6088]\n", "What the word, I done enchersevering breat stood him\n", "lidger--and I lempation, and the house he\n", "greatso \n", "\n", "[2m 17s (200 26%) train loss: 1.5320, test_loss: 1.5811]\n", "Whe was a cry, as to my eyes,\n", "and coming along this mother. Mr. Peggotty accuple as he face to the sug \n", "\n", "[2m 52s (250 33%) train loss: 1.4975, test_loss: 1.5915]\n", "Whe had a first me, with at\n", "hoarty, I had we early and his gentleman I got again. On the sated as you \n", "\n", "[3m 26s (300 40%) train loss: 1.4718, test_loss: 1.5563]\n", "Where, that he would house-short was a place. 'He's a long some in her, and the remembround the procee \n", "\n", "[4m 1s (350 46%) train loss: 1.4789, test_loss: 1.5444]\n", "Whe, in the large mind, in a fatchened and moment, and I over the struck for it, and dual made the kin \n", "\n", "[4m 36s (400 53%) train loss: 1.4437, test_loss: 1.5138]\n", "Wheer, in his returned in that to keep him gave her so intent that she exceslamy that dealing of the b \n", "\n", "[5m 11s (450 60%) train loss: 1.4172, test_loss: 1.5463]\n", "When, as she could have returned your down and good at a face. The house it went of took hirst, capa \n", "\n", "[5m 46s (500 66%) train loss: 1.4354, test_loss: 1.4769]\n", "What I my libed his way, as was the gentleman you arating.\"\n", "\n", "\"No, which was a gras is not a\n", "such youse \n", "\n", "[6m 21s (550 73%) train loss: 1.4031, test_loss: 1.5110]\n", "Where to be approved yeart, that emoting the whole to in the stable course\n", "of the sulting to myself, a \n", "\n", "[6m 56s (600 80%) train loss: 1.3896, test_loss: 1.4864]\n", "What's imput ins there, and the grown with his is in a moment of the house, as a complete came at the \n", "\n", "[7m 32s (650 86%) train loss: 1.4307, test_loss: 1.5016]\n", "When I must have been to be all I was got me, shall very look of the\n", "dear than to be usumed him to the \n", "\n", "[8m 8s (700 93%) train loss: 1.4032, test_loss: 1.4632]\n", "What is so made by\n", "unduring, after a little bettingly. That was still to devious the us. 
So me he had \n", "\n", "[8m 44s (750 100%) train loss: 1.4123, test_loss: 1.4549]\n", "Where, old writion as you knows. A good neeares of the please, I promissonal do you assumed them to be \n", "\n" ] } ], "source": [ "rnn = RNN(n_characters, hidden_size, n_characters, model_type=model_type, n_layers=n_layers).to(device)\n", "rnn_optimizer = torch.optim.Adam(rnn.parameters(), lr=learning_rate)\n", "criterion = nn.CrossEntropyLoss()\n", "\n", "start = time.time()\n", "all_losses = []\n", "test_losses = []\n", "loss_avg = 0\n", "test_loss_avg = 0\n", "print(rnn)\n", "\n", "#n_epochs=1000\n", "print(\"Training for %d epochs...\" % n_epochs)\n", "for epoch in range(1, n_epochs + 1):\n", " loss = train(rnn, *load_random_batch(train_text, chunk_len, batch_size), rnn_optimizer, criterion)\n", " loss_avg += loss\n", " \n", " test_loss = eval_test(rnn, *load_random_batch(test_text, chunk_len, batch_size))\n", " test_loss_avg += test_loss\n", "\n", " if epoch % print_every == 0:\n", " print('[%s (%d %d%%) train loss: %.4f, test_loss: %.4f]' % (time_since(start), epoch, epoch / n_epochs * 100, loss, test_loss))\n", " print(generate(rnn, 'Wh', 100, device=device), '\\n')\n", "\n", " if epoch % plot_every == 0:\n", " all_losses.append(loss_avg / plot_every)\n", " test_losses.append(test_loss_avg / plot_every)\n", " loss_avg = 0\n", " test_loss_avg = 0" ] }, { "cell_type": "code", "execution_count": 17, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 284 }, "executionInfo": { "elapsed": 441, "status": "ok", "timestamp": 1606238789430, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "F6RUCJO-z13A", "outputId": "e0ba8a14-38f1-4a5d-a171-6bafcb6d0782" }, "outputs": [ { "data": { "text/plain": [ "[<matplotlib.lines.Line2D at 0x7fece206dc50>]" ] }, "execution_count": 17, "metadata": { "tags": [] }, "output_type": "execute_result" }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3deXxc5X3v8c9vNCPJWkZeJFuLF3m3hDewsXFYAiEkhNJws5CEUAhp2uTVS7MQblLIRpO2WQpNaZMm3JQQk5QQNqcsN2Qpi81qsMGrbNnGxqssy9iWZO2jee4fZ7RZq+2Rjmbm+37lvM6Zc45nfuQlfefRc855HnPOISIiiS/gdwEiIhIfCnQRkSShQBcRSRIKdBGRJKFAFxFJEkG/Pjg/P9+Vlpb69fEiIglp/fr1R51zBX0d8y3QS0tLWbdunV8fLyKSkMxsb3/H1OUiIpIkFOgiIklCgS4ikiQU6CIiSUKBLiKSJBToIiJJQoEuIpIkEi7Qtx+u43u/28bJlojfpYiIjCoJF+gHjjXxf9fspvJwnd+liIiMKgkX6GXFYQAqqup9rkREZHRJuEAvzssknBmk4pBa6CIi3SVcoJsZZUVhtlUp0EVEuku4QAcoLw5Tebie9qjmQxUR6ZCYgV4whqbWCHvfafC7FBGRUSPxAv3RR/nIpedQXF9DhbpdREQ6JV6gl5YSaGnm3MM71Y8uItLNoIFuZlPM7DkzqzCzrWb2xT7Oud7MNpnZZjN72cwWDU+5wMKFkJ7Ou0/sYZtuXRQR6TSUFnoEuNU5Vw5cANxsZuWnnLMHeLdzbgHwD8DP4ltmN+npcO65nKcWuohID4MGunOuyjn3Rmy7HtgGlJxyzsvOueOxl68Ck+NdaA/LljFtzzaqjzdwvKF1WD9KRCRRnFYfupmVAucCawc47TPA0/38+8+a2TozW1dTU3M6H93TsmWEmhuZ9c5+tdJFRGKGHOhmlgM8BnzJOddniprZZXiB/nd9HXfO/cw5t9Q5t7SgoM9Jq4dm2TIAFlXt0J0uIiIxQwp0MwvhhfkDzrlV/ZyzELgXuMY59078SuzDrFkwdiwXHH1LF0ZFRGKGcpeLAT8HtjnnftjPOVOBVcANzrkd8S2xD4EAnH8+S6p3qYUuIhITHMI5FwI3AJvNbENs39eAqQDOuXuAbwETgJ94+U/EObc0/uV2s2wZU595lv0Hj9IaiZIeTLxb6kVE4mnQQHfOvQjYIOf8FfBX8SpqSJYtIxBtZ86ht3ir5iRlReER/XgRkdEmcZu1sQuji6t26E4XERESOdALC3FTp3Ju9Q6NjS4iQiIHOmDLlrGkehfbNB2diEhiBzrLllF89CCH3jqIcxobXURSW8IHOsDUt7ZypL7F52JERPyV2IG+ZAkuEPCeGFU/uoikuMQO9JwcomVlGgJARIRED3QgbflybyjdQ7V+lyIi4quED3SWLWNcwwmOVez0uxIREV8lRaADjN+6gabWdp+LERHxT+IH+vz5tGdksvDQDiqrNfKiiKSuxA/0UIjIosUs0hAAIpLiEj/QgfQVy1lQvYvKA8f8LkVExDdJEei2fDlZbS3Ur9/odykiIr5JikDvuDCas+lNolENASAiqSk5An3GDFrCY5m3bxsHjjf5XY2IiC+SI9DNaDlvCYv1xKiIpLDkCHRgzIUrmHN0Hzt2H/a7FBERXyRNoIdWXECai9K89nW/SxER8UXSBDrnnw9A1ob1PhciIuKP5An0iROpK5xM6e4Kapva/K5GRGTEJU+gA02Lz2NR1Q6268KoiKSgpAr0MReuYEptNXsqdvtdiojIiEuqQM+95F0ANL201udKRERGXlIFui1ZQtQCZOrCqIikoKQKdLKzqZk2i5Idm4m0R/2uRkRkRCVXoAONi5ew4NAO9tSc9LsUEZERlXSBnnnhBYxrruft1zf7XYqIyIhKukDPv+xiABpffNXnSkRERlbSBXpo0QKaQxlkvLnO71JEREZU0gU6wSCHZpZTVLnJ70pEREZU8gU60LDwPOYe2sXR47owKiKpIykDPf1dy8mMtLJvzWt+lyIiMmKSMtAL33sJACfXvOxzJSIiIycpAz2vfA7Hs/JIf0MXRkUkdSRloGPGvlnzmbRdF0ZFJHUkZ6ADJxeey7TDb9N87ITfpYiIjIikDfTgiuUEcBx69iW/SxERGRGDBrqZTTGz58yswsy2mtkX+zjHzOzfzWyXmW0ys/OGp9yhm/ieiwCof0EXRkUkNQSHcE4EuNU594aZ5QLrzexPzrmKbud8AJgdW5YDP42tfTN1zjT2jS0kuE4XRkUkNQzaQnfOVTnn3oht1wPbgJJTTrsG+KXzvAqMNbOiuFd7GtICxtszz2Hi9o1+liEiMmJOqw/dzEqBc4FTpwQqAfZ3e32A3qGPmX3WzNaZ2bqamprTq/QM1C04j4Jj1biqqmH/LBERvw050M0sB3gM+JJz7oxmYXbO/cw5t9Q5t7SgoOBM3uK0BC9YBsCx53VhVESS35AC3cxCeGH+gHNuVR+nHASmdHs9ObbPVxPffQERC1C7WhdGRST5DeUuFwN+Dmxzzv2wn9OeAG6M3e1yAVDrnPO9n2PO9EIqC0pJW/e636WIiAy7odzlciFwA7DZzDbE9n0NmArgnLsH+B1wFbALaAQ+Hf9ST19ORpC3ppfz3s2rwTkw87skEZFhM2igO+deBAZMQuecA26OV1HxdGL+uWStfQp27YLZs/0uR0Rk2CTtk6IdAhd4t8M3v/SKz5WIiAyvpA/0ScvPpSGUSe1q3ekiIskt6QO9bPJYNhfOIqALoyKS5JI+0EvGjmHblHmMrdwKra1+lyMiMmySPtDNjOPnLCbU1gqbN/tdjojIsEn6QAew5d6F0eirp45YICKSPFIi0EsWzKEmeywNL+pOFxFJXikR6GXFeWwomgOvveZ3KSIiwyYlAn32pBw2F88lZ89OqDujccVEREa9lAj0zFAaNWULMedg/Xq/yxERGRYpEegALF3qrdfqwqiIJKeUCfRps6ewe1wxra+86ncpIiLDImUCvbwozMaiObi1ujAqIskpZQK9LBboGdVVcND3uTdEROIuZQK9IDeDt2ee4714XeO6iEjySZlABwicu5hIIE33o4tIUkqpQJ89rYBtE6cTVT+6iCShlAr08qIwG4rm4F5/HaJRv8sREYmrlAr0jgujafV1sGOH3+WIiMRVSgX6jPxstk6e571QP7qIJJmUCvRgWoD08nk0ZWYp0EUk6aRUoAPMLRnLlqLZOAW6iCSZlAv0sqIw6ybOhg0boKXF73JEROImJQN9Q9EcrK0NNm70uxwRkbhJyUDfWDTHe6FuFxFJIikX6HljQqRNmUzt2HwFuogklZQLdPCmpNtSMleBLiJJJSUDvbwol1cmzITKSjhxwu9yRETiIjUDvTjMmx396OvW+VuMiEicpGSglxWF2Vw4y3uhbhcRSRIpGehTxmXRHs6jpqRUgS4iSSMlAz0QMOYVhamYPM+bNNo5v0sSETlrKRno4A2l+9
7TH+++HZ5/1WuqXXuqNKXPBBd6QvqHQ8Hy2iJwxBXqCeXpzFV//7y2cbI7w5ffN4a8vnkFawIbvA/ft88ZjX7nSG0cGID3dGyBs0SJYvNhbL1rkPbEqIr5RoCegoydb+PpvN/OHrdWcN3Usd127iBkFOcP7oc55E29s2AAbN3rrDRu8YX07TJvWFfAd6+nTvan2RGTYKdATlHOOxzcc4luPb6G1PcptV87jxhWlBIaztd67CO+OmY6A71jv2NE1w1I47M2T2j3ozznHGz1SROJKgZ7gDtc2c9uqTTxfWcOKGRP4548uZMr4LH+LamyELVt6Bv3Gjd6FVvBuoZw3r6urZtEiWLDAu41SrXmRM6ZATwLOOR56fT//8FQFAN+4upxPnD8FG03hGI3Cnj09W/IbN3p99B3GjfOCvfsyf77XyheRQSnQk8j+Y4189dFNvLL7HWbkZ/PJ5VP5yHmTGZed7ndp/Tt2zBvbvfuyZQvU13edM21a76DXnTYivSjQk0w06nhi4yF+9epe1u89TnowwJ8tKOL65VNZMm3c6Gq198c52Lu3d9BXVkIkNhplKOR125zamp86Vd02krIU6Els++E6fr12H6veOMjJlghzJuVw/fJpfOi8EsKZCdi6bW2F7dt7B/3+/V3nhMNesC9Y4F18nTfPWyZPVtBL0lOgp4CGlghPbjzEr1/bx6YDtYwJpfHni4q4fvk0Fk7OS4xW+0BOnICtW3sH/YkTXedkZ3vdNB0B37HMng2Zmf7VLhJHCvQUs+nACX69dh+PbzhEU1s75xSHuX75NK5ZXEx2RtDv8uLHOaiu9lr0py5793adZ+bdK39q0M+bBwUF/tUvcgYU6CmqrrmNx988yANr97H9cD05GUGuWVzM9cunUV6c5HeVNDZ698qfGvSVldDc3HXehAm9Q37uXK+fPiPDv/pF+qFAT3HOOd7Yd4IH1u7lqU1VtEainDt1LNcvn8bVC4vIDKX5XeLIiUa92yj7atVXV/c8d+JEmDKl9zJ5srcuLtZdODLiFOjS6URjK4+9cZAH1u5ld00D4cwgH1kymeuXT2XWxGEaCCxRHD/uteArK73Q37+/51JX1/P8QMAbiriv0O8I/sJCbz5YkThRoEsvzjnW7jnGA2v38fstVbS1O5ZNH891y6Zw2dyJjM0axfe1+6WuDg4c6B303ZdTJw8JBr2WfEeLPhyGnJzTW7KyvC8PERToMoijJ1t4dP0Bfr12H/uONRIwb2LrS+YUcMnsfBZPGUsw3mOzJyPnvFb+qSHf8SVQVeUNjXDypPdQVcdYOIMx8+7g6S/wx43z/hooKem5zsvTbZxJSIEuQxKNOt7cf4I1O2pYs7OGjftPEHWQmxnkwpn5XDwnn0tmF/g/jkwycA5aWnoGfMf26S5Hj8KRI70/Izu7d8ifup44Ua3/BKNAlzNS29jGS28d9QJ+Rw2Har27Q2bkZ3Px7HwumVPABTMmJNetkImqpcX7C+DAATh4sO/1oUNdT+F26OgS6ivwCwshP99bJkzQBeBRQoEuZ805x1s1Dbyw0wv3V3cfo6mtnVCasWTauFj3TAHlReGRHd5Xhi4a9VryfYV99+3+JhHPy+sK+I6loKD3vo5l3Di1/oeBAl3iriXSzvq3j7N6Zw1rdhxlW5V3B0h+TjoXzcrn4tkFXDwnn4m5ekIzoTgHtbVesB854nXnDLTU1PS8r7+7QADGj+8Z8hMmePf3h0I9l2Bw8H0DnZOe7l087r6kpyflNQQFugy7I/XNvLjzKC/sPMoLO2s4erIVgLKiMJfMzmfFzAksKMljQo4e1kk6jY09A36gL4B33vG6hyIRaGvzllO7geIlEOgd8qe7jBnjLZmZPdenbodCI/bloUCXERWNOiqq6nhhp9f/vm7vMdravZ+z4rxM5pfksaAkj/mT85hfnEdBrkI+pTkH7e1dAd896Ad63X1fSws0NXlfLo2NPbeHujQ0eHWcCbOBA//UL4Srr4aPfOQMP6r/QNfVLIm7QMCYX5LH/JI8/ubSmTS0RNh0oJYtB2vZfNBb/7Gi66nMwnC3kC8Js6Akj4lhddWkDDOv6yQY9H/awra23l8ITU1et1JT05lv19Z6Uzl27J89e1jKV6DLsMvOCLJi5gRWzJzQua++uY2KQ3WdAb/5YC3PbK+m4w/GibkZsYDP6wz7SeGMxB81Uka3UMi7+JuX53clZ0SBLr7IzQyxfMYEls/oCvmTLRG2VdWxuVtr/rnKI0RjIZ+fk8GCknCPkC/Ky1TIi8Qo0GXUyMkIcn7peM4vHd+5r7E10q0lX8eWg7Ws3lHTGfJjs0LMnZTL3EJvmVeYy5xJueQm4uQeImdJgS6jWlZ6kKWl41naLeSbWtupqPLCffvhOrYfruex9QdoaO26oFUydkyPkJ9bmMuM/BzSg7ovWpKXAl0Szpj0NJZMG8eSaeM69znnOHC8icrD9VRW13vrw/Ws2VFDJNacDwaMGQXZzC0MeyEfa9mXjB2jh6EkKSjQJSmYGVPGZzFlfBbvLZ/Uub81EmX30ZOdAV95uJ439h7nyY2HOs/JTk9jTkdLflIucwvDlBXlasRJSTgKdElq6cEA8wrDzCvsOUNTfXMbO6rrqTx8kspYt83TWw7z4Gtdk1EX52VSVhSmvDhMWZG3TBufpda8jFoKdElJuZkhlkwbz5JpXX3zzjmO1Lew/XA926rqOpfnd9TQHuu2yU5PY25hbo+Qn1eYS1a6fpXEf3pSVGQQzW3t7Kw+SUVVLduq6qmoqmPboTrqW7xH1s1g+oTsbq35XMqLdN+8DA89KSpyFjJDaSyYnMeCyV0Pm3RchK3o1pLfdPAE/29zVec547JCXsjHWvKzJ+UwLiud8JgQuRlBdd1I3CnQRc5A94uw7z+nsHN/XXMb26u6umwqqur41at7aYn0nJ0oYF63T3hMkLwxIcKZoa51VohwZmx/bOlxzpggGUHNUyq9KdBF4iicGWLZ9PEsm97VNx9pj7LnaANv1TRQ19RGXXMbtU1t1DXF1s0Rapva2HXkZOx1G81tA09PlxkKdAZ8bmaQ7Iygt07vtp0R287o2E4jNyNEdkYaORlBcjKDjAmlqVsoiSjQRYZZMC3A7Em5zJ6UO+R/0xJpp64p0hnwHV8Add2+ADq+EOqbI9Q3R6iqbaahJcLJ2DKUy2MBg+x0L9x7fgGkMSEng8JwJoXhTCblZXZuh8cE9SUwSinQRUahjGAaBblpZzy0sHOOprZ2L9ybIzS0tFPf0kZDSzsNLRHqWyI0xJb65th2a9f2kfpmXn/7OMcaWnu9d2Yo4IV8OJPCPG89KRb2hXkZTApnMjE3U0/l+kCBLpKEzIys9CBZ6UEmDv0Pg15aIu0cqWvhcF0zh2ubqY6tD9d522/sO051XQutkd5dRPk56Z1B39HCnxTOIDOURkYwQHowQHpamrcOBkhPO2XdbX8ozfRXwRAo0EWkXxnBtM6Lv/1xznG8sa0r8LuHf10zB0808ca+4xxvbDurWtKDATK6hX2o2xdAVnoaE3LSyc/JiC2x
7dyu1zkZyd9VNGigm9l9wNXAEefc/AHOOx94BfiEc+7R+JUoIqOZmTE+O53x2emUF4f7Pa+5rZ2a+hZaIu20RKK0tTtaI1FvaW+nNRKlJfbaO9ZOa3u085yW9ihtEdd5rvfvvHVDSzt7jjbw+tvHOd7Y2uf1g4xgoDPkC04N/87gz6AgJyNhrxMMpYW+Evgx8Mv+TjCzNOAHwB/jU5aIJJvMUNqALf14ibRHOdbYytH6Vo6ebOm2tHK0voWaky0cPNHMxgO1HGto7XwKuLv0tAATctLJzggSdQ68/xF1Du+lIxrrZXLOEY3tc47Y0M4d2w4Hndsd7/OXF03ny1fMift/+6CB7pxbY2alg5z2eeAx4Pw41CQicsaCaQEm5noXZgcTjTqON7Z6YR8L/pr6ls7Xja0RzAwDAmaY0bmNgWEEjNh+IxAA8M4LxPZ5215rv2N70ZM/1kkAAAUUSURBVOThmRHprPvQzawE+BBwGYMEupl9FvgswNSpU8/2o0VEzkogYEzIyWBCTgZzOYurx6NEPO4ruhv4O+fcwE9CAM65nznnljrnlhYUFMTho0VEpEM87nJZCvwmdgEhH7jKzCLOuf+Ow3uLiMgQnXWgO+emd2yb2UrgKYW5iMjIG8ptiw8ClwL5ZnYAuAMIATjn7hnW6kREZMiGcpfLdUN9M+fcTWdVjYiInDENtiAikiQU6CIiSUKBLiKSJHybU9TMaoC9Z/jP84GjcSxnuCVSvYlUKyRWvYlUKyRWvYlUK5xdvdOcc30+yONboJ8NM1vX3ySpo1Ei1ZtItUJi1ZtItUJi1ZtItcLw1asuFxGRJKFAFxFJEoka6D/zu4DTlEj1JlKtkFj1JlKtkFj1JlKtMEz1JmQfuoiI9JaoLXQRETmFAl1EJEkkXKCb2ZVmVmlmu8zsNr/r6Y+ZTTGz58yswsy2mtkX/a5pKMwszczeNLOn/K5lIGY21sweNbPtZrbNzFb4XdNAzOyW2M/BFjN70MwGn05nBJnZfWZ2xMy2dNs33sz+ZGY7Y+txftbYoZ9a74z9LGwys9+a2Vg/a+yur3q7HbvVzJyZ5cfjsxIq0GNzl/4H8AGgHLjOzMr9rapfEeBW51w5cAFw8yiutbsvAtv8LmII/g34vXNuHrCIUVxzbFavLwBLYxOtpwGf8LeqXlYCV56y7zbgGefcbOCZ2OvRYCW9a/0TMN85txDYAdw+0kUNYCW968XMpgDvA/bF64MSKtCBZcAu59xu51wr8BvgGp9r6pNzrso590Zsux4vcEr8rWpgZjYZ+DPgXr9rGYiZ5QGXAD8HcM61OudO+FvVoILAGDMLAlnAIZ/r6cE5twY4dsrua4D7Y9v3A/9rRIvqR1+1Ouf+6JyLxF6+Ckwe8cL60c//twD/CnwVb97ouEi0QC8B9nd7fYBRHpIAsUm2zwXW+lvJoO7G+wEbdDpBn00HaoBfxLqH7jWzbL+L6o9z7iBwF15LrAqodc790d+qhmSSc64qtn0YmORnMafhL4Gn/S5iIGZ2DXDQObcxnu+baIGecMwsB3gM+JJzrs7vevpjZlcDR5xz6/2uZQiCwHnAT51z5wINjJ7ugF5ifc/X4H0RFQPZZvYX/lZ1epx3f/Oov8fZzL6O1935gN+19MfMsoCvAd+K93snWqAfBKZ0ez05tm9UMrMQXpg/4Jxb5Xc9g7gQ+KCZvY3XlfUeM/svf0vq1wHggHOu4y+eR/ECfrR6L7DHOVfjnGsDVgHv8rmmoag2syKA2PqIz/UMyMxuAq4Grnej+wGbmXhf7htjv2+TgTfMrPBs3zjRAv11YLaZTTezdLwLS0/4XFOfzJs1++fANufcD/2uZzDOududc5Odc6V4/78+65wbla1I59xhYL+ZzY3tuhyo8LGkwewDLjCzrNjPxeWM4ou43TwBfCq2/SngcR9rGZCZXYnXXfhB51yj3/UMxDm32Tk30TlXGvt9OwCcF/u5PisJFeixix5/C/wB7xfiYefcVn+r6teFwA14Ld0NseUqv4tKIp8HHjCzTcBi4Ls+19Ov2F8SjwJvAJvxfu9G1aPqsbmDXwHmmtkBM/sM8H3gCjPbifdXxvf9rLFDP7X+GMgF/hT7XRs18x33U+/wfNbo/stERESGKqFa6CIi0j8FuohIklCgi4gkCQW6iEiSUKCLiCQJBbqISJJQoIuIJIn/DwxPG+9uiHi0AAAAAElFTkSuQmCC\n", "text/plain": [ "<Figure size 432x288 with 1 Axes>" ] }, "metadata": { "needs_background": "light", "tags": [] }, "output_type": "display_data" } ], "source": [ "import matplotlib.pyplot as plt\n", "import matplotlib.ticker as ticker\n", "\n", "plt.figure()\n", "plt.plot(all_losses)\n", "plt.plot(test_losses, color='r')" ] }, { "cell_type": "code", "execution_count": 18, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "executionInfo": { "elapsed": 1119, "status": "ok", "timestamp": 1606238793308, "user": { "displayName": "Punit Jha", "photoUrl": "", "userId": "07885534541681120711" }, "user_tz": 360 }, "id": "gZRziBDwz13A", "outputId": "fbac23e5-a76a-47bc-9bd2-37ccc02cdd70" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Is, and pulled to bear when he saw, and she was then or a point of Mr. Sumble, as aspection, were no mading her handler stood for the stoods pundsact.\n", "\n", "\"Day!\"\n", "\n", "\"I want to you the most in you to see?\" said Mr. Lorry. She had been offer\n", "with mentions of lawburing his face. 
He had bad and with me to her hands to be to set of thrify found impury for it what leading in her against suffed people.\"\n", "\n", "I saw, and paused it hurry, stood Defarge; \"am this, she was the supporwaid for good stand, to have you wish is a gently enlangeasure in the controod so he proceeding of the Clara, and the piers, where me to be twomouson was never\n", "appeared\n", "to have a beginner, as the likely to the indived into the table instate at her feel on the possible upon his astreesers (I want to the little touched to her fast came and\n", "graught,\" returned him of me,\" replied the word\n", "with his last were pursuit. At the ainher. And mewhere she had relieved the coversame, and the kitches and said Joe\n", "was find a warms and wants repa\n" ] } ], "source": [ "print(evaluate(rnn, prime_str='Is', predict_len=1000))" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "juvJ_3BhOtLV" }, "outputs": [], "source": [] } ], "metadata": { "accelerator": "GPU", "anaconda-cloud": {}, "colab": { "collapsed_sections": [], "name": "MP4_P2_generation.ipynb", "provenance": [] }, "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.6" } }, "nbformat": 4, "nbformat_minor": 1 }