{"nbformat":4,"nbformat_minor":0,"metadata":{"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.7.6"},"colab":{"name":"MP5.ipynb","provenance":[]},"accelerator":"GPU"},"cells":[{"cell_type":"markdown","metadata":{"id":"Z8T2TIFrA2mD"},"source":["# Deep Q-Learning "]},{"cell_type":"markdown","metadata":{"id":"rPhKDBNWA2md"},"source":["Install dependencies for AI gym to run properly (shouldn't take more than a minute). If running on google cloud or running locally, only need to run once. Colab may require installing everytime the vm shuts down."]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"buqU6rkkA2me","executionInfo":{"status":"ok","timestamp":1610847989198,"user_tz":360,"elapsed":3997,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}},"outputId":"f06dc404-066b-481d-c316-7a0fd37cde34"},"source":["!pip3 install gym pyvirtualdisplay\n","!sudo apt-get install -y xvfb python-opengl ffmpeg"],"execution_count":3,"outputs":[{"output_type":"stream","text":["Requirement already satisfied: gym in /usr/local/lib/python3.6/dist-packages (0.17.3)\n","Requirement already satisfied: pyvirtualdisplay in /usr/local/lib/python3.6/dist-packages (2.0)\n","Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from gym) (1.4.1)\n","Requirement already satisfied: cloudpickle<1.7.0,>=1.2.0 in /usr/local/lib/python3.6/dist-packages (from gym) (1.3.0)\n","Requirement already satisfied: pyglet<=1.5.0,>=1.4.0 in /usr/local/lib/python3.6/dist-packages (from gym) (1.5.0)\n","Requirement already satisfied: numpy>=1.10.4 in /usr/local/lib/python3.6/dist-packages (from gym) (1.19.5)\n","Requirement already satisfied: EasyProcess in /usr/local/lib/python3.6/dist-packages (from pyvirtualdisplay) (0.3)\n","Requirement already satisfied: future in /usr/local/lib/python3.6/dist-packages (from pyglet<=1.5.0,>=1.4.0->gym) (0.16.0)\n","Reading package lists... Done\n","Building dependency tree \n","Reading state information... Done\n","python-opengl is already the newest version (3.1.0+dfsg-1).\n","ffmpeg is already the newest version (7:3.4.8-0ubuntu0.2).\n","xvfb is already the newest version (2:1.19.6-1ubuntu4.8).\n","0 upgraded, 0 newly installed, 0 to remove and 16 not upgraded.\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"TFatWwUjA2mg","executionInfo":{"status":"ok","timestamp":1610848000271,"user_tz":360,"elapsed":7431,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}},"outputId":"6d19e3b8-11b3-438f-cbcb-58184de36803"},"source":["!pip3 install --upgrade setuptools\n","!pip3 install ez_setup \n","!pip3 install gym[atari] "],"execution_count":4,"outputs":[{"output_type":"stream","text":["Requirement already up-to-date: setuptools in /usr/local/lib/python3.6/dist-packages (51.3.0)\n","Requirement already satisfied: ez_setup in /usr/local/lib/python3.6/dist-packages (0.9)\n","Requirement already satisfied: gym[atari] in /usr/local/lib/python3.6/dist-packages (0.17.3)\n","Requirement already satisfied: pyglet<=1.5.0,>=1.4.0 in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (1.5.0)\n","Requirement already satisfied: numpy>=1.10.4 in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (1.19.5)\n","Requirement already satisfied: cloudpickle<1.7.0,>=1.2.0 in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (1.3.0)\n","Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (1.4.1)\n","Requirement already satisfied: atari-py~=0.2.0; extra == \"atari\" in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (0.2.6)\n","Requirement already satisfied: Pillow; extra == \"atari\" in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (7.0.0)\n","Requirement already satisfied: opencv-python; extra == \"atari\" in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (4.1.2.30)\n","Requirement already satisfied: future in /usr/local/lib/python3.6/dist-packages (from pyglet<=1.5.0,>=1.4.0->gym[atari]) (0.16.0)\n","Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from atari-py~=0.2.0; extra == \"atari\"->gym[atari]) (1.15.0)\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"3kfdk-sCA2mg"},"source":["For this assignment we will implement the Deep Q-Learning algorithm with Experience Replay as described in breakthrough paper __\"Playing Atari with Deep Reinforcement Learning\"__. We will train an agent to play the famous game of __Breakout__."]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"QargUW-UBcOh","executionInfo":{"status":"ok","timestamp":1610848025298,"user_tz":360,"elapsed":18316,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}},"outputId":"83c18796-90b2-4628-ac07-2f02e7f314d8"},"source":["from google.colab import drive\r\n","drive.mount('/content/gdrive')"],"execution_count":5,"outputs":[{"output_type":"stream","text":["Mounted at /content/gdrive\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"HVgxBMVWBdFH","executionInfo":{"status":"ok","timestamp":1610848027251,"user_tz":360,"elapsed":371,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}},"outputId":"7cdf43a0-8dbd-4789-b5e1-b88670375edf"},"source":["import os\r\n","os.chdir(\"gdrive/My Drive/DL_stuff/assignment5_materials\")\r\n","#os.chdir(\"./assignment1\")\r\n","!pwd"],"execution_count":6,"outputs":[{"output_type":"stream","text":["/content/gdrive/My Drive/DL_stuff/assignment5_materials\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"nBR0S_c5A2mi","executionInfo":{"status":"ok","timestamp":1610848046971,"user_tz":360,"elapsed":246,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}}},"source":["%matplotlib inline\n","\n","import sys\n","import gym\n","import torch\n","import pylab\n","import random\n","import numpy as np\n","from collections import deque\n","from datetime import datetime\n","from copy import deepcopy\n","import torch.nn as nn\n","import torch.optim as optim\n","import torch.nn.functional as F\n","from torch.autograd import Variable\n","from utils import find_max_lives, check_live, get_frame, get_init_state\n","from model import DQN\n","from config import *\n","\n","import matplotlib.pyplot as plt\n","# %load_ext autoreload\n","# %autoreload 2"],"execution_count":8,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"V4av-s_zA2mi"},"source":["## Understanding the environment"]},{"cell_type":"markdown","metadata":{"id":"jY5dFUgJA2mi"},"source":["In the following cell, we initialize our game of __Breakout__ and you can see how the environment looks like. For further documentation of the of the environment refer to https://gym.openai.com/envs. \n","\n","In breakout, we will use 3 actions \"fire\", \"left\", and \"right\". \"fire\" is only used to reset the game when a life is lost, \"left\" moves the agent left and \"right\" moves the agent right."]},{"cell_type":"code","metadata":{"id":"8_pjMhGIA2mi","executionInfo":{"status":"ok","timestamp":1610848059013,"user_tz":360,"elapsed":552,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}}},"source":["env = gym.make('BreakoutDeterministic-v4')\n","state = env.reset()"],"execution_count":10,"outputs":[]},{"cell_type":"code","metadata":{"id":"E1aaEuZaA2mk","executionInfo":{"status":"ok","timestamp":1610848060185,"user_tz":360,"elapsed":268,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}}},"source":["number_lives = find_max_lives(env)\n","state_size = env.observation_space.shape\n","action_size = 3 #fire, left, and right"],"execution_count":11,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"zsRoD4x7A2ml"},"source":["## Creating a DQN Agent"]},{"cell_type":"markdown","metadata":{"id":"w9sTdTi9A2mm"},"source":["Here we create a DQN Agent. This agent is defined in the __agent.py__. The corresponding neural network is defined in the __model.py__. Once you've created a working DQN agent, use the code in agent.py to create a double DQN agent in __agent_double.py__. Set the flag \"double_dqn\" to True to train the double DQN agent.\n","\n","__Evaluation Reward__ : The average reward received in the past 100 episodes/games.\n","\n","__Frame__ : Number of frames processed in total.\n","\n","__Memory Size__ : The current size of the replay memory."]},{"cell_type":"code","metadata":{"id":"vZn15im2A2mm","executionInfo":{"status":"ok","timestamp":1610848073288,"user_tz":360,"elapsed":11064,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}}},"source":["double_dqn = False # set to True if using double DQN agent\n","\n","if double_dqn:\n"," from agent_double import Agent\n","else:\n"," from agent import Agent\n","\n","agent = Agent(action_size)\n","evaluation_reward = deque(maxlen=evaluation_reward_length)\n","frame = 0\n","memory_size = 0"],"execution_count":12,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"e8793gNoA2mn"},"source":["### Main Training Loop"]},{"cell_type":"markdown","metadata":{"id":"f2owGNxqA2mn"},"source":["In this training loop, we do not render the screen because it slows down training signficantly. To watch the agent play the game, run the code in next section \"Visualize Agent Performance\""]},{"cell_type":"code","metadata":{"scrolled":true,"colab":{"base_uri":"https://localhost:8080/","height":1000},"id":"cvDwsYXwA2mn","executionInfo":{"status":"ok","timestamp":1610848786700,"user_tz":360,"elapsed":700292,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}},"outputId":"8f31944a-1310-4cfd-a05e-283c3d973870"},"source":["rewards, episodes = [], []\n","best_eval_reward = 0\n","for e in range(500):\n"," done = False\n"," score = 0\n","\n"," history = np.zeros([5, 84, 84], dtype=np.uint8)\n"," step = 0\n"," d = False\n"," state = env.reset()\n"," next_state = state\n"," life = number_lives\n","\n"," get_init_state(history, state)\n","\n"," while not done:\n"," step += 1\n"," frame += 1\n","\n"," # Perform a fire action if ball is no longer on screen to continue onto next life\n"," if step > 1 and len(np.unique(next_state[:189] == state[:189])) < 2:\n"," action = 0\n"," else:\n"," action = agent.get_action(np.float32(history[:4, :, :]) / 255.)\n"," state = next_state\n"," next_state, reward, done, info = env.step(action + 1)\n"," \n"," frame_next_state = get_frame(next_state)\n"," history[4, :, :] = frame_next_state\n"," terminal_state = check_live(life, info['ale.lives'])\n","\n"," life = info['ale.lives']\n"," r = np.clip(reward, -1, 1) \n"," r = reward\n","\n"," # Store the transition in memory \n"," agent.memory.push(deepcopy(frame_next_state), action, r, terminal_state)\n"," # Start training after random sample generation\n"," if(frame >= train_frame):\n"," agent.train_policy_net(frame)\n"," # Update the target network only for Double DQN only\n"," if double_dqn and (frame % update_target_network_frequency)== 0:\n"," agent.update_target_net()\n"," score += reward\n"," history[:4, :, :] = history[1:, :, :]\n"," \n"," if done:\n"," evaluation_reward.append(score)\n"," rewards.append(np.mean(evaluation_reward))\n"," episodes.append(e)\n"," pylab.plot(episodes, rewards, 'b')\n"," pylab.xlabel('Episodes')\n"," pylab.ylabel('Rewards') \n"," pylab.title('Episodes vs Reward')\n"," pylab.savefig(\"./save_graph/breakout_dqn.png\") # save graph for training visualization\n"," \n"," # every episode, plot the play time\n"," print(\"episode:\", e, \" score:\", score, \" memory length:\",\n"," len(agent.memory), \" epsilon:\", agent.epsilon, \" steps:\", step,\n"," \" lr:\", agent.optimizer.param_groups[0]['lr'], \" evaluation reward:\", np.mean(evaluation_reward))\n","\n"," # if the mean of scores of last 100 episode is bigger than 5 save model\n"," ### Change this save condition to whatever you prefer ###\n"," if np.mean(evaluation_reward) > 5 and np.mean(evaluation_reward) > best_eval_reward:\n"," torch.save(agent.policy_net, \"./save_model/breakout_dqn.pth\")\n"," best_eval_reward = np.mean(evaluation_reward)\n"],"execution_count":14,"outputs":[{"output_type":"stream","text":["episode: 0 score: 0.0 memory length: 734 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.0\n","episode: 1 score: 4.0 memory length: 1050 epsilon: 1.0 steps: 316 lr: 0.0001 evaluation reward: 1.75\n","episode: 2 score: 2.0 memory length: 1248 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.8\n","episode: 3 score: 2.0 memory length: 1446 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.8333333333333333\n","episode: 4 score: 3.0 memory length: 1692 epsilon: 1.0 steps: 246 lr: 0.0001 evaluation reward: 2.0\n","episode: 5 score: 0.0 memory length: 1815 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.75\n","episode: 6 score: 2.0 memory length: 2013 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.7777777777777777\n","episode: 7 score: 1.0 memory length: 2182 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.7\n","episode: 8 score: 2.0 memory length: 2382 epsilon: 1.0 steps: 200 lr: 0.0001 evaluation reward: 1.7272727272727273\n","episode: 9 score: 1.0 memory length: 2550 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.6666666666666667\n","episode: 10 score: 0.0 memory length: 2672 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.5384615384615385\n","episode: 11 score: 0.0 memory length: 2795 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.4285714285714286\n","episode: 12 score: 0.0 memory length: 2918 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.3333333333333333\n","episode: 13 score: 2.0 memory length: 3115 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.375\n","episode: 14 score: 2.0 memory length: 3313 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.411764705882353\n","episode: 15 score: 1.0 memory length: 3463 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.3888888888888888\n","episode: 16 score: 0.0 memory length: 3586 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.3157894736842106\n","episode: 17 score: 0.0 memory length: 3708 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.25\n","episode: 18 score: 3.0 memory length: 3974 epsilon: 1.0 steps: 266 lr: 0.0001 evaluation reward: 1.3333333333333333\n","episode: 19 score: 3.0 memory length: 4221 epsilon: 1.0 steps: 247 lr: 0.0001 evaluation reward: 1.4090909090909092\n","episode: 20 score: 3.0 memory length: 4468 epsilon: 1.0 steps: 247 lr: 0.0001 evaluation reward: 1.4782608695652173\n","episode: 21 score: 0.0 memory length: 4591 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.4166666666666667\n","episode: 22 score: 0.0 memory length: 4714 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.36\n","episode: 23 score: 2.0 memory length: 4929 epsilon: 1.0 steps: 215 lr: 0.0001 evaluation reward: 1.3846153846153846\n","episode: 24 score: 0.0 memory length: 5051 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.3333333333333333\n","episode: 25 score: 2.0 memory length: 5248 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.3571428571428572\n","episode: 26 score: 2.0 memory length: 5445 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.3793103448275863\n","episode: 27 score: 1.0 memory length: 5613 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.3666666666666667\n","episode: 28 score: 3.0 memory length: 5880 epsilon: 1.0 steps: 267 lr: 0.0001 evaluation reward: 1.4193548387096775\n","episode: 29 score: 1.0 memory length: 6032 epsilon: 1.0 steps: 152 lr: 0.0001 evaluation reward: 1.40625\n","episode: 30 score: 1.0 memory length: 6202 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.393939393939394\n","episode: 31 score: 1.0 memory length: 6370 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.3823529411764706\n","episode: 32 score: 0.0 memory length: 6492 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.3428571428571427\n","episode: 33 score: 0.0 memory length: 6614 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.3055555555555556\n","episode: 34 score: 1.0 memory length: 6786 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.2972972972972974\n","episode: 35 score: 1.0 memory length: 6958 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.2894736842105263\n","episode: 36 score: 0.0 memory length: 7081 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.2564102564102564\n","episode: 37 score: 1.0 memory length: 7253 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.25\n","episode: 38 score: 2.0 memory length: 7472 epsilon: 1.0 steps: 219 lr: 0.0001 evaluation reward: 1.2682926829268293\n","episode: 39 score: 0.0 memory length: 7595 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.2380952380952381\n","episode: 40 score: 0.0 memory length: 7718 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.2093023255813953\n","episode: 41 score: 2.0 memory length: 7935 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.2272727272727273\n","episode: 42 score: 2.0 memory length: 8151 epsilon: 1.0 steps: 216 lr: 0.0001 evaluation reward: 1.2444444444444445\n","episode: 43 score: 2.0 memory length: 8369 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.2608695652173914\n","episode: 44 score: 1.0 memory length: 8519 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.2553191489361701\n","episode: 45 score: 2.0 memory length: 8718 epsilon: 1.0 steps: 199 lr: 0.0001 evaluation reward: 1.2708333333333333\n","episode: 46 score: 3.0 memory length: 8969 epsilon: 1.0 steps: 251 lr: 0.0001 evaluation reward: 1.3061224489795917\n","episode: 47 score: 1.0 memory length: 9120 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.3\n","episode: 48 score: 1.0 memory length: 9270 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.2941176470588236\n","episode: 49 score: 3.0 memory length: 9533 epsilon: 1.0 steps: 263 lr: 0.0001 evaluation reward: 1.3269230769230769\n","episode: 50 score: 3.0 memory length: 9800 epsilon: 1.0 steps: 267 lr: 0.0001 evaluation reward: 1.3584905660377358\n","episode: 51 score: 0.0 memory length: 9923 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.3333333333333333\n","episode: 52 score: 1.0 memory length: 10092 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.3272727272727274\n","episode: 53 score: 2.0 memory length: 10290 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.3392857142857142\n","episode: 54 score: 1.0 memory length: 10441 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.3333333333333333\n","episode: 55 score: 1.0 memory length: 10592 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.3275862068965518\n","episode: 56 score: 3.0 memory length: 10836 epsilon: 1.0 steps: 244 lr: 0.0001 evaluation reward: 1.3559322033898304\n","episode: 57 score: 0.0 memory length: 10959 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.3333333333333333\n","episode: 58 score: 2.0 memory length: 11161 epsilon: 1.0 steps: 202 lr: 0.0001 evaluation reward: 1.3442622950819672\n","episode: 59 score: 1.0 memory length: 11331 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.3387096774193548\n","episode: 60 score: 1.0 memory length: 11482 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.3333333333333333\n","episode: 61 score: 3.0 memory length: 11748 epsilon: 1.0 steps: 266 lr: 0.0001 evaluation reward: 1.359375\n","episode: 62 score: 4.0 memory length: 12046 epsilon: 1.0 steps: 298 lr: 0.0001 evaluation reward: 1.4\n","episode: 63 score: 0.0 memory length: 12168 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.378787878787879\n","episode: 64 score: 3.0 memory length: 12435 epsilon: 1.0 steps: 267 lr: 0.0001 evaluation reward: 1.4029850746268657\n","episode: 65 score: 0.0 memory length: 12558 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.3823529411764706\n","episode: 66 score: 2.0 memory length: 12773 epsilon: 1.0 steps: 215 lr: 0.0001 evaluation reward: 1.391304347826087\n","episode: 67 score: 1.0 memory length: 12924 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.3857142857142857\n","episode: 68 score: 5.0 memory length: 13289 epsilon: 1.0 steps: 365 lr: 0.0001 evaluation reward: 1.4366197183098592\n","episode: 69 score: 4.0 memory length: 13567 epsilon: 1.0 steps: 278 lr: 0.0001 evaluation reward: 1.4722222222222223\n","episode: 70 score: 2.0 memory length: 13751 epsilon: 1.0 steps: 184 lr: 0.0001 evaluation reward: 1.4794520547945205\n","episode: 71 score: 5.0 memory length: 14078 epsilon: 1.0 steps: 327 lr: 0.0001 evaluation reward: 1.527027027027027\n","episode: 72 score: 1.0 memory length: 14229 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.52\n","episode: 73 score: 1.0 memory length: 14379 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.513157894736842\n","episode: 74 score: 1.0 memory length: 14548 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.5064935064935066\n","episode: 75 score: 0.0 memory length: 14671 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.4871794871794872\n","episode: 76 score: 2.0 memory length: 14889 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.4936708860759493\n","episode: 77 score: 2.0 memory length: 15109 epsilon: 1.0 steps: 220 lr: 0.0001 evaluation reward: 1.5\n","episode: 78 score: 3.0 memory length: 15337 epsilon: 1.0 steps: 228 lr: 0.0001 evaluation reward: 1.5185185185185186\n","episode: 79 score: 1.0 memory length: 15506 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.5121951219512195\n","episode: 80 score: 2.0 memory length: 15703 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.5180722891566265\n","episode: 81 score: 0.0 memory length: 15826 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.5\n","episode: 82 score: 1.0 memory length: 15995 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.4941176470588236\n","episode: 83 score: 1.0 memory length: 16164 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.4883720930232558\n","episode: 84 score: 2.0 memory length: 16383 epsilon: 1.0 steps: 219 lr: 0.0001 evaluation reward: 1.4942528735632183\n","episode: 85 score: 3.0 memory length: 16608 epsilon: 1.0 steps: 225 lr: 0.0001 evaluation reward: 1.5113636363636365\n","episode: 86 score: 1.0 memory length: 16759 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.5056179775280898\n","episode: 87 score: 0.0 memory length: 16881 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.488888888888889\n","episode: 88 score: 0.0 memory length: 17004 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.4725274725274726\n","episode: 89 score: 1.0 memory length: 17154 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.4673913043478262\n","episode: 90 score: 3.0 memory length: 17399 epsilon: 1.0 steps: 245 lr: 0.0001 evaluation reward: 1.4838709677419355\n","episode: 91 score: 1.0 memory length: 17550 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.4787234042553192\n","episode: 92 score: 1.0 memory length: 17718 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.4736842105263157\n","episode: 93 score: 1.0 memory length: 17887 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.46875\n","episode: 94 score: 1.0 memory length: 18038 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.4639175257731958\n","episode: 95 score: 2.0 memory length: 18260 epsilon: 1.0 steps: 222 lr: 0.0001 evaluation reward: 1.469387755102041\n","episode: 96 score: 3.0 memory length: 18531 epsilon: 1.0 steps: 271 lr: 0.0001 evaluation reward: 1.4848484848484849\n","episode: 97 score: 1.0 memory length: 18703 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.48\n","episode: 98 score: 2.0 memory length: 18901 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.47\n","episode: 99 score: 2.0 memory length: 19099 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.49\n","episode: 100 score: 2.0 memory length: 19297 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.51\n","episode: 101 score: 2.0 memory length: 19513 epsilon: 1.0 steps: 216 lr: 0.0001 evaluation reward: 1.49\n","episode: 102 score: 1.0 memory length: 19682 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.48\n","episode: 103 score: 0.0 memory length: 19805 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.46\n","episode: 104 score: 2.0 memory length: 20022 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.45\n","episode: 105 score: 0.0 memory length: 20144 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.45\n","episode: 106 score: 0.0 memory length: 20266 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.43\n","episode: 107 score: 2.0 memory length: 20483 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.44\n","episode: 108 score: 0.0 memory length: 20606 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.42\n","episode: 109 score: 1.0 memory length: 20757 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.42\n","episode: 110 score: 0.0 memory length: 20880 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.42\n","episode: 111 score: 1.0 memory length: 21051 epsilon: 1.0 steps: 171 lr: 0.0001 evaluation reward: 1.43\n","episode: 112 score: 0.0 memory length: 21173 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.43\n","episode: 113 score: 1.0 memory length: 21345 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.42\n","episode: 114 score: 1.0 memory length: 21496 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.41\n","episode: 115 score: 1.0 memory length: 21648 epsilon: 1.0 steps: 152 lr: 0.0001 evaluation reward: 1.41\n","episode: 116 score: 0.0 memory length: 21770 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.41\n","episode: 117 score: 6.0 memory length: 22109 epsilon: 1.0 steps: 339 lr: 0.0001 evaluation reward: 1.47\n","episode: 118 score: 2.0 memory length: 22330 epsilon: 1.0 steps: 221 lr: 0.0001 evaluation reward: 1.46\n","episode: 119 score: 2.0 memory length: 22547 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.45\n","episode: 120 score: 3.0 memory length: 22793 epsilon: 1.0 steps: 246 lr: 0.0001 evaluation reward: 1.45\n","episode: 121 score: 0.0 memory length: 22916 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.45\n","episode: 122 score: 0.0 memory length: 23039 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.45\n","episode: 123 score: 0.0 memory length: 23162 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.43\n","episode: 124 score: 1.0 memory length: 23334 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.44\n","episode: 125 score: 2.0 memory length: 23534 epsilon: 1.0 steps: 200 lr: 0.0001 evaluation reward: 1.44\n","episode: 126 score: 2.0 memory length: 23732 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.44\n","episode: 127 score: 3.0 memory length: 23998 epsilon: 1.0 steps: 266 lr: 0.0001 evaluation reward: 1.46\n","episode: 128 score: 1.0 memory length: 24149 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.44\n","episode: 129 score: 3.0 memory length: 24398 epsilon: 1.0 steps: 249 lr: 0.0001 evaluation reward: 1.46\n","episode: 130 score: 2.0 memory length: 24596 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.47\n","episode: 131 score: 0.0 memory length: 24718 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.46\n","episode: 132 score: 2.0 memory length: 24939 epsilon: 1.0 steps: 221 lr: 0.0001 evaluation reward: 1.48\n","episode: 133 score: 2.0 memory length: 25136 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.5\n","episode: 134 score: 1.0 memory length: 25288 epsilon: 1.0 steps: 152 lr: 0.0001 evaluation reward: 1.5\n","episode: 135 score: 1.0 memory length: 25439 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.5\n","episode: 136 score: 0.0 memory length: 25561 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.5\n","episode: 137 score: 3.0 memory length: 25808 epsilon: 1.0 steps: 247 lr: 0.0001 evaluation reward: 1.52\n","episode: 138 score: 1.0 memory length: 25977 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.51\n","episode: 139 score: 2.0 memory length: 26161 epsilon: 1.0 steps: 184 lr: 0.0001 evaluation reward: 1.53\n","episode: 140 score: 2.0 memory length: 26379 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.55\n","episode: 141 score: 2.0 memory length: 26577 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.55\n","episode: 142 score: 2.0 memory length: 26776 epsilon: 1.0 steps: 199 lr: 0.0001 evaluation reward: 1.55\n","episode: 143 score: 1.0 memory length: 26944 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.54\n","episode: 144 score: 1.0 memory length: 27115 epsilon: 1.0 steps: 171 lr: 0.0001 evaluation reward: 1.54\n","episode: 145 score: 1.0 memory length: 27285 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.53\n","episode: 146 score: 3.0 memory length: 27531 epsilon: 1.0 steps: 246 lr: 0.0001 evaluation reward: 1.53\n","episode: 147 score: 0.0 memory length: 27653 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.52\n","episode: 148 score: 0.0 memory length: 27776 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.51\n","episode: 149 score: 0.0 memory length: 27899 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.48\n","episode: 150 score: 2.0 memory length: 28097 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.47\n","episode: 151 score: 1.0 memory length: 28247 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.48\n","episode: 152 score: 2.0 memory length: 28464 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.49\n","episode: 153 score: 0.0 memory length: 28587 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.47\n","episode: 154 score: 1.0 memory length: 28738 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.47\n","episode: 155 score: 5.0 memory length: 29066 epsilon: 1.0 steps: 328 lr: 0.0001 evaluation reward: 1.51\n","episode: 156 score: 0.0 memory length: 29188 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.48\n","episode: 157 score: 1.0 memory length: 29339 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.49\n","episode: 158 score: 4.0 memory length: 29611 epsilon: 1.0 steps: 272 lr: 0.0001 evaluation reward: 1.51\n","episode: 159 score: 1.0 memory length: 29761 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.51\n","episode: 160 score: 0.0 memory length: 29884 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.5\n","episode: 161 score: 2.0 memory length: 30082 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.49\n","episode: 162 score: 1.0 memory length: 30252 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.46\n","episode: 163 score: 0.0 memory length: 30375 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.46\n","episode: 164 score: 3.0 memory length: 30620 epsilon: 1.0 steps: 245 lr: 0.0001 evaluation reward: 1.46\n","episode: 165 score: 0.0 memory length: 30743 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.46\n","episode: 166 score: 0.0 memory length: 30866 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.44\n","episode: 167 score: 1.0 memory length: 31034 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.44\n","episode: 168 score: 2.0 memory length: 31231 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.41\n","episode: 169 score: 1.0 memory length: 31403 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.38\n","episode: 170 score: 2.0 memory length: 31621 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.38\n","episode: 171 score: 1.0 memory length: 31790 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.34\n","episode: 172 score: 4.0 memory length: 32048 epsilon: 1.0 steps: 258 lr: 0.0001 evaluation reward: 1.37\n","episode: 173 score: 0.0 memory length: 32171 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.36\n","episode: 174 score: 2.0 memory length: 32369 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.37\n","episode: 175 score: 0.0 memory length: 32492 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.37\n","episode: 176 score: 1.0 memory length: 32644 epsilon: 1.0 steps: 152 lr: 0.0001 evaluation reward: 1.36\n","episode: 177 score: 2.0 memory length: 32863 epsilon: 1.0 steps: 219 lr: 0.0001 evaluation reward: 1.36\n","episode: 178 score: 0.0 memory length: 32986 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.33\n","episode: 179 score: 1.0 memory length: 33136 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.33\n","episode: 180 score: 1.0 memory length: 33305 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.32\n","episode: 181 score: 0.0 memory length: 33428 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.32\n","episode: 182 score: 1.0 memory length: 33579 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.32\n","episode: 183 score: 0.0 memory length: 33701 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.31\n","episode: 184 score: 1.0 memory length: 33852 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.3\n","episode: 185 score: 1.0 memory length: 34024 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.28\n","episode: 186 score: 2.0 memory length: 34241 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.29\n","episode: 187 score: 1.0 memory length: 34392 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.3\n","episode: 188 score: 2.0 memory length: 34590 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.32\n","episode: 189 score: 2.0 memory length: 34789 epsilon: 1.0 steps: 199 lr: 0.0001 evaluation reward: 1.33\n","episode: 190 score: 4.0 memory length: 35061 epsilon: 1.0 steps: 272 lr: 0.0001 evaluation reward: 1.34\n","episode: 191 score: 3.0 memory length: 35307 epsilon: 1.0 steps: 246 lr: 0.0001 evaluation reward: 1.36\n","episode: 192 score: 3.0 memory length: 35578 epsilon: 1.0 steps: 271 lr: 0.0001 evaluation reward: 1.38\n","episode: 193 score: 2.0 memory length: 35795 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.39\n","episode: 194 score: 2.0 memory length: 35993 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.4\n","episode: 195 score: 3.0 memory length: 36222 epsilon: 1.0 steps: 229 lr: 0.0001 evaluation reward: 1.41\n","episode: 196 score: 1.0 memory length: 36392 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.39\n","episode: 197 score: 2.0 memory length: 36589 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.4\n","episode: 198 score: 1.0 memory length: 36740 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.39\n","episode: 199 score: 0.0 memory length: 36863 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.37\n","episode: 200 score: 2.0 memory length: 37060 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.37\n","episode: 201 score: 1.0 memory length: 37210 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.36\n","episode: 202 score: 2.0 memory length: 37429 epsilon: 1.0 steps: 219 lr: 0.0001 evaluation reward: 1.37\n","episode: 203 score: 3.0 memory length: 37674 epsilon: 1.0 steps: 245 lr: 0.0001 evaluation reward: 1.4\n","episode: 204 score: 4.0 memory length: 37970 epsilon: 1.0 steps: 296 lr: 0.0001 evaluation reward: 1.42\n","episode: 205 score: 3.0 memory length: 38237 epsilon: 1.0 steps: 267 lr: 0.0001 evaluation reward: 1.45\n","episode: 206 score: 2.0 memory length: 38435 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.47\n","episode: 207 score: 2.0 memory length: 38632 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.47\n","episode: 208 score: 1.0 memory length: 38801 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.48\n","episode: 209 score: 3.0 memory length: 39050 epsilon: 1.0 steps: 249 lr: 0.0001 evaluation reward: 1.5\n","episode: 210 score: 0.0 memory length: 39173 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.5\n","episode: 211 score: 2.0 memory length: 39389 epsilon: 1.0 steps: 216 lr: 0.0001 evaluation reward: 1.51\n","episode: 212 score: 0.0 memory length: 39511 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.51\n","episode: 213 score: 3.0 memory length: 39780 epsilon: 1.0 steps: 269 lr: 0.0001 evaluation reward: 1.53\n","episode: 214 score: 1.0 memory length: 39949 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.53\n","episode: 215 score: 3.0 memory length: 40216 epsilon: 1.0 steps: 267 lr: 0.0001 evaluation reward: 1.55\n","episode: 216 score: 1.0 memory length: 40367 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.56\n","episode: 217 score: 0.0 memory length: 40489 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.5\n","episode: 218 score: 0.0 memory length: 40612 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.48\n","episode: 219 score: 1.0 memory length: 40780 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.47\n","episode: 220 score: 0.0 memory length: 40902 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.44\n","episode: 221 score: 3.0 memory length: 41171 epsilon: 1.0 steps: 269 lr: 0.0001 evaluation reward: 1.47\n","episode: 222 score: 2.0 memory length: 41386 epsilon: 1.0 steps: 215 lr: 0.0001 evaluation reward: 1.49\n","episode: 223 score: 0.0 memory length: 41509 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.49\n","episode: 224 score: 0.0 memory length: 41632 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.48\n","episode: 225 score: 3.0 memory length: 41843 epsilon: 1.0 steps: 211 lr: 0.0001 evaluation reward: 1.49\n","episode: 226 score: 0.0 memory length: 41966 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.47\n","episode: 227 score: 3.0 memory length: 42238 epsilon: 1.0 steps: 272 lr: 0.0001 evaluation reward: 1.47\n","episode: 228 score: 3.0 memory length: 42500 epsilon: 1.0 steps: 262 lr: 0.0001 evaluation reward: 1.49\n","episode: 229 score: 2.0 memory length: 42698 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.48\n","episode: 230 score: 1.0 memory length: 42849 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.47\n","episode: 231 score: 0.0 memory length: 42972 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.47\n","episode: 232 score: 2.0 memory length: 43174 epsilon: 1.0 steps: 202 lr: 0.0001 evaluation reward: 1.47\n","episode: 233 score: 2.0 memory length: 43392 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.47\n","episode: 234 score: 0.0 memory length: 43515 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.46\n","episode: 235 score: 0.0 memory length: 43638 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.45\n","episode: 236 score: 1.0 memory length: 43808 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.46\n","episode: 237 score: 2.0 memory length: 44026 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.45\n","episode: 238 score: 0.0 memory length: 44148 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.44\n","episode: 239 score: 2.0 memory length: 44365 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.44\n","episode: 240 score: 2.0 memory length: 44584 epsilon: 1.0 steps: 219 lr: 0.0001 evaluation reward: 1.44\n","episode: 241 score: 2.0 memory length: 44802 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.44\n","episode: 242 score: 1.0 memory length: 44953 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.43\n","episode: 243 score: 4.0 memory length: 45264 epsilon: 1.0 steps: 311 lr: 0.0001 evaluation reward: 1.46\n","episode: 244 score: 3.0 memory length: 45510 epsilon: 1.0 steps: 246 lr: 0.0001 evaluation reward: 1.48\n","episode: 245 score: 3.0 memory length: 45754 epsilon: 1.0 steps: 244 lr: 0.0001 evaluation reward: 1.5\n","episode: 246 score: 3.0 memory length: 45997 epsilon: 1.0 steps: 243 lr: 0.0001 evaluation reward: 1.5\n","episode: 247 score: 4.0 memory length: 46254 epsilon: 1.0 steps: 257 lr: 0.0001 evaluation reward: 1.54\n","episode: 248 score: 2.0 memory length: 46472 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.56\n","episode: 249 score: 3.0 memory length: 46739 epsilon: 1.0 steps: 267 lr: 0.0001 evaluation reward: 1.59\n","episode: 250 score: 2.0 memory length: 46936 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.59\n","episode: 251 score: 0.0 memory length: 47059 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.58\n","episode: 252 score: 0.0 memory length: 47182 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.56\n","episode: 253 score: 0.0 memory length: 47305 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.56\n","episode: 254 score: 0.0 memory length: 47428 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.55\n","episode: 255 score: 1.0 memory length: 47579 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.51\n","episode: 256 score: 0.0 memory length: 47702 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.51\n","episode: 257 score: 2.0 memory length: 47922 epsilon: 1.0 steps: 220 lr: 0.0001 evaluation reward: 1.52\n","episode: 258 score: 3.0 memory length: 48148 epsilon: 1.0 steps: 226 lr: 0.0001 evaluation reward: 1.51\n","episode: 259 score: 4.0 memory length: 48424 epsilon: 1.0 steps: 276 lr: 0.0001 evaluation reward: 1.54\n","episode: 260 score: 3.0 memory length: 48669 epsilon: 1.0 steps: 245 lr: 0.0001 evaluation reward: 1.57\n","episode: 261 score: 1.0 memory length: 48840 epsilon: 1.0 steps: 171 lr: 0.0001 evaluation reward: 1.56\n","episode: 262 score: 5.0 memory length: 49166 epsilon: 1.0 steps: 326 lr: 0.0001 evaluation reward: 1.6\n","episode: 263 score: 1.0 memory length: 49318 epsilon: 1.0 steps: 152 lr: 0.0001 evaluation reward: 1.61\n","episode: 264 score: 1.0 memory length: 49487 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.59\n","episode: 265 score: 0.0 memory length: 49610 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.59\n","episode: 266 score: 1.0 memory length: 49779 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.6\n","episode: 267 score: 1.0 memory length: 49948 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.6\n","episode: 268 score: 2.0 memory length: 50145 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.6\n","episode: 269 score: 3.0 memory length: 50371 epsilon: 1.0 steps: 226 lr: 0.0001 evaluation reward: 1.62\n","episode: 270 score: 0.0 memory length: 50494 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.6\n","episode: 271 score: 1.0 memory length: 50666 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.6\n","episode: 272 score: 1.0 memory length: 50835 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.57\n","episode: 273 score: 2.0 memory length: 51033 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.59\n","episode: 274 score: 1.0 memory length: 51203 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.58\n","episode: 275 score: 1.0 memory length: 51372 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.59\n","episode: 276 score: 3.0 memory length: 51642 epsilon: 1.0 steps: 270 lr: 0.0001 evaluation reward: 1.61\n","episode: 277 score: 2.0 memory length: 51840 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.61\n","episode: 278 score: 2.0 memory length: 52059 epsilon: 1.0 steps: 219 lr: 0.0001 evaluation reward: 1.63\n","episode: 279 score: 0.0 memory length: 52182 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.62\n","episode: 280 score: 2.0 memory length: 52382 epsilon: 1.0 steps: 200 lr: 0.0001 evaluation reward: 1.63\n","episode: 281 score: 0.0 memory length: 52505 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.63\n","episode: 282 score: 1.0 memory length: 52674 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.63\n","episode: 283 score: 3.0 memory length: 52899 epsilon: 1.0 steps: 225 lr: 0.0001 evaluation reward: 1.66\n","episode: 284 score: 4.0 memory length: 53168 epsilon: 1.0 steps: 269 lr: 0.0001 evaluation reward: 1.69\n","episode: 285 score: 2.0 memory length: 53366 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.7\n","episode: 286 score: 1.0 memory length: 53517 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.69\n","episode: 287 score: 1.0 memory length: 53686 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.69\n","episode: 288 score: 2.0 memory length: 53884 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.69\n","episode: 289 score: 2.0 memory length: 54103 epsilon: 1.0 steps: 219 lr: 0.0001 evaluation reward: 1.69\n","episode: 290 score: 2.0 memory length: 54301 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.67\n","episode: 291 score: 2.0 memory length: 54517 epsilon: 1.0 steps: 216 lr: 0.0001 evaluation reward: 1.66\n","episode: 292 score: 2.0 memory length: 54715 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.65\n","episode: 293 score: 2.0 memory length: 54932 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.65\n","episode: 294 score: 3.0 memory length: 55177 epsilon: 1.0 steps: 245 lr: 0.0001 evaluation reward: 1.66\n","episode: 295 score: 2.0 memory length: 55397 epsilon: 1.0 steps: 220 lr: 0.0001 evaluation reward: 1.65\n","episode: 296 score: 2.0 memory length: 55597 epsilon: 1.0 steps: 200 lr: 0.0001 evaluation reward: 1.66\n","episode: 297 score: 1.0 memory length: 55766 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.65\n","episode: 298 score: 1.0 memory length: 55938 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.65\n","episode: 299 score: 0.0 memory length: 56061 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.65\n","episode: 300 score: 1.0 memory length: 56214 epsilon: 1.0 steps: 153 lr: 0.0001 evaluation reward: 1.64\n","episode: 301 score: 0.0 memory length: 56337 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.63\n","episode: 302 score: 1.0 memory length: 56509 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.62\n","episode: 303 score: 1.0 memory length: 56678 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.6\n","episode: 304 score: 1.0 memory length: 56829 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.57\n","episode: 305 score: 2.0 memory length: 57026 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.56\n","episode: 306 score: 3.0 memory length: 57271 epsilon: 1.0 steps: 245 lr: 0.0001 evaluation reward: 1.57\n","episode: 307 score: 1.0 memory length: 57441 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.56\n","episode: 308 score: 4.0 memory length: 57739 epsilon: 1.0 steps: 298 lr: 0.0001 evaluation reward: 1.59\n","episode: 309 score: 2.0 memory length: 57936 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.58\n","episode: 310 score: 1.0 memory length: 58105 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.59\n","episode: 311 score: 1.0 memory length: 58274 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.58\n","episode: 312 score: 0.0 memory length: 58396 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.58\n","episode: 313 score: 0.0 memory length: 58519 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.55\n","episode: 314 score: 2.0 memory length: 58738 epsilon: 1.0 steps: 219 lr: 0.0001 evaluation reward: 1.56\n","episode: 315 score: 2.0 memory length: 58936 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.55\n","episode: 316 score: 0.0 memory length: 59059 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.54\n","episode: 317 score: 1.0 memory length: 59210 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.55\n","episode: 318 score: 1.0 memory length: 59382 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.56\n","episode: 319 score: 0.0 memory length: 59504 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.55\n","episode: 320 score: 2.0 memory length: 59701 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.57\n","episode: 321 score: 0.0 memory length: 59824 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.54\n","episode: 322 score: 0.0 memory length: 59946 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.52\n","episode: 323 score: 1.0 memory length: 60115 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.53\n","episode: 324 score: 2.0 memory length: 60313 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.55\n","episode: 325 score: 1.0 memory length: 60482 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.53\n","episode: 326 score: 2.0 memory length: 60700 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.55\n","episode: 327 score: 3.0 memory length: 60947 epsilon: 1.0 steps: 247 lr: 0.0001 evaluation reward: 1.55\n","episode: 328 score: 1.0 memory length: 61119 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.53\n","episode: 329 score: 0.0 memory length: 61242 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.51\n","episode: 330 score: 0.0 memory length: 61365 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.5\n","episode: 331 score: 2.0 memory length: 61546 epsilon: 1.0 steps: 181 lr: 0.0001 evaluation reward: 1.52\n","episode: 332 score: 0.0 memory length: 61669 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.5\n","episode: 333 score: 0.0 memory length: 61792 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.48\n","episode: 334 score: 4.0 memory length: 62087 epsilon: 1.0 steps: 295 lr: 0.0001 evaluation reward: 1.52\n","episode: 335 score: 1.0 memory length: 62259 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.53\n","episode: 336 score: 3.0 memory length: 62503 epsilon: 1.0 steps: 244 lr: 0.0001 evaluation reward: 1.55\n","episode: 337 score: 2.0 memory length: 62720 epsilon: 1.0 steps: 217 lr: 0.0001 evaluation reward: 1.55\n","episode: 338 score: 2.0 memory length: 62900 epsilon: 1.0 steps: 180 lr: 0.0001 evaluation reward: 1.57\n","episode: 339 score: 1.0 memory length: 63072 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.56\n","episode: 340 score: 1.0 memory length: 63241 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.55\n","episode: 341 score: 2.0 memory length: 63439 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.55\n","episode: 342 score: 0.0 memory length: 63561 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.54\n","episode: 343 score: 2.0 memory length: 63762 epsilon: 1.0 steps: 201 lr: 0.0001 evaluation reward: 1.52\n","episode: 344 score: 0.0 memory length: 63884 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.49\n","episode: 345 score: 1.0 memory length: 64053 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.47\n","episode: 346 score: 0.0 memory length: 64176 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.44\n","episode: 347 score: 0.0 memory length: 64299 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.4\n","episode: 348 score: 0.0 memory length: 64422 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.38\n","episode: 349 score: 2.0 memory length: 64603 epsilon: 1.0 steps: 181 lr: 0.0001 evaluation reward: 1.37\n","episode: 350 score: 3.0 memory length: 64846 epsilon: 1.0 steps: 243 lr: 0.0001 evaluation reward: 1.38\n","episode: 351 score: 3.0 memory length: 65092 epsilon: 1.0 steps: 246 lr: 0.0001 evaluation reward: 1.41\n","episode: 352 score: 2.0 memory length: 65290 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.43\n","episode: 353 score: 2.0 memory length: 65510 epsilon: 1.0 steps: 220 lr: 0.0001 evaluation reward: 1.45\n","episode: 354 score: 2.0 memory length: 65728 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.47\n","episode: 355 score: 1.0 memory length: 65897 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.47\n","episode: 356 score: 1.0 memory length: 66065 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.48\n","episode: 357 score: 0.0 memory length: 66188 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.46\n","episode: 358 score: 1.0 memory length: 66357 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.44\n","episode: 359 score: 2.0 memory length: 66575 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.42\n","episode: 360 score: 2.0 memory length: 66790 epsilon: 1.0 steps: 215 lr: 0.0001 evaluation reward: 1.41\n","episode: 361 score: 1.0 memory length: 66941 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.41\n","episode: 362 score: 0.0 memory length: 67064 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.36\n","episode: 363 score: 2.0 memory length: 67262 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.37\n","episode: 364 score: 0.0 memory length: 67385 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.36\n","episode: 365 score: 1.0 memory length: 67553 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.37\n","episode: 366 score: 2.0 memory length: 67751 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.38\n","episode: 367 score: 0.0 memory length: 67874 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.37\n","episode: 368 score: 1.0 memory length: 68043 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.36\n","episode: 369 score: 0.0 memory length: 68166 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.33\n","episode: 370 score: 0.0 memory length: 68289 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.33\n","episode: 371 score: 6.0 memory length: 68664 epsilon: 1.0 steps: 375 lr: 0.0001 evaluation reward: 1.38\n","episode: 372 score: 3.0 memory length: 68911 epsilon: 1.0 steps: 247 lr: 0.0001 evaluation reward: 1.4\n","episode: 373 score: 1.0 memory length: 69081 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.39\n","episode: 374 score: 3.0 memory length: 69331 epsilon: 1.0 steps: 250 lr: 0.0001 evaluation reward: 1.41\n","episode: 375 score: 0.0 memory length: 69454 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.4\n","episode: 376 score: 2.0 memory length: 69652 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.39\n","episode: 377 score: 1.0 memory length: 69821 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.38\n","episode: 378 score: 2.0 memory length: 70039 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.38\n","episode: 379 score: 1.0 memory length: 70190 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.39\n","episode: 380 score: 0.0 memory length: 70313 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.37\n","episode: 381 score: 1.0 memory length: 70482 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.38\n","episode: 382 score: 0.0 memory length: 70605 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.37\n","episode: 383 score: 2.0 memory length: 70788 epsilon: 1.0 steps: 183 lr: 0.0001 evaluation reward: 1.36\n","episode: 384 score: 0.0 memory length: 70911 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.32\n","episode: 385 score: 2.0 memory length: 71091 epsilon: 1.0 steps: 180 lr: 0.0001 evaluation reward: 1.32\n","episode: 386 score: 2.0 memory length: 71288 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.33\n","episode: 387 score: 0.0 memory length: 71411 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.32\n","episode: 388 score: 1.0 memory length: 71581 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.31\n","episode: 389 score: 1.0 memory length: 71749 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.3\n","episode: 390 score: 0.0 memory length: 71872 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.28\n","episode: 391 score: 2.0 memory length: 72070 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.28\n","episode: 392 score: 1.0 memory length: 72221 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.27\n","episode: 393 score: 1.0 memory length: 72393 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.26\n","episode: 394 score: 4.0 memory length: 72651 epsilon: 1.0 steps: 258 lr: 0.0001 evaluation reward: 1.27\n","episode: 395 score: 4.0 memory length: 72947 epsilon: 1.0 steps: 296 lr: 0.0001 evaluation reward: 1.29\n","episode: 396 score: 1.0 memory length: 73116 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.28\n","episode: 397 score: 1.0 memory length: 73268 epsilon: 1.0 steps: 152 lr: 0.0001 evaluation reward: 1.28\n","episode: 398 score: 2.0 memory length: 73469 epsilon: 1.0 steps: 201 lr: 0.0001 evaluation reward: 1.29\n","episode: 399 score: 0.0 memory length: 73591 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.29\n","episode: 400 score: 3.0 memory length: 73817 epsilon: 1.0 steps: 226 lr: 0.0001 evaluation reward: 1.31\n","episode: 401 score: 1.0 memory length: 73985 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.32\n","episode: 402 score: 2.0 memory length: 74187 epsilon: 1.0 steps: 202 lr: 0.0001 evaluation reward: 1.33\n","episode: 403 score: 2.0 memory length: 74403 epsilon: 1.0 steps: 216 lr: 0.0001 evaluation reward: 1.34\n","episode: 404 score: 1.0 memory length: 74573 epsilon: 1.0 steps: 170 lr: 0.0001 evaluation reward: 1.34\n","episode: 405 score: 1.0 memory length: 74745 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.33\n","episode: 406 score: 2.0 memory length: 74963 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.32\n","episode: 407 score: 2.0 memory length: 75181 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.33\n","episode: 408 score: 1.0 memory length: 75350 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.3\n","episode: 409 score: 4.0 memory length: 75647 epsilon: 1.0 steps: 297 lr: 0.0001 evaluation reward: 1.32\n","episode: 410 score: 1.0 memory length: 75816 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.32\n","episode: 411 score: 0.0 memory length: 75938 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.31\n","episode: 412 score: 2.0 memory length: 76136 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.33\n","episode: 413 score: 2.0 memory length: 76354 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.35\n","episode: 414 score: 0.0 memory length: 76477 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.33\n","episode: 415 score: 0.0 memory length: 76600 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.31\n","episode: 416 score: 3.0 memory length: 76846 epsilon: 1.0 steps: 246 lr: 0.0001 evaluation reward: 1.34\n","episode: 417 score: 4.0 memory length: 77142 epsilon: 1.0 steps: 296 lr: 0.0001 evaluation reward: 1.37\n","episode: 418 score: 3.0 memory length: 77408 epsilon: 1.0 steps: 266 lr: 0.0001 evaluation reward: 1.39\n","episode: 419 score: 0.0 memory length: 77531 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.39\n","episode: 420 score: 2.0 memory length: 77729 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.39\n","episode: 421 score: 0.0 memory length: 77852 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.39\n","episode: 422 score: 3.0 memory length: 78095 epsilon: 1.0 steps: 243 lr: 0.0001 evaluation reward: 1.42\n","episode: 423 score: 4.0 memory length: 78390 epsilon: 1.0 steps: 295 lr: 0.0001 evaluation reward: 1.45\n","episode: 424 score: 3.0 memory length: 78638 epsilon: 1.0 steps: 248 lr: 0.0001 evaluation reward: 1.46\n","episode: 425 score: 0.0 memory length: 78760 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.45\n","episode: 426 score: 1.0 memory length: 78929 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.44\n","episode: 427 score: 1.0 memory length: 79101 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.42\n","episode: 428 score: 0.0 memory length: 79224 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.41\n","episode: 429 score: 1.0 memory length: 79393 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.42\n","episode: 430 score: 2.0 memory length: 79593 epsilon: 1.0 steps: 200 lr: 0.0001 evaluation reward: 1.44\n","episode: 431 score: 2.0 memory length: 79811 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.44\n","episode: 432 score: 1.0 memory length: 79982 epsilon: 1.0 steps: 171 lr: 0.0001 evaluation reward: 1.45\n","episode: 433 score: 3.0 memory length: 80226 epsilon: 1.0 steps: 244 lr: 0.0001 evaluation reward: 1.48\n","episode: 434 score: 0.0 memory length: 80349 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.44\n","episode: 435 score: 0.0 memory length: 80471 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.43\n","episode: 436 score: 2.0 memory length: 80671 epsilon: 1.0 steps: 200 lr: 0.0001 evaluation reward: 1.42\n","episode: 437 score: 0.0 memory length: 80794 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.4\n","episode: 438 score: 2.0 memory length: 80975 epsilon: 1.0 steps: 181 lr: 0.0001 evaluation reward: 1.4\n","episode: 439 score: 2.0 memory length: 81172 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.41\n","episode: 440 score: 0.0 memory length: 81294 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.4\n","episode: 441 score: 2.0 memory length: 81510 epsilon: 1.0 steps: 216 lr: 0.0001 evaluation reward: 1.4\n","episode: 442 score: 3.0 memory length: 81759 epsilon: 1.0 steps: 249 lr: 0.0001 evaluation reward: 1.43\n","episode: 443 score: 0.0 memory length: 81882 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.41\n","episode: 444 score: 3.0 memory length: 82128 epsilon: 1.0 steps: 246 lr: 0.0001 evaluation reward: 1.44\n","episode: 445 score: 2.0 memory length: 82326 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.45\n","episode: 446 score: 1.0 memory length: 82477 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.46\n","episode: 447 score: 1.0 memory length: 82646 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.47\n","episode: 448 score: 3.0 memory length: 82873 epsilon: 1.0 steps: 227 lr: 0.0001 evaluation reward: 1.5\n","episode: 449 score: 0.0 memory length: 82996 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.48\n","episode: 450 score: 0.0 memory length: 83119 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.45\n","episode: 451 score: 1.0 memory length: 83270 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.43\n","episode: 452 score: 0.0 memory length: 83392 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.41\n","episode: 453 score: 0.0 memory length: 83515 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.39\n","episode: 454 score: 3.0 memory length: 83741 epsilon: 1.0 steps: 226 lr: 0.0001 evaluation reward: 1.4\n","episode: 455 score: 2.0 memory length: 83939 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.41\n","episode: 456 score: 3.0 memory length: 84208 epsilon: 1.0 steps: 269 lr: 0.0001 evaluation reward: 1.43\n","episode: 457 score: 1.0 memory length: 84377 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.44\n","episode: 458 score: 1.0 memory length: 84527 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.44\n","episode: 459 score: 0.0 memory length: 84649 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.42\n","episode: 460 score: 2.0 memory length: 84849 epsilon: 1.0 steps: 200 lr: 0.0001 evaluation reward: 1.42\n","episode: 461 score: 2.0 memory length: 85047 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.43\n","episode: 462 score: 2.0 memory length: 85245 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.45\n","episode: 463 score: 0.0 memory length: 85367 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.43\n","episode: 464 score: 3.0 memory length: 85616 epsilon: 1.0 steps: 249 lr: 0.0001 evaluation reward: 1.46\n","episode: 465 score: 0.0 memory length: 85739 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.45\n","episode: 466 score: 2.0 memory length: 85936 epsilon: 1.0 steps: 197 lr: 0.0001 evaluation reward: 1.45\n","episode: 467 score: 0.0 memory length: 86058 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.45\n","episode: 468 score: 3.0 memory length: 86291 epsilon: 1.0 steps: 233 lr: 0.0001 evaluation reward: 1.47\n","episode: 469 score: 0.0 memory length: 86414 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.47\n","episode: 470 score: 3.0 memory length: 86659 epsilon: 1.0 steps: 245 lr: 0.0001 evaluation reward: 1.5\n","episode: 471 score: 1.0 memory length: 86810 epsilon: 1.0 steps: 151 lr: 0.0001 evaluation reward: 1.45\n","episode: 472 score: 4.0 memory length: 87087 epsilon: 1.0 steps: 277 lr: 0.0001 evaluation reward: 1.46\n","episode: 473 score: 0.0 memory length: 87210 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.45\n","episode: 474 score: 3.0 memory length: 87455 epsilon: 1.0 steps: 245 lr: 0.0001 evaluation reward: 1.45\n","episode: 475 score: 2.0 memory length: 87673 epsilon: 1.0 steps: 218 lr: 0.0001 evaluation reward: 1.47\n","episode: 476 score: 0.0 memory length: 87796 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.45\n","episode: 477 score: 0.0 memory length: 87919 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.44\n","episode: 478 score: 1.0 memory length: 88087 epsilon: 1.0 steps: 168 lr: 0.0001 evaluation reward: 1.43\n","episode: 479 score: 2.0 memory length: 88286 epsilon: 1.0 steps: 199 lr: 0.0001 evaluation reward: 1.44\n","episode: 480 score: 1.0 memory length: 88457 epsilon: 1.0 steps: 171 lr: 0.0001 evaluation reward: 1.45\n","episode: 481 score: 0.0 memory length: 88579 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.44\n","episode: 482 score: 0.0 memory length: 88701 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.44\n","episode: 483 score: 1.0 memory length: 88851 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.43\n","episode: 484 score: 3.0 memory length: 89095 epsilon: 1.0 steps: 244 lr: 0.0001 evaluation reward: 1.46\n","episode: 485 score: 2.0 memory length: 89293 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.46\n","episode: 486 score: 0.0 memory length: 89415 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.44\n","episode: 487 score: 1.0 memory length: 89587 epsilon: 1.0 steps: 172 lr: 0.0001 evaluation reward: 1.45\n","episode: 488 score: 3.0 memory length: 89838 epsilon: 1.0 steps: 251 lr: 0.0001 evaluation reward: 1.47\n","episode: 489 score: 2.0 memory length: 90036 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.48\n","episode: 490 score: 2.0 memory length: 90234 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.5\n","episode: 491 score: 1.0 memory length: 90384 epsilon: 1.0 steps: 150 lr: 0.0001 evaluation reward: 1.49\n","episode: 492 score: 0.0 memory length: 90507 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.48\n","episode: 493 score: 0.0 memory length: 90630 epsilon: 1.0 steps: 123 lr: 0.0001 evaluation reward: 1.47\n","episode: 494 score: 4.0 memory length: 90927 epsilon: 1.0 steps: 297 lr: 0.0001 evaluation reward: 1.47\n","episode: 495 score: 1.0 memory length: 91098 epsilon: 1.0 steps: 171 lr: 0.0001 evaluation reward: 1.44\n","episode: 496 score: 2.0 memory length: 91296 epsilon: 1.0 steps: 198 lr: 0.0001 evaluation reward: 1.45\n","episode: 497 score: 0.0 memory length: 91418 epsilon: 1.0 steps: 122 lr: 0.0001 evaluation reward: 1.44\n","episode: 498 score: 1.0 memory length: 91587 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.43\n","episode: 499 score: 1.0 memory length: 91756 epsilon: 1.0 steps: 169 lr: 0.0001 evaluation reward: 1.44\n"],"name":"stdout"},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3de7QcZZnv8e8vFyAIhoREhntEEESFIFtBYSToiIAXziAeBBRBJOOIyhzxFkcFjhzHy1IUOYJBMeIFXQ6ICKggAtGJXHYQQiAwhosShJOEcBESIMl+zh9Vze50+rq7q6u76/dZa6/dVfV21VPd1fXU+1bVW4oIzMysuMblHYCZmeXLicDMrOCcCMzMCs6JwMys4JwIzMwKzonAzKzgnAisp0n6laT3dnieZ0j6YSfnWSSS5kk6K+84rHOcCCxzkh6QtEbSU2V/5zbz3og4LCK+n3WMvUDSDElR9hk9IOlTecdlg29C3gFYYbwtIn6bdxB9YquIWCdpCLhB0sKIuCaPQCSNj4j1eSzbusc1AsuVpBMk/ZekcyU9IeluSW8sm369pPenr3eVdENabqWkn5aVe52kW9Jpt0h6Xdm0F6fv+7uka4BpFTHsL2mBpMcl3S5pVkV896XvvV/ScVXWYbu0xjO1bNw+aYwT68VdT0QMA3cCM8vm+z5JSyQ9Juk3knZOx58p6Zvp64mSnpb0lXR4kqRnSvFJ+pmkR9J45kt6edn850k6T9JVkp4GDk7X5db0M/gpsFkz8Vv/cCKwXrAfcC/JDvp04NLynWqZzwNXA1OAHYDSjm8qcCVwDrA18DXgSklbp+/7MbAwnf/ngefPOUjaPn3vWcBU4GPAJZKmS3pBOs/DImJL4HXAbZVBRcTfgD8C7ygbfSzwnxGxtlbcjUjaH3gFsDQdPgL4NHAkMB34PXBxWvwGYFb6+tXAI8Dr0+HXAvdExKp0+FfAbsCLgFuBH1Us+ljg/wBbAjcDlwE/SD+fn1Wspw0AJwLrlsvSI+7S38ll05YDX4+ItRHxU+Ae4C1V5rEW2BnYLiKeiYg/pOPfAvw5In4QEesi4mLgbuBtknYi2TF+NiKejYj5wC/L5vlu4KqIuCoiRtImmGHg8HT6CPAKSZMi4uGIuLPG+v0YOAZAkoB3pePqxV3LSklrSJLLt0h2xAAfAP4jIpZExDrgC8DMtFbwR2C3NPm9HvgusL2kLYCDSBIFABFxYUT8PSKeBc4A9pY0uWz5v4iI/4qIEZLayERGv5//BG5pEL/1GScC65b/ERFblf1dUDbtodiw98O/ANtVmccnAAE3S7pT0vvS8dul7yn3F2D7dNpjEfF0xbSSnYF3licp4EBg2/Q9R5PsgB+WdKWkPWqs3yXAayVtS7IjHiE5Yq8Xdy3TgC2A00iO8ieWxfqNsjhXpfPdPiLWkCSwg9Ll3wAsAA6gLBFIGi/pi5LulfQk8EDZMkseLHu9HdW/HxsgTgTWC7ZPj6JLdgL+VlkoIh6JiJMjYjvgX4BvSdo1LbtzRfGdgIeAh4EpaTNP+bSSB4EfVCSpF0TEF9Nl/iYi3gRsS1LLKE9g5bE9RtL8czRJ08pPSjvPOnHXFBHrI+JrwDPAB8ti/ZeKWCdFxIJ0+g3AG4B9SI7abwDeDLwGmJ+WORY4AvgnYDIwIx1f/vmX7/Qfpvr3YwPEicB6wYuAj6QnOd8JvAy4qrKQpHdK2iEdfIxkhzWSln2ppGMlTZB0NLAncEVE/IXkSPlMSZtIOhB4W9lsf0jShPTm9Gh5M0mzJO0gaRtJR6RJ5FngqXR5tfwYOB44itFmoXpxN+OLwCckbQacD8wpndyVNDn9vEpuSJd/V0Q8B1wPvB+4PyJWpGW2TNflUWBzkualev4IrGP0+zmSJLHYAHEisG75pTa8j+DnZdNuIjl5uZLkJOVREfFolXm8GrhJ0lPA5cCpEXFfWvatJE0pj5I0xbw1Ilam7zuW5IT0KpKT0ReVZhgRD5IcIX8aWEFy1P1xkt/GOOCjJDWOVSRNLP9aZx0vT9fjkYi4vVHcdeZT7kqS5HFyRPwc+BLwk7RZZzFwWFnZBcAkRo/+7yKpUcwvK3MRSdPOQ+n0G+stPE0oRwInkHwGRwOXNhm79Qn5wTSWJ0knAO+PiAPzjsWsqFwjMDMrOCcCM7OCc9OQmVnBuUZgZlZwfdfp3LRp02LGjBl5h2Fm1lcWLly4MiKmV5vWd4lgxowZDA8P5x2GmVlfkVTzjnA3DZmZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRVcZolA0o6SrpN0V9oH+6lVykjSOZKWSlok6VVZxWNmZtVlefnoOuC0iLhV0pbAQknXRMRdZWUOI+mtcTeS3iHPS/+bmVmXZFYjSB/rd2v6+u/AEpInRpU7ArgoEjcCW6VPeMqUlPyZmVmXzhFImkHy1KSbKiZtz4aPxVvGxskCSbMlDUsaXrFiReVkMzNrQ+aJIH149iXAv0XEk2OZR0TMjYihiBiaPr3qHdJmZjZGmSYCSRNJksCPIqLaU40eAnYsG94hHWdmZl2S5VVDAr4LLEkfwl3N5cDx6dVD+wNPRMTDWcVkZmYby/KqoQOA9wB3SLotHfdpYCeAiDif5KHjhwNLgdXAiRnGY2ZmVWSWCCLiD0Dda3MieSrOKVnFYGZmjRXuzmJfNmpmtqHCJQIzM9uQE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRVcoROBlHcEZmb5K3QiMDMzJwIzs8JzIjAzKzgnAjOzgsssEUi6UNJySYtrTJ8s6ZeSbpd0p6QTs4rFzMxqy7JGMA84tM70U4C7ImJvYBbwVUmbZBiPrxIyM6sis0QQEfOBVfWKAFtKErBFWnZdVvGYmVl1eZ4jOBd4GfA34A7g1IgYqVZQ0mxJw5KGV6xY0c0YzcwGXp6J4M3AbcB2wEzgXEkvrFYwIuZGxFBEDE2fPr2bMZqZDbw8E8GJwKWRWArcD+yRYzxmZoWUZyL4K/BGAEnbALsD9+UYj5lZIU3IasaSLia5GmiapGXA6cBEgIg4H/g8ME/SHYCAT0bEyqziMTOz6jJLBBFxTIPpfwMOyWr5ZmbWHN9ZbGZWcE4EZmYFV/hE4LuNzazoCp8IzMyKzonAzKzgCpkIIvKOwMysdxQyEZiZ2SgnAuDJJ/OOwMwsP04EwOTJeUdgZpYfJwIzs4JzIjAzKzgnAjOzgnMiMOthUvLnS54tS04EZn1gnH+plqHCbl5r1+YdgVl969ZtOFyqHZh1WmETwYTMnsRg1hkTJ+YdgRVFYROBmZklnAjM+oBPFluWCp0I/OMyMyt4IjDrRz5hbJ3mU6ZmPa5azbU8Gbhma+1yjcCsT+y1V94R2KByIjDrE7ffXn287y+wdjkRmJkVnBOBWQ+qdYRf74541wpsrHyy2KyPTJiw8clhJwBrlxOBWQ/xTt3y4KYhsz7ny0etXU4EZjmYMGHjq3122WXjcs8+29p8XaOwsXDTkFkO1q/feNz992847CN965bMagSSLpS0XNLiOmVmSbpN0p2SbsgqFrNe0Mr1/iMj2cZiVi7LpqF5wKG1JkraCvgW8PaIeDnwzgxjMctVowRQOb3VJh7XHqwdmSWCiJgPrKpT5Fjg0oj4a1p+eVaxmPWSRjv9yieTmWUtz5PFLwWmSLpe0kJJx9cqKGm2pGFJwytWrMgkGJ9ks14xfnx77/e2bK3KMxFMAPYF3gK8GfispJdWKxgRcyNiKCKGpk+f3s0YzZpSav/vlZ1weTy9EpP1rqYSgaRTJb1Qie9KulXSIW0uexnwm4h4OiJWAvOBvducp1lfiKjeru+TxJaHZmsE74uIJ4FDgCnAe4AvtrnsXwAHSpogaXNgP2BJm/M0y107R+BZHb27VmD1NHsfQWkzOhz4QUTcKdXftCRdDMwCpklaBpwOTASIiPMjYomkXwOLgBHgOxFR81JTs0Hgq3usFzWbCBZKuhp4MTBH0pYkO++aIuKYRjONiK8AX2kyBrO+1OzReBZJYtUqmDp1w3EHHwzXX9/cMrfZBpYvb66s9a9mE8FJwEzgvohYLWlr4MTswjKzVkVsnHSmTNm4XCkJNGO5L+ouhLqJQNKrKkbt0qBFyKwvlDbj556DiRM7M6/Kcc0cQT/3XHK56LguXL/nn67V0qhG8NX0/2Ykl3ouIjlfsBcwDLw2u9DMsjch4962Gu18201C3dRscrP+U/c4JCIOjoiDgYeBfdNr+fcF9gEe6kaA1tv68Vr18lg7fST+3HPVlwPd2YmuWwerV8Pf/97c8urdb1CrpmODp9njod0j4o7SQEQslvSyjGKyPlFtx1H0I8a8j/DHj4dJk/KNwfpPs4ngDknfAX6YDh9H0kxkZql2u4bIyshIe7W2aiehbbA0mwhOAP4VODUdng+cl0VA1t+KXCvo1c7iKnfizz0Hm2xSu2y178/JYLA1TASSxgO/Ss8VnJ19SAbJk6k23TTvKMaml5NBkXdmzz6bNF21+xn08vdrY9PwVFlErAdGJE3uQjxG8kPbbLP+2WnV6jfHRvXC57PJJs1vU/2y7VlnNNs09BTJeYJrgKdLIyPiI5lEZc8rHX2V/zB7YafiHcXgGBnZ8Oopf7fF02wiuDT9sx4gJc+87cZNSK0oT1hFaT6ottPsh/b0at9VrXWxwddUIoiI72cdiNVW7QdafoVKrR9rVrWIVndyvbKD6fWdc7e1+h285CVw773ZxGL5aioRSNoN+A9gT5K7jAGIiF0yisvMeszSpU6mg6rZxoXvkVwuug44GLiI0XsKrCCqXYteeVTZq00J3T7HUjqBPjLSu5+JtWbBgrHdj9Hqe3bdNSl/332tLacdzSaCSRFxLaCI+EtEnEHyiMm+t/vueUfQvl4+SqsVWzdjzqppqpl16OXvxlpzwAHdWU6p+W3WrO4sD5o/WfyspHHAnyV9iKSfoS2yC6t77r67f36svXQSstl+bHpNM+dTmik/1nK9rFSL6bWLEIrqwQe7t6xmv/JTgc2Bj5D0Qvpu4L1ZBWWtK69+NtuB2FgN2nN1ezFh5cX3GWRnwYK8I6it2USwKiKeiohlEXFiRLwjIm7MNLKcPfVU/r1qrl07+rq08+2FI0/vBKyafuyJtqTV2OfOba7cxz42+vrkkxuXv/nmDYePOKK55bRL0cSeRdINwA7ALcDvgfnlvZF209DQUAwPD4/pvaUvuXKVq43P6wauVpfbyo+u3fWo9fnVKlcrhmbn0ym1PtN2HyHZazf5dUq99RrrtF7XTOxj6VZ8/PjRg7jx4xv3RzVpEjzzzOjwuHHJPUOdIGlhRAxVm9ZUjSAiDgJeBnwT2Aq4UtKqzoRnvaDe0VCnjvLyvndgrMtvtP79ttNrpHSuoJX16sdaQDeUN6OuX994WyolgXPOGX3/zJnZxVfSVCKQdCBwGvDvJFcLXQGckmFc1iFr1jQu0+kfcb/sGPvl0tdeUtTP6JZbWn9PvWdD//rX9d/74Q+PPlfiji60vTR71dD1wEKSm8quiojn6he3bmmla4Bmun2oLNNOH/aNdLOJqJ2rf8pPwpfK+Ai4f41lu3vNa1qffz1HHJH0Bgtw3nnwwQ9uXGb16u511dLsyeJpwP8meUbxryX9VtLnswvLWlFelX/00c5tOP2+s+v3+PtZr3725SdvX/CC/OIof6TpaafVL9sziSAiHgfuA+4neX7xS4DXZxhX4Y31y586tf70Zm+CqlXL6MRGWTmfXt1pWH2zZyf/X/nKfONoxVe/Ovp69erW39/OhRLVykobN9+Wz6P0etttm5/vWDR7juA+4KvAVJKuJnZPTyBbHyhqu25WnLgSF1yQ/F+8ON842tXo+2z3+7777tbKH3LIxst+5JENaxGd1uw5gl0jYsBuIyqueu2OebV/93K31aW4nAAGzz/+Y/vzaOau9FZqwOUnksvft+mm8PWvw6mnbvyedjV7jmBXSddKWgwgaS9Jn+l8OJaVIl8hk+W6FulzbEa/fR5/+MPG45rtbmSrrTofT+WyTzppw3FZnddoNhFcAMwB1gJExCLgXdmEZN0wblxz9weM5ZryZvXCTqOV9fMjOev70IfyjqDz6v0+nnhibLXESy4ZfX3PPaOvS73Vlrvggg3Hve1trS+vGc0mgs0jouLmZxrcI2etctNDf3AyqO6b39xwuNHVML3md7/rznKOPHL09Utf2tx75syB/faDKVOyianZRLBS0kuAAJB0FMnVQ4XSz32pQGvX0vvotz5/Po197Wt5R1DblVduODwyAgcfPDq8ySa139vonoJf/KLxtlG+/TSzLX3hC3DjjfXjakezJ4tPAeYCe0h6iOQy0uOyCSk/9foi8o++d42MbJyg+zVZ94PK5x33i/32G319+OEbTqtcj7Vr4Q1vqD6fm26qv95vf/vY4stTs/cR3BcR/wRMB/YADgIOrPceSRdKWl46wVyn3KslrUtrGZaxekf7eSS7Tixz/PjRcx6Wv333HX3dqP+qbn5nlT17NnLddc2X7fdadN1EIOmFkuZIOlfSm4DVJM8hWAr8zwbzngcc2mD+44EvAVc3HfEYdXqD6+T88mpu6teN1npbo86Be3m722OPvCPIR6OmoR8AjwF/BE4m6XROwD9HxG313hgR8yXNaDD/DwOXAK9uJthuyfsxit3Uyz/KZlR7AE+/r9Og65UnoFXbTpYsab7vrkHS6CvZJSJOiIhvA8cAewJvbpQEmiFpe+CfSe5UblR2tqRhScMrVqxod9FWIIP+A+4VlScxP/7x5t8rZXvXbLvbwGGH1Z/+hS+0N/9e0CgRPP+MrIhYDyyLiGfqlG/F14FPNnPHckTMjYihiBiaPn16hxZvvaZTtSPv/Luv1JNmyZe/3Nr7N900u9px5fmjRtfil7f3R8BVV9UvP2dO+zHmrVHT0N6SnkxfC5iUDguIiHhhG8seAn6i5BuaBhwuaV1EXNbGPDNTbSN1M0S+au04eqXpwUatWTPav37eLr+8/XmMjAzWdlY3EUTE+KwWHBEvLr2WNA+4oleTQLeVP6vYmuNnBPSuel02dPMcTyfnO2gHgZnlNEkXk5xk3l3SMkknSfqApA9ktcxBEAETmr27YwBFwFNP5R2FNVLqgtoGQ2a7nIg4poWyJ2QVR78YpKOLVpUfGVZWt4v8ufSyb38b5s5tfz7Venbt5NF2eb8+VtsAtXL1HzdldEb5rfrWfccem3cEtR3l21SbUuBGiO4p77rimWeSk2ZPPJFvTP1i0NpiB0knvxc/8yFfrhG0qN2Nv3TlxOTJ7cdSZK0+DMTy1e3nA69Zk+29CYPGNYIGql3Z0MoVKj7CaV27VwA5AfSGap03dsvmm3dvWYPANYIuclIwa85b39pcuccfT35XlTe0WWucCMys6z73ufrTK58XUEvpQS2bbVa/nGuJ9TkRtKAfbnTpV5WP6Cv/TOq19fqz609nntm4TOU20chZZ40tFnMi6AnemY1eHVTts9h0043L2uAb36Bfg8rt4LOfTcZl9RSvQeZEQOOHStSbNtadUulB1e4aoTmSu94oorPPbv093k5a50TQJU8+mTRxPP20r5luVmXy9ZFefxvLU7w++tHm5mvtcSLoki23hIkTfVlbJ3kHMHhWr26unA+iOsuJICN5PX5y0FQeQfozHWyTJo3t2b8+KGiPE0Ed3rjM8jfW5F/qFO/d7+5cLIPKdxabWd+qd4EHwMkndy+WfuYawRi5ucIsW9/4xobDZ52V/Nb+4R/qv++ZTj1Mt0AUfdb+MTQ0FMPDwy2/r3xnXW+Vmy3XqGy1/omsPa18NzYYynvurXbA5e2geZIWRsRQtWmuEbShmY1wLCe+zGxDrnVny4nAzPqSD7A6x4nA+o57mrRttsk7gsHiq4Y6pFFbprXPR4DFU+t5INZZrhF0WKs9JpqZ5c2JoMMa9ZhoZq0prwHss09+cQwyJwIz6xsLF+YdwWDyOYIKnWzjd1umWWf4t5Qt1wja1OgWdzOzXudEUEWrN4F5p29m/cyJwMys4JwIzMwKzonAzKzgnAjMzAous0Qg6UJJyyUtrjH9OEmLJN0haYGkvbOKpdt88tjM+kmWNYJ5wKF1pt8PHBQRrwQ+D8zNMJbMeedvZv0qsxvKImK+pBl1pi8oG7wR2CGrWMzMrLZeOUdwEvCrWhMlzZY0LGl4xYoVXQyrNX4IjZn1o9wTgaSDSRLBJ2uViYi5ETEUEUPTp0/vXnBmZgWQa19DkvYCvgMcFhGP5hmLmVlR5VYjkLQTcCnwnoj477ziMDMrusxqBJIuBmYB0yQtA04HJgJExPnA54CtgW8p6e5zXUQMZRWPmZlVl+VVQ8c0mP5+4P1ZLd/MzJqT+8liMzPLlxOBmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVnBOBmVnBORGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRmJkVXGaJQNKFkpZLWlxjuiSdI2mppEWSXpVVLGZmVluWNYJ5wKF1ph8G7Jb+zQbOyzAWMzOrIbNEEBHzgVV1ihwBXBSJG4GtJG2bVTxmZlZdnucItgceLBtelo7biKTZkoYlDa9YsaIrwZmZFUVfnCyOiLkRMRQRQ9OnTx/jPEb/zMxsVJ6J4CFgx7LhHdJxZmbWRXkmgsuB49Orh/YHnoiIh3OMx8yskCZkNWNJFwOzgGmSlgGnAxMBIuJ84CrgcGApsBo4MatYzMystswSQUQc02B6AKdktXwzM2tOX5wsNjOz7DgRmJkVnBOBmVnBORGYmRWcos/usJK0AvjLGN8+DVjZwXD6gde5GLzOxdDOOu8cEVXvyO27RNAOScMRMZR3HN3kdS4Gr3MxZLXObhoyMys4JwIzs4IrWiKYm3cAOfA6F4PXuRgyWedCnSMwM7ONFa1GYGZmFZwIzMwKrjCJQNKhku6RtFTSp/KOp1MkXShpuaTFZeOmSrpG0p/T/1PS8ZJ0TvoZLJL0qvwiHztJO0q6TtJdku6UdGo6fmDXW9Jmkm6WdHu6zmem418s6aZ03X4qaZN0/Kbp8NJ0+ow84x8rSeMl/UnSFenwQK8vgKQHJN0h6TZJw+m4TLftQiQCSeOB/wscBuwJHCNpz3yj6ph5wKEV4z4FXBsRuwHXpsOQrP9u6d9s4Lwuxdhp64DTImJPYH/glPT7HOT1fhZ4Q0TsDcwEDk2f4/El4OyI2BV4DDgpLX8S8Fg6/uy0XD86FVhSNjzo61tycETMLLtnINttOyIG/g94LfCbsuE5wJy84+rg+s0AFpcN3wNsm77eFrgnff1t4Jhq5fr5D/gF8KairDewOXArsB/JXaYT0vHPb+fAb4DXpq8npOWUd+wtrucO6U7vDcAVgAZ5fcvW+wFgWsW4TLftQtQIgO2BB8uGl6XjBtU2Mfq0t0eAbdLXA/c5pE0A+wA3MeDrnTaT3AYsB64B7gUej4h1aZHy9Xp+ndPpTwBbdzfitn0d+AQwkg5vzWCvb0kAV0taKGl2Oi7TbTuzB9NYb4iIkDSQ1whL2gK4BPi3iHhS0vPTBnG9I2I9MFPSVsDPgT1yDikzkt4KLI+IhZJm5R1Plx0YEQ9JehFwjaS7yydmsW0XpUbwELBj2fAO6bhB9f8kbQuQ/l+ejh+Yz0HSRJIk8KOIuDQdPfDrDRARjwPXkTSNbCWpdEBXvl7Pr3M6fTLwaJdDbccBwNslPQD8hKR56BsM7vo+LyIeSv8vJ0n4ryHjbbsoieAWYLf0ioNNgHcBl+ccU5YuB96bvn4vSRt6afzx6ZUG+wNPlFU3+4aSQ//vAksi4mtlkwZ2vSVNT2sCSJpEck5kCUlCOCotVrnOpc/iKOB3kTYi94OImBMRO0TEDJLf6+8i4jgGdH1LJL1A0pal18AhwGKy3rbzPjHSxRMwhwP/TdKu+u95x9PB9boYeBhYS9I+eBJJ2+i1wJ+B3wJT07IiuXrqXuAOYCjv+Me4zgeStKMuAm5L/w4f5PUG9gL+lK7zYuBz6fhdgJuBpcDPgE3T8Zulw0vT6bvkvQ5trPss4IoirG+6frenf3eW9lVZb9vuYsLMrOCK0jRkZmY1OBGYmRWcE4GZWcE5EZiZFZwTgZlZwTkRWCFJWp/27lj6q9sjraQPSDq+A8t9QNK0dudj1km+fNQKSdJTEbFFDst9gORa75XdXrZZLa4RmJVJj9i/nPYHf7OkXdPxZ0j6WPr6I0qehbBI0k/ScVMlXZaOu1HSXun4rSVdreQZAt8huQGotKx3p8u4TdK3007lxkuaJ2lxGsP/yuFjsIJxIrCimlTRNHR02bQnIuKVwLkkPWBW+hSwT0TsBXwgHXcm8Kd03KeBi9LxpwN/iIiXk/QbsxOApJcBRwMHRMRMYD1wHMmzBraPiFekMXyvg+tsVpV7H7WiWpPugKu5uOz/2VWmLwJ+JOky4LJ03IHAOwAi4ndpTeCFwOuBI9PxV0p6LC3/RmBf4Ja019RJJB2J/RLYRdI3gSuBq8e+imbNcY3AbGNR43XJW0j6d3kVyY58LAdUAr4fyVOoZkbE7hFxRkQ8BuwNXE9S2/jOGOZt1hInArONHV32/4/lEySNA3aMiOuAT5J0d7wF8HuSph3S/vNXRsSTwHzg2HT8YcCUdFbXAkelfc6XzjHsnF5RNC4iLgE+Q5JszDLlpiErqknp075Kfh0RpUtIp0haRPKc4GMq3jce+KGkySRH9edExOOSzgAuTN+3mtEug88ELpZ0J7AA+CtARNwl6TMkT6IaR9J77CnAGuB76ThIHqtqlilfPmpWxpd3WhG5acjMrOBcIzAzKzjXCMzMCs6JwMys4CsvnN8AAAAXSURBVJwIzMwKzonAzKzgnAjMzAru/wNDr7lKv+vF+AAAAABJRU5ErkJggg==\n","text/plain":["<Figure size 432x288 with 1 Axes>"]},"metadata":{"tags":[],"needs_background":"light"}}]},{"cell_type":"markdown","metadata":{"id":"Fdeycz2kA2mo"},"source":["# Visualize Agent Performance"]},{"cell_type":"markdown","metadata":{"id":"EZMEU9vrA2mo"},"source":["BE AWARE THIS CODE BELOW MAY CRASH THE KERNEL IF YOU RUN THE SAME CELL TWICE.\n","\n","Please save your model before running this portion of the code."]},{"cell_type":"code","metadata":{"id":"kXbDiZKTA2mo","executionInfo":{"status":"ok","timestamp":1610848787413,"user_tz":360,"elapsed":703,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}}},"source":["torch.save(agent.policy_net, \"./save_model/breakout_dqn_latest.pth\")"],"execution_count":15,"outputs":[]},{"cell_type":"code","metadata":{"id":"eojCQm1lA2mp","executionInfo":{"status":"ok","timestamp":1610848787417,"user_tz":360,"elapsed":699,"user":{"displayName":"Punit Jha","photoUrl":"","userId":"07885534541681120711"}}},"source":["from gym.wrappers import Monitor\n","import glob\n","import io\n","import base64\n","\n","from IPython.display import HTML\n","from IPython import display as ipythondisplay\n","\n","from pyvirtualdisplay import Display\n","\n","# Displaying the game live\n","def show_state(env, step=0, info=\"\"):\n"," plt.figure(3)\n"," plt.clf()\n"," plt.imshow(env.render(mode='rgb_array'))\n"," plt.title(\"%s | Step: %d %s\" % (\"Agent Playing\",step, info))\n"," plt.axis('off')\n","\n"," ipythondisplay.clear_output(wait=True)\n"," ipythondisplay.display(plt.gcf())\n"," \n","# Recording the game and replaying the game afterwards\n","def show_video():\n"," mp4list = glob.glob('video/*.mp4')\n"," if len(mp4list) > 0:\n"," mp4 = mp4list[0]\n"," video = io.open(mp4, 'r+b').read()\n"," encoded = base64.b64encode(video)\n"," ipythondisplay.display(HTML(data='''<video alt=\"test\" autoplay \n"," loop controls style=\"height: 400px;\">\n"," <source src=\"data:video/mp4;base64,{0}\" type=\"video/mp4\" />\n"," </video>'''.format(encoded.decode('ascii'))))\n"," else: \n"," print(\"Could not find video\")\n"," \n","\n","def wrap_env(env):\n"," env = Monitor(env, './video', force=True)\n"," return env"],"execution_count":16,"outputs":[]},{"cell_type":"code","metadata":{"id":"LBNfm5f2A2mp","outputId":"946e9127-1134-4a26-cb4d-4b39964cad1d"},"source":["display = Display(visible=0, size=(300, 200))\n","display.start()\n","\n","# Load agent\n","# agent.load_policy_net(\"./save_model/breakout_dqn.pth\")\n","agent.epsilon = 0.0 # Set agent to only exploit the best action\n","\n","env = gym.make('BreakoutDeterministic-v4')\n","env = wrap_env(env)\n","\n","done = False\n","score = 0\n","step = 0\n","state = env.reset()\n","next_state = state\n","life = number_lives\n","history = np.zeros([5, 84, 84], dtype=np.uint8)\n","get_init_state(history, state)\n","\n","while not done:\n"," \n"," # Render breakout\n"," env.render()\n","# show_state(env,step) # uncommenting this provides another way to visualize the game\n","\n"," step += 1\n"," frame += 1\n","\n"," # Perform a fire action if ball is no longer on screen\n"," if step > 1 and len(np.unique(next_state[:189] == state[:189])) < 2:\n"," action = 0\n"," else:\n"," action = agent.get_action(np.float32(history[:4, :, :]) / 255.)\n"," state = next_state\n"," \n"," next_state, reward, done, info = env.step(action + 1)\n"," \n"," frame_next_state = get_frame(next_state)\n"," history[4, :, :] = frame_next_state\n"," terminal_state = check_live(life, info['ale.lives'])\n"," \n"," life = info['ale.lives']\n"," r = np.clip(reward, -1, 1) \n"," r = reward\n","\n"," # Store the transition in memory \n"," agent.memory.push(deepcopy(frame_next_state), action, r, terminal_state)\n"," # Start training after random sample generation\n"," score += reward\n"," \n"," history[:4, :, :] = history[1:, :, :]\n","env.close()\n","show_video()\n","display.stop()"],"execution_count":null,"outputs":[{"output_type":"display_data","data":{"text/html":["<video alt=\"test\" autoplay \n"," loop controls style=\"height: 400px;\">\n"," <source src=\"data:video/mp4;base64,AAAAIGZ0eXBpc29tAAACAGlzb21pc28yYXZjMW1wNDEAAAAIZnJlZQAAKa1tZGF0AAACrgYF//+q3EXpvebZSLeWLNgg2SPu73gyNjQgLSBjb3JlIDE1MiByMjg1NCBlOWE1OTAzIC0gSC4yNjQvTVBFRy00IEFWQyBjb2RlYyAtIENvcHlsZWZ0IDIwMDMtMjAxNyAtIGh0dHA6Ly93d3cudmlkZW9sYW4ub3JnL3gyNjQuaHRtbCAtIG9wdGlvbnM6IGNhYmFjPTEgcmVmPTMgZGVibG9jaz0xOjA6MCBhbmFseXNlPTB4MzoweDExMyBtZT1oZXggc3VibWU9NyBwc3k9MSBwc3lfcmQ9MS4wMDowLjAwIG1peGVkX3JlZj0xIG1lX3JhbmdlPTE2IGNocm9tYV9tZT0xIHRyZWxsaXM9MSA4eDhkY3Q9MSBjcW09MCBkZWFkem9uZT0yMSwxMSBmYXN0X3Bza2lwPTEgY2hyb21hX3FwX29mZnNldD0tMiB0aHJlYWRzPTcgbG9va2FoZWFkX3RocmVhZHM9MSBzbGljZWRfdGhyZWFkcz0wIG5yPTAgZGVjaW1hdGU9MSBpbnRlcmxhY2VkPTAgYmx1cmF5X2NvbXBhdD0wIGNvbnN0cmFpbmVkX2ludHJhPTAgYmZyYW1lcz0zIGJfcHlyYW1pZD0yIGJfYWRhcHQ9MSBiX2JpYXM9MCBkaXJlY3Q9MSB3ZWlnaHRiPTEgb3Blbl9nb3A9MCB3ZWlnaHRwPTIga2V5aW50PTI1MCBrZXlpbnRfbWluPTI1IHNjZW5lY3V0PTQwIGludHJhX3JlZnJlc2g9MCByY19sb29rYWhlYWQ9NDAgcmM9Y3JmIG1idHJlZT0xIGNyZj0yMy4wIHFjb21wPTAuNjAgcXBtaW49MCBxcG1heD02OSBxcHN0ZXA9NCBpcF9yYXRpbz0xLjQwIGFxPTE6MS4wMACAAAACB2WIhAA3//728P4FNlYEUGa7Q91nCgDAQZ/NTMClgclA4pYaytdZh+dVJuV432kRnWAs5rNIwVgDq82rBm7oYytwVrbMx0s0deIjDqA1k01uG4ANrMfO1grOJwZga71GmymuARhsR4WZ3pZaGRcKmQ1AKWNgP+g0jF9vZyFT5gpot86JTbMFc0ZcdazcAe48r5fEfvVute9rB6tUebo7LSDmgk0Yk5SccgOC7xqaSGkQEoLeMoQTFC1XD+H1qV9Nq5mXTbCYJFNnVh2EEU8jkDgqZPwQY6bpeHW1QeapzVRURgWt3Cb/fABnsQJUbQBnzSOFXARJd6BPn/ypthsbnmLx1saqciKlnYZZ24AAOUpfp3/kL8nQXNV5hCVSuxUphNhf2FUx9v5ZJA00iJO5WRdWWaZkX+Ci4Zn82b71n0oDij1a25QB7zkO9SOwci8nB51VU+gsTvBFhPUEwCoBDHwIcjmoghed3vaEGILDxhQGSJJY5vdxY5HgzWDyGbi4i0/KYbzIg+m4H0Te2oSePthouInVVvYO0qDi64T1ric4NQOP8jzEU1VOvKYnXKHkJEaUd7tg/aX5bMO+DCTV6zyjsec9BL/DK/zvV536bgL4XyF4n6hJUxR2uxnzWUGNZcByX6P5y2LbYEt5wxsANhaCg6kfI5evGz06kty6JtAHP8fYylLq1QAAADxBmiFsQ3/+p4QDF1N0gFpuh3nrdedDb2ojbBn0Ewkwv+AmDTlh93KHsmVPcyqs7tbnYudIFVTDEt53A5AAAABOQZpEPCGTKYQ3//6nhALt2KIAWh6UfANfKFGYGgFr2spgrCoX1P2ajQJAZyknWYur6FaNLD0PB+0Pxw0J5kOZ8245NZWabw1YPLCkV2/dAAAARkGeYmpTwr8BotMT7ZtzSnwPby80p9HGifAAQo4tnvnJ+nxJcWSK6CCt//I9eo0seomgPuthER3NJcBaTNdF592yVeWduWYAAAAgAZ6DakJ/Ahm2szl121PQRZ3oThaR33DXoGVQkO8d1CcAAABpQZqISahBaJlMCG///qeEAuDJZ0jl7aIixNsJQ+llJvfz0ST/z/gAnWtZr0nWYur6FaNLD0PB+0Pxw0J5kOZ9MOrd7qbNpankMpYAAMT/fx4TKOqEF4EyeZM8HJkL6nHI8nSpaNDpabiHAAAAPkGepkURLCv/Aacf7rYnRhAoaq3QAIUcWz3zk/T4kuLJFdBBXABoVp5oiaEKSv0LlYk0CoBIy0cy1QI6E655AAAAQgGexXRCfwIY0neM/Z1gUhjSiCeMc2J59o1ORWEAcRgBbxzhK3ayhfcCXD3BsfguThzsPtwKOpkjOn9kguaK4XOrDQAAABYBnsdqQn8A6xO25ketnGw9H83//5NgAAAAO0GazEmoQWyZTAhv//6nhACwiGYyjk6wvdPfiOEAJ1rWa9J1mLq+hWjSw9DwftD8cNCeZDmfTDq3fAq6AAAANkGe6kUVLCv/AJM+QABCji2e+cn6fElxZIroIK3/9K8IHtkTQfy5yhcrJBtgBAUjvUK0jmWRsQAAABQBnwl0Qn8AvzuhGat3gPQZk6oKVwAAABEBnwtqQn8AvwmSO2zZ5dNdywAAADxBmxBJqEFsmUwIb//+p4QAiq2mThUHM17p78RwgBOtazXpOsxdYMG0aWHohdmmDQyRBHJRzPph1bvgXNEAAAAzQZ8uRRUsK/8AdFEsACFHFs985P0+JLiyRXQQVv/6V4QPbImg/lzlC5WSDbACApH72+SXAAAAFQGfTXRCfwCW7u6dTCNFEoVytQVTgQAAABIBn09qQn8AlsRiO2zZ4e5oyYAAAABhQZtUSahBbJlMCG///qeEAG5wMxlHgMH4LOZkAWA7H/kPHIBXBYQjSYzyLmelT/qoegIIwcVpCDVNzv8pV+XzPZ+hu/tfVKhfjeo2Ns4Bxfr6Z4PwqJoa0HcdWIUN50AdzAAAADFBn3JFFSwr/wBdfCxABzQ1hP4/v6aP+p2wE7eIMjuKJOaPj+1ocfB4G+PrngUliG4XAAAAHgGfkXRCfwB27H5gANiwl6jPdAfHvVCJDPLf1+23wAAAACIBn5NqQn8AdstXK2DoO3336JkXZQAVnoWNKDfXtvvSX97QAAAAEkGbmEmoQWyZTAhv//6nhAA1IQAAAAtBn7ZFFSwr/wArYAAAAAkBn9V0Qn8AN6EAAAAJAZ/XakJ/ADehAAAAUUGb3EmoQWyZTAhv//6SsVAAOhC//wg9UCTOi8tjgCYm5smLNAu/BtHYwAtPGlgPdtzQCvZuzUpB2HU2zt9z9mQEEDyRxIPS9dAmbDaJLkWu2wAAACZBn/pFFSwr/zlV766TYWAG5ZL7y8cAYVwfsVZNOne58tDpeBFyOQAAAA4Bnhl0Qn9AGeH0PFc7/AAAABgBnhtqQn8BJYq/eB53Xc8EdEyKRvTKh0EAAABTQZoASahBbJlMCG///qeEANf7KkkBEEMAITZl8MccA1+4r82SBfnsPSRMV09WejYAAMT/9/NC9afGefVAWPHYvDtdP5/Q4dfrbayeAiys5KZmydsAAAApQZ4+RRUsK/8AsTSxTEAHFSr6KGzBkRzeYvoFAuMEQlbCOdNVV2dWi04AAABYAZ5ddEJ/AOK7w49VlzHx9prrcWzztgASySrE1tZSUfcsoFrX28Vj3wLzHLWpb4LO7p8DiBqdOIsZhdGQSdhgW9B8oNVxIdTcP5yc5LsCipva37Z8xt690AAAAB4Bnl9qQn8A3QrFfvtAEO1H00NmKUzQFmkDZiBx5c0AAAAnQZpESahBbJlMCG///qeEAKP7wpor65cAG0dR9wOOAa/cV+bJAvNYAAAAH0GeYkUVLCv/AILIsUxABxUq+ihswZEc3mL6BQLi6GEAAAAcAZ6BdEJ/AKyltcAHAgnvzxwMfKlVQPyqvP5X7AAAABsBnoNqQn8AqDNyJ+ZACKaj6aGzFKZoCzSBsZ0AAAApQZqISahBbJlMCG///qeEAHn9lSSAiCGAFudR9wDnwtr9xX7NZZV6AOEAAAAfQZ6mRRUsK/8AZIg2piADipV9FDZgyI5vMX0CgXF/IQAAABwBnsV0Qn8AfvitwAcCCe/PHAx8qVVA/Kq8/lnpAAAAGwGex2pCfwB8QmzzZkAIpqPpobMUpmgLNIGzHAAAABhBmspJqEFsmUwUTDf//qeEAFzwrHVI5GAAAAARAZ7pakJ/AGH/jBwA+NXzZH8AAAA5QZruSeEKUmUwIb/+p4QC4B78AnxMUF9UEevOO2C4SBF76C1r3fLyYKhw3R//3+XqhpJVLV6w9Nx0AAAAEkGfDEU0TCv/AGSdQz+HJE32UAAAAB0Bnyt0Qn8AYhHizABGQph6kFX3z4DARhVypmIVAwAAABsBny1qQn8Afx+K4AHFwveXBVUC4DAJ/skqbtUAAAAlQZsySahBaJlMCG///qeEAKdtLUAINqP/3+XrNUzhE/UYT1E3oQAAAB1Bn1BFESwr/wCG7CbXswAKzY//f5fw/gmJBze4DAAAAB8Bn290Qn8AgwPYwAWHZveaME9jsk6kz8+FCA9wRZbTAAAAHAGfcWpCfwCxW9gq8AIvxveXBVUC4DAKAt/CtckAAAAuQZt2SahBbJlMCG///qeEAt/P63LQaPnx8e4wUZJASXyf/v8vVCLGDzFnTe7DtAAAABxBn5RFFSwr/wC12Cak+UIAaYT/+/y/h6NZnp0MAAAAHQGfs3RCfwCxSHGeAImFMPUgq++fAYCMKuVMxCebAAAAIgGftWpCfwDljZdAD4Nav5OY3u6YtlhOwkEI4y+E5n6SAYAAAAA0QZu6SahBbJlMCG///qeEAt/151dX6b2OVwWXCa5LqEIwTbKIsJbAW//f5e6aoVqS/z/3RwAAAB5Bn9hFFSwr/wDyss2XswAKzY//f5fw/gmJCZmfGoEAAAAgAZ/3dEJ/AO3VlMAFh2b3mjBPY55iC3lAuFCA925/d9IAAAAcAZ/5akJ/AT3bsFXgBF+N7y4KqgXAYBQFv4ViSQAAACZBm/5JqEFsmUwIb//+p4QBrj2BQAtVH///8YCuSKh2c0HtN7RJJAAAABxBnhxFFSwr/wFIsE1J8oQA0wn/9/l/D0azPTZtAAAAHQGeO3RCfwE9tOM8ARMKYepBV98+AwEYVcqZiEyfAAAAGwGePWpCfwGkfiuABxcL3lwVVAuAwCf7JKm2fQAAAD5BmiJJqEFsmUwIb//+GRfpEasFd33y43dASXQ1svKgAkjAABiaOscwfPcs0z8ipGGvi7g9JAbEW0OIBHASwAAAAB1BnkBFFSwr/1elwP2jklfzEADlC/9s/NCGOt1otQAAABgBnn90Qn8BrrPncAIU2b2d8u86mLFtFe4AAAAjAZ5hakJ/X/Th+yQoQAH87PsqUWAq34txd9qV7gJ+qSaLAsEAAAAuQZpmSahBbJlMCG///qeEAbISssDTCz4ANWoXWMddMZ28UmUZiM4+4OVOtaaPpgAAACRBnoRFFSwr/wFIcj6AFojDmZWSStPo4z9sp4CVrEyhUPtTevkAAAAlAZ6jdEJ/AaRFaSAEYCfZUqRjcbDnUY+DqKXVevpD61heiGg4+wAAACgBnqVqQn8BpBRLAIAWHs+ypUjGv8VYaSxaAoJW3Oox8HMmaWkE4laPAAAAL0GaqkmoQWyZTAhv//6nhAEt+Rsz/CMcAA1ahdYx10xnbxSZRmIzhPE0WbVP+/eBAAAAJUGeyEUVLCv/APKBYUxABwaWAFfxhZ05c5e2U8CBy4vxUkchebYAAAAoAZ7ndEJ/AT1prqI0ARIJ9lSpGNxsOdRj4OopYYSmqRLV2HTQHGZ9gAAAACgBnulqQn8BNZRnACPdn2VKkY1/NUsc4LIELYu+4SuEmT2kE4Q5nRKBAAAALkGa7kmoQWyZTAhv//6nhADb2ob3AdAgAhI3UzVSwtnbxSZRmIzj7g5U61pp4cAAAAAkQZ8MRRUsK/8AtbkfQAtEYczKySVp9HGftlPAStYmUKh9qcHYAAAAJQGfK3RCfwDoIbPgA4wf2VKkY3Gw51GPg6il1Xr6Q+tYXohoPAUAAAAoAZ8takJ/AOgEGVNAFb7PsqVIxr/FWGksWgKCVtzqMfBzJmlpBOJZJQAAAC9BmzJJqEFsmUwIb//+p4QAqHvCmf4RjgAGrULrGOumM7eKTKMxGcJ4mizap/5FgQAAACVBn1BFFSwr/wCGyLFMQAcGlgBX8YWdOXOXtlPAgcuL8VJHIXucAAAAJQGfb3RCfwCxItdRGgCJBPsqVIxuNhzqMfB1FLDCU1SJauw7BjAAAAAoAZ9xakJ/AKy4M4AR7s+ypUjGv5qljnBZAhbF33CVwkye0gnCHM7QgQAAADFBm3ZJqEFsmUwIb//+p4QAef2VJICIIYAWUdE2bG2xjDZrg+0Y9s4+4OVQiSruJQVAAAAAJEGflEUVLCv/AGSKo6AFojDmZWSStPo4z9sp4CVrEyhUPtTluAAAACUBn7N0Qn8AfxDZ8AHGD+ypUjG42HOox8HUUuq9fSH1rC9ENCFVAAAAKAGftWpCfwB/AgypoArfZ9lSpGNf4qw0li0BQStudRj4OZM0tIJxLuQAAAAYQZu6SahBbJlMCG///qeEAHaOEtiUbYCRAAAAIEGf2EUVLCv/AGIkxZ8aKyAEH0sAK21Dv/EEk5XbbjKHAAAAIQGf93RCfwBh/LuojQAmmMbmlFgMmi77Ur3aNk+8/hWR0AAAABABn/lqQn8AfF47yHzFf4yxAAAANEGb/kmoQWyZTAhv//6nhACemURABD891M1UsLZ28UmUZiQPJUoyh+aDQk5kOy6b2VkcrtYAAAAnQZ4cRRUsK/8Afxlmrg9qADbeHMysklbY54z9sqCqZqqZu6YzogUxAAAAGwGeO3RCfwB8UKyYAIpqPpobMUsjaiThkWkOxQAAABsBnj1qQn8AqF55wAceST3543WbUqqLgf+puWQAAAA0QZoiSahBbJlMCG///qeEAQwuqIAJ1nupmqlhWQJz1QKj7pYS4pMozDNaYWwsM7qvCeuZ2AAAACdBnkBFFSwr/wDcumyTorIAa3Y5mVkkrbHPGftlQVZHLjC3TGz76bUAAAAbAZ5/dEJ/ANhMqB77QBDtR9NDZilM0BZpA2FMAAAAHQGeYWpCfwEd3C1OmgCJhCfTQ2UYK7q08bLwU/iLAAAANEGaZkmoQWyZTAhv//6nhAFyAFEAEPz3UzVSwtnbxSZRmJA8lSjKH5oNCTmQ0NpvZUhyr8QAAAAnQZ6ERRUsK/8BJthNSfKEANbsczKySVtjnjP2yoKpmqpm7pjOh+3RAAAAGwGeo3RCfwEdsp8IARTUfTQ2YpZG1EnDItIbVwAAABsBnqVqQn8BfLzzgA48knvzxus2pVUXA/9TbV0AAABqQZqqSahBbJlMCG///hFUlKVQCEZ+h/mMItwsxDXzGx9qf1xs58qeY8ySGx7/wcMIRSyKyYfiLcvYLc0cbugGArsPrCsIWe+BbFAWbLtWfTQr6G4B2R4DJpNiFEO8cxiXIp3eIMTvcfPsIQAAACVBnshFFSwr/1b9ge1UGzA+RaAFP/fRMzAKtVnS5li4LEHSwo5AAAAADwGe53RCf2BTT96oS6T3ogAAACUBnulqQn8BhoGPgA49aaC1GMmHQRvZDZUIvduoxUlY5gkTIKCBAAAAKkGa7kmoQWyZTAhv//6nhAF/tpx46OgQAQ+zL4Y44Br9xX5skDA37T3Z+gAAACJBnwxFFSwr/wEujaq9xvYAFeZfeXjglrJYP2Z3Rblke4l1AAAAJQGfK3RCfwGGQVhszIARVa3Z6BJK08veyGyngEZ4Q7xH63es1TEAAAAoAZ8takJ/AYYWJ9sAArfZ9oegSSb0tQ9Oh6eUW+xcvyPBtOQq0DXQ5QAAACtBmzJJqEFsmUwIb//+p4QA4nsqSzxrIAQ+zL4Y44BrUfm80CYoulRBynLrAAAAJ0GfUEUVLCv/AScM41aQbfYluMQAbTV9FDZjw9GsvlVRhsbOZZAbQAAAACUBn290Qn8A7aFZMAEVWt2egSStPL3shsp4EDZP3takj3U59DVoAAAALQGfcWpCfwDtg5tqqjOAImMjdnoEkrRRcvyPB/ytXr7Xf9Zxf+shcfZu7+2eCQAAAHJBm3ZJqEFsmUwIb//+p4QAtfvCmRH1y4ANo6j7gccA1+4r82SB0+J8FmY1HDqQ8yndQKTQteSqg5jE/0UQZUKdXm1bKDgHe0vIIMtv4JmBWZDIrzHCxjw+osqySENa6xdjWiDna7uJ7nv/eqwee+2YtzAAAAArQZ+URRUsK/8BJwzjBdNVmC2GgA2mr6KGzHiAHN5lVRgyf6wEq/bTPP8g6AAAACcBn7N0Qn8AvyCsNmZACKrW7PQJJWnl72Q2U8AjPCHeI+oVwvwuZIEAAABtAZ+1akJ/AL8LE+2AAVvs+0PQJJN6WoenQ9PKLfYuX5Hg2nIVaBsKLrYTeLZ1LP4uHthZX1PaCA0Xyygoxu00e8Z6L5bgDplc9I3+y/yOr1a1yHeBCcYR5V0frKne8tWlupoDXq55c50Dt0LKfAAAAGtBm7hJqEFsmUwUTDf//qeEAG5tQaP8DamAA2q5Luk8kOMDUqQP1xRrBrD5xXxcZGx9EP7mEuxXpdMI9XH7eW2UPkrQ81P788DQN9aP8L98h4bgOCKIPrfQhZXNclJiXl0QuxZQToeXALfSwQAAAG8Bn9dqQn8BfPOfCOiBACKrW7PQJJWnl72Q2U8CBsn72tAviSPcWzapjcmWSt6ennvQ7fwRggT/IBDptWkt2qRiKivQ6HA6gSVPfO8VlAfVEOHBMsRUdxwFICkD+r3uaR8WtGns5EvCmxm5HapfoTkAAAB1QZvcSeEKUmUwIb/+p4QAcbXiWgAKuwAc6TYJ91f3RZPni0TSPKff5jnv5B3D2LguFHYOqyXOC+YtMN3pQ/YDuj6RdIVgX1exNi/NKm6m6I69tMOz7ctmOa8jVtKhvH0EkND4r3dG+SFt5n6DKzO2KzP0Cr1AAAAAGUGf+kU0TCv/ASb1n1Y5wroOAHWHlmym7oEAAAAzAZ4ZdEJ/AHbsIGCBV65oAW26S7PDpZcZS7DtAnn2CG5rp87ximKTs0WN/b002bqxJvUqAAAACQGeG2pCfwA3oQAAAFFBmh9JqEFomUwIZ//+gflvitAFq4f/+Hd9AlVaPT86qYNJ73i5BQXq2Cy5RAIb+AaiIx9Uspy8+je0T6KUJtj/WcjH7R3djQx8fCqfKwQCkIEAAAAwQZ49RREsK/85Ve+7N0IfzuBsG5gBZ62L2RzBxsMp0IyVfUTp5u5TAauQnht3sbr0AAAALwGeXmpCfwENky8AcmGpaBm6+jcQNHyPQC4x2XSwAHjpLWAeaGwec6f77kTqORO8AAAAf0GaQkmoQWyZTAhn//6eEAMe69AAOKV9uTjeH02k92A8En8k3BcY7LqTnXThsy6lhu8GofEImoevC3LR8wmdvGRVGKBYXqchfzFuQbCemj6lqtdba8qw0tTr3WScR/HIaKoerMpDRA39FJeBOf+rXqawFR01bFXOaIIm6yRcIUMAAAAfQZ5gRRUsK/8BJwzljHXP8U2OzGhV7pTkK/8Ifln/eAAAABsBnoFqQn8BDdyW8SLEHPMV8TcwY0uc3689FLEAAAA3QZqDSahBbJlMCGf//p4QAyPCdJehDPQA2uPtycbw+YV85uC4x1uZG3Uq+3TPSFWZTlYA7lNzXAAAAHNBmqZJ4QpSZTAhn/6eEAJ6RLkANyl7jzrvD6bSe7AeCT+SbguMdl1Jzrpw4w8mIAJ2ObfkItPc/qN9KLDCmlLGAMGGx44lKhQa9YLR4qmmvZksf3FdQhtWndSDNqAfGXrgixTEBbpBWelpygiDrGZfoJ+9AAAAMkGexEU0TCv/ASb1oV61tFkyLXgXMSCu8shdbwAX13WE/j9UCShNRpz0fKrt5zEupbl/AAAAGAGe5WpCfwEN3JbxJD+S5Q4su1LkMfgqzwAAAC1BmudJqEFomUwIZ//+nhACffFjG2pAwAs+Ptycbw+YV85uC4x1uZG3Uq+3LOEAAAAoQZsISeEKUmUwIZ/+nhAB5PN9NCglcW2TwAJehXemLqzsm7deu/n9ugAAAHRBmypJ4Q6JlMFNEwz//p4QAfAO6AAcUr7cnG8PptJ7scPIZE7EF2UZbLLzsArr+icmoePIxG0r8XHWMoo/BWztqX1nCuRIsswReb3a0bzGUa2yO3j07WwhT3q/Pa0W3RD7ngoRrCU43P//XcuMRjeGANaSgAAAAFgBn0lqQn8BeJhuD9x4IfTV0niwTJC2ABLVm1xa0O43BSCvc/W5q6CaoXm1Fo5/Lzzi9T80+6UDGxPhGqZJiFn58Pyvr7IOog8HvlrMiuLBW9VaqNLDfsBRAAAAaEGbS0nhDyZTAhv//qeEAH99lSe670LECAJie5fAA61uuGr/tLkOudaWfEy1FNYVP9LB8BIHlth/M/CVuFfsPEULBJ7gGWDROm/SH5XGNV9fYe+chlA//HpiclUdR7SqCKNIt/0wPUHAAAAAYEGbbEnhDyZTAhv//qeEAGRc6UTyAAN2R5+LiJd/yE40JukeDDhsz/7a0erRy41bzsH7gn8J+LYMpp+3Uhyo8j/wf3RgZBlCdfRtece+5YZGwrWiI6vJMNCKhseB1FZVDgAAAB1Bm5BJ4Q8mUwIb//6nhABkXU01ftYrIFaWFmrR6QAAABxBn65FETwr/wEjV/fo9HjKBxph5TI8aVSRH91hAAAAGAGfzXRCfwBsHdCHBdp6LANn6Gb7TaVUYQAAABMBn89qQn8AbATiUeslTGv1NqGgAAAAEkGb1EmoQWiZTAhv//6nhAA1IAAAAA1Bn/JFESwr/wEnDNxtAAAACQGeEXRCfwA3oAAAAAkBnhNqQn8AN6AAAABcQZoYSahBbJlMCG///pK0vcAcU8f/sMPGtlbWjS/UCxMOicPLv+8cH7AAZE+NtzQDBuMR6ytH16LogY1KG03VTwtae1hZDQjqGCC/GaOrAwnoc+4QL6X+k7o96IUAAAA3QZ42RRUsK/85Ve+7N0Y1GyAImMw44NRjX+MblXBv4wNXl4wvg5kzzdLUG8STEx7vu6HNGNDM3wAAACgBnlV0Qn8+7imzVjM0AQhvsqVIxuNhzqMfB06sjZP33/H3yBKe4C6bAAAAKwGeV2pCfwFGZuQYTQBCG+ypUjG42HOox8HTqv//g8wRci89P4VxoTObJ+cAAABbQZpcSahBbJlMCG///qeEAO16jKsI8v0RABEHmFzilrQLJmvmuApbdNa09rCyGhHUMp2ddkyS/HU9fAFHXMEUq4dOg6wMFIpFF2QOPN2lA77upAMBIurDmLTZgAAAADRBnnpFFSwr/wEnDOQTAzaEARMZhxwajGv8Y3KuDfqR2Pz3kfg5kzzdLUG809L1XGrsTjlBAAAAUgGemXRCfwD4bEec4ANpPsqVIxuNhzqMfB06sjZP33/VqAA2N3yHT8WhiI7MDId3zEFWYMNcV6TMNgjwNEdXKSxeM8Z6NdQc0sdP6GePykieMmAAAAAlAZ6bakJ/APME2bdNAEIb7KlSMbjYc6jHwdOq//+DzBFyH4cH4QAAADJBmoBJqEFsmUwIb//+p4QAtfvCW8Hl+iIAIg8wucUtaBZM181wFLbprWntYWQ0I6hkhQAAAC9Bnr5FFSwr/wEnDOEn+gFCAImMw44NRjX+MblXBvslRoC5eML4OZM83S1BvyWOQAAAACMBnt10Qn8Avvl0tnABtJ9lSpGNxsOdRj4OnVkbJ++/6tQU+AAAACMBnt9qQn8AujNyDCaAIQ32VKkY3Gw51GPg6dV//8HmCLkXhwAAADJBmsRJqEFsmUwIb//+p4QAh3yNW8Hl+iIAIg8wucUtaBZM181wFLbprWntYWQ0I6hlHgAAAC5BnuJFFSwr/wEnDN+JDbreAEZGYccGoxr/GNyrg38qOly8YXwcyZ5ulqDeaipBAAAAIwGfAXRCfwCOtNS2cAG0n2VKkY3Gw51GPg6dWRsn77/q1BgYAAAAIwGfA2pCfwCKxXIMJoAhDfZUqRjcbDnUY+Dp1X//weYIuRfrAAAAdkGbCEmoQWyZTAhv//6nhABnfZT8GDRDWQB0riS5Ckl4i7Fdhn/rifXXB1u5e7cfG/P3BslIOTuRfQI26xI4PZ/ngupkH3No2lRAhzR4i68b44JLqenE8fp45Er+UdqUA5uGhsi71BaQyUeGUP0In305cOcZTbcAAAAxQZ8mRRUsK/8BJwzeP6VgoQBExmHHBqMa/xjcq4OECADpmbJ8vGF8HMmebpag9PJMUQAAACMBn0V0Qn8Aa/y6WzgA2k+ypUjG42HOox8HTqyNk/ff9WoOOQAAACABn0dqQn8AaYXKshuYHwFnAA3padCq6/KzDcQUQFP3rAAAABJBm0xJqEFsmUwIb//+p4QANSAAAAANQZ9qRRUsK/8BJwzcbQAAAAkBn4l0Qn8AN6AAAAAJAZ+LakJ/ADegAAAAZEGbj0moQWyZTAhn//6JPfToLoWATXvy/9xh41spLX1rT9AznDS+c4gEvhNtlbL3nbAqq11ci+vH962jdtqEMz3zK0lJ8JnDtef6mFOaptgw9bBzlOZsWP4Qu+wBrga+wp+w4fEAAAAYQZ+tRRUsK/85qsl0sGYqDibykCH38DnRAAAANAGfzmpCfz9F019kXqAIcIyNU+lWd1PeRxguTZHQ0dsysARNY24FyzulUZl5Emr6vYiDTLEAAACFQZvQSahBbJlMCG///qeEANu50oqMyjVlOyUzv8FnlSALBWs1+fI6gpYF1bwwg/swWnt2y4Wxc5erNoEOG7hZWJ3NKnbYuccPHjmXQEA/WxhD+pYY9qs7+gjBTj3Oc702LnWTbXwwK2VErvTGeQ7mAIcTa8mWLL1vTqnxXOkxBREbtVg9DAAAAERBm/NJ4QpSZTAhv/6nhADc/AWchi7Y+na2QAnJEzlJOsxdX2KrMkcPdUp0RCcrQhx5RPSYZE9iH38qgQSLeimAZc2OVgAAAG5BnhFFNEwr/wEm9aMiOwyWZCeX2O3hEJ7fFs4B4AHFnyVnRVUVkX9DPTSP2Dv43cY86x2lH0+807/p1jPh84EFc4MGnk2UPJ1c+Th66h+3Tm/gPCIKQXE09l8yp29EB8SWgVfdHVSnLAWoHfrywQAAABwBnjJqQn8BLdxzWV6SqmtlTjwAlYWHwbkX9cnzAAAARkGaN0moQWiZTAhn//6eEAKv6sEa3wYXkfnjQyAHB5wWfwgb3AyG8qhwibyIglIzXtUYHbGVvP9TCniXq/akplFZI/kjQTAAAABHQZ5VRREsK/8BJwzo7qnWD7gcWRQARWASs6Kq7rodkH48pfb5J0r0UpIdtJ8sUhATQJUvDe2UkvQqlDmtOE9mtgWfTNvSJ6EAAAAdAZ50dEJ/AS1quUgh4OhWOgjlKgET/wnHgZWwC0AAAAA7AZ52akJ/AS3cXMm3AvOiQAsE6eIVwxHrob56Ti3OHSJUWg1e64SEXjjB+tiwglnpJMEht5YVPbdbXOEAAAAkQZp4SahBbJlMCG///qeEAIqOy2rIOFHQ6kg4Yz6c7tD4y8vhAAAAKkGanEnhClJlMCG//qeEAIt9LiXNQPAtbOaudcaUjod9B3SBU35ULkxmYAAAAB5BnrpFNEwr/wEm9aQx3pdQ8ZQR3srRIcM0qy83eHEAAAAaAZ7ZdEJ/AS1qrmfUIeMtv/62MeDzsUPHpOAAAAAaAZ7bakJ/AS3cVJ8ztSIe2A15BYKb2W//FPEAAAA9QZrASahBaJlMCGf//p4QAbH1FdAvU7KBhAQA08gAB6/0chNq7QBsumirAF2Trn3gb2P/kXgrMOiMBIq7cQAAADBBnv5FESwr/wEnDOjuqWb8bwAByzVXpBfFQe1mgi6xi8G9mXFy+pZm/0HQOFnZRCAAAAAiAZ8ddEJ/AS1qqGvBrgAhunmW4iCo+N01CJalG9cO2ht0kAAAACMBnx9qQn8BLdxPRGCu3Z1hCNsjsoAKz0LGlBvr21ZhQ7lKwQAAABJBmwRJqEFsmUwIX//+jLAA0IAAAAAQQZ8iRRUsK/8BJwzo7qkeYQAAAAsBn0F0Qn8BLWqa2AAAAAsBn0NqQn8BLdxD2QAAABJBm0ZJqEFsmUwUTCf//fEAB6UAAAAOAZ9lakJ/AXzzrX2hzYEAAAwfbW9vdgAAAGxtdmhkAAAAAAAAAAAAAAAAAAAD6AAAGeoAAQAAAQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAC0l0cmFrAAAAXHRraGQAAAADAAAAAAAAAAAAAAABAAAAAAAAGeoAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAABAAAAAAKAAAADSAAAAAAAkZWR0cwAAABxlbHN0AAAAAAAAAAEAABnqAAAEAAABAAAAAArBbWRpYQAAACBtZGhkAAAAAAAAAAAAAAAAAAA8AAABjgBVxAAAAAAALWhkbHIAAAAAAAAAAHZpZGUAAAAAAAAAAAAAAABWaWRlb0hhbmRsZXIAAAAKbG1pbmYAAAAUdm1oZAAAAAEAAAAAAAAAAAAAACRkaW5mAAAAHGRyZWYAAAAAAAAAAQAAAAx1cmwgAAAAAQAACixzdGJsAAAAmHN0c2QAAAAAAAAAAQAAAIhhdmMxAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAKAA0gBIAAAASAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGP//AAAAMmF2Y0MBZAAM/+EAGWdkAAys2UKHfiIQAAADABAAAAMDwPFCmWABAAZo6+PLIsAAAAAYc3R0cwAAAAAAAAABAAAAxwAAAgAAAAAUc3RzcwAAAAAAAAABAAAAAQAABgBjdHRzAAAAAAAAAL4AAAACAAAEAAAAAAEAAAgAAAAAAgAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAGAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAGAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAIAAAAAAIAAAIAAAAAAQAACAAAAAACAAACAAAAAAEAAAQAAAAAAQAACAAAAAACAAACAAAAAAIAAAQAAAAAAQAABgAAAAABAAACAAAAAAIAAAQAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAgAAAAAAgAAAgAAAAABAAAEAAAAAAEAAAgAAAAAAgAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAQAAAAAAQAACgAAAAABAAAEAAAAAAEAAAAAAAAAAQAAAgAAAAABAAAKAAAAAAEAAAQAAAAAAQAAAAAAAAABAAACAAAAAAEAAAoAAAAAAQAABAAAAAABAAAAAAAAAAEAAAIAAAAAAQAABgAAAAABAAACAAAAABxzdHNjAAAAAAAAAAEAAAABAAAAxwAAAAEAAAMwc3RzegAAAAAAAAAAAAAAxwAABL0AAABAAAAAUgAAAEoAAAAkAAAAbQAAAEIAAABGAAAAGgAAAD8AAAA6AAAAGAAAABUAAABAAAAANwAAABkAAAAWAAAAZQAAADUAAAAiAAAAJgAAABYAAAAPAAAADQAAAA0AAABVAAAAKgAAABIAAAAcAAAAVwAAAC0AAABcAAAAIgAAACsAAAAjAAAAIAAAAB8AAAAtAAAAIwAAACAAAAAfAAAAHAAAABUAAAA9AAAAFgAAACEAAAAfAAAAKQAAACEAAAAjAAAAIAAAADIAAAAgAAAAIQAAACYAAAA4AAAAIgAAACQAAAAgAAAAKgAAACAAAAAhAAAAHwAAAEIAAAAhAAAAHAAAACcAAAAyAAAAKAAAACkAAAAsAAAAMwAAACkAAAAsAAAALAAAADIAAAAoAAAAKQAAACwAAAAzAAAAKQAAACkAAAAsAAAANQAAACgAAAApAAAALAAAABwAAAAkAAAAJQAAABQAAAA4AAAAKwAAAB8AAAAfAAAAOAAAACsAAAAfAAAAIQAAADgAAAArAAAAHwAAAB8AAABuAAAAKQAAABMAAAApAAAALgAAACYAAAApAAAALAAAAC8AAAArAAAAKQAAADEAAAB2AAAALwAAACsAAABxAAAAbwAAAHMAAAB5AAAAHQAAADcAAAANAAAAVQAAADQAAAAzAAAAgwAAACMAAAAfAAAAOwAAAHcAAAA2AAAAHAAAADEAAAAsAAAAeAAAAFwAAABsAAAAZAAAACEAAAAgAAAAHAAAABcAAAAWAAAAEQAAAA0AAAANAAAAYAAAADsAAAAsAAAALwAAAF8AAAA4AAAAVgAAACkAAAA2AAAAMwAAACcAAAAnAAAANgAAADIAAAAnAAAAJwAAAHoAAAA1AAAAJwAAACQAAAAWAAAAEQAAAA0AAAANAAAAaAAAABwAAAA4AAAAiQAAAEgAAAByAAAAIAAAAEoAAABLAAAAIQAAAD8AAAAoAAAALgAAACIAAAAeAAAAHgAAAEEAAAA0AAAAJgAAACcAAAAWAAAAFAAAAA8AAAAPAAAAFgAAABIAAAAUc3RjbwAAAAAAAAABAAAAMAAAAGJ1ZHRhAAAAWm1ldGEAAAAAAAAAIWhkbHIAAAAAAAAAAG1kaXJhcHBsAAAAAAAAAAAAAAAALWlsc3QAAAAlqXRvbwAAAB1kYXRhAAAAAQAAAABMYXZmNTguMTIuMTAw\" type=\"video/mp4\" />\n"," </video>"],"text/plain":["<IPython.core.display.HTML object>"]},"metadata":{"tags":[]}},{"output_type":"execute_result","data":{"text/plain":["<pyvirtualdisplay.display.Display at 0x7f705e9b28d0>"]},"metadata":{"tags":[]},"execution_count":7}]},{"cell_type":"code","metadata":{"id":"bqF5uRkGA2mq"},"source":[""],"execution_count":null,"outputs":[]}]}