{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# input_img = Input(shape=(1, 28, 28))\n", "\n", "# x = Convolution2D(32, 3, 3, activation='relu', border_mode='same')(input_img)\n", "# x = MaxPooling2D((2, 2), border_mode='same')(x)\n", "# x = Convolution2D(32, 3, 3, activation='relu', border_mode='same')(x)\n", "# encoded = MaxPooling2D((2, 2), border_mode='same')(x)\n", "\n", "# # at this point the representation is (32, 7, 7)\n", "\n", "# x = Convolution2D(32, 3, 3, activation='relu', border_mode='same')(encoded)\n", "# x = UpSampling2D((2, 2))(x)\n", "# x = Convolution2D(32, 3, 3, activation='relu', border_mode='same')(x)\n", "# x = UpSampling2D((2, 2))(x)\n", "# decoded = Convolution2D(1, 3, 3, activation='sigmoid', border_mode='same')(x)\n", "\n", "# autoencoder = Model(input_img, decoded)\n", "# autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')\n" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "abcdefghijklmnopqrstuvwxyz0123456789\n" ] }, { "data": { "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from PIL import Image, ImageFont, ImageDraw\n", "from tensorflow.keras.utils import Sequence\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", "import random\n", "import copy\n", "import string\n", "characters = string.ascii_lowercase + string.digits # 验证码字符集合数字+英文\n", "print(characters)\n", "width, height, n_len, n_class = 200, 70, 6, len(characters) + 1\n", "fonts=['fonts/ariali.ttf']\n", "\n", "def get_wavy_line(w=(0, 100), h=(30, 50)):\n", " '''产生波浪线坐标'''\n", " import random\n", " n = 50\n", " x = 0\n", " y = random.randint(h[0], h[1])\n", " flag = random.randint(0, 2)\n", " xy = [(x, y)]\n", " while x < w[1]:\n", " temp_y = random.randint(1, 3)\n", " temp_x = random.randint(5, 10)\n", " if flag == 0:\n", " if y + temp_y > h[1]:\n", " y -= temp_y\n", " flag = 1\n", " else:\n", " y += temp_y\n", " else:\n", " if y - temp_y < h[0]:\n", " y += temp_y\n", " flag = 0\n", " else:\n", " y -= temp_y\n", " x = x + temp_x if x + temp_x < w[1] else w[1]\n", " xy.append((x, y))\n", " return xy\n", "\n", "\n", "def gen_captcha(text, fig_size=(200, 70), fonts=['fonts/ariali.ttf'], font_color=[(0, 0, 0)], same_color=1,\n", " font_size=(25, 35), offset_hor=5, offset_ver=5, rotate=0,\n", " font_noise=0, line=(0, 5), line_width=(1, 2), point=(0, 500), wavy=(0, 0),\n", " noise_color=[(200, 200, 255)], bg=[(255, 255, 255)]):\n", " '''\n", " text:验证码文本\n", " size:验证码图片宽高\n", " fonts:字体列表,随机选择一个\n", " font_noise: 字体散点干扰,0不加干扰,1加干扰\n", " fill:字体颜色范围\n", " rotate:字体旋转角度\n", " line:干扰线条数范围\n", " point:干扰点数范围\n", " wavy:波浪线数范围\n", " color:干扰线、点 颜色\n", " bg:背景色范围\n", " '''\n", " bg = random.choice(bg)\n", " img = Image.new(mode='RGB', size=fig_size, color=bg) #\n", " draw = ImageDraw.Draw(im=img, mode='RGB') # im, mode=None\n", " font = ImageFont.truetype(random.choice(fonts), size=random.randint(font_size[0], font_size[1])) # font=None, size=10, index=0, encoding=\"\"\n", " rotate = random.randint(0, rotate)\n", "\n", " def get_char_img(char, font, font_color, rotate, bg, font_noise=0):\n", " '''\n", " 生成单个字符图片,随机颜色加随机旋转\n", "\n", " '''\n", " w, h = draw.textsize(char, font=font)\n", " im = Image.new('RGB', (w, h), color=bg)\n", " ImageDraw.Draw(im).text((0, 0), char, font=font, fill=font_color)\n", " if rotate:\n", " im = im.rotate(random.randint(-rotate, rotate), Image.BILINEAR, expand=1)\n", " im = im.crop(im.getbbox())\n", " if font_noise:\n", " im_draw = ImageDraw.Draw(im)\n", " # for i in range(random.randint(10,100)):\n", " for i in range(random.randint(int(w * h * 0.01), int(w * h * 0.05))):\n", " im_draw.point(xy=(random.randint(0, w), random.randint(0, h)), fill=bg)\n", " table = []\n", " for i in range(256):\n", " table.append(i * 97) # 5.97\n", " mask = im.convert('L').point(table)\n", " return (im, mask)\n", "\n", " char_color = random.choice(font_color)\n", " if same_color:\n", " char_imgs = [get_char_img(char, font, font_color=char_color, rotate=rotate, bg=bg, font_noise=font_noise) for\n", " char in text]\n", " else:\n", " char_imgs = [\n", " get_char_img(char, font, font_color=random.choice(font_color), rotate=rotate, bg=bg, font_noise=font_noise)\n", " for char in text]\n", " ws = [img[0].size[0] for img in char_imgs]\n", " hs = [img[0].size[1] for img in char_imgs]\n", " w = max(sum(ws), fig_size[0])\n", " h = max(max(hs), fig_size[1])\n", " if w > fig_size[0] or h > fig_size[1]:\n", " img = img.resize((w + 6, h + 6), Image.BILINEAR)\n", " draw = ImageDraw.Draw(im=img, mode='RGB') # im, mode=None\n", " fig_size = img.size\n", "\n", " if rotate:\n", " temp_x = random.randint(int((fig_size[0] - sum(ws)) / 5), int((fig_size[0] - sum(ws)) / 2 + 1))\n", " temp_y = random.randint(int((fig_size[1] - hs[0]) / 8), int((fig_size[1] - hs[0]) / 2 + 1))\n", " for i in range(len(char_imgs)):\n", " img.paste(char_imgs[i][0], box=(temp_x, temp_y), mask=char_imgs[i][1])\n", " new_x = temp_x + ws[i] + random.randint(0, offset_hor)\n", " temp_x = new_x if new_x < fig_size[0] else temp_x + ws[i] + random.randint(-offset_hor, 0)\n", " new_y = temp_y + random.randint(-offset_ver, offset_ver)\n", " temp_y = new_y if 0 < new_y and new_y + hs[i] < fig_size[1] else temp_y\n", "\n", " if not rotate:\n", " temp_x = random.randint(int((fig_size[0] - sum(ws)) / 5), int((fig_size[0] - sum(ws)) / 2 + 1))\n", " temp_y = random.randint(int((fig_size[1] - hs[0]) / 8), int((fig_size[1] - hs[0]) / 2 + 1))\n", " for i in range(len(char_imgs)):\n", " draw.text((temp_x, temp_y), text[i], font=font, fill=char_color)\n", " new_x = temp_x + ws[i] + random.randint(0, offset_hor)\n", " temp_x = new_x if new_x < fig_size[0] else temp_x + ws[i] + random.randint(-offset_hor, 0)\n", " new_y = temp_y + random.randint(-offset_ver, offset_ver)\n", " temp_y = new_y if 0 < new_y and new_y + hs[i] < fig_size[1] else temp_y\n", "\n", " img_clean = copy.deepcopy(img)\n", "\n", " # 直线\n", " for i in range(random.randint(line[0], line[1])):\n", " x0 = random.randint(0, fig_size[0])\n", " x1 = random.randint(0, fig_size[0])\n", " y0 = random.randint(0, fig_size[1])\n", " y1 = random.randint(0, fig_size[1])\n", " draw.line(xy=((x0, y0), (x1, y1)),\n", " fill=random.choice(noise_color),\n", " width=random.choice(line_width)) # xy, fill=None, width=0\n", "\n", " # 散点\n", " for i in range(random.randint(point[0], point[1])):\n", " draw.point(xy=(random.randint(0, fig_size[0]), random.randint(0, fig_size[1])),\n", " fill=random.choice(noise_color))\n", " # 波浪线\n", " for _ in range(random.randint(wavy[0], wavy[1])):\n", " draw.line(xy=get_wavy_line(w=(0, 200), h=(min(hs) - 5, max(hs) + 5)),\n", " fill=char_color, width=random.choice(line_width))\n", "\n", " return img.resize((240,80), Image.BILINEAR), img_clean.resize((240,80), Image.BILINEAR)\n", "gen_characters = random.choice(\n", " [string.ascii_lowercase, string.ascii_lowercase + string.digits, string.ascii_uppercase,\n", " string.digits + string.ascii_uppercase])\n", "batch_label_length = random.choice([4, 5])\n", "\n", "\n", "random_str = ''.join([random.choice(gen_characters) for j in range(batch_label_length)])\n", "font_color = [(random.randint(210, 255), random.randint(0, 40), 0),\n", " (0, random.randint(200, 255), random.randint(0, 90)),\n", " (random.randint(30, 45), random.randint(90, 110), 255),\n", " (0, random.randint(0, 5), random.randint(0, 5))]\n", "noise_color = [(random.randint(210, 255), random.randint(0, 40), 0),\n", " (0, random.randint(200, 255), random.randint(0, 90)),\n", " (random.randint(30, 45), random.randint(90, 110), 255),\n", " (255, random.randint(210, 255), random.randint(0, 10)),\n", " (0, random.randint(0, 5), random.randint(0, 5))]\n", "image, image2 = gen_captcha(random_str, fig_size=(52, 21), fonts=fonts, font_color=font_color, same_color=1,\n", " font_size=(15, 18), offset_hor=1, offset_ver=0, rotate=0,\n", " font_noise=1, line=(0, 0), line_width=(1, 2, 1), point=(20, 100), wavy=(0, 0),\n", " noise_color=noise_color,\n", " bg=[(random.randint(220, 255), random.randint(220, 255), random.randint(220, 255))])\n", "im = [image, image2]\n", "plt.figure(figsize=(50,20))\n", "for i in range(1,3): \n", " plt.subplot(2,2,i)\n", " plt.imshow(im[i-1])\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "\n", "class CaptchaSequence(Sequence):\n", " '''\n", " 继承Sequence的数据生成类,方便调用多CPU,加快生成训练及测试数据\n", " 参数:self.characters:验证码字符集合,self.batch_size:每批次样本数,self.steps:生成多少批数据,self.n_len:验证码长度,\n", " self.width:图片宽度,self.height:图片高度,self.input_length:lstm time step长度,self.label_length:标签长度\n", " 返回:array类型训练或测试数据\n", "\n", " '''\n", "\n", " def __init__(self, characters, batch_size, steps, n_len=6, width=200, height=70,\n", " input_length=12, label_length=6,\n", " chars_len=(5, 5)): # width=128, height=64, input_length=16, label_length=4\n", " self.characters = characters\n", " self.batch_size = batch_size\n", " self.steps = steps\n", " self.n_len = n_len\n", " self.width = width\n", " self.height = height\n", " self.input_length = input_length\n", " self.label_length = label_length\n", " self.chars_len = chars_len\n", " # self.label_length = self.n_len\n", " self.n_class = len(characters) + 1\n", " # self.n_class = -2\n", "\n", "\n", " def __len__(self):\n", " return self.steps\n", "\n", " def __getitem__(self, idx):\n", " batch_label_length = random.choice([4, 5])\n", " self.n_len = n_len\n", " # print('batch_label_length',batch_label_length)\n", " raw = np.zeros((self.batch_size, self.height, self.width, 1), dtype=np.float32)\n", " noisy = np.zeros((self.batch_size, self.height, self.width, 1), dtype=np.float32)\n", " y = np.zeros((self.batch_size, self.n_len), dtype=np.uint8)\n", " # print(y)\n", " # y = np.zeros((self.batch_size, batch_label_length), dtype=np.uint8)\n", " input_length = np.ones(self.batch_size) * self.input_length\n", " label_length = np.ones(self.batch_size) * self.n_len\n", "\n", " for i in range(self.batch_size):\n", " # print('len 4',y.shape, i)\n", " # 定义验证码字符集 (大写字母、小写字母、大写字母+数字)\n", " gen_characters = random.choice(\n", " [string.ascii_lowercase, string.ascii_lowercase + string.digits, string.ascii_uppercase,\n", " string.digits + string.ascii_uppercase])\n", " if i % 10 <= 3:\n", " random_str = ''.join([random.choice(gen_characters) for j in range(batch_label_length)])\n", " font_color = [(random.randint(210, 255), random.randint(0, 40), 0),\n", " (0, random.randint(200, 255), random.randint(0, 90)),\n", " (random.randint(30, 45), random.randint(90, 110), 255),\n", " # (255,random.randint(210,255),random.randint(0,10)),\n", " (0, random.randint(0, 5), random.randint(0, 5))]\n", " image, image2 = gen_captcha(random_str, fig_size=(200, 70), fonts=fonts, font_color=font_color,\n", " same_color=1, font_size=(35, 45), offset_hor=8, offset_ver=5, rotate=5,\n", " font_noise=1, line=(0, 0), line_width=(1, 3), point=(0, 0), wavy=(1, 1),\n", " noise_color=[(200, 200, 255)], bg=[(255, 255, 255)])\n", " # image = gen_captcha(random_str, size=(200,70), fonts=font,fill=(0,201),font_size=(30, 45), font_noise=1, rotate=(0,0),\n", " # line=(0,0), point=(0,0),wavy=(1,1), color=(0,255), bg=255) # 产生波浪线干扰验证码\n", " elif i % 10 <= 5:\n", " random_str = ''.join([random.choice(gen_characters) for j in range(batch_label_length)])\n", " font_color = [(random.randint(200, 255), random.randint(0, 40), random.randint(30, 100)),\n", " (random.randint(150, 200), random.randint(150, 200), random.randint(150, 200)),\n", " (random.randint(150, 200), random.randint(150, 250), random.randint(50, 100))]\n", " noise_color = [(random.randint(50, 100), random.randint(150, 250), random.randint(50, 100)),\n", " (random.randint(200, 250), random.randint(200, 250), random.randint(50, 100))]\n", "\n", " image, image2 = gen_captcha(random_str, fig_size=(100, 25), fonts=fonts, font_color=font_color, same_color=1,\n", " font_size=(15, 20), offset_hor=8, offset_ver=5, rotate=10,\n", " font_noise=0, line=(0, 0), line_width=(1, 2), point=(30, 100), wavy=(0, 0),\n", " noise_color=noise_color,\n", " bg=[(random.randint(20, 100), random.randint(10, 100), 255)])\n", " # image = gen_captcha(random_str, size=(100,25), fonts=fonts,fill=(150,200),font_size=(15, 20), font_noise=0, rotate=(0,0),\n", " # line=(0,0), point=(50,150),wavy=(0,0), color=(150,200), bg=57) # 深色背景\n", " elif i % 10 <= 7:\n", " random_str = ''.join([random.choice(gen_characters) for j in range(batch_label_length)])\n", " font_color = [(random.randint(50, 100), random.randint(10, 50), random.randint(100, 250)),\n", " (random.randint(0, 50), random.randint(0, 50), random.randint(120, 250))]\n", " noise_color = [(random.randint(200, 225), random.randint(0, 50), random.randint(20, 100)),\n", " (random.randint(50, 120), random.randint(150, 250), random.randint(50, 80))]\n", " # font_color = [(random.randint(10,50),random.randint(10,50),random.randint(100,160)),\n", " # (random.randint(30,100),random.randint(30,50),random.randint(120,250))]\n", " # noise_color = [(random.randint(180,255),random.randint(10,50),random.randint(20,40)),\n", " # (random.randint(10,20),random.randint(150,250),random.randint(0,40))]\n", " image, image2 = gen_captcha(random_str, fig_size=(135, 40), fonts=fonts, font_color=font_color, same_color=0,\n", " font_size=(20, 28), offset_hor=8, offset_ver=5, rotate=10,\n", " font_noise=0, line=(2, 8), line_width=(1, 2, 1, 3, 4), point=(20, 200), wavy=(0, 0),\n", " noise_color=noise_color,\n", " bg=[(random.randint(220, 255), random.randint(220, 255), random.randint(220, 255))])\n", " # image = gen_captcha(random_str, size=(135,40), fonts=fonts,fill=(20,60),font_size=(20, 28), font_noise=0, rotate=(-6,6),\n", " # line=(2,5),line_width=(1,3), point=(50,150),wavy=(0,0), color=(80,170), bg=255) #深色线斜体字\n", " elif i % 10 <= 8:\n", " random_str = ''.join([random.choice(gen_characters) for j in range(batch_label_length)])\n", " font_color = [(random.randint(60, 90), random.randint(60, 90), random.randint(60, 90)),\n", " (random.randint(90, 120), random.randint(90, 120), random.randint(90, 120)),\n", " (random.randint(120, 150), random.randint(120, 150), random.randint(120, 150))]\n", " noise_color = [(random.randint(60, 80), random.randint(60, 90), random.randint(60, 80)),\n", " (random.randint(90, 110), random.randint(90, 110), random.randint(90, 120)),\n", " (random.randint(120, 140), random.randint(120, 140), random.randint(120, 140))]\n", " image, image2 = gen_captcha(random_str, fig_size=(70, 26), fonts=fonts, font_color=font_color, same_color=0,\n", " font_size=(20, 24), offset_hor=1, offset_ver=1, rotate=0,\n", " font_noise=1, line=(4, 8), line_width=(1, 1), point=(0, 0), wavy=(0, 0),\n", " noise_color=noise_color,\n", " bg=[(random.randint(220, 255), random.randint(220, 255), random.randint(220, 255))])\n", " else:\n", " random_str = ''.join([random.choice(gen_characters) for j in range(batch_label_length)])\n", " font_color = [(random.randint(210, 255), random.randint(0, 40), 0),\n", " (0, random.randint(200, 255), random.randint(0, 90)),\n", " (random.randint(30, 45), random.randint(90, 110), 255),\n", " (0, random.randint(0, 5), random.randint(0, 5))]\n", " noise_color = [(random.randint(210, 255), random.randint(0, 40), 0),\n", " (0, random.randint(200, 255), random.randint(0, 90)),\n", " (random.randint(30, 45), random.randint(90, 110), 255),\n", " (255, random.randint(210, 255), random.randint(0, 10)),\n", " (0, random.randint(0, 5), random.randint(0, 5))]\n", " image, image2 = gen_captcha(random_str, fig_size=(52, 21), fonts=fonts, font_color=font_color, same_color=1,\n", " font_size=(15, 18), offset_hor=1, offset_ver=0, rotate=0,\n", " font_noise=1, line=(0, 0), line_width=(1, 2, 1), point=(20, 100), wavy=(0, 0),\n", " noise_color=noise_color,\n", " bg=[(random.randint(220, 255), random.randint(220, 255), random.randint(220, 255))])\n", " # X[i] = np.expand_dims(np.array(image)/255.0, axis=-1)\n", " image = image.convert('L')\n", " image2 = image2.convert('L')\n", " noisy[i] = np.reshape(np.array(image) / 255.0, (80, 240, 1))\n", " raw[i] = np.reshape(np.array(image2) / 255.0, (80, 240, 1))\n", " \n", "# noisy[i] = np.array(image) / 255.0\n", "# raw[i] = np.array(image2) / 255.0\n", " label = [self.characters.find(x) for x in random_str.lower()] # 全部标签转为小写\n", " if len(random_str) < self.n_len:\n", " label += [self.n_class] * (self.n_len - len(random_str))\n", " y[i] = label\n", " # return [X, y, input_length, label_length], np.ones(self.batch_size)\n", " return noisy, raw\n" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "data = CaptchaSequence(characters, batch_size=10, steps=2, width=240, height=80) " ] }, { "cell_type": "code", "execution_count": 68, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "(200, 70) P\n", "(240, 80)\n", "(1, 80, 240, 1)\n" ] }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAACwsAAAHJCAYAAABw2kRpAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzs3XFsXed1GPDz2RQdB9JEV1FUIq4YGQ4mW4CgIkbKtP8EbQLVWzwZdWHEGNIMCiAU9IBS3bqmAlJjTWQ0ETBzBcoN8SpETRa5nhPATdEgsI22QWurq1R7WiR5s6dGng3aZjVTI1HVz87u/gg3WH6X0iXvffeSH38/ILB4+O45B5f3PV2eHF+noigCAAAAAAAAAAAAAMjPdV03AAAAAAAAAAAAAAAMhmVhAAAAAAAAAAAAAMiUZWEAAAAAAAAAAAAAyJRlYQAAAAAAAAAAAADIlGVhAAAAAAAAAAAAAMiUZWEAAAAAAAAAAAAAyJRlYQAAAAAAAAAAAADIlGVhAAAAAAAAAAAAAMhUrWXhlNLPp5T+W0rpxZTS55pqCgAAAAAAqM8cHwAAAABIRVGs7MCUro+I/x4Rn4iIlyPiryLivqIozi51zKZNm4r3ve99K6q3Wm3YsKHxnG+99VbjOds0iHNSxnmqZq2cp7bOR8TaOSfL4Xq6kuupGuepmkGdp7V8Tsq4nqpz/1iN81SN83Qln0X1uJ6u5Hqqxnnql9s5+du//duYn59PAy8EDIQ5/o+4z+lnnliN83Sl3O5z2uZ6upLrqRrnqRpz/GpcT9W5f6zGearGebqSz6J6XE9Xcj1V4zz1y+2cVJ3jD9Wo8ZGIeLEoivMRESmlRyJiX0QsOWR83/veF7/1W79Vo+Tq8/73v7/xnK+//nrjOds0iHNSxnmqZq2cp7bOR8TaOSfL4Xq6kuupGuepmkGdp7V8Tsq4nqpz/1iN81SN83Qln0X1uJ6u5Hqqxnnql9s5+c3f/M2B1wAGyhw/3OeUMU+sxnm6Um73OW1zPV3J9VSN81SNOX41rqfq3D9W4zxV4zxdyWdRPa6nK7meqnGe+uV2TqrO8a+rUeMDEfE/3/H1y4sxAAAAAACge+b4AAAAAECtZeFKUkoHUkonU0on5+fnB10OAAAAAABYBnN8AAAAAMhbnWXhVyLiJ97x9c2LsSsURfGVoijuKIrijk2bNtUoBwAAAAAALIM5PgAAAABQa1n4ryLiQymlHSml4Yj4VET8YTNtAQAAAAAANZnjAwAAAAAxtNIDi6J4O6X0zyPiuxFxfUQcLYriTGOdAQAAAAAAK2aODwAAAABE1FgWjogoiuKPI+KPG+oFAAAAAABokDk+AAAAAHBd1w0AAAAAAAAAAAAAAINhWRgAAAAAAAAAAAAAMmVZGAAAAAAAAAAAAAAyNdR1AwAAAAAAAE14//vf31md119/vZXay9HW+VhOfeepWv2uz5NzsnLOXT/npBrnqZo2z9NaOSdlXE/VuJ6qcZ6qcZ76+SxaOddTv66vp7Wi6/O0Vq6n9cqThQEAAAAAAAAAAAAgU5aFAQAAAAAAAAAAACBTloUBAAAAAAAAAAAAIFOWhQEAAAAAAAAAAAAgU0NdNzAIn/70p7tuAQAAAKCSiYmJxnNOT083njNHk5OTjeecmppqPOdSvva1r7VWC6At5vsAAABAl7773e9Wet0zzzxTOedHP/rRSq9b73ORqjPvU6dOVc754Q9/uNLr9u7dWzlnmarXTZc8WRgAAAAAAAAAAAAAMmVZGAAAAAAAAAAAAAAyZVkYAAAAAAAAAAAAADJlWRgAAAAAAAAAAAAAMjXUdQMAAAAAAMD68ulPf7rrFgAAAAD67N27ty82MTFRK+czzzzTF5uenq6VM0dl86LJyclaOU+dOtUXm5qaqpWzTNl1ExHxta99rfFaK+XJwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkCnLwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkCnLwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkCnLwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkCnLwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkKmhrhsAAAAAAAAAAAAAWI2mp6f7YhMTE7WOp5qpqam+2OTkZK3j1ytPFgYAAAAAAAAAAACATFkWBgAAAAAAAAAAAIBMWRYGAAAAAAAAAAAAgExZFgYAAAAAAAAAAACATA113QCsJhMTE63Vmp6ebq0WrDfeywAArCXuKbszNTXVdQsAQAPqzgPdj8Hq4f0MAMBa4d6zO2b7K+PJwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkCnLwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkCnLwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkKlrLgunlI6mlF5PKX3/HbEfSyk9kVJ6YfGfNw22TQAAAAAAoIw5PgAAAABwNVWeLPzViPj5d8U+FxFPFUXxoYh4avFrAAAAAACgfV8Nc3wAAAAAYAnXXBYuiuJ7EfG/3hXeFxHHFv98LCLubrgvAAAAAACgAnN8AAAAAOBqqjxZuMy2oihmFv/8akRsa6gfAAAAAACgPnN8AAAAACAiVr4s/P8VRVFERLHU91NKB1JKJ1NKJ+fn5+uWAwAAAAAAlsEcHwAAAADWt5UuC7+WUhqNiFj85+tLvbAoiq8URXFHURR3bNq0aYXlAAAAAACAZTDHBwAAAAAiImJohcf9YUR8JiJ+e/GfjzfW0RozOTnZeM6pqanGc1LN9PR01y0ADfBeBgAAADDHv5rlzPbN7Fc/80DIh/czAADAYFzzycIppeMR8UxE/MOU0ssppc/Gj4aLn0gpvRARH1/8GgAAAAAAaJk5PgAAAABwNdd8snBRFPct8a2fa7gXAAAAAABgmczxAQAAAICrueaThQEAAAAAAAAAAACAtcmyMAAAAAAAAAAAAABkaqjrBgAAAAAAAAZlcnKy8eOnpqZq5QQAAACANnmyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkaqjrBgAAAAAAAAZlamqqLzY5OVnreAAAAABYSzxZGAAAAAAAAAAAAAAyZVkYAAAAAAAAAAAAADJlWRgAAAAAAAAAAAAAMmVZGAAAAAAAAAAAAAAyNdR1A2vd1NRU1y0AADAAExMTA8k7PT09kLy5WVhYaDzn4cOHG89ZZt++fa3UiYgYHx9vrRYAAOTEbB8AIF9V5/vm9c1bjbP9tmb25vUArHaeLAwAAAAAAAAAAAAAmbIsDAAAAAAAAAAAAACZsiwMAAAAAAAAAAAAAJmyLAwAAAAAAAAAAAAAmbIsDAAAAAAAAAAAAACZGuq6AQAAAAAAAAAAWI6JiYmBHD89PV0r73qxsLDQeM7Dhw83nvPxxx/vi+3bt6/xOidOnCiNj4+PN14LAFbCk4UBAAAAAAAAAAAAIFOWhQEAAAAAAAAAAAAgU5aFAQAAAAAAAAAAACBTloUBAAAAAAAAAAAAIFOWhQEAAAAAAAAAAAAgU5aFAQAAAAAAAAAAACBTloUBAAAAAAAAAAAAIFOWhQEAAAAAAAAAAAAgU5aFAQAAAAAAAAAAACBTQ103AAAAAAAAAAAAyzE9PV0an5iYqHU8rMTo6GjXLQDAVXmyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkaqjrBmC96vV6rdR54IEHGs85MjLSeM4LFy40nnMpGzdubKXOoUOHWqkTMZifCcB6Nz093XULNOzy5cut1BkdHW2lDv0WFhZaq3XkyJFW6szOzrZSJyJibGys8ZwXL15sPGdb99nusQEAKLMaZ/td37vWne/nNrPv+ucBgPl+burO9m+88cZKrzPb71Zb8/21MtsfxLx+OZYz23efDWuHJwsDAAAAAAAAAAAAQKYsCwMAAAAAAAAAAABApiwLAwAAAAAAAAAAAECmLAsDAAAAAAAAAAAAQKauuSycUvqJlNKfpJTOppTOpJR+ZTH+YymlJ1JKLyz+86bBtwsAAAAAALyTOT4AAAAAcDVDFV7zdkT8i6Io/jqltCkiTqWUnoiIfxYRTxVF8dsppc9FxOci4tcH1yoAAAAAAFDCHB8GoNfrtVLngQceqHX83NxcX2xkZKRWzjIXLlxoPGdExMLCQl9s48aNjdd58MEH+2KHDh1qvE7ZzyNiMD8TAMjN5cuXW8k5OjraeB2qKbv3G5QjR460Umd2drbxnEvde4+NjTVe6+LFi7WO7/I+2z02LM81nyxcFMVMURR/vfjn+Yg4FxEfiIh9EXFs8WXHIuLuQTUJAAAAAACUM8cHAAAAAK7mmsvC75RS+mBE/GRE/GVEbCuKYmbxW69GxLYljjmQUjqZUjo5Pz9fo1UAAAAAAOBqzPEBAAAAgHervCycUtoYEd+MiMmiKP73O79XFEUREUXZcUVRfKUoijuKorhj06ZNtZoFAAAAAADKmeMDAAAAAGUqLQunlDbEjwaM/7Eoim8thl9LKY0ufn80Il4fTIsAAAAAAMDVmOMDAAAAAEu55rJwSilFxO9FxLmiKP7NO771hxHxmcU/fyYiHm++PQAAAAAA4GrM8QEAAACAqxmq8JqfiYhPR8R/TSk9txg7FBG/HRGPppQ+GxEXIuLewbQIAAAAAABchTk+AAAAALCkay4LF0Xx5xGRlvj2zzXbDgAAAAAAsBzm+AAAAADA1VzXdQMAAAAAAAAAAAAAwGBYFgYAAAAAAAAAAACATA113QAAAAAAAAB0pdfrtVbrq1/9al9sbGysL3bhwoXKOUdGRuq0VFlZnxHL67XMxo0bax1fxw9+8IPS+Ac/+MHGa83NzfXF2vrZAcBasXnz5r7YpUuXGs85MzPTFxsdHa1Vh34LCwut1Pn+97/fSp2lbN26tS82OztbK+dS996DsGXLlr7YxYsXa+X8zne+0xe78847a+UsU3aPHeE+G5biycIAAAAAAAAAAAAAkCnLwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkCnLwgAAAAAAAAAAAACQqaGuG4DVpNfrtVbr6NGjrdTZsWNH4zkPHDjQeM42Pffcc63UefDBB1upExFx6NChVuqMjIy0UgcABmHz5s2t1JmZmWmlTkTE2NhYa7XqOHPmTCt1Hn744VbqRER86lOfaqXO+Ph4K3UG5fjx443nbOs+u6177Aj32QAAda312f511/U/22cQs/2I1Tnfb2tmX+Yb3/hG4zmX87vE3NxcrVp+lwBgPVgrs/3R0dGGOlk92prtR9Sb7y91jdx5550rzlnXapztL2dev3379lq1nn322Uqx5XCfDfV5sjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZGqo6wYAAAAAAACgil6v11qto0ePtlaraQcOHOi6hcr27NnTF3vuuec66KQZv//7v18a/6Vf+qXGa83NzfXFRkZGGq8DAFzbzMxMaXxsbKzlTlbmzJkzrdT5i7/4i1bqtGl8fLzrFiq57777SuPHjx9vuZOVefTRR/ti995770Bquc8mV54sDAAAAAAAAAAAAACZsiwMAAAAAAAAAAAAAJmyLAwAAAAAAAAAAAAAmbIsDAAAAAAAAAAAAACZGuq6AQAAAAAAAOjK2bNnu26hlnvvvbfrFhq3Z8+evthzzz3XeJ2bb765NP7yyy83XuvRRx/ti+X4swOAKmZmZrpuYV3r9Xqt1Dl9+nQrdYaGytffyurv3r170O2sOffcc09f7Jvf/GatnLt27eqLnTlzplbOMo8//nhpfN++fY3Xghx4sjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZGqo6wZgNTl79mxrtc6fP99KnUOHDrVSZy3Zs2dPK3W+973vtVInIuLRRx9tpc6BAwdaqQMAMzMzXbewbvV6vcZznj59uvGcZW6//fZW6kREPPHEE63UGR8fb6XOoNxzzz2N5xzENVqmrXvsCPfZAAB1raXZ/i233FLpdffee2+tOjmqO9t/7rnnKr/25ptvrlWrqrq/d7hOAFhtzPa7s5Zn+xGDme+/9tprfbG6s/1PfOITtY5fK5Yz2//mN79Z6XW7du1aaTtLevPNN0vj7rOhnCcLAwAAAAAAAAAAAECmLAsDAAAAAAAAAAAAQKYsCwMAAAAAAAAAAABApiwLAwAAAAAAAAAAAECmLAsDAAAAAAAAAAAAQKaGum4AAAAAAAAAqjh79mxrtc6fP99KzkOHDjVeh3579uzpi33ve9/roJPmPProo32xAwcOdNAJAOvRzMxM1y2sW71er/Gcp0+fbjxnjp544om+2Pj4eAedrB733HNPX+z48eMddNIc99nkypOFAQAAAAAAAAAAACBTloUBAAAAAAAAAAAAIFOWhQEAAAAAAAAAAAAgU5aFAQAAAAAAAAAAACBTloUBAAAAAAAAAAAAIFOWhQEAAAAAAAAAAAAgU5aFAQAAAAAAAAAAACBTloUBAAAAAAAAAAAAIFOWhQEAAAAAAAAAAAAgU5aFAQAAAAAAAAAAACBTloUBAAAAAAAAAAAAIFOWhQEAAAAAAAAAAAAgU5aFAQAAAAAAAAAAACBTloUBAAAAAAAAAAAAIFPXXBZOKb0npfSfU0r/JaV0JqX0rxfjO1JKf5lSejGl9AcppeHBtwsAAAAAALyTOT4AAAAAcDVDFV7zZkT8bFEUCymlDRHx5yml70TEr0bEQ0VRPJJS+vcR8dmI+HcD7BUAAAAAAOhnjg81nDt3rjS+ffv2vthLL71Uq1ZZzrpGRkYaz7lebN26tTQ+Ozu74pzXXdftf9h1bm6uL+YaAaCOF154ofGcjzzySOM5lzI0VGU1ihxt27atL/baa6/Vyvn5z3++1vGr0fBw/79X3Ov1Vl3OrrnPJgfX/G21+JGFxS83LP6viIifjYjHFuPHIuLugXQIAAAAAAAsyRwfAAAAALiaSv9qa0rp+pTScxHxekQ8ERH/IyLmiqJ4e/ElL0fEB5Y49kBK6WRK6eT8/HwTPQMAAAAAAO9gjg8AAAAALKXSsnBRFD8simJPRNwcER+JiJ1VCxRF8ZWiKO4oiuKOTZs2rbBNAAAAAABgKeb4AAAAAMBSKi0L/z9FUcxFxJ9ExEcjYiSlNLT4rZsj4pWGewMAAAAAAJbBHB8AAAAAeLeha70gpbQ1It4qimIupXRjRHwiIr4UPxo2/mJEPBIRn4mIxwfZKLTh3LlzrdXavn17K3VGRkZaqUO/rVu3tlbr4sWLrdSZm5trPOcgrtEnn3yy8ZxLeeaZZ1qpM4ifca/XazznUsbGxlqpM4jzdOjQocZzRvh8XgsmJiYGknf37t2N5zx9+nTjOcsMDw+3UmdQtmzZ0nUL61Zb1+i+fftaqRMRMT4+3lqttWwQnxttfRadOXOmlToRa+c+G4DVyxyf9c5sn6bVme/Pz8+Xxgcxj636+9Fyfufo+tpra77f1mw/YnXO99ua2S9H1fM0iJl919c9zas63x/EvH456s5N1/LMfvPmzaXxoaFrrjHRorrX6I4dO/piS/3XbMpeOwhm+9Us5/Ol6n1J3c+s5dz/vPjii7Vq3XrrrZVet5busyGiwrJwRIxGxLGU0vXxoycRP1oUxR+llM5GxCMppS9GxLMR8XsD7BMAAAAAAChnjg8AAAAALOmay8JFUZyOiJ8siZ+PiI8MoikAAAAAAKAac3wAAAAA4Gqu67oBAAAAAAAAAAAAAGAwLAsDAAAAAAAAAAAAQKaGum4AAAAAAAAAqjh37lzXLTRuZGSk6xZo0KZNm0rjFy9ebLxWr9drPOfc3FxfbBDX6JNPPtl4zqX88Ic/bK0Wg3f06NG+2P79+2vlLLvuI3w+rwVf+tKXah1/6dKl0vjmzZtr5YW16Pnnn2885yuvvNIXu+OOOxqvQ7eGh4c7qzOIe+yIiIWFhcZztnWfDVfjycIAAAAAAAAAAAAAkCnLwgAAAAAAAAAAAACQKcvCAAAAAAAAAAAAAJApy8IAAAAAAAAAAAAAkKmhrhsAAAAAAACAd+v1ep3WHx4e7ovV7aksJ9257bbbSuPnzp1ruZPVbW5urjQ+MjJS6fgzZ8402Q70+c53vlMav/POO2vlLbv2q173NO+FF17ougXIRtf32eRv9+7dfbHTp0930AnwTp4sDAAAAAAAAAAAAACZsiwMAAAAAAAAAAAAAJmyLAwAAAAAAAAAAAAAmbIsDAAAAAAAAAAAAACZsiwMAAAAAAAAAAAAAJka6roBWKler9d1C7UMDw933QIDdtttt7VW67HHHmutVtMmJiYaz3n//fc3nnMpH/nIR1qps3fv3lbqrHXHjx9vPOfRo0cbzxkRsX///oHkfbeRkZFW6gzivbyUgwcPtlJnbGxsIHn37NnTeM5f/uVfbjxn177whS80nvPtt99uPGeOBnGf3da97+zsbCt1IiImJydbqTM1NdVKnbVk9+7drdSZmZlppQ4AwHpjts9aUGe+f+7cucqvffXVV1dcJyLix3/8x2sdX1fVmeAv/MIv9MVGR0ebbiciqv8u19ZsP2J9z/eXM7Pfvn37iuss9XOvOt9fzrx+bm6u8mvLtDWzX4668/22ZvbLMYj5/nqe7Q9iXr+UQczxh4bWx2rUWp7tR7Q336872zezX7m6s/3Tp09Xfm3X98mwWnmyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkaqjrBgAAAAAAAMhPr9frugVo3W233VYaf+yxxxqv9eqrrzaes8yhQ4dqHf+tb32rL3b//ffXyrmU0dHRvtjLL788kFrvtnfv3lbqrCX33XdfX+z48eMddHJ1R48eLY3v37+/8Vpzc3N9sZGRkcbrlJmYmBhI3oceeqgvdvDgwYHUasuePXsazzk+Pt54zrZ8/vOfL41/4QtfaLmTlXn77be7bqFx7rOreeqppxrPOTk52RebmppqvA79du/e3RebmZnpoBNYuzxZGAAAAAAAAAAAAAAyZVkYAAAAAAAAAAAAADJlWRgAAAAAAAAAAAAAMmVZGAAAAAAAAAAAAAAyZVkYAAAAAAAAAAAAADJlWRgAAAAAAAAAAAAAMmVZGAAAAAAAAAAAAAAyZVkYAAAAAAAAAAAAADJlWRgAAAAAAAAAAAAAMmVZGAAAAAAAAAAAAAAyNdR1AwAAAAAAANCV+fn5rlugIxs2bCiNv/XWWy13sro9+OCDpfFDhw5VOv7+++9vsp1lu/nmm/tiL7/8cq2ce/furXX8enbXXXeVxr/97W+33Mm1Pf/8832xnTt3dtBJM6anp0vjExMTtfL+1E/9VK3jq3r66af7YjfddFPpa994441KOZc6nmoOHjzYF3vooYc66GT9GR4e7ov1er1aObds2dIXu3jxYuM5Z2ZmSl87Ojpaq1aZ22+/vS929uzZWjmnpqZqHc/aldt9AeuTJwsDAAAAAAAAAAAAQKYsCwMAAAAAAAAAAABApiwLAwAAAAAAAAAAAECmLAsDAAAAAAAAAAAAQKaGum4AVpPTp0+3VmvHjh2t1aIbGzZsaK1Wr9drrVbTpqenu26hll27dnXdAu9w1113NZ7zyJEjjeeMiHj++ecHkvfdxsfHW6nT5nv52LFjrdS56aabBpJ3ED+TycnJxnOWmZqaaqVORMTBgwcbzzmo93NuhoeHG8+5ZcuWxnOWmZmZaaVORLvvB7rx0ksvtVZrEPcFbd0DAACsBnVn++b1vFud+f5bb71V+bV1Z/uD+B1+OdbyfN9sf/WpOt//9re/XTnnpUuXKr1u8+bNlXPW/R1+586dtY4fhLrv5RdeeKEv9vTTT9fKWdcg5vt1Zy0nTpzoiz3yyCO1cq7GGWXV2f5DDz004E7Wn+XcF1S9B2lrth8xmPn+6OhoX+zAgQON12H1qTvf3759e6XXLee+YDXeA7D+eLIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGRqqOsGAAAAAAAAoIrTp083nvNv/uZvSuM7duxovBb527BhQ2m81+s1XmsQOcuMjIy0UgequOuuu/piR44cqZXz0qVLtY5fjueff74vNj4+3lr9QfjQhz7UF3v66ac76KQ5g/iZPPLII43nnJyc7ItNTU01XmcQDh48WBqv+36mmuHh4a5baEXZ3xnk5aWXXmot70//9E/XypnjPQBrjycLAwAAAAAAAAAAAECmLAsDAAAAAAAAAAAAQKYsCwMAAAAAAAAAAABApiwLAwAAAAAAAAAAAECmKi8Lp5SuTyk9m1L6o8Wvd6SU/jKl9GJK6Q9SSsODaxMAAAAAALgac3wAAAAAoMxyniz8KxFx7h1ffykiHiqK4taIeCMiPttkYwAAAAAAwLKY4wMAAAAAfSotC6eUbo6IfxwR/2Hx6xQRPxsRjy2+5FhE3D2IBgEAAAAAgKszxwcAAAAAllL1ycJTEfGvIuL/LH69JSLmiqJ4e/HrlyPiA2UHppQOpJROppROzs/P12oWAAAAAAAoZY4PAAAAAJS65rJwSumTEfF6URSnVlKgKIqvFEVxR1EUd2zatGklKQAAAAAAgCWY4wMAAAAAVzNU4TU/ExGQ+6BdAAAgAElEQVT/JKX0jyLiPRHxDyLi30bESEppaPGpBDdHxCuDaxMAAAAAAFiCOT4AAAAAsKRrLgsXRfEbEfEbEREppY9FxL8siuKfppT+U0T8YkQ8EhGfiYjHB9gnAAAAAABQwhyf9WRsbKwvduHChcZzwkrNzs523QKsGgsLC43n/N3f/d3Gc773ve9tPOd6cuzYsa5bWLG9e/e2VmtqaqovNjk52XhOujM6OtpareHh4b5Yr9erlfP222/vi509e7ZWzq6dOHGiLzY+Pt5BJ3mre+1Vdfz48b7YjTfeWPray5cv16q1cePGWseX2blzZ+M5Ybmuq3Hsr0fEr6aUXoyILRHxe820BAAAAAAANMAcHwAAAAC49pOF36koij+NiD9d/PP5iPhI8y0BAAAAAAArYY4PAAAAALxbnScLAwAAAAAAAAAAAACrmGVhAAAAAAAAAAAAAMjUUNcNwGoyNjbWdQtkZHZ2tusWaMHCwkIrdQ4fPtx4zsuXLzees2u9Xq/xnP5uqGZiYqK1Wlu3bm2lzv79+1up04SpqamuW1gTLl261Eqd0dHRVuoMyvDwcOM5b7/99sZzljl16lQrdSIiTpw40Uqd8fHxVuoMyiD+bv7617/eeM4yGzdubKVORMTOnTtbqwUAkCPzG5pmvp+/tmb7Eeb7VVWdIZTNh9/73vc23c66Une+39bM/gMf+EBf7IYbbih97cc+9rEBd9MMs/1q6s72N2/eXOl162m2X/Uzt+5s//z5832xv//7vy99bZ2/22666abKr60721/rM/uq1sps//rrry+NtzXfN9tnrfFkYQAAAAAAAAAAAADIlGVhAAAAAAAAAAAAAMiUZWEAAAAAAAAAAAAAyJRlYQAAAAAAAAAAAADI1FDXDQAAAAAAAAC0bXZ2tusWaMHCwkIrdQ4fPtx4zsuXLzees2u9Xq/W8WXv27GxsVo514uJiYmB5C37mWzdurXxOq+88kpfbP/+/Y3XoVuXLl1qJefo6GjjddaS4eHhVurccsstfbFTp041XueNN95oPOdSTpw40RcbHx9vrf4g1P27uczXv/71xnN2befOnV23ALV5sjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZGqo6wYAAAAAAADIz/DwcF+s1+vVynnjjTf2xS5fvlwr53XXebbOerV169bS+OzsbMud0JSFhYVW6nz/+99vpU7ZZ15E/c+9LpX93RBR/e+HzZs3N9nOqnDixIm+2Pj4eK2cZe+Fuud+KUt9lrZhZmamND46OtpyJzRlEPd6ZTnLrp31ft18/OMf74s9+eSTjdf58Ic/XBo/depU47XaMojP8UGo+3m/lJMnTw4k77tdf/31rdSJiLjhhhsazzkyMtJ4Tlgu0w8AAAAAAAAAAAAAyJRlYQAAAAAAAAAAAADIlGVhAAAAAAAAAAAAAMiUZWEAAAAAAAAAAAAAyNRQ1w3ASg0PDzee88Ybb2w851LefPPN1mrRja1bt3bdwrp17Nix1mo9++yzrdT51Kc+1XjO8fHxxnN2rdfrNZ7zgQceaDxnmyYnJ1up8+Uvf7mVOhERR44caaXOzMzMQPKOjY0NJC/X1ta93qCunTJr5Xr6+Mc/3kqd0dHRVupERDz88MOt1HniiSdaqRMRcfHixcZzDuL3trvvvrvxnGWeeuqpVupERIyMjLRWCwCga8u5R6w666n7+555Pe9WZ74/OzvbYCfrT1vz/bqz/bLPnX379tXKuRxrZb4/iJn9Qw891Bebm5urlXM1/l4+iNn+F7/4xcZzRrQ3s1+OqjPaNueJrNxame3neD3Vne2fOXOm8ms/+clPrrjO/Px8afzP/uzP+mJ1f85btmyp9LrVONsfxLw+IuL666/vi911112N12lzZl/VaryHgKvxZGEAAAAAAAAAAAAAyJRlYQAAAAAAAAAAAADIlGVhAAAAAAAAAAAAAMiUZWEAAAAAAAAAAAAAyJRlYQAAAAAAAAAAAADI1FDXDQAAAAAAAMC7DQ8Pt1LnhhtuKI2/+eabrdSH9ejYsWOt1Tp//nxrtZo2Pj7edQu1lH2O93q9Wjnvv//+vtjhw4dr5fy7v/u7Wsevd7/2a7/WFzty5EgHnSzfzMxMaXxsbKzlTsiB66nfrl27+mJnzpxpvM6mTZsaz8nqs23btr7Ya6+91lr9kZGR1mrBoHiyMAAAAAAAAAAAAABkyrIwAAAAAAAAAAAAAGTKsjAAAAAAAAAAAAAAZMqyMAAAAAAAAAAAAABkaqjrBgAAAAAAAABycOHChdL4rbfe2hd78cUXa9Uqy1nX0FDz//fxwsJC4zm7tm/fvsZzjo+PN55zvRgeHi6N93q9ljtZPb74xS923QIZafO9VPZ+rlt/qc8IurFr167S+JkzZxqv9clPfrIv9vDDDzdeZ70bHR3trPbmzZtL45cuXaqVd9u2bbWOh9XKk4UBAAAAAAAAAAAAIFOWhQEAAAAAAAAAAAAgU5aFAQAAAAAAAAAAACBTloUBAAAAAAAAAAAAIFNDXTcA61Wv1+u6BQbs5MmTrdW69dZbW6kzMjLSeM6FhYXGc54/f77xnEs5ePBgK3XGxsZaqbPWDeKzdXh4uPGcERGjo6MDyftuU1NTrdQZxHt5KZcuXWqtFleamJhordYgrt1BvZ9ZPXbt2tVarbY+X+n3O7/zO63U2bZtWyt1AABYW+rOn/xuuja0Nd9fK7P9Nmd/g5jv33LLLX2xj33sY43XWYr5fjVrZb6/Vmb7bb5vq6o729+8eXNDnaw/dWf7X/7yl0vjdd5jy3nPu39aH9qa75vtN+/pp5/ui7V1P/2e97xnWXFY7zxZGAAAAAAAAAAAAAAyZVkYAAAAAAAAAAAAADJlWRgAAAAAAAAAAAAAMmVZGAAAAAAAAAAAAAAyNVTlRSmlH0TEfET8MCLeLorijpTSj0XEH0TEByPiBxFxb1EUbwymTeD/tnfHMVKeZ2LAnzdeT0QCYoIvJitzorhBZ8WyDqfIwumJ+OTecbbkEksVwpESckSiEVxVpCYpdawkToIUxWq7rVRi2T1qap3t0OMs29IpvgSV8E9yF+6Meya5CJccOiwwLtFyu2fSsd23fzCOMTMDszvffN/Mt7+fhHb32W/e59HMfu9+8/DoWwAAAACAXvTxoXitVmss1mT+jhw5UnUJhWs2m1WX0JcTJ06UluuOO+4oJc/KlStLyTPuxmkfnJycLCXPMH52Fi9e3BGbnZ0tPE8v58+fH4s162j79u2Fr/nFL36xa3xqamreazYajXk/FhgtH/vYxzpidbzOhjqYy52FfzvnvCbnvLb99a6IOJhzXh0RB9tfAwAAAAAA1dDHBwAAAAA6zGVY+HIbI2Jf+/N9EfGJwcsBAAAAAAAKoo8PAAAAAPQ9LJwj4s9SSn+ZUtrWji3POZ9uf34mIpZ3e2BKaVtK6UhK6cjMzMyA5QIAAAAAAF3o4wMAAAAAXU30edxv5ZxfSSldHxHfSyn9zaXfzDnnlFLu9sCc8yMR8UhExKpVq7oeAwAAAAAADEQfHwAAAADoqq87C+ecX2l/PBsRT0fEbRHxakppMiKi/fHssIoEAAAAAAB608cHAAAAAHq56rBwSun9KaUlb38eEb8bES9FxLMRsaV92JaIeGZYRQIAAAAAAN3p4wMAAAAAVzLRxzHLI+LplNLbxz+Rc/5uSunHEbE/pfTZiDgZEZuGVyYAAAAAANCDPj4AAAAA0NNVh4Vzzici4je7xM9FxJ3DKAoAAAAAAOiPPj4AAAAAcCXvqboAAAAAAAAAAAAAAGA4DAsDAAAAAAAAAAAAQE0ZFgYAAAAAAAAAAACAmjIsDAAAAAAAAAAAAAA1ZVgYAAAAAAAAAAAAAGrKsDAAAAAAAAAAAAAA1JRhYQAAAAAAAAAAAACoqYmqCwAAAAAAAIB+rF27tiN25MiRCiopzvT0dEes2WxWUAkMx8mTJyvNf+HChY7YokWLKqiEcXP69OmO2OTkZAWVFKPRaHSNt1qtwnMtXbq0I3b+/PnC13zjjTe6HnvttdcOlGuc7dmzpyO2ffv2gdb81re+NdDjuxnGzx1QjdnZ2apLKNymTZuqLgGGwp2FAQAAAAAAAAAAAKCmDAsDAAAAAAAAAAAAQE0ZFgYAAAAAAAAAAACAmjIsDAAAAAAAAAAAAAA1ZVgYAAAAAAAAAAAAAGpqouoCYJSsXbu2tFxPPvlkKXmmp6cLX7PZbBa+JgvXyZMnC1/zwoULha85CrmoRqvVGsq6p0+fHsq6l1u5cmUpeRqNRil5IiKWLl1aSp433nijlDzjZM+ePaXlmp2dLXzNYZ3PQG/DOJdnZmYKX7ObT3/606XkAQBgcIP29o8cOdL3sWfOnBko14c+9KG+jptLb1/PnqqMYn9/0aJFI5WHavXbD5xLf3vQ3v7k5ORAjx+GQfv7/T7PZfX2I/rv71977bVDrmQ0DNrb7/Ual9VzHzRPmf+HBXUxir39JUuW9H3spk2bBsoFdeXOwgAAAAAAAAAAAABQU4aFAQAAAAAAAAAAAKCmDAsDAAAAAAAAAAAAQE0ZFgYAAAAAAAAAAACAmpqougAAAAAAAACoytq1a7vGn3zyycJznTlzpvA1p6enO2LNZrPwPMA7Lly4UHUJXKLValX6+Lk4ffp0R2zlypWl5R+GRqNRdQnz9sYbb1Rdwljo9RoP49wZlzWhTmZnZwtfc2ZmpvA1u9m0aVMpeaAu3FkYAAAAAAAAAAAAAGrKsDAAAAAAAAAAAAAA1JRhYQAAAAAAAAAAAACoKcPCAAAAAAAAAAAAAFBTE1UXAAAAAAAAAP1YvXp1R+z48eMVVFKc/fv3d8Q2bdpUQSX1dujQoapLKFxZPyeNRqPwNa+77rqu8VOnThW+7uHDhzti69evHygP/ZmdnS18zd27dxe+5kLR61xutVqF57r33ns7Yk8//XTheYZlenq6I9ZsNiuopL4OHDhQdQml+dGPftQRW7duXQWVsNAN4/dyRMRLL700lHUvd9ttt5WSJ8KeT325szAAAAAAAAAAAAAA1JRhYQAAAAAAAAAAAACoKcPCAAAAAAAAAAAAAFBThoUBAAAAAAAAAAAAoKYmqi4ARsnq1atLy/XWW2+Vkmf//v2Fr7lt27bC1yzToUOHSslz7NixUvJERNx///2l5SraMM67FStWFL5mL4cPHy4lz0033VRKnmGZnZ0tJc8DDzxQSh46NRqN0nLde++9peR54oknhrLuLbfcUviazWaz8DWrduDAgcLXvHDhQuFrVm3nzp2Frzk1NVX4moyHYVy/Pvroo4WvuXnz5sLX7KaOeysAQF0N2mM8fvx438cO2tu/5ppr+jpuLr39TZs2zbecsVJWbz9isPdHH/7wh7vG3/ve93bENm7cOO88o6Dfc28u51hZ/f1Be/vr168vqJLRVlZvP6La/v6iRYsqyz0KBu3vt1qtvo4btLc/MzPTEfv+97/f9dh+e8H33HNP3/mnp6f7Om6h95RGsbc/iud4v719/Xr6UXVv/4Mf/GBH7M477yyynDlb6Hsx9OLOwgAAAAAAAAAAAABQU4aFAQAAAAAAAAAAAKCmDAsDAAAAAAAAAAAAQE0ZFgYAAAAAAAAAAACAmjIsDAAAAAAAAAAAAAA1NVF1AQAAAAAAAFCV1atXd42/9dZbhecaxpr79+/viG3btq3wPGU6dOhQKXmOHTtW+Jovv/xy1/j9999feK5x0e0cO3jwYAWVFOfw4cMdsZtuuqmCSoozOztbSp4HHniglDxzceHChapLGGuNRqOUPEuWLOmIDfraPffcc13jt9xyy7zXnJ6e7hpvNpvzXnNUHThwoPA1h3E+Vn2OP/XUU/N+7M6dO7vGp6am5r0m420Y16+PPvroQI9/7bXXCqpk7uq4t8IwubMwAAAAAAAAAAAAANSUYWEAAAAAAAAAAAAAqCnDwgAAAAAAAAAAAABQU4aFAQAAAAAAAAAAAKCmDAsDAAAAAAAAAAAAQE0ZFgYAAAAAAAAAAACAmjIsDAAAAAAAAAAAAAA1ZVgYAAAAAAAAAAAAAGqqr2HhlFIzpfTHKaW/SSn9NKV0e0ppWUrpeyml4+2PHxh2sQAAAAAAQCd9fAAAAACgl4k+j/tPEfHdnPO/SCk1IuJ9EXF/RBzMOX8zpbQrInZFxL8dUp0AAAAAAEBv+vgsCI1GoyPWarWGkuvBBx/siH3lK18ZaM1u9Q/DI4880jW+bdu2UvLPxaFDh6ouoVA7duwoLVez2SwlT6+f20HOva1bt3aN7927d95rVm2czrvZ2dnKci9evLhrvMqabrzxxspy19WaNWs6YkePHi08z80339w1fuzYsYHWfeaZZzpiGzduHGjN6enpjlhZ+/igDh48WFqupUuXdsTOnz9f+Jpl2rx5c0fsqaee6uuxU1NTRZfDmBh0H5uLbr+b5/J7udvP+DCsW7eulDxQZ1e9s3BKaWlErI+IP4yIyDm3cs7TEbExIva1D9sXEZ8YVpEAAAAAAEB3+vgAAAAAwJVcdVg4IlZFxGsR8d9SSi+klP5rSun9EbE853y6fcyZiFg+rCIBAAAAAICe9PEBAAAAgJ76GRaeiIiPRsS3c863RsQ/xMU/VfYrOeccEbnbg1NK21JKR1JKR2ZmZgatFwAAAAAAeDd9fAAAAACgp36GhU9FxKmc85+3v/7juNh0fDWlNBkR0f54ttuDc86P5JzX5pzXLlmypIiaAQAAAACAd+jjAwAAAAA9XXVYOOd8JiL+LqX0G+3QnRHxk4h4NiK2tGNbIuKZoVQIAAAAAAD0pI8PAAAAAFzJRJ/H/auI+KOUUiMiTkTE78fFQeP9KaXPRsTJiNg0nBKhPI1Go7RcDz74YCl5du/eXfiaX//61wtf89y5c4Wv2UtZr/OOHTtKyRMR0Ww2S8tVtGG8Hlu3bi18zV4eeuihUvKM+3m3ePHiUvJ84xvfKHzNvXv3Fr4mg1mzZk0peV588cWhrPvVr3618DVbrVbha3ZT1rkcEbFhw4bC1zxx4kTha3YzOTlZSp6IiKmpqdJyDaKsn9GHH364lDwR5f08lfXcRUQsW7as8DWHseeN87UvAAyZPj4L1qA9xrlcdw/a23/sscc6Yq+88spAa05M9PvffaPZZyzz/2Yud/fdd3eNr1q1qpT84/7+pt/Xbi7n2KD9/SeeeKIj9stf/rIjNujP7bifd2X2+S63a9eugR6vZz++Bu3tHz16tO9jb7755oFyHTt2rCP2wgsvDLRmN4P2/so6l3s9nytWrJj3mqdOner72KVLl847z1zo7Xcqsz9dVn9/XHr7w+jX93Lfffd1xG644YbC84z7tS/UQV/vYnLORyNibZdv3VlsOQAAAAAAwFzp4wMAAAAAvbyn6gIAAAAAAAAAAAAAgOEwLAwAAAAAAAAAAAAANWVYGAAAAAAAAAAAAABqaqLqAgAAAAAAAKAqjUaja7zVahWe6zOf+UxHbPfu3QOt+eabb3bEJiaK/y/Ac+fOFb5mRPfnuddrUrRVq1aVkiciotlslpZr1HR7PYdxfkVEfPKTn+yIPfTQQ4XnGffzbnZ2tiO2ePHioeQq2tatWztie/furaASyrZmzZqO2IsvvjiUXMPYo4axZlnn8rFjx7rGV6xYMe81ez32xIkT815zLiYnJ0vJExGxcuXK0nINYli/my/38MMPl5Inoryfp2E8d7/4xS+6xpctW1Z4rhtuuKHwNRfytS+MMncWBgAAAAAAAAAAAICaMiwMAAAAAAAAAAAAADVlWBgAAAAAAAAAAAAAasqwMAAAAAAAAAAAAADUlGFhAAAAAAAAAAAAAKgpw8IAAAAAAAAAAAAAUFOGhQEAAAAAAAAAAACgpgwLAwAAAAAAAAAAAEBNGRYGAAAAAAAAAAAAgJqaqLoAAAAAAAAAGDWNRqMj1mq1Cs/zpS99qSO2e/fuvh8/MVHOf/ddd911XePnzp0baN1uz/Mw7Nixo5Q8K1euLCXPuFu8eHHX+OzsbOG5vvCFL3TEHnrooYHWHPfzrtfzX7Rdu3aVkmfr1q1d43v37u3r8bfddluR5VzRunXrSsu1EGzZsqVrfN++fQOte+utt3bEXnjhhYHWHMZ1RVnn8oYNG0rJExGxfv36jtjhw4cHWrPXHlG0cb8GKOva93Of+1zX+MMPP1x4rhtvvLEjduLEicLzDOO5W7Zs2UCP7+bzn/984WtGRDSbzaGsCxTPnYUBAAAAAAAAAAAAoKYMCwMAAAAAAAAAAABATRkWBgAAAAAAAAAAAICaMiwMAAAAAAAAAAAAADVlWBgAAAAAAAAAAAAAairlnEtLtmrVqvy1r31t6Hk+9alPDT0HAAAAAFTp8ccfH3qOL3/5y/Hzn/88DT0RMDL08QEAAACgGKPUx3dnYQAAAAAAAAAAAACoKcPCAAAAAAAAAAAAAFBThoUBAAAAAAAAAAAAoKYMCwMAAAAAAAAAAABATRkWBgAAAAAAAAAAAICaMiwMAAAAAAAAAAAAADVlWBgAAAAAAAAAAAAAasqwMAAAAAAAAAAAAADUlGFhAAAAAAAAAAAAAKgpw8IAAAAAAAAAAAAAUFOGhQEAAAAAAAAAAACgpgwLAwAAAAAAAAAAAEBNGRYGAAAAAAAAAAAAgJoyLAwAAAAAAAAAAAAANWVYGAAAAAAAAAAAAABqyrAwAAAAAAAAAAAAANSUYWEAAAAAAAAAAAAAqCnDwgAAAAAAAAAAAABQU4aFAQAAAAAAAAAAAKCmDAsDAAAAAAAAAAAAQE1NVF0AAAAwmJ07d5aSZ2pqqpQ8MA6cdwAAAADAIAbtMeodwtw574CFzJ2FAQAAAAAAAAAAAKCmDAsDAAAAAAAAAAAAQE0ZFgYAAAAAAAAAAACAmrrqsHBK6TdSSkcv+ff3KaWdKaVlKaXvpZSOtz9+oIyCAQAAAACAd+jjAwAAAABXctVh4Zzzz3LOa3LOayLin0TE6xHxdETsioiDOefVEXGw/TUAAAAAAFAifXwAAAAA4EquOix8mTsj4n/nnE9GxMaI2NeO74uITxRZGAAAAAAAMGf6+AAAAADAu8x1WHhzRDzZ/nx5zvl0+/MzEbG8sKoAAAAAAID50McHAAAAAN6l72HhlFIjIv55RPyPy7+Xc84RkXs8bltK6UhK6cjMzMy8CwUAAAAAAHrTxwcAAAAAupnLnYXvioi/yjm/2v761ZTSZERE++PZbg/KOT+Sc16bc167ZMmSwaoFAAAAAAB60ccHAAAAADrMZVj4vnjnT5dFRDwbEVvan2+JiGeKKgoAAAAAAJgzfXwAAAAAoENfw8IppfdHxO9ExJ9cEv5mRPxOSul4RPyz9tcAAAAAAEDJ9PEBAAAAgF4m+jko5/wPEXHdZbFzEXHnMIoCAAAAAAD6p48PAAAAAPTS152FAQAAAAAAAAAAAIDxY1gYAAAAAAAAAAAAAGrKsDAAAAAAAAAAAAAA1NRE1QUA42f79u2Fr7lnz57C1wSAhWJqaqrqEmDBcd5VZxjvR7rxHgUAgDrq93ra9TAADJ8eI5TPeVedQXv73qPA4NxZGAAAAAAAAAAAAABqyrAwAAAAAAAAAAAAANSUYWEAAAAAAAAAAAAAqCnDwgAAAAAAAAAAAABQUxNVFwAAAAAAAAAUZ/v27YU/fs+ePQOtCQAALAyDvh/pd03vUWBu3FkYAAAAAAAAAAAAAGrKsDAAAAAAAAAAAAAA1JRhYQAAAAAAAAAAAACoKcPCAAAAAAAAAAAAAFBThoUBAAAAAAAAAAAAoKYMCwMAAAAAAAAAAABATRkWBgAAAAAAAAAAAICaMiwMAAAAAAAAAAAAADVlWBgAAAAAAAAAAAAAasqwMAAAAAAAAAAAAADUlGFhAAAAAAAAAAAAAKgpw8IAAAAAAAAAAAAAUFOGhQEAAAAAAAAAAACgpgwLAwAAAAAAAAAAAEBNGRYGAAAAAAAAAAAAgJqaqLoAAAAAAABgYXn++ee7xjds2FByJTD+Hn/88Y7Y7bff3vXYH/7wh32t2e3xzlsAAOByzz33XEfs4x//eEfsBz/4wUB5uq3ZLXdExD333DNQLpiPXu+ZR4k7CwMAAAAAAAAAAABATRkWBgAAAAAAAAAAAICaMiwMAAAAAAAAAAAAADVlWBgAAAAAAAAAAAAAamqi6gKG4fnnny8t19mzZ0vLBaPi9ttvL3xN5+1gGo1GKXlarVYpeeg0jNfY69mfYZ1fnv/5s+dVp9lslpZrenq6tFzjbBiviee+P2WdD+P+egxjz77rrrsKX7Ob73znO6XkifA7p26uv/76qksAGGtl9Qnr2COEfoxzf39cztuyeldz4T1H8ap+nRfya+q5Hz1VvybdLJTXqcyefb/GvZ85iKpfD8/9aBnF16PM/fp973tfR2wYvf3XX3+9a7ys/v5C+X0z7vTs3+HOwgAAAAAAAAAAAABQU4aFAQAAAAAAAAAAAKCmDAsDAAAAAAAAAAAAQE0ZFgYAAAAAAAAAAACAmpqougAAAAAAAIBhOXv2bNUlAHM0zudtq9XqGm80GpXmp1jdnudhvMZez05lPfe9ctGpzNekn9wLyfT0dEes2WxWmn8h6/V8DOM18dy/W5nP/Vzyj5qqr1PH3UL/nTPOur2/vP766yuopHruLAwAAAAAAAAAAAAANWVYGAAAAAAAAAAAAABqyrAwAAAAAAAAAAAAANSUYWEAAAAAAAAAAAAAqCnDwgAAAAAAAAAAAABQU4aFAQAAAAAAAAAAAKCmDKdHLzEAAAXCSURBVAsDAAAAAAAAAAAAQE0ZFgYAAAAAAAAAAACAmjIsDAAAAAAAAAAAAAA1ZVgYAAAAAAAAAAAAAGoq5ZzLS5bSaxFxMiJ+LSL+T2mJgXFjjwB6sT8AvdgfgF7sDzCYlTnnD1ZdBFAefXygD/YH4ErsEUAv9gegF/sDDKavPn6pw8K/SprSkZzz2tITA2PBHgH0Yn8AerE/AL3YHwBgfvwOBXqxPwBXYo8AerE/AL3YH6Ac76m6AAAAAAAAAAAAAABgOAwLAwAAAAAAAAAAAEBNVTUs/EhFeYHxYI8AerE/AL3YH4Be7A8AMD9+hwK92B+AK7FHAL3YH4Be7A9QgpRzrroGAAAAAAAAAAAAAGAIqrqzMAAAAAAAAAAAAAAwZKUPC6eUfi+l9LOU0ssppV1l5wdGS0rpb1NKf51SOppSOtKOLUspfS+ldLz98QNV1wmUI6W0N6V0NqX00iWxrntCuug/t68p/ldK6aPVVQ4MW4/94asppVfa1xFHU0p3X/K9f9feH36WUtpQTdVAGVJKv55S+p8ppZ+klI6llP51O+4aAgDmSR8fuJQ+PvA2PXzgSvTxgW708GF0lDosnFK6JiL+S0TcFREfiYj7UkofKbMGYCT9ds55Tc55bfvrXRFxMOe8OiIOtr8GFobHIuL3Lov12hPuiojV7X/bIuLbJdUIVOOx6NwfIiL+Y/s6Yk3O+U8jItrvMTZHxM3tx+xpvxcB6unNiPg3OeePRMS6iNjR3gdcQwDAPOjjAz3o4wMRevjAlT0W+vhAJz18GBFl31n4toh4Oed8IufcioinImJjyTUAo29jROxrf74vIj5RYS1AiXLOhyPiF5eFe+0JGyPiv+eLfhQRzZTSZDmVAmXrsT/0sjEinso5/9+c888j4uW4+F4EqKGc8+mc81+1P5+JiJ9GxA3hGgIA5ksfH+iHPj4sQHr4wJXo4wPd6OHD6Ch7WPiGiPi7S74+1Y4BC1eOiD9LKf1lSmlbO7Y853y6/fmZiFheTWnAiOi1J7iuACIi/qD9J4j2XvInT+0PsECllP5RRNwaEX8eriEAYL78rgQup48PXIn338DV6OMDEaGHD1Ure1gY4HK/lXP+aFz8MwI7UkrrL/1mzjnHxUYkgD0BuNy3I+IfR8SaiDgdEf++2nKAKqWUFkfEgYjYmXP++0u/5xoCAAAGoo8P9MV+AHShjw9EhB4+jIKyh4VfiYhfv+TrFe0YsEDlnF9pfzwbEU/HxT8t8urbf0Kg/fFsdRUCI6DXnuC6Aha4nPOrOee3cs7/LyIejXf+RJn9ARaYlNK1cbHJ+Ec55z9ph11DAMD8+F0JvIs+PnAV3n8DPenjAxF6+DAqyh4W/nFErE4prUopNSJic0Q8W3INwIhIKb0/pbTk7c8j4ncj4qW4uC9saR+2JSKeqaZCYET02hOejYhPp4vWRcT5S/5MCbAAvN1AaLs3Ll5HRFzcHzanlN6bUloVEasj4i/Krg8oR0opRcQfRsRPc87/4ZJvuYYAgPnRxwd+RR8f6IP330BP+viAHj6Mjokyk+Wc30wp/UFEPB8R10TE3pzzsTJrAEbK8oh4+uJ1QUxExBM55++mlH4cEftTSp+NiJMRsanCGoESpZSejIg7IuLXUkqnIuIrEfHN6L4n/GlE3B0RL0fE6xHx+6UXDJSmx/5wR0ppTVz8s0R/GxH/MiIi53wspbQ/In4SEW9GxI6c81tV1A2U4p9GxKci4q9TSkfbsfvDNQQAzIs+PnAZfXzgV/TwgSvRxwd60MOHEZFyzlXXAAAAAAAAAAAAAAAMwXuqLgAAAAAAAAAAAAAAGA7DwgAAAAAAAAAAAABQU4aFAQAAAAAAAAAAAKCmDAsDAAAAAAAAAAAAQE0ZFgYAAAAAAAAAAACAmjIsDAAAAAAAAAAAAAA1ZVgYAAAAAAAAAAAAAGrKsDAAAAAAAAAAAAAA1NT/B6oUtYdNtY8QAAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "# noisy, raw = data[1]\n", "# print(noisy.shape)\n", "# idx = 2\n", "# plt.imshow(noisy[idx])\n", "# plt.imshow(raw[idx])\n", "# plt.imshow(np.reshape(noisy[idx], (80, 240)))\n", "# plt.imshow(np.reshape(raw[idx], (80, 240)))\n", "# model.load_weights('model/denosing.hdf5')\n", "# rs = model.predict(noisy)\n", "# plt.imshow(np.reshape(rs[idx], (80, 240)))\n", "\n", "# img = Image.open('FileInfo0508/31c1f481-912a-11ea-b24d-408d5cd36814_cmftq.jpg')\n", "# img = Image.open('/data/captcha/shensebeijingsandian/pgv4_d58a8328-c425-11ea-be07-ecf4bbc56acd.jpg')\n", "# import os \n", "# import glob\n", "# print('path exists',os.path.exists('FileInfo0508/'))\n", "# files = glob.glob('FileInfo0508/*.jpg')\n", "img = Image.open(files[6])\n", "# img = Image.open('/data/captcha/shensexiansandian/r4y6_f7bcd30f3c913228ba2404e83aea0806.jpg')\n", "img = Image.open('/data/captcha/0ad9.jpg').resize((200,70), Image.BILINEAR) #小图噪点 \n", "# plt.imshow(img)\n", "print(img.size, img.mode)\n", "img = img.resize((240, 80)).convert('L')\n", "print(img.size)\n", "noisy = np.zeros((1,80,240,1))\n", "noisy[0] = np.reshape(np.array(img) / 255.0, (80, 240, 1))\n", "print(noisy.shape)\n", "# plt.imshow(np.reshape(noisy[0], (80, 240)))\n", "# rs = model.predict(noisy)\n", "# img2 = np.reshape(rs[0], (80, 240))\n", "from PIL import ImageFilter\n", "img2 = img.filter(ImageFilter.MedianFilter(size=3))\n", "img2 = img2.filter(ImageFilter.MedianFilter(size=3))\n", "im = [img, img2]\n", "\n", "# rs = encoder.predict(noisy)\n", "# print('rs.shape', rs.shape)\n", "# im = [img, rs[0][:,:,3]]\n", "\n", "plt.figure(figsize=(50,20))\n", "for i in range(1,3): \n", " plt.subplot(2,2,i)\n", " plt.imshow(im[i-1])\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "_________________________________________________________________\n", "Layer (type) Output Shape Param # \n", "=================================================================\n", "input_1 (InputLayer) (None, 80, 240, 1) 0 \n", "_________________________________________________________________\n", "cnn1 (Conv2D) (None, 80, 240, 32) 1600 \n", "_________________________________________________________________\n", "bn1 (BatchNormalization) (None, 80, 240, 32) 128 \n", "_________________________________________________________________\n", "leaky_re_lu (LeakyReLU) (None, 80, 240, 32) 0 \n", "_________________________________________________________________\n", "pool1 (MaxPooling2D) (None, 80, 240, 32) 0 \n", "_________________________________________________________________\n", "cnn2 (Conv2D) (None, 80, 240, 64) 51264 \n", "_________________________________________________________________\n", "bn2 (BatchNormalization) (None, 80, 240, 64) 256 \n", "_________________________________________________________________\n", "leaky_re_lu_1 (LeakyReLU) (None, 80, 240, 64) 0 \n", "_________________________________________________________________\n", "pool2 (MaxPooling2D) (None, 40, 120, 64) 0 \n", "_________________________________________________________________\n", "cnn3 (Conv2D) (None, 40, 120, 128) 73856 \n", "_________________________________________________________________\n", "bn3 (BatchNormalization) (None, 40, 120, 128) 512 \n", "_________________________________________________________________\n", "leaky_re_lu_2 (LeakyReLU) (None, 40, 120, 128) 0 \n", "_________________________________________________________________\n", "pool3 (MaxPooling2D) (None, 20, 60, 128) 0 \n", "_________________________________________________________________\n", "cnn4 (Conv2D) (None, 20, 60, 128) 147584 \n", "_________________________________________________________________\n", "bn4 (BatchNormalization) (None, 20, 60, 128) 512 \n", "_________________________________________________________________\n", "leaky_re_lu_3 (LeakyReLU) (None, 20, 60, 128) 0 \n", "_________________________________________________________________\n", "pool4 (MaxPooling2D) (None, 10, 30, 128) 0 \n", "_________________________________________________________________\n", "cnn5 (Conv2D) (None, 10, 30, 64) 73792 \n", "_________________________________________________________________\n", "bn5 (BatchNormalization) (None, 10, 30, 64) 256 \n", "_________________________________________________________________\n", "leaky_re_lu_4 (LeakyReLU) (None, 10, 30, 64) 0 \n", "_________________________________________________________________\n", "pool5 (MaxPooling2D) (None, 5, 15, 64) 0 \n", "_________________________________________________________________\n", "cnn6 (Conv2D) (None, 5, 15, 64) 36928 \n", "_________________________________________________________________\n", "bn6 (BatchNormalization) (None, 5, 15, 64) 256 \n", "_________________________________________________________________\n", "leaky_re_lu_5 (LeakyReLU) (None, 5, 15, 64) 0 \n", "_________________________________________________________________\n", "upsamp6 (UpSampling2D) (None, 10, 30, 64) 0 \n", "_________________________________________________________________\n", "cnn7 (Conv2D) (None, 10, 30, 128) 73856 \n", "_________________________________________________________________\n", "bn7 (BatchNormalization) (None, 10, 30, 128) 512 \n", "_________________________________________________________________\n", "leaky_re_lu_6 (LeakyReLU) (None, 10, 30, 128) 0 \n", "_________________________________________________________________\n", "upsamp7 (UpSampling2D) (None, 20, 60, 128) 0 \n", "_________________________________________________________________\n", "cnn14 (Conv2D) (None, 20, 60, 64) 204864 \n", "_________________________________________________________________\n", "bn14 (BatchNormalization) (None, 20, 60, 64) 256 \n", "_________________________________________________________________\n", "leaky_re_lu_7 (LeakyReLU) (None, 20, 60, 64) 0 \n", "_________________________________________________________________\n", "upsamp14 (UpSampling2D) (None, 40, 120, 64) 0 \n", "_________________________________________________________________\n", "cnn15 (Conv2D) (None, 40, 120, 32) 100384 \n", "_________________________________________________________________\n", "bn15 (BatchNormalization) (None, 40, 120, 32) 128 \n", "_________________________________________________________________\n", "leaky_re_lu_8 (LeakyReLU) (None, 40, 120, 32) 0 \n", "_________________________________________________________________\n", "upsamp15 (UpSampling2D) (None, 80, 240, 32) 0 \n", "_________________________________________________________________\n", "decode (Conv2D) (None, 80, 240, 1) 289 \n", "=================================================================\n", "Total params: 767,233\n", "Trainable params: 765,825\n", "Non-trainable params: 1,408\n", "_________________________________________________________________\n" ] } ], "source": [ "from tensorflow.keras.layers import *\n", "from tensorflow.keras.models import *\n", "width = 240 # 60 * 4\n", "height = 80\n", "depth = 1\n", "\n", "inputs = Input(shape=(height,width,depth))\n", "def cnn_layer(index,inputs, filters, kernel_size, strides, padding='same'):\n", " x = Conv2D(filters, kernel_size=kernel_size, strides=strides[0], padding='same',name='cnn{}'.format(index + 1))(inputs)\n", " x = BatchNormalization(name='bn{}'.format(index + 1))(x)\n", " x = LeakyReLU(0.01)(x)\n", " x = MaxPooling2D(pool_size=(2,2), strides=strides[1], padding=padding,name='pool{}'.format(index + 1))(x)\n", " return x\n", "\n", "def up_layer(index, inputs, filters, kernel_size, strides):\n", " x = Conv2D(filters=filters, kernel_size=kernel_size, strides=strides[0], padding='same', name='cnn{}'.format(index+1))(inputs)\n", " x = BatchNormalization(name='bn{}'.format(index+1))(x)\n", " x = LeakyReLU(0.01)(x)\n", " x = UpSampling2D(size=(2,2),name='upsamp{}'.format(index+1))(x)\n", " return x\n", "\n", "# x = ZeroPadding2D(padding=(1,1))(x)\n", " \n", "x = cnn_layer(0,inputs=inputs, kernel_size=7, filters=32, strides=(1, 1))\n", "x = cnn_layer(1,inputs=x, kernel_size=5, filters=64, strides=(1, 2))\n", "x = cnn_layer(2,inputs=x, kernel_size=3, filters=128, strides=(1, 2))\n", "x = cnn_layer(3,inputs=x, kernel_size=3, filters=128, strides=(1, 2))\n", "x = cnn_layer(4,inputs=x, kernel_size=3, filters=64, strides=(1, 2))\n", "encode = x\n", "x = up_layer(5,inputs=x, kernel_size=3, filters=64, strides=(1, 2))\n", "x = up_layer(6,inputs=x, kernel_size=3, filters=128, strides=(1, 2))\n", "# x = up_layer(12,inputs=x, kernel_size=3, filters=128, strides=(1, 2))\n", "x = up_layer(13,inputs=x, kernel_size=5, filters=64, strides=(1, 2))\n", "x = up_layer(14,inputs=x, kernel_size=7, filters=32, strides=(1, 1))\n", "x = Conv2D(filters=depth, kernel_size=3, strides=1, padding='same', activation='sigmoid', name='decode')(x)\n", "model = Model(inputs=inputs, outputs=x)\n", "model.summary()\n", "encoder = Model(inputs=inputs, outputs=encode)\n", "model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Using TensorFlow backend.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/200\n", "1000/1000 [==============================] - 440s 440ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2719 - val_acc: 0.3562\n", "Epoch 2/200\n", "1000/1000 [==============================] - 435s 435ms/step - loss: 0.2719 - acc: 0.3556 - val_loss: 0.2726 - val_acc: 0.3551\n", "Epoch 3/200\n", "1000/1000 [==============================] - 435s 435ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2721 - val_acc: 0.3552\n", "Epoch 4/200\n", "1000/1000 [==============================] - 435s 435ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2722 - val_acc: 0.3552\n", "Epoch 5/200\n", "1000/1000 [==============================] - 435s 435ms/step - loss: 0.2723 - acc: 0.3554 - val_loss: 0.2736 - val_acc: 0.3546\n", "Epoch 6/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2721 - acc: 0.3557 - val_loss: 0.2708 - val_acc: 0.3570\n", "Epoch 7/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2720 - acc: 0.3558 - val_loss: 0.2730 - val_acc: 0.3551\n", "Epoch 8/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2732 - val_acc: 0.3555\n", "Epoch 9/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2719 - val_acc: 0.3563\n", "Epoch 10/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2719 - acc: 0.3559 - val_loss: 0.2720 - val_acc: 0.3561\n", "Epoch 11/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2718 - acc: 0.3559 - val_loss: 0.2717 - val_acc: 0.3559\n", "Epoch 12/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2717 - acc: 0.3558 - val_loss: 0.2726 - val_acc: 0.3558\n", "Epoch 13/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2727 - val_acc: 0.3551\n", "Epoch 14/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2722 - acc: 0.3555 - val_loss: 0.2728 - val_acc: 0.3550\n", "Epoch 15/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2722 - acc: 0.3556 - val_loss: 0.2728 - val_acc: 0.3554\n", "Epoch 16/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2716 - val_acc: 0.3559\n", "Epoch 17/200\n", "1000/1000 [==============================] - 436s 436ms/step - loss: 0.2719 - acc: 0.3558 - val_loss: 0.2737 - val_acc: 0.3550\n", "Epoch 18/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2721 - val_acc: 0.3554\n", "Epoch 19/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2719 - acc: 0.3558 - val_loss: 0.2728 - val_acc: 0.3555\n", "Epoch 20/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2722 - acc: 0.3555 - val_loss: 0.2725 - val_acc: 0.3552\n", "Epoch 21/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2717 - acc: 0.3560 - val_loss: 0.2719 - val_acc: 0.3564\n", "Epoch 22/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2722 - val_acc: 0.3555\n", "Epoch 23/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3560 - val_loss: 0.2719 - val_acc: 0.3559\n", "Epoch 24/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2728 - val_acc: 0.3550\n", "Epoch 25/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2714 - val_acc: 0.3566\n", "Epoch 26/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2714 - val_acc: 0.3557\n", "Epoch 27/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3554 - val_loss: 0.2726 - val_acc: 0.3545\n", "Epoch 28/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2722 - val_acc: 0.3556\n", "Epoch 29/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2717 - val_acc: 0.3566\n", "Epoch 30/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2723 - acc: 0.3553 - val_loss: 0.2729 - val_acc: 0.3548\n", "Epoch 31/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3556 - val_loss: 0.2722 - val_acc: 0.3561\n", "Epoch 32/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3559 - val_loss: 0.2722 - val_acc: 0.3554\n", "Epoch 33/200\n", "1000/1000 [==============================] - 439s 439ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2725 - val_acc: 0.3553\n", "Epoch 34/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2711 - val_acc: 0.3559\n", "Epoch 35/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3558 - val_loss: 0.2725 - val_acc: 0.3553\n", "Epoch 36/200\n", "1000/1000 [==============================] - 439s 439ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2704 - val_acc: 0.3570\n", "Epoch 37/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3556 - val_loss: 0.2720 - val_acc: 0.3561\n", "Epoch 38/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3556 - val_loss: 0.2719 - val_acc: 0.3562\n", "Epoch 39/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2718 - acc: 0.3558 - val_loss: 0.2717 - val_acc: 0.3567\n", "Epoch 40/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2720 - val_acc: 0.3555\n", "Epoch 41/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3555 - val_loss: 0.2725 - val_acc: 0.3557\n", "Epoch 42/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3556 - val_loss: 0.2722 - val_acc: 0.3556\n", "Epoch 43/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2722 - val_acc: 0.3554\n", "Epoch 44/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2718 - acc: 0.3558 - val_loss: 0.2723 - val_acc: 0.3554\n", "Epoch 45/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3555 - val_loss: 0.2713 - val_acc: 0.3561\n", "Epoch 46/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2710 - val_acc: 0.3572\n", "Epoch 47/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3558 - val_loss: 0.2722 - val_acc: 0.3551\n", "Epoch 48/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2717 - acc: 0.3558 - val_loss: 0.2715 - val_acc: 0.3563\n", "Epoch 49/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2721 - val_acc: 0.3560\n", "Epoch 50/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3554 - val_loss: 0.2719 - val_acc: 0.3561\n", "Epoch 51/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2718 - acc: 0.3558 - val_loss: 0.2724 - val_acc: 0.3561\n", "Epoch 52/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3555 - val_loss: 0.2717 - val_acc: 0.3568\n", "Epoch 53/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2718 - acc: 0.3559 - val_loss: 0.2721 - val_acc: 0.3554\n", "Epoch 54/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2718 - acc: 0.3558 - val_loss: 0.2723 - val_acc: 0.3563\n", "Epoch 55/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2718 - acc: 0.3560 - val_loss: 0.2717 - val_acc: 0.3562\n", "Epoch 56/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2723 - acc: 0.3555 - val_loss: 0.2723 - val_acc: 0.3557\n", "Epoch 57/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2722 - acc: 0.3557 - val_loss: 0.2725 - val_acc: 0.3554\n", "Epoch 58/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3555 - val_loss: 0.2725 - val_acc: 0.3550\n", "Epoch 59/200\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3556 - val_loss: 0.2720 - val_acc: 0.3551\n", "Epoch 60/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2722 - acc: 0.3555 - val_loss: 0.2718 - val_acc: 0.3551\n", "Epoch 61/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3557 - val_loss: 0.2720 - val_acc: 0.3563\n", "Epoch 62/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2724 - val_acc: 0.3560\n", "Epoch 63/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2726 - acc: 0.3552 - val_loss: 0.2721 - val_acc: 0.3556\n", "Epoch 64/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2721 - val_acc: 0.3557\n", "Epoch 65/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2727 - val_acc: 0.3549\n", "Epoch 66/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2721 - val_acc: 0.3558\n", "Epoch 67/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3555 - val_loss: 0.2726 - val_acc: 0.3552\n", "Epoch 68/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3555 - val_loss: 0.2724 - val_acc: 0.3550\n", "Epoch 69/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3558 - val_loss: 0.2710 - val_acc: 0.3569\n", "Epoch 70/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2722 - val_acc: 0.3558\n", "Epoch 71/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2719 - acc: 0.3554 - val_loss: 0.2717 - val_acc: 0.3560\n", "Epoch 72/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2727 - val_acc: 0.3552\n", "Epoch 73/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3557 - val_loss: 0.2715 - val_acc: 0.3560\n", "Epoch 74/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2719 - acc: 0.3558 - val_loss: 0.2727 - val_acc: 0.3557\n", "Epoch 75/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2718 - acc: 0.3556 - val_loss: 0.2738 - val_acc: 0.3552\n", "Epoch 76/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3553 - val_loss: 0.2723 - val_acc: 0.3557\n", "Epoch 77/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2717 - val_acc: 0.3562\n", "Epoch 78/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2718 - val_acc: 0.3559\n", "Epoch 79/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2719 - acc: 0.3555 - val_loss: 0.2717 - val_acc: 0.3556\n", "Epoch 80/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2719 - acc: 0.3556 - val_loss: 0.2717 - val_acc: 0.3555\n", "Epoch 81/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2722 - val_acc: 0.3561\n", "Epoch 82/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2718 - acc: 0.3558 - val_loss: 0.2724 - val_acc: 0.3554\n", "Epoch 83/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3557 - val_loss: 0.2713 - val_acc: 0.3565\n", "Epoch 84/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2722 - acc: 0.3554 - val_loss: 0.2730 - val_acc: 0.3542\n", "Epoch 85/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2719 - acc: 0.3555 - val_loss: 0.2718 - val_acc: 0.3555\n", "Epoch 86/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2718 - acc: 0.3558 - val_loss: 0.2730 - val_acc: 0.3547\n", "Epoch 87/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3555 - val_loss: 0.2730 - val_acc: 0.3552\n", "Epoch 88/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3555 - val_loss: 0.2717 - val_acc: 0.3564\n", "Epoch 89/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3557 - val_loss: 0.2715 - val_acc: 0.3566\n", "Epoch 90/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3555 - val_loss: 0.2718 - val_acc: 0.3556\n", "Epoch 91/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2725 - acc: 0.3552 - val_loss: 0.2727 - val_acc: 0.3551\n", "Epoch 92/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2723 - val_acc: 0.3557\n", "Epoch 93/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2720 - acc: 0.3554 - val_loss: 0.2726 - val_acc: 0.3548\n", "Epoch 94/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3556 - val_loss: 0.2712 - val_acc: 0.3563\n", "Epoch 95/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2717 - acc: 0.3558 - val_loss: 0.2726 - val_acc: 0.3555\n", "Epoch 96/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3555 - val_loss: 0.2714 - val_acc: 0.3559\n", "Epoch 97/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3553 - val_loss: 0.2719 - val_acc: 0.3557\n", "Epoch 98/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2721 - acc: 0.3554 - val_loss: 0.2720 - val_acc: 0.3560\n", "Epoch 99/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2717 - acc: 0.3558 - val_loss: 0.2715 - val_acc: 0.3559\n", "Epoch 100/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2717 - acc: 0.3558 - val_loss: 0.2720 - val_acc: 0.3557\n", "Epoch 101/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2723 - acc: 0.3554 - val_loss: 0.2718 - val_acc: 0.3558\n", "Epoch 102/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3555 - val_loss: 0.2716 - val_acc: 0.3564\n", "Epoch 103/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3556 - val_loss: 0.2722 - val_acc: 0.3553\n", "Epoch 104/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3553 - val_loss: 0.2716 - val_acc: 0.3561\n", "Epoch 105/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2718 - acc: 0.3558 - val_loss: 0.2719 - val_acc: 0.3554\n", "Epoch 106/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3559 - val_loss: 0.2728 - val_acc: 0.3549\n", "Epoch 107/200\n", "1000/1000 [==============================] - 439s 439ms/step - loss: 0.2719 - acc: 0.3556 - val_loss: 0.2717 - val_acc: 0.3559\n", "Epoch 108/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2718 - acc: 0.3557 - val_loss: 0.2729 - val_acc: 0.3554\n", "Epoch 109/200\n", "1000/1000 [==============================] - 439s 439ms/step - loss: 0.2716 - acc: 0.3559 - val_loss: 0.2706 - val_acc: 0.3562\n", "Epoch 110/200\n", "1000/1000 [==============================] - 439s 439ms/step - loss: 0.2718 - acc: 0.3557 - val_loss: 0.2718 - val_acc: 0.3559\n", "Epoch 111/200\n", "1000/1000 [==============================] - 439s 439ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2726 - val_acc: 0.3550\n", "Epoch 112/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3558 - val_loss: 0.2725 - val_acc: 0.3556\n", "Epoch 113/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3556 - val_loss: 0.2728 - val_acc: 0.3548\n", "Epoch 114/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2718 - acc: 0.3556 - val_loss: 0.2723 - val_acc: 0.3551\n", "Epoch 115/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3555 - val_loss: 0.2707 - val_acc: 0.3570\n", "Epoch 116/200\n", "1000/1000 [==============================] - 437s 437ms/step - loss: 0.2717 - acc: 0.3559 - val_loss: 0.2729 - val_acc: 0.3559\n", "Epoch 117/200\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2716 - acc: 0.3559 - val_loss: 0.2726 - val_acc: 0.3558\n", "Epoch 118/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2718 - acc: 0.3557 - val_loss: 0.2722 - val_acc: 0.3557\n", "Epoch 119/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3557 - val_loss: 0.2725 - val_acc: 0.3555\n", "Epoch 120/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3558 - val_loss: 0.2726 - val_acc: 0.3552\n", "Epoch 121/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3557 - val_loss: 0.2719 - val_acc: 0.3556\n", "Epoch 122/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2721 - acc: 0.3555 - val_loss: 0.2720 - val_acc: 0.3557\n", "Epoch 123/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3554 - val_loss: 0.2721 - val_acc: 0.3558\n", "Epoch 124/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3555 - val_loss: 0.2719 - val_acc: 0.3564\n", "Epoch 125/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3557 - val_loss: 0.2716 - val_acc: 0.3563\n", "Epoch 126/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2722 - acc: 0.3557 - val_loss: 0.2716 - val_acc: 0.3557\n", "Epoch 127/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2719 - acc: 0.3559 - val_loss: 0.2712 - val_acc: 0.3570\n", "Epoch 128/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2720 - acc: 0.3556 - val_loss: 0.2727 - val_acc: 0.3549\n", "Epoch 129/200\n", "1000/1000 [==============================] - 438s 438ms/step - loss: 0.2718 - acc: 0.3556 - val_loss: 0.2730 - val_acc: 0.3565\n", "Epoch 130/200\n", " 473/1000 [=============>................] - ETA: 3:43 - loss: 0.2724 - acc: 0.3552" ] }, { "name": "stderr", "output_type": "stream", "text": [ "Process ForkPoolWorker-1541:\n", "Process ForkPoolWorker-1544:\n", "Process ForkPoolWorker-1543:\n", "Process ForkPoolWorker-1542:\n", "Traceback (most recent call last):\n", "Traceback (most recent call last):\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/process.py\", line 254, in _bootstrap\n", " self.run()\n", "Traceback (most recent call last):\n", "Traceback (most recent call last):\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/process.py\", line 93, in run\n", " self._target(*self._args, **self._kwargs)\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/process.py\", line 254, in _bootstrap\n", " self.run()\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/process.py\", line 254, in _bootstrap\n", " self.run()\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/process.py\", line 254, in _bootstrap\n", " self.run()\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/pool.py\", line 108, in worker\n", " task = get()\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/process.py\", line 93, in run\n", " self._target(*self._args, **self._kwargs)\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/process.py\", line 93, in run\n", " self._target(*self._args, **self._kwargs)\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/process.py\", line 93, in run\n", " self._target(*self._args, **self._kwargs)\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/pool.py\", line 119, in worker\n", " result = (True, func(*args, **kwds))\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/queues.py\", line 343, in get\n", " res = self._reader.recv_bytes()\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/pool.py\", line 108, in worker\n", " task = get()\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/pool.py\", line 108, in worker\n", " task = get()\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/connection.py\", line 216, in recv_bytes\n", " buf = self._recv_bytes(maxlength)\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/queues.py\", line 342, in get\n", " with self._rlock:\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/site-packages/tensorflow/python/keras/utils/data_utils.py\", line 432, in get_index\n", " return _SHARED_SEQUENCES[uid][i]\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/queues.py\", line 342, in get\n", " with self._rlock:\n", " File \"\", line 59, in __getitem__\n", " noise_color=[(200, 200, 255)], bg=[(255, 255, 255)])\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/synchronize.py\", line 96, in __enter__\n", " return self._semlock.__enter__()\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/connection.py\", line 407, in _recv_bytes\n", " buf = self._recv(4)\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/synchronize.py\", line 96, in __enter__\n", " return self._semlock.__enter__()\n", " File \"\", line 123, in gen_captcha\n", " img_clean = copy.deepcopy(img)\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/multiprocessing/connection.py\", line 379, in _recv\n", " chunk = read(handle, remaining)\n", "KeyboardInterrupt\n", "KeyboardInterrupt\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/copy.py\", line 174, in deepcopy\n", " rv = reductor(4)\n", "KeyboardInterrupt\n", " File \"/home/python/anaconda3/envs/dl_nlp/lib/python3.5/site-packages/PIL/Image.py\", line 705, in __getstate__\n", " def __getstate__(self):\n", "KeyboardInterrupt\n" ] }, { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0mvalid_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mCaptchaSequence\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcharacters\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m128\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msteps\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m100\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mwidth\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m240\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mheight\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m80\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# (characters, batch_size=128, steps=100)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m model.fit_generator(train_data, epochs=200, validation_data=valid_data, workers=4, use_multiprocessing=True,\n\u001b[0;32m---> 10\u001b[0;31m callbacks=[checkpoint])\n\u001b[0m", "\u001b[0;32m~/anaconda3/envs/dl_nlp/lib/python3.5/site-packages/tensorflow/python/keras/engine/training.py\u001b[0m in \u001b[0;36mfit_generator\u001b[0;34m(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)\u001b[0m\n\u001b[1;32m 1777\u001b[0m \u001b[0muse_multiprocessing\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0muse_multiprocessing\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1778\u001b[0m \u001b[0mshuffle\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mshuffle\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1779\u001b[0;31m initial_epoch=initial_epoch)\n\u001b[0m\u001b[1;32m 1780\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1781\u001b[0m def evaluate_generator(self,\n", "\u001b[0;32m~/anaconda3/envs/dl_nlp/lib/python3.5/site-packages/tensorflow/python/keras/engine/training_generator.py\u001b[0m in \u001b[0;36mfit_generator\u001b[0;34m(model, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)\u001b[0m\n\u001b[1;32m 202\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 203\u001b[0m outs = model.train_on_batch(\n\u001b[0;32m--> 204\u001b[0;31m x, y, sample_weight=sample_weight, class_weight=class_weight)\n\u001b[0m\u001b[1;32m 205\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 206\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dl_nlp/lib/python3.5/site-packages/tensorflow/python/keras/engine/training.py\u001b[0m in \u001b[0;36mtrain_on_batch\u001b[0;34m(self, x, y, sample_weight, class_weight)\u001b[0m\n\u001b[1;32m 1550\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1551\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_make_train_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1552\u001b[0;31m \u001b[0moutputs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1553\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1554\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dl_nlp/lib/python3.5/site-packages/tensorflow/python/keras/backend.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 2912\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_make_callable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfeed_arrays\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_symbols\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msymbol_vals\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msession\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2913\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2914\u001b[0;31m \u001b[0mfetched\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_callable_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0marray_vals\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2915\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_fetch_callbacks\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetched\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2916\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mfetched\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dl_nlp/lib/python3.5/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1380\u001b[0m ret = tf_session.TF_SessionRunCallable(\n\u001b[1;32m 1381\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_handle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstatus\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1382\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 1383\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1384\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ "from keras.callbacks import TensorBoard, ModelCheckpoint\n", "\n", "checkpoint = ModelCheckpoint(filepath='model/denosing.hdf5',\n", " save_weights_only=True, save_best_only=True) # 'model/zibianmaquzao.hdf5' verbose=1, \n", "\n", "model.load_weights('model/denosing.hdf5')\n", "train_data = CaptchaSequence(characters, batch_size=128, steps=1000, width=240, height=80) # (characters, batch_size=128, steps=1000)\n", "valid_data = CaptchaSequence(characters, batch_size=128, steps=100, width=240, height=80) # (characters, batch_size=128, steps=100)\n", "model.fit_generator(train_data, epochs=200, validation_data=valid_data, workers=4, use_multiprocessing=True,\n", " callbacks=[checkpoint])" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.5.0" } }, "nbformat": 4, "nbformat_minor": 2 }