diff --git "a/code/04-\346\225\260\346\215\256\350\257\273\345\217\226\345\222\214\346\223\215\344\275\234/\346\225\260\346\215\256\346\223\215\344\275\234.ipynb" "b/code/04-\346\225\260\346\215\256\350\257\273\345\217\226\345\222\214\346\223\215\344\275\234/\346\225\260\346\215\256\346\223\215\344\275\234.ipynb" new file mode 100644 index 0000000..4fe5a00 --- /dev/null +++ "b/code/04-\346\225\260\346\215\256\350\257\273\345\217\226\345\222\214\346\223\215\344\275\234/\346\225\260\346\215\256\346\223\215\344\275\234.ipynb" @@ -0,0 +1,716 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "d4318b85", + "metadata": {}, + "outputs": [], + "source": [ + "import torch" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "acab34f9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "x = torch.arange(12)\n", + "x" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "4c48667b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([12])" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "x.shape # 形状是一个Tensor" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "41c31e36", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "12" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "x.numel() # number of elements 标量" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "ad5ebe9f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[ 0, 1],\n", + " [ 2, 3]],\n", + "\n", + " [[ 4, 5],\n", + " [ 6, 7]],\n", + "\n", + " [[ 8, 9],\n", + " [10, 11]]])" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": 
"execute_result" + } + ], + "source": [ + "x = x.reshape(3, 2, 2)\n", + "x" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "d7f4f0a0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[0., 0., 0., 0.],\n", + " [0., 0., 0., 0.],\n", + " [0., 0., 0., 0.]],\n", + "\n", + " [[0., 0., 0., 0.],\n", + " [0., 0., 0., 0.],\n", + " [0., 0., 0., 0.]]])" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch.zeros((2, 3, 4))" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "b5892774", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[1., 1., 1., 1.],\n", + " [1., 1., 1., 1.],\n", + " [1., 1., 1., 1.]],\n", + "\n", + " [[1., 1., 1., 1.],\n", + " [1., 1., 1., 1.],\n", + " [1., 1., 1., 1.]]])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch.ones((2, 3, 4))" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "3d1d9aee", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([3, 4])" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch.tensor([[2, 1, 4, 3], [1, 2, 3, 4], [4, 3, 2, 1]]).shape" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "dbbac335", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([ 3., 4., 6., 10.]),\n", + " tensor([-1., 0., 2., 6.]),\n", + " tensor([ 2., 4., 8., 16.]),\n", + " tensor([0.5000, 1.0000, 2.0000, 4.0000]),\n", + " tensor([ 1., 4., 16., 64.]))" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "x = torch.tensor([1.0, 2, 4, 8])\n", + "y = torch.tensor([2, 2, 2, 2])\n", + "x + y, x - y, x * y, x / y, x**y" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "74687d03", + "metadata": {}, + 
"outputs": [ + { + "data": { + "text/plain": [ + "tensor([2.7183e+00, 7.3891e+00, 5.4598e+01, 2.9810e+03])" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch.exp(x)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "0ed84770", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([[ 0., 1., 2., 3.],\n", + " [ 4., 5., 6., 7.],\n", + " [ 8., 9., 10., 11.],\n", + " [ 2., 1., 4., 3.],\n", + " [ 1., 2., 3., 4.],\n", + " [ 4., 3., 2., 1.]]),\n", + " tensor([[ 0., 1., 2., 3., 2., 1., 4., 3.],\n", + " [ 4., 5., 6., 7., 1., 2., 3., 4.],\n", + " [ 8., 9., 10., 11., 4., 3., 2., 1.]]))" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X = torch.arange(12, dtype=torch.float32).reshape((3, 4))\n", + "Y = torch.tensor([[2.0, 1, 4, 3], [1, 2, 3, 4], [4, 3, 2, 1]])\n", + "torch.cat((X, Y), dim=0), torch.cat((X, Y), dim=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "76d5b846", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([[[ 0., 1.],\n", + " [ 2., 3.]],\n", + " \n", + " [[ 4., 5.],\n", + " [ 6., 7.]],\n", + " \n", + " [[ 8., 9.],\n", + " [10., 11.]],\n", + " \n", + " [[ 2., 1.],\n", + " [ 4., 3.]],\n", + " \n", + " [[ 1., 2.],\n", + " [ 3., 4.]],\n", + " \n", + " [[ 4., 3.],\n", + " [ 2., 1.]]]),\n", + " tensor([[[ 0., 1.],\n", + " [ 2., 3.],\n", + " [ 2., 1.],\n", + " [ 4., 3.]],\n", + " \n", + " [[ 4., 5.],\n", + " [ 6., 7.],\n", + " [ 1., 2.],\n", + " [ 3., 4.]],\n", + " \n", + " [[ 8., 9.],\n", + " [10., 11.],\n", + " [ 4., 3.],\n", + " [ 2., 1.]]]),\n", + " tensor([[[ 0., 1., 2., 1.],\n", + " [ 2., 3., 4., 3.]],\n", + " \n", + " [[ 4., 5., 1., 2.],\n", + " [ 6., 7., 3., 4.]],\n", + " \n", + " [[ 8., 9., 4., 3.],\n", + " [10., 11., 2., 1.]]]))" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + 
"source": [ + "X = torch.arange(12, dtype=torch.float32).reshape((3, 2, 2))\n", + "Y = torch.tensor([[[2.0, 1], [4, 3]], [[1, 2], [3, 4]], [[4, 3], [2, 1]]])\n", + "torch.cat((X, Y), dim=0), torch.cat((X, Y), dim=1), torch.cat((X, Y), dim=2)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "3cdae941", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[False, True],\n", + " [False, True]],\n", + "\n", + " [[False, False],\n", + " [False, False]],\n", + "\n", + " [[False, False],\n", + " [False, False]]])" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X == Y" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "b4783c7f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(66.)" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X.sum()" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "022acc52", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([[0],\n", + " [1],\n", + " [2]]),\n", + " tensor([[0, 1]]))" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a = torch.arange(3).reshape((3, 1))\n", + "b = torch.arange(2).reshape((1, 2))\n", + "a, b" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "991baa4c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0, 1],\n", + " [1, 2],\n", + " [2, 3]])" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a + b # broadcasting mechanism" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "4be5d210", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([[ 0., 1., 2., 3.],\n", + " [ 4., 5., 6., 7.],\n", + " [ 8., 9., 10., 11.]]),\n", + " tensor([ 8., 9., 
10., 11.]),\n", + " tensor([[ 4., 5., 6., 7.],\n", + " [ 8., 9., 10., 11.]]))" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X = torch.arange(12, dtype=torch.float32).reshape((3, 4))\n", + "Y = torch.tensor([[2.0, 1, 4, 3], [1, 2, 3, 4], [4, 3, 2, 1]])\n", + "X, X[-1], X[1:3] # the last element, the 2nd & 3rd elements" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "de56f357", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[ 0., 3.],\n", + " [ 8., 11.]])" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X[::2, ::3]" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "5ab91418", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[ 0., 1., 2., 3.],\n", + " [ 4., 5., 9., 7.],\n", + " [ 8., 9., 10., 11.]])" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X[1, 2] = 9\n", + "X" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "da97e754", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[12., 12., 12., 12.],\n", + " [12., 12., 12., 12.],\n", + " [ 8., 9., 10., 11.]])" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X[0:2, :] = 12\n", + "X" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "559caffe", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2725130111488" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "id(Y)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "bd6c5438", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2725130062848\n", + "2725130062848\n" + ] + } + ], + "source": [ + "Z = 
torch.zeros_like(Y)\n", + "print(id(Z))\n", + "Z[:] = X + Y # 原地操作\n", + "print(id(Z))" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "86fe37ca", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "before = id(X)\n", + "X += Y\n", + "id(X) == before" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "cd389d85", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(numpy.ndarray, torch.Tensor)" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "A = X.numpy()\n", + "B = torch.tensor(A)\n", + "type(A), type(B)" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "57068db9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([3.5000]), 3.5, 3.5, 3)" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a = torch.tensor([3.5])\n", + "a, a.item(), float(a), int(a)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "a61e58d8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2])" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a = torch.arange(12)\n", + "b = a.reshape((3, 4))\n", + "b[:] = 2\n", + "a" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "c7f58134", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a.dim()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c5720285", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 
(ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + }, + "toc": { + "base_numbering": 1, + "nav_menu": {}, + "number_sections": true, + "sideBar": true, + "skip_h1_title": false, + "title_cell": "Table of Contents", + "title_sidebar": "Contents", + "toc_cell": false, + "toc_position": {}, + "toc_section_display": true, + "toc_window_display": false + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git "a/code/04-\346\225\260\346\215\256\350\257\273\345\217\226\345\222\214\346\223\215\344\275\234/\346\225\260\346\215\256\350\257\273\345\217\226.ipynb" "b/code/04-\346\225\260\346\215\256\350\257\273\345\217\226\345\222\214\346\223\215\344\275\234/\346\225\260\346\215\256\350\257\273\345\217\226.ipynb" new file mode 100644 index 0000000..bd67aa7 --- /dev/null +++ "b/code/04-\346\225\260\346\215\256\350\257\273\345\217\226\345\222\214\346\223\215\344\275\234/\346\225\260\346\215\256\350\257\273\345\217\226.ipynb" @@ -0,0 +1,456 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "bf04964f", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "os.makedirs(os.path.join('..', 'data'), exist_ok=True)\n", + "data_file = os.path.join('..', 'data', 'house_tiny.csv')\n", + "with open(data_file, 'w') as f:\n", + " f.write('NumRooms,Alley,Price\\n') # 列名\n", + " f.write('NA,Pave,127500\\n') # 一个样本\n", + " f.write('2,NA,106000\\n')\n", + " f.write('4,NA,178100\\n')\n", + " f.write('NA,NA,140000\\n')" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "b5984b6c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
随lambda增大精度先降低后不收敛," + ] + }, + { + "cell_type": "markdown", + "id": "e5f48dcd", + "metadata": {}, + "source": [ + "[评论区](https://zh-v2.d2l.ai/chapter_multilayer-perceptrons/weight-decay.html)有其他同学发布的图片" + ] + }, + { + "cell_type": "markdown", + "id": "f6f883dd", + "metadata": {}, + "source": [ + "2. 使用验证集来找到最佳值$\\lambda$。它真的是最优值吗?这有关系吗?\n", + "
不能确定是否是最优值,但没关系,只要足够优即可\n", + "1. 如果我们使用$\\sum_i |w_i|$作为我们选择的惩罚($L_1$正则化),那么更新方程会是什么样子?\n", + "
$\\frac{\\partial{}}{\\partial{\\mathbf{w}}}(\\ell(\\mathbf{w},b)+\\lambda||\\mathbf{w}||_1)=\\frac{\\partial{\\ell(\\mathbf{w},b)}}{{\\partial{\\mathbf{w}}}}+\\lambda I'$\n", + "
其中$I'=(a_1,...,a_n)$,当$\\mathbf{w}$中第$i$个元素为正时$a_i=1$,为负时$a_i=-1$;当该元素为0时$|w_i|$不可导,次梯度可在$[-1,1]$中任取(通常取$a_i=0$)
代入公式化简得$\\mathbf{w}_{t+1}=\\mathbf{w}_{t}-\\eta\\frac{\\partial{\\ell(\\mathbf{w}_t,b_t)}}{{\\partial{\\mathbf{w}_{t}}}}-\\eta\\lambda I'$\n", + "
从这个式子可以看出,使用L1正则化时只能对所有同号的参数施加一个相同大小的正则项(增减一个定值);反观L2正则化,它对参数的影响与参数本身的值有关(乘上一个系数),因此似乎是更好的选择。不过L1正则化在特征提取(产生稀疏解)上会有用处。\n", + "1. 我们知道$\\|\\mathbf{w}\\|^2 = \\mathbf{w}^\\top \\mathbf{w}$。你能找到类似的矩阵方程吗(见 :numref:`subsec_lin-algebra-norms` 中的Frobenius范数)?
类似的矩阵方程是 $\\|\\mathbf{X}\\|_F^2 = \\mathrm{tr}(\\mathbf{X}^\\top \\mathbf{X})$:Frobenius范数的平方等于 $\\mathbf{X}^\\top \\mathbf{X}$ 对角线元素之和(迹)\n", + "1. 回顾训练误差和泛化误差之间的关系。除了权重衰减、增加训练数据、使用适当复杂度的模型之外,你还能想出其他什么方法来处理过拟合?
EarlyStopping等\n", + "1. 在贝叶斯统计中,我们使用先验和似然的乘积,通过公式$P(w \\mid x) \\propto P(x \\mid w) P(w)$得到后验。如何得到带正则化的$P(w)$?\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6d395f2b", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git "a/code/13-\344\270\242\345\274\203\346\263\225.ipynb" "b/code/13-\344\270\242\345\274\203\346\263\225.ipynb" new file mode 100644 index 0000000..356599e --- /dev/null +++ "b/code/13-\344\270\242\345\274\203\346\263\225.ipynb" @@ -0,0 +1,76 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 6, + "id": "d5195177", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "\n", + "def dropout_layer (X,dropout): #X为dropout层的输入,dropout为设置的丢弃概率\n", + " assert 0<=dropout<=1 #丢弃概率介于0,1之间\n", + " if dropout == 1:\n", + " return torch.zeros_like(X) #若丢弃概率为1,则X的全部项均被置0\n", + " if dropout == 0:\n", + " return X #若丢弃概率为0,不对X作丢弃操作,直接返回X\n", + " mask=(torch.Tensor(X.shape).uniform_(0,1)>dropout).float() #用uniform函数生成0-1间的随机实数,利用”>\",将大于dropout的记为1,小于dropout的记为0,实现丢弃操作\n", + " return mask*X/(1-dropout) #将mask与X相乘实现丢弃操作,并除以(1-dropout),这里不使用选中X中元素置0的原因是相乘操作相比选中操作更快" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "8ff5ba10", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[ 0., 1., 2., 3., 4., 5., 6., 7.],\n", + " [ 8., 9., 10., 11., 12., 13., 14., 15.]])\n", + "tensor([[ 0., 1., 2., 3., 4., 5., 6., 7.],\n", + " [ 8., 9., 
10., 11., 12., 13., 14., 15.]])\n", + "tensor([[ 0., 0., 4., 0., 0., 10., 0., 14.],\n", + " [16., 18., 20., 0., 0., 26., 0., 30.]])\n", + "tensor([[0., 0., 0., 0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0., 0., 0., 0.]])\n" + ] + } + ], + "source": [ + "#丢弃法测试\n", + "X=torch.arange(16,dtype=torch.float32).reshape((2,8))\n", + "print(X)\n", + "print(dropout_layer (X,0.)) #丢弃率设置为0\n", + "print(dropout_layer (X,0.5)) #丢弃率设置为0.5\n", + "print(dropout_layer (X,1)) #丢弃率设置为1" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python [conda env:py36torch040]", + "language": "python", + "name": "conda-env-py36torch040-py" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.13" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git "a/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/custom-layer.ipynb" "b/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/custom-layer.ipynb" new file mode 100644 index 0000000..2fbfe73 --- /dev/null +++ "b/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/custom-layer.ipynb" @@ -0,0 +1,352 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 自定义层\n", + "\n", + "深度学习成功背后的一个因素是神经网络的灵活性:\n", + "我们可以用创造性的方式组合不同的层,从而设计出适用于各种任务的架构。\n", + "例如,研究人员发明了专门用于处理图像、文本、序列数据和执行动态规划的层。\n", + "未来,你会遇到或要自己发明一个现在在深度学习框架中还不存在的层。\n", + "在这些情况下,你必须构建自定义层。在本节中,我们将向你展示如何构建。\n", + "\n", + "## 不带参数的层\n", + "\n", + "首先,我们(**构造一个没有任何参数的自定义层**)。\n", + "如果你还记得我们在 :numref:`sec_model_construction`对块的介绍,\n", + "这应该看起来很眼熟。\n", + "下面的`CenteredLayer`类要从其输入中减去均值。\n", + "要构建它,我们只需继承基础层类并实现前向传播功能。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + 
"tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import torch\n", + "import torch.nn.functional as F\n", + "from torch import nn\n", + "\n", + "\n", + "class CenteredLayer(nn.Module):\n", + " def __init__(self):\n", + " super().__init__()\n", + "\n", + " def forward(self, X):\n", + " return X - X.mean()#使用了广播机制" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "让我们向该层提供一些数据,验证它是否能按预期工作。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([-2., -1., 0., 1., 2.])" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "layer = CenteredLayer()\n", + "layer(torch.FloatTensor([1, 2, 3, 4, 5]))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "现在,我们可以[**将层作为组件合并到更复杂的模型中**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 10, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "net = nn.Sequential(nn.Linear(8, 128), CenteredLayer())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 12 + }, + "source": [ + "作为额外的健全性检查,我们可以在向该网络发送随机数据后,检查均值是否为0。\n", + "由于我们处理的是浮点数,因为存储精度的原因,我们仍然可能会看到一个非常小的非零数。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 14, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(0., grad_fn=)" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Y = net(torch.rand(4, 8))\n", + "Y.mean()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 16 + }, + "source": [ + "## [**带参数的层**]\n", + "\n", + "以上我们知道了如何定义简单的层,下面我们继续定义具有参数的层,\n", + "这些参数可以通过训练进行调整。\n", + "我们可以使用内置函数来创建参数,这些函数提供一些基本的管理功能。\n", + "比如管理访问、初始化、共享、保存和加载模型参数。\n", 
+ "这样做的好处之一是:我们不需要为每个自定义层编写自定义的序列化程序。\n", + "\n", + "现在,让我们实现自定义版本的全连接层。\n", + "回想一下,该层需要两个参数,一个用于表示权重,另一个用于表示偏置项。\n", + "在此实现中,我们使用修正线性单元作为激活函数。\n", + "该层需要输入参数:`in_units`和`units`,分别表示输入数和输出数。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 18, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "class MyLinear(nn.Module):\n", + " def __init__(self, in_units, units):\n", + " super().__init__()\n", + " #实例化Parameter的同时传入随机值作为初始值\n", + " self.weight = nn.Parameter(torch.randn(in_units, units))\n", + " self.bias = nn.Parameter(torch.randn(units,))\n", + " def forward(self, X):\n", + " linear = torch.matmul(X, self.weight.data) + self.bias.data#通过.data获取tensor形式的参数\n", + " return F.relu(linear)#在前向传播中使用ReLU激活" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 21, + "tab": [ + "pytorch" + ] + }, + "source": [ + "接下来,我们实例化`MyLinear`类并访问其模型参数。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 23, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Parameter containing:\n", + "tensor([[ 2.2578, -0.4571, -1.4185],\n", + " [ 1.6469, -0.4779, 1.0448],\n", + " [ 0.9764, 0.4048, 0.6039],\n", + " [ 0.9259, 1.2831, -0.4907],\n", + " [ 1.1768, -0.2245, 1.2883]], requires_grad=True)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "linear = MyLinear(5, 3)\n", + "linear.weight" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 25 + }, + "source": [ + "我们可以[**使用自定义层直接执行前向传播计算**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 27, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0.9983, 1.5834, 0.1999],\n", + " [2.7293, 0.3060, 1.1866]])" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + 
"linear(torch.rand(2, 5))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 29 + }, + "source": [ + "我们还可以(**使用自定义层构建模型**),就像使用内置的全连接层一样使用自定义层。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 31, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0.],\n", + " [0.]])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net = nn.Sequential(MyLinear(64, 8), MyLinear(8, 1))\n", + "net(torch.rand(2, 64))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 33 + }, + "source": [ + "## 小结\n", + "\n", + "* 我们可以通过基本层类设计自定义层。这允许我们定义灵活的新层,其行为与深度学习框架中的任何现有层不同。\n", + "* 在自定义层定义完成后,我们就可以在任意环境和网络架构中调用该自定义层。\n", + "* 层可以有局部参数,这些参数可以通过内置函数创建。\n", + "\n", + "## 练习\n", + "\n", + "1. 设计一个接受输入并计算张量降维的层,它返回$y_k = \\sum_{i, j} W_{ijk} x_i x_j$。\n", + "1. 设计一个返回输入数据的傅立叶系数前半部分的层。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 35, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/1835)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.11" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/model-construction.ipynb" "b/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/model-construction.ipynb" new file mode 100644 index 0000000..393d0f4 --- /dev/null +++ "b/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/model-construction.ipynb" @@ 
-0,0 +1,643 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 层和块\n", + ":label:`sec_model_construction`\n", + "\n", + "之前首次介绍神经网络时,我们关注的是具有单一输出的线性模型。\n", + "在这里,整个模型只由一个输出。\n", + "注意,单个神经网络\n", + "(1)接受一些输入;\n", + "(2)生成相应的标量输出;\n", + "(3)具有一组相关 *参数*(parameters),更新这些参数可以优化某目标函数。\n", + "\n", + "然后,当考虑具有多个输出的网络时,\n", + "我们利用矢量化算法来描述整层神经元。\n", + "像单个神经元一样,层(1)接受一组输入,\n", + "(2)生成相应的输出,\n", + "(3)由一组可调整参数描述。\n", + "当我们使用softmax回归时,一个单层本身就是模型。\n", + "然而,即使我们随后引入了多层感知机,我们仍然可以认为该模型保留了上面所说的基本架构。\n", + "\n", + "对于多层感知机而言,整个模型及其组成层都是这种架构。\n", + "整个模型接受原始输入(特征),生成输出(预测),\n", + "并包含一些参数(所有组成层的参数集合)。\n", + "同样,每个单独的层接收输入(由前一层提供),\n", + "生成输出(到下一层的输入),并且具有一组可调参数,\n", + "这些参数根据从下一层反向传播的信号进行更新。\n", + "\n", + "事实证明,研究讨论“比单个层大”但“比整个模型小”的组件更有价值。\n", + "例如,在计算机视觉中广泛流行的ResNet-152架构就有数百层,\n", + "这些层是由*层组*(groups of layers)的重复模式组成。\n", + "这个ResNet架构赢得了2015年ImageNet和COCO计算机视觉比赛\n", + "的识别和检测任务 :cite:`He.Zhang.Ren.ea.2016`。\n", + "目前ResNet架构仍然是许多视觉任务的首选架构。\n", + "在其他的领域,如自然语言处理和语音,\n", + "层组以各种重复模式排列的类似架构现在也是普遍存在。\n", + "\n", + "为了实现这些复杂的网络,我们引入了神经网络*块*的概念。\n", + "*块*(block)可以描述单个层、由多个层组成的组件或整个模型本身。\n", + "使用块进行抽象的一个好处是可以将一些块组合成更大的组件,\n", + "这一过程通常是递归的,如 :numref:`fig_blocks`所示。\n", + "通过定义代码来按需生成任意复杂度的块,\n", + "我们可以通过简洁的代码实现复杂的神经网络。\n", + "\n", + "![多个层被组合成块,形成更大的模型](../img/blocks.svg)\n", + ":label:`fig_blocks`\n", + "\n", + "从编程的角度来看,块由*类*(class)表示。\n", + "它的任何子类都必须定义一个将其输入转换为输出的前向传播函数,\n", + "并且必须存储任何必需的参数。\n", + "注意,有些块不需要任何参数。\n", + "最后,为了计算梯度,块必须具有反向传播函数。\n", + "在定义我们自己的块时,由于自动微分(在 :numref:`sec_autograd` 中引入)\n", + "提供了一些后端实现,我们只需要考虑前向传播函数和必需的参数。\n", + "\n", + "在构造自定义块之前,(**我们先回顾一下多层感知机**)\n", + "( :numref:`sec_mlp_concise` )的代码。\n", + "下面的代码生成一个网络,其中包含一个具有256个单元和ReLU激活函数的全连接隐藏层,\n", + "然后是一个具有10个隐藏单元且不带激活函数的全连接输出层。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[ 0.0557, -0.2469, 
0.0089, 0.0078, 0.1052, 0.1832, -0.0727, -0.4046,\n", + " 0.2297, -0.0177],\n", + " [ 0.1933, -0.1326, 0.1534, 0.1434, -0.0235, 0.1264, -0.1059, -0.3951,\n", + " 0.2697, -0.0394]], grad_fn=)" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import torch\n", + "from torch import nn\n", + "from torch.nn import functional as F\n", + "\n", + "net = nn.Sequential(nn.Linear(20, 256), nn.ReLU(), nn.Linear(256, 10))\n", + "\n", + "X = torch.rand(2, 20)# 随机初始化X,X大小2 *20\n", + "net(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 5, + "tab": [ + "pytorch" + ] + }, + "source": [ + "在这个例子中,我们通过实例化`nn.Sequential`来构建我们的模型,\n", + "层的执行顺序是作为参数传递的。\n", + "简而言之,(**`nn.Sequential`定义了一种特殊的`Module`**),\n", + "即在PyTorch中表示一个块的类,\n", + "它维护了一个由`Module`组成的有序列表。\n", + "注意,两个全连接层都是`Linear`类的实例,\n", + "`Linear`类本身就是`Module`的子类。\n", + "另外,到目前为止,我们一直在通过`net(X)`调用我们的模型来获得模型的输出。\n", + "这实际上是`net.__call__(X)`的简写。\n", + "这个前向传播函数非常简单:\n", + "它将列表中的每个块连接在一起,将每个块的输出作为下一个块的输入。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 7 + }, + "source": [ + "## [**自定义块**]\n", + "\n", + "要想直观地了解块是如何工作的,最简单的方法就是自己实现一个。\n", + "在实现我们自定义块之前,我们简要总结一下每个块必须提供的基本功能:\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 9, + "tab": [ + "pytorch" + ] + }, + "source": [ + "1. 将输入数据作为其前向传播函数的参数。\n", + "1. 通过前向传播函数来生成输出。请注意,输出的形状可能与输入的形状不同。例如,我们上面模型中的第一个全连接的层接收一个20维的输入,但是返回一个维度为256的输出。\n", + "1. 计算其输出关于输入的梯度,可通过其反向传播函数进行访问。通常这是自动发生的。\n", + "1. 存储和访问前向传播计算所需的参数。\n", + "1. 
根据需要初始化模型参数。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 10 + }, + "source": [ + "在下面的代码片段中,我们从零开始编写一个块。\n", + "它包含一个多层感知机,其具有256个隐藏单元的隐藏层和一个10维输出层。\n", + "注意,下面的`MLP`类继承了表示块的类。\n", + "我们的实现只需要提供我们自己的构造函数(Python中的`__init__`函数)和前向传播函数。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 12, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "class MLP(nn.Module):\n", + " # 重写__init__方法,用模型参数声明层,我们声明两个全连接的层\n", + " def __init__(self):\n", + " # 调用MLP的父类Module的构造函数来执行必要的初始化。\n", + " # 这样,在类实例化时也可以指定其他函数参数,例如模型参数params(稍后将介绍)\n", + " super().__init__()# 父类构造函数\n", + " self.hidden = nn.Linear(20, 256) # 隐藏层\n", + " self.out = nn.Linear(256, 10) # 输出层\n", + "\n", + " # 定义模型的前向传播,即如何根据输入X返回所需的模型输出\n", + " def forward(self, X):\n", + " # 注意,这里我们使用ReLU的函数版本,其在nn.functional模块中定义。\n", + " return self.out(F.relu(self.hidden(X)))# 构建网络" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 14 + }, + "source": [ + "我们首先看一下前向传播函数,它以`X`作为输入,\n", + "计算带有激活函数的隐藏表示,并输出其未规范化的输出值。\n", + "在这个`MLP`实现中,两个层都是实例变量。\n", + "要了解这为什么是合理的,可以想象实例化两个多层感知机(`net1`和`net2`),\n", + "并根据不同的数据对它们进行训练。\n", + "当然,我们希望它们学到两种不同的模型。\n", + "\n", + "接着我们[**实例化多层感知机的层,然后在每次调用前向传播函数时调用这些层**]。\n", + "注意一些关键细节:\n", + "首先,我们定制的`__init__`函数通过`super().__init__()`\n", + "调用父类的`__init__`函数,\n", + "省去了重复编写模版代码的痛苦。\n", + "然后,我们实例化两个全连接层,\n", + "分别为`self.hidden`和`self.out`。\n", + "注意,除非我们实现一个新的运算符,\n", + "否则我们不必担心反向传播函数或参数初始化,\n", + "系统将自动生成这些。\n", + "\n", + "我们来试一下这个函数:\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 16, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[-0.1515, 0.0797, 0.0325, -0.1259, -0.1020, -0.0129, 0.0294, 0.0134,\n", + " -0.1724, 0.1959],\n", + " [-0.0858, 0.1295, 0.0952, -0.0216, 0.0174, 0.0534, 0.0705, 0.1240,\n", + " -0.1096, -0.0396]], grad_fn=)" + ] + }, + "execution_count": 3, + "metadata": {}, + 
"output_type": "execute_result" + } + ], + "source": [ + "net = MLP() # 实例化MLP\n", + "net(X) # 将X输入网络进行运算" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 18 + }, + "source": [ + "块的一个主要优点是它的多功能性。\n", + "我们可以子类化块以创建层(如全连接层的类)、\n", + "整个模型(如上面的`MLP`类)或具有中等复杂度的各种组件。\n", + "我们在接下来的章节中充分利用了这种多功能性,\n", + "比如在处理卷积神经网络时。\n", + "\n", + "## [**顺序块**]\n", + "\n", + "现在我们可以更仔细地看看`Sequential`类是如何工作的,\n", + "回想一下`Sequential`的设计是为了把其他模块串起来。\n", + "为了构建我们自己的简化的`MySequential`,\n", + "我们只需要定义两个关键函数:\n", + "\n", + "1. 一种将块逐个追加到列表中的函数。\n", + "1. 一种前向传播函数,用于将输入按追加块的顺序传递给块组成的“链条”。\n", + "\n", + "下面的`MySequential`类提供了与默认`Sequential`类相同的功能。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 20, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "class MySequential(nn.Module):\n", + " # 重写__init__方法,将模型名称的序列作为参数传入\n", + " def __init__(self, *args):\n", + " super().__init__()# 父类构造函数\n", + " for idx, module in enumerate(args):\n", + " # 这里,module是Module子类的一个实例。我们把它保存在'Module'类的成员变量_modules中。module的类型是OrderedDict\n", + " self._modules[str(idx)] = module # 按顺序将module插入\n", + "\n", + " def forward(self, X):\n", + " # OrderedDict保证了按照成员添加的顺序遍历它们\n", + " for block in self._modules.values():# 遍历_modules的成员,顺序调用每个模块\n", + " X = block(X)# 迭代处理X\n", + " return X" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 23, + "tab": [ + "pytorch" + ] + }, + "source": [ + "`__init__`函数将每个模块逐个添加到有序字典`_modules`中。\n", + "你可能会好奇为什么每个`Module`都有一个`_modules`属性?\n", + "以及为什么我们使用它而不是自己定义一个Python列表?\n", + "简而言之,`_modules`的主要优点是:\n", + "在模块的参数初始化过程中,\n", + "系统知道在`_modules`字典中查找需要初始化参数的子块。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "#测试如果改为用list存储各个层\n", + "class MySequential2(nn.Module):\n", + " def __init__(self,*args):\n", + " super().__init__()\n", + " self.ms=[]#存储所用List\n", + " for idx,module in enumerate(args):\n", + " 
self.ms.append(module)\n", + " def forward(self,X):\n", + " for block in self.ms:\n", + " X=block(X)\n", + " return X" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MySequential(\n", + " (0): Linear(in_features=20, out_features=256, bias=True)\n", + " (1): ReLU()\n", + " (2): Linear(in_features=256, out_features=10, bias=True)\n", + ")\n", + "MySequential2()\n", + "MLP(\n", + " (hidden): Linear(in_features=20, out_features=256, bias=True)\n", + " (out): Linear(in_features=256, out_features=10, bias=True)\n", + ")\n", + "OrderedDict([('hidden', Linear(in_features=20, out_features=256, bias=True)), ('out', Linear(in_features=256, out_features=10, bias=True))])\n" + ] + } + ], + "source": [ + "net=MySequential(nn.Linear(20,256),nn.ReLU(),nn.Linear(256,10))\n", + "net2=MySequential2(nn.Linear(20,256),nn.ReLU(),nn.Linear(256,10))\n", + "print(net)#能输出\n", + "print(net2)#输出为空\n", + "net=MLP()\n", + "print(net)#输出成功\n", + "print(net._modules)#自动生成了_modules有序字典" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 24 + }, + "source": [ + "当`MySequential`的前向传播函数被调用时,\n", + "每个添加的块都按照它们被添加的顺序执行。\n", + "现在可以使用我们的`MySequential`类重新实现多层感知机。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 26, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[ 0.2252, 0.0363, -0.1693, 0.0870, 0.0077, -0.0833, 0.0181, 0.0321,\n", + " 0.2099, -0.2846],\n", + " [ 0.0971, 0.0553, -0.1123, 0.1500, 0.0153, -0.0618, -0.0417, -0.0683,\n", + " 0.1763, -0.1938]], grad_fn=)" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net = MySequential(nn.Linear(20, 256), nn.ReLU(), nn.Linear(256, 10)) # 实例化MySequential\n", + "net(X) # 将X输入网络进行运算" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 28 + }, + "source": [ 
+ "请注意,`MySequential`的用法与之前为`Sequential`类编写的代码相同\n", + "(如 :numref:`sec_mlp_concise` 中所述)。\n", + "\n", + "## [**在前向传播函数中执行代码**]\n", + "\n", + "`Sequential`类使模型构造变得简单,\n", + "允许我们组合新的架构,而不必定义自己的类。\n", + "然而,并不是所有的架构都是简单的顺序架构。\n", + "当需要更强的灵活性时,我们需要定义自己的块。\n", + "例如,我们可能希望在前向传播函数中执行Python的控制流。\n", + "此外,我们可能希望执行任意的数学运算,\n", + "而不是简单地依赖预定义的神经网络层。\n", + "\n", + "到目前为止,\n", + "我们网络中的所有操作都对网络的激活值及网络的参数起作用。\n", + "然而,有时我们可能希望合并既不是上一层的结果也不是可更新参数的项,\n", + "我们称之为*常数参数*(constant parameter)。\n", + "例如,我们需要一个计算函数\n", + "$f(\\mathbf{x},\\mathbf{w}) = c \\cdot \\mathbf{w}^\\top \\mathbf{x}$的层,\n", + "其中$\\mathbf{x}$是输入,\n", + "$\\mathbf{w}$是参数,\n", + "$c$是某个在优化过程中没有更新的指定常量。\n", + "因此我们实现了一个`FixedHiddenMLP`类,如下所示:\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 30, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "# 在__init__和forward中进行自定义计算\n", + "class FixedHiddenMLP(nn.Module):\n", + " def __init__(self):\n", + " super().__init__()\n", + " # 不计算梯度的随机权重参数。因此其在训练期间保持不变\n", + " self.rand_weight = torch.rand((20, 20), requires_grad=False)\n", + " self.linear = nn.Linear(20, 20)\n", + "\n", + " def forward(self, X):\n", + " X = self.linear(X)\n", + " # 使用创建的常量参数以及relu和mm函数\n", + " X = F.relu(torch.mm(X, self.rand_weight) + 1)\n", + " # 复用全连接层。这相当于两个全连接层共享参数\n", + " X = self.linear(X)\n", + " # 控制流\n", + " while X.abs().sum() > 1:\n", + " X /= 2\n", + " return X.sum()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 32 + }, + "source": [ + "在这个`FixedHiddenMLP`模型中,我们实现了一个隐藏层,\n", + "其权重(`self.rand_weight`)在实例化时被随机初始化,之后为常量。\n", + "这个权重不是一个模型参数,因此它永远不会被反向传播更新。\n", + "然后,神经网络将这个固定层的输出通过一个全连接层。\n", + "\n", + "注意,在返回输出之前,模型做了一些不寻常的事情:\n", + "它运行了一个while循环,在$L_1$范数大于$1$的条件下,\n", + "将输出向量除以$2$,直到它满足条件为止。\n", + "最后,模型返回了`X`中所有项的和。\n", + "注意,此操作可能不会常用于在任何实际任务中,\n", + "我们只是向你展示如何将任意代码集成到神经网络计算的流程中。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "origin_pos": 34, + "tab": [ + 
"pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(-0.0123, grad_fn=)" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net = FixedHiddenMLP()\n", + "net(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 35 + }, + "source": [ + "我们可以[**混合搭配各种组合块的方法**]。\n", + "在下面的例子中,我们以一些想到的方法嵌套块。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "origin_pos": 37, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(-0.3015, grad_fn=)" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# 定义嵌套的MLP\n", + "class NestMLP(nn.Module):\n", + " def __init__(self):\n", + " super().__init__()\n", + " # 将Linear, Sequnential嵌套入网络\n", + " self.net = nn.Sequential(nn.Linear(20, 64), nn.ReLU(),\n", + " nn.Linear(64, 32), nn.ReLU())\n", + " self.linear = nn.Linear(32, 16)\n", + "\n", + " def forward(self, X):\n", + " return self.linear(self.net(X))\n", + "\n", + "# Sequnential的输入可以是任何nn.Module的子类\n", + "# 将NestMLP, Linear, FixedHiddenMLP嵌套入Sequnential\n", + "chimera = nn.Sequential(NestMLP(), nn.Linear(16, 20), FixedHiddenMLP())\n", + "chimera(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 39 + }, + "source": [ + "## 效率\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 41, + "tab": [ + "pytorch" + ] + }, + "source": [ + "你可能会开始担心操作效率的问题。\n", + "毕竟,我们在一个高性能的深度学习库中进行了大量的字典查找、\n", + "代码执行和许多其他的Python代码。\n", + "Python的问题[全局解释器锁](https://wiki.python.org/moin/GlobalInterpreterLock)\n", + "是众所周知的。\n", + "在深度学习环境中,我们担心速度极快的GPU可能要等到CPU运行Python代码后才能运行另一个作业。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 43 + }, + "source": [ + "## 小结\n", + "\n", + "* 一个块可以由许多层组成;一个块可以由许多块组成。\n", + "* 块可以包含代码。\n", + "* 块负责大量的内部处理,包括参数初始化和反向传播。\n", + "* 层和块的顺序连接由`Sequential`块处理。\n", + "\n", + "## 练习\n", 
+ "\n", + "1. 如果将`MySequential`中存储块的方式更改为Python列表,会出现什么样的问题?\n", + "1. 实现一个块,它以两个块为参数,例如`net1`和`net2`,并返回前向传播中两个网络的串联输出。这也被称为平行块。\n", + "1. 假设你想要连接同一网络的多个实例。实现一个函数,该函数生成同一个块的多个实例,并在此基础上构建更大的网络。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 45, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/1827)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python [conda env:pytorch]", + "language": "python", + "name": "conda-env-pytorch-py" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.13" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/parameters.ipynb" "b/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/parameters.ipynb" new file mode 100644 index 0000000..4e1007b --- /dev/null +++ "b/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/parameters.ipynb" @@ -0,0 +1,787 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 参数管理\n", + "\n", + "在选择了架构并设置了超参数后,我们就进入了训练阶段。\n", + "此时,我们的目标是找到使损失函数最小化的模型参数值。\n", + "经过训练后,我们将需要使用这些参数来做出未来的预测。\n", + "此外,有时我们希望提取参数,以便在其他环境中复用它们,\n", + "将模型保存下来,以便它可以在其他软件中执行,\n", + "或者为了获得科学的理解而进行检查。\n", + "\n", + "之前的介绍中,我们只依靠深度学习框架来完成训练的工作,\n", + "而忽略了操作参数的具体细节。\n", + "本节,我们将介绍以下内容:\n", + "\n", + "* 访问参数,用于调试、诊断和可视化。\n", + "* 参数初始化。\n", + "* 在不同模型组件间共享参数。\n", + "\n", + "(**我们首先看一下具有单隐藏层的多层感知机。**)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0.3913],\n", + " [0.3100]], grad_fn=)" + ] + }, + 
"execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import torch\n", + "from torch import nn\n", + "\n", + "net = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 1))#使用Sequential定义网络\n", + "X = torch.rand(size=(2, 4))#随机生成输出\n", + "net(X)#通过网络计算输出" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "## [**参数访问**]\n", + "\n", + "我们从已有模型中访问参数。\n", + "当通过`Sequential`类定义模型时,\n", + "我们可以通过索引来访问模型的任意层。\n", + "这就像模型是一个列表一样,每层的参数都在其属性中。\n", + "如下所示,我们可以检查第二个全连接层的参数。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OrderedDict([('weight', tensor([[-0.2680, -0.3387, 0.0259, -0.0591, 0.1884, 0.2721, 0.1892, 0.2496]])), ('bias', tensor([0.3248]))])\n" + ] + } + ], + "source": [ + "print(net[2].state_dict())# 取得net第三模块参数" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "输出的结果告诉我们一些重要的事情:\n", + "首先,这个全连接层包含两个参数,分别是该层的权重和偏置。\n", + "两者都存储为单精度浮点数(float32)。\n", + "注意,参数名称允许唯一标识每个参数,即使在包含数百个层的网络中也是如此。\n", + "\n", + "### [**目标参数**]\n", + "\n", + "注意,每个参数都表示为参数类的一个实例。\n", + "要对参数执行任何操作,首先我们需要访问底层的数值。\n", + "有几种方法可以做到这一点。有些比较简单,而另一些则比较通用。\n", + "下面的代码从第二个全连接层(即第三个神经网络层)提取偏置,\n", + "提取后返回的是一个参数类实例,并进一步访问该参数的值。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 10, + "scrolled": false, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Parameter containing:\n", + "tensor([0.3248], requires_grad=True)\n", + "tensor([0.3248])\n" + ] + } + ], + "source": [ + "print(type(net[2].bias))# 第三层偏置的数据类型:torch.nn.parameter.Parameter\n", + "print(net[2].bias)#一个requires_grad为True的tensor\n", + "print(net[2].bias.data)#一个Tensor" + ] + }, + { + "cell_type": "markdown", + "metadata": { + 
"origin_pos": 12, + "tab": [ + "pytorch" + ] + }, + "source": [ + "参数是复合的对象,包含值、梯度和额外信息。\n", + "这就是我们需要显式参数值的原因。\n", + "除了值之外,我们还可以访问每个参数的梯度。\n", + "在上面这个网络中,由于我们还没有调用反向传播,所以参数的梯度处于初始状态。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 14, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net[2].weight.grad == None" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 15 + }, + "source": [ + "### [**一次性访问所有参数**]\n", + "\n", + "当我们需要对所有参数执行操作时,逐个访问它们可能会很麻烦。\n", + "当我们处理更复杂的块(例如,嵌套块)时,情况可能会变得特别复杂,\n", + "因为我们需要递归整个树来提取每个子块的参数。\n", + "下面,我们将通过演示来比较访问第一个全连接层的参数和访问所有层。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 17, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "('weight', torch.Size([8, 4])) ('bias', torch.Size([8]))\n", + "('0.weight', torch.Size([8, 4])) ('0.bias', torch.Size([8])) ('2.weight', torch.Size([1, 8])) ('2.bias', torch.Size([1]))\n" + ] + } + ], + "source": [ + "#*list可以在函数传参时将列表中的元素作为函数的参数传递\n", + "#函数named_parameters()可迭代,迭代器返回参数名和参水对象构成的元组\n", + "#此处迭代的访问每个参数,得到参数名和参数,组合成列表,再使用*逐一传入print函数输出\n", + "print(*[(name, param.shape) for name, param in net[0].named_parameters()])\n", + "print(*[(name, param.shape) for name, param in net.named_parameters()])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 19 + }, + "source": [ + "这为我们提供了另一种访问网络参数的方式,如下所示。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 21, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0.3248])" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net.state_dict()['2.bias'].data# 
指定层数和参数名,在字典中得到相应的值,关键字值为层数.参数名" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 23 + }, + "source": [ + "### [**从嵌套块收集参数**]\n", + "\n", + "让我们看看,如果我们将多个块相互嵌套,参数命名约定是如何工作的。\n", + "我们首先定义一个生成块的函数(可以说是“块工厂”),然后将这些块组合到更大的块中。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 25, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[-0.2676],\n", + " [-0.2679]], grad_fn=)" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def block1():\n", + " return nn.Sequential(nn.Linear(4, 8), nn.ReLU(),\n", + " nn.Linear(8, 4), nn.ReLU())\n", + "\n", + "def block2():\n", + " net = nn.Sequential()\n", + " for i in range(4):\n", + " # 在这里嵌套\n", + " net.add_module(f'block {i}', block1())#f''包起来的部分中的{}被视作表达式\n", + " return net\n", + "\n", + "rgnet = nn.Sequential(block2(), nn.Linear(4, 1))#再嵌套一层\n", + "rgnet(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 27 + }, + "source": [ + "[**设计了网络后,我们看看它是如何工作的。**]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 29, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sequential(\n", + " (0): Sequential(\n", + " (block 0): Sequential(\n", + " (0): Linear(in_features=4, out_features=8, bias=True)\n", + " (1): ReLU()\n", + " (2): Linear(in_features=8, out_features=4, bias=True)\n", + " (3): ReLU()\n", + " )\n", + " (block 1): Sequential(\n", + " (0): Linear(in_features=4, out_features=8, bias=True)\n", + " (1): ReLU()\n", + " (2): Linear(in_features=8, out_features=4, bias=True)\n", + " (3): ReLU()\n", + " )\n", + " (block 2): Sequential(\n", + " (0): Linear(in_features=4, out_features=8, bias=True)\n", + " (1): ReLU()\n", + " (2): Linear(in_features=8, out_features=4, bias=True)\n", + " (3): ReLU()\n", + " )\n", + " (block 3): Sequential(\n", + " 
(0): Linear(in_features=4, out_features=8, bias=True)\n", + " (1): ReLU()\n", + " (2): Linear(in_features=8, out_features=4, bias=True)\n", + " (3): ReLU()\n", + " )\n", + " )\n", + " (1): Linear(in_features=4, out_features=1, bias=True)\n", + ")\n" + ] + } + ], + "source": [ + "print(rgnet)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 31 + }, + "source": [ + "因为层是分层嵌套的,所以我们也可以像通过嵌套列表索引一样访问它们。\n", + "下面,我们访问第一个主要的块中、第二个子块的第一层的偏置项。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "origin_pos": 33, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([-0.3981, -0.4271, -0.3701, 0.1086, -0.1413, 0.3649, 0.0478, -0.4952])" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rgnet[0][1][0].bias.data" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 35 + }, + "source": [ + "## 参数初始化\n", + "\n", + "知道了如何访问参数后,现在我们看看如何正确地初始化参数。\n", + "我们在 :numref:`sec_numerical_stability`中讨论了良好初始化的必要性。\n", + "深度学习框架提供默认随机初始化,\n", + "也允许我们创建自定义初始化方法,\n", + "满足我们通过其他规则实现初始化权重。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 37, + "tab": [ + "pytorch" + ] + }, + "source": [ + "默认情况下,PyTorch会根据一个范围均匀地初始化权重和偏置矩阵,\n", + "这个范围是根据输入和输出维度计算出的。\n", + "PyTorch的`nn.init`模块提供了多种预置初始化方法。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 39 + }, + "source": [ + "### [**内置初始化**]\n", + "\n", + "让我们首先调用内置的初始化器。\n", + "下面的代码将所有权重参数初始化为标准差为0.01的高斯随机变量,\n", + "且将偏置参数设置为0。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "origin_pos": 41, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([-0.0053, 0.0067, 0.0123, 0.0004]), tensor(0.))" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def init_normal(m):#输入为一个层,类型为Module的子类\n", + " if type(m) == 
nn.Linear:#对于所有线性层\n", + " nn.init.normal_(m.weight, mean=0, std=0.01)#期望0,方差0.01的正态分布\n", + " nn.init.zeros_(m.bias)#全部置零\n", + "net.apply(init_normal)#递归的对于每个模块调用函数\n", + "net[0].weight.data[0], net[0].bias.data[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 43 + }, + "source": [ + "我们还可以将所有参数初始化为给定的常数,比如初始化为1。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "origin_pos": 45, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([1., 1., 1., 1.]), tensor(0.))" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def init_constant(m):\n", + " if type(m) == nn.Linear:\n", + " nn.init.constant_(m.weight, 1)#初始化为常数1\n", + " nn.init.zeros_(m.bias)\n", + "net.apply(init_constant)\n", + "net[0].weight.data[0], net[0].bias.data[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 47 + }, + "source": [ + "我们还可以[**对某些块应用不同的初始化方法**]。\n", + "例如,下面我们使用Xavier初始化方法初始化第一个神经网络层,\n", + "然后将第三个神经网络层初始化为常量值42。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "origin_pos": 49, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([-0.4398, -0.2092, 0.2993, 0.2878])\n", + "tensor([[42., 42., 42., 42., 42., 42., 42., 42.]])\n" + ] + } + ], + "source": [ + "def xavier(m):\n", + " if type(m) == nn.Linear:\n", + " nn.init.xavier_uniform_(m.weight)#使用均匀分布进行xavier初始化\n", + "def init_42(m):\n", + " if type(m) == nn.Linear:\n", + " nn.init.constant_(m.weight, 42)#初始化为常数42\n", + "\n", + "net[0].apply(xavier)\n", + "net[2].apply(init_42)\n", + "print(net[0].weight.data[0])\n", + "print(net[2].weight.data)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 51 + }, + "source": [ + "### [**自定义初始化**]\n", + "\n", + "有时,深度学习框架没有提供我们需要的初始化方法。\n", + "在下面的例子中,我们使用以下的分布为任意权重参数$w$定义初始化方法:\n", + "\n", + 
"$$\n", + "\\begin{aligned}\n", + " w \\sim \\begin{cases}\n", + " U(5, 10) & \\text{ 可能性 } \\frac{1}{4} \\\\\n", + " 0 & \\text{ 可能性 } \\frac{1}{2} \\\\\n", + " U(-10, -5) & \\text{ 可能性 } \\frac{1}{4}\n", + " \\end{cases}\n", + "\\end{aligned}\n", + "$$\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 53, + "tab": [ + "pytorch" + ] + }, + "source": [ + "同样,我们实现了一个`my_init`函数来应用到`net`。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "origin_pos": 56, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Init weight torch.Size([8, 4])\n", + "Init weight torch.Size([1, 8])\n" + ] + }, + { + "data": { + "text/plain": [ + "tensor([[-0.0000, -8.5068, 0.0000, 6.6374],\n", + " [ 0.0000, 5.2751, -7.4942, 5.9510]], grad_fn=)" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def my_init(m):\n", + " if type(m) == nn.Linear:\n", + " #取所有参数的(名字,参数)元组构成列表的第一项,将元组元素分别作为参数传入print函数\n", + " print(\"Init\", *[(name, param.shape) for name, param in m.named_parameters()][0])\n", + " nn.init.uniform_(m.weight, -10, 10)# 权重在-10,10上服从均匀分布\n", + " m.weight.data *= m.weight.data.abs()>=5 # 保留权重绝对值大于5的值\n", + "\n", + "net.apply(my_init)\n", + "net[0].weight[:2]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 58 + }, + "source": [ + "注意,我们始终可以直接设置参数。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "origin_pos": 60, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([42.0000, -7.5068, 1.0000, 7.6374])" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net[0].weight.data[:] += 1\n", + "net[0].weight.data[0, 0] = 42\n", + "net[0].weight.data[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 63 + }, + "source": [ + "## [**参数绑定**]\n", 
+ "\n", + "有时我们希望在多个层间共享参数:\n", + "我们可以定义一个稠密层,然后使用它的参数来设置另一个层的参数。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "origin_pos": 65, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([True, True, True, True, True, True, True, True])\n", + "tensor([True, True, True, True, True, True, True, True])\n" + ] + } + ], + "source": [ + "# 我们需要给共享层一个名称,以便可以引用它的参数\n", + "shared = nn.Linear(8, 8)\n", + "net = nn.Sequential(nn.Linear(4, 8), nn.ReLU(),\n", + " shared, nn.ReLU(),\n", + " shared, nn.ReLU(),\n", + " nn.Linear(8, 1))\n", + "net(X)\n", + "# 检查参数是否相同\n", + "print(net[2].weight.data[0] == net[4].weight.data[0])\n", + "net[2].weight.data[0, 0] = 100#修改其中一个的一个位置,如果二者实际上是同一对象,则修改一个另一个也会改变\n", + "# 确保它们实际上是同一个对象,而不只是有相同的值\n", + "print(net[2].weight.data[0] == net[4].weight.data[0])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 68, + "tab": [ + "pytorch" + ] + }, + "source": [ + "这个例子表明第三个和第五个神经网络层的参数是绑定的。\n", + "它们不仅值相等,而且由相同的张量表示。\n", + "因此,如果我们改变其中一个参数,另一个参数也会改变。\n", + "你可能会思考:当参数绑定时,梯度会发生什么情况?\n", + "答案是由于模型参数包含梯度,因此在反向传播期间第二个隐藏层\n", + "(即第三个神经网络层)和第三个隐藏层(即第五个神经网络层)的梯度会加在一起。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 69 + }, + "source": [ + "## 小结\n", + "\n", + "* 我们有几种方法可以访问、初始化和绑定模型参数。\n", + "* 我们可以使用自定义初始化方法。\n", + "\n", + "## 练习\n", + "\n", + "1. 使用 :numref:`sec_model_construction` 中定义的`FancyMLP`模型,访问各个层的参数。\n", + "1. 查看初始化模块文档以了解不同的初始化方法。\n", + "1. 构建包含共享参数层的多层感知机并对其进行训练。在训练过程中,观察模型各层的参数和梯度。\n", + "1. 
为什么共享参数是个好主意?\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 71, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/1829)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.11" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/read-write.ipynb" "b/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/read-write.ipynb" new file mode 100644 index 0000000..8e5c639 --- /dev/null +++ "b/code/16-Pytorch\347\245\236\347\273\217\347\275\221\347\273\234\345\237\272\347\241\200/read-write.ipynb" @@ -0,0 +1,356 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 读写文件\n", + "\n", + "到目前为止,我们讨论了如何处理数据,\n", + "以及如何构建、训练和测试深度学习模型。\n", + "然而,有时我们希望保存训练的模型,\n", + "以备将来在各种环境中使用(比如在部署中进行预测)。\n", + "此外,当运行一个耗时较长的训练过程时,\n", + "最佳的做法是定期保存中间结果,\n", + "以确保在服务器电源被不小心断掉时,我们不会损失几天的计算结果。\n", + "因此,现在是时候学习如何加载和存储权重向量和整个模型了。\n", + "\n", + "## (**加载和保存张量**)\n", + "\n", + "对于单个张量,我们可以直接调用`load`和`save`函数分别读写它们。\n", + "这两个函数都要求我们提供一个名称,`save`要求将要保存的变量作为输入。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from torch.nn import functional as F\n", + "\n", + "x = torch.arange(4)\n", + "torch.save(x, 'x-file')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "我们现在可以将存储在文件中的数据读回内存。\n" + ] + }, + { + "cell_type": 
"code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0, 1, 2, 3])" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "x2 = torch.load('x-file')\n", + "x2" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "我们可以[**存储一个张量列表,然后把它们读回内存。**]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 10, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([0, 1, 2, 3]), tensor([0., 0., 0., 0.]))" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "y = torch.zeros(4)\n", + "torch.save([x, y],'x-files')\n", + "x2, y2 = torch.load('x-files')\n", + "(x2, y2)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 12 + }, + "source": [ + "我们甚至可以(**写入或读取从字符串映射到张量的字典**)。\n", + "当我们要读取或写入模型中的所有权重时,这很方便。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 14, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{'x': tensor([0, 1, 2, 3]), 'y': tensor([0., 0., 0., 0.])}" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "mydict = {'x': x, 'y': y}\n", + "torch.save(mydict, 'mydict')\n", + "mydict2 = torch.load('mydict')\n", + "mydict2" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 16 + }, + "source": [ + "## [**加载和保存模型参数**]\n", + "\n", + "保存单个权重向量(或其他张量)确实有用,\n", + "但是如果我们想保存整个模型,并在以后加载它们,\n", + "单独保存每个向量则会变得很麻烦。\n", + "毕竟,我们可能有数百个参数散布在各处。\n", + "因此,深度学习框架提供了内置函数来保存和加载整个网络。\n", + "需要注意的一个重要细节是,这将保存模型的参数而不是保存整个模型。\n", + "例如,如果我们有一个3层多层感知机,我们需要单独指定架构。\n", + "因为模型本身可以包含任意代码,所以模型本身难以序列化。\n", + "因此,为了恢复模型,我们需要用代码生成架构,\n", + "然后从磁盘加载参数。\n", + "让我们从熟悉的多层感知机开始尝试一下。\n" + ] + }, 
+ { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 18, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#定义多层感知机\n", + "class MLP(nn.Module):\n", + " def __init__(self):\n", + " super().__init__()\n", + " self.hidden = nn.Linear(20, 256)\n", + " self.output = nn.Linear(256, 10)\n", + "\n", + " def forward(self, x):\n", + " return self.output(F.relu(self.hidden(x)))\n", + "\n", + "net = MLP()#实例化\n", + "X = torch.randn(size=(2, 20))#随机输入\n", + "Y = net(X)#得到对应输出" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 20 + }, + "source": [ + "接下来,我们[**将模型的参数存储在一个叫做“mlp.params”的文件中。**]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 22, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "torch.save(net.state_dict(), 'mlp.params')#将所有参数写入字典并保存为文件" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 24 + }, + "source": [ + "为了恢复模型,我们[**实例化了原始多层感知机模型的一个备份。**]\n", + "这里我们不需要随机初始化模型参数,而是(**直接读取文件中存储的参数。**)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 26, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "MLP(\n", + " (hidden): Linear(in_features=20, out_features=256, bias=True)\n", + " (output): Linear(in_features=256, out_features=10, bias=True)\n", + ")" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "clone = MLP()#另一个多层感知机,参数并未学习\n", + "clone.load_state_dict(torch.load('mlp.params'))#读取文件并写入模型(覆盖初始化的参数)\n", + "clone.eval()#设置为评估模式,保持参数不变" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 28 + }, + "source": [ + "由于两个实例具有相同的模型参数,在输入相同的`X`时,\n", + "两个实例的计算结果应该相同。\n", + "让我们来验证一下。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 30, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + 
"tensor([[True, True, True, True, True, True, True, True, True, True],\n", + " [True, True, True, True, True, True, True, True, True, True]])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Y_clone = clone(X)\n", + "Y_clone == Y" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 32 + }, + "source": [ + "## 小结\n", + "\n", + "* `save`和`load`函数可用于张量对象的文件读写。\n", + "* 我们可以通过参数字典保存和加载网络的全部参数。\n", + "* 保存架构必须在代码中完成,而不是在参数中完成。\n", + "\n", + "## 练习\n", + "\n", + "1. 即使不需要将经过训练的模型部署到不同的设备上,存储模型参数还有什么实际的好处?\n", + "1. 假设我们只想复用网络的一部分,以将其合并到不同的网络架构中。比如说,如果你想在一个新的网络中使用之前网络的前两层,你该怎么做?\n", + "1. 如何同时保存网络架构和参数?你会对架构加上什么限制?\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 34, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/1839)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.11" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/17-\344\275\277\347\224\250\345\222\214\350\264\255\344\271\260GPU.ipynb" "b/code/17-\344\275\277\347\224\250\345\222\214\350\264\255\344\271\260GPU.ipynb" new file mode 100644 index 0000000..244bd65 --- /dev/null +++ "b/code/17-\344\275\277\347\224\250\345\222\214\350\264\255\344\271\260GPU.ipynb" @@ -0,0 +1,221 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "79fae070", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "527e2f88", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "94442ac9", + 
"metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import torch\n", + "from torch import nn\n", + "\n", + "\"\"\"指定cpu, gpu设备\"\"\"\n", + "torch.device('cpu'), torch.device('cuda'), torch.device('cuda:1')\n", + "# cpu, gpu0, gpu1\n", + "\n", + "\"\"\"查询可用gpu数量\"\"\"\n", + "torch.cuda.device_count()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "85a5d238", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(device(type='cpu'), device(type='cpu'), [device(type='cpu')])" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "\"\"\"定义了两个方便的函数, 这两个函数允许我们在不存在所需所有GPU的情况下运行代码\"\"\"\n", + "\n", + "\n", + "\"\"\"如果存在,则返回gpu(i),否则返回cpu()\"\"\"\n", + "def try_gpu(i=0):\n", + "# 不输入参数则默认i = 0 \n", + " if torch.cuda.device_count() >= i + 1:\n", + " return torch.device(f'cuda:{i}')\n", + " # 如果当前可用gpu的总数大于等于i+1,则返回第i个gpu(从0计数)\t\t\n", + " return torch.device('cpu')\n", + "\t# 否则证明当前没有更多可用gpu,则返回cpu\n", + "\n", + "\n", + "\"\"\"返回所有可用的GPU,如果没有GPU,则返回[cpu(),]\"\"\"\n", + "def try_all_gpus():\n", + " devices = [torch.device(f'cuda:{i}')\n", + " for i in range(torch.cuda.device_count())]\n", + " # 所有可用gpu设备序号组成的列表devices\n", + " \n", + " return devices if devices else [torch.device('cpu')]\n", + "\t# 如果列表devices不为空则证明此时有可用的gpu,则返回可用gpu序号列表;否则证明没有可用gpu,则返回cpu\n", + "\n", + "try_gpu(), try_gpu(10), try_all_gpus()\n", + "# 测试函数功能\n", + "# try_gpu():检测是否有第i=0号gpu\n", + "# try_gpu(10):检测是否有第i=10号gpu\n", + "# try_all_gpus():返回所有可用gpu序号列表,如果没有gpu则返回cpu\n" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "295a6578", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "device(type='cpu')" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "\"\"\"我们可以查询张量所在的设备。 
默认情况下,张量是在CPU上创建的。\"\"\"\n", + "x = torch.tensor([1, 2, 3])\n", + "x.device" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "255462a4", + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"我们在第一个gpu上创建张量变量X\"\"\" \n", + "X = torch.ones(2, 3, device=try_gpu())\n", + "\n", + "\"\"\"假设你至少有两个GPU,下面的代码将在第二个GPU上创建一个随机张量\"\"\"\n", + "Y = torch.rand(2, 3, device=try_gpu(1))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8f72119d", + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"将gpu(0)中的X复制到gpu(1)中的Z\"\"\"\n", + "Z = X.cuda(1)\n", + "print(X)\n", + "print(Z)\n", + "\n", + "\n", + "\"\"\"现在数据在同一个GPU上(Z和Y都在),我们可以将它们相加。\"\"\"\n", + "Y + Z\n", + "\n", + "\n", + "\"\"\"如果变量Z已经存在于第i个GPU上,再调用Z.cuda(i)只会返回Z并不会复制并分配新内存\"\"\"\n", + "Z.cuda(1) is Z\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "e9861ae0", + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"\n", + "类似地,神经网络模型可以指定设备。 下面的代码将模型参数放在GPU上。\n", + "\"\"\"\n", + "\n", + "net = nn.Sequential(nn.Linear(3, 1))\n", + "net = net.to(device=try_gpu())" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "0b17de5d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "device(type='cpu')" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net(X)\n", + "\n", + "net[0].weight.data.device\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "26d4de02", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} 
def comp_conv2d(conv2d, X):
    """Apply a conv layer to a 2-D input and return only the spatial result.

    Prepends singleton batch and channel dimensions to ``X`` (the layer
    expects NCHW input), runs the layer, then strips those two leading
    dimensions again so only (height, width) remains -- convenient for
    observing how padding and stride change the spatial shape.
    """
    batched = X.reshape((1, 1) + X.shape)  # -> (1, 1, H, W)
    out = conv2d(batched)
    # Drop the batch and channel axes before returning.
    return out.reshape(out.shape[2:])
], + "source": [ + "conv2d = nn.Conv2d(1, 1, kernel_size=(5, 3), padding=(2, 1))\n", + "comp_conv2d(conv2d, X).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### stride\n", + "将高度和宽度的步幅设置为2" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([4, 4])" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "conv2d = nn.Conv2d(1, 1, kernel_size=3, padding=1, stride=2)\n", + "comp_conv2d(conv2d, X).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "一个稍微复杂的例子" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 2])" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "conv2d = nn.Conv2d(1, 1, kernel_size=(3, 5), padding=(0, 1), stride=(3, 4))\n", + "comp_conv2d(conv2d, X).shape" + ] + } + ], + "metadata": { + "celltoolbar": "幻灯片", + "kernelspec": { + "display_name": "Python [conda env:root] *", + "language": "python", + "name": "conda-root-py" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/21-\345\244\232\350\276\223\345\205\245\350\276\223\345\207\272\351\200\232\351\201\223.ipynb" "b/code/21-\345\244\232\350\276\223\345\205\245\350\276\223\345\207\272\351\200\232\351\201\223.ipynb" new file mode 100644 index 0000000..c7d4d8e --- /dev/null +++ "b/code/21-\345\244\232\350\276\223\345\205\245\350\276\223\345\207\272\351\200\232\351\201\223.ipynb" @@ 
def corr2d_multi_in(X, K):
    """Multi-input-channel 2-D cross-correlation.

    Walks the channel (0th) dimension of input ``X`` and kernel ``K`` in
    lockstep, cross-correlates each channel pair, and sums the per-channel
    planes into a single 2-D result.
    """
    result = None
    for x_ch, k_ch in zip(X, K):
        plane = d2l.corr2d(x_ch, k_ch)
        # Accumulate the per-channel correlations.
        result = plane if result is None else result + plane
    return result
def corr2d_multi_in_out(X, K):
    """Multi-input, multi-output-channel 2-D cross-correlation.

    ``K`` has shape (c_o, c_i, k_h, k_w): each slice along dimension 0
    yields one output channel via a multi-input cross-correlation with
    ``X``; the per-channel results are stacked along a new 0th dimension.
    """
    channel_outputs = [corr2d_multi_in(X, kernel) for kernel in K]
    return torch.stack(channel_outputs, 0)
torch.stack((K, K + 1, K + 2), 0) #构造一个多输出通道的卷积核\n", + "K.shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 11 + }, + "source": [ + "下面,我们对输入张量`X`与卷积核张量`K`执行互相关运算。现在的输出包含$3$个通道,第一个通道的结果与先前输入张量`X`和多输入单输出通道的结果一致。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": { + "origin_pos": 12, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "ename": "ValueError", + "evalue": "too many values to unpack (expected 2)", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/var/folders/m2/mcjqb__94rgf7qkm22vhg9xh0000gn/T/ipykernel_17262/2068829854.py\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mcorr2d_multi_in_out\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mK\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/var/folders/m2/mcjqb__94rgf7qkm22vhg9xh0000gn/T/ipykernel_17262/439246277.py\u001b[0m in \u001b[0;36mcorr2d_multi_in_out\u001b[0;34m(X, K)\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;31m# 迭代“K”的第0个维度,每次都对输入“X”执行互相关运算。\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;31m# 最后将所有结果都叠加在一起\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstack\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mcorr2d_multi_in\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mk\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mK\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/var/folders/m2/mcjqb__94rgf7qkm22vhg9xh0000gn/T/ipykernel_17262/439246277.py\u001b[0m in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;31m# 迭代“K”的第0个维度,每次都对输入“X”执行互相关运算。\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;31m# 最后将所有结果都叠加在一起\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstack\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mcorr2d_multi_in\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mk\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mK\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/var/folders/m2/mcjqb__94rgf7qkm22vhg9xh0000gn/T/ipykernel_17262/1525835610.py\u001b[0m in \u001b[0;36mcorr2d_multi_in\u001b[0;34m(X, K)\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mcorr2d_multi_in\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mK\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;31m# 先遍历“X”和“K”的第0个维度(通道维度),再把它们加在一起\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msum\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0md2l\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcorr2d\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m 
\u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mK\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/var/folders/m2/mcjqb__94rgf7qkm22vhg9xh0000gn/T/ipykernel_17262/1525835610.py\u001b[0m in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mcorr2d_multi_in\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mK\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;31m# 先遍历“X”和“K”的第0个维度(通道维度),再把它们加在一起\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msum\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0md2l\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcorr2d\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mk\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mK\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/d2l/torch.py\u001b[0m in \u001b[0;36mcorr2d\u001b[0;34m(X, K)\u001b[0m\n\u001b[1;32m 462\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 463\u001b[0m Defined in :numref:`sec_conv_layer`\"\"\"\n\u001b[0;32m--> 464\u001b[0;31m \u001b[0mh\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mw\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mK\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 465\u001b[0m \u001b[0mY\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0md2l\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mX\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m-\u001b[0m \u001b[0mh\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mX\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m-\u001b[0m \u001b[0mw\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 466\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mY\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mValueError\u001b[0m: too many values to unpack (expected 2)" + ] + } + ], + "source": [ + "corr2d_multi_in_out(X, K)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 13 + }, + "source": [ + "## $1\\times 1$ 卷积层\n", + "\n", + "[~~1x1卷积~~]\n", + "\n", + "$1 \\times 1$卷积,即$k_h = k_w = 1$,看起来似乎没有多大意义。\n", + "毕竟,卷积的本质是有效提取相邻像素间的相关特征,而$1 \\times 1$卷积显然没有此作用。\n", + "尽管如此,$1 \\times 1$仍然十分流行,时常包含在复杂深层网络的设计中。下面,让我们详细地解读一下它的实际作用。\n", + "\n", + "因为使用了最小窗口,$1\\times 1$卷积失去了卷积层的特有能力——在高度和宽度维度上,识别相邻元素间相互作用的能力。\n", + "其实$1\\times 1$卷积的唯一计算发生在通道上。\n", + "\n", + " :numref:`fig_conv_1x1`展示了使用$1\\times 1$卷积核与$3$个输入通道和$2$个输出通道的互相关计算。\n", + "这里输入和输出具有相同的高度和宽度,输出中的每个元素都是从输入图像中同一位置的元素的线性组合。\n", + "我们可以将$1\\times 
def corr2d_multi_in_out_1x1(X, K):
    """1x1 convolution expressed as a single matrix multiplication.

    A 1x1 kernel mixes channels only, so flattening the spatial dimensions
    turns the convolution into a fully connected layer applied at every
    pixel position.
    """
    in_channels, height, width = X.shape
    out_channels = K.shape[0]
    pixels = X.reshape(in_channels, height * width)  # each column is one pixel
    weight = K.reshape(out_channels, in_channels)    # trailing 1x1 dims dropped
    mixed = weight @ pixels                          # per-pixel channel mix
    return mixed.reshape(out_channels, height, width)
corr2d_multi_in_out_1x1(X, K)\n", + "Y2 = corr2d_multi_in_out(X, K)\n", + "assert float(torch.abs(Y1 - Y2).sum()) < 1e-6 # 比较实用卷积方法和全连接层进行卷积的方法结果的差别\n", + "Y1,Y2" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 19 + }, + "source": [ + "## 小结\n", + "\n", + "* 多输入多输出通道可以用来扩展卷积层的模型。\n", + "* 当以每像素为基础应用时,$1\\times 1$卷积层相当于全连接层。\n", + "* $1\\times 1$卷积层通常用于调整网络层的通道数量和控制模型复杂性。\n", + "\n", + "## 练习\n", + "\n", + "1. 假设我们有两个卷积核,大小分别为$k_1$和$k_2$(中间没有非线性激活函数)。\n", + " 1. 证明运算可以用单次卷积来表示。\n", + " 1. 这个等效的单卷积的维数是多少呢?\n", + " 1. 反之亦然吗?\n", + "1. 假设输入为$c_i\\times h\\times w$,卷积核大小为$c_o\\times c_i\\times k_h\\times k_w$,填充为$(p_h, p_w)$,步幅为$(s_h, s_w)$。\n", + " 1. 前向传播的计算成本(乘法和加法)是多少?\n", + " 1. 内存占用是多少?\n", + " 1. 反向传播的内存占用是多少?\n", + " 1. 反向传播的计算成本是多少?\n", + "1. 如果我们将输入通道$c_i$和输出通道$c_o$的数量加倍,计算数量会增加多少?如果我们把填充数量翻一番会怎么样?\n", + "1. 如果卷积核的高度和宽度是$k_h=k_w=1$,前向传播的计算复杂度是多少?\n", + "1. 本节最后一个示例中的变量`Y1`和`Y2`是否完全相同?为什么?\n", + "1. 当卷积窗口不是$1\\times 1$时,如何使用矩阵乘法实现卷积?\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 21, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/1854)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.1" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/22-\346\261\240\345\214\226\345\261\202.ipynb" "b/code/22-\346\261\240\345\214\226\345\261\202.ipynb" new file mode 100644 index 0000000..7470289 --- /dev/null +++ "b/code/22-\346\261\240\345\214\226\345\261\202.ipynb" @@ -0,0 +1,460 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 池化层\n", + "\n", + 
def pool2d(X, pool_size, mode='max'):
    """2-D pooling over a single-channel input with stride 1.

    Slides a (p_h, p_w) window over ``X`` and, at each position, reduces
    the window to its maximum ('max') or its mean ('avg').

    Args:
        X: 2-D input tensor of shape (H, W).
        pool_size: (p_h, p_w), the pooling-window height and width.
        mode: 'max' for max pooling or 'avg' for average pooling.

    Returns:
        Tensor of shape (H - p_h + 1, W - p_w + 1).

    Raises:
        ValueError: if ``mode`` is neither 'max' nor 'avg'. (The original
        code silently returned an all-zero tensor for an unknown mode.)
    """
    if mode not in ('max', 'avg'):
        raise ValueError(f"mode must be 'max' or 'avg', got {mode!r}")
    p_h, p_w = pool_size
    Y = torch.zeros((X.shape[0] - p_h + 1, X.shape[1] - p_w + 1))
    for i in range(Y.shape[0]):
        for j in range(Y.shape[1]):
            # Output [i, j] summarises the input window
            # [i : i + p_h, j : j + p_w].
            window = X[i: i + p_h, j: j + p_w]
            Y[i, j] = window.max() if mode == 'max' else window.mean()
    return Y
[ + "默认情况下,(**深度学习框架中的步幅与池化窗口的大小相同**)。\n", + "因此,如果我们使用形状为`(3, 3)`的池化窗口,那么默认情况下,我们得到的步幅形状为`(3, 3)`。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 15, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[[10.]]]])" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pool2d = nn.MaxPool2d(3)\n", + "pool2d(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 17 + }, + "source": [ + "[**填充和步幅可以手动设定**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 19, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[[ 5., 7.],\n", + " [13., 15.]]]])" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pool2d = nn.MaxPool2d(3, padding=1, stride=2)\n", + "pool2d(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 22, + "tab": [ + "pytorch" + ] + }, + "source": [ + "当然,我们可以(**设定一个任意大小的矩形池化窗口,并分别设定填充和步幅的高度和宽度**)。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 25, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[[ 5., 7.],\n", + " [13., 15.]]]])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pool2d = nn.MaxPool2d((2, 3), stride=(2, 3), padding=(0, 1))\n", + "pool2d(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 27 + }, + "source": [ + "## 多个通道\n", + "\n", + "在处理多通道输入数据时,[**池化层在每个输入通道上单独运算**],而不是像卷积层一样在通道上对输入进行汇总。\n", + "这意味着池化层的输出通道数与输入通道数相同。\n", + "下面,我们将在通道维度上连结张量`X`和`X + 1`,以构建具有2个通道的输入。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "origin_pos": 29, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": 
[ + "tensor([[[[ 0., 1., 2., 3.],\n", + " [ 4., 5., 6., 7.],\n", + " [ 8., 9., 10., 11.],\n", + " [12., 13., 14., 15.]],\n", + "\n", + " [[ 1., 2., 3., 4.],\n", + " [ 5., 6., 7., 8.],\n", + " [ 9., 10., 11., 12.],\n", + " [13., 14., 15., 16.]]]])" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X = torch.cat((X, X + 1), 1) # 在第一个维度也就是通道维度拼接\n", + "X" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 31 + }, + "source": [ + "如下所示,池化后输出通道的数量仍然是2。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "origin_pos": 33, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[[ 5., 7.],\n", + " [13., 15.]],\n", + "\n", + " [[ 6., 8.],\n", + " [14., 16.]]]])" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pool2d = nn.MaxPool2d(3, padding=1, stride=2)\n", + "pool2d(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 36 + }, + "source": [ + "## 小结\n", + "\n", + "* 对于给定输入元素,最大池化层会输出该窗口内的最大值,平均池化层会输出该窗口内的平均值。\n", + "* 池化层的主要优点之一是减轻卷积层对位置的过度敏感。\n", + "* 我们可以指定池化层的填充和步幅。\n", + "* 使用最大池化层以及大于1的步幅,可减少空间维度(如高度和宽度)。\n", + "* 池化层的输出通道数与输入通道数相同。\n", + "\n", + "## 问题和练习\n", + "\n", + "1. 你能将平均池化层作为卷积层的特殊情况实现吗?\n", + ">设卷积层大小是$m\\times n$,卷积层里面每个元素参数是$\\dfrac{1} {m\\times n}$,这样就是一个平均池化层作为卷积层的实现\n", + "1. 
假设池化层的输入大小为$c\\times h\\times w$,则汇聚窗口的形状为$p_h\\times p_w$,填充为$(p_h, p_w)$,步幅为$(s_h, s_w)$。这个池化层的计算成本是多少?\n", + ">$ c\\times \\left \\lfloor \\dfrac {h-p_h+s_h}{s_h}\\right \\rfloor \\times \\left \\lfloor \\dfrac {w-p_w+s_w}{s_w}\\right \\rfloor $\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 38, + "tab": [ + "pytorch" + ] + }, + "source": [ + "\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + }, + "toc": { + "base_numbering": 1, + "nav_menu": {}, + "number_sections": true, + "sideBar": true, + "skip_h1_title": false, + "title_cell": "Table of Contents", + "title_sidebar": "Contents", + "toc_cell": false, + "toc_position": {}, + "toc_section_display": true, + "toc_window_display": false + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/code/23-LeNet.ipynb b/code/23-LeNet.ipynb new file mode 100644 index 0000000..5584551 --- /dev/null +++ b/code/23-LeNet.ipynb @@ -0,0 +1,1314 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 卷积神经网络(LeNet)\n", + ":label:`sec_lenet`\n", + "\n", + "通过之前几节,我们学习了构建一个完整卷积神经网络的所需组件。\n", + "回想一下,之前我们将softmax回归模型( :numref:`sec_softmax_scratch`)和多层感知机模型( :numref:`sec_mlp_scratch`)应用于Fashion-MNIST数据集中的服装图片。\n", + "为了能够应用softmax回归和多层感知机,我们首先将每个大小为$28\\times28$的图像展平为一个784维的固定长度的一维向量,然后用全连接层对其进行处理。\n", + "而现在,我们已经掌握了卷积层的处理方法,我们可以在图像中保留空间结构。\n", + "同时,用卷积层代替全连接层的另一个好处是:模型更简洁、所需的参数更少。\n", + "\n", + "在本节中,我们将介绍LeNet,它是最早发布的卷积神经网络之一,因其在计算机视觉任务中的高效性能而受到广泛关注。\n", + "这个模型是由AT&T贝尔实验室的研究员Yann LeCun在1989年提出的(并以其命名),目的是识别图像 :cite:`LeCun.Bottou.Bengio.ea.1998`中的手写数字。\n", + "当时,Yann 
LeCun发表了第一篇通过反向传播成功训练卷积神经网络的研究,这项工作代表了十多年来神经网络研究开发的成果。\n", + "\n", + "当时,LeNet取得了与支持向量机(support vector machines)性能相媲美的成果,成为监督学习的主流方法。\n", + "LeNet被广泛用于自动取款机(ATM)机中,帮助识别处理支票的数字。\n", + "时至今日,一些自动取款机仍在运行Yann LeCun和他的同事Leon Bottou在上世纪90年代写的代码呢!\n", + "\n", + "## LeNet\n", + "\n", + "总体来看,(**LeNet(LeNet-5)由两个部分组成:**)(~~卷积编码器和全连接层密集块~~)\n", + "\n", + "* 卷积编码器:由两个卷积层组成;\n", + "* 全连接层密集块:由三个全连接层组成。\n", + "\n", + "该架构如 :numref:`img_lenet`所示。\n", + "\n", + "![LeNet中的数据流。输入是手写数字,输出为10种可能结果的概率。](../img/lenet.svg)\n", + ":label:`img_lenet`\n", + "\n", + "每个卷积块中的基本单元是一个卷积层、一个sigmoid激活函数和平均汇聚层。请注意,虽然ReLU和最大汇聚层更有效,但它们在20世纪90年代还没有出现。每个卷积层使用$5\\times 5$卷积核和一个sigmoid激活函数。这些层将输入映射到多个二维特征输出,通常同时增加通道的数量。第一卷积层有6个输出通道,而第二个卷积层有16个输出通道。每个$2\\times2$池操作(步骤2)通过空间下采样将维数减少4倍。卷积的输出形状由批量大小、通道数、高度、宽度决定。\n", + "\n", + "为了将卷积块的输出传递给稠密块,我们必须在小批量中展平每个样本。换言之,我们将这个四维输入转换成全连接层所期望的二维输入。这里的二维表示的第一个维度索引小批量中的样本,第二个维度给出每个样本的平面向量表示。LeNet的稠密块有三个全连接层,分别有120、84和10个输出。因为我们仍在执行分类,所以输出层的10维对应于最后输出结果的数量。\n", + "\n", + "通过下面的LeNet代码,你会相信用深度学习框架实现此类模型非常简单。我们只需要实例化一个`Sequential`块并将需要的层连接在一起。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#导入所需的库\n", + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "#定义网络结构\n", + "net = nn.Sequential(\n", + " nn.Conv2d(1, 6, kernel_size=5, padding=2), nn.Sigmoid(),\n", + " nn.AvgPool2d(kernel_size=2, stride=2),\n", + " nn.Conv2d(6, 16, kernel_size=5), nn.Sigmoid(),\n", + " nn.AvgPool2d(kernel_size=2, stride=2),\n", + " nn.Flatten(),\n", + " nn.Linear(16 * 5 * 5, 120), nn.Sigmoid(),\n", + " nn.Linear(120, 84), nn.Sigmoid(),\n", + " nn.Linear(84, 10))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "我们对原始模型做了一点小改动,去掉了最后一层的高斯激活。除此之外,这个网络与最初的LeNet-5一致。\n", + "\n", + "下面,我们将一个大小为$28 \\times 28$的单通道(黑白)图像通过LeNet。通过在每一层打印输出的形状,我们可以[**检查模型**],以确保其操作与我们期望的 :numref:`img_lenet_vert`一致。\n", + "\n", + 
"![LeNet 的简化版。](../img/lenet-vert.svg)\n", + ":label:`img_lenet_vert`\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Conv2d output shape: \t torch.Size([1, 6, 28, 28])\n", + "Sigmoid output shape: \t torch.Size([1, 6, 28, 28])\n", + "AvgPool2d output shape: \t torch.Size([1, 6, 14, 14])\n", + "Conv2d output shape: \t torch.Size([1, 16, 10, 10])\n", + "Sigmoid output shape: \t torch.Size([1, 16, 10, 10])\n", + "AvgPool2d output shape: \t torch.Size([1, 16, 5, 5])\n", + "Flatten output shape: \t torch.Size([1, 400])\n", + "Linear output shape: \t torch.Size([1, 120])\n", + "Sigmoid output shape: \t torch.Size([1, 120])\n", + "Linear output shape: \t torch.Size([1, 84])\n", + "Sigmoid output shape: \t torch.Size([1, 84])\n", + "Linear output shape: \t torch.Size([1, 10])\n" + ] + } + ], + "source": [ + "#把每一层数据的shape给打印出来\n", + "X = torch.rand(size=(1, 1, 28, 28), dtype=torch.float32)#创建符合要求的张量\n", + "for layer in net:\n", + " X = layer(X)#通过每一层\n", + " print(layer.__class__.__name__,'output shape: \\t',X.shape)#打印" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "请注意,在整个卷积块中,与上一层相比,每一层特征的高度和宽度都减小了。\n", + "第一个卷积层使用2个像素的填充,来补偿$5 \\times 5$卷积核导致的特征减少。\n", + "相反,第二个卷积层没有填充,因此高度和宽度都减少了4个像素。\n", + "随着层叠的上升,通道的数量从输入时的1个,增加到第一个卷积层之后的6个,再到第二个卷积层之后的16个。\n", + "同时,每个汇聚层的高度和宽度都减半。最后,每个全连接层减少维数,最终输出一个维数与结果分类数相匹配的输出。\n", + "\n", + "## 模型训练\n", + "\n", + "现在我们已经实现了LeNet,让我们看看[**LeNet在Fashion-MNIST数据集上的表现**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 9, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "batch_size = 256#批量大小\n", + "train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size=batch_size)#下载或加载数据集,得到训练和测试集的迭代对象" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 10 + }, + 
"source": [ + "虽然卷积神经网络的参数较少,但与深度的多层感知机相比,它们的计算成本仍然很高,因为每个参数都参与更多的乘法。\n", + "如果你有机会使用GPU,可以用它加快训练。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 11, + "tab": [ + "pytorch" + ] + }, + "source": [ + "为了进行评估,我们需要[**对**] :numref:`sec_softmax_scratch`中描述的(**`evaluate_accuracy`函数进行轻微的修改**)。\n", + "由于完整的数据集位于内存中,因此在模型使用GPU计算数据集之前,我们需要将其复制到显存中。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 13, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def evaluate_accuracy_gpu(net, data_iter, device=None): #@save\n", + " \"\"\"使用GPU计算模型在数据集上的精度\"\"\"\n", + " if isinstance(net, nn.Module):\n", + " net.eval() # 设置为评估模式\n", + " if not device:\n", + " device = next(iter(net.parameters())).device\n", + " # 正确预测的数量,总预测的数量\n", + " metric = d2l.Accumulator(2)#创建一个累加器,包含2个要累加的元素\n", + " with torch.no_grad():\n", + " for X, y in data_iter:\n", + " if isinstance(X, list):\n", + " # BERT微调所需的(之后将介绍)\n", + " X = [x.to(device) for x in X]\n", + " else:\n", + " X = X.to(device)\n", + " y = y.to(device)\n", + " metric.add(d2l.accuracy(net(X), y), y.numel())#把每一组数据预测结果正确的个数和长度累加\n", + " return metric[0] / metric[1]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 14 + }, + "source": [ + "[**为了使用GPU,我们还需要一点小改动**]。\n", + "与 :numref:`sec_softmax_scratch`中定义的`train_epoch_ch3`不同,在进行正向和反向传播之前,我们需要将每一小批量数据移动到我们指定的设备(例如GPU)上。\n", + "\n", + "如下所示,训练函数`train_ch6`也类似于 :numref:`sec_softmax_scratch`中定义的`train_ch3`。\n", + "由于我们将实现多层神经网络,因此我们将主要使用高级API。\n", + "以下训练函数假定从高级API创建的模型作为输入,并进行相应的优化。\n", + "我们使用在 :numref:`subsec_xavier`中介绍的Xavier随机初始化模型参数。\n", + "与全连接层一样,我们使用交叉熵损失函数和小批量随机梯度下降。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 16, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "def train_ch6(net, train_iter, test_iter, num_epochs, lr, device):\n", + " \"\"\"用GPU训练模型(在第六章定义)\"\"\"\n", + " def init_weights(m):\n", 
+ " if type(m) == nn.Linear or type(m) == nn.Conv2d:\n", + " nn.init.xavier_uniform_(m.weight)#对linear类型的层用xavier初始化\n", + " net.apply(init_weights)\n", + " print('training on', device)\n", + " net.to(device)\n", + " optimizer = torch.optim.SGD(net.parameters(), lr=lr)\n", + " loss = nn.CrossEntropyLoss()\n", + " animator = d2l.Animator(xlabel='epoch', xlim=[1, num_epochs],\n", + " legend=['train loss', 'train acc', 'test acc'])#动画需要\n", + " timer, num_batches = d2l.Timer(), len(train_iter)\n", + " for epoch in range(num_epochs):\n", + " # 训练损失之和,训练准确率之和,范例数\n", + " metric = d2l.Accumulator(3)\n", + " net.train()\n", + " for i, (X, y) in enumerate(train_iter):\n", + " timer.start()\n", + " optimizer.zero_grad()#梯度清零\n", + " X, y = X.to(device), y.to(device)\n", + " y_hat = net(X)#正向传播\n", + " l = loss(y_hat, y)#计算损失\n", + " l.backward()#反向传播\n", + " optimizer.step()#梯度下降\n", + " with torch.no_grad():\n", + " metric.add(l * X.shape[0], d2l.accuracy(y_hat, y), X.shape[0])#训练损失之和,训练准确率之和,范例数\n", + " timer.stop()\n", + " train_l = metric[0] / metric[2]\n", + " train_acc = metric[1] / metric[2]\n", + " if (i + 1) % (num_batches // 5) == 0 or i == num_batches - 1:\n", + " animator.add(epoch + (i + 1) / num_batches,\n", + " (train_l, train_acc, None))\n", + " test_acc = evaluate_accuracy_gpu(net, test_iter)#评估测试集的精度\n", + " animator.add(epoch + 1, (None, None, test_acc))\n", + " print(f'loss {train_l:.3f}, train acc {train_acc:.3f}, '\n", + " f'test acc {test_acc:.3f}')\n", + " print(f'{metric[2] * num_epochs / timer.sum():.1f} examples/sec '\n", + " f'on {str(device)}')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 18 + }, + "source": [ + "现在,我们[**训练和评估LeNet-5模型**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 19, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "loss 0.470, train acc 0.824, test acc 0.797\n", + "26005.4 examples/sec 
on cuda:0\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-19T19:20:19.241930\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.5.0, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "lr, num_epochs = 0.9, 10\n", + "train_ch6(net, train_iter, test_iter, num_epochs, lr, d2l.try_gpu())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 20 + }, + "source": [ + "## 小结\n", + "\n", + "* 卷积神经网络(CNN)是一类使用卷积层的网络。\n", + "* 在卷积神经网络中,我们组合使用卷积层、非线性激活函数和汇聚层。\n", + "* 为了构造高性能的卷积神经网络,我们通常对卷积层进行排列,逐渐降低其表示的空间分辨率,同时增加通道数。\n", + "* 在传统的卷积神经网络中,卷积块编码得到的表征在输出之前需由一个或多个全连接层进行处理。\n", + "* LeNet是最早发布的卷积神经网络之一。\n", + "\n", + "## 练习\n", + "\n", + "1. 将平均汇聚层替换为最大汇聚层,会发生什么?\n", + "1. 尝试构建一个基于LeNet的更复杂的网络,以提高其准确性。\n", + " 1. 调整卷积窗口大小。\n", + " 1. 调整输出通道的数量。\n", + " 1. 调整激活函数(如ReLU)。\n", + " 1. 调整卷积层的数量。\n", + " 1. 
调整全连接层的数量。\n", + " 1. 调整学习率和其他训练细节(例如,初始化和轮数)。\n", + "1. 在MNIST数据集上尝试以上改进的网络。\n", + "1. 显示不同输入(例如毛衣和外套)时,LeNet第一层和第二层的激活值。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 22, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/1860)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.0" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/code/24-AlexNet.ipynb b/code/24-AlexNet.ipynb new file mode 100644 index 0000000..85b9b2e --- /dev/null +++ b/code/24-AlexNet.ipynb @@ -0,0 +1,337 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 深度卷积神经网络(AlexNet)\n", + ":label:`sec_alexnet`\n", + "\n", + "在LeNet提出后,卷积神经网络在计算机视觉和机器学习领域中很有名气。但卷积神经网络并没有主导这些领域。这是因为虽然LeNet在小数据集上取得了很好的效果,但是在更大、更真实的数据集上训练卷积神经网络的性能和可行性还有待研究。事实上,在上世纪90年代初到2012年之间的大部分时间里,神经网络往往被其他机器学习方法超越,如支持向量机(support vector machines)。\n", + "\n", + "在计算机视觉中,直接将神经网络与其他机器学习方法进行比较也许不公平。这是因为,卷积神经网络的输入是由原始像素值或是经过简单预处理(例如居中、缩放)的像素值组成的。但在使用传统机器学习方法时,从业者永远不会将原始像素作为输入。在传统机器学习方法中,计算机视觉流水线是由经过人的手工精心设计的特征流水线组成的。对于这些传统方法,大部分的进展都来自于对特征有了更聪明的想法,并且学习到的算法往往归于事后的解释。\n", + "\n", + "虽然上世纪90年代就有了一些神经网络加速卡,但仅靠它们还不足以开发出有大量参数的深层多通道多层卷积神经网络。此外,当时的数据集仍然相对较小。除了这些障碍,训练神经网络的一些关键技巧仍然缺失,包括启发式参数初始化、随机梯度下降的变体、非挤压激活函数和有效的正则化技术。\n", + "\n", + "因此,与训练*端到端*(从像素到分类结果)系统不同,经典机器学习的流水线看起来更像下面这样:\n", + "\n", + "1. 获取一个有趣的数据集。在早期,收集这些数据集需要昂贵的传感器(在当时最先进的图像也就100万像素)。\n", + "2. 根据光学、几何学、其他知识以及偶然的发现,手工对特征数据集进行预处理。\n", + "3. 通过标准的特征提取算法,如SIFT(尺度不变特征变换) :cite:`Lowe.2004`和SURF(加速鲁棒特征) :cite:`Bay.Tuytelaars.Van-Gool.2006`或其他手动调整的流水线来输入数据。\n", + "4. 
将提取的特征送入最喜欢的分类器中(例如线性模型或其它核方法),以训练分类器。\n", + "\n", + "如果你和机器学习研究人员交谈,你会发现他们相信机器学习既重要又美丽:优雅的理论去证明各种模型的性质。机器学习是一个正在蓬勃发展、严谨且非常有用的领域。然而,如果你和计算机视觉研究人员交谈,你会听到一个完全不同的故事。他们会告诉你图像识别的诡异事实————推动领域进步的是数据特征,而不是学习算法。计算机视觉研究人员相信,从对最终模型精度的影响来说,更大或更干净的数据集、或是稍微改进的特征提取,比任何学习算法带来的进步要大得多。\n", + "\n", + "## 学习表征\n", + "\n", + "另一种预测这个领域发展的方法————观察图像特征的提取方法。在2012年前,图像特征都是机械地计算出来的。事实上,设计一套新的特征函数、改进结果,并撰写论文是盛极一时的潮流。SIFT :cite:`Lowe.2004`、SURF :cite:`Bay.Tuytelaars.Van-Gool.2006`、HOG(定向梯度直方图) :cite:`Dalal.Triggs.2005`、[bags of visual words](https://en.wikipedia.org/wiki/Bag-of-words_model_in_computer_vision)和类似的特征提取方法占据了主导地位。\n", + "\n", + "另一组研究人员,包括Yann LeCun、Geoff Hinton、Yoshua Bengio、Andrew Ng、Shun ichi Amari和Juergen Schmidhuber,想法则与众不同:他们认为特征本身应该被学习。此外,他们还认为,在合理地复杂性前提下,特征应该由多个共同学习的神经网络层组成,每个层都有可学习的参数。在机器视觉中,最底层可能检测边缘、颜色和纹理。事实上,Alex Krizhevsky、Ilya Sutskever和Geoff Hinton提出了一种新的卷积神经网络变体*AlexNet*。在2012年ImageNet挑战赛中取得了轰动一时的成绩。AlexNet以Alex Krizhevsky的名字命名,他是论文 :cite:`Krizhevsky.Sutskever.Hinton.2012`的第一作者。\n", + "\n", + "有趣的是,在网络的最底层,模型学习到了一些类似于传统滤波器的特征抽取器。 :numref:`fig_filters`是从AlexNet论文 :cite:`Krizhevsky.Sutskever.Hinton.2012`复制的,描述了底层图像特征。\n", + "\n", + "![AlexNet第一层学习到的特征抽取器。](../img/filters.png)\n", + ":width:`400px`\n", + ":label:`fig_filters`\n", + "\n", + "AlexNet的更高层建立在这些底层表示的基础上,以表示更大的特征,如眼睛、鼻子、草叶等等。而更高的层可以检测整个物体,如人、飞机、狗或飞盘。最终的隐藏神经元可以学习图像的综合表示,从而使属于不同类别的数据易于区分。尽管一直有一群执着的研究者不断钻研,试图学习视觉数据的逐级表征,然而很长一段时间里这些尝试都未有突破。深度卷积神经网络的突破出现在2012年。突破可归因于两个关键因素。\n", + "\n", + "### 缺少的成分:数据\n", + "\n", + "包含许多特征的深度模型需要大量的有标签数据,才能显著优于基于凸优化的传统方法(如线性方法和核方法)。\n", + "然而,限于早期计算机有限的存储和90年代有限的研究预算,大部分研究只基于小的公开数据集。例如,不少研究论文基于加州大学欧文分校(UCI)提供的若干个公开数据集,其中许多数据集只有几百至几千张在非自然环境下以低分辨率拍摄的图像。这一状况在2010年前后兴起的大数据浪潮中得到改善。2009年,ImageNet数据集发布,并发起ImageNet挑战赛:要求研究人员从100万个样本中训练模型,以区分1000个不同类别的对象。ImageNet数据集由斯坦福教授李飞飞小组的研究人员开发,利用谷歌图像搜索(Google Image Search)对每一类图像进行预筛选,并利用亚马逊众包(Amazon Mechanical Turk)来标注每张图片的相关类别。这种规模是前所未有的。这项被称为ImageNet的挑战赛推动了计算机视觉和机器学习研究的发展,挑战研究人员确定哪些模型能够在更大的数据规模下表现最好。\n", + "\n", + "### 缺少的成分:硬件\n", + "\n", + 
"深度学习对计算资源要求很高,训练可能需要数百个迭代轮数,每次迭代都需要通过代价高昂的许多线性代数层传递数据。这也是为什么在20世纪90年代至21世纪初,优化凸目标的简单算法是研究人员的首选。然而,用GPU训练神经网络改变了这一格局。*图形处理器*(Graphics Processing Unit,GPU)早年用来加速图形处理,使电脑游戏玩家受益。GPU可优化高吞吐量的$4 \\times 4$矩阵和向量乘法,从而服务于基本的图形任务。幸运的是,这些数学运算与卷积层的计算惊人地相似。由此,英伟达(NVIDIA)和ATI已经开始为通用计算操作优化gpu,甚至把它们作为*通用GPU*(general-purpose GPUs,GPGPU)来销售。\n", + "\n", + "那么GPU比CPU强在哪里呢?\n", + "\n", + "首先,我们深度理解一下中央处理器(Central Processing Unit,CPU)的*核心*。\n", + "CPU的每个核心都拥有高时钟频率的运行能力,和高达数MB的三级缓存(L3Cache)。\n", + "它们非常适合执行各种指令,具有分支预测器、深层流水线和其他使CPU能够运行各种程序的功能。\n", + "然而,这种明显的优势也是它的致命弱点:通用核心的制造成本非常高。\n", + "它们需要大量的芯片面积、复杂的支持结构(内存接口、内核之间的缓存逻辑、高速互连等等),而且它们在任何单个任务上的性能都相对较差。\n", + "现代笔记本电脑最多有4核,即使是高端服务器也很少超过64核,因为它们的性价比不高。\n", + "\n", + "相比于CPU,GPU由$100 \\sim 1000$个小的处理单元组成(NVIDIA、ATI、ARM和其他芯片供应商之间的细节稍有不同),通常被分成更大的组(NVIDIA称之为warps)。\n", + "虽然每个GPU核心都相对较弱,有时甚至以低于1GHz的时钟频率运行,但庞大的核心数量使GPU比CPU快几个数量级。\n", + "例如,NVIDIA最近一代的Ampere GPU架构为每个芯片提供了高达312 TFlops的浮点性能,而CPU的浮点性能到目前为止还没有超过1 TFlops。\n", + "之所以有如此大的差距,原因其实很简单:首先,功耗往往会随时钟频率呈二次方增长。\n", + "对于一个CPU核心,假设它的运行速度比GPU快4倍,你可以使用16个GPU内核取代,那么GPU的综合性能就是CPU的$16 \\times 1/4 = 4$倍。\n", + "其次,GPU内核要简单得多,这使得它们更节能。\n", + "此外,深度学习中的许多操作需要相对较高的内存带宽,而GPU拥有10倍于CPU的带宽。\n", + "\n", + "回到2012年的重大突破,当Alex Krizhevsky和Ilya Sutskever实现了可以在GPU硬件上运行的深度卷积神经网络时,一个重大突破出现了。他们意识到卷积神经网络中的计算瓶颈:卷积和矩阵乘法,都是可以在硬件上并行化的操作。\n", + "于是,他们使用两个显存为3GB的NVIDIA GTX580 GPU实现了快速卷积运算。他们的创新[cuda-convnet](https://code.google.com/archive/p/cuda-convnet/)几年来它一直是行业标准,并推动了深度学习热潮。\n", + "\n", + "## AlexNet\n", + "\n", + "2012年,AlexNet横空出世。它首次证明了学习到的特征可以超越手工设计的特征。它一举打破了计算机视觉研究的现状。\n", + "AlexNet使用了8层卷积神经网络,并以很大的优势赢得了2012年ImageNet图像识别挑战赛。\n", + "\n", + "AlexNet和LeNet的架构非常相似,如 :numref:`fig_alexnet`所示。\n", + "注意,这里我们提供了一个稍微精简版本的AlexNet,去除了当年需要两个小型GPU同时运算的设计特点。\n", + "\n", + "![从LeNet(左)到AlexNet(右)](../img/alexnet.svg)\n", + ":label:`fig_alexnet`\n", + "\n", + "AlexNet和LeNet的设计理念非常相似,但也存在显著差异。\n", + "首先,AlexNet比相对较小的LeNet5要深得多。\n", + "AlexNet由八层组成:五个卷积层、两个全连接隐藏层和一个全连接输出层。\n", + "其次,AlexNet使用ReLU而不是sigmoid作为其激活函数。\n", + 
"下面,让我们深入研究AlexNet的细节。\n", + "\n", + "### 模型设计\n", + "\n", + "在AlexNet的第一层,卷积窗口的形状是$11\\times11$。\n", + "由于ImageNet中大多数图像的宽和高比MNIST图像的多10倍以上,因此,需要一个更大的卷积窗口来捕获目标。\n", + "第二层中的卷积窗口形状被缩减为$5\\times5$,然后是$3\\times3$。\n", + "此外,在第一层、第二层和第五层卷积层之后,加入窗口形状为$3\\times3$、步幅为2的最大汇聚层。\n", + "而且,AlexNet的卷积通道数目是LeNet的10倍。\n", + "\n", + "在最后一个卷积层后有两个全连接层,分别有4096个输出。\n", + "这两个巨大的全连接层拥有将近1GB的模型参数。\n", + "由于早期GPU显存有限,原版的AlexNet采用了双数据流设计,使得每个GPU只负责存储和计算模型的一半参数。\n", + "幸运的是,现在GPU显存相对充裕,所以我们现在很少需要跨GPU分解模型(因此,我们的AlexNet模型在这方面与原始论文稍有不同)。\n", + "\n", + "### 激活函数\n", + "\n", + "此外,AlexNet将sigmoid激活函数改为更简单的ReLU激活函数。\n", + "一方面,ReLU激活函数的计算更简单,它不需要如sigmoid激活函数那般复杂的求幂运算。\n", + "另一方面,当使用不同的参数初始化方法时,ReLU激活函数使训练模型更加容易。\n", + "当sigmoid激活函数的输出非常接近于0或1时,这些区域的梯度几乎为0,因此反向传播无法继续更新一些模型参数。\n", + "相反,ReLU激活函数在正区间的梯度总是1。\n", + "因此,如果模型参数没有正确初始化,sigmoid函数可能在正区间内得到几乎为0的梯度,从而使模型无法得到有效的训练。\n", + "\n", + "### 容量控制和预处理\n", + "\n", + "AlexNet通过dropout( :numref:`sec_dropout`)控制全连接层的模型复杂度,而LeNet只使用了权重衰减。\n", + "为了进一步扩充数据,AlexNet在训练时增加了大量的图像增强数据,如翻转、裁切和变色。\n", + "这使得模型更健壮,更大的样本量有效地减少了过拟合。\n", + "我们将在 :numref:`sec_image_augmentation`中更详细地讨论数据扩充。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "\n", + "net = nn.Sequential(\n", + " # 这里,我们使用一个11*11的更大窗口来捕捉对象。\n", + " # 同时,步幅为4,以减少输出的高度和宽度。\n", + " # 另外,输出通道的数目远大于LeNet\n", + " nn.Conv2d(1, 96, kernel_size=11, stride=4, padding=1), nn.ReLU(),\n", + " nn.MaxPool2d(kernel_size=3, stride=2),\n", + " # 减小卷积窗口,使用填充为2来使得输入与输出的高和宽一致,且增大输出通道数\n", + " nn.Conv2d(96, 256, kernel_size=5, padding=2), nn.ReLU(),\n", + " nn.MaxPool2d(kernel_size=3, stride=2),\n", + " # 使用三个连续的卷积层和较小的卷积窗口。\n", + " # 除了最后的卷积层,输出通道的数量进一步增加。\n", + " # 在前两个卷积层之后,汇聚层不用于减少输入的高度和宽度\n", + " nn.Conv2d(256, 384, kernel_size=3, padding=1), nn.ReLU(),\n", + " nn.Conv2d(384, 384, kernel_size=3, padding=1), nn.ReLU(),\n", + 
" nn.Conv2d(384, 256, kernel_size=3, padding=1), nn.ReLU(),\n", + " nn.MaxPool2d(kernel_size=3, stride=2),\n", + " nn.Flatten(),\n", + " # 这里,全连接层的输出数量是LeNet中的好几倍。使用dropout层来减轻过拟合\n", + " nn.Linear(6400, 4096), nn.ReLU(),\n", + " nn.Dropout(p=0.5),\n", + " nn.Linear(4096, 4096), nn.ReLU(),\n", + " nn.Dropout(p=0.5),\n", + " # 最后是输出层。由于这里使用Fashion-MNIST,所以用类别数为10,而非论文中的1000\n", + " nn.Linear(4096, 10))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "[**我们构造一个**]高度和宽度都为224的(**单通道数据,来观察每一层输出的形状**)。\n", + "它与 :numref:`fig_alexnet`中的AlexNet架构相匹配。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Conv2d output shape:\t torch.Size([1, 96, 54, 54])\n", + "ReLU output shape:\t torch.Size([1, 96, 54, 54])\n", + "MaxPool2d output shape:\t torch.Size([1, 96, 26, 26])\n", + "Conv2d output shape:\t torch.Size([1, 256, 26, 26])\n", + "ReLU output shape:\t torch.Size([1, 256, 26, 26])\n", + "MaxPool2d output shape:\t torch.Size([1, 256, 12, 12])\n", + "Conv2d output shape:\t torch.Size([1, 384, 12, 12])\n", + "ReLU output shape:\t torch.Size([1, 384, 12, 12])\n", + "Conv2d output shape:\t torch.Size([1, 384, 12, 12])\n", + "ReLU output shape:\t torch.Size([1, 384, 12, 12])\n", + "Conv2d output shape:\t torch.Size([1, 256, 12, 12])\n", + "ReLU output shape:\t torch.Size([1, 256, 12, 12])\n", + "MaxPool2d output shape:\t torch.Size([1, 256, 5, 5])\n", + "Flatten output shape:\t torch.Size([1, 6400])\n", + "Linear output shape:\t torch.Size([1, 4096])\n", + "ReLU output shape:\t torch.Size([1, 4096])\n", + "Dropout output shape:\t torch.Size([1, 4096])\n", + "Linear output shape:\t torch.Size([1, 4096])\n", + "ReLU output shape:\t torch.Size([1, 4096])\n", + "Dropout output shape:\t torch.Size([1, 4096])\n", + "Linear output shape:\t torch.Size([1, 10])\n" + ] + } + ], + 
"source": [ + "X = torch.randn(1, 1, 224, 224)\n", + "for layer in net:\n", + " X=layer(X)\n", + " print(layer.__class__.__name__,'output shape:\\t',X.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "## 读取数据集\n", + "\n", + "尽管本文中AlexNet是在ImageNet上进行训练的,但我们在这里使用的是Fashion-MNIST数据集。因为即使在现代GPU上,训练ImageNet模型,同时使其收敛可能需要数小时或数天的时间。\n", + "将AlexNet直接应用于Fashion-MNIST的一个问题是,[**Fashion-MNIST图像的分辨率**]($28 \\times 28$像素)(**低于ImageNet图像。**)\n", + "为了解决这个问题,(**我们将它们增加到$224 \\times 224$**)(通常来讲这不是一个明智的做法,但我们在这里这样做是为了有效使用AlexNet架构)。\n", + "我们使用`d2l.load_data_fashion_mnist`函数中的`resize`参数执行此调整。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 9, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "batch_size = 128\n", + "train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size, resize=224)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 10 + }, + "source": [ + "## [**训练AlexNet**]\n", + "\n", + "现在,我们可以开始训练AlexNet了。与 :numref:`sec_lenet`中的LeNet相比,这里的主要变化是使用更小的学习速率训练,这是因为网络更深更广、图像分辨率更高,训练卷积神经网络就更昂贵。\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "origin_pos": 11, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "training on cpu\n" + ] + } + ], + "source": [ + "lr, num_epochs = 0.01, 5\n", + "d2l.train_ch6(net, train_iter, test_iter, num_epochs, lr, d2l.try_gpu())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 12 + }, + "source": [ + "## 小结\n", + "\n", + "* AlexNet的架构与LeNet相似,但使用了更多的卷积层和更多的参数来拟合大规模的ImageNet数据集。\n", + "* 今天,AlexNet已经被更有效的架构所超越,但它是从浅层网络到深层网络的关键一步。\n", + "* 尽管AlexNet的代码只比LeNet多出几行,但学术界花了很多年才接受深度学习这一概念,并应用其出色的实验结果。这也是由于缺乏有效的计算工具。\n", + "* Dropout、ReLU和预处理是提升计算机视觉任务性能的其他关键步骤。\n", + "\n", + "## 练习\n", + "\n", + "1. 试着增加迭代轮数。对比LeNet的结果有什么不同?为什么?\n", + "1. AlexNet对于Fashion-MNIST数据集来说可能太复杂了。\n", + " 1. 
尝试简化模型以加快训练速度,同时确保准确性不会显著下降。\n", + " 1. 设计一个更好的模型,可以直接在$28 \\times 28$图像上工作。\n", + "1. 修改批量大小,并观察模型精度和GPU显存变化。\n", + "1. 分析了AlexNet的计算性能。\n", + " 1. 在AlexNet中主要是哪部分占用显存?\n", + " 1. 在AlexNet中主要是哪部分需要更多的计算?\n", + " 1. 计算结果时显存带宽如何?\n", + "1. 将dropout和ReLU应用于LeNet-5,效果有提升吗?再试试预处理会怎么样?\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 14, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/1863)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.2" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/code/25-VGG.ipynb b/code/25-VGG.ipynb new file mode 100644 index 0000000..4ee9650 --- /dev/null +++ b/code/25-VGG.ipynb @@ -0,0 +1,1007 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "c2247b6e", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "\n", + "#VGG块\n", + "def vgg_block(num_convs, in_channels, out_channels):#块中卷积层数,输入输出通道数\n", + " layers = []\n", + " for _ in range(num_convs):#用for循环\n", + " layers.append(nn.Conv2d(\n", + " in_channels, out_channels, kernel_size=3, padding=1))\n", + " layers.append(nn.ReLU())\n", + " in_channels = out_channels#添加一层后取当前输出通道数为下一层输入通道数,\n", + " #这里说明VGG块改变通道数的方法是在第一层就将通道数改变好,后面层中通道数全不变\n", + " layers.append(nn.MaxPool2d(kernel_size=2, stride=2))\n", + " return nn.Sequential(*layers)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "0bf25540", + "metadata": {}, + "outputs": [], + "source": [ + "#每个VGG块的(卷积层数,输出通道数)\n", + "conv_arch = ((1, 64), (1, 128), (2, 256), (2, 512), (2, 512))\n", + "\n", + "#VGG网络\n", + 
"def vgg(conv_arch):\n", + " conv_blks = []\n", + " in_channels = 1#初始输入图像为单通道\n", + " for (num_convs, out_channels) in conv_arch:#依次读取VGG块尺寸并创建\n", + " conv_blks.append(vgg_block(\n", + " num_convs, in_channels, out_channels))\n", + " in_channels = out_channels#输入通道数随每层输出通道数更新\n", + " \n", + " return nn.Sequential(\n", + " *conv_blks, nn.Flatten(),#“*”将列表中所有元素解开成独立的参数\n", + " nn.Linear(out_channels * 7 * 7, 4096), nn.ReLU(),\n", + " nn.Dropout(0.5), nn.Linear(4096, 4096), nn.ReLU(),\n", + " nn.Dropout(0.5), nn.Linear(4096, 10))\n", + "\n", + "net = vgg(conv_arch)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "b8c794b4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sequential ouput shape:\t torch.Size([1, 64, 112, 112])\n", + "Sequential ouput shape:\t torch.Size([1, 128, 56, 56])\n", + "Sequential ouput shape:\t torch.Size([1, 256, 28, 28])\n", + "Sequential ouput shape:\t torch.Size([1, 512, 14, 14])\n", + "Sequential ouput shape:\t torch.Size([1, 512, 7, 7])\n", + "Flatten ouput shape:\t torch.Size([1, 25088])\n", + "Linear ouput shape:\t torch.Size([1, 4096])\n", + "ReLU ouput shape:\t torch.Size([1, 4096])\n", + "Dropout ouput shape:\t torch.Size([1, 4096])\n", + "Linear ouput shape:\t torch.Size([1, 4096])\n", + "ReLU ouput shape:\t torch.Size([1, 4096])\n", + "Dropout ouput shape:\t torch.Size([1, 4096])\n", + "Linear ouput shape:\t torch.Size([1, 10])\n" + ] + } + ], + "source": [ + "X = torch.randn(size=(1, 1, 224, 224))\n", + "for blk in net:\n", + " X = blk(X)\n", + " print(blk.__class__.__name__, 'ouput shape:\\t', X.shape)\n", + " \n", + "#总体而言,网络分为五块,每一块将输入宽高减半,通道数翻倍" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "27e6185a", + "metadata": {}, + "outputs": [], + "source": [ + "#VGG-11计算量太大,这里构建了一个四分之一尺寸的网络来训练,计算量为1/16,但依然很大\n", + "\n", + "ratio = 4\n", + "small_conv_arch = [(pair[0], pair[1] // ratio) for pair in conv_arch]\n", + "net = 
vgg(small_conv_arch)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "fc93a8c0", + "metadata": {}, + "outputs": [ + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/var/folders/k1/yryg_xkx7078z6t5kzg6bmth0000gn/T/ipykernel_34700/2154047581.py\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mlr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_epochs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m0.05\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m10\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m128\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mtrain_iter\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_iter\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0md2l\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_data_fashion_mnist\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_size\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresize\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m224\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0md2l\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_ch6\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnet\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_iter\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_iter\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnum_epochs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0md2l\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtry_gpu\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m/usr/local/lib/python3.9/site-packages/d2l/torch.py\u001b[0m in 
\u001b[0;36mtrain_ch6\u001b[0;34m(net, train_iter, test_iter, num_epochs, lr, device)\u001b[0m\n\u001b[1;32m 514\u001b[0m \u001b[0moptimizer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzero_grad\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 515\u001b[0m \u001b[0mX\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mX\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 516\u001b[0;31m \u001b[0my_hat\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnet\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mX\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 517\u001b[0m \u001b[0ml\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mloss\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my_hat\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 518\u001b[0m \u001b[0ml\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbackward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.9/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 1100\u001b[0m if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks\n\u001b[1;32m 1101\u001b[0m or _global_forward_hooks or _global_forward_pre_hooks):\n\u001b[0;32m-> 1102\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mforward_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1103\u001b[0m \u001b[0;31m# Do not call functions when jit is used\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1104\u001b[0m \u001b[0mfull_backward_hooks\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnon_full_backward_hooks\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.9/site-packages/torch/nn/modules/container.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 139\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 140\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mmodule\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 141\u001b[0;31m \u001b[0minput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodule\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 142\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 143\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.9/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 1100\u001b[0m if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks\n\u001b[1;32m 1101\u001b[0m or _global_forward_hooks or 
_global_forward_pre_hooks):\n\u001b[0;32m-> 1102\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mforward_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1103\u001b[0m \u001b[0;31m# Do not call functions when jit is used\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1104\u001b[0m \u001b[0mfull_backward_hooks\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnon_full_backward_hooks\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.9/site-packages/torch/nn/modules/container.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 139\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 140\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mmodule\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 141\u001b[0;31m \u001b[0minput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodule\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 142\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 143\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.9/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, 
**kwargs)\u001b[0m\n\u001b[1;32m 1100\u001b[0m if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks\n\u001b[1;32m 1101\u001b[0m or _global_forward_hooks or _global_forward_pre_hooks):\n\u001b[0;32m-> 1102\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mforward_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1103\u001b[0m \u001b[0;31m# Do not call functions when jit is used\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1104\u001b[0m \u001b[0mfull_backward_hooks\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnon_full_backward_hooks\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.9/site-packages/torch/nn/modules/pooling.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 160\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 161\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 162\u001b[0;31m return F.max_pool2d(input, self.kernel_size, self.stride,\n\u001b[0m\u001b[1;32m 163\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpadding\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdilation\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mceil_mode\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 164\u001b[0m self.return_indices)\n", + "\u001b[0;32m/usr/local/lib/python3.9/site-packages/torch/_jit_internal.py\u001b[0m in \u001b[0;36mfn\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 420\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mif_true\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 421\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 422\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mif_false\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 423\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 424\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mif_true\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__doc__\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mif_false\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__doc__\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.9/site-packages/torch/nn/functional.py\u001b[0m in \u001b[0;36m_max_pool2d\u001b[0;34m(input, kernel_size, stride, padding, dilation, ceil_mode, return_indices)\u001b[0m\n\u001b[1;32m 717\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mstride\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 718\u001b[0m 
\u001b[0mstride\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjit\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mannotate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mList\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mint\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 719\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmax_pool2d\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkernel_size\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstride\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpadding\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdilation\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mceil_mode\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 720\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 721\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-22T17:28:27.378259\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "lr, num_epochs, batch_size = 0.05, 10, 128\n", + "train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size, resize = 224)\n", + "d2l.train_ch6(net, train_iter, test_iter, num_epochs, lr, d2l.try_gpu())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "817054d7", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/code/26-NiN.ipynb b/code/26-NiN.ipynb new file mode 100644 index 0000000..0ac1946 --- /dev/null +++ b/code/26-NiN.ipynb @@ -0,0 +1,1249 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "YnAtHwhmmGOh", + "outputId": "9c2fd82f-fe05-4f0f-cd17-1923e9307495", + "slideshow": { + "slide_type": "skip" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting git+https://github.com/d2l-ai/d2l-zh@release\n", + " Cloning https://github.com/d2l-ai/d2l-zh (to revision release) to /tmp/pip-req-build-rwrn0sr4\n", + " Running command git clone -q https://github.com/d2l-ai/d2l-zh /tmp/pip-req-build-rwrn0sr4\n", + " Running command git checkout -b release --track origin/release\n", + " Switched to a new branch 'release'\n", + " Branch 'release' set up to track remote branch 
'release' from 'origin'.\n", + " Running command git submodule update --init --recursive -q\n", + "Requirement already satisfied: jupyter in /usr/local/lib/python3.7/dist-packages (from d2l==2.0.0b0) (1.0.0)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from d2l==2.0.0b0) (1.19.5)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from d2l==2.0.0b0) (3.2.2)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from d2l==2.0.0b0) (2.23.0)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from d2l==2.0.0b0) (1.1.5)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==2.0.0b0) (5.6.1)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==2.0.0b0) (4.10.1)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==2.0.0b0) (5.3.1)\n", + "Requirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==2.0.0b0) (5.2.2)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==2.0.0b0) (7.6.5)\n", + "Requirement already satisfied: jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==2.0.0b0) (5.2.0)\n", + "Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==2.0.0b0) (5.3.5)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==2.0.0b0) (5.1.1)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==2.0.0b0) (5.1.1)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from 
ipykernel->jupyter->d2l==2.0.0b0) (5.5.0)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==2.0.0b0) (0.8.1)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==2.0.0b0) (4.4.2)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==2.0.0b0) (0.7.5)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==2.0.0b0) (1.0.18)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==2.0.0b0) (4.8.0)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==2.0.0b0) (57.4.0)\n", + "Requirement already satisfied: pygments in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==2.0.0b0) (2.6.1)\n", + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==2.0.0b0) (1.15.0)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==2.0.0b0) (0.2.5)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==2.0.0b0) (3.5.2)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==2.0.0b0) (1.0.2)\n", + "Requirement already satisfied: nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==2.0.0b0) (5.1.3)\n", + "Requirement 
already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==2.0.0b0) (0.2.0)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==2.0.0b0) (4.3.3)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==2.0.0b0) (4.9.1)\n", + "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==2.0.0b0) (0.18.0)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==2.0.0b0) (5.4.0)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==2.0.0b0) (3.10.0.2)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==2.0.0b0) (4.10.0)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==2.0.0b0) (21.4.0)\n", + "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==2.0.0b0) (3.7.0)\n", + "Requirement already satisfied: terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==2.0.0b0) (0.12.1)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==2.0.0b0) (1.8.0)\n", + "Requirement already satisfied: jinja2 in 
/usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==2.0.0b0) (2.11.3)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==2.0.0b0) (2.8.2)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==2.0.0b0) (22.3.0)\n", + "Requirement already satisfied: ptyprocess in /usr/local/lib/python3.7/dist-packages (from terminado>=0.8.1->notebook->jupyter->d2l==2.0.0b0) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter->d2l==2.0.0b0) (2.0.1)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==2.0.0b0) (1.3.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==2.0.0b0) (3.0.6)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==2.0.0b0) (0.11.0)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==2.0.0b0) (0.7.1)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==2.0.0b0) (0.3)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==2.0.0b0) (1.5.0)\n", + "Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==2.0.0b0) (0.5.0)\n", + "Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==2.0.0b0) (0.8.4)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==2.0.0b0) 
(4.1.0)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==2.0.0b0) (21.3)\n", + "Requirement already satisfied: webencodings in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==2.0.0b0) (0.5.1)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas->d2l==2.0.0b0) (2018.9)\n", + "Requirement already satisfied: qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter->d2l==2.0.0b0) (2.0.0)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->d2l==2.0.0b0) (3.0.4)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->d2l==2.0.0b0) (1.24.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->d2l==2.0.0b0) (2021.10.8)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->d2l==2.0.0b0) (2.10)\n", + "Building wheels for collected packages: d2l\n", + " Building wheel for d2l (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for d2l: filename=d2l-2.0.0b0-py3-none-any.whl size=80071 sha256=52bdc5edc13a6233bf007431cd81aac7f01ddffc0b2e0a61eeae9f9a17c5c9e1\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-9zyqtm5h/wheels/73/f4/42/d2b85ca46d85a241d6aa57c1c24027de2d2258202bb67945f9\n", + "Successfully built d2l\n", + "Installing collected packages: d2l\n", + "Successfully installed d2l-2.0.0b0\n" + ] + } + ], + "source": [ + "!pip install git+https://github.com/d2l-ai/d2l-zh@release # installing d2l\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "3J6COHAcmGOj", + "origin_pos": 0, + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "# 网络中的网络(NiN)\n", + "\n", + "## (**NiN块结构**)\n", + "\n", + "![对比 VGG 和 NiN 及它们的块之间主要架构差异。](http://d2l.ai/_images/nin.svg)\n", + ":width:`600px`\n", + ":label:`fig_nin`\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "id": "icjYVLrqmGOk", + "origin_pos": 2, + "slideshow": { + "slide_type": "slide" + }, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "\n", + "#定义NiN块\n", + "def nin_block(in_channels, out_channels, kernel_size, strides, padding):\n", + " return nn.Sequential(\n", + " nn.Conv2d(in_channels, out_channels, kernel_size, strides, padding),\n", + " nn.ReLU(),\n", + " nn.Conv2d(out_channels, out_channels, kernel_size=1), nn.ReLU(),\n", + " nn.Conv2d(out_channels, out_channels, kernel_size=1), nn.ReLU())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "m76Gv4E2mGOl", + "origin_pos": 4, + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "## [**NiN模型**]" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "id": "ZM95zdrnmGOm", + "origin_pos": 6, + "slideshow": { + "slide_type": "-" + }, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "net = nn.Sequential(\n", + " 
nin_block(1, 96, kernel_size=11, strides=4, padding=0),\n", + " nn.MaxPool2d(3, stride=2),\n", + " nin_block(96, 256, kernel_size=5, strides=1, padding=2),\n", + " nn.MaxPool2d(3, stride=2),\n", + " nin_block(256, 384, kernel_size=3, strides=1, padding=1),\n", + " nn.MaxPool2d(3, stride=2),\n", + " nn.Dropout(0.5),\n", + " # 标签类别数是10\n", + " nin_block(384, 10, kernel_size=3, strides=1, padding=1),\n", + " nn.AdaptiveAvgPool2d((1, 1)), #全局平均池化,高宽都变成1\n", + " # 将四维的输出转成二维的输出,其形状为(批量大小,10)\n", + " nn.Flatten())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "qpeUzjQ5mGOn", + "origin_pos": 8, + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "**demo测试,查看每个块的输出情况**\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "jOu7BZpsmGOo", + "origin_pos": 10, + "outputId": "80275b87-d88b-4d17-fbc1-90bf1dc46241", + "slideshow": { + "slide_type": "-" + }, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sequential output shape:\t torch.Size([1, 96, 54, 54])\n", + "MaxPool2d output shape:\t torch.Size([1, 96, 26, 26])\n", + "Sequential output shape:\t torch.Size([1, 256, 26, 26])\n", + "MaxPool2d output shape:\t torch.Size([1, 256, 12, 12])\n", + "Sequential output shape:\t torch.Size([1, 384, 12, 12])\n", + "MaxPool2d output shape:\t torch.Size([1, 384, 5, 5])\n", + "Dropout output shape:\t torch.Size([1, 384, 5, 5])\n", + "Sequential output shape:\t torch.Size([1, 10, 5, 5])\n", + "AdaptiveAvgPool2d output shape:\t torch.Size([1, 10, 1, 1])\n", + "Flatten output shape:\t torch.Size([1, 10])\n" + ] + } + ], + "source": [ + "X = torch.rand(size=(1, 1, 224, 224))\n", + "for layer in net:\n", + " X = layer(X)\n", + " print(layer.__class__.__name__,'output shape:\\t', X.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "dsBcacGnmGOq", + "origin_pos": 12, + "slideshow": { + 
"slide_type": "slide" + } + }, + "source": [ + "**训练模型**" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 261 + }, + "id": "ERZoQ6v-mGOr", + "origin_pos": 13, + "outputId": "6b85b5ef-f4f3-4195-bb5c-0ad95998bfd0", + "slideshow": { + "slide_type": "-" + }, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "lr, num_epochs, batch_size = 0.1, 10, 128\n", + "train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size, resize=224)\n", + "d2l.train_ch6(net, train_iter, test_iter, num_epochs, lr, d2l.try_gpu())" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "celltoolbar": "幻灯片", + "colab": { + "name": "nin.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { 
+ "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.3" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/code/27-googlenet.ipynb b/code/27-googlenet.ipynb new file mode 100644 index 0000000..218c185 --- /dev/null +++ b/code/27-googlenet.ipynb @@ -0,0 +1,1162 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "-" + } + }, + "source": [ + "# 含并行连结的网络(GoogLeNet)\n", + "\n", + "Inception块" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from torch.nn import functional as F\n", + "from d2l import torch as d2l\n", + "\n", + "\n", + "class Inception(nn.Module):\n", + " def __init__(self, in_channels, c1, c2, c3, c4, **kwargs):\n", + " super(Inception, self).__init__(**kwargs)\n", + " self.p1_1 = nn.Conv2d(in_channels, c1, kernel_size=1)\n", + " #1X1卷积\n", + " self.p2_1 = nn.Conv2d(in_channels, c2[0], kernel_size=1)\n", + " self.p2_2 = nn.Conv2d(c2[0], c2[1], kernel_size=3, padding=1)\n", + " #1X1卷积,3X3卷积\n", + " self.p3_1 = nn.Conv2d(in_channels, c3[0], kernel_size=1)\n", + " self.p3_2 = nn.Conv2d(c3[0], c3[1], kernel_size=5, padding=2)\n", + " #1X1卷积,5X5卷积\n", + " self.p4_1 = nn.MaxPool2d(kernel_size=3, stride=1, padding=1)\n", + " self.p4_2 = nn.Conv2d(in_channels, c4, kernel_size=1)\n", + " #maxpool,1x1卷积\n", + " def forward(self, x):\n", + " p1 = F.relu(self.p1_1(x))\n", + " p2 = F.relu(self.p2_2(F.relu(self.p2_1(x))))\n", + " p3 = F.relu(self.p3_2(F.relu(self.p3_1(x))))\n", + " p4 = F.relu(self.p4_2(self.p4_1(x)))\n", + " #定义四个模块,然后将输出在通道维度上拼接在一起\n", + " return torch.cat((p1, p2, p3, p4), dim=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + 
"slide_type": "slide" + } + }, + "source": [ + "GoogLeNet模型" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 22, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "b1 = nn.Sequential(nn.Conv2d(1, 64, kernel_size=7, stride=2, padding=3),\n", + " nn.ReLU(),\n", + " nn.MaxPool2d(kernel_size=3, stride=2, padding=1))\n", + "\n", + "b2 = nn.Sequential(nn.Conv2d(64, 64, kernel_size=1),\n", + " nn.ReLU(),\n", + " nn.Conv2d(64, 192, kernel_size=3, padding=1),\n", + " nn.ReLU(),\n", + " nn.MaxPool2d(kernel_size=3, stride=2, padding=1))\n", + "\n", + "b3 = nn.Sequential(Inception(192, 64, (96, 128), (16, 32), 32),\n", + " Inception(256, 128, (128, 192), (32, 96), 64),\n", + " nn.MaxPool2d(kernel_size=3, stride=2, padding=1))\n", + "\n", + "b4 = nn.Sequential(Inception(480, 192, (96, 208), (16, 48), 64),\n", + " Inception(512, 160, (112, 224), (24, 64), 64),\n", + " Inception(512, 128, (128, 256), (24, 64), 64),\n", + " Inception(512, 112, (144, 288), (32, 64), 64),\n", + " Inception(528, 256, (160, 320), (32, 128), 128),\n", + " nn.MaxPool2d(kernel_size=3, stride=2, padding=1))\n", + "\n", + "b5 = nn.Sequential(Inception(832, 256, (160, 320), (32, 128), 128),\n", + " Inception(832, 384, (192, 384), (48, 128), 128),\n", + " nn.AdaptiveAvgPool2d((1,1)),\n", + " nn.Flatten())\n", + "\n", + "net = nn.Sequential(b1, b2, b3, b4, b5, nn.Linear(1024, 10))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "为了使Fashion-MNIST上的训练短小精悍,我们将输入的高和宽从224降到96" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 26, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sequential output shape:\t torch.Size([1, 64, 24, 24])\n", + "Sequential output shape:\t torch.Size([1, 192, 12, 12])\n", + "Sequential output shape:\t torch.Size([1, 480, 6, 6])\n", + "Sequential output 
shape:\t torch.Size([1, 832, 3, 3])\n", + "Sequential output shape:\t torch.Size([1, 1024])\n", + "Linear output shape:\t torch.Size([1, 10])\n" + ] + } + ], + "source": [ + "X = torch.rand(size=(1, 1, 96, 96))\n", + "for layer in net:\n", + " X = layer(X)\n", + " print(layer.__class__.__name__,'output shape:\\t', X.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "训练模型" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 29, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "loss 0.246, train acc 0.906, test acc 0.884\n", + "3448.9 examples/sec on cuda:0\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T15:02:56.719705\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + 
"text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "lr, num_epochs, batch_size = 0.1, 10, 128\n", + "train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size, resize=96)\n", + "d2l.train_ch6(net, train_iter, test_iter, num_epochs, lr, d2l.try_gpu())" + ] + } + ], + "metadata": { + "celltoolbar": "Slideshow", + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + }, + "rise": { + "autolaunch": true, + "enable_chalkboard": true, + "overlay": "", + "scroll": true + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/28-\346\211\271\351\207\217\345\275\222\344\270\200\345\214\226.ipynb" "b/code/28-\346\211\271\351\207\217\345\275\222\344\270\200\345\214\226.ipynb" new file mode 100644 index 0000000..c0f35d7 --- /dev/null +++ "b/code/28-\346\211\271\351\207\217\345\275\222\344\270\200\345\214\226.ipynb" @@ -0,0 +1,2268 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 批量规范化\n", + ":label:`sec_batch_norm`\n", + "\n", + "训练深层神经网络是十分困难的,特别是在较短的时间内使他们收敛更加棘手。\n", + "在本节中,我们将介绍*批量规范化*(batch normalization) :cite:`Ioffe.Szegedy.2015`,这是一种流行且有效的技术,可持续加速深层网络的收敛速度。\n", + "再结合在 :numref:`sec_resnet`中将介绍的残差块,批量规范化使得研究人员能够训练100层以上的网络。\n", + "\n", + "## 训练深层网络\n", + "\n", + "为什么需要批量规范化层呢?让我们来回顾一下训练神经网络时出现的一些实际挑战。\n", + "\n", + "首先,数据预处理的方式通常会对最终结果产生巨大影响。\n", + "回想一下我们应用多层感知机来预测房价的例子( :numref:`sec_kaggle_house`)。\n", + "使用真实数据时,我们的第一步是标准化输入特征,使其平均值为0,方差为1。\n", + "直观地说,这种标准化可以很好地与我们的优化器配合使用,因为它可以将参数的量级进行统一。\n", + "\n", + 
"第二,对于典型的多层感知机或卷积神经网络。当我们训练时,中间层中的变量(例如,多层感知机中的仿射变换输出)可能具有更广的变化范围:不论是沿着从输入到输出的层,跨同一层中的单元,或是随着时间的推移,模型参数的随着训练更新变幻莫测。\n", + "批量规范化的发明者非正式地假设,这些变量分布中的这种偏移可能会阻碍网络的收敛。\n", + "直观地说,我们可能会猜想,如果一个层的可变值是另一层的100倍,这可能需要对学习率进行补偿调整。\n", + "\n", + "第三,更深层的网络很复杂,容易过拟合。\n", + "这意味着正则化变得更加重要。\n", + "\n", + "批量规范化应用于单个可选层(也可以应用到所有层),其原理如下:在每次训练迭代中,我们首先规范化输入,即通过减去其均值并除以其标准差,其中两者均基于当前小批量处理。\n", + "接下来,我们应用比例系数和比例偏移。\n", + "正是由于这个基于*批量*统计的*标准化*,才有了*批量规范化*的名称。\n", + "\n", + "请注意,如果我们尝试使用大小为1的小批量应用批量规范化,我们将无法学到任何东西。\n", + "这是因为在减去均值之后,每个隐藏单元将为0。\n", + "所以,只有使用足够大的小批量,批量规范化这种方法才是有效且稳定的。\n", + "请注意,在应用批量规范化时,批量大小的选择可能比没有批量规范化时更重要。\n", + "\n", + "从形式上来说,用$\\mathbf{x} \\in \\mathcal{B}$表示一个来自小批量$\\mathcal{B}$的输入,批量规范化$\\mathrm{BN}$根据以下表达式转换$\\mathbf{x}$:\n", + "\n", + "$$\\mathrm{BN}(\\mathbf{x}) = \\boldsymbol{\\gamma} \\odot \\frac{\\mathbf{x} - \\hat{\\boldsymbol{\\mu}}_\\mathcal{B}}{\\hat{\\boldsymbol{\\sigma}}_\\mathcal{B}} + \\boldsymbol{\\beta}.$$\n", + ":eqlabel:`eq_batchnorm`\n", + "\n", + "在 :eqref:`eq_batchnorm`中,$\\hat{\\boldsymbol{\\mu}}_\\mathcal{B}$是样本均值,$\\hat{\\boldsymbol{\\sigma}}_\\mathcal{B}$是小批量$\\mathcal{B}$的样本标准差。\n", + "应用标准化后,生成的小批量的平均值为0和单位方差为1。\n", + "由于单位方差(与其他一些魔法数)是一个任意的选择,因此我们通常包含\n", + "*拉伸参数*(scale)$\\boldsymbol{\\gamma}$和*偏移参数*(shift)$\\boldsymbol{\\beta}$,它们的形状与$\\mathbf{x}$相同。\n", + "请注意,$\\boldsymbol{\\gamma}$和$\\boldsymbol{\\beta}$是需要与其他模型参数一起学习的参数。\n", + "\n", + "由于在训练过程中,中间层的变化幅度不能过于剧烈,而批量规范化将每一层主动居中,并将它们重新调整为给定的平均值和大小(通过$\\hat{\\boldsymbol{\\mu}}_\\mathcal{B}$和${\\hat{\\boldsymbol{\\sigma}}_\\mathcal{B}}$)。\n", + "\n", + "从形式上来看,我们计算出 :eqref:`eq_batchnorm`中的$\\hat{\\boldsymbol{\\mu}}_\\mathcal{B}$和${\\hat{\\boldsymbol{\\sigma}}_\\mathcal{B}}$,如下所示:\n", + "\n", + "$$\\begin{aligned} \\hat{\\boldsymbol{\\mu}}_\\mathcal{B} &= \\frac{1}{|\\mathcal{B}|} \\sum_{\\mathbf{x} \\in \\mathcal{B}} \\mathbf{x},\\\\\n", + "\\hat{\\boldsymbol{\\sigma}}_\\mathcal{B}^2 &= \\frac{1}{|\\mathcal{B}|} \\sum_{\\mathbf{x} \\in \\mathcal{B}} (\\mathbf{x} - 
\\hat{\\boldsymbol{\\mu}}_{\\mathcal{B}})^2 + \\epsilon.\\end{aligned}$$\n", + "\n", + "请注意,我们在方差估计值中添加一个小常量$\\epsilon > 0$,以确保我们永远不会尝试除以零,即使在经验方差估计值可能消失的情况下也是如此。估计值$\\hat{\\boldsymbol{\\mu}}_\\mathcal{B}$和${\\hat{\\boldsymbol{\\sigma}}_\\mathcal{B}}$通过使用平均值和方差的噪声(noise)估计来抵消缩放问题。\n", + "你可能会认为这种噪声是一个问题,而事实上它是有益的。\n", + "\n", + "事实证明,这是深度学习中一个反复出现的主题。\n", + "由于理论上尚未明确表述的原因,优化中的各种噪声源通常会导致更快的训练和较少的过拟合:这种变化似乎是正则化的一种形式。\n", + "在一些初步研究中, :cite:`Teye.Azizpour.Smith.2018`和 :cite:`Luo.Wang.Shao.ea.2018`分别将批量规范化的性质与贝叶斯先验相关联。\n", + "这些理论揭示了为什么批量规范化最适应$50 \\sim 100$范围中的中等小批量尺寸的难题。\n", + "\n", + "另外,批量规范化图层在”训练模式“(通过小批量统计数据规范化)和“预测模式”(通过数据集统计规范化)中的功能不同。\n", + "在训练过程中,我们无法得知使用整个数据集来估计平均值和方差,所以只能根据每个小批次的平均值和方差不断训练模型。\n", + "而在预测模式下,可以根据整个数据集精确计算批量规范化所需的平均值和方差。\n", + "\n", + "现在,我们了解一下批量规范化在实践中是如何工作的。\n", + "\n", + "## 批量规范化层\n", + "\n", + "回想一下,批量规范化和其他图层之间的一个关键区别是,由于批量规范化在完整的小批次上运行,因此我们不能像以前在引入其他图层时那样忽略批处理的尺寸大小。\n", + "我们在下面讨论这两种情况:全连接层和卷积层,他们的批量规范化实现略有不同。\n", + "\n", + "### 全连接层\n", + "\n", + "通常,我们将批量规范化层置于全连接层中的仿射变换和激活函数之间。\n", + "设全连接层的输入为u,权重参数和偏置参数分别为$\\mathbf{W}$和$\\mathbf{b}$,激活函数为$\\phi$,批量规范化的运算符为$\\mathrm{BN}$。\n", + "那么,使用批量规范化的全连接层的输出的计算详情如下:\n", + "\n", + "$$\\mathbf{h} = \\phi(\\mathrm{BN}(\\mathbf{W}\\mathbf{x} + \\mathbf{b}) ).$$\n", + "\n", + "回想一下,均值和方差是在应用变换的\"相同\"小批量上计算的。\n", + "\n", + "### 卷积层\n", + "\n", + "同样,对于卷积层,我们可以在卷积层之后和非线性激活函数之前应用批量规范化。\n", + "当卷积有多个输出通道时,我们需要对这些通道的“每个”输出执行批量规范化,每个通道都有自己的拉伸(scale)和偏移(shift)参数,这两个参数都是标量。\n", + "假设我们的微批次包含$m$个示例,并且对于每个通道,卷积的输出具有高度$p$和宽度$q$。\n", + "那么对于卷积层,我们在每个输出通道的$m \\cdot p \\cdot q$个元素上同时执行每个批量规范化。\n", + "因此,在计算平均值和方差时,我们会收集所有空间位置的值,然后在给定通道内应用相同的均值和方差,以便在每个空间位置对值进行规范化。\n", + "\n", + "### 预测过程中的批量规范化\n", + "\n", + "正如我们前面提到的,批量规范化在训练模式和预测模式下的行为通常不同。\n", + "首先,将训练好的模型用于预测时,我们不再需要样本均值中的噪声以及在微批次上估计每个小批次产生的样本方差了。\n", + "其次,例如,我们可能需要使用我们的模型对逐个样本进行预测。\n", + "一种常用的方法是通过移动平均估算整个训练数据集的样本均值和方差,并在预测时使用它们得到确定的输出。\n", + "可见,和暂退法一样,批量规范化层在训练模式和预测模式下的计算结果也是不一样的。\n", + "\n", + "## (**从零实现**)\n", + "\n", + 
"下面,我们从头开始实现一个具有张量的批量规范化层。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "\n", + "\n", + "def batch_norm(X, gamma, beta, moving_mean, moving_var, eps, momentum):\n", + " # 通过is_grad_enabled来判断当前模式是训练模式还是预测模式\n", + " if not torch.is_grad_enabled():\n", + " # 如果是在预测模式下,直接使用传入的移动平均所得的均值和方差\n", + " X_hat = (X - moving_mean) / torch.sqrt(moving_var + eps)\n", + " else:\n", + " assert len(X.shape) in (2, 4)\n", + " if len(X.shape) == 2:\n", + " # 使用全连接层的情况,计算特征维上的均值和方差\n", + " mean = X.mean(dim=0)\n", + " var = ((X - mean) ** 2).mean(dim=0)\n", + " else:\n", + " # 使用二维卷积层的情况,计算通道维上(axis=1)的均值和方差。\n", + " # 这里我们需要保持X的形状以便后面可以做广播运算\n", + " mean = X.mean(dim=(0, 2, 3), keepdim=True)#计算后维度为[1,channels,1,1]\n", + " var = ((X - mean) ** 2).mean(dim=(0, 2, 3), keepdim=True)#计算后维度为[1,channels,1,1]\n", + " # 训练模式下,用当前的均值和方差做标准化\n", + " X_hat = (X - mean) / torch.sqrt(var + eps)\n", + " # 更新移动平均的均值和方差,这里用到了指数加权平均,类似于动量梯度下降法中对梯度进行的操作,所以用Momentum这个变量名\n", + " moving_mean = momentum * moving_mean + (1.0 - momentum) * mean\n", + " moving_var = momentum * moving_var + (1.0 - momentum) * var\n", + " Y = gamma * X_hat + beta # 缩放和移位\n", + " return Y, moving_mean.data, moving_var.data" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "我们现在可以[**创建一个正确的`BatchNorm`图层**]。\n", + "这个层将保持适当的参数:拉伸`gamma`和偏移`beta`,这两个参数将在训练过程中更新。\n", + "此外,我们的图层将保存均值和方差的移动平均值,以便在模型预测期间随后使用。\n", + "\n", + "撇开算法细节,注意我们实现图层的基础设计模式。\n", + "通常情况下,我们用一个单独的函数定义其数学原理,比如说`batch_norm`。\n", + "然后,我们将此功能集成到一个自定义层中,其代码主要处理簿记问题,例如将数据移动到训练设备(如GPU)、分配和初始化任何必需的变量、跟踪移动平均线(此处为均值和方差)等。\n", + "为了方便起见,我们并不担心在这里自动推断输入形状,因此我们需要指定整个特征的数量。\n", + "不用担心,深度学习框架中的批量规范化API将为我们解决上述问题,我们稍后将展示这一点。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ 
+ "pytorch" + ] + }, + "outputs": [], + "source": [ + "class BatchNorm(nn.Module):\n", + " # num_features:完全连接层的输出数量或卷积层的输出通道数。\n", + " # num_dims:2表示完全连接层,4表示卷积层\n", + " def __init__(self, num_features, num_dims):\n", + " super().__init__()\n", + " if num_dims == 2:\n", + " shape = (1, num_features)\n", + " else:\n", + " shape = (1, num_features, 1, 1)\n", + " # 参与求梯度和迭代的拉伸和偏移参数,分别初始化成1和0\n", + " self.gamma = nn.Parameter(torch.ones(shape))\n", + " self.beta = nn.Parameter(torch.zeros(shape))\n", + " # 非模型参数的变量,即平均的期望和方差初始化为0和1\n", + " self.moving_mean = torch.zeros(shape)\n", + " self.moving_var = torch.ones(shape)\n", + "\n", + " def forward(self, X):\n", + " # 如果X不在内存上,将moving_mean和moving_var复制到X所在显存上\n", + " if self.moving_mean.device!=X.device:\n", + " self.moving_mean = self.moving_mean.to(X.device)\n", + " self.moving_var = self.moving_var.to(X.device)\n", + " # 保存更新过的moving_mean和moving_var\n", + " Y, self.moving_mean, self.moving_var = batch_norm(\n", + " X, self.gamma, self.beta, self.moving_mean,\n", + " self.moving_var, eps=1e-5, momentum=0.9)\n", + " return Y" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "## 使用批量规范化层的 LeNet\n", + "\n", + "为了更好理解如何[**应用`BatchNorm`**],下面我们将其应用(**于LeNet模型**)( :numref:`sec_lenet`)。\n", + "回想一下,批量规范化是在卷积层或全连接层之后、相应的激活函数之前应用的。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 10, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "net = nn.Sequential(\n", + " nn.Conv2d(1, 6, kernel_size=5), BatchNorm(6, num_dims=4), nn.Sigmoid(),\n", + " nn.AvgPool2d(kernel_size=2, stride=2),\n", + " nn.Conv2d(6, 16, kernel_size=5), BatchNorm(16, num_dims=4), nn.Sigmoid(),\n", + " nn.AvgPool2d(kernel_size=2, stride=2), nn.Flatten(),\n", + " nn.Linear(16*4*4, 120), BatchNorm(120, num_dims=2), nn.Sigmoid(),\n", + " nn.Linear(120, 84), BatchNorm(84, num_dims=2), nn.Sigmoid(),\n", + " nn.Linear(84, 10))" + ] + }, + { + "cell_type": 
"markdown", + "metadata": { + "origin_pos": 12 + }, + "source": [ + "和以前一样,我们将[**在Fashion-MNIST数据集上训练网络**]。\n", + "这个代码与我们第一次训练LeNet( :numref:`sec_lenet`)时几乎完全相同,主要区别在于学习率大得多。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 13, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "loss 0.263, train acc 0.903, test acc 0.884\n", + "37527.5 examples/sec on cuda:0\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T21:25:26.190661\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "lr, num_epochs, batch_size = 1.0, 10, 256\n", + "train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size)\n", + "d2l.train_ch6(net, train_iter, test_iter, 
num_epochs, lr, d2l.try_gpu())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 15 + }, + "source": [ + "让我们来看看从第一个批量规范化层中学到的[**拉伸参数`gamma`和偏移参数`beta`**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 17, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([1.8120, 3.0465, 3.3851, 3.3600, 1.5014, 4.1684], device='cuda:0',\n", + " grad_fn=),\n", + " tensor([-0.3901, -3.1025, 3.4992, -1.4968, 1.4242, 2.1901], device='cuda:0',\n", + " grad_fn=))" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "net[1].gamma.reshape((-1,)), net[1].beta.reshape((-1,))#取对应的参数并转为向量" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 19 + }, + "source": [ + "## [**简明实现**]\n", + "\n", + "除了使用我们刚刚定义的`BatchNorm`,我们也可以直接使用深度学习框架中定义的`BatchNorm`。\n", + "该代码看起来几乎与我们上面的代码相同。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 21, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "net = nn.Sequential(#使用nn.BatchNorm2d(特征维度)函数,pytorch会自动判断是对全连接层归一化还是卷积层归一化\n", + " nn.Conv2d(1, 6, kernel_size=5), nn.BatchNorm2d(6), nn.Sigmoid(),\n", + " nn.AvgPool2d(kernel_size=2, stride=2),\n", + " nn.Conv2d(6, 16, kernel_size=5), nn.BatchNorm2d(16), nn.Sigmoid(),\n", + " nn.AvgPool2d(kernel_size=2, stride=2), nn.Flatten(),\n", + " nn.Linear(256, 120), nn.BatchNorm1d(120), nn.Sigmoid(),\n", + " nn.Linear(120, 84), nn.BatchNorm1d(84), nn.Sigmoid(),\n", + " nn.Linear(84, 10))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 23 + }, + "source": [ + "下面,我们[**使用相同超参数来训练模型**]。\n", + "请注意,通常高级API变体运行速度快得多,因为它的代码已编译为C++或CUDA,而我们的自定义代码由Python实现。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 24, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + 
"loss 0.263, train acc 0.903, test acc 0.871\n", + "66194.1 examples/sec on cuda:0\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T21:26:03.439714\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "d2l.train_ch6(net, train_iter, test_iter, num_epochs, lr, d2l.try_gpu())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 25 + }, + "source": [ + "## 争议\n", + "\n", + "直观地说,批量规范化被认为可以使优化更加平滑。\n", + "然而,我们必须小心区分直觉和对我们观察到的现象的真实解释。\n", + "回想一下,我们甚至不知道简单的神经网络(多层感知机和传统的卷积神经网络)为什么如此有效。\n", + "即使在暂退法和权重衰减的情况下,它们仍然非常灵活,因此无法通过常规的学习理论泛化保证来解释它们是否能够泛化到看不见的数据。\n", + "\n", + "在提出批量规范化的论文中,作者除了介绍了其应用,还解释了其原理:通过减少*内部协变量偏移*(internal covariate shift)。\n", + "据推测,作者所说的“内部协变量转移”类似于上述的投机直觉,即变量值的分布在训练过程中会发生变化。\n", + "然而,这种解释有两个问题:\n", + 
"i)这种偏移与严格定义的*协变量偏移*(covariate shift)非常不同,所以这个名字用词不当。\n", + "ii)这种解释只提供了一种不明确的直觉,但留下了一个有待后续挖掘的问题:为什么这项技术如此有效?\n", + "本书旨在传达实践者用来发展深层神经网络的直觉。\n", + "然而,重要的是将这些指导性直觉与既定的科学事实区分开来。\n", + "最终,当你掌握了这些方法,并开始撰写自己的研究论文时,你会希望清楚地区分技术和直觉。\n", + "\n", + "随着批量规范化的普及,“内部协变量偏移”的解释反复出现在技术文献的辩论,特别是关于“如何展示机器学习研究”的更广泛的讨论中。\n", + "Ali Rahimi在接受2017年NeurIPS大会的“接受时间考验奖”(Test of Time Award)时发表了一篇令人难忘的演讲。他将“内部协变量转移”作为焦点,将现代深度学习的实践比作炼金术。\n", + "他对该示例进行了详细回顾 :cite:`Lipton.Steinhardt.2018`,概述了机器学习中令人不安的趋势。\n", + "此外,一些作者对批量规范化的成功提出了另一种解释:在某些方面,批量规范化表现出与原始论文 :cite:`Santurkar.Tsipras.Ilyas.ea.2018`中声称的行为是相反的。\n", + "\n", + "然而,与技术机器学习文献中成千上万类似模糊的声明相比,内部协变量偏移没有什么更值得批评。\n", + "很可能,它作为这些辩论的焦点而产生共鸣,要归功于它对目标受众的广泛认可。\n", + "批量规范化已经被证明是一种不可或缺的方法。它适用于几乎所有图像分类器,并在学术界获得了数万引用。\n", + "\n", + "## 小结\n", + "\n", + "* 在模型训练过程中,批量规范化利用小批量的均值和标准差,不断调整神经网络的中间输出,使整个神经网络各层的中间输出值更加稳定。\n", + "* 批量规范化在全连接层和卷积层的使用略有不同。\n", + "* 批量规范化层和暂退层一样,在训练模式和预测模式下计算不同。\n", + "* 批量规范化有许多有益的副作用,主要是正则化。另一方面,“减少内部协变量偏移”的原始动机似乎不是一个有效的解释。\n", + "\n", + "## 练习\n", + "\n", + "1. 在使用批量规范化之前,我们是否可以从全连接层或卷积层中删除偏置参数?为什么?\n", + "1. 比较LeNet在使用和不使用批量规范化情况下的学习率。\n", + " 1. 绘制训练和测试准确度的提高。\n", + " 1. 你的学习率有多高?\n", + "1. 我们是否需要在每个层中进行批量规范化?尝试一下?\n", + "1. 你可以通过批量规范化来替换暂退法吗?行为如何改变?\n", + "1. 确定参数`beta`和`gamma`,并观察和分析结果。\n", + "1. 查看高级API中有关`BatchNorm`的在线文档,以查看其他批量规范化的应用。\n", + "1. 
研究思路:想想你可以应用的其他“规范化”转换?你可以应用概率积分变换吗?全秩协方差估计如何?\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 27, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/1874)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python [conda env:pytorch]", + "language": "python", + "name": "conda-env-pytorch-py" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.13" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/34-\345\244\232GPU\350\256\255\347\273\203\345\256\236\347\216\260.ipynb" "b/code/34-\345\244\232GPU\350\256\255\347\273\203\345\256\236\347\216\260.ipynb" new file mode 100644 index 0000000..9cadda3 --- /dev/null +++ "b/code/34-\345\244\232GPU\350\256\255\347\273\203\345\256\236\347\216\260.ipynb" @@ -0,0 +1,666 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "GPUs.ipynb", + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "#调用GPU" + ], + "metadata": { + "id": "Pue_m2w_1_KK" + } + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "id": "MSV3_ePh3l0j", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "6cd0080c-b389-4a43-d6f7-14469cefed59" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Found GPU at: /device:GPU:0\n" + ] + } + ], + "source": [ + "%tensorflow_version 2.x\n", + "import tensorflow as tf\n", + "device_name = tf.test.gpu_device_name()\n", + "if device_name != '/device:GPU:0':\n", + " raise SystemError('GPU device not found')\n", + "print('Found GPU at: 
{}'.format(device_name))" + ] + }, + { + "cell_type": "markdown", + "source": [ + "#创建虚拟GPU(Tensorflow)" + ], + "metadata": { + "id": "cSo7ULhY2IHY" + } + }, + { + "cell_type": "code", + "source": [ + "gpus = tf.config.list_physical_devices('GPU')\n", + "if gpus:\n", + " # Create 2 virtual GPUs with 1GB memory each\n", + " try:\n", + " tf.config.set_logical_device_configuration(\n", + " gpus[0],\n", + " [tf.config.LogicalDeviceConfiguration(memory_limit=1024),\n", + " tf.config.LogicalDeviceConfiguration(memory_limit=1024)])\n", + " logical_gpus = tf.config.list_logical_devices('GPU')\n", + " print(len(gpus), \"Physical GPU,\", len(logical_gpus), \"Logical GPUs\")\n", + " except RuntimeError as e:\n", + " # Virtual devices must be set before GPUs have been initialized\n", + " print(e)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "8R9y7tYXxzUB", + "outputId": "c41fec70-b336-496b-a347-5e394d8f0b36" + }, + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "1 Physical GPU, 2 Logical GPUs\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "tf.debugging.set_log_device_placement(True)" + ], + "metadata": { + "id": "LBIDvdFBz8Qd" + }, + "execution_count": 4, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "#下载pytorch" + ], + "metadata": { + "id": "77WgvREP3UVu" + } + }, + { + "cell_type": "code", + "source": [ + "!pip3 install torch torchvision torchaudio" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "UzDofKvtUQCz", + "outputId": "74a969c7-67ca-4380-9222-30d5e4b19356" + }, + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Requirement already satisfied: torch in /usr/local/lib/python3.7/dist-packages (1.10.0+cu111)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (0.11.1+cu111)\n", + "Requirement already 
satisfied: torchaudio in /usr/local/lib/python3.7/dist-packages (0.10.0+cu111)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch) (3.10.0.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from torchvision) (1.19.5)\n", + "Requirement already satisfied: pillow!=8.3.0,>=5.3.0 in /usr/local/lib/python3.7/dist-packages (from torchvision) (7.1.2)\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!pip install d2l==0.14" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Bd9GDWfyUUW2", + "outputId": "da171369-c8d4-49d0-e1a2-4554a3e4b89f" + }, + "execution_count": 6, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting d2l==0.14\n", + " Downloading d2l-0.14.0-py3-none-any.whl (48 kB)\n", + "\u001b[?25l\r\u001b[K |██████▊ | 10 kB 20.2 MB/s eta 0:00:01\r\u001b[K |█████████████▍ | 20 kB 18.8 MB/s eta 0:00:01\r\u001b[K |████████████████████ | 30 kB 10.4 MB/s eta 0:00:01\r\u001b[K |██████████████████████████▉ | 40 kB 8.3 MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 48 kB 2.6 MB/s \n", + "\u001b[?25hRequirement already satisfied: jupyter in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.0.0)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.19.5)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (3.2.2)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.1.5)\n", + "Requirement already satisfied: jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.0)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (7.6.5)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages 
(from jupyter->d2l==0.14) (5.6.1)\n", + "Requirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.2)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.3.1)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (4.10.1)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.3.5)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.5.0)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (57.4.0)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.7.5)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.4.2)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.0.18)\n", + "Requirement already satisfied: pygments in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (2.6.1)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.8.1)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages 
(from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.8.0)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.2.5)\n", + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.15.0)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (1.0.2)\n", + "Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (0.2.0)\n", + "Requirement already satisfied: nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (5.1.3)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (3.5.2)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.3.3)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.9.1)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (21.4.0)\n", + "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (0.18.1)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.10.1)\n", + "Requirement already satisfied: typing-extensions in 
/usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.10.0.2)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (5.4.0)\n", + "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.7.0)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (1.8.0)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (2.11.3)\n", + "Requirement already satisfied: terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (0.12.1)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (2.8.2)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (22.3.0)\n", + "Requirement already satisfied: ptyprocess in /usr/local/lib/python3.7/dist-packages (from terminado>=0.8.1->notebook->jupyter->d2l==0.14) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter->d2l==0.14) (2.0.1)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (1.3.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (3.0.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (0.11.0)\n", + 
"Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.8.4)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (1.5.0)\n", + "Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.5.0)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.7.1)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.3)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (4.1.0)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (21.3)\n", + "Requirement already satisfied: webencodings in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (0.5.1)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas->d2l==0.14) (2018.9)\n", + "Requirement already satisfied: qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter->d2l==0.14) (2.0.0)\n", + "Installing collected packages: d2l\n", + "Successfully installed d2l-0.14.0\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "#多GPU数据并行的实现" + ], + "metadata": { + "id": "6FNRNv863c9w" + } + }, + { + "cell_type": "code", + "source": [ + "%matplotlib inline\n", + "import torch\n", + "from torch import nn\n", + "from torch.nn import functional as F\n", + "from d2l import torch as d2l" + ], + "metadata": { + "id": "pGsmo-kEUU6g" + }, + "execution_count": 7, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#初始化模型参数\n", + "scale = 0.01\n", + "W1 = torch.randn(size=(20,1,3,3))*scale\n", 
+ "b1 = torch.zeros(20)\n", + "W2 = torch.randn(size=(50,20,5,5))*scale\n", + "b2 = torch.zeros(50)\n", + "W3 = torch.randn(size=(800,128))*scale\n", + "b3 = torch.zeros(128)\n", + "W4 = torch.randn(size=(128,10))*scale\n", + "b4 = torch.zeros(10)\n", + "params = [W1,b1,W2,b2,W3,b3,W4,b4]\n", + "\n", + "#定义LeNet模型\n", + "def lenet(X,params):\n", + " h1_conv = F.conv2d(input=X,weight=params[0],bias=params[1])\n", + " h1_activation = F.relu(h1_conv)\n", + " h1 = F.avg_pool2d(input=h1_activation,kernel_size=(2,2),stride=(2,2))\n", + " h2_conv = F.conv2d(input=h1,weight=params[2],bias=params[3])\n", + " h2_activation = F.relu(h2_conv)\n", + " h2 = F.avg_pool2d(input=h2_activation,kernel_size=(2,2),stride=(2,2))\n", + " h2 = h2.reshape(h2.shape[0],-1)\n", + " h3_linear = torch.mm(h2,params[4]) + params[5]\n", + " h3 = F.relu(h3_linear)\n", + " y_hat = torch.mm(h3,params[6])+params[7]\n", + " return y_hat\n", + "\n", + "#交叉熵损失函数\n", + "loss = nn.CrossEntropyLoss(reduction='none')" + ], + "metadata": { + "id": "b5lKSX7KbPUe" + }, + "execution_count": 8, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#确定CPU还是GPU训练,定义梯度下降\n", + "def get_params(params,device):\n", + " new_params = [p.to(device) for p in params]\n", + " for p in new_params:\n", + " p.requires_grad_()\n", + " return new_params" + ], + "metadata": { + "id": "U95GZMaduPFw" + }, + "execution_count": 9, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "new_params = get_params(params,d2l.try_gpu(0))\n", + "print('b1 weight:',new_params[1])\n", + "print('b1 grad:',new_params[1].grad)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "tL2CN2Ggurok", + "outputId": "0a823b10-001a-4f46-fd54-d9cdedb08042" + }, + "execution_count": 10, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "b1 weight: tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n", + " device='cuda:0', 
requires_grad=True)\n", + "b1 grad: None\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "#数据累积和数据复制\n", + "def allreduce(data):\n", + " for i in range(1,len(data)):\n", + " data[0][:] += data[i].to(data[0].device)\n", + " for i in range(1,len(data)):\n", + " data[i][:] = data[0].to(data[i].device)" + ], + "metadata": { + "id": "DCxTDrH0vAo9" + }, + "execution_count": 11, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "data = [torch.ones((1,2),device=d2l.try_gpu(i))*(i+1) for i in range(2)]\n", + "print('allreduce before: \\n',data[0],'\\n',data[1])\n", + "allreduce(data)\n", + "print('allreduce after: \\n',data[0],'\\n',data[1])" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "pX9BenRNvb7X", + "outputId": "f3efd303-a61c-45e8-c08a-f88ae6abdd82" + }, + "execution_count": 12, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "allreduce before: \n", + " tensor([[1., 1.]], device='cuda:0') \n", + " tensor([[2., 2.]])\n", + "allreduce after: \n", + " tensor([[3., 3.]], device='cuda:0') \n", + " tensor([[3., 3.]])\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "data = torch.arange(20).reshape(4,5)\n", + "devices = tf.config.list_logical_devices('GPU')\n", + "split = nn.parallel.scatter(data,devices)\n", + "print('input:',data)\n", + "print('load into',devices)\n", + "print('output:',split)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "cCZXamnIwV2j", + "outputId": "2ef81fe7-d1b2-452c-9eab-198c5fe4b4ad" + }, + "execution_count": 13, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "input: tensor([[ 0, 1, 2, 3, 4],\n", + " [ 5, 6, 7, 8, 9],\n", + " [10, 11, 12, 13, 14],\n", + " [15, 16, 17, 18, 19]])\n", + "load into [LogicalDevice(name='/device:GPU:0', device_type='GPU'), LogicalDevice(name='/device:GPU:1', device_type='GPU')]\n", + "output: (tensor([[0, 1, 2, 3, 4],\n", + " [5, 6, 7, 
8, 9]], device='cuda:0'), tensor([[10, 11, 12, 13, 14],\n", + " [15, 16, 17, 18, 19]], device='cuda:0'))\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "#将数据分配到各个GPU上\n", + "def split_batch(X,y,devices):\n", + " \"\"\"将x和y拆分到多个设备上\"\"\"\n", + " assert X.shape[0] == y.shape[0]\n", + " return (nn.parallel.scatter(X,devices),\n", + " nn.parallel.scatter(y,devices))" + ], + "metadata": { + "id": "vUV8uEEfysoN" + }, + "execution_count": 14, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#小批量训练\n", + "def train_batch(X,y,device_params,devices,lr):\n", + " X_shards,y_shards = split_batch(X,y,devices)\n", + " #在每个GPU上分别计算损失\n", + " ls = [loss(lenet(X_shard,device_W),y_shard).sum() for X_shard,y_shard ,device_W in zip(X_shards,y_shards,device_params)]\n", + " for l in ls: #反向传播在每个GPU上分别执行\n", + " l.backward()\n", + " #将每个GPU的所有梯度相加,并将其广播到所有GPU\n", + " with torch.no_grad():\n", + " for i in range(len(device_params[0])):\n", + " allreduce([device_params[c][i].grad for c in range(len(devices))])\n", + " #在每个GPU上分别更新模型参数\n", + " for param in device_params:\n", + " d2l.sgd(param,lr,X.shape[0]) # 使用全尺寸的小批量" + ], + "metadata": { + "id": "FArYLHj20VST" + }, + "execution_count": 15, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#训练函数\n", + "def train(num_gpus,batch_size,lr):\n", + " train_iter,test_iter = d2l.load_data_fashion_mnist(batch_size)\n", + " devices = [d2l.try_gpu(i) for i in range(num_gpus)]\n", + " #将模型参数复制到num_gpus个GPU\n", + " device_params = [get_params(params,d) for d in devices]\n", + " num_epochs = 10\n", + " animator = d2l.Animator('epoch','test acc',xlim=[1,num_epochs])\n", + " timer = d2l.Timer()\n", + " for epoch in range(num_epochs):\n", + " timer.start()\n", + " for X,y in train_iter:\n", + " #为单个小批量执行多GPU训练\n", + " train_batch(X,y,device_params,devices,lr)\n", + " torch.cuda.synchronize()\n", + " timer.stop()\n", + " #在GPU0上评估模型\n", + " animator.add(epoch+1,(d2l.evaluate_accuracy_gpu(lambda x: 
lenet(x,device_params[0]),test_iter,devices[0]),))\n", + " print(f'test acc:{animator.Y[0][-1]:.2f},{timer.avg():.1f}s/round,'\n", + " f'at{str(devices)}')" + ], + "metadata": { + "id": "Jo9d0YR33Asa" + }, + "execution_count": 18, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "train(num_gpus=1,batch_size=256,lr=0.2)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 279 + }, + "id": "Mb5BTgM-5jir", + "outputId": "0ef5798e-ca4f-47d6-c6bf-d1c43acbf11f" + }, + "execution_count": 19, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "test acc:0.83,9.5s/round,at[device(type='cuda', index=0)]\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n" + }, + "metadata": { + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "#利用pytorch框架实现" + ], + "metadata": { + "id": "31gYmNceH_fl" + } + }, + { + "cell_type": "code", + "source": [ + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l" + ], + "metadata": { + "id": "sUuKGN8n5o_h" + }, + "execution_count": 21, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + 
"#搭建ResNet18模型\n", + "def resnet18(num_classes,in_channels=1):\n", + " \"\"\"经过修改的ResNet18模型\"\"\"\n", + " def resnet_block(in_channels,out_channels,num_residuals,first_block=False):\n", + " blk = []\n", + " for i in range(num_residuals):\n", + " if i == 0 and not first_block:\n", + " blk.append(d2l.Residual(in_channels,out_channels,use_1x1conv=True,strides=2))\n", + " else:\n", + " blk.append(d2l.Residual(out_channels,out_channels))\n", + " return nn.Sequential(*blk)\n", + "\n", + " #该模型使用了更小的卷积核、步长和填充,而且删除了最大汇聚层\n", + " net = nn.Sequential(\n", + " nn.Conv2d(in_channels,64,kernel_size=3,stride=1,padding=1),\n", + " nn.BatchNorm2d(64),\n", + " nn.ReLU())\n", + " net.add_module(\"resnet_block1\",resnet_block(64,64,2,first_block=True))\n", + " net.add_module(\"resnet_block2\",resnet_block(64,128,2))\n", + " net.add_module(\"resnet_block3\",resnet_block(128,256,2))\n", + " net.add_module(\"resnet_block4\",resnet_block(256,512,2))\n", + " net.add_module(\"global_avg_pool\",nn.AdaptiveAvgPool2d((1,1)))\n", + " net.add_module(\"fc\",nn.Sequential(nn.Flatten(),nn.Linear(512,num_classes)))\n", + " return net" + ], + "metadata": { + "id": "gq2zpt1PCu1V" + }, + "execution_count": 26, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "net = resnet18(10)\n", + "#获得GPU列表\n", + "devices = d2l.try_all_gpus()" + ], + "metadata": { + "id": "fMJrTQBVFaf5" + }, + "execution_count": 27, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def train(net,num_gpus,batch_size,lr):\n", + " train_iter,test_iter = d2l.load_data_fashion_mnist(batch_size)\n", + " devices = [d2l.try_gpu(i) for i in range(num_gpus)]\n", + " #初始化网络\n", + " def init_weights(m):\n", + " if type(m) in [nn.Linear,nn.Conv2d]:\n", + " nn.init.normal_(m.weight,std=0.01)\n", + " net.apply(init_weights)\n", + " #在多个GPU上设置模型\n", + " net = nn.DataParallel(net,device_ids=devices)\n", + " trainer = torch.optim.SGD(net.parameters(),lr)\n", + " loss = nn.CrossEntropyLoss()\n", + " timer,num_epochs = 
d2l.Timer(),10\n", + " animator = d2l.Animator('epoch','test acc',xlim=[1,num_epochs])\n", + " for epoch in range(num_epochs):\n", + " net.train()\n", + " timer.start()\n", + " for X,y in train_iter:\n", + " trainer.zero_grad()\n", + " X,y = X.to(devices[0]),y.to(devices[0])\n", + " l = loss(net(X),y)\n", + " l.backward()\n", + " trainer.step()\n", + " timer.stop()\n", + " animator.add(epoch+1,(d2l.evaluate_accuracy_gpu(net,test_iter),))\n", + " print(f'test acc:{animator.Y[0][-1]:.2f},{timer.avg():.1f}s/round,'\n", + " f'at{str(devices)}')" + ], + "metadata": { + "id": "xTRfsoXYFjEx" + }, + "execution_count": 28, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "train(net,num_gpus=1,batch_size=256,lr=0.1)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 284 + }, + "id": "NlMNxz1fG3yc", + "outputId": "94bc35e3-f58c-4eb9-98e8-297aee10ba7c" + }, + "execution_count": 29, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "test acc:0.89,88.5s/round,at[device(type='cuda', index=0)]\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n" + }, + "metadata": { + 
"needs_background": "light" + } + } + ] + } + ] +} \ No newline at end of file diff --git "a/code/36-\346\225\260\346\215\256\345\242\236\345\271\277.ipynb" "b/code/36-\346\225\260\346\215\256\345\242\236\345\271\277.ipynb" new file mode 100644 index 0000000..9b72aac --- /dev/null +++ "b/code/36-\346\225\260\346\215\256\345\242\236\345\271\277.ipynb" @@ -0,0 +1,5165 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "de7dbe44", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['/Users/khador/Documents', '/usr/local/Cellar/python@3.9/3.9.0_2/Frameworks/Python.framework/Versions/3.9/lib/python39.zip', '/usr/local/Cellar/python@3.9/3.9.0_2/Frameworks/Python.framework/Versions/3.9/lib/python3.9', '/usr/local/Cellar/python@3.9/3.9.0_2/Frameworks/Python.framework/Versions/3.9/lib/python3.9/lib-dynload', '', '/usr/local/lib/python3.9/site-packages', '/usr/local/Cellar/protobuf/3.14.0/libexec/lib/python3.9/site-packages', '/usr/local/lib/python3.9/site-packages/IPython/extensions', '/Users/khador/.ipython']\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-27T10:42:20.054235\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "import torch\n", + "import torchvision\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "import sys \n", + "\n", + "print(sys.path)\n", + "\n", + "d2l.set_figsize()\n", + "img = d2l.Image.open('/Users/khador/Documents/img/testphoto.jpg')#输入自己的文件路径\n", + "d2l.plt.imshow(img);" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "a806246b", + "metadata": {}, + "outputs": [], + "source": [ + "def apply(img, aug, num_rows=2, num_cols=4, scale=1.5):\n", + " Y = [aug(img) for _ in range(num_rows * num_cols)]\n", + " d2l.show_images(Y, num_rows, num_cols, scale=scale)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "e606b61c", + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-26T15:49:11.356699\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "apply(img, torchvision.transforms.RandomHorizontalFlip())#水平方向随机反转" + ] + }, + { + 
"cell_type": "code", + "execution_count": 5, + "id": "e6b43845", + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-26T15:49:16.350504\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "apply(img, torchvision.transforms.RandomVerticalFlip())#垂直翻转" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "aeeba831", + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-26T15:49:18.943420\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "shape_aug = torchvision.transforms.RandomResizedCrop(\n", + " (200, 200), scale=(0.1, 1), ratio=(0.5, 2))#随机取部分并调整至规定大小\n", + "apply(img, shape_aug)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "906b4c1e", + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-26T15:49:27.433022\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "apply(img, torchvision.transforms.ColorJitter(\n", + " brightness=0.5, contrast=0, saturation=0, hue=0))#亮度" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "4085ef25", + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-26T15:49:35.973680\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "apply(img, torchvision.transforms.ColorJitter(\n", + " brightness=0, contrast=0, saturation=0, hue=0.5))#色相" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "156df2ee", + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-26T15:49:45.854585\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "color_aug = torchvision.transforms.ColorJitter(\n", + " brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5)#多个参数一起调\n", + "apply(img, 
color_aug)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "c382f668", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-26T15:49:53.409244\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "augs = torchvision.transforms.Compose([\n", + " torchvision.transforms.RandomVerticalFlip(),\n", + " color_aug, shape_aug])#所有策略一起使用\n", + "apply(img, augs)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "06efdc2f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloading https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz to /Users/khador/Documents/data/cifar-10-python.tar.gz\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100.0%\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Extracting /Users/khador/Documents/data/cifar-10-python.tar.gz to /Users/khador/Documents/data\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-27T10:48:35.051761\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "all_images = torchvision.datasets.CIFAR10(\n", + " train=True, root=\"/Users/khador/Documents/data\", download=True)\n", + "d2l.show_images([\n", + " all_images[i][0] for i in range(32)], 4, 8, scale=0.8);" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "14a819bd", + "metadata": {}, + "outputs": [], + "source": [ + "#训练集做数据增强,测试集不用(但命名仍带‘augs'注意区分)\n", + "train_augs = torchvision.transforms.Compose([\n", + " torchvision.transforms.RandomHorizontalFlip(),#这里只做水平翻转\n", + " torchvision.transforms.ToTensor()])\n", + "\n", + "test_augs = torchvision.transforms.Compose([\n", + " torchvision.transforms.ToTensor()])" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "e68c53d2", + "metadata": {}, + "outputs": [], + "source": [ + "def load_cifar10(is_train, augs, batch_size):\n", + " dataset = torchvision.datasets.CIFAR10(\n", + " root=\"/Users/khador/Documents/data\", train=is_train,\n", + " transform=augs, download=True)#augs是之前定义的数据处理方法\n", + " dataloader = torch.utils.data.DataLoader(\n", + " dataset, batch_size=batch_size, shuffle=is_train,\n", + " num_workers=4)#num_workers可以稍大些,数据增广计算量可能很大\n", + " return dataloader" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "3f786de4", + "metadata": {}, + "outputs": [], + "source": [ + "#训练函数\n", + "def train_batch_ch13(net, X, y, loss, trainer, devices):\n", + " \"\"\"用多GPU进行小批量训练\"\"\"\n", + " if isinstance(X, list):\n", + " # 微调BERT中所需(稍后讨论)\n", + " X = [x.to(devices[0]) for x in X]\n", + " else:\n", + " X = X.to(devices[0])\n", + " y = y.to(devices[0])\n", + " net.train()\n", + " trainer.zero_grad()\n", + " pred = net(X)\n", + " l = loss(pred, y)\n", + " l.sum().backward()\n", + " trainer.step()\n", + " train_loss_sum = l.sum()\n", + " train_acc_sum = 
d2l.accuracy(pred, y)\n", + " return train_loss_sum, train_acc_sum\n", + "\n", + "#@save\n", + "def train_ch13(net, train_iter, test_iter, loss, trainer, num_epochs,\n", + " devices=d2l.try_all_gpus()):\n", + " \"\"\"用多GPU进行模型训练\"\"\"\n", + " timer, num_batches = d2l.Timer(), len(train_iter)\n", + " animator = d2l.Animator(xlabel='epoch', xlim=[1, num_epochs], ylim=[0, 1],\n", + " legend=['train loss', 'train acc', 'test acc'])\n", + " net = nn.DataParallel(net, device_ids=devices).to(devices[0])\n", + " for epoch in range(num_epochs):\n", + " # 4个维度:储存训练损失,训练准确度,实例数,特点数\n", + " metric = d2l.Accumulator(4)\n", + " for i, (features, labels) in enumerate(train_iter):\n", + " timer.start()\n", + " l, acc = train_batch_ch13(\n", + " net, features, labels, loss, trainer, devices)\n", + " metric.add(l, acc, labels.shape[0], labels.numel())\n", + " timer.stop()\n", + " if (i + 1) % (num_batches // 5) == 0 or i == num_batches - 1:\n", + " animator.add(epoch + (i + 1) / num_batches,\n", + " (metric[0] / metric[2], metric[1] / metric[3],\n", + " None))\n", + " test_acc = d2l.evaluate_accuracy_gpu(net, test_iter)\n", + " animator.add(epoch + 1, (None, None, test_acc))\n", + " print(f'loss {metric[0] / metric[2]:.3f}, train acc '\n", + " f'{metric[1] / metric[3]:.3f}, test acc {test_acc:.3f}')\n", + " print(f'{metric[2] * num_epochs / timer.sum():.1f} examples/sec on '\n", + " f'{str(devices)}')" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "c1332aca", + "metadata": {}, + "outputs": [], + "source": [ + "batch_size, devices, net = 256, d2l.try_all_gpus(), d2l.resnet18(10, 3)\n", + "\n", + "def init_weights(m):\n", + " if type(m) in [nn.Linear, nn.Conv2d]:\n", + " nn.init.xavier_uniform_(m.weight)#xavier初始化\n", + "\n", + "net.apply(init_weights)\n", + "\n", + "def train_with_data_aug(train_augs, test_augs, net, lr=0.001):\n", + " train_iter = load_cifar10(True, train_augs, batch_size)\n", + " test_iter = load_cifar10(False, test_augs, batch_size)\n", + " 
loss = nn.CrossEntropyLoss(reduction=\"none\")\n", + " trainer = torch.optim.Adam(net.parameters(), lr=lr)\n", + " train_ch13(net, train_iter, test_iter, loss, trainer, 10, devices)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2d1d2b90", + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-01-27T15:25:31.456858\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "train_with_data_aug(train_augs, test_augs, net)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1f39bec9", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + 
"kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git "a/code/37-\345\276\256\350\260\203.ipynb" "b/code/37-\345\276\256\350\260\203.ipynb" new file mode 100644 index 0000000..f3441e0 --- /dev/null +++ "b/code/37-\345\276\256\350\260\203.ipynb" @@ -0,0 +1,2250 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "-" + } + }, + "source": [ + "# 微调\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "%matplotlib inline\n", + "import os\n", + "import torch\n", + "import torchvision\n", + "from torch import nn\n", + "from d2l import torch as d2l" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "热狗数据集来源于网络" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 7, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "d2l.DATA_HUB['hotdog']=(d2l.DATA_URL+'hotdog.zip',\n", + " 'fba480ffa8aa7e0febbb511d181409f899b9baa5')\n", + "data_dir=d2l.download_extract('hotdog')\n", + "\n", + "train_imgs=torchvision.datasets.ImageFolder(os.path.join(data_dir,'train'))\n", + "test_imgs=torchvision.datasets.ImageFolder(os.path.join(data_dir,'test'))\n", + "#下载训练集和测试集" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "图像的大小和纵横比各有不同" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 9, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { 
+ "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAn4AAACqCAYAAAAz4EJwAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9abBkaXrfh/3e5Wy5Z961bu1VXb2v092zAgNiBiBIACQACiQlUWEFHZS/OBwOfVCEHWGFwxH2B+uL5bDssEISFSQkUpRMQCQNkiAIkMAAo9kwM90903vXfuvueXM927v5w7k90wMOaFR3A6DI/EVU3ax7T2beevNknud9nuf/f0QIgRUrVqxYsWLFihX/+iP/pH+BFStWrFixYsWKFX88rAK/FStWrFixYsWKf0NYBX4rVqxYsWLFihX/hrAK/FasWLFixYoVK/4NYRX4rVixYsWKFStW/BvCKvBbsWLFihUrVqz4NwT9MAcncRS63TbeB6JIU1WGqjKAp9VKUVIghURHGuc8zlmqypBlCdY6nHNkWYp3nhACCIFzDggE55Ei0E4VkRJ45/Ah4FzAolhWFodCCgkCQqC5b/BIqZBS4ryHEJBS0u10ybIMBJz9dfb1X25fEwIcHR0yn8/Ev/TAFX8spGkvdDtbZ69c89pJCVJKIOB9890kFgxGKfL9M1qcnSSAQDQ3z15RQSAEgTee+SQnX5b48P7L/f6BojlOCISUxLEmSZOzcwx0pM8e15/9Ls1TfhDRPMzZ7y543zpJiLPvfPDcFOH79w/NMW+88Z3jEMLGx7WWK1asWLFixUMFfp1Oi5/9yR+lnUTMc8PXvv0d2lnKM09cZ3N9RK/dptvKUEoxGHXodDN0pEjTNlmaESlFmkUIBVoFpEhACu6/eZP7X/8Vrj16laQ/YnqwSzG+T2d9B9dfp9NZY5lXhGSDwrWYL3MmswXT+ZKTk1MODo7xaE6nC4wQIGE2m/DX/tpfY9Bbh9BcgUMAAjjrcc5Q1SUAkY7QWiOEIAD/8X/8v//4V3rFh6LT2uBnf+L/jJACgUTH0OklKC0xdQVC0ulIfvGvPMr29fQsuPIAeOtRXiOcQBQWjyO0I4QKoBXBO5gqXv2tt/jNf/Qtjicl1joQkiAkWmh8kEgtkUoSdVpcffwC2+fWqSvBcNjGB9BxRJJoBB4lzwJOIZEKvG8CVR+aYDP4QJxKjHXoSOK9R2tJFEuEcsRKI0JAKXjhEzfu/Mmu/ooVK1as+NeNhwr8Iq146rGLdDstTB34+T/3Y5zb2aLdTkiTFC11c+FD4oPFuQQpBQ6PCgKpBAKIVMAEmOzPufval4iqXda22tRmSuQkvYHCVi2y4Ra1TJFasbE2wMuI9Z0roDSBgBACJSQ+BGprMM5SV4bp6ZLxyZxOp4WLPWXtME7ivMB4z9HhhP3DA+q6ZDad8uDBfeoqp9VqoSLNdDr5o1jrFR+GEAjGETQIrUALrPP4YPDWkSjHF37qKpuPpDhrmoywBCyIWhCqgKsc7N0iWIO6fAOXKkg9MhIwCDz3px9h68KIX/u7X+add/exvnkM4z0g8RaEjSjrCW98q2T/7h5PvXADrbp4BzKAdw6pBNa9f16e5Qy9Q0WKSDbBnlAS8ESRBiFR2qBlIJEgpaCyHoJAOfcnvPArVqxYseJfRx4q8FtfX+PnfuHPEYkYH8BTo4QmBI+QEoHE0SQ8rBLcvH/KO+/e40c++QStJEMGi9IwGU9487d+lcnNb3FpZ4N0cxMfMlrtmDjuszgco5yBVBKFBIWmKGviRHPvrW/QW9+ks3GBNG7jnMdbiIQmilok2tFpt9g5P0JJTXABS1MWNpVlUWi6KiISLb7xzdu8/tabLIsKYwpiLegPetR19Uez2isemkDAe4urHAJNKtpoDWnsyWvD858+z41PDnHKgQsIBSIIRCEIc4uXFVGZU7zxKogIIdfQwzZ2LSN4
gVcemcHms2v8xe2f4lf/9j/j1a/fxBFhfI6WGkJT0kVKbLXg9Njyu7/9Bo89NeGZp68jnMLYgJIKKQXBN60M3ge0VijdlJe1Bu8CUgicdwQcaSpIYomWAucFwgfq2qHSh3prrlixYsWKFX8oHurqIoQEoXEagvcIn+KFR0hBCAKEOKtyOWSIsZXj0RtX+S//u9/i4rkhOo55rC958NW/T8eesjYShJ5AqApfS2wJUs0p8imtQQ9jAlkrY5kXxFmM0AqZtDi49xbSFXQeeZnT6SknC8ud3ZwrFzpsDdtomSJkwAcHyhN5kEoi25pWS7K5lvLsY2v89OevMj79MW7eO+CdWwd89817HJ6Mv9ezteJPnhAC1lms80jh2NrMePL5EY8+cY5/+svf5NnPbuG1QXuFjCTCC1iCP84xxxP00GAOD+HdXUy3i81uEcYZLXGVsJahohghPC4yxOfg5//qF1lbG/Clf/oK1oG3FUJqPILgFaDwdY1wljdeqTg9WfLJzz1NK05wziKkQsmmn6+uA1J5VKRwZ6Ve5wORlGglETKAFzgbsHiEkEghyFJFYJXxW7FixYoVHz8PFfgFYDwX/Mrf+00+85nHePrGxSbgQyCFQgiBEIHgFQrHE49ssH8048WnLvNjn32Ud195g7d+9ZcYRlO6/YyoPyTLBkzvHYMu6Oyco8zHIB0hbYGMv9dDpZSmrErSuINrt5ksCvK7dzg4cbz7nTcIdsx3frfiJ37ix6kmM8xsThwJ+lsjBjsbZL0O2gW89ORFIG1FOJvR7cW89PQGn37+SQKWxbLi5/79d/5oVnvFQyOkBBkIAra2E/7iv/8k/csR5dTQ6rUYtSLC26eISU6lFIKUUBn88TFRvqS+vYcqDxGRZuKgPrhFq9/DBEO/uEEYACOJRyClRg4DP/5vvcRw1OUf/8qXOZ17XAho5TE+NH2i3qNEjK1qdu8d8k9+9ct85jNPc+XqBaraIQg4DyDQSmCNBwXgSWJJrEEIjw9NYCsCKNUEizpSGGMIYjVDe8WKFStWfPw8VOC3zCv+k//s73P33gG39yb8O7+Y8tQjm0jEmVjWNyUxYYGEyaLg1u6Up564TLmo+O6v/T1GUU46SIiGLeLeCKEjuhsdTO7Yf2uX7noH6SFEKd54nKrIi5zYRsRRQhkWqChFCYc/vU28/w7PDr+OLwztBxvc/JUTYulQWiHjFsff9lgl2XzyBo//yKe4f1Dztd+7TX8Qc/nKFuPTGZ98egfvAkHG6DSQxNEfyWKveHiC9xih6MQ1X/ipy/S2FL6G+b2C9VlO+Y++hTpe4jOIsk3EIMCmIz4XMblZUD+YoPbu4GWHcq1NWedI50lcxun9t+lynSgdoBIIKuDjAG3Hs194jHanxd//W/+Mk2mNlR7hZaP09eBlU4JWdcAXCV/6rdcZjye88InrVJXGI0kiRfAW4yTx+wrfIPDeIwR4JB7QogkoQ4A8r9BaoNRKVL5ixYoVKz5+Hirw00qRlzl3734Xpa7xu195hWce+QmCCyAC4ACJEJLf+cqbvHtrj/3xhJSnufOVf0KnPiBbk3S7LbLBOlE/Q+g2Mo4pxzOW0zH57Ije9iYpEUIKqnyfyD7AlxGmdwOpO8g4ojh8wPLwt0mSu1iTcO/NIZQJkcqJpEaJgPIFBotcGA6+9nVOd+/zwi/8Aj//sy9wuJsznRyxUR3y3X/4CrVdcPXFT7J28TqB1UX3XxkEtFLBj//pyzz36e1GOWsdW7akXZ+Q3OlD0qWY3UW2UmJlsKdHzJdLjg8XOGKW04B1h8TtDiGLqL2iNHPK47t4UzKInkCu9SACqRVBQmjBI5++xF9Kf4pf/lu/yd7hHOcMeEcQCi/C9+xclssZQkV8+/duM5lU/MiPPoPA44PAO0+sJHgQSmFtYzfknMMTcAGUOJOb+0AURzjnCNb/Sa/8ihUrVqz415CH6/GTkkE35ulnnsMJuLBznuNxzmgQI1HNwwmBk3Dl
0hYvv3CNybziH//S32R0+i26fei2UpJWio5TtGiDLbD5FGdKkihlfDqleyHFSwH1CdK+g1vuIWWGMyd4WtSuopzfopXMyWeOB291Ua5PkjiUCMgQUCJBYFG1w5UFoa6ZTk95VQuuffGLLO++B8UuJtV0Lmbk9zy//J/95/zEv/dvI/7/eP2t+GNECJ680eELf/YGIjWN0EJA+e5t0tkpYX0TU0pqGbCzO5TLiqKeM68CpYlx1mPP71BWNdPJGApPX/cJzmCtYXH/u+TFCecefRG1NsSTITPwykMqufziRf5i/Gf4lb/+j7l7eIprarg449FCY4NDJgJvHcEL7tw8ZDH/Bp/7sWdopSnOhjPrIvDeo5REBJCh6YsVwSFRje2gsATX/B+9X20+VqxYsWLFx89DBX6HRzM+9fJjbK8P+Rt/57f527/8z/nyVwf8r//az7K91kYHSRAeEQQ757o4H2hnmnh+QD8xxJ0+JAkqTZDKE/Ily9N72GVJuSgoaof3ARVrYpaE4h2q6hhhu4TYof19lDd4swQPs5nj/qtdYhEjoxIVFLEEpSUBR/AQggMskQxURc2Db36bVrZk47HHsd1N6koSK4Vf92TtmC/9j38P/Kqx/l8VhBCcH3bxezOitQyrBTISJJ021WyCFAfk7Q2WOqY4uo10U0oiRG+D6WJG7TwXnn6Cp248wt33Xme8e5fJ3gmLdEar10F4MPm7uFnJzgs/gk41JBKFAgVeenaeXOcX/spP8Lf+619lf5wjpIPa41TSiD6cJAQNpqJwFUfB8Ou//k0+/6NPszbqUVqHJkKKgLEOczYwR0iHVJKqssSxRCmFEB68QIjVObhixYoVKz5+Hkq+WpY5X/nam9ReUywrbr7zJq+8/jp//Zd+DS/AuwqsB2sIKKRWfPd3vsRIHKK7iiR1tFptTo4ryqVhdnSXyZ0xs4Mlp6c5y8rSWuuRjrZw9QxpdzF1jhenSBXhaWOFJARDCIL92xryFioIVMjQOKRU4AMagfIO5SqU9AQR0JFA2SVH+ydUpma2WOB9xSyfEsXQizSzo32K5eKPar1XPCQCaLuKsDsmHBaIKuCNRV3fYqYcp9N74Euifgd669gkwzjLYrHPPD/BhMBwPSPOpgwHgf5aj7VL11jkcLA/Jl8WzMuSB/vvcO87X6XYO8QW4J0lGPDW4pVj5+lNfuEvf4GNXoq3DiHBe4NSHuFrJAaBQwaJN4JiWvKVL73D8fEpUkm8t4jg8LXAVAHrA1IGtKTxHkTgnCcgsL6xPVqxYsWKFSs+bh7q8qK0wvmAs0vW19tceewxYuW58cRlvvbVt/jsi4/z9s0jusOEr3z920SmxL7+z+kmnrg3JBr2UaMNDm6/RZbA5M4pRydzCBKhAyfTiiuD8+h2B7+8i9QGKTK8KknSy9TFu4ggcT6izCXFGFo6oNAkOsI5g44FQTUTF7CegMf5AN4hcSipKE/GlJMx7d46SkfUtUJpQXvQYTBrUS2P/qjWe8VDIgS0Y5CVwecWugFJgPUB+dYAf+cB6YWKjR9/ls32M9z/xu/i3/wuaSQwqo1MRyRRyezebczcEiWKy48+yblr17n51psc3r1JZ7RFFWD37utoqTinP4PY7hO0QyCQXuCl49oLF/nZ8kf4lb/zm8wWDksgeINAIrTEOwhCQl3jjeHUOX7ndzyPPXOZCzub6FZARxpwBBEIQWK9a5TwViLwNJrg961jVqxYsWLFio+Xh5zVm/BzP/NZtBF89uUneXBwSrmc40pHmZf82lff5G/8zS/xM1+4SiTblG/8BpvJCe1eCx0nKKmp5hN2OhHvvHqXvZkmC55+x3E6i8mrgFQRcdSiDjnB5ziRk4gOvnoXKWYImRGCJ7gILTU+aPLKYuoJWlpQI5KoaOb4qoi6qsE36lB8QAmB9++LUSQ6TvEuoH1N5HIymaNXNn7/yiCEAC0QWhMqjyhs0wPXUmx89kX2igmDz2/CpQyiNpd+8ke4
G6Xc+cZ3WFQp57Z6WLOkzufEaQdXgqDg3JVzSB7n5N4DfB2TdDv4vOD+e6+iWh3W7dNE20OIgNCMcfNR4PFPXeXP15/nH/x/vsSscNQhELzHBwvCQ9NkgJAJvq6YTU957ZuOk7HluWe3ybQlCNBNU1/zf6SZOyx1QKJZlrY5X1esWLFixYqPmYcc2SbZ293nsSvneHJ9xJOP7VAuFnzn5h1ee3fO773yBj/62Uf5zGde5tv/7B9z+UaH+nTGIy88zXS+IBKBYnLK3u6YN+5aHli40YILOzGTpUFJSX9bY80CJWvMEmId0METiSOsEljrqCpHuxOQ0uDzEqUTsu4IoRXCQHV6DKEm7g9xpm4u2h+YlQrNNVdpTfCgk4zFfMZos4vLFWIV+P0rg0Dgg0YKiS1zGFeItR6+8nSfv8L0+DJypPEqwQuLSCou/MTz9M9f5c5X73M63uXd+V10sAzXA1IKTvbfJQTH7js3kb7g9GSfzfYjtAabHN99h92b36E2FReil5FrQ0Sim36/yoMKPPHJqyymc/7pP/oqwSisCARb46VCKIEINSJIhEvxhcGzZHd3nzjRPPPoBu1MEqeN56UHgvMQFM5CwBNriFal3hUrVqxY8UfAw11eRCBSlkcfGZHPAWHpnh/xzI1L/Kf/za/zY59+jn7f8kv/xd/hE63blL2S7kafvCpJ0gyKJcvDE3aPDGMnkc4y6MJ0Ap0Yzu8MGWyew9YOZStc7dFtkGqJE2kzYcFJvDd4ExGsQmlNnCXoqPl9ZNIC2cfWc4wvCUJQ1QXOAkIhhTgz61X44ImVwpY1WZCMpxPavTZCfTxlNiH+8C68QogP3P7erT/kfc9uhOYu4Yc+a/hDPN4fdEz44Y8pGl/ifqdFp5tC8AQf8N7hjMF6WBSW2gWstR9OpioCS+MIcYIwFj9bIFsKohi/1eLcjz+PjyYQPNgZVLexQtA536c1qJlPlhSF4tz5y9RmSZmPkbnj5jtfZXlaYjzUpqaoQWtNb7TO+HifYEriKGHtsedR613IBNIHCJqgPJ/6sWeZTef8zpffwZpAwJJmCbV3BCvwoUR4iVSKuqxQTLn1tqSqBM88tcZICKQIOARaBQhgXSBJBYmS+FXGb8WKFStW/BHwUIFfr5PyzBPXee3NfX71115jYxj4D/4XP8v+eMIsN+wMW/z8T32a+L1vk9oxabfN+oWrCO0IwVKWJbO5YX9qKVxMK7IM+y2OxyWnC0t/YAnxGlEkoRri5T2C9ahEIqWDEOFMhQyKcipIdBclNWkcE7xr/NKcIeiIOFrHOoswJVoaXKgw1iCzBBEp8BqkwpRLcB7vA8HkWNsEhR8XjVKzmcoQzqKnD/77/YDv/dtSCpRSKNXMfX3/+Pfv88H7f/CrEBD82eMJgYCzLGcTzFlv0SpqioqhERbEWtLVGi0VSQRbaYQNjjvznMNl40MnsDjb+NV5Ako16dBIKDrtiF/4Uy/zyU/eQMUBUefMJ1OEXVAcjZlNJhwtNX/9n9/58AsY4OBojntyDRUFwrImzKfIbITIPbI3pJ4fIKJjpF/g7RzhPaE6YLTume4HFpWlmi+wbo43UC5qYpVCK+BbXZJWoKqnLF3FzlafvJgxnR2T7L6FinsM/VWirT5SeLz3eGeRkePzP/k8e3tj3nj7ECEMvpqDlwgxIuARqiT4CFMLPAVIuHcPgs/59AtXSaNAIKC0wOPROjT9fiJ877VfsWLFihUrPk4eKvCbTkv+j//X/w4lFCpVvPX2hL/0iz/GYE3xuac3ePTGZV7/3d9Fze7RXQ+0exnoQDbcpJifUuYl+5OaqU8hLDnXiagWNXmtWRqB7LVAeKK4j+hu4fMY75dAQCpwziGlx5WB2Z6AyhK8xmtP0kpANvYbcZwSgsTXElyBdwEhNC44kAGdxqg4RgSPcHWTFbSeoxNPb83zcdZ6fzCT9y8GcL//OO8DITicazzftFY/EOD9sMdtBqd4EAIfApLGc1EC
QUqCM0TI5nlloKM1l4ZtWsLTywSd9SFXrp1nMExRUcr09JDf+NJ3eeV2jgkRUCGFQEtBrDTdXsbF9XVeeHyT55+7gNYlmAJXBHw+w5sSnfXInGEoDe3kw2evBIKj05rJ4TFrfYVqJYRFTugu8IsI2cpQnW3CcoYTS6QtEDYg8xn1eEqsYnqdIcPtq7z97d9AuIoo28AHiakKfDAICbLM8Ykgzbqsnxtx9+3bzOYTkvEercE5VKeD6jR2K0JCqGuyDH7m5z7F4X/56xydGHwNCAXSNa+Pq5osqAh4qymWjhR4sOv5bjvj+cc3kSpgnaSuHTqWxPFZBlesAr8VK1asWPHx81CBX1GW/Mhnn2EysSx9zTe/fMKbr9/nsy/d4Kc+9xx3797k5ld+jY12SdbPaG9skfX66LiFCFN8adifOKY1nOtqrm10yBcFpa3wlSHqdImiDlEcsZxU+BDwBpRIidIIUVd4C/mpJFQxEkEcRygpEMEjhSQSMcJanK3xZQGiCaJM3fiiSS2J2ykqjlEypvYOrR12uaScCipT8hAV2n8pHwzOpGzKd9/P0H0/0/dBPvhv7wN1bc+ygBopm6kPH0QqAQSEkHjfTJNoSrNnzxUEQkmckGykMZs9sMahQ02mM5578RlaWx3StEWURPigONfr8mfbEfU/fIXcxKxtaNZGivWhZLgG2sZQ7vDIo+eR0pDKNmW1xLkFOlHU84qimCGzPkmr/p6I4cMQKc+skrz+3hGfeaqPThP8skTs1xDHuKlGrY+w1RTtcoIvCK7GVwXK5YS6pD9cZ7DdYrA24uYb76CSFjK21PkCHTRJlmCcI44Ekaq4cuUi7716h8WkojsqGI/H6N4AYQVBN8F0JCOsKxhtJPz5n32Jv/N3v8R82QIpwXukaMYXei8aJa93KCnxdUHU6fD2Ow+I45hHrw3JlGg2JyHgXUAJRViZiK9YsWLFij8CHirwGw773Loz4dr1Te68dkivr3jskcsIDCZS1MsKRUk26jK8cI5stI3QGcV8gVnmzI7G5KXCe0ccwFYVp6XDBc3FtYhuP0JFEu9mhOoE7wwBsMaSSk0UC6rcUi9baBVQUYyQAaVAydBMPZA1QmikACk0KEsUa6rSfC9zFiUJzjoqU1DmJ6xtrnNSC1S8pNf+vgDkoxNo4rRmUOsHg7YPZv4+yO/PCkLTs2eMQcrm500ZWJ0Ffe4sOfR+MNk0+TXfagQESkY8uZ2yrh3UinOPX2bjfBcdJ7Q21kijCKU0OlaAwZqY9c2L/Lu/0GaRv872pYSgU4zvUS4E9V5g7cZ5ZKRxyxmLakm1nLOcTqEqcbWgtjX13i6m3ftIA/B63YjHtyQ3Dyq215Zcv6wRcYRfzJGzKUrF+FaLuD2kfrCLUh7vDJGQpIMW1f2Ske6gmNBqJwipmE7HFCXkuaXX6tHrB0pfsn5uSLur6fVatNpDqtxgqwLvcrzz2MIRdZrJG94BOiNQ8+hzl/j8wTP85m+8xjKAE/5s/q5EimY8m5IK6QLWOIrpjGww4rU37tBqSa5eHBDFgiyT6KgRETu3CvxW/M+XQa8XtocjZBzjihKZZXjjkJHG22ZCjUAgdNRsDMUHNsqhGWEYQiC4gHceb21ToVESodT721vwAW8MwtUITPO5GQLhTClPECAVQurv7z8DZxn18L3nen+f1TTGeIRSBO9RWiGkOOtftngPgoAXmoDA2xqlJMZYnLHoVoe42welSNKUB/sPODk+XqXvV/wrxUOWenOefeYGd+/vc/fWXT790jWEMgSREMmIm1//GtevrVPOltg8UJeOICJarTYphvdCylFeYISidoqDac7uPCbgGfYiBusjyO+SL29Rzu+gfTMVwdYG7xReCsqFQNQaFVQT40jZZPgAdIQ3AWTzfekD0kqCD0RJSmUsPgjQCqU03U6LJLZAYHw4o6oE565vw2+ffkzL+36PXvMnhLMPGwQhvB/kwfsfckLIs/47eN9bO5x9MH3/
/gLnAiFYQjg7XkkIvgkuEAitwEJQgVYi6UeK9U6H5545jxGacxe3ccEiZYTWCYlSVNbivSAIjVIRCInqrGHdM/zmq4pTo1Ane3ziasrFyyOQEpfPKPIFrprDckkWtZh7hfOO9miT3C2I2zEfJY5WCn78cx2+9W149a0j1jIYDDvINMEejtFRhxBr/PYIoQaIYowKkoAhSjSJshzc/DZVvYOMB9x45kneffc24/dOcUExLwvSTtMrakpHHEE+P2V+MqbMS7wvWRYVMuqxtnUBmUSoJMK6Co0kKI3oRXz2z77Em+884Pa942az4iukjEE4vPBoqTDGgohxRUXJMaoz4Nuv7tLuJOxstBrPSRfAN9NnVqz4nys7m1v8jf/L/x2bF7jKUJ7mqDRDJhpblegsxQuNtAZf1wjnwFtEVRCkwGuFWBQU85KqKLB1hdApgUAkHEk3RQbwVU0xPkXkY5J2hhGekE9w+SFRNsK31ghEEIkmiAwSFQJCeSJpMHkFSiOiGukDy1xw9fERCwu9YY9Ou2R6PGP39pxe22B1G1cuSDdGnBxNufPOTYp5TkWb9Sdf4OV/76+y89mXSRG0EsnnX3rpT/qlWLHiX+ChAr9WO+E3/9nvsLd7xGdfvsp/+B/8NK2oMaC9f/MOy+N3maOIUkVnYxOftFGJxEuNbHW4e1xQEoixCBVRhYQTC10lubjTJSiJqL6JWRwT6orC1CgdUEZhbECkEcVSUdcBhQAnSUSzAwyAqixeSYQMOBUIUkF43xPNE4QjSjLSLCXSMVVZNztIn7B3fw9nFe++doCz9mNbYCnlB/rzflA1+/t7/YT4/vHvN/d7/8Hy7g/e1zn3vdtCCAIeKQU6SLSGdqzYyCK2Bl0evb5Jb2uLVq+HDxYZNEIotNQYLCrRCH/2uMHifYkMmn5nyPbkLS6nnvPnN9CdCFuUeE5JiOmmHep6Tu6hdkDaIVCxqCwWR+LCR2pXE1KR9ju8+GLM731lwStvH/DcVcNga4D0BfbkiKiV4JcJycY1lt+9S5Iagq1QZcVGb8HuPGf//gO6g3X6/Rbnzg84Pc4h20KFQDk9ROqYwz3L3qFifO89ynmJ1IrlbAn37zPYuE9vtENce7CSSCicBuGgPjlGra3xs3/l89x+74BX/6d3uHv3BCMUPhjwEhMKZJQ1s6xxCCOQxZzcGr79yi6jH7veTP5QTZZWfMRZvamOQjeOkEIgpERHAucEtU7xQpFJyFKFiiIIAVvUONv4EEohaOQmASVBRaqx1UE0E0iEgAB54YhjhZa+KVUTEEiidhcpJULKD2SwPcE27Qd4T/he28MHBEvvK5nDBzIxcJYwl2c/bzY23jmE0qgkQSUp+EBwNW45ZVHWHM0W1LZ5fzjvj0MIGx9pQVc8FN56it1dbFHhTfM9W5ZYVyOdPdt8A85CZagrhwgOLcA6hysqwCNCQNYOZT2IguA8IdY4HRCRRupA0o5wdPEyIGWEjztI1cKKiGAhSINbWqLqPsP1HpTzph0kt0gPIh4QqjlSK0Y3nmTzM59ksxqDiinzmrLc4/DmKxS9Lj5dsFjMqG7dZ3Y6Y1kGkt4On/tf/W/Yfuo5Lj15kZBocis43J9STT6uJMKKFR8fDxX4mbrixz97g+ee/UmubK+TqWbUlA+Kk9vvIOsZy6lkuLVG8BEiaLQELwoWk4qWqNiQApkK1qOc/UWEIlAWjlgbhJMoKpRfsixMI1Zwikh7bJGg6aHjAq88tjBoWSJUwmwyI+t0UDoiCNGUBvBngRDoSFF5S5ACJ0DHEd47VBBYLLPJmMl4SpQIDueeovj45qSK3xf1fLCE+0HV7vs/+8FAsVEF/7DHabKG7yt7AWRzMUQgCGglMcZRCIkJnp1r14lbMVIqIh03tivOErDN47jAsqq4+cZ73H/ngKo0bJxf49LFTRaHOZuXt5Fdhfa+EdgIRbWcIlTcTJ+IE4TQ+MpjypqgQWtBaWxTKvmQBCkIUUY8ELz4
3Abf+mbJ117f5fODAa2sS10s8JMJ+BqzMyTqXsKMv0YkA3hLJ5OksUNYh3ZLTvePMHnBYJRRRAmpECzqmDKvuH9YcPCPcmJrUM6QxRrhoFrMme7fZNLr4kbnabe7VMWYqJeSdlsQdVCxYufpc2w/vcXTn7zOP/3vv8K3X7lPWTZWL1JKkA7nDUIEnPWICnSQHO+Pee27fV54dh1Re9JU4T5ij1+/1eIvP/EELkR0Ns9z5cYNiFrcCQN2reQJteDxS5o4Eah2F3s45eSN99gbV8SdhKsXO0RxgR706LQknfUtZvOSmzfHzK3m0sgzvX2AOzqg3RnSe+w5qA2d7QukrYxYCoRSaBGaIHI5xZ6cUi8XuKLG1QZkU2JLOzEqTfA+4K1tsjsh4MoltfGk0ZJ0cwfVaZHbhFsTxzEJa5eu8cxTT7Kz1iVSgfzgLvmdVyhPdjlaGP5Pf/3vcGv3kFuns48gK1/xYfBVRX14hKsMQURI0fhV+qpCColPFCoCawzCWULp8SE0E3BsjfA1uKalItimjAoBGTWfhz7PsTKAr8FOSOJAcJ7IFQQKbNAUvkUIEoQkUksiN2Nx/4iKIVG3TbvfYvvFF0kvXKQeH2G9wwTBW1/7JqPtC5wu9jl85RWK0zmVmVFNCoq6ZFEZ8tqhe5vc+Ok/z9O/8Be59OhVPIHawd53bmOmC+bvvc1DTkVd8TEw7HbC+bX1s7I/39tQBu8/4HUWmuvSB669zeGhKeeLxnHBWNfYkTlLFAniNEGpCG89JQnBO1xVInSEVhojFJJAJwoIJZq+/qLCGtdUvqQiViDjxs5LKglSE0Ign+XISJNoiVQSEaeNw0hoYgUhFeJ94alUWOtRSqCkINi6ERMqh0oldrHP7t2ak+qHb3ofKvA7t9nn3/7zL6F8hMUxqQoOj0r2dg8o33mVVpawsZFy/pmnMEITMGgXg1CUkxPqIjBoR5R1wc5GnyAK5lMIMaxdOodI29TmHEW914gUPMgoELymWCwIc4v1DhmnmIUm8gIXBbSKiKIIqxVC0KhhZaDMSxweF9xZz0ZAqAjjPFophA8Y4ynLnPmywi5hpyNIko/vzeq9RwjxPV+2H6bMff+rlPJ72T3vG+sQeZY1+f5xTfZECE9wzYng8UgB3SQiVYJu1qh2kxguXLvMxtYGcbdDrBvFsncghULKgCnmjPffoM5PMdMZoqhJvCK4hPzuXe7lC176/Mtk/RY6LLG2QAhJJxkyK3MW0z1coZBRwunxPgaBLwz9rXWW8wmW+If2Mv6hEUCU4GwgGXR47okhb77t2b9zl4s3zqE6fcxkSRwr/MSSnHuEye3XUX1LcJZIeFIVEL4gRlDVC/CObr/HsD/ELBeUi5RMaERhmNYV7V6fuiyoEYjaECUpwUW8++Y32do5ZbS2RSwEqpzic/Buhj+MaD3yGLLfob+R8Qt/9Ys8/tVbfPk3X+G9B1N8EE2AHAACIjRiH1cVKC24+c4+w1HEtZ0u75f+Pwo6yRg9+WlG2+dIIk1+POF495C+PqIKGpk4qvVtaq9oxY7bDw6p8ylRBOu9mGI5J+71uXzjEmVVcbh/gF3kLO/cZ3t9g+4yZeuRa0TPv0iIe8h2HzNbUs5y5vunVKUhyxSJrIhjTTA5YVkQVMCZHBtAmJokVTgvCLUBqQjCEkSNsTVWeQoLea1JKxBBM3zqE7y8c47ZouK0cty8eYdqP2VrqJGAHt0gUS12ejP+tz/9Of7mr/wmt05nH2ktV3wIrKOezAlIPI05vzEOnMNK0E7hVNPKoWhmX5vaNsIoZ8BXOFM29l3W0BKO4AJBxtS+TTCSmKq54KUx3hsiWeDLGao3pLd9CTmtENLhi5Koe5lk+AxxpsjnDpukpLHjZDxl8cZ7BATT4z38bEqol9xzntoUWOPJK0PuHbYSlN6Syz7rP/qzrH/mRxndeJzs6gXuHUxR0xxXLqiPHlBOcszBfVjZcf6xszMc8bf/o/8IZyyhNpiq
os4NzjhcbVA6EGKF0Jr5vMZHJTVzptM5h/s5g7bkwiMtRH+TMrvCV2+WvHlnl+ubQ774hecYDPvUxvD2773H4mDGdFmQAN12hg0BYw2PPrlGa3tIKAuOvvEK9nCGFYrTpaU36HD9k5cYrWeI8hTdv0RdK5ZHMwY7l+n3OiSDITptIaMUX5WEsgDvsfmSKi+YjivevZdz5VzGzoU+WoMvSnxxjLDfpv/2L/Glvzfjp79Z/9BN70MFfs7De3dL3n73Lu/e2+d0XBGCY6NT83g0o7XeobPZw8UtYhEjkwQvABc4eOcuBTGnpaWyit3DgriVkMxLNvuCrNvHqRSxiFBCYr0g6TikFEjdoq4Ny0WJTgUhOOK0g6hrhHWk7YwQKVCaSDRlX29KVAAbmgxgcAEpFFVl0TrBGEscK6RqQeSYmcDCwcaoIIo+vl7c90td/2LG7gcDv9//cyEEWn//5ZFCNj1+wje9gEiiSNBuRfQThSsMw3YT1D37+CUuXttGJxlRZ0CSxLRbCYKA8Q4dBAiJwXOw+xX6cpdW5Kk7gm470B8obr2pGW5s8/inniDuClx1SlXnxN5SlMfM8gWeFnF7QPA5tXHooJFZitcFD/bH6FhQ2hI+SvZKCkKiETVYKYnXhjz9TMru7XfJj47pbMUkQlIdHaOCxF3bpvXCT8L+VwnllEDJxkaXw4MZyuVs9GLUokTbgBMzjJ/RHnWQBg7u7ZEkMUhBnMTUyyWxcyznFculpjsQhOI+4weHRHpAv51QncywxRG1Lbnc7iPr87hEoDopT33hEXaubfG3/l//iPtHS5AK50BJRUBgaouVgbgAIca88i3F5ugRokjgP6Tf9ftoYM0G4oNdkk7EwgpOy5pCS57Yiuh2YuQyR8sWy8MTbh0eQtTh+k6fzUsdivEppprxyj/5x5jcYMcT2l5xfbjO4PwVOucvErwgeImpSuTJAX7/BDHPCdbh60B+agiqwrUStPaN0t4oVKJIlEfKjGTUJR+PcQ6K6ZKqKlnOC8rTE5LhBq31bTprO4yuPMHaE08hohZZK2UQzTiXz1mUDutyivsL0pbAYzHFnGBrtroJn9zu8Hff+UhLueLDIBzBWqxQOMCYAMYhhQcHVQlJKkAFqqoGPKEsUPIUn88QcYtEVmgVUVYBH0l0S+F1hK1SlAgYLXHZCO9g+8YIVxf0zw+J20MW+YLhIylx2mZ6/w5Zt4OP2+hWxOTLX8Fax9G7N/GmxMsI4SsiYVjMpzhrcD5QGYfxEuMFxgfmpLQf/yyPfPHnGTzxOBsbaxyMZ4y//ArbpmDU77B8cIuWFMjFjKzTR+jVCJ4/brzz5LN5k+FzDmM9xgVqJzAyxtcFh0XOa3fuMFvOefpyymM/+jLXNy6ycW/MZKI5PD5grb3O1qWrfHEnQe48yuHJjPGRo9uxeG+59ug6C7Vk992c/UnN0fiUT1yOGFzdRsc1kS0pXaDbT7g7z9jsSPprgXYWs9ZfR8dreDvCTw2Dy9fpbypmpwX5rCaJSoqjMcYafFESihIRbKP8U5LqwZz6zozCtlhmm0RaEowF5xHhOq9Xf4b/Yv//C9Q/dI0e6qw8PF7yf/uv/jm9bsL6RsK5y2v00pLuco8L3T6LcsH6+RvIpI0xHpRCaUU9HxNCjg8FQirWWzU6SXF5zlZHsTkaodJRkxaV4BxIFYgziFNNlq2TFyVFdYAvFD43yKrEAspUSAlpEqGVQITmTWuNITiHDAGlBMEL5nlOq68ZDDoYk6NUjLUOHcdMakPhA+NlCyHLj+H0a/hgcPeDZV75L9wW4gNdfLIp2n7w/s47AoFYx1zY7vLkoy3s1KHnFe1WSiICexNPb7RGPNqmlaYksUQpgXUGHwJaalzUyFL337nNyb0Z7QsaFZXoEMhnMZPTNk+89DT9zTWcL1FWUtdzWjRf7XxKNS3wZkHVX0e1W0zGB4RKoOKS2aLEW0FhA0kv+wMmifxhFxBESxHqFOEDWIfuCM6f
3+HBO2+jY0X73FWSOsYeHWHbGn1xCzO7jKrHeGNJteXcxTXGB2OEKNlcT5kbzd7pPt4KWt0dElFg/Q4hHpDoQLmY8mC6wNtAkqagBN2eYWcnY3Y64/jwBLsc0E40+XLCcnpC/53vMMpG6LURoh0IFobX2vz8X/os/8N/+1vcH5dNOdzVTRZaSAQRznh0tWA+ifjWt+7xpz53DSU/Wp+pDJ5iOmFsSzqtjBMfSKOIme9xMKtZOkl/fEw/c9ROsL97yFK12HvrLY66OcNhl0wp0jonQ9PaeoJ0/SK98xcRWUoxqcA7RFFgyxJjK/JpwTIv0d6CNahMELcjog7ks4Iki1DBkc+WxL0WMuQcvHoTW+S4IJhNltigsO11GF1j48o11q9dYWfnPDpJYTpBRjnViW16A21FK/K4eo4pTnDLJcQRzjrK6QNqa+mvDT7SOq74kASHyWcEHeN80/OnfQBpGvFGiEHEyFhirSShpgoLcj+CpAPpABGWRJFFjwa0NgeIVGCMwxw5bG5pDRR1UORFzeG77zK8dJ7DuwdIdQhoqmLB5P4uZl7gioLOzjYhFozv7OKLBaYqcd6jpANqikRhhWBhJZWHRenwIWCDZHTtGT71Z/4Cl154iW6nRba5xu7eKf0ixy1nRMpy8M23SKucspWSPnaVYv+ocZpY8ceK9567t0/oDjOEL5nPK0rrWUxnzIo5s+WEB4spb84dcdZmNBtw/sTQu7jG+hMXid68xf39jOP9nMXBN+mNMtYnktOJYTGZUceXcBZs5JBiys4jA+bfOWI6tVgraWeWWAecK6l392kNN3iyt8Wgl9BeO0fS6WIqS5AxemsNMz2lOjolOI+bFxQEonqGywuCd9iiyWyr4Ag6gkgRpaGJo4SmnC+pFGAsdWXRvQ5u82le/NyUX/kf/sEPXaOHHtl2+XKfee4oSk1RnbJ2rk/LzlmWns3LV6kJUM9ptfpU1lLOC+zpCXsPZuRVI5Gf2xatZclmP0MbQ79r0Z0UKQaIeJtKKLwwCCVQ0mH9fZQOSBtY7ipYJs2s3ViidUScpUitGlcAJZBOoNOIGotbVk2d3wUiHaEzSZQm9PrnWCyXaO347utvMy0kPrLUvjizAvjovJ/E+/2Zvd8fAH5Q3CEI3yvp/sD9Q2CQZdy40eeZZzUtveD214/p6hZkgclc8NhTj7D94gZJ1mmyKkmjsPUWtI7w3uDwHN19wOtff4V8PGFrWDGNDFEqkSGiLNe48cJL+LMehFQ5yuldVHAs51Oy7iWWC4eXCc6U6AoKafElGO9JXEK7l3B455D2aEB0ZrnzUdbQpwKRakSd4kWJVx7d7bPzyA2O7twEEZNtbKNribl5F9HO0OvXqDGo+D20WyKsYSuLyMdjTFVRLpcMYkWs+hzXjrpaIGNF0s1QlCymOUFYKp2xc/4aTkxY29oky1LK+SHDbmC5mDGvNbZcgCkxR/dxB7dRG11oaUg8TgfOP3+eXxRf5Jf/29/i/smiEc+gIKhGQCEEdR2h1ZLbd4+4dXGL69fSD71m0FSYHpQVx+0O11swcIaoFbM7c+xPF5ibt3nxWofd/RNe311yd1HT63e4upbx+MUtRuvrxDpFqBZxZ5No7TzloqJalITDMd56vLdoYbFFCcZgy5IssqxtSNa3W0znhijVjOc1eVlQq8Bk9x754REi0uSnRywWOUnWozVYo9MdsPb0y6RXbkDW4Tj3vOcEYpxzcTsl0gK3nODKJTZfQFkQfEmoxwQzxymNDz0wFcXhIYvTCVJ8fFN4Vvzh8UE14iYjEDIQUJTB0dIxUjoEGucEGE0IgipK6T+yhpMpUTtpepjqkiSqKYuC0+MHlCenhLpEpinBlByfFAgVE1xNsZyST8csj/YJtWmcCVQAPKXzSJ3S6/S5//arVEcPkN6R1w7nJUZKdCbAVuSlp/aK0grKGuq4zdOf/QIv/4W/THfnAjqNmN/bQ1Y1yd072Du3CSpgdu/TGa2hh33y
/X3M/7RLsrbxB83PXPFHSFlX/Nbbb3JhIHHLMfdywXwypyscSavFaKPP09sX6UZXOe5fZO48y6VhvnuCiKYEEdAY8rmhUhJblmzqkiubgc4wJiqPyVTCPPecTEp2Lre4cFGTtD1S1xy+8S4+d6RxRO/cdbYffYlICKJ2SqSi5nPTzakmCxYPDhF1ialKQvBI74jbMeURuKL6nh9vMBXGm2azHQFRi9G6R2aB2uTMjyYMrt7g3NMv0N0+R7CWSy+/xP/h4wj8pIB+luBcwex4wcZWF1Xsc/lqgnNDhIrQSUbS6iEEuGJOcXKfW199lXxZoqVnmGjePS2oswjKkvMDxYUrW6Da4AV1bfHBn/W7CXQKOu6ynBmqYkE1D0gcQieo0ChRcYFgPFppvAURmoZgKSDWEYuiQARBojVJ3KM2AWMtcZZgbY0OAaFBK8XljYTfvZt/DKdfww8v877v69f8IXwgIyhlE/BJhSCgRGDQ6fDMU12efspglxNe/zbcO/DsdPs8+eI14l6bJOtjJKRZB+cMjoq8sJhyiSlmxErjBHz3994h3zumnXguXHBsbhniJDCbxZBd5cITNwhak0jQboGoc7RXhLrCzhZMJ29RiYzOxjbTZIyrSkJuaa0POLi7h8xSZrMSW9SUpSHEjh9S5f5D48saXTQlRR9qpK9BSnzSQg5jhtZxcvcdQizpdDYIy4rivV2ylx4jbT9HfW9Imd8hO9eimh2TtnaJT49YEwWT+ZRhy2JKT+k0djEmMpZ8mVPlNa1Wi976FURkGXRCM4vXGTpthbCeSEn27t3H5XNknHA6myLfeY1Rq4/21xEbARVB6AsuvLjFL/jP87f/m3/C8aQZEQjNiGHvHU4qqEoinfHaa7fZ2X7ywy8aYK3nMMCD8QG97jaXr5zjuOzQCWNmb7/GgH2ObsfMc4cIbX7uiQ2eOj9ktHmJbDAi6a0TdEZdC7yL8GVFmM5YzuZErsLMc7yCOBEkqae/rTAuRimBjgLz2jOeFpzuTZkenRCsYXp4xPLkhLrIcTpibW3I6MIj9LtdRo+9yPZzLxEPR2hABcdlZ3EqYjGeMds9oDuIkc4gqjnSLDDVHDPdQ7U6OJ1RlTX1ZJdiOmZ2eMTJdMHe+MP19/W67bC1tYlUEVLHmKqiKh2uzGn3WqhIn/XtNu9vqVTT6G0MtrLkFoR3tLtp4/tmXfM+b/5CSImzjiI3hOBJkgipJL42yEg34oYobtoDaoOtKryzJJFCZdn3vo911NYiaCyAZJJiqwJjHDqKSJIY5wwiNL6nCEngzAmAgNYKF2BeeryIUHFEnMSIs7GV4737LKfjh38HC4lstXFlIGiBCwoRHKXSiBDIkkClE6RQiKhECYkTjsAp0wcLEgx2NmX3/h6L+RRJhCcQXInWEdZ6rKmRgmYNEAS/Tzj7f/vgkRHgLYVxlEaw9+AOONf8PkJS1gETaCxggsdpSVkrTBUIWnP5yRf51F/4K3SvXiPb3CD3ApuXRCiWt+4QuVOyeo43NcmFdezsAPPOdxFRH7csOHrt6wT38ELBbrcbtjc3sdY29/cOKTXBO4IxhABREp+dewGpFND4FWJrAgKhIggemWSEukRp3UyFQhKcI3h35vUqmwt78E2p8CzJ0IgiHMHUCCkI4myClICAgNB8rgdrm+cOvilDninwG2tEASLgrWm8ZaUEGaPSVuMUEKnmuWTT7CmCR0YxXqomKA+C3be/89CK/HFe8M3pCcWiJs3aZJ0+AwUu7kPUon/1UVqtjMuloHIRwdfESYStCmIhiQY9sranmJxyMKl47OkdOu2MpB+Tdlt0+h1E3OL4vQdM6WD2StZbEfp0TLSwjJ58CdoD4jhDt9qUxxOsVlSTKXEk8XUJyxnmdIwxFjtvfGKF9KhI41yCSpIzEWCNLUokgboqcD6QZob2Rh8nU44XmrszRT58jK7axk48V9IFsbToqPsHrtFDBX5ZFlNWjunpklp4crtAhZwgWwgc
SayJ0y5R3GY+O8F5TYQnaaXMTMlSSHwacX07Ynwyx/uEKigGFx9FqwSkR9AmySLKmcMajxSNMjjgsJVECYVQAuENKolRsUKoZlKWxzXmndYinCM4hwvNh6z3gcrWtFUgTjRKKWoREDohQmBciZYSa/VHMhz+Yfx+te77at7GkLk5pvH6k0gh8NITBcHmoMdzzw947PEpoj7h9uvwxuuBfq/NC48MufzoJTrrXTwpQkVkQmN9halz9m6+RnXygDiqSBNHLT3WaTa0Jly2tDqgpMU4yeHBiAvXn6czbOFDIPUVqpohQ46wjcquyhcYH5AyJU46nB4f46sSn0gWpzMqGxA1jA8mdIYdapUhQqBelGj94bMu1SKnOJiStAYIGRN0SvAOJRwES9ruMVjbYHzrDuqyINUKczLH3NpDP3kBuXMFWQyp1C7J2jqmu4ZKv8taZ4zcU1T5jJ1uizIOHAmB8MfsbO6AiFjMCjrtLjoLOO+ZTUvS2GFOa6TxRGrC5nrg4LBFFHeZ5ktayxnLe+/Q0S2EWSfqpHjhcBbOP7fFL4Yv8nd/6bfZHy9xwjfqMesxTREfWc44nUu+9d3dj3TO+RCIE0eqNFEn5t67NxlPHecf3WLrch91uiQJMZcv9+gliu7GDt2tK/SuPYnLq2YTYh0aSZ5PKccz8mWNKXJk7EjTvDG0FZogBUUdk/RbLCcLJnenhDhmcXzE/funnExnVFUFMkG1thDrXU6TLus76zz13KMkUUq6fpHWaKM5p8oKWxUIU+HrnFR47HxBWWiiRCDMEkJNoMTUS/LFHIOlmE4oy5rpeMJkMmdv6bhzWn2o9dvZOc//8z//f9MdrFHmJfdee4t3v7NLojOuPT2ivZGhnKU2DktEp9sm+JrF0TH7r97lzmHBznqLCy/fwFYV1e4BUZYiE0Enafodc6tZ7M8wwSHTlHrqWI4nbFweIvtDvCuRWYyvlnSyhHw2QcUpJljS7pDjt97AHh1ysHcEMqYzXKdzbgNfLDh57xbD0YjB1gUe+fSnWb92g87aFjJKycsKm48ZbV5AxW1Odt/j3b//S9STCf76pxi89GmWvRF7U89/8ld/5kOtX/AWZ6co3YyMlKKmXsyI0gwfJPmygrSLVIEozaiLMfp0ynj/lKylmNeGyaSgnC0I3uONo7HkEljvcTaADgQhm81/8EgZcIAwvulnrjw2QOWhto4gBMZLnBNYwHqBERKHxRSBWgicF5y//jzP/6k/zRMvv0T73Dlkr8/B3bssb72NOjrG3X6bKD9Fe0/a6SLShNZTj1POusznlpDPsMaSXH4GvvXmQ69dP1L87z73DIlU+HyOdg7VHxG11hFO4qUibmfopEWUdRDtAV5pKmMR1qKkIOq0oa7xRYkrLTI4vPMoGaCu8LUFHTUbkTjFWk+IYsJsD2WazztTWEJRgrQI7UFGCE6ok3XSaEmsPUe39hE2J4kj1NoF0lYLNRoxXOtgkxakbUSsaQ+2GVy42BhcD9YpFiXH94+bqUbA0Zu/x/LWq5jpEb1Hn2f46R9n1uvxhWuXHl6RrzTxcIf9EHj68iNsDweMx2N2hl10t01SLXn75hFJcFxMYkpb4Vp9inSEUy2mew/YPZmSKM+58x0SYZhNLdevbpJ1Ew5295gfTTDTBcOF5/zVK7Q76yRXn4S4gxMRwQbq8YzycILPayrnaceWOPLEkSRUOd7YxtlAWYKWBFsTRWCdxFV1E6RjCZFlOV9Q+0BlYGEFs70FnZ1rXP/C53iqnTE7nTPNS04P9ojmEzb6TSD9B/FQgd9iUVOWCz7//AWeeHydS1t9br77Okmxj2i36K1tUs1ylos5SmsSDXm5oK4rhK9xtWNaWlIc5/qw2bL0L26SdAcInWBtgdOSRQ6RFFgX8FaR7nwaN72Pmb2FFAK8RwmF1wp0DBqEcogQ4bzFGYeg2YGbYBFaNsHg2Y4FqXG2mdtbG8vbd/epRYIlcHj8EcUIP8AP9vf9gDpX
hLMfh++Ve4UQSOEZdbu8/PyIp55YIO0hZhJzetBjPW3xZ/70Bv21c8StDISkdjWpisALXLAUi/uc3voqg2RJdO792b+CJPFI5Vm2K0wtiaSmdjHz6iI3nnmG7npKNT3G5TO89kg81lmkzKhsQHUHJFZROQFaoCpPrBWHkzmSLtbmtDb7RHXNclqRdD15viDtRej4w5fOvXPc+da3uPj0s8StDqrbR1QVrsqR0hKUIOv2GBaG8d1bDDY2SbsD6rtHiFihbmxhu33UrMJMbiEGbYS4QEg0Q6FZ7DlEmJINMra3+rx7K6Y3eoxJcUwiNVVVEkRMMuizmC+oxSnhtKbYmxH1C3ZubCNSyd69U5yxLBYzzJ03ECJmmHyWejxH9mNES4MMXP3EOf5C9af427/0TxiXDhsMCokIDu8M1kBkSt56+6MFftZb7t++zW6+5I03vsM1X3GjrVFqi2e3R7TXrxLrjLS3QTY4j+5t4o2nHDd9jQSH0h5XVfhFTnkyw1QLOLMaSDsWFQyHY0vtFYvphMkbM/LFghAE08WSxfiU4AP9/hrDy9e5/OzzrD3/CcZHpxyJlINFxbtFzcsXztFKYsLhPfxyiasrqAugySLYqkRgUArqRYmzFmMKymJJcXKMWcywSjCZlsyWNeNZzsG8ZFwGzm+1P9wCCkkaSRaHd6gqS72Ykh8foYcDnNqgKg3YiroyqCijzD2xgiQWDLdjWh1PZ6iw9RLpPKI6ZumGiBzSCyOKySnohN5OlxAlnM5qjt78LrO9B7T71+ltrlEeTVjcPyYQsP0ucavD8d27yKpi6m9SjmeI+ZiNdp/++ha9rR2yzXMsb7/OUz/3l1l74hPobIhyATMrOdi/j5me4NFURNwXJ6gih2pBGl0h7S0hn2F/4+/RunqDK488wYfdswVbUN97FZskOFMTakteVCgfqMsFaaeNSiIkDhkrXG05Xi6oixorIC8qnI9wLuDxKAECh5AS6z3WB7zXgEeJgJdNH7cQAlMZhBTY0IgyjBfUocl0mhAwZz6RNYIqOExQGCnobN/gmc//FOeffZ5Ll64yujjEzZYs945Ya8eM776OePWr9NfPQapJ3ZykvYHcucrstW/g223CfB8hI1z/AqGcEcLDZ/xiqRjtPIHQGXEaN2Mqkx5+XiBkEw740iA464efNp89wdLYmEmJnc+hNvhliauaKgk6IpYOGQy4EmRCbUHrElfX+DIHN2ucMKwGM0P7GUF4qEC0OkjpmQfNlRuX6HQ8/RtPMT+dYkoH/W2SXovhpRtsPP4kIs6Y338Pe3yECJr5/hznZ3h9zOzgkOlJweL4lJ4sCUpi3TlENKDaL5jf3OXm67/8oc69dtZh7eILuLrE24p+fkrmaszBHuoU6LZBBx7MLVfSiMcvdZtJYNMJPjhOH+yyV1vSpM/WRozuCvSs5OY3v049mSBKT1RUdFsDBtcep3vpSXSaEmqPKS3Kl/hiid8fI2qDqwzOQCkLRKYQmYJgmjYsL9GpIpGOrNuHVsrkzn2CECync8rlkuWixNcFa9cfp3/+PN31LdZuPEO6vgVCk4iIuJUwoGKrJ6mrOcW9GWnnDw7vHirwWxsm/If/y88SpwnSB4rJAjPdJ5IL2tE608kC7WriLCZJWtTVhGp+zORkTlEFnIvRwbDdU1zsBtpbLTYfOU9tLJlYEvkpXp6iJIigMHWg8jEubLMsl1QmQSCRkYagaKcJSuvGF8qDlx5QjYWArREhECGwnAVWEoRSxHEKQtFpx+RHJzx+7Qpf+e5d4jRmXjms+bh8/D5gviz4nlijcT88+7YQaKXwwdPJUj757DrPPmuJ9QNuvyoYDc7T6W9z5YkUawNR1CPp9nCiIjhFplPy0hJphSmOOX7762z1SrJBU8pRHox1WNeY+LYzRS07hOgC3eFlzvczhHeY6SmJsAi3ROSGg70lg36LoKeY3GJ1Fx/H5JMJqYW6LomylP5ok0XhSFSgyA3VoiKSgXkh
aHVjnC7xH8HTwFrDve/8Hif793nuJ3+abDREigjhPb4qkGkL7x3tdU99UHK0d5/trIMmwr65izUVyROXCPEas6O7dKXHFQWq30PG0E0zzHSCsAWDzPLo00/g5Tpl6RneuM7RuCKvDUnLo8IpurODklAf32Q6Kzl57ZDl0jfnewlx4jFmQfrgNmn/Cqq3hvcBrRR0FC4yXH55mz998gn+4f/4dU6dAGEJTkAzdwVT5gjx0dSAri6YHu+j65LzvYwvPH6Oa6MOtWyT9bY5d/EqUXsIISF4sNM5tqwx3mEWBZgKpT3BO7wIQI6ILfVygYi6mFxxejDh5N4RVTGjNDU2WGSc0UkThr0eV7dGTTZv8wJCtkhERLqoOd/pcbXTotyJOTiYMj84Ie5JIhGIRI0QhqqYEkyNyhJ8XeGXR9RBUi4KTFUwn04p8znONeXVRVVzNDUcLSr2l4aZlbxwdcCa/JDN9cHjnSUvakxpSPoZO0PB+rU2aUtRG4P1gtJIQlWyqSNqF2h1e9SXU1JgY73LnftTBqOIjZ1Pc+fOEYv7dzk9tEBGPV9SL/bpjNYJ84qEgrXMUd+/QzGdkrXbjEaN5VCxO2bzxWfZSrZp9bqkgxEiazG7c4/+1SfQWYvgDLP9fdTcwe3bzI8fsDyaIIocWc+QIlA5h1MjKtVBiJik10F4g44TkBF+YdFra8ipZfwP/wHZh3zvWmM4fHCbZn8tcEpg6sa4Gx+YThfEkUSpgFCS2gQ04IXDeChd40DghUY0c9LQQmBdE/QBKN8I3oQCE2gMxoMjIDEu4BqvcDwO6wMWSRGaY11o9I41ktFojSc+8zNcfulFEAmd9U0OJmP2fuMfsFbNCNaRrJ1ne+sy+WOO8u1X2PjEjyLTBH98RNi/i9Mef3pI/MhFlrdneOfIi2bk48Mi0w6ivYGzijIv0dZiF1OE80TB4I1D4xDB0gg5m2DYn/nQhRBQQiKDRfiAVI25uTAlUOPsBG8cNj3XbCqcIOtm0E8YXrzM8ZuvoYMj5B4RrZG0E9LhiMJLcpPSzXo8mCxhqkmHXQaPP47u9VncegtlDEdf/jLzV79DkBGLe9/CPngPYUq8sahYo9sDwvAKC9vm5N3Xce6YuDfCZX1UawufZbhvfpNkefihzj0ZoFcsmVWGUWtJKiJmixnjsqTV1pSnc4TyJPGI48Li9gM9VXAuzgmHjvHegjunJbkRlN/a49L5Hv1Wi0wIsiqns3aV7LHHifvrpMM+deUo53OkqQhVjc2XeGOYnS6pnaUtDFkk0WkgG7YwtsYUhvaoy+LoBLv0yDTi6L1dZnsHRGlCkRvKyiGSHvXoMrS7qK1LbFzZYXt7E5VoGB9BCFTOgqCZAx9KhJlhi0Ns/ge3rD3U1SWJI1pRzP5bv8f0wXsk3W3Ucg6DiFglVKZGxyCFhVChCCidIXWHkOWYqWEQw8UNTTtRpCKhlSqYfYel2aWqjhE+ILzDmcaCGWuwxT3s/D2EqQgyIwSBUgLhmlKZDYEka+Glx7rmDeBd88EdHM2khLPeg7iVYp1DaE9R10RZhhOCoCTOO1qJ+kiGwz8U0UzieD/vJaVoZnhIQZAgteb5RwZ85mVNf3iA9BnHd6+wvT1iuDVCxBIXBFEW40ygdhYhm3nEQmqEsMwnBxy99zsMswUOT50Her0MKQNVaciXoEME+jzDy08TtYdU1RzlgSqnmN2nyAtaUYp3gu4wo6pLXNWUzKvFlMo5goG6yJkVOd2NGJ1pNAEbC0xtGO4MObhzQNrNyKuK9iBp5tp+2KUTksWi4GjvdYLM+NSf+2lCK2pGy9XNJAxsiW1lDHYuUNy6xeTBXdYuX0FqTXj7Hk5FqMtbZJsvoGJPObuH6MygnSH72yRW4ccTjGjT6z3GcmLIThdYLGnbYRgzmx2zvb7NcPMJWtkAqbrMb36D5WTBdG6xU0Mnizme9WiJmvzkAVrdYf3xFJW2sA8OEaMOaquDT+GlH3+S5azi137j
NcogkKJRozlrEUGSpR9N1atE4Kme4vFzV7g8iulv7bArL/DbfoNHsx5bgxGRq3DTGXVeYOclzhnqvKDOSybGoWPBqKcYT+bMlzP6nZSqWDBbTPCm4mD/gJNlzdrF67TOXeTmvfvYSHO9JVnb7rDR65D0dogH2+j2AKKEclFSL3NS94BsoLnsLVIrpFF4X4G3UM6JxRIralQNPkiWVUW+u4B6Qmkts3mBqw2LZUHhAqeF4cHUcFA4TirP5z95mQ2VUy0+3Dp67/CmRvsl5XKBV302nr5Ke9AClxNEgtQRaebIp6cU48aChHmOkBn5dM7x5Ai1tJhlQhlJNmVg58ajlNM5i90HtNe3GFx4BGcduleQXr3GfFExuvYEOmuTDjZwznHwytcRMkMrDXRQZcBPDdXNdwiTA6b3dwlK4U5vszidQbxFkk4oTxQyKHzIcCJCtwXBCoJVRCFgqwlm9h4ohSln+HgD390hmk6IkxIdp1SnH27yhHGeSdGor12QcDY6UwhwMmAsaOfAgSMgpSLVgtK4JkvnGrEY2iGQCHu2fZSSKggCoQlOAtQ+YAUoJO7Mo7dCnZVzmwBQ0GT6ctGUeD2anY0LfOLJJ7lw5VGe/NmfQ3jP8d23mH/j1zHTgrQ6QuZj0rQDJw5zf4kYv03aWWd59x2E0I3h9HKf9sYWlYmIF/dZzHNcVDJ99/ew9cM7RAhvYXKX8vABXvWIZAVJFyF7SCWQBIyU+LrGGI9xgnBm9O8FEGq8TpG+yZSiGpsvFWkqJERbIGqk1uQ+ASLSXoxxNYs60H36ZUw+wxtJ3O5RnBwhNs/TW9tk1F+jPDnGnOwjnEWtXWF5PGHx279FOLmPHuzgXUr+zju4ySFxu4XSG3hpECrg4xZBC8J0H7Wc0DIOsXaDwuXUu6+jeJNkdBFTbCPqkw917nnv2HALNnzFVi8mdGKOKsNNC+044fGuYCOLOVpGjGcF9969w046o7UW8+7tE14fG6pOylPrPT598TJbWyOy3hCpugSVkQzP472kWJYUDw4JeYHzHrAo4bB5iRKOmIrhGmysS6J2zLIIBGlZHpdM53Pmdc74nXcoFktUBIvJmLoOdAYbdNc22Ll6gfUXP0u8dQ6hYw7nNXtOoE9yzm1oYiVwxQJXLDDzOaLKcWYO5rSZGBX/wdWOh08rSNi+8SRp1uebv/4r9DqBNBlR54HO4EJTs/cVQXkiFXM4rbk7L5ktPP0sYa1VkChFJwU1iImje7jJ28wPFlgXSNIYa22T+5CeYuloLb5FOc2pa4H0jVWFPEvjaxURatOIOSKFUh4RSSojKWsPsnkziABBCJJWB+8FPngSlaLbKcenE0IQ6KCpPkb5fQicuWufNfPJM9VuEAghiRFcWO/zwtPbbG8eIAqB6z2Grdt0+xLZ6mOAUFuStIX1Hq1iYh1jvMe6AltCMd1jvPsN1tIZUerPMqCBRVEileB0EuPsRc5dfpK0twU4TL0gmAprcqinLPYWBGdRHY21FVpLiGN0u0t1eIAtCtBtuls9DncPyZIUhGd2coQXCiclkY6x3tM5N0TriDrPGV1sHPk/LJFUrA2H7C6X7L/3Fu99a53Hv/g5/EYXJwPicN70MgiJiFusb21ysHuXe2+9zrkLF1FRgn33HiprE3U7GKlI15/Hz74G5IRRjO9eRGsBpwWuWBCJBZnOmUxPcDjs8hAznWH6XQ7vvUORl0gq4iQhjXPSy+c5PprSaidkrRuUJ/vUJ7eYvDdGqwf0znVRfoEKO4RWhsg0PhP8yE89y8nJjK98+xaeZsMTnCfIjHKx+EjnXhZF/NnLQ/rDjNZgDXHpE2ykG0Q3Z3zjwYRH7IJrnYAKNa6yVHlJVRkmZUkvhTQLHE/n3H/7iIPdQ851gc118vmUfDbH+IBQKY9cGHLpucfoP/tJLt/Z4/VTw66FI2/4sa1tbuyMcLMC7Src/JR4ucBPl4hYEFSCiAQqbiOFJYpqQr3Es6QIrmm/WC6o
y5zi8DbGLIHAsvYsSkvt4XRpOJ6VHFee3YXhsPIMByktXTAva3w7+lDr553ndFYSakE/83gRszQJkVOEsmBxdIiYj4lCoDfcwowPqQ+OqPtDskGb1Ae0sgx6I5TXuCoQm5w46qJba/SfuUzUHcD0gKiTUOYWGxSJzpm9eQutFfPoPUScor3EHO5R4RG1RYSAXSwxJ3to6XFJC6Fjgh6hRms4DEGvYWcPIGiCKaGeUucelXUQKHyIEElMYSvqk1PSdhcpcpKNHhuPXae92UNmmvhXf+XDrR+CPDQBh/HgRDMKUH4v8AJ9lpmyLqBCoHAW+/7xoREUeEsz3QBJfSbssu5sEoMUBB+og8AAIjSTNyofsFrgpMB7Q8Dhgqb2HisTdtY3eOryVb74Z/88dNpkSczhP/27RCc3EYt9Wosc2iNE2iPq95G+xIUFQRiS0QCdZZSTU6rxKfHoInXnMosTh9rYZO8bX6euayxHuGwDz82HXrvgaurpId4XOGsw3qCrAh0VmLiLx6KEpHYe2e1iC4czNRaFEhqFAx0RdETr/CbVfIo9OMBFMVGWkPQ7lCf7qKxFV2o6Vx/BB8lwa5sgAr6qCGXOYHuD+evfYf36s+juEDc9prj121T3bmKchtYm9Xdep9x/j1DWxHEgVAvq2QxRT/BFQT5tEw37BBkhg0QicKHAV3MwNXEUMNN3qIsc4S0Bj/GaWnYQo86HOvcK61lKeKwLYjljMltw6GDhS2ZTx9OPn6OyGa2tLuXk2/jFW4jgeXBP4Z3mc9fXefzikM31bdrbl4nSDrrdwzlJVQpC5QnFkupwgqsLZF1iy4oQK9qtQJZ5BjtdyqUHHEs0fmo4PJpz794RUljyyZzpwQFVvgTvcK0u1y9fo9Mb0BuM2Hjh84xuPI5QClmWSBlYixReCJYnM5Z7+4SORroayjnSLTE2x4zvotcuYuoF1XT+B67RQwd+jfdvi7ffuk9kCyJhEYUi21onSTUibqNpY4PD21OUkOA9nZbG2cCxVVR7JU9diuhvb5K2W1jzRjNAuxaUtgIZoXUjdHj3puf2wZh6PyVpg0LgihRsBGWF7LRJWslZalt8f1fp7JmSXuErAx58BMs8J3iP1pLalAghuHFhSCoa37/DvPlg+rhwzmGtQymJFIE40ug4ECvYigOfuJjSNmPScAnfXiPYjHavzVIv6LS6zf8Dh9ARwtZIKaiqAnC889pNpvtv8+jVKeeHBhEEQUiUFDivODlWRPEFRheeYbB9DqTF1DW2LIikbexw0i52UbKx0WM2zalrh9YxpTF45/D5KTpOkWnABovXAd2OyTpdnBaUhzOEjHGRpNVpsRjPcRrSyDActSlN3ii+PiRSCqQpWF/v02rF3PrGl0m7Kec/8wLRpT4mzyG3aCXxShH11zgnY9576w2+/bUvM1hb49zGNtZYWk89ilgbYtM1pPwE/vA7KGkJlAitcaf3sacFs6IgiRzD6ID9wwXaeS5un2NRdYl6V8la+2RpTbzd4s4bNYtiTK8rOTo5Iatu0QsWUy052HsXJyzCrKP7CWnHkR4Z6Bh0P8V1BT/15z7N8cmMd++cYtHNeWot4SOaiCdxytqjn6I12kSnHcpFTXx0l6ed4ncWJacJFCqBcoG0BrwljmtMWeKThOV0yemDe8wXc2SoiNIurizpZRkbWYRUCmkaRer0zh3GbsBoY5tPbkX4JOZ4XmNnNTY+II2ajZqUBUIW6AEgPD4fo3QgHUUIUVBZh61LFiZhdvCA4Az1eI/ieA+PxyKoPeQmcLKomFg4WdSMC8teYTmoak7qmnZfc3g8IyLgow9XMvchcDKeIKxhWix48Nq3EeMjBkmgl2nS2kK5REUJenZKojNC0kIlKcpA1B6i45QQtbAebG1wVuCmDp0qzMkJ5d0HuMqiaMY5KeGbqobxGK2bKS9xgrUerzVhto9yAuc9tnQE2yhh8XOQGiXGeN1DJjDsCO4VY6S1aFGRbF5Fpxmq
16e7OSLptfBZH91qEWc9ejsXSLp9VNrCiZiTewfMjqdInX3I9YNZ3WTyPAETZJPVtgZ3tg+MAKkhiIAIHiclBocXEhuakVlBKEJoerO9aNS8joAHjIWAwomACR4TAnUAKwTON+O2fPAEFJtpwo31HX7kZ/4SL37hT3Hx8Ut887e/RvnNL+FPjxCLe0QY4tE5ko0UIUtM8NSyxSIH5xUhinFOU03vIp1AbjwCgwuoLKGqdxEnd5hZS3z+aS68+CkufuoL/Fd/7Rcfeu2Mc8xrS9AZ3jrmy0AsC/rtZvxjXhraa9t0r1ymd/kCdV4i4hZZr4fP57h8SWc0wgdL3OqwODzA3biBcwIVR2hXErbPIeNeY40jBTJ4wqKgzmvmh2Pqk2Oq9i7JoMPyeIq7vY/Z30Ut3iJYg6sstnoPbw1pbwf6CoLDy4goGRFMjaRR8VpbE7USsJbOqIVQlvrUIk2MjjKKksbHtPaYouLcJ16i/cjjXHv5E/D/+E8fev2cq7l/cpedzYxeoljOLToOCFvRHnQ5mM4o3ngFub3BsA/Z+XXadc0o6zDqJLRbXToXH6W3cw2RdhsDZSsQ3iIJLMczitMFVVUjg0WpmvXRkkUJLmicUCwWJUEJJg9OWC4MxtfMjk/ZO5gynS0gish628iNHmW3x5GPePLaBtdvXKHTG6FbI5S1hCLHLnOoK7AlrsqJFbj5HFNmSFkhfUXwNc7mlOUSe+d1al9Tzj7OwM8F6rrk9M53GGiLcg4VRSiZUpdTRL1EpH3irMd0vM/seEEUEkKoWVY12mjWO7AksNXdxtNFxR2Uqim9QYkICNTB4qvGvy3JJMu6ZvuiYr7nQHjiRBKMxweHiiMQkqo0KK0JzhEnEXVVkpc1PghcgKydsX1hiziLSLMU6wymKilN3eweAR/8x6bq/b5fXzOA3iFJMs1arHjuwgaPPrZN7+IGOkmJtcadyfSt8Sip8P7MgiGopsE2WGpnKec5b33tVU7u7/GJlxz9liGOxZleRGKEYjHu0Rs+xvq1K8RplypYyvGYbquDlIFgc2w5p9W+SDQ6h3cBn7WYHZxQG0hExJKALWt00qd1LkXN5kweHJB1+lgZEbdiOmsJxTLgipLjSYmQgihVzEJJYmr6VYt68eEzfrU1HJ/Oaff7dHot0lGf5cEJD37325z/0WdROx3c0QEB1exykei24uKVaxSvz9l9sMf6+iYDIanfeg+219CPXgd1FS9TQhxQnYRgp0QXUqwEDjs4c0Ica86du8qyzkiyDqmOCTrj+M5tpBNcfvQxXAi88qWvEEWKSGfsPrjFrHL0gyDKmrF6i6MjkniD6ckxF6IMCkMkhtBN6e5o/sK/9Tn+xn/9m9yf1I0zQnC4jzi5I6gE273CYlGjl0t83TTWX7SBR5VnOSlZyiW3F/8/9v4j1tI0ze/Efq/7zPHnurjhI31mmSzXXVVNNk2T3TSYIYdDCRhoZiiNzELQQoBG2kiAFtpoIWinhRaSAC1mMRoJAiQRdCORPexmO3Z1+cysdBGR4a6/x3z2tVp8N4vNYTXVGVnTTUnxBy4Qcc25537nPe/3vM/zN1smE8Or88Tq+JwyK1ltKvqmZ1RmPAsL3ETTz0pu75XMl1N09IhiQat2uGTKexctH6zg67HmF3JPIXsOMz9kp8YJKh8RbECaIaMyKzJIoKUiCcfq9And5gIvoD27oG9auotzkOB7S91ZGuvxgEVReTitHaed57RxnHSeU2tZ9xYbBReN5aIPTCaDiu+51l3f8ujd79A8PkOdbtjVkd2dCbNb95DFLsYodFmgyhLXC0TfI4wchEfWgwfna0RTEW0kheF1dSbDSUHqLLHtr5R9EmkyNA7JkLmZbI7zAm0+Jd63iLghJkdwZiDepy2IONBGRtNhXCRH3Li7w+G+p7z3FtXlhnpVIWbXKK5doyxHXP/atyiXO6yPn9Hefx9/seWyfkwSz4hKY32iOrng/LSmu3i+UW8k0Sgx
UBeEIklJ8A4ZEwmBFIIuCpILV9YGEttHXEwoOah1fRoex0ZPjGLw+0uJLgyPEUnDYTamqz1+KB6DSPTeo2TGjduv8otvvs2rBztMVMlbX3sbv77g2bM56+/8E+Q7/xgz2mF8eAvd14z2XkZMJ4i+Imye4DaniG6DG92maocBsskOERKy6S5Mx2ybLQ8fP+alt7/N63/2r/PSt36R67ev0QfQ+eizX7sk2KSMJAShq6haTxEtMinmB4ZR36JXnxCKkta1qMmUdUw8fVJRtFtuvnYPrgSOzdNzwtbiux5ZFviwIZkMkRKuewbBIW1PFIloHXbbYuuGGBKVkPQ5FKOC2Pe0x0dkakQQgtifIEUO8+t4ZUjRoWSOoh/cIqJHZhJSx2Rvh9mtOcV0jAor3v/+BWU+JmrQI8N2c8Lx4yPM/l1e/3f+fb7+H/43yPIcuj+6cPnXQUXH6r3v8uMnY0Zvf4HTuuOT0wuON5c8+eEP+Y4M/NmJ4VV5wWuvXGd+94Aim6DMhNHBXfLJAT4obGWhq4idH7wnhcO3PXFT0623pNQixiN04aAIaKU5P2/IeqgefMJmtcHHoYt9ebEiuZ4sK3n5pZfZ2d3nzrd/meLGDTbrijM94+n5msc28JXJEoMjnT3G1Q3RfSp2S5AifVtjdED2F3R1Q4gR21V0TUV3eYFva6yQrNZ/NL/0MxV+Xe95fOTp1o/Y35kTqyOKzFCWhtBdImNGXo6QySCSpq02uL6j9pH11pGFyKsHkXs3DOZgl3I0pe4bZtkBo9ka5zzOR4iB5W5JbQWdcIwmgq6UqDIjuYxcF0ThybMCYQZ7F5Ig/dQwLhKiQ2UCJRLlzoLLoxWjkaGYLsiKCSCHDUnIgQScNEoGbk0jDzY/n9Lvp2pemShUzms3xnzztX3mhzcY7cwZTSZDPFd0CK3RgBCRmIZsS+8sMuX4UHH88JSz0zXd5Yb66RFTDa9cj4zzfrA4SAmt5pyd7JPv7XP4xZfRoxlSBVxdE0NLLlt8VaFEIvieMQYXLkhnPWKyR/f4IS4EklT42BEqT/CJWm+YTnaHMYuPbDc1yXp6q4heI5JAGgfbBkyBl4LxLcFkktN/1KE/j5GfkIjRlNolPnp4zHw6YVdPOGnOSUXipT//i7j9JfKsGZTcIpJyTTZf8NIbX4SPP+D07IjZzeuY3UNi40jHa+TLO+hXDkFGou1wZ0/RpmK8m2NERn9s6O2YEA0Cy3Tk6ELg8uL7bI4/ZHumWRyMUUTe/oUvcH56zOmP1gPZNnpqkQjVKfmZQ4QxbayJreVc5cyXBfFpRN/YxZcZe/eW/I2/+Yv8p/+nf8LGDh5Z8jlYGH8Y1vasP/mQ3msyBSMVCRikEHzBhKGoijUfXq6hMqhNxPSO4Fvy0Zjdm3vYU0e2GPFocsj7dc/ZRcOfefVtbu1NKMoSHwWzbcv2smURK76ctoz7jjJfMzISn09Q0cH2BFXuIvMSoSIqNYSYaOqK1aNHNNst0Tf0gAuJblNhu56QwIfIpu6pXaQVii4mTrvAUeM4ax1r62mDp48BhUATudh2bLvAja98lZ3r+/Cf/tPPfP363vHkw0fsrFrmAqYHLzN946tIOvCJtgtkfU1sLDFITAqojQUfyEzEV4MPmfdDgeJTYti4O7QYxvoCUGp4LxIrkAnfHRNiRiyvkVLEe8FoPiEtSnbvvczpuz/CpEhqFahD8pEhX8zpkqb3BSMzYuMjFx8nxocjpnfvsfdnbnPx4Y/R9Zbthz+hee8+KjfYzTHtwx8i2g3Rdkip0HkG0wPc9DbVRY/fXjzX+vNJcBkEAj1wbVIYpjEMVioCrkQakii4sjMSxKSQVzYrHoX1fjCxZRjnOgKWSEyRAOAFgYHHl0jIIDgYT/jmrbu88eZXufcrv4YZL6kfvk/7j/4zPvr+32Oye4/X/vv/E67/mT/P6fu/RVYuyPbeQveniNSirMO7DaE5w6zOcEnixZI8N/RR0vcg8hJ3fslo
7wbmjV/gr/z7/12Wu7uE6Gk3LR+895D20Q/xz1G8eOe4PD3BK02sG+Llmt2dnLKLcOwYvf4l8skeSZRk+/uD7ce6o920tE4wfXLKuJBEWxE2LZ9aCdpTh/QeMTLI6Ei9w3UerYbfmfp+GOlZT4phsCCLBT55RLJkRRzcR+hImaJnPBxKZE/ZPUDJRBQamSn23/oiEYcQkvG911Fa0fY1x+885vjBxyAlVVdTdQ6nxtz5pX+Lb/6d/w7XXrqNF3Dx+IST3//t51p7hdZ89XBOVVU8e/wR7eqYpsqhrrldGL5174Bv39qhTRmV3Of2nZdYHN4iWQFC4zcVse1xztL3jlg1COFRGlJMuNQjdY/3Q1fNs2DTBJ5+eJ/q/ALrWvoQwUiKfMTucs6br9xhMimQFJSHdyFpdNVTNIEiK7k50rw+3uXy+JL6+IjpSKBFRCtP6ltcsyZFjy4H83JbnZO8pt9u6PuO7WpF3zbEFPHWse4dp5c/O64NPnPHL/Do4oxnv/nrHKonLMYRVWZD0aIVeTkiCIkRAtdbpMwoxoYUVoxKQd8Jehcp5mOmN+4gTcZyvE97cYIuHjCaWPpOQpLYpkcgmWWG2ErGM0vsAqEHHzwqg6QC0Xu00kgkRhsSiRh6tFIEaSiVYnVZIaImyzKEHEbCAiBKtMy5e2fObhG4CAYhw+dWVH6KlBJZphmVhq/e3uOv/8pb9EaizYxMaZSMxJBQUhMCaD2YwhpdEKMkRs/q7Jjf/AffobtYMykF81ni5k3BaBYoJxFdJIyR+FDSuDe4/fYXYTQly6FdnxNjQmrNxCjatiU2Ncl7UlTYco5wDtdVaCdRAnzvKCYZOIGXklRIJvsHrDeXmBSRKuGjG1Sznefysma23IFeM16W1G3P5I6mLAXqJ45pTKjP4eMXQqRth1ONcx1ESes/QKAINnL7m18nu3EdXx1jvMcrSBqU0kyzjNe15JOH73F5dsHhq1+GSU4i0T+qyRcZcqEIpkTnB4TLh8R6y2ZVsThQFH5M4IscPTmi71Y061NWTx8jYsd05w3e+YMnnD57yricgkhMigXFjSXt6py0OSW6yHa1QbUd43XPKJW4+QGXtmJnuSSdCsT+kpCVvPaVG/zK0y/y9//xD7EhIMTnTO6QghPfcrQRdBFenSvmxoJ0CBybi4ogPF/Lc2Y7c0y1xuSG/MZNogvE1jG/+0X2p7u8pQvWfeRZbdn2GdEBskJ7y6hd8dZki1gkxnGFkAI53kH4U9R0zOjggK69pLs8wx81+M2K6Adj8a6p6Ot2UCRqQdV6bIg4n6hsxPY91kUaF6mj4LTrubCRp63npHP4ECmvOLxSCDINOklscFQu4tNAhH+u6xci/skF8/0dJiMDboVYPyHLBZunDwjOY2WOyhMi2yMGTZlJUop0VhCsx1pPEHowaxZAiggRiKpAhoH+4RGQJMkYugSiuI0IHpShjRkITTHJsL5nXVnGb31tuAl4RT5d0pwew951pgfX2N85oDk7w58fEdoGNb6GbeDh//3vYj/5IfneXVIwhPYT/MlHaKVR4xmpGAM9FDOCGKYu4uxDsvMLpM6f6/oF4MJFkBobEyINJtOfmkdHkYaDd0p4EpaIQBJI+BSJIhJxVybZQ3EYSIQosMPcF4nAk9BCs8xHvLJc8JVbN/nSl77OwVe+wt7hIcVyn15EGnWdk+sLVH7I5I1v8eR77+DrNaO7v0hcfUhYf0SSY8L2iBQtqhgPmdOTKdiEyCdsqjWrtmE2XnD77W9w+PbbiMUe88WE8XyGSIl61XH2wT/Hf/IRvv+Q2H92rq51jgefPCUrcmJt0UqxK2bM7r5NUiUUu3g/mIn76hOkDMQ+EFfDeLXzJeXI0a+7IS5PDG4B3rrBJ9Z2aA0yWmKIOBdQoSeGlmB7lO3RBJLKcGmC7IbCT2aQmQ7pa8yta2SMCH2Pykuy6S+SUsCKCVkB267Dd7C9OKb7zg8GI+2upqk7tm1PbxOVyJl8
4y9z8y/+Nb70536ZEBzHD0/YPn6IPTvHHx0919pL3pGaC/R2xXp9TLGT83KhuH3jkFf3Jyx2ZrD/Bt9t9jhWU2b5jAkaaStcs8ZtW3zncK7D1Q1N51nFwN5cI3E8OV1RKkuWZbRVRX92RNfUPD05R4wWXP/GX6RXmncePqYwhukUxrsj9q8dYsaHZLuHqGyEs57mbIXfVEyKSDFR3CSiVY5wiZh6RHDIfkVmLN5a6DcQYVt1cLGCUFG7QFP3WGup6o7aRS46z9P1H61X+EwVjjEZ3/9nP+Lu+hPyRU1hiiFdwyhkpkBrcl2gpMY5T6Eky8UIKTf0tSckTRMcwWiK5R4mG9E5Tzl5icuL30OX1WCHYRN5qRFCEBFsqhYmiv6ZRiiNyTKInijBpMHAM8k0CEukQRCxbUsKEW8jdd1iRmOSiqir7qDtLSiwtuHibI2NBus8mybAz2nYK6VkXJa8vl/wa3/+TaaH15FNgwgRlcmB++I6TFIgDM57jNa4AEoJunbLb/2D38dUFYsdw+Ftx3Tu0Tqy2IWukZyeGpYHN9i5/U3UaDyMUahxVcBohRAR+i1dfYbsLTEkCBrb1eAFUSq6ak1yF1hrB6d4H0nW41REy8jpw0eUixExRkZlRjI5rU34PrB3uCAlT2oSbfLMX85Z7gmaJ4HZRNOvh0za50WIEZ2PiLalrQUJS6j74Q21B7FLyJlBLkf4J2fIzJCUJImILkYUKuNOpnn69JjZ6Tnl/Dbkinwbsd9/D324RN+6hhgdYIvXkeXHZLri7NGHlEVBa3uEz+l9T1Wdkyk/WMh4Q0pTpBqzOtti8gnKCGxyzHYWVN0K33dsu0R0Dtv2RCJlaShvXkdkBVIWUA18v1hqvv2XvsonH5/x/Q9OieKPPq39ceCC553zipMetkCrFEXsub9aE4DrWca0LHll5zYH8z12X/ky2c4eusgJ1YZkAyEkQufRtuda7LmxBMGK3IEKlqw0hHFEK4MyEckB2XyG8wFXKZwqWG1r2qMjqAclcL/Z0LUNXWfptzUpJZzStCHSWY/zicZG1n2g6iw2QI3guA88ah2nnaP2ER8HoVSjFEYOvoNDiSeICVovOX3v+6jV7vOtO++JnUWOCrKxQaeEW91H5WNCvyECvq8RPZjcYdSULuakaEEYvAQ5nwzjomBxSZNniuQ79HiJ1Jry5i71ySnu/AJpxmSjgnxa0p4+Q03GzJRheu8VfBTsHF4HMVAq6BsWhwdsfvwD9u59ET3bwW9XNN//bbr77+CcII0P8B8+oDv6EL+pMDoS7bsEH4ibE5Kr6UKBasaoyfTKuzEiiwy/fkYMHiV6SM+3Dl1KHKdE9JaYIiQIDJJbkYb9IIlhzGtjuvqaGEzNk0AJhSIOnGUx8B+1yInK450CIgejMV//0td59e49rs+n3LpxnfntlzCjEl0Y8nHBycfvMdIZWX3O9TffRk726VYraC8QfYtIchCUbI9A5MhgUZnGdzXOOTqnYTRH7x0yvXGXN157iflL95hdu4m1jjw3VN2Wp4/eZ1YUOLfCnvwuRdmRzRbPtfe5EHlcddzLDC4ERJGj73yVqDTC97RHzxBy8KZVCEyuUDFQuMBYSRaqZVrU9BcJ3xd4oUg+oGIgiIRHMZpIuq5H+BrRbwezcG1QwhNNgZmPSTqn2+boPOFUhs4E44OM0e4UkZXo4OnWW7rLcyjGeN9x8cFPWD15QgiOlDxS9LRNS+8iTe/wKHoyite/xhf+yn+dg699DS8077z7MTdDi95copsNIiWyyfPRNHrneHS2Yayh1DnXZwfcmI5Y5obZ/g5y/xX89be4cRZ55/Elv/P+loNDzUi4IWO67bCNY9X0ROGZTBN59Hz87ITjx0/IfcPB/gyRG+r1Ctv2JKm5trNkb2fBjdduI3ducP36Hd7vNB/7wKVW/NrNGywygdvWSNehtxVl19JvKpQsBqpumSMSGJNQqSOkFictrY3YWBDaNe3qDLs+wtuW
iKDqAnXv6ZPkfGs52/acdpGnzc+p8Ftte6Q7ZT7p0GYwJC6ybCAli4RUAiJUF+e07QZfrVifnlHVCakNvQ00faJ3CVVO6LseM9LYLhFIjCYZyQWiTEwWkiLLcClQZIqjM0tblWRGEwMYkw+BpAiE1gOHxHmiG8QLWoAXg4dTWZakIifEhMlyQBDtVWcrM4ggiC5QCMvNueZ7p5/de+lnQQr44mHOr377VYxUVJcVMldIbdBSYkNPWZZE5wk+IpUhDlM4Hv3kA37y++8x7XvKwjC/5bj3EkwngvU28ej+lPHsNoev3WK8e5fRWKF1IlpPit3A/fEd0VlEdQJe46LA1xuKnRv4ZoPYrmFc4Joak48w4xwXLH3fEHpoqo7cZEghGM1y2lXES42ve0SmkWkgazsXcKmlOJBMrw2KM3cZWUUJ1hPs8xcxCWj6fiAUB0/tFHkxw/qWZ4+fsXl0zuzNQ+RiBCdm6BuoHFTCK0WaztG2Y7loefy93+d2psnu3CD1Adl0tL/zHbI7d9BffJNi5w3SeIpIf0ChWmzdUYgVPjZUdYemR+aGfHaH3Zd+mSgzTp/d55P336VZrejrDfnY0G57rOvRItKHwWcrxgDnZ6iPBpJxudglFRO0kITtFkYleqb4tb/5Szz73/0Dnm6fL3HiU6joWa3OaIXhlYlhngxrOeI8z2G2x3h3bzh5F3toMsZOoTdbUlsjbUusaxQW1Xck16BEhylmiGLGaJKTkh7Gl0oORrxGILTCpcjJD35EWD8jzXZwSmDXF0hviSnQbCps7+kjtI0lpEiXLI1LWJvoU6IKiW0fuegDJ1cfFy7Q+CuuF1xxIRM+BBB6sK1IkRQHw9/HZyv26i3iYv1c1y/GiM4igUjbOaSSdEmQ2TWoHESi6XpsE5jHClkmWieJSTO7uc/s5i3m1w/Ynm2YHl4flHvVhmR7du7eoT47pdzdpVrOcPF1RD5CklChZX54A5nPkXkGQiBSJG1qbNOzPVlhz0/pyk/Id2bUF1vS43P6o6eo6kNSvyE4j3v2mNhXZOMD8sM7gCcmiVQgR7vD36dyQhQIFZBSUIxyspEm9pJ+c4malEiTPdf1C8AmeFIchBkiBXRKSJFoUhyeS4IgFSEl0lUxONA1BJrEWCnGcij8hMwYFwXLfMHXvvoL/MIvf5t79+6RLZbkoyVBDKZ9wfqB47S+oDl6BNsKMyoH8cJ8l9i1THRLL2qwa5zv6PMZaEERA15EujYQ8xyx/2WmX/xFdu/e4dpbb9B3HiEj2+qS44++h0iJrvdsNh1RFVymluAbynxMkU9ISQ/xZZ8RnsRl8txNHqkiejxFrD8BmxGdJZgxUWR0NpHpjNxLdKYYY5nIRF8rLruevpX0rh9G6D4h1GDijywGEYLMycYRubM/JMREKEceHSK2bXBVhcwDtq/BGJrLC5ozja23ECBGj+sarE8sX3qF0ycfYy9O6Xs7WOiYRMoEVROpg2TbQbbY51v/tb/DK3/+L6NTYpIpzu/fRz15gjt9hsgyYvRkuzMSz9cw8ClRCcGd2YLXFiNu3LjF9PabGF0y2j/E9YHu/iPuqIKX+pZtF2nnZjDbtg0yeEwWUN0GOZ5Q9y2ro1M255c42zAtNVIrcgSLvSWGIVIvtB0hwcfvfERxt2R3suSXZwqnNKtVQ//0FPYFo0yQUkDoFlSP2VWk2EPboLOSolAEX9H2Fu8Sm1pQP3uKkILu2X3a9SVJS9yVor3qI+e15dIlLirLeRd52lpO7c+p8LOd56WZQNcKncFiZ498PicvF0gyNk9PCH07nFyVRGea/WtzRtkRl5WgKApGqme8PCAv56S+Rescx3b4g1VP5ZohdjA3mIlGY2ibiAxxYIF4gUg9zmZkWqKkRCSJ4Cr7zwcCDdJIRJBopVAhUVc1ZhLwvqWq1xRGIUQiEzmZkdwbWdZec2+pf27BHaWRHJzXfPyPfsjutV2mr91m8cp1kBFSRA/mbYAajDaFp9ps
ee9336F5eslSwqTMiNOGnWuWdQXrlSSf3ebe177AZHaNvFAE3xMs4AfVkbcNmoCMBu86fNMMTuxK4oPi6OFD9vZuc9ndx510mLzENQ3Z7pJAj0wShGU8NrS9Z7K7S7+qIQ4jNTktqfsObSKu8ngB2Y5idi+idWLkA3UssdTsHs4Q6vmzj4VQkMYUWYleFFQuIXVJv10hguPD3/8O37z51wmjAmYzpE2DyCNZlDZ4JTG7ByxGY+Sj+zz7vd/iTvoGKkiSLjGZxr/7PmFTkb39ReTNQ8zONwZSfxERChb5iLLMENFzcWZhlBP8JXW15WB3gn7rSzz+8AGb409oN+cQIlIIVDlFyIJ2c4GPgegUZrNFnx4T3vs+t0ZTyPaQrSPRooxmLx7z137pkP/s//Xgc609FxJHF6fk8ykyTPDja1x76WsQcqRSjKPH+JZQ94TTiqZNzA5zlMyQsSKmCkyBGUl0McOUO6jpEi8KkncQWkI18Jdc35Fqh91uiL6nXz+hryvsdk0TJX3TkmWK4CNtb68OgA5nPU4K1p2nj5Ktg41PnDnHaR84t57KBdynxQNDB3vQdA5d+ZgGQ19l8p/yYlPoObmsedRptt3zKcqlGNSB26OneG0Y7e+iVEHrNcJIcI62C/RNh3ICoUeMJyPSxVPScSAVY5rqEtSIJ32B226YyMT1V25hL9coWdB8coLfdkQvkUXAe0vMckSMuO7JQLx3PVFAtBa77bDVkOtZS4V99oRiUpCcozs5QQdByvYI9hzhO8T8LkFlRCIiGYz0DIw4B0ojVQUp4/YXbhGIzPdKnt4/ortsEdMdSG7wwXoepMRNLVEx0cWEVoJcyqFLGyTPXKS/mv0KBEqIKxWvQApFEgmXBJPpklevXefLX/sGarTkzhtf5pd/7c8hmhX0Hb11rB//BDVZIHwkWIcWEeVaYgioELCbS0qRUES6boXbnCNkQmeJqAxK5HQyYTOFGN2m6Sz7b36FNM4hE7gUuf/ej+hX54zHI6yvkd6RMMSqo/QOOc6xscAIgUxLghpUoCl+dh/JADREkjGoznMwLhGhxlYbVJKE5vLKsSKD8R5OjAkItJLUpkRJDX6EytyV+bog6oiVg1OGzqDVJSrzw/Ue1fjqlGQbHv3wCduLCxJDd1oohXNDGEL0AZ/iYJ0mFUInQvTUfeLo+OmwfpWgtYk+KbxJ+F7QtgIznvLWr/wlvvBX/xbL11+jaXrkZo398AM4e8iss2TLAnvxBNdatg/XCP3ZhTEAQcCNnSVfuLmLRFHlN8jy63jrsc/WqORINlKKii9lkQ/WPZuzwEYHPqoavnpnQlGvcK4jWkVbW3Kj0eMxKzUmjXL2FwU3DiaMRiUqRZjeYO3HnIkRv/foEnfs+LVuzS1alrnluk6Ds0GaInRJsIMGQaDIRxkxDCb/ve05ffARtl3jYqK7uKTdbLHVFiEHCt22dbTODy4HQrG2idPGcdp6TlvHSec4c5aq/6PX3mcq/LT0ZPGEznbszPZgsmB1UjHZEUQxuIMrHMklYhBkZc7OtSWvXz/n+KNA1/RMbxTsXL9J1zrGJsN1gRQspRlRr4+u4nkUruuwmaQoZpA2VM80ModyUdGtC1IlECoNUVddTUxDxzEEiRSRQEDpIRc3zxSJhBLgui07y0NkgrrtcW3Lg9Nz7reaLx8Irt/Mn+eQ9jMxHinykBgvJcvrJeff+xEqRGavXyf6NIwko0AIjTGSxx+9z0e/9y6lFRwWUOSGXILdD8Qk6NZTlje+wq03XgY85WhBkh1FnhH6NUZENpstRkt8c4ZOJb6+xLUO52tQBc5axst9hsGjIp+NcRLSVUu+WMwJ3QbvEsVc059Zgu3xocf7QFaMh6xkWaBySZcqyms18wODMYlFEVHR8Cg6MDlnF5b4OQppYzL2dnfR0bOqL5jlChcgKEgSnj14n4uHX2Hx2k1YTnAnDUZpUkyAHkyyS40yOdNrLX37IUf//Dc4uP0ycrRE
T3bRUWKfPmJzdsL0V/8C+eIAOX4Fkx6jshX1pubRg1NyU7BZN5TO0FQrms2aj8571o1hcfA6s+UBNn4CjUVkYxaHX8THjtZV0Ht6Equuw1Qrsm3J2cP32dUKrSDV3WBEffSAL335LvfPIv/Hf/j8160LEZlrFirxNCZEn7hzesY1qbmmYC4Dk0IxvlESzi2FSZgiQxmPYoOaBygXzG/fwvYtUirIM9qTS/pmS3P6BK0Hq41us6Y6PSI5C0VO5zzNqqZPgjokfG8xxtD7RGMHbl4XE51NXLrARRvYRs+zznPRB9bO41JCSzHkxV91g4YGv0Imfkr4R0hSjPgQyIoZWeboekPbN7zbOvbl8/n4JSFp/eBr2NBStx1qMkYrhSxybG+Jqw2jaJksx8jjh+jRFyhe+yrIEdn8Omacg034Vc2mjoQUmTw7I5OR2G6Hoi9JiIneWnSKmFJDcMTO4XuPVmIoZvseEIg+kKJHCEkqcrwYijmTeURQSNGjTcLqBcEFYmhR3VPGpsN5gdSC0eF1ssNrhGZDef0eerlHaSLPPn7IsycnbO5/QsgUm6ahaZ5v+pEQPOgjO0qwkEMmTR/FTxM4lkawidDGgE5XPqBJkmnDjfmS1+7e4+3XX+XNl1/h9je+zmJvl+ATPip83aG6jlit8E1D5jq29z9GSZBK0DcNUmVkN3ZoqxPCZo2IoAx0dY0ymq6rCdN9vEyMFte4/bVvMjo8pLIbtsdPKKdzZFJoZZBa4aMlKwYhm+o0sWvoTp4gz87ZPjwh7i6p5rvsaEn/6D7yYIbaHZOe08rKOgdSEIKlPnqEKHLiaIwuRqgAI5PI2CK3NU6WiMkSVXhSa5G+IAZHs22Q+QjvHcG1ROuIKaCzHGkkushoqwv6yxVBDFnkXWOJITCMngQ+BDxXQQM/fW3TQLnpxNC594lAwEbwVwV7C9iQSMWMl772Tb7+V/4d3vral9CzBauzE87+8f+D7MknZH1FLgLFzVcQyxFR58TNGTGbIMzzFX5CSL5465DHXvDOZc8XueDV+D5GZLRBspPHYU+Rml0RKcaJwkSe1lserDpyLLdCQ/KBVJ+we/0GsgssRxmFmHE82uU75xes6hnffuvLTEuNzgom1lM+u+BL0bLUNfdCwKSG+bgGpUjlEmVriAoz3kdICLIjuJokBBfPTtk+O6LvW0Jo6JLEuUC/rbC9IwpBZx1VbakjdCjqkDhuPceN47yzVC7QRU8IgUwK/qi50WdL7lCJuD1lNF9g5IKH73zI3v4Y3wS01ld5s5rYBtrmkmxSkGclX3xjwQfn51xWoEuJnpYYnUCKIY4pHOO2p6g0nFoEgdAPqR2JnrrS7Fz3LO84ZOlxp5FHv+9JaU6Ugz+UERKhNEqD6wbDZCnSlcmnJyCYTuYoJM5bVBT4pqVQkdT14AXTUcTs7aKy51Oy/SsXN4vc/be/ilxfUO7OWRrB5e//mEJKipf3kUVJShlR9Dy5/wkPfvc99tSQ81kYgx5nmJ2S8e5NpvMp7nrOzs07GOORKkfKzRD27CO+PR9yXssxoW4RZpekNWlb4/sVKWiKnSm22dKdHYPQsE3UXYM6DOh8io+O5AX5fEm36QmtIDeGfFISbWSej2gsnD9YIVSO2BfM7tVM5omRCvgq8oNfF7i2RSaJzs3w3NzzV37e9SR/iRKKa9d2aVxLW1fQSMbLfaKNvPd73+Gbu7voRUEqPMkrpJK4aJFpIIcnk2Ou3WRPa07e+R712SmLm2OiNoRihE6e0eaSy//nr7P41V9jdPhVqo9a/PYJ20YQ3YyHT04RJmP39pSnD+/TbfvBWkNPEdmI5eGcopScPX3GW7/wazgx5eTofW4Ur3L8/gckL+iV5mjVEvJTrAooDbO9G8jtOfg15sYNxPKAX/vrt+B/9fxrTynB6/sFH64dF77jujqjty1BTynyAjMxbNWMa8scMy7JRwojB2VntENygo6R7fkF7flT7GqDtQ227Qgqp1ud
oMucEMHWDduLS7yP+ARtiPjO0kXo/HDjc21P4yO1S1Q+UMfEees4ahxbn2i9Z3vl0TZIrwYLJiXSkMWarnKu02D1Mfw7klIiAcEn2uaS3OTMlteotuc01YrH2/q5rp9znnUXqesNpZbo2KDX1ZCrrQSlydlebrm5HIjiZCPS8jYUOaHtaI6O0QJCtNAL4qolmIw+bZCpxTZuaPZLTYwDDzRKQeyHGLOBeD940Sk/EO+9tUjXkyVPUgbfTZB9GIj3JpFlPbLfoHcX5NkI7yMpJLLpa5hc0fkcUUjEpGSzqug6Tf3bv4tdnYNWVKsVVd3RWkfrEv3sBnL2fBxJAI/gNCS2UTCWAi0SMkqEGAye5zrj7mLO3esv89IrL3FDSb7wrW9y52tfp1mtGC0WIBWEhF1fDvnRwhB9TfQ9se/x2wrlLbMs0jU1sWsJbUcfEra6pFs9Rawu8NMFeVng57uwN2dx7zWWN2+zsR395Sn9POfy5AGpb8lNTjndx4dE17ZUlzWkfshzlZK8HNFLgZmMqR5/SPf0XYpqiVpex40EzUcfI853mH/lDkJ9dmHbzcND/vZf/CU29x+wDoHjizWVhq3SzKYF0idY5EgjCO3QOVciYiRDJ1dLrOsxeNp2sCdzIeGsB+9BKbIMhBIEm0jJY1OicRAwpBgGhW4MhJhAKlJIJDG8pjEOXUkbIaSEjxGbBE0Cn6AnkYzh9pvf4JU//9dY3nsVtZjzye/+NhPXoMyIRVnQdltGt15Bq2EI1j59gBxlxNLg04jq5OFzrbuJEpSi4/eOa5we0RQZ5yrSbWqe9oKdXPLaXJN8g8wcXV3z5KJjrOFXr11jlDwmFph5gZkt8U2LLHfZe/0VrkWFz0rODq9zGTT1yrIwgL8gcy3LeMm3DnvKPJDZC1IxR+RTZOxIiyXlzoTNyVP82WPcek1o60Eo1m3pqhrb9oQh6Iaq8bgQsQGqLuBsT+8SjU9sg+C07Tizkaed46zzqJTIrsRu6g8lhf0sfKbCT8qEDZqs63n0kw/ZX0hwkuAkwSekVpi8JAiN1gUpSYJKzG9d53B6yXplmU52UVnOdFxSNcMiNKrD+jU+CkQUaK0INtE1niBaTJFY7ilkDtpout5hmF5l7yai9yiTk7QhhYTKRoQ2kWLEaPAxopAkpRFmTCYMUguYCFyy3N3N+fKe5PW7huWtm0g+eK4F969eMMHizgHNowohI/OXb7I9uuDkux+QfvKYg6++QXF3h+3JCUc//JDdTDPOJGVeUO7PGb11i/HOiEmpcduK3gYKbZExYLKM2Nf022NcXUPICKMJmRWYyS7N2VNcX6FSRrF7jfXZEXZ9incRicFXFh6sCSlhdg/JZgUq5sjSsD5+SjGeoMuAtYJ2fUqMGtsGmExhp0RkEXPQkk81synMC8XHHyZy7xmPMqqNpXCWIBOfJwEvxEjTbFESshIuz89QKaKjJlhFkJbjBx/x0fd+xOt/6euYOwXpLKI6NYy4vEUqRTQGnyR6vsv+3dc4fv9HpBiZ7x+is4wkJiQVGW/X8Mkj/OGr9GqP9Xofoxpm4yP8vMAzJbSRUa5ILDg/aRhfu8vNu9cw+Yb+PIJKrC8umSx6Qv+YcpqTzQxxHeiSYDqdcbyuqWNL03pu3XXs3b6NzvZAaWxvKZ/vsPtTaCn40vUxD44e8/Io8aqULPoa6XqC2OOZlTw4b1Cq46Vdi06eEANKm4HpIBX28pTt0ceEbktftbRNi+ta2s4S3HCY6txQ2LXWD0bDQOsDjQ/USWNdonOejQ2cdJ4LG6h8og6etQ34q4gtRLpKtRn6CSBJcYj3yrRAIfAeuBoJItVA8Uj+X3D+YsC5jm57zigvEHFC2z5fxyqkxEUIKOvJnESmhHKBAOyOc1TsByHE7BppfgOVAv3lKWGlgYiIEQnkI4OsWkZes1A9c9OQqRVnlzkuGXyy4CMqBSIQhKAYXxHvXYN0a5LvkFpjGJTEelJAVtDVBcokUrYg
Ccn4Rk65KNGTOS44gotUz57CaEyajVn94Edsjk9oV5dEKdCppe8bejvkgvYB+ijwoyW7f/av8NZf+rf4e/+L//i512BKQ0pRqwJWZExJXCvGTJe7XJss+dv/3r/HL/zlPwcJsumCTMH29AwZAiE5+iePhzQbo4dIuhgxSmGPNyTvyTODjpZgO7xtMAo8Dulrkgtsmxo9nmDvTpE71xi98QXG4wmdC9x46SUcntUPf4uJNmQmQ0+X5Ndu0VUV9eYMk+UYJZiOS5yXEAJnJ49YlJIQGkiWfJ4jwwa7ASNySIP/nD3XiHU7HLA/I/JRyb/7H/wHHD34gP/8//J/45MfP6RzkT46bOXJErR9R0qR6ANSRKRIGDUchobMhIECMbRRGPhyKRFJBOcGI/AkMHrIue99oo+CKCISSbKBqCR9FMg4WBF1KYFQhJBwSHoh8dEPtkUJKgRGZXzli1/ni29+kVe/+cuMcsnR9/8Jftth7AZSR8r3SPU5pehx50/pvUcajTAabMvm7BF194ztw3eea93lEo5OLnljNkHNxgQs33vWUqSMDx3seOiF4tHFBWe9ZWEy9oqM/cUeb45vMt+dM97bI1/MB255VSMi2N4jfI+u1tzVnpfHEiFrdBvJdETlCjVXFFONlBpTvISaTOnXl9h+6O63z07ojp8iXYfvGrrNlq7v6OoO17REqbAIuk/FbgGqPrDuPM0VN7pC8KT1POkcZ72n80M6lgAypdD/ktjtZ+OzrcoUab2nqFtKPZwiQh/pwpZiNsXonLIoqDcVwkiyyZzxco9Ntebtt+9xcfkTbt7OMTrD2UB03cA5UIlRqYheEJPEuwAxUq8DVC1ZHslygxcJu55x/IFHMUKIgHctSqjBI8r6gR/j+sHnKYLzw8laJA8pkbyj62tGkxmhFYio+f1316gIWZ4jQ4H7OaW2JTLc2bBZCZHQ+Ygb3/w6m0eP2b77jKN/+l12v/0mR88eMxE5ynQUJiObzSlv75EXUIQOv2kIXUshS2J1jjIFbbMCu8Vu1/g+YCYL+rrDZJ7N5gzV1nT1BqJA6BKlclzo0aWhe7pBPW7xk4IQHdnc0PWnxGAo8xFhW+Nsz/TWEldv8HWPHCVWjWNjR0xuLSl3DNFmbC+hUBmrJyecPjiiEJIYFCZLKJlT1e3z84QAKRVt0+Gt52TTYVNEENnNZ7TVmso3NMbww9/6pxy+fI/pq3uIPY09cmiGyKfEVSq7lIhigtq/w07vePjj30PJwHT3gJSVmPEUKQX2vXeonjxjevMu7F8nbj8gjTw3S0WQU1aXKwqTCOUN5qJj/3DCyePvE5sj1hcdtg9UFz8mT7tI32CtZv/WTS7UmuQjdVsjRcSlMZvVikf2e8iiZP/6XWhqdOpI4fkSEz6FiBE6xzcPCpaLEaWQ+MoyFVsymZGnnixaRi7HV556G9BlhpSSmAS27+mrCuscKVjqTY3te7x1dNbRtz0oTS8N3vvhxuESXQhsbOIyJC69G1I2mo61d9RuMOj9tFCLSVwVfXJYImLIVhQ/HY8NCl0fAmWeU5Yj6qYmxeFW9mkyz5AMM3QIU0x0zoGQjMdTytGMZ88+e+dACthGjyGwxRNTQnmBFJJxUCxVJJkRRiXE+gkhdAiR43WB9xIbYGQkKXk0MKVhIqBdS2ys6LtEiIFIJDpQJoBKIArip8T7UU+Sd8kWY2zvmS4lhdL0TY2ttmiRCH2NoCO0NSfvdQQEoWtJEfpmi7WeycF1QpY4+8m7RNvRu4BQCZlDF2DbD7ysjYX9l7/An/tv/w+49trrZD6g4udJ3ZFIIq+Op3z9xj5v3b7OW7/yt8hffZPVZYVZV3ibKLRAh572fINsa5KzZIirjnIkOY+rWxQR1/fEriXaho5IEgnp/WByKwUx06jr+0xmO2RFRhsadiczdvZvsXP3NawXmMLQVBXriwt2ZjuIJFFSo6dT2rNzTK4H60HbE6TE95YQPNJIxrnFtdWQB+8T2c4B
8zde5+L+U9anK66LgqQ0N24aZuP2qnv9GZESbnXJ/mLCf/gf//f43/6v//c8/fF9uggbH8mJ6OQodEKREFIhtAIfh8aJHLwdfBgKPx+HNBMphvxjISCXQ2yp8AwigSAIDO99IRWBQV2cUsQzROhZBC4F+iixApxIw+8QkuV4zre+8cv86l/9VYwpSaePcd/9+zTHP2GvLBFG47MRQYywyUIxRuSOVF8Q0ow0vUHdOrqzx2zPa+avf4Pr3/wV+MH/9LNfvqxgdnhA8GDyQBbPONomjlLGbma4pzQpjunKfTZFhrl2E6Mk26SIcodREGRVh/YeJSOy2pJch0mW1LcIV6NLiRkvkfmIcpZhe49PgFYokYMCipz64pzTP/gugo602Kd3LWF9OWRYu45602J9oA+JrrGDiX2AzoF1kS5B5ROb3nPee476wJkNXLqA9YNXKAz2i6Q0+PBeid2GffJn4zMVft4F6BusimjvUKM5yAylQEoNPtKsVxhlyMcTVFGSAkzNHsvFmls39smXS4aoxYSQAq0Erg9kpSBedQ/kqCTGhLMWFzxmPLQ+Q2t4/PsecVmgijiICuJADBbRk4IHLSA6hIhkRuN84CqTAyUN3guMyQe7gNhjg+WT4zVdC7ee9OzeOCXGz2el8SnWVeLpxSk3r8/pV0/JuilivGD+yivI3TmX/+xdHv3mDzGzgnJsiCi894wWBjX15L4mhIAIPcIHvOjxdSBkJSlGZAhEB0lN6OwG0Ts6MydcnrNpWqYHu2zPjjFaIss5ZTGnvngKU0P+zWugHdMm0CVLZgVRWbbPKvrWk+c59vwMoTWyCNQdvFdPuGBC0UTKMrC77NmZ5Dw59diTCDYwvblgs64g5FSxZzwGJZ+fNClSREVFMGPK+ZLdkWB19ozL7SVKDBtbb1su3Tnv/s73+ebhXyAtMsyBIVwmpBiRvBv85YTEAnq5pBQvcdis+eSDH3HDeSYHdxAmh2xGXkJ7/IDLyxNGN/fJbl5n1A2GzKYoUXLL+iLSVaf0tefsk+9jRE9yFu96xrPrZHmGMjmHd79AsfsSlxcnRD4hbE9ZP3OIUOBaTT6LnJ9fkr77O0gh2b95m9RckKrV51p7fYxsk2S8v0/jHA+Oa66LwGLUkSnFYnnAbtdSFhHbtQhvqTaR4CIhRvq+43Ld0vRhEASE4fTZWYvzkeHwH2mT5aJxrPrINkSaABe9Y+UjVYh0zg8+fQnSp+PaK989Lflpqs7Qybt6zcXA+hdC/DTVwUcwSrC73KWqK+q2YVBhDd8zKEcH3p+Pgf5KSb6zdx2eo/DrrD1775OHP/MHf/CZH+3/i/Dk1/nf/Mav/+HP3H3eh5prxa8dXOfbN8fsXd9ltn+LYnWf/FnBYu8GarpAb89ASaonWwgBlEYITfBuoHk4j9QCFTzB9YSmwdsenCWECEYjdYT9JWk2Q06mWGuZ797g8Po1Utcyni6QeTFMg4Kl2XREN7xfdTYf7g+2Y/PkIY/+3j/k2q1D8i+8Ru88dC0py4kyQ+nh+0QMiBQoRguSXDJ+5S36NnD5oOLawtO2irPTHn3gSM9ROLt6S/XkfcYLjdz9Om//ma/z/R+8Txkhl5JCRHIBWQStFSpBsEOCSUpDgWdSJJMAiZgkSUiS6weVuJBsfRzeN1HQ+oSUw/vQC7B+UID3Ebo4xBdGkXBpUMx6EQeOeJK89vqX+fNf/jI3J0ve+IVfZHnnNg/+6d/F/fbfZX5wl3J6QHnzTYTs6S4fs15dYM0OlRVkokDqknznFp00PHv6lHtf/bO88d/8H/Ly176Ax8D//LMXfj7B9Xv36KuWplpxuqk5WTWslObezpyeOTt33+Ll8gYHKEoiZehJfU++3dDUlrivULJEFxDjOSgGN455TjYaIYsSM9+lrT3oSLJronMk5/G+xzU1se+w1QbnL3FNS7te0/hIsj1aa6xzdL2n6/2QTOQ9vZBsW0eXFBsHaz8Ueie959J5GheHHGsG
4aMcjo4/PWDElAhJoFQx5M93P5u29pkKvxAShQQXPWYkCMLhYkZuSpRWgzmkUBSLfXQ+wiOGm/4YFjdv8uq6o2sLlFRIPZiSZlqis12ik2QqEr1F6oTOS1wnaHsPOsc7xfG7idDnkCWS8OigQSZCciQZUUqSksJoPZigpoTRmt57jNSk4CknGW1XDyeZ2CC6yNd2PY+V4/CmoW77q9nT50eMgp1DRUyWpCW2PcM5Rzm+xnRvD/PnvsQnv/cBIUjyIqfdtIQyR48ifnNOrTNUtBgEKSqiH25mVnqQiigUwQVGyx2a+oJ2s6I/OyN5S9f6IccyavQI+mZNFg14gS4zqn5NaBzFqEQLCSND8lviukGpMUl61LQkBcXDWvDUZ9x5I+Ob++cspxW7k0SpHCAIUvLge4GHnyjWl2u0EqhckJUlWA+pe+5r+On4LshAbAX5aIbJDcFmtF1PVo6YCEVvOz555w+4+fJNbv/S60QpMdMMj0XYYlCCCoVSV4SS0ZTlvS8QO8+TTz7gXj7BCEhREIRhOR+Mh6tPHlC8vEumI1USlGOFyAK2k8S6ZqoUs7HAeU0QkuVyyu7dt1ksb5AFS+MqlIDDG7uI0PD4B09og2OkD9D5dbbbTyhNzvnpCeL73yUrxkwKgeg/n6VQ5wMfrRveuLHP46fnnJxsWJYBm02ZGI+tjmlOT6nOBSk4ZDbwMa112BBpfeBk66g7j5bDzaQPQ16ujwzfEwIrlzjrA6fOs3aeLgzUisBVN++qNrt6NYc67qqzIJUml5qQAs5djWyH/AWu/FqGTN8rkrNsWuRIspwvyLOc1XZDTHHo6Md0xacaKB4ueaKD7fb57FxSSvuf6wX4/3MI4N++seAbezl78zl7L/8CO2+9DXkBWiGSR1pHVIMwRzg72Lo4d8Upcyhnhz2v97i+GzJTrygBxkTMckK/PMSajHxnh/1bt9CZQUsoxgtcCMhdQ991dG1DW20GgZ9zBNsimi2di8jM4GxP9+x9Vu/8LuLDJcvocaOS1ffeYb6/w+jtL5FKAc4hsEjfgJQkUeLGu4iiZCqOgSXLmeTJRzWT1ZbnsYj46MnR2d/4H/8vn4/g9ieM09/7DX7r937jj/jqP//sD/h//k/+y5/5zAePbddxuV0zL2eoOOMn7x+zVZplpnAx8FGUbC8rDppnvKYUOzIw04nJvMTMwK96sjxHjxIiNZSTmihKRteuoadjYt8PtjMIuvoM327oV6dkZYntLM3FGe3FGUIKYp7RNJZu1dAKRd07ZEpIFWisp3cBFwJdSLQWznrHug+sgufoU2qM80QG3l5MYhC1MfBkEVcDtZ+K3QQxeLxU5OUC+DkUfkopgijQWiGzYa6vsxKpr/zTTIYZlajZPtKMyKTA+R5jZhTXDK+ZBWfrC/rtJeW8BCnpekcp94fc1NCQ5xlRDmHSIQZkUIgUOH+QUx1rTNJImYhRkpk08BuGpO+fjotCiKQ45AorEUEMp5u22SKEYzLdQ8kcLzPq/gxvxix1NeSQjsqfXtjPi+kYFtee0Z7nZKMJfXOM29QYqVHlnGw+4+Crdzh97zE6STIjkBOBtWtkAksD3uPlkNdLjAglsSIgiozJcsl2veH05CmTw71hXLJaIeSYcqRxoUW2La6XmOmMqASNT+TRQoxMZgsCDtdZRuMR1SbhjMHajsxKnpwnVqPIrS8Z3io2aO2Z5p5pkdByMFqNKAg9BzcMD03AtYlsnOOjx/URIxLKPH/HTyuBIhBSSd95zo9OGRnJ3uIarbME59DFnEInQn/CD37rv2B54w7j2yVBBpIRiKSHG4ur0SIfimYlEfNddl95E+s7HnzwE1597U3y+QxVjkgqJ8sz9nxLuGywcUU+GiGEQeoxlc/xvkaIDbP5kp3dl+nqmqen59j+hK7uCP6EzapiV36RZnVGRs4v/M1/l/vvPeTZk558dB3/bEvsz1DBsHr4gPdF5M1v/RJZvvhca8/HxPcen3E9F5wc
n/HGMmdiDKttxXgnx57X1JcrotFYaejdYJfkfMSFSBdhayO9HSw2PII6BKqQaELi3Ea2PrD1kT4m3NXGI6/eO0NCA4BApEFVj7jqflzx+GKIoCCXGi0lne0GMjniSgBw9SNiGIl2LhJqT0qJxWKP0WTJ6eU5rm+u8l+HtSi1ocgLinLE7rUbHD27/7mu5Qt8dky15Bt7JXs7Bcs3vsHky9+kbx2qrzGDoHJQbTcd0gxRi9E7YnCk6IYMY+eIth3yt6WCsSaWU5gtYTJC7xxg8oJyNmexXCKVwrvBR7SuK+pmgwwJQkOwQwpICAHX9wQivm9pVxty2Q0dm/aE+e1d1h9cwDsfk9+cEc6PqKtLlm8eYibX6bMZ3g+WIaXWeFvjk8YcHiDfe5eHDwz7+4E+Klarnhg+u53Li0PH54MPkY8+ecprL+VM9+fsXFtQtA4S/LjqGKWKXXmfTo9RZkI5NvRFQbk35WAhEfszijygVIuIgb67SoPqHE31hP7ilH67xfYNISisbQl9PWQh955mvabeVKQ0+Oy11uGcp49uoFkISe8cdUw0NrHxgSYmTuqeky7QhEjlPE1IA3dPCIT4VOwWSUhiAkFEpMGBYKBJfyp2E0Tf0zZ/dM72Z+P4CUHfW0wJMivw0RPTkP5gigmqKJDGDBYMIgz6eTJcX6PMglTOMFVLvznGl0tUNiPWlySToc0hKt3/KQch+Q4tA8YIrDNsT3qULwcrhwiZUYg0tDtFkhAjKXmElggFMfirTqBCCtAyo954qnWFLvxwJLU11bbnNz7c8q1rCiFmoASdfX5eyx9GbkC6FUO/6xZZMUN3FbF7hq+PyfQEM1Lk10pWJ1sKIRBK4LuOtmUoahNElUAI/Kc2DjFSZJKTxxuKYon3Dacffoita1SS6N0RfrNCaoPZ3RnUugRi5xnnGW1dQ4p0m3PMdIwZ5VhryUaSqEa0beDxVrH/suYr+2cYCZny7EwUmQGSJEQ5ZG7KiEEx243cfjtx/KOM1lryMicvFVmeQD6/j1+WCW5eX7Jq9weVcqiYlxmLxR5eJmy/4nTVs3f9Lv0KVAw8+9EPeGXyi8SxRGpFdA6cHYxJm2fkoxwpx0O282TCwcuv0m/XvP/eO7z+xlvk4/HgCRkUKWTI5BD9AU6UPHs/cbKeMbtxj3L1gGr1ARcXgYNrJTrrKHOLkifE6ikXVc1sdpPNxQOyUqAU+O6CO/cW7BwWPPjwFDUu2D5qKKUBpzh7cMTR/idce+Wlz7X2Ukocrbfcfyq46HreOw9MSRS+57LbMsolVRuwyVMDIQbwDpcGYYMTgtZH6iBo0xBhJlNARsGUyEd94tQPG9OnhdrVGxIp5BDLNTyRP/SshhHu4MmXiMnR2whZxrgsyIxkW/f4GEhXj/upiWtiEEtYH6DdkoDZbMkrr3+JunecHj2i1IpRrsmKgiAGB/+L1R+9+b3Af3WYGYXGI5BIbag/eh8lFflkghyVV36rAQmENCRLJGdRwuODJ3UtUsYhHrLIKe7dgoOXmd24y2KxoA+e7WpLMZ0RkqTtHb7bEJot1tZsLk+x65rZzpRoiiE3PVh81+GtwwE6JYosEC4fokRCa83s9be4PPkB26NzUJYqCKa5IKvWFHsT6r7G57uoyQ1qVyGLSGp7xGSX4qVX8evAiZ7Tqvc4P32Cd88/7XiB54OPkd+4f8x8MSFTFqUkX7k55uNnG/Zdy21peC26IbigTNRmxv3kKR+eMvKJ2ajHuYBPDOEQwpB8YP34A3y3wdVbbO9otlucdXRtR0pgY8L7RIiJ1vqrwi/R2kATEi0K20fa4LnsPCedZ+0GSszWe7ZumF4MO2a88jZI/9IWGoHMDMpqHwRCDns9UiGiuAoxBEj/2kPHZyr8EgJlIGkHaomQ1ZAWESXWBpJySBKxqREukM12MeWMUASiLhjtZINtgV0Tui1CGwKOqEqqdspBOQa/HYyaJSAFImiac0F0ApkCQqQhriYNnD0h
FUQwOiOGIXJFS8XgOiQJISBTpOs7AhEpB2NWoRKZgc1Fw45sWfclq4snzMsvXN10fh4YSOajLLHqHjFlyWSUoaQfyNWxY+JK0uGc45MLxNVYDR+IQtDaHi0VRieMVvS9G26dIWJPL0nRszldXV0TRTad0Dcd0+kOm8YyWuT025psd0pzeYmvGpCJ8c6MartBC0lezqjaBqU1643g2Trxo7OSX/1Lkd3pGaW2zErJJI+I6K549JKUNEmoK6NbR0Kw/5JkddmzfaCJocU1gr2788/VP82M5O6tQ+Ij8NIjdT50TI2n25xxdnFK21m63FEUg5XH6ugp1f0HTG7dRC4K1CInChCrM9L5+7RHHcW1N2C0h8jUIKy4eZu27vj4/Q94+ZVXKPLBRFYoRRKaYnKbp6efcF57yt2bLPcOGB/mVCeB8zNHtWkoF3MmdkuzOqW3Ee9yrFuwrisWRqG15fjj76FHE3b3dzk8EDxqI9nODpkuCTGRxcDZg8eMlgefe/WdVi2/+zQyEYmj2pFSIE9w3cJOqSEOVgydDrgUcDZSh8jaR5oIzZXfXkTwhhbcVRElICD4kobvRriMV5566Wo8K4aNqxzNh3zOtiZczXw/FXJwpTwcisNA73qEFMwmU/K8ZLXe0jn7aZ34U3VoEgGkJkRJCBEfPCF5Du7cQeeCbnNBb3u2283VDw75ry/wJ49EQjhLahvs058wvvEKopiQaoihR40ygoik0BOdRHhLNjVEZ4FEVNDrAnPnLaZ3XibfO2B3dw/nA3WIhGSYLPcQCugtITqibeibNc35Me2zJ5z88/fZ3Nhn96tvoIoCkcB6CzEQuxaXQNVnsH2Gml/Dp8Ty+h6bezcID4+YyDVh1IMF19XYTiAmh+isROgCb0agIKYToovMfumvUR8/oPn4d5HpJ4hycsVRfYE/SaSUePdsw+S9R7y9f0ZVRch3uJULbt4o2VvmxMqjYseUNQnFbXeBUYrUadq2xRcZymiEkjgfsU1NX7ckAn3T0FQNvu9xztP1Ftc5Ql5goyAET+8SvY80PrK2iVWCtXdUnees7dl4T+3ikH4khtrqX/jdiqGZJWAoANOVekMQSXjvKYuSUuZUTXUliBseg6SuWDKSP1ra8RyFnzSCvCxYbbYc7pfEEPDd0JrvGonKNPnYI7QiKsEoLwdpc2ZwfUuSntGsYF2tyYoFIpsSXI+U90g8QcTVlWQ8oJRACUm7diifoa66CUIqhBCDp5YUKD38XxtNQBDjEPyNkCTnkIkhDklKqm3PdDfDuoAkMR/DssiwsUKmCUpBWX52Cf7PgiBglEdoS5j21FUgyV0WoxKCxfcOLXqWCHhlnyfvPqXAowIIkTDaELwnU5roPeLKszCmRLSRGALJC5RRQ8ZldEx2S2y9YpRpqs0atw30vSfLJeP9KU5EHBGd5xTlGCccQvVctJLV+Da/8+NzvvVNza3lMya5Z5pFpBSQBsIvUiGlwZicmAKkIW8zpEgsYPlWRrNK+POc0TRh8inCrJ77GoaY6G3NYjZltdkiU4lKDcL1bE6fcHxhKea79NtLru3fZbkzxXWO849/QFg9ZHTzBnp3H1Wv8RcPyOUWJzyi3aLyBS5JxGhJedNwT2V89IPv8vjhfW4d7qOLArW8AXFNtz4jOo/wEdkd0ZwdE+hRvkKJSFMFxrMl0zKjqxPTyW1eWt7l6ckK52FzUZGpDQHFcv866+0p84ObiLDL421DPltQzqcUtuf8o4/ZbD57uPt/afHhY+Rh05ELwVIrjBw2g4c2cuSHQ1CXJM5AubzJ7hsv0Tx+wIf338PH+FN6nhCSdxG0CG5c7UdCwlcMvOskx3EYMXy6QUllKGa7uOqCcVnQ9B0hXJ1DP+3kfbrJpeH02vUdIkVmsyU3b9zh7PKM7XZzldIBWhvKcsR4OmM8nQ9+gwLWq3POzo5IIRCD51OJiFLDOES+uPH+6SAEXGcJMwEq4NwKXE1UE3yR028FJpfoQl0J
7hytLbBJIxb7xMmS8eFNrr/8KllWDNy8JDg7P0OHjmK5xLaBECzBDR9dtcbZHhciWoF2G7bvrAibCxZ3b+ApUYsRapSRZYpkGzJpkdMFQeVIFbCuZnl3F9qPmZWR0STH1+BVRu0yVEq4+n3Gi7uD9VC7wa1XuNazvXwI6/ucfPh96qOKyc7eTwVNL/AnirPe2oe/+dFTfvOjq8/86P936B4e6Po/RqTn4E7yMzmSn03V6wNSSaKT+K4h2RGy1OQjg9CaiEAZgwsRGR2+2RCKCaqc4vrh5F1MJnQpkbUrJAGhM0xRMJ29glv9ANIFhZFkxuBcP5hShoQIQBzSGlKKZMb81DQ6CcjGGeNFSbPx9K3F9BbvPEbn2OAGTlGKpBTwwTEaDTFlq8stz6rA3ZkgeEm1XuP8z2fUK0gYFclMQmURk0W6xrFqBGOT0JlHoNEhMrueY+OCuNngbUIrCSIhxdDODd5hTEYIgRACyIgyCWU0UQRMMti6IWDJpxOikuhWEGQgHysCiZSX6L6lOr9AMMRZZcslIctY3Cg4/WSGV6d86RXY3xk8gWT0JCnAm6EQDfGKcF8jxBA9h5JoL5lpgZoE0rcND3/bEsWENCo/l1ZGAHXXQL5DsdzDVedst+dUnaLXBWo8Jp/sspj2yMzj+qeIIMB3g2w+bODJU6LuSe6EQIYxe7jLE6QoEONdUmEw5TWQkoPVBe999zsIEbl19y5FOaattqh8yc7By6yffsT2/BEqrNmfebIikWHIhODy5EOUkMynd8nmr9NV98niCaLKyGY5oZVgAtX6GcV0RFdtKQqY6QY2Fts32KpD2UC32X6+xZc4Cz48DIAFtv2/zqOohrNz+ODHP+NhAAa17jsW/jjOWt47Th69+6/9nv/yaTQAzlo21c8ueEOw9H3NanX6x3gG/wqeW5n6As+HiCDpHDkuB4pKt0UJjVURGUcIrRFZSYyC+nKFOpih9u5S7N9mef0my/0DCq2xwNHlhmI8prq8ZHc2w7uMvtrQ1VtS8ETb/bTgEwz8QVGOMBOJf/qAvm+4PL+kWgmufeMt1Bu3iDKQ5QXjbElsFbbtsGaK35wgxiPStWs4Ep0p6UJOHeewXuHaY6RoiH2FdxCsw7UtSThsdcb6wSMuHm6YzMesmg4ffj73khf44+MFR/L/Mz5T4RdjBNvjU0YiksQgQaa3jLKcYlTiY0AwqGd62xJXZxhbM1rsgRzhogQlcL7DhC2inOBjIJcz1OxX6ex32Z79hFv3SvJC4nuNVhuSyIhCYkRCIlEIcJ6gBGoyRU8WeBFReUKFHEWNLwxZF6lkgjD4QYmkBgVyEqSoOXuypooBYQSFANdarP35jHqFSBjpIAeRBGXWUY8amipQ1xGdxHB6zUAKx/Iw47wKSAY+mFAgpMAnS5KJYB0IQTmfkYaUbEwxxuSJ5viSaCDLMzIBsShoQgeFR2U5ufJ01Yqu95STCV2AVrTo2Zrp7hBFNZ+OycyEcQaFGnIEkxwR6AZj3yiRcrDGEVeGoBJD8omoJBIYTXK69U2+11u+bi4I2+a5LA0+RUqJru8xWUDoSMwVfSXZXFQoM8ZkEt9cspGConSw9uwvZ0z3ikGZJcFvV2TaIFYNbX9K1DWidxRmF1nsEFMalN8715jfvMed7ZZHDz7EKMG1OCSwJDWiri4J9WN034ILyBI8kfE40vYN65OKxWKXrMjZnL+DdMdgDcKXKG/wQeFtTzQt2XhEszmjO7NIazAiw0RDMBBUT3f5+dJjXmx+L/CnCR/BSol1EWUdflNjyglKOzwaVzvqjz4mFpprb73F3i/8BfZefp3ReEoEOhs4W9X0TQXrS8JV3OaxBFkUIBXBOkJ0KDzgyIjEMJi2YwzT63t07/8EazvCWoGXtM+eUL58iDaSsVHMdWLTNQTbDobGO7dRwrCNmlTMsZ3FFT16MibZE2J/TtIJ2yvaOuDqDbHZErTg4uic4z/4EFE7NjJhJpPhnvkCL/Bv
GD5bVq8SqNGU0gicS8iiRI8nlGWBdx7RW3RmUCKihabvemK8wMUxvdSYiUabEu9yCiPZnj5hKkpUURCSxKUD5PiXqC/nnKyPmY8iwpyQCkMIChNypFAYoxApgdQoQPuE9RNEfQlSIqUgG8/wrsO7jonO8d5SJYd3Q9wPWYbSmkw0jKRgPFZY9BUP5F83Hf/jIwEGSZCCyhZILVnmljI3+Olgt+KsxHeavu+wsWM0NegOkkkk60hp4DcKNDZ4RlmJloIYA3mZUV6b0q7P2L15iMuGxIPt2QnC9eiomE6m6GmiX22IfnD837iG8b5heR2U6slyxWw05trumO99MOK/+K7kb//KNXL1FCUkhS8IIuKvbFmkkEgMzoHI41Coygnn/ZJ/9gcZVTPlb/5VxeZpzf33n9HZ5/dF9CHg+oaOE+q+I0YwWUaRB6rOQVFSO0nmx5ycB0Y+0lwc8VK5T64Sbbehe9IxHk2oP36IWYzxokX2GtKH5DpDpiVqvCBKTf7SG9zOx2gpefTBj9FEdg9voGcZKfYkV5PsmvloSfKJJtTsznNCCnRtoip7/OoZJxcr8ixnOjtgsj/HxRpJjmw9qU3Ujze0W0d95khqil4U+Eyze/cWmw8+oO2fXxDzAi/wp40AVH2HaXoYeYiS0K7IREPKa0Qumb5xk8XLX+TOt77N8vbLnK/OWT+6T+gs548f05+eUcSIEAG3qZDSsV13aF2w/PKrhDJHCUGeK6SIg5DCOoRzZFoRDm6j9vbpHveUcc1kOkGsnjFafUzSB0SfOLIrTs96JpmgXd2nKL5MzJeonTEuCprUoJTFxRUxNMgs0vYW3Rlc6GkuTujWp4hizulHj1ivt2BhNtOEbIoPx3/aL8ULvMC/gs9m50Kkqlvm1/bIi4wkxEB+T4I8LwjBD+qsIiMvR6gQQAmE1BipCH2PKDRC55CPSZcXbJ7eZ/fVr+CjREdI+ZLrr32bptqwwdOtfhffnRBDN2RZkhBCDco9lZMVBbEsoLeoyQFmssOz+z9kNtklVhck4ZAiDoKPBBKJ0TnBD1FvUgvqoDjbekSZY6Ylmf7s+Yp/FLog+fioRKUSU9asRE7nBK+9rEi+w5mMvmjwtqN0ic5GygQiG+xEEvGqOwkxCIRWTPYmtI0cTDvbnhgkfjYihS3t8SmprUl6TLmzpG4usUeWLkn8yDG7NmIysuRlS6EVk5FiXIKOPfAh/62/8RL/13/0lP/k78/49tsT7u6fUegaIXqgRMmE1ILoFNaN6fWIxydj3ruvaesJr9/KiOMnvP9bz+h8S1YqkM/fQc2LEbu7c45WPX3X40Nifv0at1//Ao+fXuJERvTg20H4EeQ5vYw8frziNHxMsfHMptfZXLmhF0GRzSaQzWHd4h69T5l9haBaBJGYFejrhxxuXqKr1nxydEQ+3mVqIiF6pGrYW4yQwVCaBctxTlWfIFTFjZtjnADrSur1Fje+UnS1RwjtmGegxhPMfJ/q9JwYE/n+DBdGlHv7BN8SZUKPS+L652Mi/gIv8KeBM+vO/md/8MlD+ORP+6n8q/g//An8jic9fG8FL2gGL/BvID5T4VdHzTZNODm9YG9Z0m4bNBGfl8zmM4wx5EVGEND1FaYsEbpECANCYYxGGU1W5ih9h3p9yeqThyyv30FODulcw2RnF6EkMRVcnj6kbb/CRXiMlx9hooUocSEh05BG0UbLS3/5P+LDf/o7jGRL75eINEOIAmkFshjR+w1RiSGvVwSUlvRNhZY9WVGgjCdY6FpLsT/6OYo7QAmJNoLTKvDWIuNsK2kDnJ8GDvclxnhy35OJnl6B2y84Pa8YXfYoI5BJkmc5IVikkKiQUV2e410guDgEZTcNSo/Jl5ouOHQ5IqjE5eaEDihu5kzmAjUqyFRJKSWjzDIq5JB4hSWJnsQFB6OG/+hvXecnD2f84N0tv/4HBaMysZwIptNIpsH2ibZV9L0gyzS70xxpTwmbd/nO77YUJjKZRHIV
iS6RPkcDVWYlu4cHtDKytecoNmgsOrXc3AepJW0r2V62nD3+EBlaRCZp6gLZBcatYBN6St2gYkR5QZmNkeUcRUNqLL7rUWVP7IYnmqREzHe496VvYL/7e3z4/ru89gZkWcHefIfMJ+ptZFJO0MUSXdygdSuK/acE7VlvFNsmsViOOD2pSTikHNOqElUuWd54hf3XBVlacXl6wepRTVc9Q8fI2eYUu2mx7rP7f73AC/ybghdUgxd4gX9z8dlUvUJRFbfRZz+kUJLJ3kDOFSJh+xrvJTKbYrIMKQ0gB2K/FDhn0bog+oBrGiKJTGkWe3O2qyP2929gvSSTHqkyGBmu3bzD6uyUyegG+b2SR7/5I/xZhyontK1jPB9TXbQ8/M3fxj/6iH7UIfJz/MUpfbHA+oBZ7uLWa1KMJO8ZlROsl5Sjgth0PHh8ROtKlrsVWeaH56V/PoVfApQSvLRvmZhIFJqmcXz5tYgSLckxdE2TxwiB1GPURBPfgu5JS74KyBBxVY9UklwpSA5rBTYMqRai64kiQbvlrGoRZYY1ATPN0eWM3cyhZIMWijLzjCeOkRkh0g4pVQiGvD/PBiGzQYnMR3zhnuDLL89pu4zWTqhagW0N2gQyYxmPDY8eHHH8aMuDH68JMaBNz/5MIWXABocPOdELhPgc4cdZzs6bX+O0e8x+UpioicHRbp4Q0hmj+QEHN99iXBq2xx/SdFzZeDQkB+c2Um4bxu0WDSyswkvIzSWy7jFmij48J2UCxiVpWwMCOTaIbJeXvvAlvv/P1rz3kx/z2sv3mBSLIZatFEjhUcqhR3MKc4Drc/LJQyYHjuuv3MBuImdnZ/SuYHn7Bge3v8DJkzPOn54QJ1tuvBLZf1mibOKj7z7FpIw+CGzv8PFF4fcCL/ACL/ACP3985uSO/Ve+zfnxO8yqlnqiEVpiuogWBSkz2M6CMiilEDERQwdGYmPASIuImuChry+p+zPK0Rhbr7CXT5nt3kDkBSorKLKIawKTsaRLYFtByAoumg17skLtLtk8ewgeqh//I8RoQl1JZLXFp4Z+LRGTMd3lCqKgd56UKYJQWNcjkyPZQLP15HHL4UHOeD4hRcHniJb9l5HSTz3LDvcjRsD1nRGeHoIYfAdlIiXDYAgVETiulZ745oTVoxrRZ+AF/aqmtQ4jFcV4TtNbxrMRlydr9NhgJ2vysSYrDGMDUitEytHSYDLBYpwYaUGSDiE0kjExWWI4h6QRUhJDDwSENEP8nt+SaTDmhJ2JxOSKzTry5HHPd36z4pP7lqzwaJ3QSiODoPYe6yRalpgcggrEz2NpoCXjV7/A/nFLrI+wnScGg85naGHI8mvYriMrPK9+5W3aGqrNhpMnH2J7Tx0EfeNpvCIFzyZ6zu0Fo0nOjWuH4D3iJ99j/taXkeYWVklIjtRHRJZjdpe88ubbfPc7v8Ojxyfc2R3hExR7t0gyErse0hl6T2LGBySVkOoEVEcxi1zbn9J1Bluvie6MzB4Tt2c447BuglEzslEJVmKFpg0RHzXuhRrwBV7gBV7gBf4rwGcq/Moy4/1nPeX8LS7XP2JuA01rUUGSFSWZNkAieEde5kilEdLgMNRdhUsJU3iS1JTzMZO9P0vqWi6ffkh1+ZjJtetIMWTShc7R1RXBWoIP1JsNTdsgxiXrPlFeNojJDLoVHk9ZRUQeaK1DGE3XOowZ01cNtXWD718u8cGjfIvAcXb/ATIGJtpQLkcUB4ecHhu6n5Oq98HjdPZ3/kfdH8pcbIGflR36qSfPHzdX9HN6vP3J4/l5LgrCfsHi9pL41HJhA9YnEmOmO3cQylNdPKLIFeOdA4qFp3c1Cc+mC0Q5wntJEILF4S7laEKMCb28RlwIto8+wTQNmncptEHu30SLCeyOSGiC1OzcSrxlK773nT9AuYbpZMrszh6xuyD6GpPlhPNj4nyXYv51LB+D/wm0jpH2yFSjbaD5+AeEsxUiSjqh2IxH5Bo++d4x3gm8cNgg
iDJH/NxOHy/wAi/wAi/wAv8Cn63wKzJ+9w++z/XliIIR2/WWPFtg/SD6UGUGEWTw2K7DlCOcswTRsbMzR6gRPqrB8FeUtKGDPoHMCdU5YbOmi5eYcowUGt+1dJ1lu22p1x0f/+SSfVOw3M1okcj1ilA3qFJQy1OSVQQ0OmWUsynd2Ql11yHznG7TMl5MyGczpJIYM+GjD9YcnXa8uRPZyRI6jOk3K9bVH8Mc8Y+BFzyXnxPGkemXXsaEFcf/8B9guxbNDn2V6O0JMmyR+Q7Xrs+J8f/N3n8925Zl95nYN91y2x5//c17b3pblWXhioQlSNE0QUgdlDoUUiik1/4HpAg9tjokRUiK6H7QgxRSK0I03Wx2N7tAEpYsFIBC2fTuenv8tstNp4e1MwGCFFGlOlkFofYXUe5UHrPmXmvNMcf4jd8QVLMJt/wDpMlxQmKrCpVpTJGwd3VAqhQXzp2nSGqmuQDvsNWc5uYb9LGQDAh1j3RjjyAh5gnD8YC9C+d49/ZdbugCc/9Deqnq5o1SkhU9sJF68oRk8yKEgjq8R+AmuYro4MhDQm845Pigwu5HHp4e49oJ9dRR0Rkua52iZMBr8+Ne9TVr1qxZ85eQHyjwM0rw4rUxj2aaE7HNZdfQNhYlNdJ5XGsRMiJ1gqsila/IiyEySOanJa1f0DgYbe+hjMU3Ft+UtE3FxvYFZkf3QWkmRy1J3gcSqpMD3HzB/u1DjpYNi0RQxkg/i2ht0OMN2thiywXSW4pM4oNlcTpBpgkogy9LfAxk53fYHI4IpeN3/um/4vD+PjsDx6U9SbpRUC5rrIjkRfIpLfeaHxQBSGFoNwX5K6+z/d5t/N0PsO0+CVMWh5NOX3quYS9cpL9xgUtPXeTg7n3u3j1FSE0MCarYovGSk8NThr2EffEe588VxKzG6By9s8WT790i3L1Jb+8K2VZGmB9DqhBGMt49z3MxMFmUvP3gGKUlcThguHUZlWcsl0eIxRQxkbSTIfm1F0kHXyIbDYC3SbMNlNbAgv5A44Mgz4Y8fjzBKqiiQMi0u2jXMLp4/ce46mvWrFmz5i8rP1DgJ4Bf/aVX+Edf/ZDk8uc4eusxydCiQooMgumyoq/7OEs35st7Tk8OcG1LAHSSMhhvUs7mNPYUJROkVKhsk+nJIeV8TtRjWrdkNJrTVg5rA7PFnNPZguLKHvXCsbAOW9cYXROrClGkFONNbNUSegVhPiPvZVTLhqZakOQJ4uIm1197ibhouPftb3KxX7HzdNqN2rpQUJy7TLA59t4dsrC20vgLQwREREUgTbn2cz/L3uUdTg738U1ECsPpoccoxcmjDzndf8Swv83Vq2OcyEhHr1Aul3gHUlloH1OXJUUqUNKwMUp4+GCf6eMFPd2jaSN6XiGSKWmRIeUQjMLnOf2NIa8+c43DWckb9w/4zHlHnmaUsUdsatK8D63DHU9o5U3kcIOt8RV4dczuq1/iwXe+zuyjdyEGJI5qOWO8pRlkGZNbc3xoaYDBYJvt68/+mBd+zZo1a9b8ZeQHCvyq2vJ4ojneP+apZ68yEzsMqwOKItK6gC8lQUiQC6RskEqwXNQI7dna3gUhmRwfdR56IRKQJHmfKD3trGR2vCQdNAwHWzy+f0DTlOT5DqcHYGtHFAm7L13j8ft3kfMFYy9R0aFRCBYo4Wm8pQ2R5WxGEixeBhoTeeHzL1Foi68WXPncdWxdMzk4IF9OyHd3SPIhR7efIOMjBGdT6l1zBjjw3iMBn0DckeTLgDw4xPpI46YUvR7z4wntoibIA9qdU7a3NzmZW5TqIdO263a2U+pFw8HjA2bHmqacs7fbZ3J0iGgSMhVpHj7BH5/C7BB59QZBdbIBYSMi32K8NefVaxf57Tdv8uaDKTI6dna3Mf0dtEzwVYWzDZonuOkxerzL5tYeIpFsXr1G+egm/V6krBy9HPJRxqRRNB9N8EhUkrPz9Ms8Pjr9ca/8mjVr1qz5S8gPNrItel54aoD5
+7/AP/+NN7kx3KMKpxwfW1LdYAwwb8mKBK1TsjRBoghe8uTRKVme47zFuSUQCVJS+yVa9qhtQPZzrIfH959A0yATz/TwHmXlWfiI6zuGu5v0hxtMDk85un2POJuzIVq0aDEbCflOwunNI+omosYpl56/zo0vvICoFmTDDLG1QwwKe3JCvp2ihCFRGa5y7B/u099JUWr56az2mh+YaD2cCOhHiBIRxlib0lMG5w4pMs3Wtdd5+9vf4PD+Pr2NHoukwdWH9DPD8cG3mJ1OGYx3McYzrU+oly2hgeXsiP07++zsbJD2x1RLQTxZUOwNiLZPGnLkcJs2tqgkgySBQ83Vi3u8VLZ8473btPuGz6Qtl0YpQRoWsyUqTZjPLUkead0RonVYZdl67nXK5WMWD75F6AckhsfTmrc+nFIGg1YJSdbj3Y++x3T+Q87qXbNmzZo1a/4d/ECBX69X8NTFc6RFy+FnZrz53Yrc5riwT5GnFCYSSotvB6AcR3aCkQqlFLWtQUa0kiAgSRJ0miKAVgU8kd7mNtEk+GzC9MFjTDSMr1zg4M33kIXgwpUruNmUNC/obxeUcYem2aBqG6KW9K5d4taDJyRXr3Fhb5utS7tcvnyFlJJscwtUJGJwriJNJdYmqPEejobjW0e0k5KezvHubLp615wBziGelHC+R9QRREE+fg43OmTAnPLYk/SG3HjxVd6cfI2yloxlQZJrVCLJ85SDeyfEas5wa8D5zW0GARbzitoLZBwQ5TZeJlSmJulLNl64jBTnqU4bst4E9kYEIxG2wVy+RDZZ8LrQnCwrvnvvCHf3CYTI9iAnSRKiNHghsDEl0zlaZ0SZ43xk5/kvcXp0wOHju9y9W3HcBJpQ4HGoYFlMDhAiAmud6Zo1a9asOXt+YKdipQVCRr78uevsHzzkwf41Lp2cgq2IuUYnguZ4gtSKvJdjfUPTBqKMED0EgU4TnG+RDmwFOk9QeY9iuEVVtWTjHVqr+eYfv0N6YHhwKLh2bsxGs6QKlul0QtAJw3N7pMMdnA+0ZcU8wvDZ8+ye26SnA+dHPaQChKH10FQlvp6hdaRZLjDSEGTJk3stt9/8iH6YUkVNXHuo/YVBBE88OkFmCWLXEEcJIblC7j5H+e1Tju6/i61+j9F4m6Q35O6dJ9Tec+3GNsORBNcw7mUslxpQjDd7FFnDuN7Eu0hZnjCtLQMhqY/3u9GC9+/xTGhQiynt6UcUW79Mmw3x3sFoi1iM2IiRrzzdcDivuTmZ4m8/4sXzIy7ubJEKR9obEETXxJGaHJNvsjw6pHfhKS599lc4XX4bMbvP/OEtZGxx3hN1JISAEpr1HbhmzZo1az4NfrDAL0a8d8goMUbyqz//Ov/wH8847D9DnL6H1NACeZFQpCm+LhEyoJIEqTWJyWnqmugjTd0QIqgILRPGvT7LyYKmqmmt52D/kCOX4vRV3pouueP7LL55l1xbfv6vfZann30OqXOU0jTWofSCXMDWxoCdwtAzAaksKQ6tBHXjqJslRId1niRL2X+wj50vuPXdu8g4ZXhBozCEdcLvLwzReeTplJD1COkI0xcQJcnoaQaX55j3nnB055i7/ohlDSrJmc4DdaM4eP8Jy6OKJPZRJsWVFU0FURWkgwHDYZ+796a42nI6aWmtYfvcNWqh2K8a9uZzYm2xx6eI7V1UopECooL67l3Gu0N+7pmrzL/3PndqT71fYZlzYZTTREhMg7UJFYJRPqRua0yWkw3HPP3FLxP7u7x37zbC1/gQsN6hpCEq8HF9E65Zs2bNmrPnB8z4Cby1KK1wM4vQff7Of/BX+a/+63+FdQ7V3mQzUbTLlsR5SANCCELrKbKC1jpikEQHWuV4qwhCIbzn5P5DktEAn2hmx0uW84q7hwuMkaC3ibGhf+kZQlQ8Pgp85uUCNShIVYJzjlNXMe4JxgPFoJeSxAWuXFDahsPJKUsbGG4O2BykuMpz78N7zI5PuPfePqN8Rj7IqEqFnS3O
bnLHmh+a6D1hdoTIU4TWeN9D9xXNAMzTL/OSVbzxz/8J+w8e0ziQSuGB6cmcagFCbzLa6CNDSb9I6fV7nC4HuHZBmvX5qS98ng/eu80H92aEZETlJM7D6PM/Tf3db5EevIGcnxKcBxkIUiOFJH/pRULV8OxwFy8U37p9j3f3T5nePeSZnREXBhmjXsYwZqRpjpjPiSEgHj5gqGBnb5OqukREUrUNRmmsa9EaVBAIqX7cS79mzZo1a/4S8gMGfoG8V9CKlqoVULcUgwF/+xc/wz/4J3dw/c8wn7/NdqyoW0grgVIghCZSoRKF955EJbRNgwsOv3Sk/QxTKEQdmJ0Glk3K770/5e6hRR9/nd7GLnNv0e2CaCuufPGnUalBxQSkxgsY5CkXh4LBIJD6lnJ6wmx+wvHcEUSORlI9mnPgDti/dZ/Jo2OibwkLS4skRks5a0hTQVxnW/7iEANhOUOdaHzrEP4cUY3RY4mMOePnXuaFesnsn/0TWt8SfcSj0GpIf9xD68BwnFLIgK0rZvMFUaW0tiQ8fgIPW/bKJXeiJWYFPjbsbezy5M7bqPKQrdohb75NcmmH2B/gpISiT+z10dkQozJezROsm3H3eMIkRL67P+HxPOOpjYLd1nNusAdB0B+d4+jgMY0P5DuWxbykdQ0xggseHxy+BUFEn9G86DVr1qxZs+ZP8wPtLmXlWRzNybNAnqWUKqKjYvP8Jr/2d36F//K/+zYfTi/wtH7C1cTSmIpEQq8nWcyXCClQStI2FmJEGlA6EoOgnSum9YyPnngeiTH354rXXn+FOzcf8sKrL9DWgYv6Abk4YXskiDHgbY2QhsxBmsaurLsIHB7e5uh4n0VtCSWE2SM2NgUuh2ZZkmlHklS0jWBQRMoqwTaOJHWMtnOQ5ae13mt+QJy1TJ88YhACWW8DN5shIrSqwOQ5bBlGP/VZbkyOufuNf0X0jjomkAzItCYxDbWtOJlM6BUSoQTDcR8x3KQ+uAsR+sMenz+XYI3E1jXFYMbh/gx6KXE5pr15h438d8hefpXQ30Sp7n7xwSODQxYJFy6c49z9KfODQxyCh2XNcVmyOSm57DOutorROcHpyYRq/4BHh3/ER/ce0NQLJKuxzhFAEhFY537MK79mzZo1a/4y8gMFfm66z9f/r/97ZmXKK3/j79B79hla5zGqYOPK0/z9//AS/7f/4h/w5qOMj8pDrmeBc7ln4TwyeIpck+USIX1nqdICXoJ37B9XvLNv2XrlK9x98yG0NT/zs7/Ed7/1n/Pg5j1+8Vd/mdN3pzz/XJ9iMIK2IaSWUBWc3noDYyT1eIBsJjRHdwmTBfZ0Qj919C70IDe4VqBlwWx+yGATHCBUwfJ2ybKMZCOFSjVhraz/C4P3DrtcEssF7ekxInYdr+rAEC8XiF4gyoQbX/xp/OSQ+f49Fq3AR0fUiqS/h5aOVAeSMKF2kiwXIMYkpk97+5vo9pR+T+FCCxKCHXNh7xr3700JT28Rmm32bz3k/PA+8vkdhO6TREtYTLHLkuWiYn9as2haolBEBERPiaReNhx9+BEPTiZkd2/jWsfpdMmsnINvAEEUEhEFggQhDTEa4g/ed7VmzZo1a9b8uYgfpKwphDgE7n56f85fGK6u5+z+xeAn6J77d7G+D9esWbNmzZnyAwV+a9asWbNmzZo1a/7/l3X/6po1a9asWbNmzU8I68BvzZo1a9asWbPmJ4R14LdmzZo1a9asWfMTwjrwW7NmzZo1a9as+QlhHfitWbNmzZo1a9b8hLAO/NasWbNmzZo1a35CWAd+a9asWbNmzZo1PyGsA781a9asWbNmzZqfENaB35o1a9asWbNmzU8I68BvzZo1a9asWbPmJ4R14LdmzZo1a9asWfMTwjrwW7NmzZo1a9as+QlBn9UPUkrFNE0JIRC8J4TQ/QKtMSbBe0eMEYTAOYcAhPiT7xdEBrlCSYHQCmMS8qKHUpoYAQFEcN4TQ8R7h3MN3jna1tHa
SIiRELufFmLsfh9i9a3dL+u+FnHOEUIQfMokSRq1MTR1RYxx9bfEP/NPrS7u3/iK6L6MQEqFMQatuv9ECEKICARCCrx3NG2DbS0x+H/r5wsh+DjGF0QiESHE6neIf+NzCGH1/wkwSpIaUBKkXP3zUqBNhhCBGFz3swUIJAiFEAqpJCA4Op4yX1RnusYbwyxe3Bt0ayZE93tX10SEtvVUZcVw3EcIgVQKhIIIYXU/xE/uje5r2mQoqRBSrq5dfvKZhBA++dnddXa/V4ruHiOG7l+ffIJ/+qaW/8bafvw7Y4QYAzFE4up7QfDOezePYow7Z7lefx55XsTReMh4PCDL9Oov+fiP/vg+En/qjvqTr3UEiAIQNI1jOiup6wbnBTF4hBAMhymbGwOk/Pj7BBAIeLyPuNC9L7rljPjV5xMJSKGQQiJl91coZUi0ASIPHjzh5Hj6qTzDvUxEgcD6P/maWl1+lueMN0fEGGmaFuieD+8DWqvuOf/kPop4HxBCoJVCSkkkUlcVy9miexZj7O4TKUmKHKk1xNithbWIj5+zGAgxYn3AeUGIH79LBINBjzxLV5+K4E9/hB+/D2KMBO/xIRBCQEqBkvKTd83H3xsJeN99BvNlTVm3P9QaJyqLeZojpMcHS2I0/V6G1pLT6Zx+kZIXCcFbYrBE74kEhPhT78vYvbm6fUPigmRZS7z3bAwLsqKHkB/fv/8mf/aO/fhrH/9v5xynp1PyPKPXK1YbU4QYaZs53i7wLqCNQCjRfTar75WiewcL8Sc/8P2bzZk9x+PxKG5ubuJxhOj/1Dule766d273jpdCEWJArN7yQkjkn/p8YwwoNInW3edLJIr4yXvv43ui+1mSGDxHTx5j65q86LG9vUMUgiRLqesF0/kxPjha65BSkSaGpmkQQpOmmtZanHVIoSiKjOAjtW0JIeCdYNBLEHK1HyHxXuB9xf7j6kf+HpRaRGUACVJCPy+QSlI1LUmiEEi0kaSFIRKYTyrKZUueJxRFSiDQ1t36SQVCdp9JjFAvLG1rUUl3r6A9yNjdOxGiM7TLQJr3aOsaHxz9vI+PgaosQXTPpKvDma3LmQV+xiTs7pwn0SAIKG3IewNuXLvBc88+zW/+5m8zHqd87433QUXS1GCbhkQp8jwjDQ0/9XRKPhpy7ZWXuPrUc1w4fxGhBFJqQpQsFwtmsxOWy5bFYsrJwfss50seP3jCg0dzpmXLolE0PsFGTetXmwkQg8IHT/AB5xyT6fFZXfq/l/5gxE/9zC/S1nM++vBdnjy+v9oAVy/iQPdwikgXF0u8d0ilefrpp3n++RfYGO/Sth7nHYNen3JZ0bQOZRJq57sXTwy01ZLF4pT333+Ho8MnAKuXAquNSOC9R6nuhWBMglLdQ58kCSFE2taCjDy1u83FcYtWLUVPkScZqUyRmWZ85RlGgwHN7DYEi1YCoRRCDdDpmLy/gZaa/+1/+v868/W8eG7Ef/Wf/0cINFIpIhIlJCFYBDA7XdC0JaO9XYzSIFPy0Q5t27Ccl/ggEVLRti1aChbLJRt7z7F17hJZ1kOZBKkM3nVBtLctbbPEtiXEiJAGozVSdhtz93AHvPPdoUMIRIzE0G2eQSlMmlH0NroN21mca7FtQ/ChC0hDwHvPi5/723fPfMH+HHZ3t/lnX/1/cP7iACEtIihESEA4EIG42tG650ggRBfsiqi6CFa0EDVdeOyorOebf/yQ/83/+h/w5GCfxBj+yi+9zN//+7/CjSsjxgNBlIYn83325484ntYsa4uMDr+M5EmONpEmWk4WFbdvH6NEzvVre5zbztjuDbl+7hqJ0vyNv/6/+NTWZXMg+PwzKe/fywgyoJ0jaVsCimI84n/2v/ofo5Xk/Vu3SXWCMZL5omQ8GlHXNVob0iThdDanbixPX7vMxsaIGDxtU/OHv/s1PnzjHaSQhNiiAFUU7L70DIPxiOGoz+m9Ryyf3CNLAjE6pjOw0VPWLcezwLyK2OjZ2d3i
V3/5Z9gaj1bPukKKLlj/OMj7OJCZTCdMZ3OUVIzHfVJjuoOdUCihQHTBRfSR+XLO//m/+K0fei0zU/DaU6+g9CEbGw3XrhU8c8PQH1b4oOilGhkUxVB/8i7DS0IL9alDpxIXQElPlJKjY8HXvmVYNg2/8AXJ1mYgSQZk/Qv0Rs8z3nqaJB0RkUSx+nnI7vgiRHdOiQEQNOWS3/jqb7MoLX/37/4yRa8g0h3GgoeHt/45j27+PtG1DHcN2giiikjRBftpbjBGIrXoTgYh8nO/9uGZPcd7ezv8H/7T/x13jp/g0mPS3gbBe9J0iPc1EtP93igxUoOIJLIgSEtjA1onaCPJ8oIYWoYMubJ9gX6/hw+R2w+ekG8WJFnKfHJKkucUWYqQAmcb/uC/+Qd8/R//vxGh4O/8+v+Q5z73Opeev877t36b3/7Gv2BRz3nw8AiTpjz91EVu3rqH8j2uP3+OhwePObp/SpL2eP21p8Gn2Npy595D7j2o+MpXrqOSGhcsSiTMp5qjk3f4v/wnb/3I34PSwPZzCaZwKCRbgw1+7m89h84DG5s7uMZzethy8ZldpLJMp6f8d//PP+Jv/vpfoRgVqKzhvTduMnsQGO0mmEKhtEAKzcNbx7zz4H2SXJOQYfaW5KNIjuH4vRFfeOnv8f4b3yHfOE9x8QGPn9zj9fNf4b/9J7/Pi19+jSeP7xEkfOMf3DuzdTnDjJ+kyBPSNKFpGoIXjEfbqETz1a/+c4yWeGvI0wSluyBEqECaGIyIjHJQqeGF177A1WefZXN7hyI3+BARJLStJXiHt44Ya5RskVHgrcWkGSZfoiqPtiXBKYIXyBBpvML6iA2y24yjI/j4SUby00ZKSd7r4Z3j+Rc+Q5ENuXXnnW5jjQngu4xmlAg8EY/SGV/88s9y/fp1pBDdizhYlDDUdQtSILWgcQ3BR1RUhOAByWi8y2deH3F0+Jg33/wWwbWAWAWY3YlEa02WZQghCd4jhMQ6T/SB4CObg4JB3qIVmLTAu4jXgQZPJgq8c5hsm6K/iQwVs+PbuHqClFOEMHjfR+t0lWk8W4SQZMUWNgTyLKGta2xT4m1N9BaTWFQi8c0c0gyjcmxd0dqmu398JESJsw4vIpJAUfQxJkNKSXCO6CwxOgiBtppimwopAkIafGhxtiExCqUSYowfbykIAc63+ODw0SNCxLURV02wTUXW20ZLTVQOkeSEGIghYK3Fenfma/X9sLHR59KlHkFaRBQIobtnUwq6hLggikAUH2fPV6cWEYgCQuwyEAKIUZOnGZfOb7GzWTBfKvJBiikybt59grMNN57OIfM8nO0zaxccVyVl5QmNZXZQMhz02NrKUFrQG/dx6pi33/6I6aTmMy9ex4+WnB87kn76b2V2zhIpI/00kqjunlM+EJGkmWYyOeXmR3e4fu0Kic5QRuODh6hWFYnQVSroYmMpBEqJTzIvp0enPHp4iIsCJESREnOFMoa6rOmP+9hlhZsfkxjPovaUlUPKhHntqBqJUZadgSMKhXWO6EGEiJAg6aofArpsYoxI2WV0CJHRoM9o0CfPugoNMaJkV235k0w3JJkh0eqHXssQPSezx3zx1YrrTztGW3N6o5asAK1zFBEZJVIEdGKQrDIhmSRLDULAZL8mG6RMJpZvvtHiguVXviTZ2YvoRCGpcPYj9u+9z9GDIcXgMuOdVxiMriFMCkRi7DLNcZUTa+sl//prf8jjx0f8vV//H1AUOR8n94mB4C1EiZQKFwPWOpTWaCmJ0ZKmORCQ0qyycIEo/2w154fD2obx5hbDGtLxFlYGtDaEIDByjAseqTUhdPtIojXWB1KV0MYZKI9SGiE1edZDW4VJc7IsxSQJ15XidFaB92gj8c5RNY4YQQm49tKL/PF/02e5XPLev/4qOk0Z725SV7NuT1KRoMG1Ld4GpPBU5Rxru0OOjRHhLI2DXAV0UTLYVMSHLeBp/QIfJSKkxBBoquWZrt/3iwBuXLnBw8OP
aGsYXh9w7tJ50oGkrTwmTZidLrn9W++jd0fsPl/w87/0KoOtgv3bMzYvZyS5gXTJ6YFn81LeJQG0ZGOnT3JgiNHSL4cMsoLTBxPe/T2ol3NGvMPLf/syRx8dcffxIa9e+TLf+PZ3uP7Tl/m5/8nP8I0/POTeW9Mzvd4zC/wAijynrCuW1ZLN4SbDQc7pyQlJplhOF0xnJ2RFglZduVfpjBg9RsHWwPDMy5/n+c98nl6WUvR7aKVp2pbgA3W9wPuKKGqsK3G2JoqAD45ECgZ5yly3SB3BWmSMKA/SObyLtF7iXeg22xg+Kbt92jjbcvjkEXVVkxjN7t4eTbvkwYObSOG7TB+xy15FSZb1eeHFV7hw4SLOOVJj8M5RFDlCaLz3zBczpNa0VU2M4KLFW0vTtAyHI5yLbG7u8tKLn+HmzfcolwugC0KTJOnKxbAqM4bVJiFQRpElmnEuUb6kqSV5f4ckMVi3QNKVVgiWGCNpb4wUPXq+YnnqkKJGUCFW5YZPAyEEtq0QRGYnh8TQhV2+rQluiVAKlMEHR3QBIZpujVzEOUtr/SqA80jRlRlb52nqCu8sSmskEXDYZoGzcz4udUr8J5uplIauzLkqG0lBDB+XpyR4iwtdqSN6i62XLKaHmHREb7wLUiKjXGUguhLJj4OuhCoQPkFKBSiQq/L2nypthxgIwVM3LXXbdFkGCVJoUpOgpUIAri1Blpw716MYX+aVz13ntdeewrvIrF1y82iOzhwBS24URao4PDrm9GhOW2UcLSr2Z4btnQ36m4oLVzZ5fPeIj966y8mdJXvbGXujLQY31L9d0zvLdRGwuyW5UI6IPjC/V1GFLsurgudb33yDZ5+/zrPXrzJfLPDBkRpFFJ00RSrxibRDSYXUGiGgrGree+9dTiYnOCISkCp2BVYbEMFRnUxwriS2CyYLy8lkybRyGK1REkLQDIZqFSj12Ni+QJ5pXAiIKFaHykgIDucCLoRuE25birzH9taYvD8AInW5IPhIkqSIGP9EkhMCHolSP3zgJ1VgZ6PhyhXFaLOlP4ikmcboBCUVRmmiC9SzgEkFMkbykUFLDX2PiIrhbg+vBO98tzvY/a0v5ySxJUwDvuchNyip6A8kkRLr3ubRvXcRYYv+8Dk2z79I1ttFogBBuSz517/3B3x46yG/9nf+Gts7G6u/NhKDIwSP801XhMwkMqrVITygtUAKg1YCnWqE6P7GGFc3zhkSYmBrexN0Ti0dTluEkLSuQUqD9w0hRqRU+BCQQiJ8g5CCPPZwzpGkPbQELROU1iitkFLQ1hXDXk5wgXc/+ohWlhQbIxI0WmnStMf5p25QXDxP9cEdpk/2eeed73HpmasE7wjeI0WCTDVtWVO2FTGBsm6oywYnPE2EUAtElLRxyswucThEhIDDBd/txwocjnJZnen6fb8IAdPlEYaCmhKVCg4fzBnspBAiaW6IItK/kmEasGVgdH5IOV9SN0tOnzhs4xDKs3xUMT6fYYVH6U6etTUcMZueME4Fxw8q3v/9SFtGer0+lTviH/2fvs21a1cIuaQ3KNi+lqDzhu/+/jd453fntLY90+s9s8AveE+IFtdaRAQbGk5OjxkON5hMFkwmR+SpYTQaIJVCyi4QsY0DBOcuXeL51z5Pv8jIez2KvMB7T4wN1joELbZdImPsMjfRdroZrYgikCaKxGikdnjhEaQEWtoIjY8rfQSwOv0K/6MJ/Lz3TE4OmE7njDc3UVKys3uFqq45nTxBCUUIASEjG6M9vvDFn6M/GmLrkhADNkb6vT4xRFrbUNUN1jmWszk+dOViY7qMqHeBg4MDXHBoowHNC8+/wnvvvUVVzcmy7JMXuVzpOlrvUFJ22j0tKIygZyx5ItFKU5U1Sit6gw2id0Q8tqkol6ckeUHRyxlvX6dIN8Avu9JK0lttaZ8CK/2hbRuCCwRfQxQ4t+zCswjRh+6zlh5hAt61WNuVWb1zBBwxCgJQ1zXVcooATJaRZwVa
QXBLbDPvfow2ED1doBc6falXKKVwzrKKkbryMAKiQcmMECUhenzb4n2NUpKmmtIuDhiffxape0ThsWWLa8/2wf5+iVEgSFZ6E7kKplZayFVgPF0sOTw5ZraY0jRLhJTINGAShVE5qdQol1LOIo2fIvuBL/3qM7RuwrPP7SDjlJOTFi8y5o1gPpliEkGWRZJcsnduSFU23PnoGBcMtS/J0yOeffYqvSTl2Ssv8Ltvfovbjw7Y23ia8XBE3Vaf6uEtAuM+3Li6S1t7bj/Yp/KRRPe4+vwl3vnoA77xh9/jK7/wRfZ6WzgfGI3GgKeuG5raEkJX0ViULd6Bc5a7N2/x3jsf0dYRZTwRg2w9cenJNzJGpoZygWsdi2rBrKppXHc4rJynSLvMnA2Kczt7nL9whTzLsfWS0DbIlYxDStBSkmpFP0np9XqIoNBakQ76qCSlmk1RUZH1U2SqwQlkWxJ8wHuBCvJMsvZSSs5tbZLkE9LCkGeQJOCb7vMXMiITST7KqE8cixPLjtAQLfnYgBKoFL73Rst0CX/tVzYZFg5XSVwT8HVDWDTYVHeSEy2QaNJE4NyE6eQPOD39FnlxifHOK/gw5rd++xvUTeDXf+2X2T23/clt76PDOY93bfceiQXRKaSORKkQGrTSJHmX/bZVS5KknYZOsNJ1nh1p0lVOiiynLEsGg4LKVgxMig2eJO9RtzUAMQQCkGcZVdsw6OVM56copdHGoIxG0GV+kyShsi1KSTY3BuxubvD4yCOjIs36aAmJTjGJ5uozzxE+uslRK7n17ke88oUHyMJiQyTQ3ZPTSrKYVyTa0MYlJ5M5InEEAkWW0csjJ8sFIXqEFkThaF0DRIRWEBTOLXHNj6fygYDFfMFr5y7yrXv3efzwhGvbxxSzMZO+Z2d3QL1sUFpx8GCCXhh2bwyIQVJWC44fz0g2XCf5bjsZTIgRFwNBeDZGY9rlguPTBR++UfHSa68jhebiuWt8/Q9/m9AIJicTnntlh3v227z85Vd48zsfcvtr9zi5u6C/3T/Tyz27jF+M1PMFWVKglcJZy/6TB0yOjwiuIUkVvV7eRfcOtNQ4V3P98kV+4Re+wDPXLrC9tYExhqwoVoFDpK5bvHUrQajFeU+iNbV3QJdqVjICgSQRGNWdbJahxUbwqFUTQAAR0VoBEet+NKVe5x1VVeGDo2lrNja2aZrAeOs8s8UJwTmEVPT6Y37+F/8aSdJnWS66TFHohMTeO4IPWGcpywV10+J8xIeIMYblckld1yghadsGRKRyDYnqcldPP/00t29/RFfCiUjZNS+0bVcGlggQikRreqkgSRzCdE05eZ4h8WilMXmKtSXettTLI5qiwEiPTA2mVyCjRqmUpDdisZjxZxtWzoIYIz7arvxI97L10ROVIcKqGQNUp5qisQ5weL/6Xh+6spzoMlTRB2aTA6xrGIQ+qerKwbYuUVIQEODdSjvVifClVitRvCRJ8q48jENGQfAQosX7FmdbtBIECaFpiT4iVU5bnnJ8/y2Ge8+ATmjbBSH++F54UUYk8ZOmhE5Ir3Gx5d7+I969dZfj2RQbKoYbGZsbGwjZ4mKkto5566lOAoupJxkkJHnEjyzzo8CTyRwpG2SSIpgjhGb/6IiFbRluKTY3h7TKMd7Z4PDwLpNjjykAD4d3l9hFQ1tBs5RcubjJ5z7/PLs7Y2JsCOHTC/y8B6UbRvkpZdjuGnlE5NqN65w7t8v7t27x+7/zdaQS/OxXPodWmhAcxmg2NjaQSuFsWBVOPYtFyXvvvs83vvZHzCddxloLifJLhItsXxjz1DNbTBYt0/mStAjIfIlpAgGFFIqumCEYjjI2tka8/PKLbI9GnRjfaITqtMIgu6A4dtlnITVCGXSvQAhFWB3MYpKh0x5ZMUIZja2XNHWFjCCiwAd7JoGfEGBSxbLymEkk093TWfQUSkiE6zKkUkf6u5Jiu0ArjfxY/xoFb7/d8uGHDb/4S2M2BhDRmJ5C54Gm
gmYmWJ7ULKuAQ5BkCm00Ek+IAR9LFpMZB48+wIaMjd4eP/XX/waj8faqJN9Vj5xtca0lBAdEpMpoa0nS6/4+qQAsQnaSJWMyhFjJXYM881fex41meZHRTmf0REbrHFmW0NgWlRSkSbfPIAONs6SmwHqHlgqlu/dclmQYoSjSlCxNkUqh9J9Ufi6c26LXK5gFS5EXiBhRNmKylMtP3eADLB7F6emE9999l2devkEIUHvf7alSc7hYsD3UqCRyeLBgaxfSNOG1z1ynCguESpDRkiYCoVqadonUkEmDQNIsS2yTnO0Cfr/rjGBvdAXT6/NLrxX8zju3OSjnXN7bw80XlP2aurFIH7j+xXPc/d5t0n6P+UlDVZVUJy0h12ij+cLfvUbVlBydzroMuoBeUXD90vOEdMBB+QH/0//5f8y3v/vb/MG//F0WiwOc9BTZHlJEjp4cM337Fu+/eR8zjGQjw7OfG/D+vzy76z07jZ/W9Ho9vIjooIlRMp/NUVFipKAYDFFaoZTGO49rW5LE8OLLzzE7PiJ7/hrSgDIapTO0Mti2RakUZ1tiCJ0Gy1uiW4KbI0ONjA6iRyDIckOy9F3HpRB4IZBGoqPH2ubj3rZVd/FZXfm/nxgCi0VFFLBYLMiyHKLEtZ7RcMzk5JjBcINf+Wt/m9Fom7ouESFSlhVKChKjuixVjGjdlSih0+R47yjLqgvkhMf6Fu9bkjQjMSkidKXkpBhw/uJVHt6/88mLJEYwJu0yWTFitMIoQaE9aWJIsgJjZCdeTkHJQJbnDDd6VPMSW51SzwoybVBKdlogIYjR07QNPrhPI+4jxtjpqKIgCgWkCNESowJ819UsI05A0d/Ee6hXJXHvLSHS3YPh46YKh3MVwWeImODsrJMD+IBQ2UoQDkJoOjWfRyvVleZWQaaUghi6cp7ztss0RoWWCa5tsc7igySEBuVL8C3OWxYHN0nHlzFJeiYltf8fVxRNWK1nF/RJBKWreXjwhHc+ustHjx7TOEueRbKxZMIMLSCxBl/XKCxeKRphqZoKsVA83D/h4MkcTEW/l5GljtA29NUGRyc1N58csuf6BN1t8E0wxEYxvT8nHUqMzFm4OYGSF154geefvsrLz+3wU68/j9ESYoo84+zKv700EWUedZqqVQe5CzV3791EiEhVN/zWb/wu9+8+5K/84pe5dOkcMsauczXtSufONcxOJ7z1xtt894+/x3y2JE0Vqm1I2u7wef7aNleujYGaGJZsbTQ0vqaeR5aLrtt8MJDY1lH0R7z++dfZ2RhjjMLFSHB+lY3vutS7bvGuEzBEEFKSZhmDfIxKezgX8L7Fe4WzNYuq6SoptgbbkkjVlYs/flf+kEgpSJOSECTzhcfgGWYSXzmEDSihKDYMaV8gdESoSMQThCJEwVtvl/zRH5f8wlc2GRSBZePwNtBah23AtQ3BBtoQCUSChenSkfcdeU+jtSJRgBbgBM4tyEzDrbf+EReufpnB1nWcdbRt22mmhUIZjTGKLB/jvSYE8YnbgZQSEUFGuepP9wipOqeDeLYJBaUMiTE4F2iqmmUFrilplSGETnqSmnTleGE/aeTLVU4TGoxOiEikUKQyIVU5eWa6zuSVxti7ltQkbI0T7HxOprpALtiS0LScv3ieE5mjnMNEx/tvvMvOhfPkps+sfQQ+YBLJYl4znTakJjKdWvRp5IVnniXLI23VYy+5xuHyAbWa4ZGUdYkuFEnsEUKgKRtIzlR99n0TAzw+eMJe71lmmeI/+o+/xPf+8AHTfM7mjmZ6WnYNlqUlakUMfewSSFtU1KRigGtPUVpCAlvjTWaLBa3tSvP5IOPSy9u88Vv3efal6zz/3Kv8y9/7v1NO9tF5tye9/NyrHOm3SE+e485bt+iPU1773C+S9Y/Z/uKc//Y/ef/MrvfMVjmGTjfng4PQCY1ToymKjOjdJ11QiIjzDT5Y5rOKj95/g7/3N3+ZLM/Is5SiN0RKiTam+zSEw9olzlY4
1yBCTb08Itop3pZdNtA7nI84H0BJpAZUilQS7QNBgoud1kYo2bW82x9Nxk8IQW84JEbf2VEQsa6hLqe41pKlGS+98hnyYtBpdKQiLzptxmJ2SvCO+WJBmmb4sGC+XJIXfWKMKC1pWtvZuERPVS5RUlNXFcZ25V5jFNZ68qygl+dUTcPHHiMxdhmJxChSE0iVJUsSjEkxOiFJDSrRpJkiSQ1pZjAmIR0bJtND6uqEqtKYTBFlCoCLNcakaJ12Pe1nTCTifEREQaQL/nzohPPOOcAQhaTX3yGSUFeLlTUFfGwlElcZkeADPkSkiAjpCKHG1g1yZRsSvOy6hnWyyi45nK0JBEySEEXXNRjorDyikKsAs+si9IAHunZfRWL62HpJOTnFZBkiBmRaYIot4o9IevBn6Zo1EqJsiSJA9Cyahndu3uO9Ow955/YtjucztJRsj3voImUpS9AV1SzSLhJ2tzPEMrBYwNTNKY8b7n445/HNU9pmQNHP2NscoxEcHzY8vmd58GDBonaEpWN7PKCcWwZpn+XxXcplIEkLti9ssX1ph42LKc++OuC1F3cYDcDI8Imu8lNcGNJUEH2NGNxh6xJMPozc+ug2SktiVXfNY9bz7psfcPPDm5w/v8OlS7ts724w6I2o64ajoyMe3H3I6ekxvg2Mx2POb2xw+uABLlgu3Bhz7foGTb2krBdsbQYq5ygngXIeyXPBdl9QVZ5F1Dz11FOMBgMikrJqVwfBP+ncRwmM1CgtOm2h6PRaUini/Ai3OAYPLrTgu0DFBU9EoKUiS1K8794nZxXEGK149sZnsOFDrHzCYhkwEqL3iLqlKgPivmS0lZMPNCYXmEFClJa332v4+tfnPHtJU89OuflRxHpL8JBlGSDJjaQYZYx2ElQAs5Lfzo5rbOvJ+waVCJSOK8sTg7Mtvjlh//5XeXhnl3z0NBvbT5EXI5I0Q5nOekcqhUkHhHhMCI4YNVHJT7S+sdvS8NahUtVVac4Q7wPTySlV1XB6eojNCmLsGhqjEEilSZIEH8D6VcOEbUiNYbGcoYTqAkIfSUTG1rCPVp1zhDGGuq6JrqGsWja2tkiEwMTIyeKEBIHxnr29PZLeAL88YkzkeP8BDx8ecem5V9ifHdFUUwgBaWDWtKRKozOHY8Dli5c5OH3AVnaeRFmM0JSuK6eXVUuhe8Q04oJjNg3IQX6m6/f9EmKkbRpefH6b3o2EjWtjRpsbXQOmcdx98xaN8pwezdl/csTeiz2sb6l9zc7FIXf2p2gZaUrH0aOS7XOaop+zPK5QJPR6I0JQnH9uxM7Wq/zOO/8lF5/SbP4Hr/P1b75F1SguP3UOfzxhLhZcfm7MhWs7ZLuwPJ1y543TM73eMwv8pBD0BynLRcQHj0k1g2FGlhcsl1WXOl95CXVdtS3ntnP+9l//Jc5fukCWpvT6myRZ2m3ItqapK2zT0NZLXLMk2AbvPLatcdZCcATXdLYaPhJd51YntSFJE4zSNG2g38vRKjIeD/EB6tby7nvvntWl/3tRSlMUfWxTkWcpPgTKsmY2m+KD5dlnn2W80ZWSXNtSLhZd2byucMEhI1jnCFS0bcuyrJgvK7ROukxfiNi2RWuBUl2pN8bOtkV6hQmGXp4Cnu2tMQ8fPyZE8clnprXq9BwEBpkizTofPmMUSnfWJ0LKrulBCLQyqEQxFNtUVclyftp1Co62kQKEihgied77lDZmhTGDTlwsIQbbdeCFzo8KAll/B6l7BB9W/nDxE39CpUwXKIqA822n7ZMCHSy+jnijiVJCFCihiTJibdNZCvkAQpJkBVKqzmeNroOT2HXVdSXo0GVhokfIzgKm20QVSTYg9FusLWmWc9KxX3lXmk9hrb4fIoiAIEHFriz28PGEf/rP/pB7h1OmzRLnPb60HGQznpcZ7anDhgWTozmu1sgXz0MFD26VLKlZ+oo735twdGtGJDDesuzfbKCNLKaeZW05OlrgSg8Ljz2nCaVkelDhGoH0
Bp32uPHcBS6/0MPawG//zhscPFzw0rMVr79wiVGqENH/+Zf3QywLRHqpRCv43JdyJtOG45MWctUF7SKiRecL2rSBh/cec/T4CUWmQUtElHgXEasM8ihNOLcxpK4WRB248dw253dzcHOMrCkKy6KpWCxgsVTIRHFuN6VuK8oSNjc32N7dwtuADy0xdBKMJNEUadZ14SYGow1am658iliJ+Ttpglj5O4UQPrafXGWtFYPhmKYqOdh/RG0DzskzSdonRvHC8y8TxVO8/dE/Q6oJrQ8ooUlzQWYivnScHpac7ENTR0RqeFIbvvP+kvMbLSLzPJlmjMYpvUGPIk/pJZK0SMhSQZKaTrsdIjJ6pFIU45RyUnHyYEbay9AjjTSh62BWiqBA6YCrHtLOpjRJxWD0JfKi+MQ/UCQZebHJorxLWNmDdbIjkNKv/Bs9Kul+fzzjV16MkUVZUlfdPuiDRkjwMXY2PEqRmhRrV04EgAuOQuVdqV5KfFMj2212tgcUmf6kuTGuDr2z2ZS6dYw3N/G2ZTZfYhQ0OIowwcclz//0c+y/bZjf3KdxLR++9wY3nv91Lgyv8mH9BvV8RlQCoQVVE8hky7Vrz1PZGS5GEpNS2ikBRWsVzrfMS9CJxhUe7RX5KKG0P54DMKtEwpFd0h/2mZ3UaJ0yOy4xeUREiZtaykWJbR3ZeIDWBYvHS+qJZuNKQbHZ481v3Gd59JjJYcnGeYmzAU/LnfcfMzzNCC6ynHzI3rV90kLQf7bPK3oDazzDi4LNxZDDk9u8/ve+wsH9h5w+eMD29REP75ztu+4M86qBtiwJ3mFdIM1SjNEEb1Gii6h96OxYDJGNnuKv/txnuPrUZYqiYHNrFyk1MUDwrvNdW1bEzhyNui5pmyXB1QRX4toS3zqi84gAiIbWOUIMtDFBZgltE2mco2zm9Iuc08kM61rq2n8qZch/56qEiHce7zxHR8eoNIUYiFKwvX2O8eYePqzMUmcz6nKJFIK2bWiahvF4zKKsKJIu6yarGqkUVbnsTCWVxlpHCN1Lui4r0jRlUVXkRY4gMLMVvSIly1OyLGVZVijdffRaQCIcg0yRpbLTr2mFUAEhfNdZt8qWKmXQOiXNMvLegJGzHB/tM7fHQGQwGKDQ2KYG8emk7LVOybIB3nUWI9E7mpX3IYlESk3e26BtLFVVA6LTNUWHEKrzNvMO7233PRHaZklqwGNwIpIkGYKu6SbEiE4ShFBd2lgoPAkIAyoiQ1g1d3SZ3BgjbbXENjW2bVbejKZr/vAOoTQyG2CMwSDRpo+IgjTrfSrr9ecTQVrwCRrNsplzeHLIBx8+5PbtU/JeQR08sZZMdE2xeYwcRZp6wWK/YnHs2EwGTCYLHt0uyXoJZdNycndJedzy4L0J6umU6cEhykuaKlBWNa1tqYRmFlKeLEtmT+bcvztBJhpnW5bLEw73Uy49d5Wt8Q7LR5F/+o//gPeevo/+Wy/wuRcv4J39dJcmQKo6Q9btPnz+c5p/8S9r6tIhRCQTgkZAFJGElc8hEINABoH0XZbfu05vNRgNmU5OMbrl5c/vsDUCV09Bebx0LJY1y0oS1YhAw85OJE0i5SKSaMOVK9cxOqMl0NMJ/aKgSHMSkyDNyjhWdIa93QG7+5vCqpJgTMr21nmMSZFSdR6eSiB1d4iJwXJy2JIVOcZrmvpsNH7aKPIixQXJ4eEWSXbC3pbAB5gtBSpK8JHadTKdUmi+907kcFIzSiwZkYQe5y8UbGwVDEYpMgbmR5b+QKGNQgTRBboSou90jdoIeltd4Ht8Z0Yzbxmcz1BJRCmFTCLI0HkH2iWL6TdZzB5x+Zmfp79xrbOVkZH+aIvZQnfNHgFia2npGhQRXTa1M1T+U3ZHZ4QQYJsG29QU/UhiDAiFUYai6NEvehitMW1nVC+iI9gan/bwtoUYcXWL7Cl6RbKSpXiic121yDakeU6kJoRA3XjMMEPFzpHApQlVuU92FcI8YX4XfBuZHh1w
6+YHPP/MS5wuDijnJdViiZYRlUQa1eP8+V2my31SU2A0VE2DtQnzpUe4hLa2NFWkyhyjLMH0JO7wxxT4EennPfLhNtMDi+xbRNQcH0xJe5LdqxuMQ8k5kUGV8uC9Gb0dTSJylqqkbRuMzakWLQtnKfKcdKBXwwICdd3QPqlxteeFl19gMX3CR99+xKXnxzy+49i+eIlHyw8wKuWVz/4K7WSOHtaYk11MEjiaPDrTqz2z3Xk8HjIoNMv9Jdqorhu36bRjIVqkitTzGuFbdsaGz3/pZV579bMUecJwOCBNDUopvLdYW1GVS6TIVuJZ2W3UrsLbJaz0Y2G16X7c/EAMOM/qYTQMxjkvXbrIvXsPOD6eUtU1Wn/ssv6jEflFAk1dYtu262gKnQv9aLzJiy+9CkLT1C2Tk1MmkxOcbTCq8+vTJiVEiZRmFQR3pqLBWkSMhNhSNktCAGu76RR5XiAEnWVOUxNtS4wW3xqUiRS9HvPFDO88JlFkRjAuFKN+ijaKNMkwRiClR0lQqmuIUUqTpil5XnTZRq1pYk21bIlYguiaLsbDETFk2Kb6VILrEBxBqJVfliJKECqH6LqONakoK4u1LSFKlDZ471BKEHwk+pWuxbuuKzj4zjy4LVGyIPjOy0qITnMntF6d8B3EgEAjY4DoukAudlnszpCj02Bm+RjXHOBtN00FoZHCdA75wRF9pK0tpteHGKjrklT/eETNzkMbIom2BKc5XBxyUp6SZTnt7JjZyT6tadGxoC0dy+UMkUXaStEsHFmmURzz6MFjqllLlhhETFicLmligz9Q3KseEl3nddY2luVijpCRUMNiUnKUKUKp2buywfb5Me988zaL05Y7b81I5CNuPKPZ7o/RVvP+N2/y9nbLc0NFsJ9iQ4wAo2HYV7StZVq2SA0mCbi2c/G3MZLHrunHENEatJZ4KamkxgWP8xaHJBEKM19ybgeef3GTVFTUVYsQnsYGZgvLfA6N75pqxhuSrQ3BdL4AodnaOseLz92gl6TUzrOoLMuqxblIUXhSkWCC6Xz81MeiBjrd5sfm7cF22aBsjBZdGbP1DXVd4+qWcjGhtjW9ok8MjsQ0Xcf/D4lWisQY3n7nXX73jz5k0PP8ws+lbPZst46LyHLhqGrBcSX48JFHq5TnLldkSpCnBVlryFpFLkGLBmEMW5eyzktTRmLsOvWjFF1nv4jdcyoFWd+wfW3E9OGS2f0lg/M59A0aMDohtA2pkV0VoH3I/ff+CRvnv8zelS8QoqfobeNDShsq0jqymEfyQiBpMamktY60lyBVAHW2mXvvOxseJSPDzRFZNkCZjFF/g16WoUXXUCKlREmBE9B6S+tqPDV106K97qx/Vh3frWvxMfLurT/ka9/4Pb7yhb/Jlb3LzCbTzmMXwTe++1Vuv/uAX/rKL3J/8S7LaLHBIvsSneekMXLr1ttcvXqVz176WdJkzAd3v8d8cUSsA+Pzu6SJoJ5bdjdGxKarLHgXOZ1U1MuGqrYkWYmYJ+w/+IDTcr6Syfw46CaGpbuBtrSc3JxhisjJ/hKdw5Xr2wyHA3r9Hvfeecxbf/SI3Wdy8iIlusjx/oThsk+7dASg3Lf0tkVXMWot7rRFjhOcixxM32d6dMxs6nlw11L0e1zcvoyyEypq8PvEakJ5+5j33r7Pz994kZ3x2ZbAz07jFz2+LUll5+BvGxBJy7ntMcuFZDaZsZfUjAaKZ1++wUsvv8rO7h69/oA8T4nB0dgG7z11Ne0yWspSN10GsRPitzjfdoJl0RkiQqfZs00EF7BWdRO6Qpesf/vdD5nPZwTfeeoV/XzVmfWjOVnEELBtd11S684HTCqu33iawWDE/sER+JZmWZKmhvFok9PTWWdTEwJt21CWy26j9J4kSamrLrPU1E0XTKI6fVuMxJWZc4yB4AImMQgiVVWSy5Q8S8gTgcTRyzSjvmRj2EMLhUk0RgsSIzCqGzGltSLJEkyiP2lASLMMow1ZmjIcjjg8
eIBJE6qyYlD0Cd4h0J/KGocQqZuubKqkpLUBqXq4Zt5ZBMVubJx33RpIoWibLvMWfPcida4leIt3liLvPPe8C8SQQvAEbwlKo6SC4ImuXXlWdaUX61oUyars4ztvw9UG60Nn42GyAucdkboT0zuL0qbrQg6eECytrTF1hckHzI8Pz3ytvh9m8wXf/s49XnjuMkXP8uio5qv//F1u3znExoZ803D1xiYmpjz5oOTk0RznWlQiee3nbrBxNeV0vyLeVDQTTyssuAbZh/FeHzeNzE6WWNugdOhkBHmObSvqRYurAr1hwqCvSTcjL//sHq++fpHf+q++yYMPFnz07QPuvbfPjSvn+MpnnuLqhuHGDrz3tfeZT8pPdW2MEoiV1q1tu8FSmRG0TWeUq/Bo7zqvNwUhNVS6oI2BfmLZ3BZkxiBMZDDMOJ05bj6ouPU1x87IsrstyYylbQRV6XE2hSQnyWp2NzIav8RaT8Cwd+EKeZGhpGSQGQZFiveBZW1ZLEtmy5I0TSjyjCLLUEYgUEjZGbQLAUpqyvmUti67cnCakpiUECKz0yPKarGSJyjEyuhdqR8+8BNSYEPLd975iKpuefWFZzDJJkHcJy8WyOA4mBi+e8czW0q2B5Gnr1jGQ0VapPTSjFR10ou2FAgZELomzZNOXuJU55vou2YWucoABtsdDpGQjhSbuWLxRLKc1BQyInsCKQ1pz+BdwKiAJODtjEe3/ntmRwdce/HnyfNNhCuw1YzKRVzrUaoT90WtUbozSI6fQme+d47T/Sd4rRk8tcugv0mWpuRpQRQfDyIIXdYxBoxKCdJ3HqR1ibcWHXNcY/GhGx0YfKBuTnn/8I9ZpEecNkdszMeczhuG25s8OHyD79z/Aw7rGa+Xr/Bg8gijU9Ksz+gK7G0O8YcnHJcT7ty/xTOXrvILn/01nrnyMl/77r/g4f5HFGmfsi0RwtDvDynLI1rXUNtAuVhQt4FQQ1Ut8E3gdBqxBGT8lLP4/98QkcnslO/8q9tcf2GHxx8e0N82HB1XKB1J05Tw2JFmCeWsJoiG/Xdb5AD2zm1SlxYpa5q6G5mqiiVtDSJXtHXDaDOlct0he//eIcF66nmDygKMBxQbDpkpFvsNZTllJ3mVxr3Pr/z1EQvu49zGn38NPwBnFvi1Vcn5RNMbSrzqghYlYPbkCbn0XMktUgp2L53j6o3rXLx0gdFoSNHrfeIv5G03f1dqTVb0mU3LVZDT+bIF77sZu1HQuBZnA9b6LoPiAt6D94I2gDSCyemSpm2JUeKDQ6xMkgMfz9H80dDZjiisjUgZSPOMXm9AVVacnByTaYlzjr1zO0ymU5KsIFpHXS+Zzib46EhVireWqlxiTMp0Nu00ZsETo1vNWnT40PWZKaVBdhmylXsLzloSpRgVhsx08xR7RUaWGpQSyFUXcZpIEiPJ0pQ0NSQajJEoJTFGo6TAJBotFU9ducLRk4cQJbb1LOclOikQwfPptE4LGtvZshgNQiZ0MycV1nuE98S2JKxKwVW7oLV1V+b1spsDS1cSk0IQfYvHI0SOa2u0lCjZNRbFVcczsZvwoaTurGO87brGO4+YT2xiEALnbHe/2dBZ26QSoyPOeVzbEH0DsWt2YjWjdTGfI+SPJ+PXK3I+eO8Wy3LBSy9e4PDJKQdPSibLBrUpefFLV+htGSZPKkIyo/UWEeDC1Q2uvbTJvPEoU9PYJU3ZQrSYvuHqC+cZ7I1YHs159B40+1NwLclgRH9riySN1PMlx0/mJGnO+PKA8W6BSTw3njvH0Z1nePL+m5heD5OW9Ps9bt28hygCMhM8PKxYLD4902spRGfnEyR15XEuYjQMhpJ51QVSo9yQRWgBXwwI/QH9QckzVyLbo5woLIumpawCbVsx8I7tseVw0nDnUPDBI0miFP0sMjCG0SDB4NkdZ2gdmC3BWcNguMuNG08xKDLaxuFcXGUfMzbShOEgwwePtZ2WdbGsusBNSZKkM0rWCqIMICMm
UfSGY3qDTeYnJ0xP96nqBX4VuIjQdc0CZ2KZI6Xg/ZsfcfPuAz778g3+w1/7VWK03Ls34N6j73HzdsPDR5FUa56/ENkZRhKtSLViXGQMxhqpuoOzUAJrBcILIh4fA0pKjOoOphGIXnSToF2gPlmS5CnZZopOBPl2RhSS5WFDcIZ0FJE+IlU3XSWazsuyN9A09Xe4886Uvat/FZ3uUi33wTTEIKmbVfm8CJ1UwkoCIM7YJiyGSOkNT11/imxngzzNkUp3lTHvkGnaBbnOQdsdrHoyZXq0hCYQVU0kQ+Co64o8z/G24YO7f4zzFefEmAe33+Fy/zKHh/eYLu/w1vEfsHAlQkHtZlTeIqLEZpGLr444t/UKi+UEsX9EjcVGyeHjA65ffYVB3uMf/uZ/xmi0ybQskSLB6ITSLmgqSwyC2EjquSGcC9RlzaLJKFcxc25+PF29ALmWpHHJwWNFaDXt4266mPQ5T+6esCxLpIJ+v4/QkAXPYhJot/2qOaS7iOACVQVFFZEpuDaSZxfIlGVePaSqHGmSMEwEkoQLl0YcnTxERs2TBwtE3tIX36FOM1rd0vicSfv2mV7rma1ylidc2jNED9a67kOOYENXTmudJKqEzZ1NxuMN0jRH6y493bXJK2K0VNUMk+SEIFFGQRWwbYlra4LvynUhgrUBayPeRrxbdW1Kg/NgPSjhKJclAUFiUqRQIDqHcynUpzJO7N9FiBGhupFrXZbOMRgMuHX7Nv1eQfCWZdsZaZ6cniCVZqAzWtfifOh0Fs7T2gWuaUmMoaqmEANKCdrWYl3bBXdRdo7stiFLs07sDCQmQanumiWCQZGRqhZlBKnpLFik0OSJIk0gTSRZkpAkikQJjBZIAkliSFNDmnbB33g8pp8XXLl6g8dH+0gTWJZzhE4YjrNPbT2bphO2t1Z0lhZt01nSJGlXyo1+pW2KSJWSyARixMYWGRzLeg7RYFRn3yJJUT7pSumBT3y9ELH7WSv7mI+F8UQItiX4ZiU5gIjsOntjQMoUdNbZUQSBD77zWEsCuIirbedTKQ3WWabTKSY9W4PO75ckMfzCL3+Jb33vXX7v99/i9v0jTo+XDAaGFz5/gY3Lmqq1+NigpMbIDK8aBrspx0cnlHPD7KBEuIBOBK0X9Lf7XHlhjOop+qMeoYVytiAGQzaUbF9LOH9xg1BXvP3NeyzmDUFkqERweDKjV2wwmVYk48iNL+yxuzfCnQTO723z5ee22PWnbD85ZfT2p9egpZQgTyVGdJ+vEJ1H6O45wck0QSIxmWW7DzM34lQPuHKx5YUbBuFrjmdzqjagEoWSASMjRd9xoRfplTA9iSynnmAjTS25v4CDsubKbopSkrKsqZcQvGZ39yqD4Qaj4RAlBJPpgmVV47GrLHQ3SSLLFN5ZAqJr5tDdnPMYoW4cofZUVcNiMWcymZLkBzTLkmU1wznXHSS9x9qAjxEpO+uks+D92/dxzvK5l25QViUffniXb33vPZ7sl6Rpymeez7l+0RMqS111QWwiNVmeEEXkcN+yMVLoLOCcQKbQ1oHEa4wJxMSh6cyVO/Ncgco02Vaf6EI3VSEGTadRlgAA42NJREFUkjzDnNeYTHD6aIlOC+gMCZDGYFTE2VXXePQ05Qc8/HBKv3eVw6PIrGmZHEWefWFADB6lJbJn8MqjC72aC3x2SKUZ7G6QFO0nYyWN0igh0FLSVDNOTw6xPqCi7sZV+pbt8ZBs2fKkmiCNY1meMjkZk13ICMFzsLiPSjQn8oBlPefzaoEd3+bh/kMms5POo5TIW3ffQm0o2kWNkH0q2XD74Td5/ulX8eYG0ht8rNm/c4fpYsrWXp9sOESnimk5Z2d0Dm8bqnKB8xEpB13SIAtELamqASdHnmzoubhzkS/+zGf4F//wPzvTNfx+UELymef3SHcFZb0kESOe/tw2B+VjvvM79xmPR1S+JYhAcF3SSCU9djONEhClxweHSbqBEl/+9adpmpqH905wLnCy
POHyUzsMfMLhYYv3nmK4xeboHFoF6qWlmpVMJkuuXtlk9+kRrZ/R62/gwjl2t06AB2d2vWcXXgtQOmE+PUUEgRKyG0kkNDFJGIwHFKMhxbBgNB4yHA4wSdIJj1e6KxE73zlnO1sOrRN86KYxeGsJodPJhRAIPuB8151kbdemD+AJ3WBy1yBXZs4xQpJkIDr5tTGrjtgfCRHbVp/owbTqmjSmszkQmJ0eo7VkNBry8MEDdnb3MCZjuZh1/lTBdx3MMRJDoK6rbo18jWscznZlcGdblNQIbdCye/i1StCrKSkfz1NNlELErgPYKNlp+JQn1QKjFJlJSIxeZfg6zZLRmixNyIyiSA1KQV6kWG/RacLTzz3PvK26LlkhqasSZT4dA+cQAs6F1XgxsG23riBX90P4RGCtjIYoMMoQfKCpKuaTJ8xnB2RpD2k0StREpVdefd1czs75ReDdatoLEqPNJ6PLVKfgx7u2a3xRGVEoQogE67FhidSGELqfI5UiEPGtJ0kyIhJXlyAkEQeuom5/fKOKLp7bIO9/lt/4nd/njfdvMa1OeOrpTTZ3M7yoqOumK3XLgJCONOuu9b1v3SW0GYvTCqJCqi77srE9pt8LOCGIqWawkaBzg20kO5fH7F4T7F6AfraFjAn/+jfe5+RBV6pcnkje/O03Oby9j0gjOk0YbCWkvYy3vvYOFwc1z76Qd0J1Pk3Ta4ELkErHMAUtBHWAi5cNi3kP24BJ+yyD45SEy1cqnr/qWCxqyiqSpRkXt8DjOT6yIC26FylEQGhJKiNLCe1ckA00XknaIJjPHb/zTUuv8PQyyaDIuXDxIiE4Ticz8iyj18vI0oTZcoG1Dqm67LyQohuhJU1nb0BX/vx4Nm2W9xiPN4mym3HunSOVoHXAWdtN3YHVyacbF2bOYFavkJJnnrrO2x/c45/+5h+gf+sb5Cbh4qVdvvT6C4z6HlsfYvQB6S60y4ZmYfHWrTxGBfUC6hT6RURpQbCS2cQykoaQgAuBNHi89iSpRsrOiklnCqLEu9XhS0pQhmxTkC0d8ycNG9cG+NYiYsBkigDE4LrDn5RY95imnXWNelLy5KDm8GjKS6/1uKYMEgt5J24RZ+zH6bwj7ydok+HqGfPylLqZYZuK6eQDlgvPqddEmSBCS3QlRnqe2twgUZfZKi6SDMdUB4d891sP+bz5OYgtlSsJ0fG4XBKj4cPD79LomkpLgjE8V1xFZZ4TucSonIOFI0kG7Gw+Txg8xPo/IhUv4vUFjstT7j5+n/rx90hPFoisxUuolzXpdsJyOaHyLXiFiwYzUuzeMDjhKWtPIuDizi6f/alX2dj48di5FLkmu2QICoq+oHY1VeV45qWnmB6VvPn1R/QHGaSOua0Y7mSkWZ/nXzuPGteUfzClrgMqE2Qmo9hMKGLG/uMZronsvlAwncx56tk9DvbnhBAorgTSzRMaKygXjqMHMxaTBtds8OTeHBUyDiZL7k2OSEVxptd7pnnV4DwhxC5zEi0+Qtbr0zt/kfHODokWbO1cZG/vElobUmO6TdF7nG2oqiW2rkF4orAgNatdvXsXhS7ICSF2XaxREoTvSrcRJKHLcgmB1obBsMd0suj++6BPVZWrrMyqMeRHQIzQ1A3Odlm50Wjjk7LqfDahaUq805x6BwRm0wnOd3M2fdt0M3htt/EqqWjaGiFDpxmUnVN/DL4b0WQkUtBpyfBEAibNEIDWncDX6E7noZXsypBKkWhJnnZGoYlJUDJiEtkZOSeaLM1Ikm6KR5alJGlCmhkQsmv8GA04f+E8+4fHncmx6qaC+DPKFvyb69ll/PI8J3i/Ktl3dhVdB+2qRCfjytIiQycps6MHPLr/Lr5ddllmP8WFCEmPKBKi0J35Ld16EiJBxFWWeJUBXA2zD6HtgnG6zTG6GiG6zt98MKJtumkzztZAAGkQsbPSaNq2007lvc5sNgSyLGU2O1ufph9gRTv7C1o29wpeeP0Kg90xOovMmwXe
N8wXnuUyUlU1NjpCHTm6M2cx86sNURBlwEE39SFY5k8a5nODThKW06oLomNn99AbGKKwZD3NaLsgOstyv+X+vCEvGqbHFUK2yDRy57sHLJ8cs5Nq0rbl+PFD4tOXaOblyh/x08F5weEiEpKVx5eH2gtMEvj8l65x4fxLHJ0u+c2vv8HeYMKNc5bp6QKVKC5t9ZEaWldzeFBTt4Fks8v8YgUiRoSGNIc8GiIpTjl2R5HBQLNcNhzsS/ZPA4s25V/81pvElXg/yTS9PGFQFKR5AqEzz93cGDIaFYx6A7IMtNAIoZBaITtTF7ROcd6iUSjRjZQSotMvaaUoy2plg6UIcWWjeiaTOwSvvPQ088UJSkaeunqRCxd2GI9HCAmL6TH7T+5x8CTjdDbBuwWDkSFPBdEGBIqrzw2oZh7XCNJBp/NLspzFtMGj6e9ogo9keQTlSJKVXGNl5ySE7uboChBCoQyMLg44qScsDysGu1k3qs0HXGUpjy3BelTePbdaLBn3LIczx2AsufuB5zv1nERKLl/pxsspLVHmrH38PE8efkA5zTienjBZ3uLuwzfZP71DmtQMkgFbW8/RH76IMzl1mHA6+5Dbjz/iySMPyZCN/jbPnX+eZQuTP5jx1IUU71qqtsHGiHMtx3afgcpxrWeQjzi3s0FdVtTdNDjSmDDIxhiVI9R1ar9LHR6g3GPs6Bx7X/oKlX/EafsbxNOaEOwqIWNYLhdUAVSwSGCUD1mMTqmmAec8/WHB9edvUPQM7XJ+puv3/SKMwCux6oqHnasF09khy2/MeP71y7z/zSfI0KIjLJYtg+2Ek8mU2+8JfubvPctP/dWUr/3mWySpRIuCw4cLdNIlDLwN3aSwtLMZSrJON799fryqtin270+pljWxDBw+mPJgccyV0WWW6ZSD/QeUx2ebqDqzwE8ASb+PmizwApSE8fYu/XMbmDSnKAqKfo+XP/M6Jk0wptPdBRzOtdTVAq0USirqT7J7auXFttJadSHearxO5z2lVSBPoa67OcFG6q57CYlJCkK/86pLkqRzZo+Wtm0/1Tmff5oYwspk2n+il7FNzWI6gfhx56xiuVySpBrbNtimom4qRIS6KnHBrvRgXaAQ7CqoM/qTl7cQf+Is71yDSTRSgG3a7tSuFUUhMMoSCaSpQctIogWplmgp0Aq0jmS5JssUeWpIs5y8GJDlBb1enyzLyIvuVBa7mWdkec7Fi5coG4f1CkkncP201lgqTVXXq/I1q4xaZ7WghMQjuhFVpoc2htnsmFs3v0d03ZQTpfzK90+BCCBbtElAdqMGBRIrGqQx3RSYGHG04B1SdwGeiBHnms7rC4EQjigMeIWUCSrNkEqxXExRsrvXldaE1q7sFFqE1ISgEDJZjdr68eCi5e6jRzw+eYJMK5ytsd7g00BAMDsMTJ60tEtJaBWtLTlspgyGYywVrW9xretahLG4OnB0u+XBnROyXoILDbHurHFmx0ua6ZieKPA6pV0sCLbLXvs2pZlYvG8gUeANx7cPOL0nafYk/8u/9To/9dIYP31M08ZugsenhI+K47nG5RUbqaBsoBWQ9yRanyKyId946w20PuXKdolvYXenx7DfGbtUpWU+rwkq0tsUOOmJHloXWU0RJASJ0xHblgTnOTwJHE06C5Gsp3imn/P6F77I9adusCxrjg4nPDmccDqbcnK8YLKoWdYtZdNimxofA4M85eq183z25ad55upFkjRBiI8PSK5rcIqGEN2qY9StrI4gzzNa2yC7lDZ2FTSdBZvbY376pz7HfHLIcNij3y/QJkFqw8ZORj4YMxhtcvP2TR4+3qdpLWnS0O95Mi1AOkxfUc4a8kHedYjiyXpJ15hVSqbLmrCdkcSuwdDIrkNVIFG6SxDEID7R16pEMbo0YHpvRlM6TK6IQWIby+zY0jQBk3eNi1nejbMUA4PdbFHPSbZ2JFK0OKtoZCBMW/LibLW68/KIf/xP/49Y3yBySyiWiOgZFIZq
GTk5XHAwOeSp84+5dOlnwAzI0x2e1HO8votsZkzshPT6axQS9mdv8cyNG/RmhqNpQ9PaThtPpGoqpos5e1tXsJRE4Yg+UOQ9vvzaUyyPp0y8IKocIVKGwzHl4pjZ0S0GoyuMek/TK/5HPKx/j9bNuqqUddRVQxUCWmhu7A55RuzyzQ8Ubz24yXAQeOrGRTZ3NohVRcuPp/IRPn6T+0jbeo7rKS6AParZvNzn+me2efD2lHPbm9zZf0K1dChjOa0Ejx+csrO7waWrW3zw3mNsZfngjYfoVDEa92mt4/HdY6IH1wa0UmS9hOP9OeWiIc9TykWNb2BrtMn8sGR+0nJlU3Fu+xzv/Ou7HD2enen1nmnGb+fyBZbTBWE2pz8esnP9YmeonBj2Llzi3PkLGA1Ge4LvNmFnu1l2SncaKZPlBFHTNG3nSp6mtJXppi3Qnb5DiGhlQHlkIrBthRSS4CMa6OcpjRNkeYIQ3UQH5yzGaBaLBc7ZLpj8kRDxvgXRiYebpiEvLBvjEcvFnCQzNHWzGqwuaZqK4D11U6/saRwheozuumS7LtIu89m2bff1CEqYVTDjSUzXeBHjyjzWBdCSIkvRvsvEKaHJjCJRES0tWotO9D3oPLeKvCDLMtK0oN8fUfQHZPmAJM1wvhtbFojkxZAYoJdnnN/d4dHhFKRBcjbZgj+LEBJrA0pLXOx0LpHVfNkYicFhVI9+fwutNEdH97l78zu4doIUARk9IvjufpIGguzE0cF1j76USCW6oSPRE4Loyr+redDBe7QWWN9l/YwyIGV3X7oG17YEmRCF6bLQUhL9SjQvBGmaEmPoLBroPCsX8zki/phGFQHv3b3Dtz56l3v7J5w8XlAe1VRtxfHihLKqaSqFWwSaWQnRkg9Bm4QqLGi8x1YLojco7VGZ4vh4ydGTmmZhWcwlMRpCGxA4yqOKB+/OOEoavju5R7sI+EYTQnesI7QYKQhBoaQAESiGhld/5gbnr+dU9ZzTkyVbu9vo5NMbcyekYDTuEZqWyjkqCzKVDPKEsj7kn/3mVzk5mfDS1ZK9bcPm2JD3JEEpmqqhPKypXUBmESEFwXW+dSFCsILWRmobcY1l2JP0h4GoIosFTKawLAN7Gz3GozFSaXZ3NhkNe5y/uNll+2M3faN14G1gsaw5PJxx884BH77/mLfe3+f6U+f4pZ95mRuXz4Ho7jln69VoRT6RRYgoEQSkhMQYpOikD4oz6rAUAqk1u+f22NgaUs6nzOcTquWCJE3RiWY6nXN0WLIxvECSjnn/vdvYkFLZhrK2bIwDWRrpDQ3ex65ZpdW4tjNRF7LFt5560uIdhEyjEkevrwnO43wESfe+lLGTdaBIckg3NPW0QqW91RjGLvPTGZcHbAtl6cmLSDGEra0EIWryXkBIz+K0Rivob+Zk/bO1cwnOMp+ckmQteuAQFvq9nJ5IOZlPEVJQOcdH++/TiiPOb3+OvtlgnG2QbEWumouo8QbbVy7w6PF95pPArFGwkWOqAlfWSJ0QXWDqG2bVkmeKnKaa4IOkWjYMhmNIAk2yRDQDhMqIWJyDdHAOkxUcHd1mQw0ohue5uPerzJd3OJy8Q1s1NHWDtR4hM8Z7fdpqxvb2JsOHh2gJexeuIIOldnOi/fQatv59CAQiCGzjqcqaqlwgVEae5yyXFVef2yYVGS987jIXp0P+6F/ewsfA4zsnfP2r7/L6z91gMBphzD7WBvZvT3EhcOVpQdtYntybYiuPrQJRB2ztuf/wiOWsYWNnwGJe8fQr57iwt8sH37tPWzr0luL+/hMe3DvGVu2ZXu8ZavwESZFz47PPc/Obb9Ib91DCsXXuObJewcVLV0mMILiW6BRdz6fCNjVCdpohazvD0M77THUdksERAiitVnMou0YQZMRkCte2aC1JjCLLElwDLV2HZdOUCJnQupa2bFZ+dJJlWZ35TMV/H1LFVWAhsLYmMRonIj56mqab8CBX49diDLh6TgyuG3+3Et10
9jQCKSRRdAGKEN0AbrGyXhGiWyeEQBuNa1uEDGgjKfLOpyqEgFFdl2qaGLQUJNqTmkiRK/q5oMgNRT8jSXKSJENKh8RhVj5l2qTd+LlVJ9yyLjEaNsZ9jiZzmiDRiVpN0jhbQoggDN6B0mLVdKFWJpkVTV2yszdisZhz8PgWJ8ePCHay6sDwIF0X3MnOeFWZFKk6w2YtVhqgj6eu+9BlFWUnDRB01+tX1jnadNk/5zxCQACCt7Rl2Wn+YjfkPWIhSmKQRDxSaZTpfaIzDb75UdlK/ltUTc3X3/se+7MZJ4cN0+OaJ49OmU1qVALVstPM+rKmbRq2Lg65+NIOLjiOH1Yc3p2CdSgj6V3IGQ8K3FLw5MGE0JTQZqANxkiEkJTHlruLfVob8W1EhJpoAREJtF33YIzgQWnYvJzzzMvb5HsJ332wZLn/mLEX9PdPWVSfnvVD0zqWrSSRkkktCECmFUnS4+3bkv2DUz57I/DM9YKNDY0pJFEJWmup5pbStshMEkXAeoG1EhcC1scuk2AlVR0ZZoLdbUl/nOCipdcT5DnMp5Ekz8mynKapiCGQZArjOt1qXB3eihSSQcLmxoALF7f47GvPUFc13/zeTb72nY/4R//9N/nKF2/wpc88i5FdUBKDWx2ku3+Xsivrdtm9zowYQBt9Ju4Hgi4TL4Qkz0bkWZ/gW6zrAtE7N+9SVQ3nz2+SJQl1WzMeGt556w69QU7wlsPDGilaBr3/D3N/FqtZdqZnYs+a9vRPZz5xYo4cyRyYyeJUKopVxVJpbEmQyxYattxqXbgNGPCVDRgwYMAXvnHDNuBLA+02LAGWoW51q9UqScVSValUA8niVCSTSeYcERnzGf95D2vyxdonWN2llinoBKkFkpnMIeKcdf6917e+732fN3Xety9pVBZoGstwI41px9uK6eOWwhuE8GSZoAupSEx3rwhdRxslZZ4hpCDqyGB7gG1WdGuHkB5XR0wOLBXLhUTJiIyWtRfkOWwNJTjDdNXRiY5aeoYjyUBVhAuWt0RSo8QUKf7RZJqtQcXs8CxFhKXod5wDshNWq39F8FsocZnRaJdp7LixtYmSkda2LJslj+ZrJuMMITVbgw12xDZMAzZPfFyhIvPlGlxktlixe2mfGFNqlhKeYZ6x9gI6T9O0yKJiND7g0envUrmMifgscZ1cUXXXYK3D2sh4nFOUivmiJR8a8kuavfF+SiJpZ3R2ieZnQzeAdDw0dcdy2jA7bdm+XLFxqaScwN7BAZPhkkd3z7j84gGf/bLi23/wHtY6bn//iMEkZ2d/zPbeBkePFqyXlvWio6qWdI2nPrV0dSAvGqpNiW3WHN9fsJp1KKMZbQ7YvbIFwrN1bcDGfsbHd97l4TurlMZ1wXfcCx31JrSFZPvaPuuzM4rVEhkjO5uXMDImEWwweG9/rANza2JUKF2glaZt6oTZkJpoW7zrko4qxtRFIonl81ISXBKimCwnKzuyRiNai3MdQhTJ8KAS184Hj28tEdAXIFj+t1sR1WvQQvCs63UflO5SxzOQxoAxMf+ECISeZ5RYcvFph1I8NSH08UsxooRE90WE67rUPVUyuXKNJM+gKCQKidGpwzUYZOS5oixyyhIGA81wNGRUJSilMQIlHUpZjFEYE4EWiaYsK6wLuOAwoscpZAUCydW9Xaarlnm95llMeruu4ejoEcSE1DBGYG1HXa/wLnVLT05PkBgUDrCIkNy3QqbkjSgUsT+IpFKgVF/gyYSEiJHoI0KnwzG6kFBA0I/sfTIjCYmn64m5ChklphigtEvaza7DxwTi1llBDJGuS6LqLFc9nFulOLhnGT/2b1irdc2DkznzpWB61jA7a7BOc+OlS2xtl9y+fcjJ2Zq1D2TDyItv7rH/8oi2s1SDSLte0k0NpirYuLbJ/oHBTQNHpzl+XaNUJBsa9CAV1u0y0K0X+Da59IkgYoUQLQIHMU8dIhx5Jti5PGJyJSeqhh98dMz3vn2XvXiJbn3E
fP3szB2CwNFJx+6kojINa2dxTvGddyUf3vF8+vmOT79SMthQyEwidJp021Xk9LABLVEmJPqAEziXDmfXge0iXQu2hcGWoKwiWqXPpomBrIoMAFMotM5QUqR3RsgojEkTDBwChZIk7h4RLTRaaspqzF/685/jM2+8xD/+59/gK7/1PZaryF/+lc9Q6rzv9vn0/nEJ6ixIFx7XtSiVpYvMBWooE0uwj4qUCiVLsqykrhXNOvD6m29gMo1zltI3DCcTBtUmx0dPmExKZrNjprMzVsuaEGviqWdj4hltJ1NLO4Vm7imHJaqIqZjoIm0EXwZo018jjwQbaJUi0/Q6SMFgs2B6sqQc5sgi4E+TG7uxjhAlg0rSdB45g+19zfaOpm0877zr8dZz/apE5TVZfrGXXSkjg42OtgClJAd7I7rFCi8DsUjngYoQFVQa7t5tKUdPiLJhu/wMjQvkOsP6jlW9AOEoKkXd1nRB8vy159gqcpZ2zqxuSSa5jrptCa2ibgJVMWTdLfDRYW3D3DWMCphkK45PPQtv+M63/wmnJ3cplEeKtyg39lFVRr2e4YID67lycJ2uXtDZGiM048EEWSliqFnYOVJolvPmQvfvJ1+Cdt2xmjbMzzpaJ2m9w3VwfLhGmzXN2nM6W9C87RAKyrKkrRdY5/no+0e4lwK7VzeQYkHwga7xrOcdTetYLxy+g/W8pRhXtHPL8qzFe1jPWg5ubnDv/UMkimE1ZPkoMNnc5V57BlJceGPgQudLSqZRRz7ImB12xLbBdzNiWNE2lrLMCayJ3uNtyop0nUVI0Kanj2uND44YHCBxnSXEvluDTP8RCqXAduv0/3Wi5ksREDGgXEsoBr1TOKU4OJt0LD4kR+hPk+MnSN9bGlOnJA/nI0SXXsDB067XQOy7donGHmMgBoWUKdEEElQ1JZl4jElpJ8nBFnsXWnK9ETsEGi2TKzfTGulaQmgZ5AYjYVCWjCYDRsOcamDQSpLnBYqIUTrlfmZJ66dlAF8TncDbFN0We0r8eDJOfEUiw2FBFIbT5eqZ7GVwjgd3P0KpxBWUOkXJSanQUlMUZTJ5GPC+I3QLRHCk6LYecyEVQimkNkidoU2Wsoh10kwqpYiiz92UMmFhIInF8UglknM8uKeFeIp303jXpsuFKUGUNN0K2zVEKcmzEaU21PWyL9x1SlTwHv9TMhv991fXeY6PLKcna47uHWOC5vNfvEE0HXfvHrJ3Y8DgwDA/6jBoXvr0AdlWR1dLSj3h/vunKFMw3it47vUJk+2OuFKcnm7xYOWI0bJ9U3Pt5S2y3GBXMHsw5/b3HtPWDlWlJBPbpoJAKg0ZKBmZbOeUmxliaLjy3A4vPrfPwbWC93/rlFnIWXfPrlhWUjApGzor2d/do6pbvvuO52gOn3nF8+oLA0YTiS4lKIUnEDrH4qTDRQXGEwWIGAiAdwLfCYKFLkC9glzBeBLIC4mUAQ0pP1aBzCQ2plQcKQxSBlbLBV2WU5QFXdsRcTgv0QngguoTZgXp/Xbp0jb/8X/45/jKb3+H7/3gI4bDnF/63Ccx6k+grZRK3XDSBCZKg0gZmFzcKzISe302nJuw0jX+3t2PGW9uUFUjJIJMRyIVYRAZj7bxTnDlxhX26gXTsydMTw+ZT+esVgse12tGk5bRKDDY1hijWJx0lFpgg0PojFilbq0QHXEVECpHGYkIkfnJmmqSoNiq1AwnJYRAMZSMtnJiEIwtLNeeGCNVqVFEbB3IKsHOpZzRo4aj08DdBx2PHlg+9emL2rO0pAZRpgnP3lbJyEgedC22P7m1SN3MzSGcPoB5C0sV2C0WRDenyLYSYaNdUdcNkoTimk/POJuu2N3ep9we0Exb1DKQqZxVV9M1aRoFSdaxWCywXUfbduiBh7Bi2j2mdoHbt99nfvIAHQLeR6JsgIYYJLPpGVmek2UluzvbnD56l6624AVVMSD4lqZtkFh0UNy5c3yxG/gTrhgii+ma+bSjtZK1bTCz
Je5gk4++/YSjhytcF5g+mbMxmJCPDMEJVMwwmWV+0nD0aIH3cOXGFe69/SO8C6xmLXXtsU3SvDcrS7cKLKYtXR2QJp1H3dpx9PGKeuH4wpeuc/1zc9w0cnrSYbKMQVmxOry4vbm4jt+5uQBPvV4nCLHUPeIjYrROmZVa0DYNg0FOIN1oQ+hHaVIgjSY2vtfsNXjfEqPtX2YaKRRS+ad6LCFdasWT3HLBdRQCPI4uCJytIXiMUkQizjnatv0pavyAvigTQhKjZzo7QWtN8L4HjnqI6eUSBeAVWqk03k1RxWid9VW/eKoVk/L8GhCSyUEIpEguXS3T0KZpGpTKsa1GSUsm06G2MRmR5QZjFFlWoFXi9uWmYDDI0CYhX1IIeXIcG5EDRSpIvUMqRVEMUaK/DWtFlmma43kfBH7xxYyQiqyskDpFoGWmSEVo9CgViSKgjUYGS9ucgq9JI/KEFkqFmkApg8oKsqwi00XKIe4PIxmSLksikIntgkTg6TvPMhWGIqaiMHUTJQKLcy0hCpAZOhtTSEW9miKixzmL1hlZNqBtGqIIT7Mzlfxpd6HTahvHx+8es5wvuX5pm0+9sUcUknc+POTNz13HSs/9x0eslgsmVY7OWqLoMEbTrBy2E0ShGO9FLt/0bGwWyFCyPJPMPm6o/ZJLL+e89HMjBuMKGXPs9CrjjQ1uv3fK1hXJYFKxmnY8+OAIVWjMKKNet7Sq5nS1JF8oVnHN9vU9PnvrdV65uuIP/9nb/PEHz84QMxrkvHL9ObJK4LrAB0cFh8sln3vD8qnnK8pKoAuBNCIJwwN0daRpO1QVsNL3lzrwIRL6vHIXE4u0bQU7u4rhALSJSAWdTTrlTKUC0NuOzjnKskAFQ5ZB23SsVg3VoEzTgRCQ2pJlJRAQvdY1xIiIUFYFf/nP/xzid77L17/zLpf2N3jx6n56Xwr6SDaBiAn8HkVK3YlC4mPgIpBMqYPuCMIgOX8PBtbrJR99+JBf+uWf78+O9DWI/p+jh8gbXVBtlAyGYza39pnPjjk7O2QxSwVg19RsbFryDYspVOq0nAWyYSDaQFxKzEaWDIMzR15JYga6Uggk3apD5ZJinLM+XaevzwekEJS5TvKhNqKI5JlKmg4PuYl86o2K2/dW5BmcPgrMVxerxYoCghLsb5UcbFQ8Op7SJVVzMu8JQIFq4OQMxBh8AytgF3BS40NMutN2TWm2EF6wrFvm9ZwDvYPWqXlQ5kMKG2jaNd4FRm5COdB4HKv1kthJGtsg61NMXID0WBbMlh8lw7RUCBlAR4rBmNp3TJsFW8Jzef8GMliWizlda4je0HYBr1fgPJWuOHw8xeifjebFe8/stKG2sHYdiMByURMR+MZz70eHWJueKdlaTNewXnZIqRlvjTl9NGf2ZE3oIvOTBtd6YoDVoqVbp3pFqkizdjy5O8fZpDutqrRnxw/mnD5eIJRm2j6gihN0KTm4tsXd2ye07cV2Qi8wsi11RSQw2dxk5hUyy6gGE7QxqWsXXIInE2mbOrluYyQK2cddeYQQFCZnvayRMRCDJzhHQKUsxpiO4XTQG2IAqVInrCyhHHoqXWKE4nBmsSJ1wBQSGzxaCfJMs3omqRL/2p1J3b5eY+aDRylJ8DZFevWFg+yBw0IKnPVkOkswTut6BIHoX5hpbKxUenlmRvc5jRIRAjE6fGsRSuNCByog64gIntY33LiywfbmiNG4whjFeFwxGo4oihxtJEVm0FogZXxaJGmt0CrxD/NywGi8Q9d5TJEjhH5qnIkhkBnFwaUtFl1HkecXvptCKopyE200SEleFIgYUiSYSmaA4C22neLtGhFToZscwLHvFBqUKsmzYTKsGNN3A1NUlQg9yIxIILmIk6Eo/Z6xH/We/1yk1FjnEufLd/1nXKeCWUhMluF9pHNLlE18PyHPk0M8LrRk6mfFryphqdjdGPOLf+4lykHH1796l63tEVduZhxOV7R3PV1naGVLMw+oaGiXgXvv
TaFLAiqTZQyr9HmCks09S15GulZQjiQb2xWDQUGZlYhNwXq+Tac7tm7CztUS7fYZfEPRhpbNgyGZKfngRx+wmjYcfaD4sJKUO0P80HPtlYJf7Z7jN3/3q89sX4QQTCYZg4nk7R/VfPtHNa++YHntZk417GPDUEkbp1KB0cwsNniijoToiQ5cBNtA9Mk1GF2kWwq0FmxvK4o8YowgqPRpCzYkHXNUWL9msZ6xNdkkywQxOsoyg6bj8PAkOfOVwrpAUSanvlEQYs/vE0B0CC34pS++yuF/e8wffuNHXNvbIDMZWkCMSdMXhCC4FPkodKIpeBcuRK4xPZvzG7/x+yipyYsKIT2vvnqLx48e07WBvMiTkQpJECmQPMSAjBEpU+a4GVRkpkSPc8pywHC0yXx2xGx2xmw+YzZbUQ3WDIcNUXgGMcedX+5dJNQCWRR426bi23ryLOXImyJDyARtltJQL2t0oZHSYxSUpaJEo1RAy4CWgraxDHJFlXt2tzNOpg1RQV5e7GVXRDjYGHFlf8DR0Yxla/u+KQQNoQPZwnoFMhfkImI7WDSeer/B5AlP5XDp3WQc0XeEboltFhgRcbbFNh2hc4yzCevVCTmag8kmsjTYtqNrHHQFvnGYkSeKkKZCUlDkBUqnWEytFJt7L7N3cJ33Pvo2bauYFCW7Ozs09ZS2bVmvBTf3rkFzwsP2DFQqCBcrxc0XXwC+daF7+JMs7yLLpaC2HSJLDZQQPQ/uHzHcGHH2/iFdm56Hdn3EwGYYbTh5uCCvckxmqJcd3kXq5bzH0EVcF3BtROWAEHS1p10n/Fpakm7pWdQ19cqxfaXi7GhNPUtu63VX4/w5q/bi1gWOepMOTUpFWQ1gW2M7SzWcoLTCuSb5clXSZnnbonSGMVniqQmQIoE2QeMjWGeT3kdItDY4rZA23QtFTLfUQNJsCa3J8sh4kuNQiKZms8pYhZzZfI53LvGzhMTo/JkYD/516xw/E6Pvi9aIFpGARAEuRFIuRn8wBIWUSetHlnGet5awJenDlIxpHq0N3jm8AOta6Pq0lBjIywHeB3QW0CIQYsfuwQ6TiWYw0BiTkC2m7wAak6N1AjdLGTFaIWQ6lLRUyLxkvHGZfLQFaITrnt7SU+fNIBHYriXLJa+99HLvRL7g/ZSKrJqgdCqoOpuE76n7m0bkoVvgmmn6eUvRGzbSWFjqiFIJU1Pkg5TqojVKp/GvkALoo9hI8oIQ076n5mv/M+hdzaCemkWEiHivsJ1FBYFQEutSJzsi8DalLSBNGs2pFJUn0M+Cdf0Trcmo5PnrG8ihoxgFHjxZcbpY8+LVXdZrS9dGVosW13Ssas+9d5Y4E+iamtlRy2p2Rtes6eoR9UrjNiUueOpVmyDqQeBaQfCeYSEY5YIoDO1ZS1cv2bu+y+SgxPjIS92EB49mbF6RDIaCfHCDP/6Njzg+dpjcc+XmmHJ/h6OsZrifkeXPzgkdYkQXkjv3G775wxnXDmo+/VxFmcceNC/wUfR6XIGzUK8dTgocHh8E1ve80Jj0fT5GfJB0LWyPBMNhJMvT89UGhwhpYBuiTxncfYfEXNUJzi4UTVtTljnGaE5OZkzbGm0URWMo8oxBlZNlEaU1yNBLFCRZYfiln3+N//qff5N7T465dWU3oZdEQMSIjQ7veQoud86nq/EFiIvO5iv+83/w6xxs7bO1s03X1Py3v/5VnruxwZuvvYwP6fe2rqHrGrruXNvtOT09JUrHhthOekel0FnBSOcU5ZDheJvx4oTZ9JjFbMl0OmU4VmS6oXno0ZlG5hE390QfMFrSLSP5COqVhcpQNx1ZZsgKQMLqtENpg9SKstRUIzClQCqBbSzoiFt4hNcEGdgeGc4WltXcsdy42EmSMTk/98pnePz4Bxwu14mvKEE5cGsQa1AemAB5ZNDCSQeMIzauKGWPEMkyovVYY8F6urZNGvsAXVPTtQu6xmONZ9EsKE1OcalARcHxco61YJ3g/qNvcf/x
Bp/+7Cf7hk6aVCgVUcKwtfsKw+2rrPyc9Tp1eavBDoPhiOX0NrN5x+bWJcbbgbbzPHmYcqqbWrG9f4WNzdGF7t9PukJIZ7EKBruy7GzvMm2POHoyZaBGZFlGs67pOk+MOhk3hhKBYHHcpHNeQLP2+C4QAphSEsM51ixN73wXEEqC6KcETZJ7dbVDa0VeGM4erFlNW4qR5vTJkjLLWa0vFnNzgeYOkQ7ZHrtijO5BwToF1EuF0ZpIRCmdMCXBgpcgNMEnXUly9Iqkv1IKqWRyZcmYHJhdA871vZjY39IypIpoA5mPROtQIpKHlig1vqo4PZtiXepMGPXTS+4459k5l052ZSSDLIFVTxd/wvHbZ0764JL4GkUIIYGKQ+pUWZ9uewkjIuk6R1d36SWtYmq3C0ns0yeejntyz/7WmK0NSaElKjqEbzGySLF5CiDx5QQFSqYRvJTJ8aezHFMMMdpg6zUuQFlUaKOJ/sfpKDGkrqDSgj5s5OL3M0LyBHh8u0b1XdAQHXgIdoltzhDxx8Wg1Kr/LGm0lhTlkLLcIMsqhNJok6NUckOfd+LS/DZ15mISJfW4mNgfiD3gVoIPKU3mfJyvTWKLebukcx5TDDCmRGQqRfBZByI9B5HAYDCmXi0vfrN+glUUGf/r/9Xf5O17b3H3+EPuPHrCWR1YN4Jwuub0rGN23LGedpydrnn8gWXZJNabqy3rsyahII4bFocdpRnio+f47pzFrMPimD10LJ9IFkpSbOY8/mjOH/zG95AjyWCww2ikGOjAtc2rLH+vYbIhGO5pDq5WyMNrfOW/fJ8Wi0Bz9VbNpz57mUnMCPLZde2915wstvn6W7dp3ZLXrgmK0qYCX8rUKVOkzwwe21ha6/sINbAWXOfxVuBjJEjwnaBzSbC/s20ojEABUgtkADpQKmAM2BCJEmazM4zSeCH6LkKRNFFSsre3Sblac3y24ni9RMqUGDIoMqoipxxUGN1zNYVg/2DCzcsT3n7vLlf2t1ACQujSKLp/VgUQhUIrTaYUF7HFSgluXd3nM6/e4tbNq+xsb/D3/v4/57f+8C1efv4my9kpXdemFJIeQJ/lA1RmaNcCY5IOjx6oLoTGaEVmDHkxpBpsMBxsMhscc3ZWsFrMqAYzRvsr7DygMuhwaCUJztC2HflEowoIEqphhiCmA9sJulpQVhqpPSGopw5/YcDZhL0phgYfXJq2ZILLeyUnT1Is2UWuotxEmRe4d/r1dFRKIIBrAAsmCNQgEkooFtAGUBtgFZi6YliUzMWC7e1BIma0DTYkxWVrLc46rIW2szTe0rlTFu2aqhijCg0uEl3veG0trTpkp9oAF3BOsnYWH1qKwZC9/ddQ2QTEmpU9Y90EhuWAje0dtITZ2ZTFyvPKG5eoVw1BBdplg2hK9m8cIMscFS92VP6TrhAjUQWGxZj5fMHho2OKSUaUFqvWlOMhtvPE2GFrj4iadUzvQSkFoQv4mLKShRTELqYOeoj9QSieRstKGQmeFD/rPLUN4AVlplgfWlrbMxRDzqu3rnH7zhP2rl3n+4cfXNj3e3GjXlLkWowp+D76Dm0KTKawXUc+GiXRfEii5HPtmJAmoRx8apHHmETz3iWws1YKj4FogF6YLzXg6eM6iMqgdaTMZV94NljbMSwlYT2jUAOGg4LFqkWQgsvVMzw0/jv7EpOuMBVvCqMSNHlrY0AIK04WvWJDpIJDG43vKyalFF3XPYVPG52+7q5LhH2tFMF5OueS7qyPC5JCYr2nyDSVCexNNDsbJUbBatlSL5ccFFsYnTpeIkZkjFRFQZGXaJORZRmDsiASqAYjWp9i8qSMGCGw3RobHbkpkis1gDEZo9GYpklFpHwmjJI0RsukIToH0abPVOwIweHaM2IApEAqjVEKoyVKKpTOyPOSohhg8pKsrNBG9Z23ZCpKhZ+CkLAr/k+Aw5VST7t/IaQElODPC/f0kEcPCNnnMzuysgSlccGjtMHIDJMF5vMpRns6
29FLC38mSwrBtSubvPdYcO/xkocPF9SzyPx0ju0Ej+8sOL1f054K5ocdtlmgjCb2zMnziId2WTN9uOLsYWRddzz+YEq77AgKTj9e8PV//iHr9ZoXn7vK23/8AY/uzth7fgfXSoL31FYzGWbkuuTk/oKdq4LxOOPzv/ICrin47a98j3vfO+Po/RoVNDcuD7hA98GfWtVwwt7lV1jXH/LGy5rdLUGW64QPSiZuhAZEpLOB9crReIfX4alZxwboGogxFX/ORboVDErBZCzRuo+adBIV0psrhvi0uy+VZ75cYqMj0xVJXJZkMU3bYNuOojBc3ttgtW6Zr9Y0XceqbpBCUhRLilyTG4PJc3Ktef6ly3z92x9yMl9Q5Tmxx7fI/tJjtEpAdyQq3UD/nVdZ5LRNw7Je8J3vv8XR8YpZvWR7t+S9O/cIcc2NGwfs7OxSDgfooqTIcozJ6BYdUONci5ZVb+wzKKHS+1IJjM7Jspy8GFAUFWfTYxYLkwpm5sguOVu7VQeZpBhqgrepux8krWuQUbM86YhdQOYRXQl0zAh1wnDVq4BxEmst0QqqscF7j/AR7x3DUrOxoTibXqzTXCD4/of/irmrMSTQsLMC14nkBC8jYggjA5MKjrKk+xuYTW7d+izLesFydcx4mCOlZN1FbEgFdrQe2zTYokiOXd+ybFes1mvMJnShQXpF5z1eZDTNEtF1ZBtVcjurdPYMJ0O2936OGs/j5SlxukKXLVf2r7O1NaB1jsZanhxN2RjvMyhLTo7nKZO6gemjQ9TGmL1JSb06udD9+7fYaLJC09oVw7Ji7hbUZw3ZQOFUwKqazWGBFIJGeWwXoBGYXJFlgdZbhBe4NnX78BG3Ss2D6MGuewOVSpIKSBI17wJEQSY1MgqmpwtkIdi6VPL8rcsUdcX2Zs3j44s1vVzcrCRGnF2nPw0RcEwmmxRlkYDBKt2UnUsjT20KYv/CiX0GqhACnRmsDcRoe1ixS8O1KBOKI6bRL4i+hSoJUqI1BAPWB4RMuAIhFZnywBLyEhUz1jYmYf1PcbTmfUzjVOUZ5oas0Cgs1/cKfPTMlgkUHPuCodCJtyVlf5sI/qnOTgKZyUAosrzAzhbpJihkGgtH0l7jkbFlVBh2NsZoowg+3ew2xhnj8YSsKCnKijzPKfIywVR1hskNmdFY1+FjoBCCycY2y+kZUiVgclkWYDKC7wt+ZOpgRajKHGLDs5pfCimwziODh+hQUhHJ0ucvJlG2kgals8QdVJrMFGR5SZ7l5EWByTOk1r1GT6C0RoRIXlSAwHUNUdB3hlU6oIVAI3A+XVyCj0l9HQPBu4SGkKlY9M4iVYrJUllGs+5Sx1Yk53o1GNG1Dd41dF1N555l7uy/eUXXsJw3PDx0rBYSe7amPhZ0M8XJXcvqsMMvwdUe7yMmAy8iqN7QJSO+FUwfdjy6v2A6XUDtCZ0gKs/8ccP8kaVZWx69/RFdVyMkiCjplpZM5nRtpG5qfNvx6PaUay9ssLkhGO57Pvn5Mb//LzPaWSCsPe9/4xh3uXm2Bq0I3/jmu1w9yLh1OScvI8ZoUAJk6mJFDSEIvI8pwk9BwPdFHngvEoqJ9PIPXuA62N03DIYKk3tQMVEOAilBo5d8ECKIQNMuWa1WqJFOTvKYJiNaa2IAax0xOqoyoyo0nZdY5+isxXpP2zjaLjKIElFpdnZ3UepDzmaWaj85aaUS5DplcCshUX3HO4TYz1T+3dZoUGGUYTTc4BvfeY83XnuJv/7Jy9x/csTpbMHvfvMD1Lc+4Fd/8XO8+cbLZLmgq1uOHz3hww/eZmt7wOXiFpkuyXSGVAaJ5Nz5plVqCGiVYUxOXlQUecFsXqAHEpp5ytCuM7QQUCTpARJwAdEzJaP36KFkqCpi60nm5vTzGO8OaFc1rtb86AdrdvYjV65nrKYLxtsG2zjGA83J/GKfY9stOJw/AcAJUE4QLanok5LNjW2EbNipFjRZzov7b3DvaMZod4PJtU2W
D+dUmWS6XJJlgsUqacYyBNG21M2SjWBSfGT01N2KemnpmpamXqNjAo+DZN3MiTZlk3ddi1Ca1mt2Ln+Blzeu88MP3ufj+QlulTFRgpsvXEYtHN0yZzU/Yjpb8vqrr7JeLXGhI3qLFhWUHdVII9bHtO7sQvfv32YVpqSoCs5Ojri5u8mT2QIdI6t5YP/5Hf7WL/5tnhye8uT0Acu2ZmeyQwBuXrrE//vv/+c8Pp3jOv/UkJmkW+nXjj51+4SPRC1QJhXx+1fGHDxXMv3Y8eTRiiAjJlO88OkD9i8NmL5vOZ4umK4vdiJ0oYVfDDaNx9Dk+ZDhaAupFFFEkm4qIQMQgohESpN0aOfmhV4rVdcLiI7gPIhe/yZIN/w0iwB6+UlMLwBxjvaQEkJARmh9wMWU4TvSKzCK1ka6KJ5Jjuz/0JJCYLRgWEmqPGOQSTITyDLDJ25u8d7dM6aLBml0ynvsx9AJ+ZF+De8THiJKgTY5SM2T0ylRKaK16WUtBC54jAgUBjYqzaXtLBWMWlPpyGBQcOVgk7auqYZFGpUrgzImRSiZNB51AcqsYJDnxKgR4sc6OKMzlJC0ne8ZYuHHXMEe16CUvhB90L9uib4rAr37Vuo+I9oSgkNJiTaGPEsdTaMVeZZjsiwVfZkhy3OMNsjetJLYZXn6vMaA6HMbUydOIlWCNRNT9y8R/tPlxLquz4/uUDLvsbjJORyjpF7Wyd2OJj4FPgvKqmC1XOOcw/6McC4IQVN7Htxf8OThnPWZxZ453vqjI2bzBbZr6dqWUFsgovKMamOIDQ5amcbcjUXrnOl0Rb0O+LbDtwAZMSyol0uErNA6oEwgU4q6iXgbOLvfsrNTEULgODTMHtcc3p3x4Vun7F0eUaslk0sV23tD7r9zjMk7njwKfPELr7G7+6Nnti1N2zGdr/gzP7dDrtfIzqVmmxFILUFDFJEQBM1K0LYRZMrkDK7PfCa5c50DHyB6QZ5HtnYkoyrlakfVy1WERDiZDorgERFiCDR+ztnsLNEPYniqh4shpOxfEXqlv+phvyIZbWSZXLLJnYQyCpOlTtmtKwcs50uGNw/Q/aThxx3nNH1xweJDvJALshAwW075rd//Nl/6/GtcvTTg8HDKtf3LvPrJTb7484IP7jzkj9/+kO+9dRuEI8sURW54/tYue5euM57skekyfa2RXosr+i875atqXaKqhGXSJifPS87OcjrzCBVO0V0k2kDoFMEooksosSzTKB1pu4iUET0EckWsJd5GbBPJi0g5keTjknXtuXO7YblouXLNgDbkBYzrQOcu9p3X2VX68fbqkxDS3SMLsHf1Fq+/8avU9pT33v+nmMkBv/ylv8Y//uo3uHr5ABc6VF5Q5RVHj9fkqsCvH7OeNylzPAqm8zW7mxv95TWAhdA42rbBdRbdEw2iM6wWZ3ixDUrTAt16TWMdN3Yu4XVLPla091Y4L9jefomrl3e5/8FHzI5noBp8p9nY3aZdPKTp1ogYGY4z9PYlRFywXk4J8meTYASRRTPFuYrRYEQs4EZ1mWW7QM07urXk42XNzN+hurnLr37xP+TPvPIF5m7N8s5H/De/+V8wW7fYpiaKhEYnRgZFxXKxSp9ZcY5bkkQfufbCBn/t77yJzgwf/eAh9//+j/AxUmmFFJoQNLFyrF2Lzv49zepN4fbnuqrIpeufQOsCIQSZyXHWoTUg+u6KgMQih65bgy56l1zoR5wRrTVN0wIxGT9sk7RbPe9OKI0MAhk8Qis651MnIiikEigPmdYIKehsR6kjA+mQQf8JFMqzX3mhGA0yMnP+kpEonW7aJhO88sIO9x/PmS59Yk/FmAwVIdBZRxCiv40DURICrOoFIQa0zhgO0r5FAoUObA8VW+OC0UAwKCDTUOhAlsOgKkE4lvMpk62tVPQpjdYGbQxGK0yWI4UBrVFZgdYZrm2fsvsi4ELAaEOIEtXPvYRS6Aj0xf0z6fgJEL0wNoSAVhCix3tLjB6tM8o8
xc0Zk752YxR5ZhICR5kURG/SZ/C8o5cuHaqPEUy5Mv1v9+PPtlIEnzSYShtiEKgMglBgO7quIcjkqhQ6R0iFUjlCZrgm9IVjcmR771KREEKvU7z4rfpJ12xp+fDdY84+XtKuPevjlvWiRRrIixylHI4GMsXGpRF717eZLxcU85yVq2lcoNzImVzRONWh8gq7DKwRYDXlRDLYqsgrSV6BbCe8+90HnB0u+fjtBjerEVIwOz3izvvHrNeRH33rAZeulVy5ljE/67j6/Ab3PnxCiCUqKt585Qpf/cqzQ+CsVjWvv3qNIn8HhceLgLMOk+m+OEsUg8ZZVouOw7OWRRdpGoX14ERIn52QRKlKJMzGzqZiMpSUVcDbmFKtdUT5gDZgfUQ4lXiRUWC95ejshKqsCEH08PBkQBJogkidU08giIiIEnxI3eieBKBjKgC9BVTgyqUNPrj3hK67RpBJSwyhLyySWSWBROmlDf9uazpb8PyVa7z5yjUaW3N4tuDVl1/k5q3rDIYjTJbz8kvP8ed/5RdYLtdAMmWUWUZmkntS9jnk8dze2H9dTw1mIhmotDJU+RgjTZJxaMP0TNM1kA2P6TpB9ALbKLIiILNIFDFhm0SkWztklt4xKku64HIYUUawnHuKseS5l8bsHlRMTxsIHtdCWUomOwUbDy3n59pFrJAqBmSELkkN0RGKCvZu7vPaG5/go0eHjI++hF4HtqoJm8MNNkb7NOt72NjgoyQyx60dEHn4+BE7mxkytpydPMHtbaGcTylZNoHwu9YTnMXKjBgFjW1puhmjwWU6PNo52q7GYxhVhsX0Ma0PZNbw5qc/xdUbm0yP77BcTRGl5sG9+1TlAbmRzJZndJ1FC4EZa7r6mOX8BCknNGLjwvbu32rFJMuqu5qb2xvYLDLYN1SLXa69Jpiuzvjmt/4RmzsjDnYW/NN/+v9ky2wx3M2TXs8ZtncGBByruU3PvRAMBiWr5RqZSa6+NObRR3O8hcGo4PN/+RbVYITzkcG4QmWCvNCMtjJOD2csT9a4OnW0JSUzLk7/eKGu3ug7iIogBKONHYLtEMgUMxYcwdmnzkuE+PELB/W0O7JaTnHe0bUtzjlEDEgiSqZb8bnQXvbC4xA8Rmhs7ZLbLqZOWsr/FYjOpVGL1AgFG1EQZg3ip5SUoKRgXBqqXJFpQZYpTK7STb3P+cqU5Oa1DWYry2oVcc6nvPuQHKW2P2S0VgSh6TrbF67pYBBRUJqM0SAyymFcZozHOWVpKBRkJpLriDGSvDAgPNdvXGGys0WRlxiTkWVFQi2QiP15maDI572LEJLrSGcZzkeCj6mwESr5k1Uaw/N0FKqedmYvdEWIPo1WpUgIHEIguhalDMNyTFGkwk8rRZ5naGPS50eKJBw3BqUNxHTJ4HzAJsB7lw5NJDG6vpMXkSr9nucgbeeTy1KgMVmWWInW0bUNkYgWChsTLiKx/zRSamKINE2dTDMm3Qq1yZDyZ9TxAzof+PCD+6wPW5pW0HaBrBTsHgyIwnB2ZMFJvHTsXB8wuSZhqolFSXNcE6Ul3xCMrxpEWVANx7TzBu/mNAvDcGvE5VeHDEYD8iLDzwR33jmiWXW8850POfxgm7qxNLbGW0VeapanK558PGV/f4913fL8K5e5/+6U+x+c0MwLvv2N958p9DpGwc1rG6wWK4KN6CxdFGzjySrBfBm4+3HDW+82vH/Xcnzs6ZyEKAhBp86RdlSZYjCA8QYQA5tbGWUBKksj49DZpB8VIIIG4RAy8R2FBOE9s+kZq/EWggyEhygROl0OlVZomXBE9MVPDIEgEiA/ECAEnA/gApEOk2um0yXHZwvKLLnRQfSX6jSNSZiFcCEOrXXTIaJlsWp59aUXeP2Vl5nsbpLnJUqqp7SFwijKrY0Evu5JBoIk7ZH9hTNJfSB1TsRToxUxSQ5ElBiR0k5En2qipODsTHA2a8nyNUEFfBRpCOUSXLpdR8wwA5kQHEoHTCnxNtDOA8VO
jjIFIlfUswWDUpEfGFA5wQaUdAiTcbBbAhfHXFMiKXhEhBxoBcgIcgR7m9d5/danqee/zePiCl1ccnS2YlyAjEkjHyIQAuWk4Oj+A4wOPH74MeP8OgMpWc4XNNanqZpP+lScoll3hNBfKILGh4bN8YQoB9RdizGexWLO9vgm3i9oXEdhcl59/VPs749ZnN2nXc6JSqGNp13V3Lh6Bdst6VqbzpAYmJ3cp25WZMUWnStYzJ4N+P8nWbEnObw3e8zrt55n79qEduU4e7Tk5vP7nORrsiKymrU4seD/8Xf/U/6P/5v/OyG3fOILB6itMR+8pfj+7z7B23SJms3nAOzfGLG1X7F9MOLs4Zqd6yOKMufw0ZTlasXDj07QhWC4kbF7tWLv2pBsMeGHH3xMaGHdrC/0e73QUW9wDq0ExWCUaOlaghQJTZBYJvR5QUkz1Y/pEOkBapsaYsR2Lc62RJ9uHVIkQ4fUFUI2CGER0kPwveEqJSoQI0JGpJZUWYn3gazTLOsOpEYQCJmgKPQzMh786SUFFEaQSciNJDMSJUEZ0wOXE1xYCcXWKGdjGGnqwLoJ1D2Pr20doAk20Nh10kMCKkZKJRlmgmEZKQqJySAvPEJast4MopRExJBE5t6T5Wks61wazStlUnyZ0ESRxkUxRrzzmAQGAyJIRcSk8aqIKf2ih2p7SDgYbbCuS1/zM9jjZJaxqF7/GHttlJRQVbsMy4rBYIBWSYdljEEb2ev+ZJ+MkvWpBQL6cHp8TN1iSF3FKJNRqXfspktK2r8YI0aX6VAlHSIhCgajLYrS07Z1kivIQIyS4EPqnKj02Tc6jeSbukm6wSJHPsP4sX/TSq5vyfXrB9x71JD5DgSM90ZsXi5olp7FUhBtRITIzqUhw72km5ytDD4GdBSUuaEaZ0SpkGVAPgwUjwraVcNgV3P1+YyNcfqcLcscM8ho1i3dCs66BaCwToAOCWfSFZwdRoI1lDoyjQ2v/cItdvYnnNxveHx/jnuGusjhoEQwxa1r8AEPqCrjyeOG739Q8/7HmtlCIdUYpdZsbTiqQpAXAm8lq9ZzdiZZNZrpUnH7EZSFpRpLrl7NkCogpSc49fR5lgS0lynHPHqkAuUlq/WU1arG6JgKPZWc9Mqkws8Yg5GmB64n6+c5TSDG0I9s00EbQsRoQ66T4chkBYReyRdTp17KiOzj4C6CfqC1YGtzzEvPX+OVl24x2RhhjD4nVSVjoEjd23jeySOmv9YXeTGK3kF9PuA9j7HqBVX9EufSDCK5GfaDyvQrShyrxW2ib5AmEnWHs8k9HXUy2GmdXJezJ46NbcNgy1AMBFJ7VBTYRUM5ynA2kmuFtRGvEnQaGdnauFh2qRCQy0SueP3FLzI9fcyPbr+DzCLKwN0ffcz9H50w2Z8QnObJ6UN8PGZ79CInM4WI6fJeDTf4tf/gz/OV3/lt7j0548FhidSRplszndXkBdiQikVhWybagBdY72i8oNBjxuVznHVLnIPVcsXZ3PHyzU0WZ0/o1pEoRkxGiunZHdrZjC6m6cfKrpD5gM3tTRaLYxpnsdbSOWjdkMzss5iv8bZjsjW50P37yTf6x3/qQ6QlghWs64aWltmDkq4NZJXnzvcecemFLTr/hP/PP/p7/M2/+FeZbE4w24YXXxd88J0zFictWZY0pVYJdq8OaRaO1798meZmy9o2zI7XtK2jXjXcf29KUWk+9QtXee3zz6MzuPv2IU9OZqzb9sLNfxcKcEYK0AZlSmybEjOs9RilUUqlTFl9nkmkkSpLL28BvqlZrWa4riE4S/QOYkqMyPIR9MYEpWqcjwjp0q3a9PoMH3ozhEZrkh5NCGJs0FYhg0QEi5eBMlfPSn72p5dInZ3k/tEYLdAmOU21TBwg0b+wlVAJOWMsWf9wRBnRWdJ2eOHJVO+WFYI8y5EykBuJISKkR0pNkQ8ZZJJSgVQJNEvUiXvoI86mnEei6GvxFP/mnaMajnCuQ0n91KwRY0Sq
DCULlMqQpNxIIcSfCHfvb0wh8b+Ujs9knC6ESHiWnpuhNEQvGQ42KAclRilUZijyohd9p5B2IUTPQotIJXsNYt9JiMlx20/Hkh0/yvTS7NmUIaaOs7PpguG9QwhJiB7nIh6Bk/0+6wwpTfr5xRopE2hcqISdiQTo4eRSKSAinwHz8CdZkcjh6SnClMmMIj0b+yVXXtskVAtWDy0+S6NrmkAmJKoV1KeWB7dPCL2mrV5aohuglCLPJLIc8CicEoRnY0dy9WbFaGASr612VIOc9VmN944Ol5JVcoUyoI2gW0juv13zPX1G20Rq23HjxRF/7X/2ST794qd5+eAV/ie/9nee2b5kuaZpHkIMPPi448ETwe3jFe/fa8jznJdfMHzxs4H9bU9YB05m0MaERrEu0jhonaCpA8uV4Wg64HQ65DtvN3x0r+OzbxpefzHRCUJIz5JRDqtjf8cSCAeIQG1r6rZNOluVOJtSJHSWRCJixOOIUSK9Svy+1DZDIDBKkmLYcqSAqqy4tLeDs5GdyVYqpKJIF/Feb50u27HviP+7rbIoeO3F57hx+RKD0QCV5X2hl3BTiERzoAf5PxWJxJTIlHiZAUJESd3bXwTnZE3Ej7NM4/n/irQ7RmdUxQQ/StD1GDrW6zv4EBBeEoRFBIk2GiEjoT9DNnaS35mQJkcpFQV0bpAm9GeUIvoIPiJNMt8QLlZ+oJWkNLC98RJ/7gt/kx/e/X0W8QmL9SmL5X1++NE3+Pbdt/j0Z7+Iq884PJzh88jL117lo4d3sL5BiGQM2d074H/xP//b/Bf/zT/kB+99QDUsAMuTx4+5fJC4r9GlSceXPvc57p8+oHEdXasBwc29qxR3nnC0OOPQHxL9BsMcZqen4MdE4Zi297HLWfqpeUm9XJCPhgzGO5RVwezshLatWa4DKyfZG+9ya/Q8dxaPmMszivKnF6X631n9nV9KgRCRvZtj2oXg7OOaUFpO2hNWxxYnSjZv5Bx9dEyQBd88+ycgFszrE+rbLboIlOOMxWmLyQWXtgs+vG1pasfqrGVx1tKdnLEMMcW2NY7VrMU2kje+dIuXX79Opiqa9Yr13BPbAD5eOHf4Ak8bgYwJeTHa2CV6Bb5FENIYMKQ/CiEhxASyFSqBlUXAOo+3Ld52EGJfVCjyoqS1Ees7hDJEndhxeIFUGRCxNo1zESrhSUTAhYBSKYnCdh4bIgiJCw1axwuvoP+HdyXp84wmFaRGkRmNViLx8oxGiX7cIiJFUeCiIsvaFMAeBG3dEITC+diLm9MNWJuU5WgyxSArMRoyLRiWBbkW6NCmF6uKSA3apCxarRQiBsoyS10FJfC2w0tF19YMhiO0ztBG0XVNjzFxaB0JfXye61IMjdbJrCBNnhJJkCgiLnbPzHUppU53fgWKgHM1zrZkesxospHGu1KgnnYv0sEidbp4xJ65F2Poz7tUwEnEU+OFtR3nbwMpNEZn6cUuXDqQlOrdlwEfOryjx/JoIpDlQ6TxhBjprEcKTessEFA9EzA434Og07/3s1g+Rj68d5/5fIah5eUXL/G3/+P/MV9/93u8c/9dVosn6eZvLa51nNyfseU3ePzemtVph7UdIQROHsxZPBoRECxOGs7unDI/WSE1CQarKoqyYDo9IzcZw2HJ3NTYHsmkcp3czpUhLwTzaceT20esnpzROsPllydcfXHE4ekZxTCweyl1c57Vctbx/R8ecu+h5/Ztw7qVbI3hr3x5xCufyNjbLUBH1o3l+G4Cp7de0MVI8Ckf1igBg4jJOkrjuLqviWqT+w9GfPWbK97+oeUTLwZuXLEMihSBaLRHO4kWESMjQYSEbLKWPHP4oHBeIaRDS9nHXUakzADxY62qSHKFczk5kPR+Il04X7hxwEcPjsk+kfdoFPoaSj0tGGMMF9LxMyanE4rT0zk3b91MRjBIj5dMX2fq8aUxtuyLwvMh8/lQN9F74jkW7WmBKOJ5FrB8WgSmfycghSHTUFXDJAmJ
17B+SuxO6KzHFAolE5IoIsAGfHQYKVmfeKoqZeFqpVmeNpTDPAHxs2SCKUpDcBCCJAbNcnGx7svxcJfXnr/MfAUbu5d4zryBy1e8/8PfpV49pp2cIAYSU5QsFpImShQDimyHTBV0bgkqoLuWf/mHv82tg2v8T3/tb/DPvvLP+Jfffodce54c3yPPLmHx2NAhETTuLOWyY3jp+RtMNjbwp0ccv/c+7WzFWex49aUbdN2MtgUlNW13zLo+BuHRQSc8zOyMXAQu727huhOW0xNmC8u8jsiy5LmX9pCLBZtVR7uMRDe/0P37SZcU8ullQkrBo0fH7G9P0PM1tZeEzCJkZHW0xlyuGF7JaR4Hrn1qzA/e+5dsXMo4erhge3vyNDTg+ef20UKQ5WvqhcW2jtvfekQxDixqh3fgO49vJbde2ef6rT3Wy46jxw84fnLGx9+fYp1H58mYNT/591DjlzpHguHWdcrJTjqQfUwdEJFGjlKkiCMUxODomiVSpgO1bWu8bQneoZVByxxT6ES69xZnQ6/ZSJ2r6MHZBudTIaBVBgZEdCglsC5FagWdRNMSBQ7qNo0wxE+p5ZeKo6TTkDKSGUmRZSgl07gjU2Qi6VAQaVQdo6E0hlIbood2kNM5R/DhaSEmhUzurkySGUXV5+4K6RGyhSixrkUKwbAcpdg6CXmhyfICaRTNekVZDoFUKGcmOaS9s33No/uoM4FRGmsTwDZKgdaGGNNYUwiJ8IrG12SmIIYEr3wWkGwhem6XUYjo8a5hPB6xMd6gGo6fJpAIAohkeIlRoJROGj0iQqfPjyTF3rno+lFuKqgTqsXivUVIhTHJ0RtDQGmDkkOE1jjnUFpBF3F+jbctUWiUKXs0j0EJgfAN3icHsJYyjcgB5z0G0sErf5rZ0T9ewUfufHCKbCN/6Zde5y/88i/w+qdf5ds/+j7dqSDOclitCN0S7yPTw45BYQhLg3KaultDiNRnHad3aoqy5OzBksN7Z6mLFAX1rMF3Htc1tPPIeiqIOFQeQGqc90ih0EpSDDSjzQFR1dSrGa5LEgQXPD4G5vOW05M57oVF0sk+o/XwyQlf+e3AuApc3/S8+gnD1asZw82MrJAIJWhdxHWOdd1xVjsWXepNRZcaP4o0BJEmYkxgNHbcurbgs590vPfxhDv3tnnrvZr3bq95+VbNjYMGqQUqi2RB4hwoCZGkE9VConudbpbl5JlG6yKZs5RCadlLL3TfPJOAJwb5VA4j+g7ZpYNdvv29D1m3LeOq7AumvrOF6jWDF/P8nk3nrBaa6cDxh1/9Fp/73BuMN8YEYZKDWfRV53laTv8sKLJzOfjTdV4MpkP6TxqwZP/30jN//k+nqEuFUQZjoKwqhu0e0+ms14wnlBa9bjmEQG7Srz3azcB6gvWEEJnsmzRCF+ndHXRESE2Q6UJNDBT5xUYvRi949aW/zmoxYzje4YVRycn0A6rXX8fajlndsDmpGGSGk5PIOAyoRaCzLbnI8K1DmXSumCzwnR/+MR8++JAvfOZNalvztW++S1QzPnpg2dqo6JzBWXhyvMZ6z5UXXmA0nkC35v3b36ZWM9ZNy2S0zbXLeyzOfoCw6dxZ1qd468hEBsGymK9ZLtfo0QjfNpycrDg7a5kvVzQt7AyHlJWmsTPU0rJ++OCZyjf+/61rG9c5XR9R+5qH9w8pS83gxgblVDC+knM0PuHwhwu6bUNVFrz6F3bY2K3YuVZx551j3NKxch4hYPPSgJd/ZQ8l4XAx5+jBms3dku3JkNNmQb12+NaBjUwGY3Z2t1gvO5aLFafHCx5+MGX6aM1oMuRTX/g5tq9s8nf/039wYd/rBbp6wVrLbPqEze2r2AjOtmhTwbneJFikTg+olArvLEGkx9QFS9OuiM7SeJCmYGkti+WC8WQHIQxd1yFFRvAgZJaSEboWkw1QxqQODklXYArTf4gEIwSdD8S2T7wQLVKZi/rW/40rCZRTEZTnBUob8jyjyA1lmWKn
jDIpAcLbH+9VzGEyJARB3bV0TUPdNChV4oPDKEP0Dik9mdQUuUbJFDtDBCU82TDDqDT6znLNYKR7I0EGyhB9xLc10WjKUUFRZD0cNY1Au3bNaDxMrqUYyXTiB3ofsLFBQOJ/aQ1K0tqO9bpGyZSEcT4Cvtj9FOR5htEaicdLx3BQYIzpKerpBSxERJBe5MlYIZMj2vs+Fzlpi3xInb3E9UoA8OACPth0IEUFwiTHuG2SJlVKQq9fiS4QUegsp6vXKdHCWZQWeCfITYbRitq2BOfxfdZvGkfRd6rlszHC/ATLec/BTsWX/pNf4zOf/iRVrnl8cszH79/nyZ0zVtM13bpNuJyoOH644uzoHeppyuYMIqC1ITSRe2+fMtoYM503RFVQbuVkmebuB1P2/njEzRd2efe7NffeXbKuIdswaBSzoyUiQF4qik3Bpee32K5hNZ9iZwqTSdq1Z3baMTA5dz46xP7cszXD7GxW/K2/apCtpcwyJrtZ6gYIlQCtMtJ0ltlZy7pNOra8R404lS4Q1kdCAN2PUItckGWCQeH43OSYSzuR9+9MmC+v8d69JbfvLrlxdcXuXo1QkTyLBAcuNIAnL3KqYkhRGrKsxBiJVvlTAPm5/ELIlEudCqoUB+iCJ/jUaRYohoMBw9Jw/9ERr75wo3fH9hKY/t8V8WLGljEG/sUf/gGffPF5fuGzn+QP/uDrvP7aJ7ly8wqoHJXYNWk8K8D7nuSgNRGVAjuE+PFYWMReG5jeref/J/bPfOp0in6K4pFIVAwYU4AqGEVBXR9h25OkTQ8ivfNkQGcaFR3dMtLGhrLMyccCO09O/xjSu9xakUwdZcCtA2qgkKokK0bA7QvZNyDpsH2OlAes6jPGwwlGWzqdc3B5k2+91VFpwbWtPT6K32cdHDjJwzu38StLbWu00mzICZ975SZf82/x6GTGv/jqH/Hc5avs7R9z/+EKasuJWxO1YraW/MH3v8uXvvAlBtWYaDs++NEfMV+skDojG0RuvvQywR/RrFdkcovOrVmtF0ip6YIlWsX6dBPpl/iuowtzFsczFrOWxWyO0ANeuHWLKEKSElFQtwL3UzqX//vLaEVja64Or7B5K+f+9D6HT6ZsjTaBgnxf8uZnbvCd00dsZCPCoMb6yHoRsD4w2sw5aQPljiEvNdde2Gbv8oQQPM+/vsfZ44/51b/zGpdvlbz7jYf87q+/S6kLCpUaAQ8+PkSfQFM75odrsjjiE596kS//1b+C3njMR1//95TjlxUjZF7gVlM++NEfIHTO/qUb5EXZa1ZSBqW1LUJFlE7gZhEjbV0Tug4RHKlvLhFR0LYtgsjZyRNGW1fJzIDWRYIs0iGv14iuS1otkyVNBoLOeaJMuotCaYpiwHy+oO0s2oDyGeICtCs/yZIiGQqMTl29ssgZDksGpaYqC8qyRChF17VIaQjxPMUkIUScdUglGBY5dR2xziIwGKWSiDtYJGB00v7poHrDiMNogZQwHJWUg4oiAyFLsqxCykhZVuSZRASHjB5vLTJIbGgIwSJlYD5tqIZblGWBdRatZYqRixbvOrpmhQpZylb2gaqs8M6xrptnoqOUUjKZbJIZg20XdHGdUmCC7ce9KhVwQhA8vci9j14ijYCsS5BnHzw+WKRIL9jQ51aFXiYAAqEU0bbp3w22HxMLQpryEpGpMRI1xXCSYn2IdDbF+cSgepNMDioFontvn7YxXPAInxIBfharazuuXdrjlU9epipTJMVoc8Sbn3mBRrV04SHHD1vwOUIImqUlsEBFg5A6IS+sBucYDXLeePM5Pnx0h1gMKUYKFeDJR44/+M2P+cHXjllPOzprGR5UbGxX5CKnbTvalSMblwwvGYZ7nu3hBqePtnn3m6d4vyBfljz8YMbpoxlxJvkbfzG5q5/VyjPNcJiD8QxGEpVHgo1J7+kjre1YzVt8iIxHOUpLprbF+oDrAl2XUEPWCbwHH1McWwiWVePp2kiWBZ5/ruXRwyNk
3MVywJ3DwO2HS8bjnN1dyNUMwpJVPQNxqXe4qh6blf54bnRKMPLUpROcZ3wn97BRgiAl3vdGEql57rnL/PCdu3zy+ZtoqZJ8QqTJw0UuIcGowNsfvM+TJ6f8hV/+ed7+0R2ODk94/Y3XyKsK+lGbRKJk2Tt3Uxcw8OMLtIg/tmucf51PDSFPFTyCQOhlMakIVmqSUolCSwiestpjub5LiOkSJkTSVSolkUGjypAy3V1kfmwpRylmU8NTAkBXB1g6VutI5UrKjYzNg/GF7p1SmokUHK/f4/B4gxhbqqLkdCVZLEqUmNO6ho3xJdSkIK5qiJr33/4eK+ZUxrBqOoqtgudvfAqB4Q++/wOOz445Oo783K1fYjr/AxarJYt5wAwCwSmilewe7COF4KP3vs709BihSnQZ2bqxS1UK5md30T7Ba2arKcsmoqRHec3qVLFaR7ytCLamjRE72UUEj+k0+/u7jIcT2vqUtl5St4Gg1ij1s7kAZ8bwyhd2aZvIrdc3+Mylq7z1R3d48vGatm1ZnlRs7A25+so2XlmufvIKH71znw/feUhRZgxGGeu4Yqwzbjy/QzEqeHjvNGU7y8hzb+5z/YVdnG0YDEtGmyWf/fwnuP3Hx9w/fMTi/jJFQbaR0WjCq1+6wd7BBr74PvOjOfuX37jQ7/fihEVCsnf9VY4evAehwzeWuk7duhiT/zE5ztTT5A2lDM55QugILpk6CIIoFV3nkDLDZLJ/0GRKAbE13iWGXFkN8d5BH+MTASEloe0IPuJ7bLYPgdgHDhopKTKNlD+dm4VUAq3TaM8oGOWaYZkxHg8pB0OGozFFUbBczGjbJSEEfF/4Oe8oyoyuTq+yXAua+lwf43qIaXLzpTi8mHSWUqGU612AmvGoSs4/k5HlmyAk4/EAES3Wt+TFgM6u0TFHKwPBIaIl+o6AYrWIZFnv7g196Lcx1F2X9lxJyixDdS1tPUtJA1nxTPbTmIyNje0eD7RIzimVEjqKvMCo7MeHlxS9VinS2S4lL0iFiALvUji2EALbdWnPY8LW0GvwlC5TYH1oUSojnBfVIWXySgTrtkUIiY+RvJqgMkFbLwi2IfiWNrgUw6WSZtL1sGcRe/i0kLRd14vvf/prOKgw0vD+Bx9SvJpTFgNkVGxs5ezuVUjh0EaArQgsEdEhgkDodNnKtMaFACqwsztiuj5l83LJ5FZJUBbqSLOAxVHNw4+eoNSIfDtSbWVUO5qtwYTl8ZqHyzPMUFAMFGWZMdosuPnSFke35yxPGtanTZJ5ZJ6NbE18VpzIfoXY46FCMuKoXKNLjSPSto7ZvAYpGA5yFrVFuMAgk6ytIHiNNm3qvEuwEbyL2ChY1x6tA3mm2R0P0RoOdh0P7h9x+94T7q8uU5Y7LGvB6fsN1fgFLm+32HaB8x1giTJNCCQqdbjiedEjzn0NnCv7RDzXnoJQSYcdQ2KhXr9+ia9+40ecLpbsjid/ouA739uLkR/EvnhTOnCyeMJ/9U++wi98/vNkRc7v/e7v89nPvclkZwstDfQ6qygExEDoTSycQyFExAMypg5gPO8AAgiB6nWfUsheM+iAFiH6vF8MWTboJyf0dAeFDwopbbrNyTRNEE6SFRK1q4nep0jGXmOojWK4DbYDPQChJUpP2D74NPDbF7JvAFIrbNFRjldMl++BOGBrfIMHZ/dZrj3DIuOJm7FuO7YnG4RgWSwdU3PKoFDc1Ju8f3zI9GjBbKq5fuU5/tJwzIMnc04frTl+cMjlzW1+eNRhtAPvkd7w6Tc+S2YKHt39Affv3UbGHOFnLOQYqQPTk3fJwgKpx6y7BQ+PFuTjDS7v7LF+sGBaz4gOfChxSwutpSo3sEpS7Ux47oUDgvesTo+p15Gz5RxtFFE/u2f637Q657n6wgHDccVyPgdX8fqnP8ne5hm3v3nI+v6C92zL9tYWVZkzHAy5fvMKH7z1HXRmGI9HZMZwfK/m4KUKXWiOH09Zzx1hnXHw3JiHtw+p
VzXzk5pf/KufpBppZivFx08i3cIho+Tq1oDB5YqzkyOCXOPCmKO7Sy6N/j3GuRSDHXb2PSdP3iNG1z/wqQATPj1TWZal8VZwKJn3AfcW1zXYpkPpAkeCNxPAx4DSMJse46ynqRcQLOvlgkiHzov0YggeLQ0BQWYErW8QPuL63EytJMYobGcxMuOnRcxNqR0SQURJSZYbhsMRW9u7SKXJ8jIhVrKMTA/pbJvqK+9w/W1VEvCuQykIzvXutEBmkpFAmkSq9y6QaYVRAqUjWiVNWZ4l/ZTSCq01OjMED4UxIAN5XtA2a/JRhnMNREvwTRLpas1kMiF0Dc5boq9pu+T6NVlJVY2JRNbLM+rVAu8dzgucF0/zly9yCZkceNF6fFeTmbIvrjUhJDOFiBCt7blevYZcxb7o6kc2Mekl00gYvA8Yk+G8IwaHUmnsLYWmsymeSCpF9B5nA96v8T4QhAQ0NvjEoVwskSL0jvKAJxCj6jOWW5SUBK/w3qKzlCe6tt3PbNSrteKLX/w8jx4d870/fo/JpGSwOWC5anDO4L1BRoVrLc5ZXIiYqsCUJSIqfKgREZwXvPP2PcrHBW98eZ8r1xVkivo0YM8EsrtMXEs+/Oghw/2c3ecqJntDdsqKxf2ch7cjIgZGmxnSBMrc81f+8uf4T37tP+Krv/d9/sE/+gqHZ3OKXCFaw/LsGccuxoApYDmNmAp0pogCmrqlbQNCK/IygdjrqWe9hLq6xZ3T5/EuoOMh4/xD8mxGDlgZ6JqIH8NmVZGXAZkHlNIY6Xn+mmZ7QzC4t8Ojx4ZHR5HNUUm9nPHRKsLlDa60HeNR+kyLQMIDnbscUnMMIXt3bO/UDTI+vfyIHmnkfHKyjwYVG+OKD2/fZ/tTwzSZ6bV+F9n5ixFs14GI5LqiC5bf/frv8fGD5/jylz7LV7/2LT7x8k1uPf8CSqc0oOR0Pg+xTuPbcx2fiBBETPrZc6IA53twjoNJTYbkhUuSnxSpGNHK0DUNwUZE1ARi4mgKia89ztVkWcbypGO8m5OmLxCDx0eBc8koJvNUAAolQA0phi+weenzF7Jn58sDG9sHmGzNx4fvMSzHlOVNClEyX9WMhhMix3x47y0Odna4/+QJuYqIzCSdqcq4NNzl6M4j/uv/7z/k53/pNTa3MzYGkrCfcfvklE988mXycpfvvv1D2qnl+ov7PP/STRbLI+588A6D9Q3WfslKT5m2S7LuLpiIKQrWbsnMZVy5eY2sGlBIweBqwd27Z7SNx0dFMx9im5aqgK3hgO1RSWEk69WM5WxF01nODh9iYpnYNT+DJRAsjh2PPnzC/HTG7o0148mAzb1N5KcGvP+NB9jDJfdmR1BnxCC5fGuL3b0JH985wbeC2UmNW8P44AqblWK97Ogaj2+gc4IHdzualcWvDKPtSNdGxpuD5Fdo4bnrm3TO0Z4uGcYS5x333j2irAq29y/W9HKhAOe2XiOEQYikK0ssEE9wEaFSioKUCql1wpN4i23X2LZBSkWWD9OFKwrqeoHudXvp17GE0CFlOrCRNnV6sjFSKJyztG1N8I7ONn3hGcCH1AlDotFkmcGR9ZmXP4UlBIMiR4SQ8AhGUQ4rtDFU1YA807gAMSq8lSij8NEjQtqrtmtQRYa1LfV6RbYlWUznRC/JiwxpFEpnCKFxrksJFdokXZ8JiBApipIiL3EhkucabTSDomK9npPJ3pUrBNrI3mDTIIQkeg9CsZidMByDzjRdc4btarwyIEa0pwusdX1XNSCiYDDYQugqQZwveMUQcZ1lPX8CIeICBB8wMY1oYwwpF5EeuRAcWsjEvY2JRyhE4vElJp9PXzeCtq1Td9oHYmyJ9QypCpTJMFriuob1epb+fkhwZ6RGqIyIZN3OcbZLSQNZlkbI3oMQeN8Rg0MQUudBJuF6RFK3Heqn1IH+U/sZITMFV69cZntrwvHJCe99eIezac3ZaUO9sDTLNa5bE8KaKIaMN3YQRjI7OkOFDqyk
muRs7uVklWZ+0tCe7pOPA+2ixXl4/c1P8B/9zf8R/9v/3f+BsLWm2taMNjPGVWRjy4ATNAuHMQpTtXR2gYmSL/3Ca/ziF1/nc196nf/L//UfcPzojMOHxxwdn/wJEf+zWKGHxjuyIicAbeNpa0/bebJKM6hMYtHtS1YzOF6+wEZR8dFJjogl1g7ZGrzFoJr2ZgtBmWlMAWiLkiYZkaShXqyphOaVqw8YbPxZso1tvvd+y8h+yHCw5p3bx5zN13z5CyU3yoqAoweOQB8PFcWPHa/Qa94ifZV4ru8775xJpDI8f+0yb79zl5979QWM1j/GoUQQF3UZieCjx3uJdw1SJ7j0O7c/4OHRIX/1V7/E+x9+zNHxCW98+g3KapQ6diL0vNfzn0ifhtKDqs9Hw/1vgXr69Yan32tKESpSVzA6rOuYT0/44PYjHj1OhrkQPCYTDEsYDjyDSrOlIBtkLI86ooiYUuA6RzE0mLIHZkdPUBItNZgt8uHzZGZ0MXvWLxfg4eERO5MdnHub+eoRZT5hUu5xNr2Nd54yE9x98F1+5bP/AU37DSb5BstVjRllxGgxpiLQ8eGj7/Pi6Sd44bnX+b33/jGx3KWoStrOcuZnVAcjOLY899ynQEa+//0/pNCbKfEoExw9vk9cLdG5R5iCpXWsvcZsVETlEcGilePkaEXnNciIjuBcZLmyjEYNw6xEaUldT5kef8iyrpnaGVEZJvEltndWwEcXuoc/ybLecfuDh3RrT/NkjlivOdscoctp0tRuCg4u7/P2Ox9w+vEZfhUwpeLmi1e5+94xnehompYCSVd3TE8C63WLQiLXktgpattRL1r8MjA9XhCeRGZHNdde3uL69R0++uMnPDpaMpgkzN3hx44YBD/3y3v4+PBCv9+LxblIRRAgtUSJHCUN3nuU6Fv1MrnLtFTp4HUdrlunqZwusd4hlMQIgTSSVb1KBZGzONshZNIgZcUA7yzalIw2LmOykrarqZdTlvNDYhuTizKmCDQjNBaHMgGDQJ/rP34KSwA7WxOGZcX2zhY7ezsMhhWDakxVDeC8M+oVVkoy43AupNu7SEVCCBajJQqHayO+kISQkxcGZdLIHAyTyQa5yVFCYowgNyEBnOV590qhVHKjdt0arURK88h00i11KzJVYlSFtQ1FPsDZDh876vUZQz2ka5YE3xG8pes6tMqTq1donA9oldHZBkmRCu8LXpGA7da09RzvHNInDWQrAnmep+4cAkSSEAgiXUx6sHQYJHF2CJ4QHN6lNA4BfapMIHiPJiNGT6ZSJyrYFcv5aYqGC+eawHONX4tQOUqllBCTD0DIp2NdcPjOJQ9HTB1HqU3qPNiAtQGR/2xGvekxcCgVGQwLqsEBQWt++2vf5/G9U+rZim7liU5i9AglA6v5CabICb4l2o680Hz+V1/h0o0C13b88bfu8tZXH/CJNy9xfH/N4knH5IrmE5/Y4eDSmJkUCW8kOrRMnx8ZFc3UY5ca11asUXQRrAsUWvClP/NpNv7PG/z27/wmd987Zr5qaNpnp4uMpAQHjERmks4F2s7jXWBQZeRDTZYDMjJQmr2rGY/vBr7/cJP35/t8avtbEC0re4s8vkeWL8gzgTYpqlGTP40hU5WkGOWcPG4pN2dcVX+EVn8Ga67wg4dfxq4+YBjf5/FZx3/5G9/njRev8KVfeJmdp7DgAEI/VTz2Pokemi9SMzkmqHEkOTxxEWLk8tUdfufrb3M2X7K3NU5dsafF48W1VIPvAfp4XJ068zEKTrpT/v4//Kd88fNf4M3XR/zuv/x9PvOZN9jZ30/MvvTQ/AnPrkD2A93Qz7hlD3fmfPRLcifLpwUxtPWSD959n69+7Qe89e49zhbThHKSa7SIaCSRQOsDwikGReBgx7BZSq4caHZ2NIXRBA/rZYsE8qFC5oIgFEV+lWp0HaEu1tUrYuDjR49ZrisEOY+f3MZ2Btc6nIPj6YzxsODx8QOmi1N07EAKZrMzqmKP
ohD4usFUOad3zzg5XnCw/4v86p8t+Fff/T2MUiyWLc54ZBXZOCi4+dI13vnBHzGd1vzyF3+V0wcfcnrakuULjHBkRWDerhB+hK4qlHR0oWUoBDSRB0+OWXeCECMyarzzLM8CzWRBlud437FcnlBHjXVrVsuOQTGm3K7Z2N+60P37SVcInnsfHUEnaFYWGTXieI7Nkodgd3eDIDXPvbzP2f07zI4XHD864+qNfbZ3J8yma5SW7G9soqLg4Z0TnLcUWYbMV9RtxAeHaz3XJlepmwWL5YrxzoAv//ItBoMSqQUnX/Eorzi+N8P7yGBScHKn5YWrfxb4zQv7fi80q1dpQbNuMWZAZgpMPkxdDZEeUu8cWV7SNA3OOVy7JjqPkiXC5PjQEkLAuQQjzbKK9bpO401nQSuUzhNXKTMgFE2z5BxCZfIRRZWi3py1gASZRpwhBEyUhKjJg39mjLk/vQQH+wfs7I0Zbe2TVzt0rWW6Fswbj5JQmj5ObVAkREkn8CH0HaPk9HXNCpzDpAkaIfoeCWOwPqJ0xfXrL2M7SwwtJlMYGRNTWwqkksk0IxTWOoiJKSeEx9sly+mKamODcnOMCAKpIRuMGGaC6CLresp8dpZyA4VMYnKdqPAg8D4wGm8gkDRtQ6ZEP2K56N0UuK5O3TmfujJKKbyWkJcp4s62PSbCJcVQSOOjLEvpMFIKnOsgerxzgH86ChMxhdMHJEVRpDFZcExnZ9i2BWT6+8H37usUFRWdow1rsrwi5gHb2XSoS4l3LplFRKTrEhBaknJUl4vVUyH+z2Kl59MS0YiQI0VD8JZH92d8/P4ZuztbsBupW4ttNU1zyv6lERv7A5arCU/uztnYDNx6rUQUkm6p2N3d4t23D8lizsmTGTjN0aNTfvT2xyi1RbQdtALlOkJjmC0CUbS4teL4ToPWGXHs8Nc9VjkIkYdPTjg8nlPkE2YnT/i7/9nXePRw+kw3Zjl1FFVO1BJvPYJIPpDkmeyLOAXC44gMtyRbZwuqI8vCDfj47EV+/tpXOZpWnKzeZGS+i6CGmEwMJlM9ZD6CMlQ7Jo1onUPlZxj1ryj1JzkYPsdHp/tUcYPF4RNW6yO+/c4j3rn9kC//wiv8mU9/An2ub+7bfUm+IH5cvEX6oo/euRuJOEAymYzJjOCju4/Y3RxyHtmWjB4XVfhFonA96zIm6UBIueqZ19jg+L2vfY37D5/jL/+5z/FH3/wOLz13g+df/gTayHT55TzfnZ7ekPaR/vtCnGOdRb8H6Z910fHw7h1+63e+x7ff+ohVPWcyrvn0q4LdLUGhhxgJxRBQAbuCx/dXnJwFTtdzPnoAf/BDQ1Vprm1rXn4p58qlEo0nLCSF1OjqEuOt1zF6g4tm1kcPs+OW+WrK1qigsXOODu9wNvesu5ayEFRFTtve56vf/XX2JnscTle4LiGUonEp3o+CK7t7LM8eUbcNeX6LZvXrhMYSlMJJiDJntL3H2dEDPnj3Ha7dep3JxibLM0fedJSN4MGTOdv7Y/LBLjEOMFlCB5UCtG+5/2jBcnWK1gX1KqOLNk3XTiWLzRo3WbFcQGcVQii0eZ48O2JQBHQWcf5ik09+4iUEddehbUZwkvUqZ7IhyDdgdlxzerpEVZ6DaxuoTKFcQZVVzKYLrj63z9k3blOWBduTG0z2M27feYLOgWioBmVizXqP8hkPz47RytGsO67c3MGvBbO6ZfHEsVqs6axFRBiVhixqGEjkdn2h3+4FavwC9XoOBEajS2hTocuesi7TA+q9J3iPEgLrOoJ3KJ2hTEUXAJUKCnteGPpEW2/bFSF0RNcgZNb39RXEgG2XOFejTZFutVKSl2OiyOi6JnVcBKgQyEWBMQq5tj81jV8k0EbNwu7x4C6suzO61mFUlowpgMoNRjqkq9kZa567MWR7u0qDnL6QCZmkyAUxDHr+YYKrKpUQK0W1gRSCLNOURcqq1CnMGO87wKG0xAePEAGtdYJnk6GV5Idv/YhP
fe7zRDTVoErjZgWNTcXpcLKLzhSZlkzPDgGPjwCKoqgYFwOEMiyXK5q6ZTq9/UyK6xgjXdsgIpgsT6PaEAhBEqIkWJ/0RKEf10afWGpSEVyH8xYRA853SawdPKkA7v88BpTK05ESPd63rObLJC9I4lR8DIn5FSMhiqcHpJCSrmvxcda7Pz1KJai4cw7nHFLKp/9tW5c4V8b8tD6O/5oNBdC9ASAQg+KbX/8Bd95/yK3nD/jrv/YFvvatP+LJ0Zp7d8/41M9d4s3PPI8rOt5595hl67hyNWP/qmZpW7Z2JFW2x8M7K37wtTsEl3S7v3V4wkfvP+Kl117jwe07tLMCW0QWdcdi1hHiinoRefL+GRujEXHW8YNwxKN3vsL0eM6DJw+YLzzdakoWBFqfm6Ce0bbElHCjMp2KPpW6dUmnq9Cq/7lLiVIenUt2tx5x5egS20c1rVC0ViFE4N7Zi9wYbyD5LsEfJTSOSNOLEANRRJSWjPaG2FWDaBzZoCNTf8xG+UOuTzZZ1Jssx9vU7gaP7h1ycnqPf/Jb3+Xtd+/xN//aL3Jpd6tPw0idbcT5qDOhglJx1O+X6F3vIZAXhku7Y3743h0++/oLSN3n/fJ0wvrvvpeke2amJdubGevWcnIW0NEThCUKT4yK23dv8/f+wQl/7S99kTv3Tjg+/hpvfuYNhqMJPHUsy6cYl6cdTvFjE8vTojcK2q7me9/7Ab/+L77NqhU8/8I1PvWJIfXyu2QypcasFpbWO+ozl4xcnUfoyNamY5QHNrOOs/maxUpw51HGO/cy8nzF81cHvPxcxa1yxP7+G1TjW2Sm4KJB7MF57ty/zWBngLcmcTKXC7rOslp0tHNH3L7E1uY+h8e3ybcNoVujrWM+X/UcSU1RFBzsvslktMXjo/t0Yc7sdMlyFjhat/gABkmlMu68/12aesXlnRt4u6JerhBOIEJB0x0gxs+Ra4kUEUekVJ4SzWI25+M7Z3gX2dufsLGxT+cEtknRcJnqyILFSJMYjl6jWDOowOQ5UmfY7mfDMxVCUFYF1/dvoKwiVxnbb1q0Utz76Ij3v31Iva7Z2Bmyf23M9Lih2DQEF1jOG7LMYMaG49Mzhtc3yAuNwzK6nLF1kDE7ttQ1bG4Pefj+ggm6/5wGHt87YfWkoX4Cg3HGjhqzs53hvcRsTbh0fYPD9Tcv9Pu92FGvUBT5mKIagUxCY+dtevBjBAJNU0N0OJtYej6INMoNAR88zqbRW10vCT3cU8oEmo3BQnQU1YQgdBqJBk/XNazXC1LmoyaSp0LILLG2w3Ztgg5nGetV20cT/XRGayYb0WUv8/BYpFzNPPRJHeDbwPRsQdNaqirHOcHjk8jbt+9z9WDAJ5/f4ObVERuDiq5d4XMNwVFrgSBlGCeGYUFZjimrihgc8jwdIiT9mokSKVJh7n0kzwtEFFTZCCEdq/mc+emSxfyMvSs7RDS26xDSUxYDQnDYnibrfItUCVGilCQrciTQtDVNu6BrHdYmiPazSFaIIeBtKvTFU4E3KDPAOpfctMH1GryUHCNEMss8Hav3xV7qhJyDmx0Rj9YDlMwTssUFVs2cdb3qo+kSykUqRcrx7T/X8sc4ieA9IbRpPKUkwcuUrCBSUVgUeY/4kdT1KoGhhQR+OnihP7UExCgJPftNBXj+xlW+/Muv87lf/Hm++u3fYbyVoTLDrRcqXnl9E6lqTuoWWQayypEPN1DGc3Unw+QFm6OKF19ZcP+Dh+TVkL3rmnIwYmNnyLvvvsXhxw3DoqVQOWuxYliOGAzHNAtJM4d3vnkPFwTvDBcc7M6IHhq/QomAigJhcuraEsOzrJYFJk/6VxMFuZKE4MjyLBmulOk7Zwksb7Rkc2PFwe5jfjls8mR9hfurT1HFuyxcziN3ixh3OV78Lnl2n0yDFAkILHSKCZNIGJWoqk0FswJdt1TyEePyEXUVWK4Kxjc3mG0XtM0GHz445v/2n/1X/K2/8St8+tUXkFHztN2F
eFokcT4N7V97Ugq880Tg0u4Gv/utd1jVLZOhQfTmpAvjMUUoc0E1zqlXHfOlQwYIQiNCxCEwQhKEZ7ac8g//0W/x85//OV795A6/9Zu/w5/94p9hZ/8AgUqufCHS8yfOEz8S5kWJxE1EQNus+b3f/wa/8Xs/JM9LPv/mVX7pS5/gyf1v82jdsW4cp/MFdd2htCbPoNSSQmlQgqrMsZ1lvWrZ24TVukWYgJOOh9OWDx9YHs8rJpff4IWdT6NNlaYqF/zOEwoGm5rVfE0uh7hgKM2QxeIJTT2jbRtm6zXj4R6SMQ9n97m2c5m2njI9fYASuxTDkhBzutZR6AFvvf11Zusphw/nCGPxSiFCSZRrhpXm8NEZo9Emo81dpicPWM1nuKixmeHmp66RlQM8HQTPOEZ0kLjOs1x8zM5my2KRWLmCjMEwo9gqEJlE6Ei7fIhv75PniSdrZIPWFdpIgnXM1j8brJUUgldfeoP//f/y/8Sj1fv8vb/7/6JZNAxGGYNqAEJw9GDB/LRmuFGxXHS0jWVrssH89D7LecOljZInHx+zvSjY2BlQty3Xnt+iGCiKLOPspKYYaAbbiqLMUm52phE+8uH3Tth9acBf/Csv8vDbntBV6EqQ7QmKbcfl/YML/X4vtPDz1pNvjDCZRumE2JjNTwk+4IVHyT4zMqbxjcyKp+Bm5zxdlwqVpKNyONv0PLYUvSONIIRI0wakSXw8ZIQg8C4J+6WKWN9gu2VCb6ikZcnznODTDR0VU1LGT2EJoRLIVXSImKNVTucamqZGCsHu7g5122BdGr806xWEjvtHBXfvfkSlVty6PuaXfuFVhsOKtl7jTY7UEqNzjK7IyhFlOUIpQfQiZUfKiPcJg+NdJBDIc4OJKdHEd447H97j4b1HvPHmVX75y69Dpv9/zP1ZrG7beZ6JPaOb3d+sfu2+O33Hw8MjSqRIUbJEWbYl2+WmohhwoVxVCRC4ECDIRZCbJBdB7oK6DWAUCoWKCwWUU4lhO5IbWVRHiqLERuQ5PO0+u1979c3fzmZ0uRhzb6oCl01Da/PUAAieZp+99pz/P+f4xve97/PSLWYI7/C+xWhNV3dk2QCtFUrHFG2mUjJH9I7lYgL0qBNdpKzWXKJ0ee75gpBG3N4FsixDikCwCSnT1pP0nfG2f/UGiF3fEXzC8mtBpEgrevyFoOsd0wluLYTpPUmWRb2kbRtCTCHyAY8gwZuTfEr1ncE0JnY+9IiYBOHG2+ROR4JSffQdSCURStLZLvEkhfy0TL2kMZxN+IpoEAg+9/lXGF/L+ca3v4v1E7YulwwWS65cW2N1M0t6lc72RpoOKSUmF+gsIrVksN6wfSXDFDmjS4bn3l5jOKxQQXB6smB22PL49oLpYcCHJfXMsXnhIn7d0SwUTd3hQqSqcpq6I7iAk4IoJUYrtCno2vOLL/qfWiLvGY0iIlWkrFLijtQRERwCSewTMoTqyErJle1HTBdrjIoBbq3Cd5cIZzDxYygKPpn9ZYbFPyHPJkiTCARCaFTfhZMogkiHQLdYIqxE+aR5NqUgky2F2mGYKYajF3n55kv862/d5b/8h/+cv/3XvswvfelzKYf6CQKlTwMi9K/6vqiTiDSJ8bA6HjKfLzk8O2NlkJh6xCfD4T//0loyHOXMp47GhvT915KmbfEJcAhCJvNGDHSh4w++9YfsH7zASy8O+drv/g6/8iu/wsrqeqIjiHRQAZ66eoUQ+JjiGZtmztd++5v88z/4gNHKmF/40kt86QuvcLL3PvuP3uP0rOZkWiOlZHNjxNrYpAJFC3CBGCSuc/g2oy1SNGMIBhcs0hievwYHkwVBr/Hiy58hK9dAZYg+k/s8l5Sa7dVtjvwBe7vHbG+v4jPBoLrIfB6YLed4P2FuwfolIu/Yj5btiyss70yZTwQhjPFihb2DOUfDimsjQz3fIcoTVkcXeG3zNR4vdng832d+dg9HzcXVlzAa9h89pOtcD/m/hRcaH1tEABUd
0jpc6Jg1A7z+DOVmYOVKlhy/9084fuzxRlGtDdm6cIGVjedRznJyvIfSHq0KjDLgZswixLw53xv4Y66iKPj7/9n/kZdeep0Xw6sUf3eF/8d/939lcnpGDFAOcybHS/bun7FxYYXJwZzb7+zyymcyqkHJ4myf+bhj2Sy598ku1aqhrArODpY45xBS4TrB/vEZ3sNyZimGORdvbNAs52y/MeCNLzyHjJZP6ofECqI3XDWXuLZpIPzPFOcSEwsAk5XoLCMvq3RqnJ723b40gpBKEr1A6wLnAt55tNJ0Tdd3Ai3OeUAiZUZwHdbWKFVg5AgpwHpPu5wm1ppJqQzaJNNCDC5Bc73D2hqp08jNtZa2dbSNp+vsM0mV+DctIST1okYJQTkQRO8psoxmOafzHt80ECXOe5Z1S9s2VFmKR6uGGwi1ynsPjvnkzm/z63/jZ7myXWHwoEJi8+kCaTRllROcRSqNcwk0rI0h+g6h0svWCoMI/ZhXWlZXJKvjS2Rl4gAqXWBkILgGoxTRWyKKGFPqRGgarG3TyD6m6iegKAZDoksO1yAkSmqaZ7QxxxCQKqWKyH48K5G4sEgh4yJA8CmCLaaYqvQ59AcDKRKrj+TKi75OHZyY0j49LcF3zJtZKpxlRoqZ7stJqZJgPKRO9NPUjT9TuIXoezSGJAbwOOiF9QBCJo6gsx6hUtTes4i3+7FWBGLWF34pvSR4ze7uCSenJ1y5ucXO4RlHs1PevHgFx5yjoxmHpzXd3DI7WHA/7PMzv7RJXTeMM4mwAbu0KA2bV4e89PlVNjeHSKsZFIbb77zL2eGU6fGUmy9uIcaJT/L5t59nctywc69ByFEqzJVC64j1SfoRokwuy0L1MXvP6r6kg6UwILVAa4nO0ucUrEdqnQ6v6gksWKKVYjiq2Rp/yO39nE8ObzIsK968HikHgS6sIbF8uPdLDIp/iik9mSrQSiZmYEzJMRGQQjFYyRFKcrLbMj1sGa7kDPOcTGsKE1gu71FWkb/51Zf5jd95n//u//016kXNr/3Fn8XInCgDIiqiFwjpezYe0B9UpEzC+7wswMPh0YTnrlxEBIEW4mkn+899KyPMZpaiUqyUQ6L1LFuJ1prZcobqXRlRRILwxAhKCKbNjEUt6WzDez98h5/5whdRIuCFToc7kQo+GZOkSApobc3vfO2b/NN/9W1WL1zkl7/yMl/+4meZntzl9vvfYPfwlEUTGJYVW1uC0QhGVYE2AiOBoFN3fg5L3+Fw+OAZjg1SKVAS20mqrGDj+muMVzeRSqWc8hD6ovv8lgvJoFNVFWeTObs7u2xtjcm0ZGXtAqoc0jWn5DFHHENbt4Qiw0m4dEVzvHfKYuJoQmTip6ybi+wfH+PnD2m6BT+485jv/3CXrVtD9g7vMDtrWBvdYG3rJr6rOTs9pHE+JRL5EkeC3puuI4+By5sXOFg+opl6hBkxGGmKzBJlzfhSjhoL6qllMA4s7D38iWU5P6ReNJQrJbkKWBYsAWwN/nwLnB93Xdy6yubWiMPpPpdXr/Azb3+Fr/3eZ/jGD79OCBGtNHlhONyZMRxX1AvLww9OGK2O2LywgdGPqKcd9dJydrhA6gEmV9zbPaBZOobrQ6QUnO5PQAg2Lo947vWLBBc53luglWFx1vDg24+pZ47LF64xvl4wHK6gy4ZwcL4ToXPV+IXQkuUZRVkhTUE6lEVAJeu9DChl0PkaXedwixneWXyQOOeo67OUahAlRJWKOO/RQhFJzpsoU6i2x2O9x3n3NB4suYMNUaieGyWJnaWtG6QqEEKTZbLv1pzblf9bV/gz8VxZptPLqW0oi4LFYklwHudbOmtRQjAaDFFC0HYdjXBUg5zVrRvMjwv+m//+d/lf/0dfYWtFoWS6v0YrsjKloyihISb2XsLVeRCBEFJ3KzeGGAU+NPjg2Lg4xrmA0aCkQcoClMSYAq0U3nf44JhPz/BK8+2vv8/Bo13+yt/6OVSeNqnhyhbSFHgRCCG5
qWMv03gmxXWMLJsGSQfeE0kbpvfdUzRLCK6XCfRdYeERgJSG+GdGMREFQeDaJV0MBAF5UaXvGR6BQiqJ7jV6Sqas6XRVaYQrelRGHySNIJBwEpIYEsNS9n9u0Udq9TUzCI3WBq0N5+mg/PdZIaYQeiE1EU8Ukd2TQ3YO7nPpyogZM07uHKNyQYxdcs+3gelZy+ywY3FkWU6n3H2v5vqtHNl52tMB73zzIcHWxNCxtqW5cCXDCIMO26xfydhYG3Pp8ohXP3OJvaMZv/3b72Kp+fIvv8An77V8/MEE2wiE90gdKAqFbQVlmYNMmkz5TLv2CucEhUxTgxgdQkiUIWFPREhdsRj7yUOaJFRZzubahNnyh8xbuLe3wZp+jDxdYOUNblzdZlB07JzeYFjdxSuPMU+wJL3rNkQIEjPQoAUXXigYHTiWZz0QPsvIckE1sNTL+9jlGb/8xRv8q6+3/H9+8+tYB//BX/4yGSoVbyKmd2p/Okn4or7zLQVGpoPL4fEc6yK5jgThzk2q4UOgqDQ3r6ygjMI6gVKKd3+4j4zpuYnRp7F5lAgtuHhxnevXVrFuCdEznSRUkpQS+rF7Kl4VTwwenW/4oz/8Lv/on32NQM7f/NlX+dkvfJbF5BE/+Pa/5v7jA0SES2sDVtcl1SqUGnIlUFIgZJKF2IUlzwxxIAhdIBsJVBGRmSJ6CBhG6hprl1+iKEYkBDT44LHnLFELQdC1GZ13jNbGHJ0c8ejxDuNhSZYpdJ6TVWt416GvVCircEEhRc5zz43I2OPuvQmLtqXzGX46J6Pi6HjGWd1x92HNUGkac0bXGrK8RJcVmck5O3nEZNbiokAYTdPOMdajbIe1M8rqIoenAx6erqKKgqoMZPIEHSLCKs7mpziWiVu67JA6UDvHYt7gW82yAbKashyiVTKyiPDpZPWWecmKWeG73/89tr/868wXS9ZWNrBTqF1L8IGizFgsak6PFphcsZi0HD48YzioeOG1Kzy4e0B0kbZ2LM46dCY5O1xiu4Dt0jt/Me0oKs3Fa+tkSnP3/V3ufXhIUeWsbpYsl4FyfQNnPJ99+S+yOt5kYv+Au7v3z/V6z63wC8HRNTPqZZucs8MSGdJmK59S9mXqoMj8ySSCQExGj9h3RbzDO4WQCiUzhFGJ1yU0IQZcsAg00pQY5XDOpuxY2yEEaCmJqMShi2A717soFSFahBJkRfYT67DEGPvxlMRHR56XtLbtT6mpYAg+ZcU2TYegJc9ylNR0bYdQCYKdDVfR6gq/9bU/4e/+7a8gaJ9qBSFgdErWSHiSgJJJOO5icpFmpiDYGussITpGozWISS8Xg0UQcKFDypzGdsTW9UWNRMiCTFW42YIr1y5jqlWMDEQlcS4QYkdVDVguF4lw33c/noVRtbMO18wQtMSQEhaIpEMCPml8YkgbqEjOWYQn9vBwKWRymYW+QHM+mWS0AR+wrulP7iZ9b6TsTRwgQiTKgA9PpAJ9PJYgba6kRI8YIrLPj0qImKQF01KTsEeStksJAUorlDbJZfwprOlswf7pLhc2RsgIjoy7O5+gS4spHHvHU+Z2QrCexWSBE3C6Z5nsdUz3An4p6RYdn3xvgqq38GrC/Q/3+fiDQ7IigI8YoTAqIpVlvDakWo+sX4XXP7/JzedWGB8Yvv77msP9JYMVxZf+wvOcnX2HB7dnlGqUipB+fN51HikCMiZcxLNaUumkI+vfW0qnMbjwqeMYQ+hRKQ6iQAqBkmmiujKMrFePaVYmVPJVvB8Ru2MmteJfnb7KF58LHO9t0azco6w8waWfl0xCDmRE5qEfM6dDc7UBQniiV7S2JZMaUyryXOG7Obb9kK9+0fDenYKvff33CL7jr//qz1PoHKF6rV8ksf36zp+S6bDYBtGXnYlr56VEn2dsWwQlLD7AYtIRhGdcVQwGkulSJR2uikAkV5rrNy5x+cIIQUo/0UphsqyPr/S9hEcjYoCQ4upiDLz/zgf8v/7xv2bZeP4X
f+PLfOVLn6FeHPIn3/wtHu89YlAUrFeG4QiKypMbTWYEWaYQMRJtxNWW+UFLVpToTECQ6EJgqqR5DBKEHLB68SsMV19CKo1+spdIyXn72YSQRLFNCDNkPGF9fY2pFBwe7hPxZEaQZwYZItK2KJNx89YrhE6wWAQGoy3WqzmPd3a4v6fx8zP8cgVKQWsFgygZj1dZ31jnwvY6q+MBhczZvbtkOr/L6dxRVBI32UfHQOEu4xcVx3bKcXFINlpSjEdk+RJYYr1AekN75jl+0CLylrIyeBsIOn3/2ug5mFua4yVBONZWCrYujChX1jB5dr438N9jLfaW+AJ+67f+GR88+D3OxF02t8Y83KlTfrtPnemjx1PywoDoONqZURYnXLqxycHuSS9Ni7gOFtOGbpn2pMlhgzQC7yLj9RErayMe3zvh0SeHnOwveesXtykHGbfe2mZQvkZZXOTg6AMefaSZnt5Fmf+5avwiqBg5Oz0ka5eopkaKSLAdmSnQSqesQ1Pg0fjQIJVGhkgItufuRaTSeN/gY4eIJVJLtCqJaIJ3tPWUrptB7M38T/MYBT46fNcQQof3Fuc6hEqjxxAimVJIqdG6QOnzbcn/21YISSPmvKPpUuB9XddP/992FuctSinqZY2zDVprojAIVSSelTJIM2L/dMIPP9jn7beuQ/BPQ+NDSPFSeZ4x0EPqZY0IIGSZDAQokDUq04zy7QRBbuboLEObMULklMakro/zSJKDsa7nSOEJdsYXf/l1rLVYOyNmhtwXRCkpi6LHKQScd33smXom4vuu87TNFCMcQYjeTBEAR0j5TTw5ZiSNoUfrhA7wLoGmQ/AIbNIA+g4pIrZJhZfoC0YXQoKSyjR6EkAIMen9ZNomRR/tFqND9M5JpXTfgU4jQNFv3FIIQvBPLRw+RExeJDal1kT/6RR+Z5MF/+prP+TCSs5LL2wRMsvB6QPyUmBDw2TSsZgXTHZr7lUd5SBy/Khh/+6Uo0ceaz1Rez758CH3b+8iK0WztAQpkCYnMwYdMnTUScIhBabKUCPINiBfdVyuKl59/SL3Pjlkb+8xn/+ZVT73hUsc7X6IdAJrBUFqOrdg9+iM0WBI7FmXz2opZQjB4F1L8ODxxCBAub7Sjyk2TWq8T25woSLaQJ4rVkcS5yYMsj/keHaRrrvG9cGQ6/odZhNNMz/ldLZkOB6mojHrAcwijY0VCf+DcMQIMpMMLihElJSdZHlksbVEG0lWRYrCUlWWixuKn3nNsHvwbb71LcsXfvrnyPNhD0SGJ4eV2B9SlBIEl8aZuVHJABVSFrU6p7NxjHB07FldgfULa3jXspwumU5qZBAIlSYSw0HFzZvbrI9GEBukihQ6MqyGRG/o6jol9/TGE4kiSkmIlr1He/yjf/IHLIPhr3z1Lf7SL3+etp7y7W/+Sw5OH7KxljOqDEUWyQtBlveBd7bHMnlH7CKhduS5IopANspYHeSoIoAU6f1CJAqJySqQSf/3JPc9EPHnTjKIxNai4mVibNFxydraBvlwwOxkj9l0hnOeFTOi8mtkgwEm2+bh7gfcvnfMr375L3N09/sot8ul7SUYxfjSCF0YlC6RRmMGFdUICm3QImdyuOT2dz/gxq2LrG5v8sbrnyfEwGJxzJ9+633O9k9QhWFwdYisBFpOECEBt00X6XygHkiqqmK6q/CbDbJwNGcd8+mS5dQTnEhymaiYtg3TkyVVecz61qdkcgNOpsd8/M3vcFLvsrrpGa1pLt7MuH8v4qxPMrE2EL1kPBihdUu96Nh9cMrp0ZL5tEvynwCToyXeJ/lBDGC7gCFNfLrW8sk7u9jGsnd/nswiJw0PrWV1ZYvxsOH43veY7e1y4+2/SD6KzM/Z7XyOWb2CgMIRCdahwiJxzEih9VEkwb9zjhCTg9dHkdhOoc9njBEfkkNTBoU0Cq0MJq9ouibpHRBoqXBdKqKkSk5eqTKICu8CUngkPkWUmSw5h9sFIiq0iiD9T7DjB1mW
E7xL2bZ5fBq03rVLiIK6WRBCoCgGCQMSHPPFAlQGMmKMwft03/ADvv/+I376i59jVGbMZ4cMB2OESMkSIcBi2WCUAKHQKifPK5yLEAxeLKiXc2xTp8KbgiIbUdsOax0roxH1YkZdzxF4rE2w0ug6UmcRhO+ITmCzDk2J61o8ga5teDKy9M4//evzXCEKDo+WXFhNHDCcJPRjokBMLFupemRFr6fzPh0ChEBIiRQCZxcQU8pGFAHb1SitkJj0vdUgtCFKQ+gNI2mom5AZQsqECZLJoeuD7wliqUcURUBJ02tfExg6BvFk6p/+kRAYnSGJ2J9Uksz/3xJa8f67j/navQe88FzJlRcqykuKdtbRhZqjoxmLk46T/ZYfzI6ItuXx7ilHR3O6eSqQzKhguGnQasBwK6J1iZSCxbKjGGTE2NI0sFhY/Bxil94KXjf4zFKNNJ/72avc+fCAjz/Y4XOfv8Sbb1/m4E7Hn3zrLp3P0V5ico/PBHme4brFU6jvM7kvSiJlRQjzpOF0SWcoBSijexlBOgAgBMH2/40K5KVhfSOwnHV4ItvjRyzbPeazC8wXa9x/BLn9gJOBZ2vDkpUS1wZ0DlJGIh6pUreHqAii71DLBDLOS42+aOhOHZO9FlkZiqFB5OCCIB8G1tY9tvsBtz/aZXX1dTY2XiAvV5EyuX5T7ZoOMb7r8CEyGObpWYrp4HJejqNIpPGOj+8cYB4co2SkbdNYVKmIkIqtzRVuXN8gzyQxtCglyDPJsKw4PGi5sG5ZzGsimjymZxDjUEiWizm/+Ztf53AR+NznXuJX/+rP4bol3//2v6Ztdri8kqG0Ii8FuRZoDbLHQMcuUC9aCmOQWULOqAJEJjEjg7SJbOhx6TNQBeXgdVS+gdSJORdD6DXP8tw1RNF76vkHMBihi0265WMigcIYiktXWNvqnedKIaWnbk+4/eibXBrd4quf/zU+/8Uv8sG9T2iCIXiB7wIqnpCriI8VZbGGzgbkWhHDkqOHO+zc/YQax85+Rz2f8Gh6lxdvvcyj947YfzDDGM/Vi2tM1TTt3TLD2JawrJkrRZ5VDEXgWDq6xtHsOVo6GtsQXIRQpGpI1gRh0FEyKBtefK5l65rmH5/rHfzx1tnZEb/xu/+AYM4oNyR377RURhMMXLlkOD6AyekS7wK2trT1IcokVN3stOZkf4mzDpNLgoNu6RFGILUguIiUT5BDMDtpiD6ymHZ0XUQKwdGjCdYFbr4q2H5RcGJPyTfGNCcPGd24ykazea7Xe65tLxsCIkZ0CBBtIvIrSOMtg3MB27VY5/AxpR9Y1/W6vnR6jjEghUYonbp5UtFal0ZtQpEXiWckhENnRTIYiHTSEiESXECoJOpX2iR4dPRobZARbO3A/Ihy/5NY9bJFykgIgeBrlNLUdY2QySBQVhXeQd0saJolQgimkzlCeGRcRwlDUeW03RIVIw8PO779vXf46lf/AqOYBM7KaBbLGUoI8mJIDB6tJc621MslUiqUdNg2MFy9gZED5pPbBKBu5kmbFgMHe3sg8vRFDQGsw4W6H5NnIHs2Y/DgE1YmhDnaFASXRtaddUTkM+nIRODxseXWtat0doaN84St8aHneMennDRBJHYOEV3685OKrZiEOhiTEbwjhCYVY8IkFppImxGYhBGKKXJKyIRy4QkcVySXuZQRJSLOW4KUiBB6PEdf+saEHVI6jbWkiFRFhospxcE592yNCv+WlRnFc9dLDh/UrK5eIuia2gus9+zsT3mwc8jZUc3ZzoLHBwdEJzBlhTIjTN4iZMfll0fcei1jZbTB4KKmqEq0ecz3v3mHup0zb4ao04xmaWhPW+r5EtsVtJ2lEx4jHZdurTHaKDk9a3hwZ5c3X1vl9ZfXeO/dB4RWsTLMWC5yyCWXS8OV4Zide8+uOyCExGRrWLffHzIhy1QyAeVpzGzymMaoMYHUPT1bTgfKQU6ha05PPNkYpOoYmIeM2h0WU8vZYeRsDvPGkbcy
BdRHAfg+iE4nZbQUyBiJKn0Pn8hmVB6ptnMQGad7HUZ5qjVDpgTWC7yFLAPr95mcHXN6+m2K/CKr66+wunoTYwZ9YQlnkzlZrlkfpdxwJ2z6OUJzHg+xQKBFerdb6+lEwBNBGYwx3Lh6gUuXVpLOz3cYDYNckGclu7s1Z5M5bb3g+OSQgCMdwSpA4KTnW9/4Hu/cO+TGpXX++l/6ErmMvP/938V1D1kZgIwKrUCRJBhSKKSA0AbwirwoUjyfkWn/0YGoUidSCgEBpE8aSak2GKx9jrK6RJQqHSJdwjwhBEqd/84iTMZgIPG2Y+odXd2SqYiPLQiHI8PXLbmAC3rEcPQCFy/e4Pqtl9k7uc+Fa1fYfmGXnfsnhBq8yuhmltlxgzIPCfkeucnIK0s5GLL13PMMRzWzg5p5p5OJROdINFuXFA1LvGxQCwh5g2vOaGyN14bBIGkej/Yn7H08Yy1bx+QFzXKGIr3vguilOMGgdODiVccLL1uuXJQYVQCzc7+H/67VhinlrZp6opidtnjRMi9bilXDrfVrDN5Z0izhMJ6xdC4hbOrAxuY6SgkOjk5wNhnUfX9O00DwaVKkM9lLkyLeRaYnDa5NcpHlomUx82SF4dKNtRRdma2wvXWDclBzeLxLV5/vhPJcXb3Ru3RCDenvpUoROyngPsc6RxQyjRODS+5L7/ChSy81mSFEwLoWKQKZGWK9fwo6DiEZFZD6STxlGqMhsHaBtW1yxcWIUhpBKvB0pggxPaSiEgwGa0k/8xNYMUaI4SlM2hiDtY6mSagapTW5gKPpMZPpKVlm6FqH6zqi8FhvOZueUHZ5j1MJKKV45/1D3v7sHsNqiEbQdpFqsIY2SUdm24YQBFKnUax3jtpNqda30GoDpKYS29TTE2L0uM4i8GlUGjqiF3hv8W4J0SFVGqeHGPC9S1srByq99JIb2/aQbpcAtc+gI+O852wewAypqiFi7Fguzqjnh9hmloC4MSZtlNTpCYyhH22l38Nai/cO5xuMMYiYRl7eR6QKKFUiZJm6iN72AGjdF7NJLpAe4qYvsFXK7yUio++LxmR2kerJhqATxkJA0rJaMlXSBoe1T6LdfvLLWc/p0ZTT447941MuXPZkLqNuWnYeOs72De1JR5gJYiMQWjBYNTjlaEWALrJ1ZYXNy4JBERhczJBFzsq2QXaSyUHg9MiR60AMGWdHluW0QcYMIQUuOuq2weSSwYrHhsi9hwuuX59w6fIq1wc5DTmj9YxmPOb2ZI8bSvDWsOQ3nqG5QyAoi21Ozj6mk4FF61hdhYgnl6rvnKkkG9A9KlmIJJMQiqhh+8oKj+6fkeWGouroomcoPS/clHxQe5omMptaRkOJsQ1CalSURCuhAHSff43ou0nJ/QwxGS9UYLgNw/WMWKdOxPTAUm3m6EoTfEB7CVbg3Aznzjh4/DHHu2uUo+usrr2Clqs82Nnj1o0LDMsM5xuULAjRnJ+GUkCU4SnqJESBFJrxuOLWjYusjVeS9AePzqDMNSIq7j2a0DaestJMllNOTofoPEOZDKlzpGz55PYDfv9b7zFeqfjrf+VnWB1LHt79A2x9j0KCEelwoFTEaIlUDhEgNKCNQOdgcgk+ovLkhI4i9I5oj+iB7Z4AMsOU1zD5Zu/0FBiT40XE+Yj1Hn3OPE4hJEX5HMNySiuWbG9sM6mXdIsZGoUPliyOGIxXee3iZY4f7zJ3HW1Zk40MP/joHnePj7ADw3BryGJviq2nbBdXkL5m1k1ZLJe0Wcu4KqkGhiA0Fy49x5V1y3F70Ju0WoabR0wmp3SLhqOTE5x0uDygtSLLK0Z5gbaRR3v77O9MkVoyN/tcGo2ResTjs1NCTJw+4QXV0PHiK3DjjVfp3AGHzRGl/nQOwCjIM0nbRB58MOHmF7ZQhaJedmiVcTZZcOUzq6zbjHvfm3J59QKXL67zq3/p73Fje5v/3f/lf88n9x/RLNMeShS4JulWkUnbJ3pQe/QxaeMDZIWk
rS1aSzavrvLgkyOaiSUrB4jsOdZGJSOl+OGDb53r5Z5f4RcDUUiCT65apEBFiQwe50XSNGUFoWtxjcO1Nd4/gelGYpRIYbB+iVQZAk3XtQiRUeQl8+WsHztKgg99Ke3oQodzfYElJVLGHvKp02jtSaoCCa+hswJEnsT1P4ElhKDplsTg8c5jrUVrQ5ZlNG2LiAEfAj50DAYVbduRZQVFUVJ3Na21aCPpukjmBZ2IZEZw98GEu3emfO6n1pEyoyqH2LZBRpd0dhHKaoj3jrpp0VoyyNfAeoRusX5GXM5+ZEqQEW9dMteEFHcnBNjYoYQixISioX+RPiXnR9+ngSStZvCpu2W9eyZZvSEEprOapl2yWowJQVAONsiyIV0zYTE/ZHaym8wuKnuqwwshIVaEBG876mXH2dmSC5c2yIsMHzWZSLGDCImPHhnSdabDhklBVlEQQyoiJUmDRRRIrdNn7B1C6iSgh15fCUprhEoxXRGBCw5nHa1PnDpxziOiH3fNzmr+h//+2xwcnbF2yzASBlErdj+Zcvv7B0wmC2b7M9p5hwfWL62wcqWicVOUzIi1YW1tnbVVj5GBYWlgYFhdrVDecPxwzuM7FVcubRJFzd6jE+anDhzgPb6tqa1CNB6twXvPo51jdh5V/NS1Eb9yeRX/yFE9P0SVgo+7ObdcwapT6GcACH+ynK0ZrGzyaMeg4oLQeZpCIpQiw6eYPqvwPmDKCFr0TDmF8KljlI8Ut16tuP/hkrVRjpQO6xpGIvL885IHO4HpWWA0tEidOnp53neFY28u6Q8EUskEG38yGQkh/b2MqCxx/vJhgR4ZbNshbMBNI0JrdOFT0ol1WNkSfMNi8pjZyfewYYVB1fLyzc8hCTjrkxzmHMkHTzSuUQg8gsIoLl3a5PKldTIB4DCyj4lUhnoR2Ts4ofMdWmZkRrDsOpatpbWWznmytub+/fv84//vH7FoHL/+q1/g2rUVdh99i+nZh5isQz/N+U2aPSRpmuQlIgaMEuhSgAYRFB6H1OJpcR1CH3MXEyTeiyHZ6BWUHiXjl0gYl6d+bCFSu+ccV4zg/Qq5HrJw79N2llFWko836GKkXcyYzxccnu7xO0f3KHPNG9df4M2bb/Pw4V1msxlnjQcK5HLJZ65f55P9uwzWDPN8lywsyQ4ExkRyFcjKjHp5zM7kjI1ijcF6RRSWgwcfM50fUS8nuOiIORhTUWVDpNGEaDg5bjjcndEtn8hfLKdLwayt2V5dI/gFkYAULaPVyNalmpvXXuTm1g3efXSXLgRKPh1Xr4hJ/Rpyx/rzGYuFZ7o3YzqZsbY2wsaG0Yrm2sWriLDHe3/4iGVY8kfv/S6XL/8vKbVEa/GjWqQPFiCkjp8PAmlAxIi3af8weSIBxCiRWtEuHHd3DjFa8dKXbnD14kX+8Bu/ycrgCpcuvQz8/rld77kVfiKCJ9J6j4wRQaD1HudqVsejFIitkh4rjdMkIfQeZyHwvk1jO+twPvadFodQBW1j0NmIKE2KxAoO29X4YNNLJSYXsBcBESJSGnwIT4uaNIqTCJlcxX/0vfssm5/MFywEz3w5R0tJ0zRoZYCmB/56siyj69qnvzbPC7TOEpam0fg+2m6+XCKiIxuUDPIBQtS8++EjvvCzb9O6lulsQZlp2tamrkRRYV2XulEEhNJ0vgUEtj5DdS0xOkKwpOCdACJlYCIlPgSiDwhVpBcaER9ij+VRJJxJctmGCMiQ9EFCpFa3D88E5xJjoG6W1IuWlVEqmLROhoosLxmvXWZl9Qqnh/eZnR0SfUeIgs5a6DuuUgvaZUc7B99rZuUT3VPPN0md6b5rCMTgUzfRd30x2fV6rxyBQsTkopYqKf2Ap65iKZPDOcaUbhMiWAvLpkljvU8N3pwKrYPDCTa0eNVytpDsPzzjW1+7x+E00HU1XeOISlBUcO2VAXIcMPUAHSOTesJyMWVYrYJ1FKSRVFcviCrSzGsevb/g9VcTe296mnK4vTV0
jadddDTTQHvmODtpyVY0izZwZ2fGc9ue1ZEB7Yk4ilLy4mjEyqkhSvFMb5vrJtjFbXwsOTk5Y+9Rywsvajb6kX1ZSWRuMYUhOosQaQqBICVxEDBacvFqQVjGdIArAJEmD+urnqaFeuk5OXUJio7HoSlMhSQmxItKG4gQKZ029jqh5CRP+JMEJZcIBZWSMCgIXqCNZ7LjmD12DNcl1XpGXkRc63EavO+wYY+Xn1N07rucHO6xsvYCWX6d2Ktmz2sZBFFHRsMRN29cZDRK0hAvAlqmIqx1koP9BZPpBIVEiYgpa9ADwBD6+Ll6WfP+Bw/54+/d52zR8Dd/7Rf4zGs3OHr8p5ydvk+WWUSMSGlTkolNm6sSyRwjTCSvMoTySatrEso95Xpb6Peo8FQ5HIkiQ+qLZOWlhIUSCePjnSO49C5Mn8E5a/yiQGWafPU6FyrN44fvsmgmzJopRVFSlRUiN2SVoO4itp4xPztlZXOTblZzdneJp8Q0Z+Ss8Mu/+Ksc/st/wP3mY3w1I1pFnueY3GPKDNBkmYahxpeWZnnG4e4O0+UcYSTGaLRMh1gXCxZTy2zmWE4nuCbt+4hAkD650FHIOGfv7BRUTiYbttYc1Zol6MDByQ7m0XGKXVXJwf1prOAj0+Oas9NTZgtHN3UcPD6lqTva1lKOCw7uzVAiY317zOUXaravl+y03+e//e1jygG8+tw2O0dnTKYN3kH4s369GIlOECVJvyvTd4YWEJF8XeOngVvbF7n1/GVGz3+Gif+Qle2cXEp+6av/If/V/+2/PLfrPcdRr2AxneK5k7pA1hJCQCrNAxW4+cJLacQWQnKYCo0QHd7XhB42rHVFCOBdYLE4wDtHtEuEygmt6Hl0PukIdY6KBqLHOU+MffqA1oQYUSZ1+4JLmipvW9AlP/hwl51D+xPrsMQY6ayldo4YEhYkuEAIgQgsl0u8dfiYou06W5MXAZzFWUfoOkT0tPUSlWlMG5gtl1TK8fHd+xyfnrG6UiIzhZAicQ5VlsaOUrNcLsmzguADra0Zr97AnuwQRZeA1z5BtHsnRE+fV0jpeBLkjog4m/Apsdcq6lyTlUOcjUTnIKTuRCr4QD3Rwp37DQVnA52PfUZnTOkYIukLhRAMV7ZYWdukns+4/9GfMj87xTU1QmhEEBihGa1UrKwLhFFIqdBaJc1ejIgYiK4hSk0MMqFiokP2+gDZ6wCjzgnINN5FoFT2lB0Y+7QP6EX6MmFbhFIsFi2LhU+/vxA9j+zTAThLKVDGYEhGl3qZc3pUc3hUs35tyGCwxu3b95FaUgzg4rUBczFBDhR23iGkYHFaI7ttulrQzNJ35PTEYYXB+Ya9uy33PlgwXh1wvNeACNStYDYNrDUwn0UmB47JRLI6FIy0YnZcczpZMsgdZiAwSmBbQW4NRgpsFniSkfxMlojMp+9ghGQSIos68Htfm/LamyWvvWYQIUUmqiz2OdChJwUkd68MgqghE5pLL5bsfxLxEaIxdEik6Li0HXn4WDKZRJDJQDB0nmgAoYmxx/0ICK7vTIlkM4oB6DFVEAky9onLMk03iFQjQfVyjm8LumnA1Q1tE2m8ZH1LE0JCExWFpOtq2uV7nO3fpZm/wpUbP0OWbZ3LrZRSUFQVly5tc3FrJXVLQ8AoRWYURsN82fHR7V1m85pBmaGVIy8tuhijsxJtDK0LfPjxLnd3zjiat6yNKn79l3+Jn3rrRabHH3B69A65dkjlCCKgpUehCDGmd5qOaAWmkKiM9LziiMo/ffaDj71eOGVx+/AEGj5gdeOLmGwt3e9+UkMUeEgFXzj/N542krVNQxcPMeMR27c+w/z0MbPJHo1tmC3ntLamdjVCZGRhnY1qi1wP8PMBK/IKmXMoAtefr/hn3/pHeBXJumFKJQoelRlUrshEgXESWY5xvuXO4/scHd3DNwIvNUMTiTKj7gJ1Z2nbGb7JESJDKIVUFiVbCAYhI0Y8QV4FPJLhoOGFFy2FtMzq
SCfhdFFjHufkmwWoJZ3/tHimno//pGZ2alHbhmVd0zaWetFiH1mUzVi0S5rOsXVplY2LYxaTGqRAFEecLBoGa4bPvHmN7/72feYTi8kVWgjWLlfs3ZumQ1vos7D7/Wv94pCXP7/JYCXj7IHj9Re+yEtvv8THJ99l0F3l5oUBeqvFre+c6/We46g3spycok2O0gaFQpvU2ZtOJhzs7bO5uZU6H5DyQYNAKENuMpTOsdYnPZlw5PkQpzW2s/1m7IlR9x0UkkBfgA9JdxQCKJmhlCH1VEPKX5UCvCfQ8eDRgr0jnzJdf0JfsBAjTWtRApx1OB+QMiWLpLGNpAse2yWkjTEZrmvQRiOsI9iuR6ik5AIResupgaPTKR9+dJcv/NTrKXdTeqSSKK3pmgbnLfloC2MyDh6/S7Qddv7DdDKNlqBSoYZIHTo8yTnMk5de31WJvbZSJfE5QqKLMaZYw/s51iWkitYZzrVptNqbTs59CUGe6fRiDn0mMf7p+FWJ3kkVI/lwhRff+jKuqWnqKTFKjg4fMj16TOg6dJ6Bt3Q2Xa+SGikFRguElhBcbxLxIDxCPmH7RYQwgCQGDyp1+0QI6J7/JvuINikNUmWYLEcohXWBpg2E3uaVQj/kp1b4CUjRaK7BOoFbOOZLy9pWxd/9X32F73/nLgfHe3Q2sHVpxIUXc/LFCk0biDPB5F7L/ff3+ejikNX1FR4fnNFYwc5Hk/66NCe7NV/7px9RVApbR3Su6IJlulgQ4pjgJYePTpjuTlB6RF5EjvyE/cc7XHPrSN/Qzmoqt4UzBp+D6eyzLfx6PNW4bOmsZ7wuOTnwPLhbs7GmMdcMSjkikbxIz7MSoHKT9AQqdbwDgayUXHq+ZP+TOUKUyMzRWkUpIpcvaO4/sBwfBro2wgVFHDpE3T87AdCyn2wIkLHnMSdMUBA9oCXI1AUVyXgkRS9tiAJTSYpS4bymvtvw8TtzXngx58JVg6wMmVV0WpIrQdd2LJsfcO/9+2xc+ALnEds2Gg354uffgBhx3hEFZHmkzAS5URwfLfjw9i4HJzMQkOUhQa11RRSG2dxzcnTIdPIYZQZUqwNefe4Kv/ilN7h2Y5v6dI+jvR+iVYc23VMZiiRpfJXSRJWE9zoX6NzQD50SCUAkHmiSsqSDi7U+dST7e63NGkJUCEzaY0jdKSUUwSXUi1bnndSbAP35yNBFR9sGospRGzepzAXUdEHIGnLheW68QtNpllZy+aWLfOP3/zX//De+w6yz6JHGF3t8MntADIHN8U1iE4jdGlZNUCpD5hGvNULmDLIR1le8+fItTi88z4MH7xOD5uL6ReaHGbG7g2+PUUFjy0FqOAibsGohx0uHQqR7FwVCwuqa48arLVtrGX4pcIcWbyOthIPO8mLYZBk83/nep6Pxa5aOTx484sblWyAt02XNIBuxiA3LRUtZAgJmpwvyPCP4yMnRDKwgrldcunSZs/iYzSubvPTZS3zv9x9QrSSGcblqWN2umOzXRAtBCUwhufrSGjc/s0n0gdmRQ+aCdz7+Ex6dfszNlzeJ1YTh0HDh0vO4o/m5Xu85Apw9MgqikkStUX2YNiJBF05Pjlnf2CD4mIT1IRCFQSmdRoMu6WZE6N912RgRBijt6do5Xdv0I0lwtkv6MZEA0T5GtFTJ0CENkNyWTyaNXnhmtuCTR6dIVZAX5ienqYppdCikSJ1Hb9MpSylcl8wQiawQcdb1o9+Otu0wShMBH2zKiQQ6JzAuUuUFIUju3N3hzc+8Sp7lEF1KsAiWiKYcjfG+5ezwcXqJ2RYhl3hyZJAk1nHoY8+y5GazBiQEm0blQkhs5wCdNhoEVbVJUaywXCxolnV/ocmdmv4yVffPwtUrgDzTFFmv39Eyvehl6poZleLvgOSwkxozGJPnQ3xwVKMx9YWL7N6/y+nRPkIrnE2Ih8FqmTKdg0NFhVKm56qln5vMwGmj6D0bT0/46smIQiTji5SGJ7FYWhmklFjr6CzJ
aKRS5q98ki7yKa1IpBIZbac5ObDM6iVnR0vGw5y8aMk3WsZXMqKHm8+tsr4qESpjOrcchA5n4Wyv4Y9+9x7j4ZCzpgFdYoKnyC2xhc5KprMaTE45LBEuo53U1CeGsAxslZscRolsDpmdOMygwZnAg/sL3EmO3ltQS0/ID/FnHTfXt7mxXeHPKVLs37SsDTx+VDMaKzbHBtuBeDmwsS5R0tLWEm9rKl+glMSUBmHSwSgNDdP3MvoUE5gVmgvPjTl5NEeLHCEkrbOMBp5rV+HjTzr2HoAWsQeqKyQabyx5oVESnBMJE6RUyqmOAokm4rARAjIdfKJH9SNlISSEhMqSAla3NZ97e8hwPUerDpTFGI/Wgk5IjMrJc2ibCad7v0Oz/PNvOGWRM6gqJssFOpMMMknRb573Hxyys39C3XQ474nC0yw0XavRy8CxDpjc0i6WrI1KXnvxEj/99nO8/MJ1yipjfrTD/oPvIpiicocIgahIUXXBEKHX4yVHKVIScUhh+olG6r4kEHxMfEghsB687Yk2vmC8/hb54Mqf6ebLp2Y3SBKO6APynPGw6QwRcFbQRo8ynkx78kwxqNYRQdEphcwk2s1ZKxXP3XyB95oTqlvrnD3YZfL4IerSAX4l4W9MadDTOe1ySBsDo6IC6TBmiMoqhsWQYjRkdXSBza0rXL15iyhq7FTw3t5DhM2JC4UdFEQZEaQ8dEVMTMseYSWEB+nZulBz7dUOXUbmscbkmnIssMcRbCSYlqPFkr0dzeGd8+Yg/njLdYKffeOX+fX/8G/yD/6H/zs0yRe4Nh7TLQKzeorOFfW84fRwivcR5zzdPHLsZhyEI8p12Llzyni9whiFyRWu8dRTi/e9BjRGskJz49V1xlsF9989olmkkAmlJCIqrl5+hecufwU/NrjZPn4Ku9PJuV7v+XX8QoAgAZ1QFtKkkwCAUjjf8fjxY1ZX1/qHJ/bpHBJCwDlL19UEb1HSpEF47McaPBlxkByjwfeFm0juYCJSmXRyC0+iu3yaUgqN0qu899FjOgtZVqB7fd1PZkWklDiXOpeR1KlyXYsgac9CTPo4JSXWphzhtm3p6DUPImnuvA8oYQg+dS29E+w83sW6FFVGTF2HpluidE5zNiXQkakIXhB8SxcExhT4kDqOyAwjU0HqQ96jqCJBWLTS6WfqHO8sQmTofERereNc6m4BPYok8fIQqQg6Z43z/+h+ViamxALbIKMClaNznTosPqClIpDuScrw7F3g/ci1HG7w/GubHO3v8tE7f8p0sqStPc+/kaN1QMiA7IGk2uQpjY2ky0DJXqag+hFuclGHQJ+0IJDGIKVGGYVUBqk0deuwXpCSBNOIVybpZI/O+PSqvyAjaPjgOzvkI8utz11icrSgniuyqkBXBrxkvFpRZJLCBHyRUxUFhCWSnL0HC5bjQGdaBquKqgqYEGgWEoUhyzyqjIy2MmwTkNEzOew4fuCwwdOeeNZG6zRmSlw6OiWpG5AXh2zeuMzWy5u0oeGXvvRVRmbI3h/9Hu6/fnY63RgSm28xDYzXYHvdELzDkT7HswNPOTQU47zXxgpi1ITOo7In+sM0Ro/0z1MJ61cGzI/bhJkSCqECaxJefE7zwceO3d2AcxIXHD5AWSQIuTKxfxd4fEjJPEoqonCoCBDwlqfd7qSpVqjeCBz7U4opBKMtTTYQSZfYF4V5LpA60jUgWolAQWwh/vnB4qHvxq9VJXkWyRQcHU+4fXePWeNoraVxltZHEIYlOXk5YDgYs1JUrK8UXLuwypuvXuba1ctUoxK85eDubU73PkTIM/KBxxMQ6J5BKEEKMq3SZxFCAi7HmNI1pE03RgSC89jOI/OUvBFd6sgHwFpJdAOk2UQK3Y/rQnJnxthTLAJKQRQBF877OY4oIdBRELoUoYmPDKRkY6NERcXR4ozGLVEqcGVrG2SNKYesXtmkKQL5+pzTdpki0YSkyi/QUbMMNc1xxdaLIxRTBnmF1oFoW1Rm
aLpjjBYUhSJEw7zdJYSGaAu8yhAqIKRDRI0IyXQTY0Cp5JQoVWDrSsfqtQZZgI+ipy506AEMW3ATEJ3j0WLC4f0SFT4djV9ZFLz0yoss5YzBSkm2pVmeNrzx81uc7jTsvJsRVUCWgsWioWsC5ViD8LRNDQSmJymrd3MDlJGc7TdED/Xc94QHSVZprj6/hskUBw+m1DOHsxFlAmsXhtx4ZQO1dsLjx7d5u/oyixD52Z/6BY5tw/+J//O5Xe+5JncEIXsXZMJYSKkwpmBl40LimCn9NEos9u34VNSlbk2WlfiQHiTvUmestU1f6KXOiA8KZZ7QEJP8ODM5RE0MDqFCcv7GpHlTZsTO3pLp3GGjQLhA23bPph31b1gJiSBRJsd1LdF72q5jUA2ol0ussyij8Z2jrhvKskQpRdc1tG3LsBr0GjSTNI8RvA+pMDSRum45m0wZVKrXlgekSmNKrEXlOY1NkGuTraRehDREBVW1SVmt0sx3klavjzdz1qJDwLsuaY1ERGWSvFxlsLJNvVyyWMzRWvMkL7OzlhhBK52KIPGsKr/I1lYCYltH/71JL2RtDNE5YvRIkTJCU00VQYZeMJ86lzHAxatX2dja4qP3/pjZyREidClzK6Q0BtnjOiQSZDIvySh6c0ff1Ym9gUPIVAzKJwVh39FDMJ3OmdUdOhuiVIK+PhnthhD6jcQ8o/v171ghIFXH6rigaCybGyPGoxGP7x0xOWkoVYFBM521STowGJK3gdBKXOMSiJXEIZzOF4yu5lRbgUGRMWsiQXYI4dHCJ7bdmmQkhyybGudGfPfrB/z0i+v8rb/2Jf7ef7zNH//gB/zGv/hdiixjc7jOf/Kf/x3WVlZ493vfom1LqgsriHbBbPGITD9DV6+HuhGINo1rNy4YLm0XPNypubPjqDLFjVt9F7czRB+Rw4DUKt0TpXrndyDE1J1WSiArGMsCtxPxvqPVhiAUG2str71i+OA9y/4hLFrHhUuO1ZHBB5/UHZkkU+kgEnVyqQZCSklREi0FAvVUhxtDX5C6nl2pBVFKVJGcqz6k5OrgFZGIlgZZRLSEJgaCy84lD9k7z7DUDEvJZDLnw9sH3Hu8x3TZUVY53dKxbDxlnpOXUFWazVHJtYsjnr92gRtXL7C9vcFwVCEInO7cZbr/gK4+QhuLUgGlLdEkyUb0CYSdUhNiKoYFfW5xIDw9qMXegSkRWuFtwHqRtNXeEzrBciJxTnP1pUHfrEj7lXU2dblkyj8PMRVo500yeLJ/5CKZ9nwH1gWKkWJrvEZdN7BY4NwSX3u+/bs7fP+dj1i/somVAqUNbdOkg2aQ5MUKa8PrTMU+MpxRGkNVFtjuBGMyxmXGdHEACla1IKBxTcPR4R32HzyEbhVjHF53vWNVE51P2nUhyaRDa4PRnpu3Frz9+ciHB+Bi5CnBGAgK1AqUdWTRKhaHhhh0Ksg/hVUMDO+e/jb33hNcfqtCrW/y8ONDjMl45acvMRgc8d439/jpn3uBh7u7fPDHB4BAaLBdw0peITLFvGk5O1wwHo7oFtOEAvIRpSTVWsbqRgXA5KShawPWRpRUVMOCclBSVSNW11ZYWx9y8+WXebw85eK1awwWi3O93nNsTKcxoPARr8CIlKHoo6drHdWgJDdFckj1wtgizwkhYtukC9PKpHJQBPAdzrs0Co2p8AtIdJanPFgszrcYU6WTKwl2mgCfEaVysmyEkBkPd/Zo2pa94zOKogbEM3Gc/puWFII8y5jNZn2CRCoo6kVK63iie2m77mmnD5mKuxBSOznLJUSP0imWzvmIDooQJYu6Y3I2IzjNqKoQSiSnGRBcR3Q28eaEQMoSkwKU0VnKQW59i5AZSqeM3Rg1eZalsbQ2hJA6sz4EsnJE23YolVMUka6rn2oWE3w7oU9CTKHrz4LjJ4Xg4tYaPqROEsKkbkZIo1gpeoaef5LBqxPb0afNT5DQF87bBAQfDHjt7a+wc+c99h99lBzR0WHynBgV
3gEqoRtinxwThCQ80RFFkP13KSIQUiOFeurodc6zqBuk+pETUBuTxuz03VIBxnw6Gr9cKq6NV/HG8dnXtyk34OO9EzYGAw4OHrN5eUhctBzdO+NuYXn9c6+hYkS0jnZm+++pBeGRqqIaDxltFAyzAn2oEKqhZUmV5RRDg847BqOSIlYUPucrX/oK/+nf+Y9YXxNEKXjzp17m3scPuP3gAOUN2bLFdseIs5rZ/UdMxR/ij4+Z/OBODzN+Vivpu4pMEKzELiPZMLK5mXH7w4bFMvD4ceS55yKf/bwmLwIyyxFWkZeG0LokKiNBnpU0yRxFQBWe9cs5/l7E1wGqiBOwttrx2Tczvv+O5XA/0jbgrnhWvaPMNL6AoFxKnpAmQZ1lSm1QOhlEgkqHaIHqf55Ivy5I8ALf/SgVJIp0jUJEvO35LVEiDWQ5OPtEnP/nvJMxsDib8fFHp+wfntEFx9l8kWRAoUZnnu1RzqDKKcqcsjSsDYasrw0xmaRzLfX8GHu6w/x0j25+SmYgH4DOHKrsIHcpVg+RJBnepwxd7REy6Yd8DD9yg4de5kIgeEEIDu8Czkq6JTgLy9PA7u6E8doFrOt6PW7aOwQimRdlSIe4+ATffd6u3kiRgcTT2YgJEREiK+WY9dVL+JHlaLbP3cN7NPM5QQ7ZWi1p7RyjU466HtXktkC4mrXxVYpsiDIlwZ9RlQ2FVoigiX7J2698hj/+4Q/YO7zP7r5Bxsjx5AOWzSluoSm6DI8joohOIGSvoxcBE5OuOcs6bl3t2LroUEaSj8Au0yHY9dO+IKHMoFqX1DuRupVITUr2+BRW27VMz5bUnaKZBaphhUTzyQ9OeFwtWdsccvHWCqMrmi+/+QbDwT2+/4cPkVLQBZjUDbc2L7FSWpaHMFrJmJ4sscpjMkkxzKmGSeJRzy1t43FdkgyYPEtkCiSzk4ZCWn7t7a/gYsONjW20lLT+fAvic41s6+dpaaNFgUjixvniDPCokSLTJpk/TP+jJSitcDZdWHppCbRSWCXIi4KUX59CdlxwCOHABzJdElCE6PrRb+xHc7o3mVTUdceiFlif/myTyTFS6p4f+OxXBJ5woVIXSeD7cbX3HhsczaLpC19N2zaJn2dd3xFNY3BruzRGJGAbQaYrpFK4LtDUDWvj9Z57KFL7Ofhe4xMJUVCWFb5b4hDonsuXmH5dKpaeALGVwruOKDJ86t8igiAgsb7Ht/iWGBw+SLJ8kLoLPZjT+yQ+Dy48i7oPrQRlqfEhoWOESlqblI+buGVSxBS31jMivX8CWU7fL++Thu/JH09LzY0X3qRezFguDntav8B6hxEiGWp8j7ERAuv6JBiZIBCpr5O6J0+//yF1ipZ1KuglpFB5ElpISYl9ktbRpwl8GssgGJOzHDqOhjXzxSkXr5QcTi2TtkMdzxFNyWJ3ygM/5/iBT6fY1hGspGt8ui8yZcaOVgXjFU0hFUp5lBIIHaFQmMKwtl4xGme4zmPqwM0LGSvdKe7jKfXRgvxkwV9c3WB6e4e9b36Ph6MRa2pA0y54/GiP7UeODdugTjviMzR3aJkKpeBAlT16wWvGo8hbnxfsHTeo4NnbdVydVFxZ0bjgyDLZP+eyz/I1BJFGkKCRfQawKjo2r9cs3psxn8P4YtKODoeWt95U/OmfSh4/CDRN5MLlyNo4Mho4QiFJCouAkRJl0iaqjEBJD7FDBA06oTeETExVGTxRaFwHJhPIGFBSEqMkCpeKP+9S5mqUIJNpRZ4DnmQyW/AH3/4hulAYqXDRkhtNkUdMYSirFapBRVkNGVQlZa7REmJoaKY1TbvP6YlDOItWkSyX6FKQDRMLLUiBCzJBcrGEKAgxQ8gIKmBjKthEFAh0ryN3fXiAIHrVI1ki3kK3hOXccrRn8cqwur2CLrJELJA2df2lQkjZHwZjrwUJTyVJ57VEFOTRJAakcOQ5jIYDLm9fZ219i7pZIKRktRhyPJ+z+coVrl28RH02JTZTJkJw
FBYgO0IOw8F2X56GpEPTFmRHlimWzSm5rriwdonAPk0dWCwieXEJ6lPm+4rpyS5+IBFlwg0JAcLJ1P3DkxUdL79huXlRMpsKlovAeqVYNqCJaARBpgMJMqAGAT1IKTUxhvODhv97Li0M994/JIpAOTIMipJ67tndOUYbzWA4IS81Dz606FcLXn3rJgf35jy8e0KwgdFmxuYrhrWtLd77vX2Ods/wPrm+g0+fo+08zdLhu+TKj5GEfgopd1oEzeLIsVEM2Fq7AFFxebDFu++/z6PF8fle77n9TkKgMoPINMiIC6GnmCeuX9ssKcoSk+VIldhrkDpcvoeZdV1D30snBjAqx+IwhcHH3nnaOQI+cYRVithKCR2xT1goMdnwKU9ttmiZLGpMnkwg3tvkLDu3C/+3rxgip6enSCEJPXcwhKTnc+GJCSEVel3r+k5VAp4iZRp7+9TFjMETZXIyO+fpuo4ociKOxWJJWfTFj/Sp+JISGyxlWeKjoLOQZxnWObRKXcVoPVqlwGxrPVmmsF6iVIHvem2CSJ9v1znyYshiPknf2igwJmMxn/VxdLGHVT6Bcp//Xc4yleLkRDIUeZ8A0lon9EU6OCRKukCkqDaZfq2QqesnZfp+CZlMFk/e2zdffIMP3/vjPmEgdT9CIMU79XZ0EZOr+YnTXMjkyJXSPDUyee+TiDxA23UJ4yJTmLvQEqFkrwlLvzZhET+tcPKIUY7aOj58f4d8ZcpKdomV1ZLxOOPsaMbuo1M665hOA49uL9AjCGeeycKDCmRa4r2iGGo2L6yytanxdURkkRCTFk36QKwTHb8YOJbLSDiF5fsPOZn/MeKspq6XmKXmp82A5eWr+Noymim0qxmPwFvH7t4BpcphkQwnz2oJAVWWoUxEiYAWgq5xlGPJhU1N6zR1banWJa6paZdDlPFILO2yIx8VqOAQRZG6a3EDbV5HFFexriQGi3V3uPLqdzh8uMPhQ8PmlQyX1wgBn//pyPvvez78yHN2LLl8DS5elJSZYzDMUycS0CGZ12gl2gSkD7SnlrPjiKoU61sFZpD0gUoGlpNAuZIR8BijUTogoko6WJV0yASfQOdCImT+57+XgMkUSkiiDyg8w1FOUeSUVUZZFqwMRmysDdga5QwzQejm0LbkfYEWWkGRKXQGOk+d8q4NiX0YA62NTCeWyVyxWEp8MD1Uu6MoIuurhtEIyirtHSHEhAjzMnUCAYKgqx31rKNuA+WqZqA15XiElHmSjARPjPQHPfE0mScB4lMm63muGCKTkw4bPNqUXLmwzs1rL7C1cRNjKvLujJvXnqOzNS889yZz2fHJ3ff5zu//kLOjFlMEwsUZPrMoITiZfkgRCxZxH3nxFCkdXmyTqwLvl9x/dAelhlxev0RrA/MmsqzX6Rjg797BxxpvQuoa0+JsCmcQwjGoBLfeslx8GXQdMXPJfOHZGAuUkoTgSDO9pFUlCFotWCpFsFkyx/DpSF60KehqmJwskEZg9DyZqxqFrT2Lsxl5pdh7FDjdb1hdr3jxjessTxQPTvcYb1bs78xwTlGt5bC7AARKCESQZKJCC82yqxFBJTanFL0zPCN0EruElStD3njzVTo3ZW4l1/RVhusbXCqy873e8/zNotJIpUH0Vvno+u6GIEZHs5wzqMYINM5FQnAIIVFK0znbJ0aQ2sgh4kOf20sS1utMMndLkm1LInWeCp2+2xd8h7NzkBFNieqjVtqmw9lUPIFMmJef0MkijT+Tnsc7h7UOKWUPt05cw2RISSfHEBzWRqRKUOLkTg4pVs11ZMYQZY8SiYEQHE3bkGcZzpPMMqJLL1glUcZQNzVdp1BRpMQSEdOfQyTzjZUSpQxCGXyItBaGgwqlPb5dJmdbFOSmoFkuE/cvBkKEydkp1jb9Z6CSKD4+xZ6e+/0UAqwPaB8Q3uO872HdaTNQWqUCTaUDiPICbVKBFcOTcUzAZEmfmFxoaSw7Wtvg+Rc/yycffh86h8zSPw9C
IDygY981kAgMgv5z6AESSYPqiFLSNgnBo55gXZSBHhejjMa6dAix1pEr8xM0G/2PV1Zm3HjjEtPTx7z80hpbWy+iteb1n5a8e+8B0xz0WDPKDG6muPf9JWFUM9ubISrJeFPThGSmGW8mLdTGVsnB7Rl+2RKaFhMyRjKjUppZHaiswS4bfKtgf059eIdsIZCiw0aBCi2/kA9opef07gGz4RyrcobOskrGwfEhbcyfacydUoJL10pMKenqlKzRLCLSK6KwXFjLee/EsTwLTGcd1fEcdAG5TCd4JfGtR5qAZIti9CuUw7cQGOrlnOnkEBufY9FMuXBzwWBU8+ATy3hzwKDKyBS89WbD5obnnXcdj+5ldHaVm7fWyfVVzGiIkS3eLWiWtzFiSfQ1MgqcDExmjoOPLYNBw4VLBaMRDMaCZgmqFCij6LqA9BKl/I8O3CoSgiCqFQarr6LNN//8N7OfgIboUTjKTKKrko1xzsbYUGrFIHcM5ZRsGbDLDgUYDUoEMq2IKqCygJCKrk3hAGGZpD2Hpx13Hrc8OgzU3iBUgj+7YFnOlzSNRQnHeKS5djnjlZsZm+sZWqTcWKEjuEjXdrRNil/ThUIZQVMDsQTA2gYpUmEiRUKSRZK2Or1LBfacMWHORXZ3F2gTufXcCs9fe43tCy+iZPozZZni1vW3ECqnbhwnpw9R1SobL91Cj4+Y2e/gyyVSgPGwWD7k4/kubeWQMWKiwLoThvl1hPR8eOcdbt54i+AKykIyXslYG69zaeOrfHftG/z+P/lNnPSMTeSkhRgtWqZc6Fuvd1x9RSNUJGSBzEhmrUcGT2ECy0ZihSCLmsSy+NFoXkiJFI7gPp0DcFEM2Fjd4ORghm08DRbvIlVVcfXiFebLBUeTY0JwPP54wnylY7EVuXZ9i+npDJ0JpqdLFmeWC6sXWB2v0rUR5z1KakIQuC6mcIbeES6FTBpaIVGyoKyGjIo1Xn/1Z7l89RZHbU3rPQdnj7myff1cr/dcNX5I1f8vCTmjiEiZpU2PgHOeyXTK1vZlfPBJROssIdgeh5H3mieZTlLep/ixfoPvYnK8mnzQN43BB48P6Z/LmBgitusQFBAdbdPStI7DoxN0ZhCizwr8iZkoI3XdIEUkBodzyZHsnO2LVsjzHGstXWd7w0RqeYcQcM6hdNKgJPds+m9kUi4jZKSpa3KToZUgyzKyXOGsQ0qR3MNWgtbIrKDtHEVh+oZdMiVEIrP5EqU0eabJ8oJl3aT2fUw5g8F7fKhxzvXEkuRKdT3z72mkQExuuOTkewY3OUZCTOPG9NtHvHO0bY2UkhAdeVakh0rLxEATIWFuZXLi/UiAHXlCYQkhIIVidesSr+QlOw9v03VND9xMonDnkmPZmKzXPiZXXwxJTE6UxChoraezCdkjpE7fZ6mI0vRdRoWMgdZ1REBr/alx/C5c3+av/Se/wnf/i/+Ks+Oaw4MJdddx642CGDtUhGFmOMlaQueQRccv/q1XcCeW23d2+PidM+ZN5MpFTXVRIKWnXngOH1m6iSE0ChMyPrOyxueuXeQHkyPIFNIKKi8Y7XbMZnvgcoqRRhSKaAPRJZRQhsTYkkmV42zDw8NTJt6y66d0z/AZFiJ1gpUSINLhrRxpfHBIJakGgotbGXdnDUIKvAnYzuFyTTXISLxLRfAXKFf+ItXwMwhMYpdKjc4Kglhg4xaq+ArbtySD1V1O9jXrF69SjjeYzfa5ePmIW7cOmHSX2b78OlGvcBrGHPqcNWas5VPc/C6T5T3WwwRXf4DUCzavSNquo6kFB3sdezsZmxuBjW2dYiPbhEBSXhAMqVMlIVKgsuuUw5+hGr+M1N/7c9/LzJRsr95g1u6hdKA0Batjw8tbkdwI5o2HeoHVAZ0pjEndPaMhKzKQaUy2bAKd7WhtMrU4GXh0YHn/fs20Se+tYZn0iZn2OBfJTcZ87pkvBY9PAjsHDe9+0PDKcwNefl6zOhDoolfmaU0+luQyQIgs
5pHJssCU6yiV9iahfvRuDkRwEmPSq896gT1nV2+aEEmMjqyNcwbDcc8Q7f+90FSDbV64MeDrX//n7D3Y4ag5oawMYU3RLBb0UFWCiHRSYOgoej2nEhEnzgjhCkolzffH999lc/UGlR8TFmdE23JjZZvHt09ZqwRrsqGLkZNgiKEiV5bXX2q48EaiA4jocVomTurc0TWRUQGL1gMSJyJOaDLpUQjyXKK0I9hAFJ9OZJv1nr/6l/4G/83ef83xyRmEBEI3Imfn8SFlkfH86xd5cOeAdu6YxIb5rOXiaIMv/OILMO748HuPCbVB1AWXL13k7/2d/5Q/+c4fcHp6yrxNo3GpNM6FdMhwDglkecbm+jqvvvoqX/4Lv8jbr79FAK5WY05OjtGmpMzOtxN6zhq/5HZUPdxWSg1SE8mRyiNl6nRMp1OyLEergtAH37fdkhCTCBoS/DGdQiNRBISSBC9A5vioU8ESberfPOmKPTlaxoj3LcaUCKAoSpTOaes2gXTdk9ixn8zyztOFBJy1vZbxSccxdZ2S80cKg/e+5/mkUSVKEoJPYmWdEiaM0cTo8d5S5iUCwXK5IM8MMUqcS/dEKZFydGVERoXrakSQNG3E2pY8yzFZRtfWKAld1+CDQrsW27bJriMEeV7inAcRE9KkJ/4nVEpG9me6Z8F5RAgEkQr2818RXNOLkVUqEJRMxg6lMTIjNwYXA0pqOtuhZExZujGNZ1xIzK4n5oCk90wvHGMyVjcvUFRDbn/0Lm09TU7pkPV4oieOwK5PBEwuYecDIhqaLrkGtUrFXErvkMn41I/HU2fUQUg/T+kMY4pncK/+3UspQSYNdmb54N6MmgWXny+x8xSB6BcRN3dYHJ30nPkWlS3I1izlakG2VjGyLW9duMJUTfEzwWTSsHd/Tlt7nIjIzDJUgeu14iQOeNw1dNEwCpIiSpwBUWS4KqNWHRPr6BBMGwvBUnea3d1T5s7yxx/usDOLWDqm4c+PGvmfWkJGkIJmlgxWTesY9iNSTTrEbazlPMpaDk8s1QoY7dHGY7RHeoPO1qlW/wKD8dsoleNd6DvPkaIYcvnymK3tK5SDAUbnbF623HhDoZVGq4xAKnS6tub07Iw2luRZztWoOFt0zGYZOttkKxtxsLTYwXOMyheY7v8Jzt1n62Lk5CggYqDtOk7PFNZ1bPabiNIKrUEBQnikGlJUbzJe/wpKb+LwuHO4xyvDdT7/xl/gex//S3xM04mVImegFB/eO2TeBq5s5UQZ0S6gteqNZpGmtjSdp6ktnZe46Gk6y2RueXwa2J8L8kLwwrWM9dVIllnyPFCVBm0UMRpspzlbRPaPHDu7jpNjyw/v1hzMSn7upwq2RzFhX1LoLq4TTOcdu4cRZMnKykaS6hD7gzuJU9dngHuv8MHhoiTdzfNbSknyLGc8MpTZKhJBCMsUUoDqAw0KlIy898fv8qcf/IBGz7n82utkucYv0ufnepNRnxqIBpSMeAJadLT+lFJWSJPhguXodIdBPUHngfnjR7z/je9S79dsmAU2g3cWhs4LlLT8zC3J3/jykG/EKTXp/YZUqFxg5pLZwlOtySchHiBSlJ5DoxFkA4kxEevNU634T3qJENm4sMnnv/BZfv8P/oRm0SGloLUtzgWG6yUbV4Zcf3WdO+/tsXt3Qtd4ZmVHHRpuXtjCfM7wzjcOODyd8jd/7S/x5V/4PO98/Eesq2v8H/7+/5YKQVGWKCFpu5aPHz2kykq2tza4fOECw/GYP/rkB0QpyKVmvmzYm0y4tHWRT/a/e67Xe76jXmTKjkQRkYQon/JrQBFjos0v64QxGQ7GFOUobc5eEEIfIxYDSJ00fTEVRynI3qSTqYj4YJG9qF+EgPAhZfdKjZIS1fPWyiJPJ8c85fxub21wcHi+Qsl/9/K9GSJdm3OhH3ErjEkg0bLIqOsapURv6OhSJmKMeGeROnWbhOgFxaFDxOQGmsxmjEYDQvDEKMjLDKKgbjq0TqP0
KA3OWvJc07Q1WiX94GKxQEuB9alDFRE9VDqmn9P/u5RT27urQ/pMlM5S8eMDShmk9DR+gQC6+gzvn8HGHCMxdni/wHeWkK2keyPTBkesIer+ABHItMA7j/cOqRRCSbTQqYvXjwqzzGC7hhhDMt8EhzEZt557mbt3PsA2NQLVd2vSu0uSIrWSOihgnUDgiaTvn+ij2JLYu2d+pZkQUqbiMWUMpy7tp+TtIHaW5eNdXrmi2X71JR4fnvDcT61Ts+DuwYKzsxP2pg3RRvK1LZrXfoqTkHPRPmY17xiXNfk4462NMT88qTldOKZNy3La0NUzvG9QZkguK3I/YFsZ7jzeY/+wxlrJd8aOsm6op2fUwjMZwqNJ5MTOaFEcHS0ppOWzL68xGFdM1ySdUIw3huzdfvhM7005LnC2IzSGOx+0SNXw3AsZwS4Zr+vEU6sUi85yNunIM4PJI53VaJmRD19ntPIWSpW0XdvrPTVVOUpymP5Zjj0KRJvUepKid4gLxaAcURUDqmoFZz0mz5Jeynvq5TDpRhFc3hpz5/5tnB0yvPxV5N7HLOZ/yubFE+qlY4BCaVAhsjhN785yoHDao3xOUV1ntPYlqtHz6HyVSTPluNljaet/9436Mdb3fvgD7u8fsLGecXVzzKVVw3SxoLaB2dIzrT3jGKlKQdN56sYTQwLftlHhQiBEi3Oesy7w6MBRx5znrg25sqkoMwvSp+5+HqgGKYFHGoESsB0kz1/PaNvIvceWH37QcbqQ/PBjwfbFDGNSXNuyCZyeRQ5PJK3NuLC9glKCzjYg06QkHQRBYBFK9e8NjZE6MezOcRVFTplnaFVgxIjoICkMe/B/BCEUOsv4+V/9JdTVnMnSMnc10RYIqenwaXQuI1kEE1NDpRMhwa2k4u1rN9g9cJyJGVGGFALgZ5y9c5/5ItI1khUJ7aU1fnh6zCKmqMIXNjS/9tmKzdKzWi9Zxg68SMYrrcmkZLb0rK4Fcq3ouic0hD53WkqyYWT9oufkUYb4lHAuKMHcOF77/BtM3Cl33t9ndlLjukCmE9kiesna6pgv/dIGx4cTvv07d5if1jz4KKHq8tJw4eaY2z84YTga8Y9/+7/lYHrKoBhR5ZrXXn4NISTepWncpVs3mS4bNsejNEGTgs9ce5H3H3yEEBm3trbpugWttXDOEoJzzOoF5x1Sq56fFHoYcaLJB5HMCSI4NOkBmvopVTUgLyryKoPlHCcSpNJa24vnk2tSqR91lYSIRNmjNSJolWFd19PZ09w8kjRxVVWkkaSzdLZje2uDk5Pjc+FT/bhLKUnbplQNKZNTN8bUrQohIkQgy1PBEULAGE2RFxBTCLjWSTfxhEkniCgtyTNDNzlg56MpxWjIxUs3UOP0IlI9T8xaSRE1wXcolRJNMpNjbQuxN7oIRZbnCCmxbZM6jiQ9mhDJlZQYf2lkKZQnkBhWPgSKYYlE0TUz6vkJxwc7eLvkWZguIwHb1mQKPA4Vhvh2lhzOQRAyha3PkMagdYHOSozJkpYkWHyMGJPc4FLGhCPxHoFMjK/QJQeg9ORlwYsvv8F7734/xeXFhNMR0iBURZQ5geQKjOinXWTvXYptk6ZHPqROqVAK1Y91I2CMSZ+Jd4RPCVwanGN5PGNFjPgPfvolkIL7zSnfuPsePgiakwXtkWPbDvjs88/zydYmxytjMh8JsoFFRndWMbs+ZGLPmMzmnJ4tCbXExIKmcwijuDtz/OYnjym2Sg4Oljy+P0durvPBtubSCzc52Zujyfhg5y4fPDrAxRl5PqQJhlwrVjY2eOnWCqO1Ed/9cMm1l65x+84Pn9l9iQFsE8jKSD7Mef3NyJ1Pau5+UnP5qkFkGWUWWFnp2LnnycpAYVqMSYLtonyd1Qu/QoglXVMTI2RZ9fRQEGMi+SuTOJnBBayLKCmouzaNFaUgMybpRLVG65Tkk9BzirwqiT515bc2thHR8/5H79A4yeDSa5TLTaZnH0L2EMICJTxagg2R
RVMTRaQYrrO2/guMV18hry4gleGsnfKDo+9jREah//ydaOtqpsvHtLZjZ6+mRDGiQvjA9jinaVqaDsosMrcO31qCF9jQ9tDkQBTJVHE8CxwvKm4+d52337zGxe0RZZ4RfMd8PuHsbJdlc0SIDQSPDKmrrbVAaMWgjKytZFy/UPGN77ZMZoK9vYwrmw3TmePkLDJvFUEm44lSGpWVZFnZo6pILbPgSUlSFusFRiYh/5M87/NaxmSMxxVdZzmdzVmdz8kHKxhVpHdWP+WKOHSRsbq9Rbu3y4d/cJvl/BR10VAWHVGDVBHVJmVdomlIdIBNNth0l7FmwbReYDKFcykxylsITpMZjw4zPj4paEKBEo5hEXhl26TvKJJtpXjcyp4CEbEaTKaJjcW1kVJFnIh4JAqb7qMQaAFblx31zDKbVcDJud7DH2c5Hzg96/iZl36J++8/4Gy9w6iCdulx1iOc5uyRR7gpF66vsbGxwVd+bch3vvYxew9Ouc8pg2GOVIo8N3zzB7/F0eQAO1eIlYx/+P/8h/xv/rO/z63nbxCB737yQ6QZ8Ob1myxaR+cttW24vLLK5miN9ZU1bJDcvPI8MXiubHz2XK/3HAHOEd9zo5wLT/4RUqaCMHqbEj2QCJkeYqKgrpc45xmMVzHFAOkyum5JjGB0hnMWUCjVj0li6ixqIXtxd99aRmCyEqmyHoOSzBJVmSc0jG1RynBwcMznPvc2e//yX5zbpf9blxAMhsMEo27B+a7Xc6XCM5KuaT6f9xtC7/TsNV+RBA012qCVxBNxXQ2DFaSOVNrRTs9oZ6fMDg+48vxLbF65SiUGCTRqHUUh+xGFZD5bkucJBh10+rx8P34uq6ofQcu+jyWeagAFiQ+GiKAEWiqMyTHSoLMCvGN/5zYHj+8iRPf0/p/3ijEmyJZXRCmpZ/s00pMVJUoonMvI84pga3y7SzYcMxyto7OKLMuJwRGCI8tHxOgQIo2nUbIfkzuUzIgRlIpkZsALL73Ghx+8ixYyFXsiR5LjYi/0pt/MSVrHSCocYmcxJiYHu0gYHS0SRkbK1OlLUG7xqeFcCOBbR6YM22vrRKW5e3vBhx8c4VYL3AFskfEff/FXCItj3pu1qMvXiCtjpg8PmdWSxb0z/uHOt3AljK7A/NiSy4wsC2jTYSQcNYGzxZzrw8jF9TVmx4GjacvDb7/D/Jse2QkurKwRDSm1wmpaGShLQfCavZMZL9zU/N433+fBceCdDx6xbNpndluEEgxXDc6mz3RtK+Mzo4zZrCHYSGghHwsubA94/07HyYmgyCJaOaIfs3XtFbQakdQOCqM1sX+epJYJB4LoHegCHwPWuVRZSJUOGh4QDuUDyhh4YpiKiQUphMTHhG2SUqJNwcbqJg92H3EY5gxHK2yt/TwqRhbTB0yPb9PZI3ycgILaa4bD11nd+imqwTaOwN2D27y7/z6j4TpvXHgVeQ6jyyyHv/03vsifvvMJ735wlzt7E05mc165OqJSgrwMaGOZLy0+0LvuJTak7qbSEesEe6eC1q/wlZ97g5//+c8xHo0hery1dG3DoFpnPNpiOp8wW5zQLE9wdpZkPyrJPOjd9KPS8/YrBX/8rmfnQGA7aFyBiJK8UiiVpXdcnicTR5QEfO96/jMX513q+omeYnDOmDBB5MrWCvvHS05OZwyGp6xuXMJkClD0mx31smX/8BStBaaquPT8Jrunt7HGpoOnFhRBE1SqXnOZcEM3t17gSv08Dz4+AAPdoqG4VBIVT/eAPDfkKjLwiuA9MSq0FFwtBKUSHE0Ma2sD1s0SGcHJiCSgIuRao4NguYxUlWAWE/Q+sa9SUwhhkAO4fPUqB/vnm0n74y7vLbcuvUA0FX/r1/9zrvzR7/DhnXfY1BeQSEbDMWurA37rG/+CB++fMdooGY8rNja2ON1rmB1ZmkWyNHoX2D3Y43h/TugE9TzwuTe+TDUsCd5zND9j6aYcLO/yJjfQIjJtaz7Zu8/l8VtI
5fhw57tItcagGHM6+xglts/1es+t8LM2cHLiGI4cg0EC+GqTRM4+1EgVkCTshpAyuVtkn8bhHYvpGcqkTNPgXa/hk0jZx5w5ixAp91Iq04OdbbLle9eL6DWCZHsWMWlp8jKjKg1lbujahtPTGcvmw56/9OxXDIG6riGCMTkxKFCp4EuGl5YQRN/pM4mlJVTPxktRblrJp7+XEBKtNZnJqQpFpj3GFAiVOqp79z7Gty1bV66R5SXOJ85almUIBErlCJH0L51dolVKvvDeM5vP+j+D7/l4fXcypDQU1JPRgkRIQ4iKTGcIAg8ffsTuo49RwgIaJd15s0yBVHcGb/GdpKmXqNV1RPCoTqO0RJjE+KOPx6snJ+SmQOmMGDUmL9KItjeHhJ4oL6JPrkOlEUKSa4PzLUIJti9cwkfJnTv30LoCZYiih/NCz5NLo/EYUv6uj+BJkVDGRIzUaKNT/nJMB5YnRhOBfibMwx9ntc5zMLUcTgI/+GQfISTv377PxYtXqH3kw7MDbl1/jUsrF5jtn3D9SPD6Y8+2HrCfXyPYI9rtDe7WRzAoaJf7VGWOlIpFgLgIRFqqUjBWK7z5/HNcvrbGnXv/hMXE46LBtqB9ZLQCyyYiUsVDU9dkQXDzhTFvvn2Ns5NdYnQs2yOkX32m9yzpuALV2GC7gAsNOoP17YwgILqAkJ6iNIwGGQengdZvI7JNLtz8CtXoNawV/bsuvWYFkEJgEidS9s9TjCltJ5MJmB5912c8K2JfZYQnbECRMj2j66MsVZ+JHgJSGra2r4HUPJ48wipPzAcokbNabbCx/SZdM2Ny+jHz+R6LdsqxzHGLh6zFGmtbPjj7mPXxNq9tvcJIDRDn8BDnueHtt17ljdef53vf+yG/8S++zsF0xp/en7CSS7bWDJWIYFLqZwgCqTQ6V1gf2T+Bs2XG9tY2f/XnP8ebn3mOzGR42+I6T9fWtG1N09bUTUPTerzPCGEVa0s63xJc0ismk1rE9brqtVHEOw0qZ5CnA5hQCQQvRcoULvI8TUm8x2ITzw8J3qVc5RiTVl08VY+c21JKcuv6FZTa42ze0rQds0XqhmuVDBoAZTnkxRuv8+D3P+Tx3Y+Y+vv46hjjO6ocNkYCox0hRgaloMphe/VNvvql/4K7/7/27qVHrqMK4Pi/HvfRrxn3dOZlezx2bMsIIkU2IpjELEACFhEPgfIF8gFY8Un4CIgVC8TKEo+IBcgIBLEiLDBIluM4eJ72jPsx3ffeulUs6tpkEwlE28bM+S1mOVdzdLvnVNU5p373HtX4A3Z3tujOAmHkCd08llWFOJWjm+SsLg5pD2smM0U/C5xuOVzwbI8CSweW9kqGrYfxhiylURTo1GBVTPyWeh5lElQIzc6opwZ0cNiQYpOM0dHOfAP4bzrRXWQ422cj3+T82jqvnX2Xv27dopMPuLiy2Zx81ZxaW+PHP/0Ju3cPeWQLymmg0+oxqsdUUxebJ1uW8eGUuoxXrOY25eaf/8ja9VW+871vkxmDsTnDhwdMZgXdPMdMFIaS4WzMyf46H+5uM5rcJUtzXPmAVud/tLmj8po7247Woyn9xZLBiZSFnkJp31yjpagpUUphfXg6Hy4ehcU5SKFwsVuTePwYQng674xmWztOSVdPOyjjsRpNm7RpOl8h/mOt0Tah3885uT6gmO5TlAX9wUJzhPzsheanNgaDJXiFcw5rEopy0iS//2o2sTbBuXjcaGw8un7SxauVxtiEPNNkWtPvJpjMYJJYS0Ht8B72t3c4Ko44deYsaavDrJgB4H1Cllmcq0kSgzU5ZVUQfMDaWPPnaojrvPA0Sfc+xA7ioFAmwdicdrtLnueU1ZTdrbs8uH8bmDUrUDeHef+fEs8QqEpPkWiybBHI0QZstoBOU9KsS7vzyd29iqos8ZXDtk1TN1RiTM6TRiDVdKDHzvJmCa1iV6kmNn+sLC9Te8Xe/gFlCBBqlI8FywZi92+AEBSh2bn1zT3StNtx
jEtThlB5B/imu9jH4/sXdFfvtHQ8eDwm6S3iV5YxSvNqbpl8+HfGj0Z88+IFTvtTLGUJdlzy5Vsj3t4YsZBbSC7ivn6Jg1bG7fJj3rvxPju1oreyyIN72wxLSza0aF9xQjuS2ZRbf7jNjfdHuNqS2oTZ7BBCTZZbrl57nZsf3OajPUetHbo2XL58nu//4Fv85he/xk1gdbDEvYdHLHR7bO8+u5jpRJH3YnLuhgXpgqV2HoWiOKpRwaOMRenAYDFh9yBnY+Mtzl+6wPLGZ8izPkrn8TOsNU8b3+tAVdcEPNrHtMqkBtPUfWqt0MGggia1lmJWxnr4YOLcTRwpzZgn7wlKxcVagCzPSdKMvJWTpS0mk4JOq0PwU0ZHIyoqVJIz2LjCClCUU+i0UYnmo8f3ubX1N9pJlzdOv0FXt6grN5fk+sliNUsS1pfaDLKKOs8pdGwg2jv0jCdxx1Qb4rD4EKi8pwqWbt7iC1fOce3q51hd6RNCzXg8xM1KinLKrJxRFDPKouComFGVsayndnH+na81LoAPaewEt/E5hkCaV7gQaHVaMfZaxRlrKg6Rbucp3seNiDgqSxO0ogp1M72gxFdl3B206fwHOGvD0uopjNHsH46onGJ/5wGj0T6nT56jlfebs5WKkycv8JUvfpcT+Y/4/Z2bzCYV/T5c+axmoasYT9uMhppee5H1hde5dOYdBoOrLH7tDId7P+Tg0UOW0i6vLKxxf7rF+PEBWgWSNCHrGJa6nlN4HnvH2Y6jl3uscsxKQ1FZFlWfC91XGRvFsLhDWW8REkdmE8auQHlPy9SUXmNVQAcogoHgcB52DvYIc6op/U/54Ln22luMigpFILUplzc/H6/91HGv2FjLV7/xNqc3znH959fZ3XnIm196k5t/usHPfvVLyllJVdb4OjS1mAadGSazI9asITExX+l1umy+sgl4UmMhBMo6jrLZebiPJ7DQXWV9cI5eprn9jyF/+fi3c/171bzm2Sml9oB7c/llz8dmCGH5WT/kJYzLPM09xv/n8Xwu7+QnveTxfGbxesnjMk//dYyPYSzn9l4ew9iBfA9+mvm9V89rkLEQQgghhHixXszUWCGEEEII8dxJ4ieEEEIIcUxI4ieEEEIIcUxI4ieEEEIIcUxI4ieEEEIIcUxI4ieEEEIIcUxI4ieEEEIIcUxI4ieEEEIIcUxI4ieEEEIIcUz8EzeYduhxBejTAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "hotdogs = [train_imgs[i][0] for i in range(8)]\n", + "not_hotdogs = [train_imgs[-i - 1][0] for i in range(8)]\n", + "d2l.show_images(hotdogs + not_hotdogs, 2, 8, scale=1.4);" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "数据增广" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 12, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'torchvision' is not defined", + "output_type": "error", + "traceback": [ + 
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32m~\\AppData\\Local\\Temp/ipykernel_25960/371673525.py\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m normalize = torchvision.transforms.Normalize(\n\u001b[0m\u001b[0;32m 2\u001b[0m [0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n\u001b[0;32m 3\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 4\u001b[0m train_augs = torchvision.transforms.Compose([\n\u001b[0;32m 5\u001b[0m \u001b[0mtorchvision\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtransforms\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mRandomResizedCrop\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m224\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", + "\u001b[1;31mNameError\u001b[0m: name 'torchvision' is not defined" + ] + } + ], + "source": [ + "normalize=torchvision.transforms.Normalize(\n", + " [0.485,0.456,0.406],[0.229,0.224,0.225])\n", + "#规范化\n", + "#整理数据\n", + "train_augs=torchvision.transforms.Compose([\n", + " torchvision.transforms.RandomResizedCrop(224),#随机剪裁\n", + " torchvision.transforms.RandomHorizontalFlip(),#依概率P=0.5水平翻转\n", + " torchvision.transforms.ToTensor(),\n", + " normalize\n", + "])\n", + "\n", + "test_augs = torchvision.transforms.Compose([\n", + " torchvision.transforms.Resize(256),#改变大小到256\n", + " torchvision.transforms.CenterCrop(224),#中心裁剪224\n", + " torchvision.transforms.ToTensor(),\n", + " normalize])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "定义和初始化模型" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 19, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Linear(in_features=512, out_features=1000, bias=True)" + ] + }, + "execution_count": 7, + "metadata": {}, + 
"output_type": "execute_result" + } + ], + "source": [ + "pretrained_net=torchvision.models.resnet18(pretrained=True)#下载预训练的resnet18\n", + "pretrained_net.fc" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 22, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "finetune_net=torchvision.models.resnet18(pretrained=True)\n", + "finetune_net.fc=nn.Linear(finetune_net.fc.in_features,2)#更改全连接层\n", + "nn.init.xavier_uniform_(finetune_net.fc.weight)#xavier初始化全连接层" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "微调模型" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "origin_pos": 25, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def train_fine_tuning(net,learning_rate,batch_size=127,num_epochs=5,\n", + " params_group=True):\n", + " train_iter=torch.utils.data.DataLoader(torchvision.datasets.ImageFolder(\n", + " os.path.join(data_dir,'train'),transform=train_augs),\n", + " batch_size=batch_size,shuffle=True)\n", + " test_iter=torch.utils.data.DataLoader(torchvision.datasets.ImageFolder(\n", + " os.path.join(data_dir,'test'),transform=test_augs),\n", + " batch_size=batch_size)\n", + " devices=d2l.try_all_gpus()\n", + " loss=nn.CrossEntropyLoss(reduction=\"none\")#交叉熵损失\n", + " if params_group:\n", + " params_1x=[params for name,params in net.named_parameters()\n", + " if name not in [\"fc.weight\",\"fc.bias\"]]\n", + " trainer=torch.optim.SGD([{'params':params_1x},\n", + " {'params':net.fc.parameters(),\n", + " 'lr':learning_rate*10}],\n", + " lr=learning_rate,weight_decay=0.001)#全连接层的参数更新扩大10倍\n", + " else:\n", + " trainer=torch.optim.SGD(net.parameters(),lr=learning_rate,\n", + " weight_decay=0.001)\n", + " d2l.train_ch13(net,train_iter,test_iter,loss,trainer,num_epochs,\n", + " devices)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + 
"source": [ + "使用较小的学习率" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "origin_pos": 28, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "loss 0.217, train acc 0.920, test acc 0.924\n", + "844.9 examples/sec on [device(type='cuda', index=0), device(type='cuda', index=1)]\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T14:50:45.644824\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "train_fine_tuning(finetune_net,5e-5)#学习率5e-5" + ] + }, + 
{ + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "为了进行比较,\n", + "所有模型参数初始化为随机值" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "origin_pos": 31, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "loss 0.389, train acc 0.839, test acc 0.806\n", + "1612.5 examples/sec on [device(type='cuda', index=0), device(type='cuda', index=1)]\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T14:52:24.023436\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + 
}, + "output_type": "display_data" + } + ], + "source": [ + "scratch_net = torchvision.models.resnet18()\n", + "scratch_net.fc = nn.Linear(scratch_net.fc.in_features, 2)\n", + "train_fine_tuning(scratch_net, 5e-4, param_group=False)#不学习参数" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "origin_pos": 37, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([1, 512])" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "for param in finetune_net.parameters():\n", + " param.requires_grad = False\n", + "\n", + "weight = pretrained_net.fc.weight\n", + "hotdog_w = torch.split(weight.data, 1, dim=0)[713]\n", + "hotdog_w.shape" + ] + } + ], + "metadata": { + "celltoolbar": "Slideshow", + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + }, + "rise": { + "autolaunch": true, + "enable_chalkboard": true, + "overlay": "", + "scroll": true + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/40-\345\256\236\346\210\230Kaggle\346\257\224\350\265\233\357\274\232\347\213\227\347\232\204\345\223\201\347\247\215\350\257\206\345\210\253.ipynb" "b/code/40-\345\256\236\346\210\230Kaggle\346\257\224\350\265\233\357\274\232\347\213\227\347\232\204\345\223\201\347\247\215\350\257\206\345\210\253.ipynb" new file mode 100644 index 0000000..f095269 --- /dev/null +++ "b/code/40-\345\256\236\346\210\230Kaggle\346\257\224\350\265\233\357\274\232\347\213\227\347\232\204\345\223\201\347\247\215\350\257\206\345\210\253.ipynb" @@ -0,0 +1,5694 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "Kaggle_Dogs.ipynb", 
+ "provenance": [], + "collapsed_sections": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "#调用GPU" + ], + "metadata": { + "id": "tk16IbugJtW1" + } + }, + { + "cell_type": "code", + "source": [ + "%tensorflow_version 2.x\n", + "import tensorflow as tf\n", + "device_name = tf.test.gpu_device_name()\n", + "if device_name != '/device:GPU:0':\n", + " raise SystemError('GPU device not found')\n", + "print('Found GPU at: {}'.format(device_name))" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "f4oIuBdMylP5", + "outputId": "15d743f6-d7b0-4075-f34c-8f4c059a2bb4" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Found GPU at: /device:GPU:0\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "#下载pytorch" + ], + "metadata": { + "id": "cFboGXYoJvhH" + } + }, + { + "cell_type": "code", + "source": [ + "!pip3 install torch torchvision torchaudio" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "vTwmJf2Zymlh", + "outputId": "bc9449dd-5c9b-476e-9095-11344f3ad39b" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Requirement already satisfied: torch in /usr/local/lib/python3.7/dist-packages (1.10.0+cu111)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (0.11.1+cu111)\n", + "Requirement already satisfied: torchaudio in /usr/local/lib/python3.7/dist-packages (0.10.0+cu111)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch) (3.10.0.2)\n", + "Requirement already satisfied: pillow!=8.3.0,>=5.3.0 in /usr/local/lib/python3.7/dist-packages (from torchvision) (7.1.2)\n", + "Requirement already satisfied: 
numpy in /usr/local/lib/python3.7/dist-packages (from torchvision) (1.19.5)\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!pip3 install d2l==0.14" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "0TMFx8JvyrxJ", + "outputId": "d259c6a1-0959-4a44-d451-5fdb898c791d" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Requirement already satisfied: d2l==0.14 in /usr/local/lib/python3.7/dist-packages (0.14.0)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.1.5)\n", + "Requirement already satisfied: jupyter in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.0.0)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (3.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.19.5)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (7.6.5)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.6.1)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (4.10.1)\n", + "Requirement already satisfied: jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.0)\n", + "Requirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.2)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.3.1)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) 
(5.3.5)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.5.0)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.4.2)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.8.1)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.7.5)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.8.0)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.0.18)\n", + "Requirement already satisfied: pygments in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (2.6.1)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (57.4.0)\n", + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.15.0)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.2.5)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (3.5.2)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in 
/usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (1.0.2)\n", + "Requirement already satisfied: nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (5.1.3)\n", + "Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (0.2.0)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.3.3)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.9.1)\n", + "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (0.18.0)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.10.0)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (21.4.0)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (5.4.0)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.10.0.2)\n", + "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.7.0)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from 
notebook->jupyter->d2l==0.14) (2.11.3)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (1.8.0)\n", + "Requirement already satisfied: terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (0.12.1)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (22.3.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (2.8.2)\n", + "Requirement already satisfied: ptyprocess in /usr/local/lib/python3.7/dist-packages (from terminado>=0.8.1->notebook->jupyter->d2l==0.14) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter->d2l==0.14) (2.0.1)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (3.0.6)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (0.11.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (1.3.2)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (4.1.0)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (1.5.0)\n", + "Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.8.4)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.7.1)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in 
/usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.3)\n", + "Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.5.0)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (21.3)\n", + "Requirement already satisfied: webencodings in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (0.5.1)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas->d2l==0.14) (2018.9)\n", + "Requirement already satisfied: qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter->d2l==0.14) (2.0.0)\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "#下载数据集" + ], + "metadata": { + "id": "eDOnSJYoJ0_w" + } + }, + { + "cell_type": "code", + "source": [ + "!pip uninstall -y kaggle\n", + "!pip install --upgrade pip\n", + "!pip install kaggle==1.5.6" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "VjuB0Eb0OeJo", + "outputId": "2c98b498-3503-41f0-929c-28d49e1088de" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\u001b[33mWARNING: Skipping kaggle as it is not installed.\u001b[0m\n", + "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\n", + "Requirement already satisfied: pip in /usr/local/lib/python3.7/dist-packages (21.3.1)\n", + "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. 
It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\n", + "Collecting kaggle==1.5.6\n", + " Downloading kaggle-1.5.6.tar.gz (58 kB)\n", + " |████████████████████████████████| 58 kB 3.1 MB/s \n", + "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + "Requirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from kaggle==1.5.6) (1.24.3)\n", + "Requirement already satisfied: six>=1.10 in /usr/local/lib/python3.7/dist-packages (from kaggle==1.5.6) (1.15.0)\n", + "Requirement already satisfied: certifi in /usr/local/lib/python3.7/dist-packages (from kaggle==1.5.6) (2021.10.8)\n", + "Requirement already satisfied: python-dateutil in /usr/local/lib/python3.7/dist-packages (from kaggle==1.5.6) (2.8.2)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from kaggle==1.5.6) (2.23.0)\n", + "Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from kaggle==1.5.6) (4.62.3)\n", + "Requirement already satisfied: python-slugify in /usr/local/lib/python3.7/dist-packages (from kaggle==1.5.6) (5.0.2)\n", + "Requirement already satisfied: text-unidecode>=1.3 in /usr/local/lib/python3.7/dist-packages (from python-slugify->kaggle==1.5.6) (1.3)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->kaggle==1.5.6) (2.10)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->kaggle==1.5.6) (3.0.4)\n", + "Building wheels for collected packages: kaggle\n", + " Building wheel for kaggle (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for kaggle: filename=kaggle-1.5.6-py3-none-any.whl size=72858 sha256=5749773082a14909ed85e25bf3d13dfde1d682ac40abca3e0f2c2008b0c2b1a8\n", + " Stored in directory: /root/.cache/pip/wheels/aa/e7/e7/eb3c3d514c33294d77ddd5a856bdd58dc9c1fabbed59a02a2b\n", + "Successfully built kaggle\n", + "Installing collected packages: kaggle\n", + "Successfully installed kaggle-1.5.6\n", + "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!cd /root && mkdir -p ./.kaggle && mv /root/kaggle.json /root/.kaggle/" + ], + "metadata": { + "id": "bRhwA-fKPU44" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "!cd /root/.kaggle && ls" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "yfoUsOVbPvbH", + "outputId": "503077ff-a46e-43d2-b355-f71dd4bac7f5" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "kaggle.json\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!chmod 600 /root/.kaggle/kaggle.json" + ], + "metadata": { + "id": "A-7iBU3KQox2" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "!kaggle competitions download -c dog-breed-identification -p /data" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "7VoQH6vFOCQc", + "outputId": "8b15f88d-1314-4773-cf20-4b059652f29a" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Downloading dog-breed-identification.zip to /data\n", + " 98% 679M/691M [00:06<00:00, 125MB/s]\n", + "100% 691M/691M [00:06<00:00, 117MB/s]\n" + ] + } + ] + }, + { + 
"cell_type": "code", + "source": [ + "!unzip /data/dog-breed-identification.zip -d /data/dog-breed-identification" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "VkjMNs5zX5Ke", + "outputId": "d81c1800-ef9a-4f03-de36-e700bc4d5a9c" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\u001b[1;30;43mStreaming output truncated to the last 5000 lines.\u001b[0m\n", + " inflating: /data/dog-breed-identification/train/83bc62b0fffa99a9c94ba0b67a5f7395.jpg \n", + " inflating: /data/dog-breed-identification/train/83bcff6b55ee179a7c123fa6103c377a.jpg \n", + " inflating: /data/dog-breed-identification/train/83be6d622ab74a5e7e08b53eb8fd566a.jpg \n", + " inflating: /data/dog-breed-identification/train/83c2d7419b0429b9fe953bc1b6cddbec.jpg \n", + " inflating: /data/dog-breed-identification/train/83cf7d7cd2a759a93e2ffd95bea9c6fb.jpg \n", + " inflating: /data/dog-breed-identification/train/83d405858f0931722ef21e8ac0adee4d.jpg \n", + " inflating: /data/dog-breed-identification/train/83d4125a4c3c7dc5956563276cb1cd74.jpg \n", + " inflating: /data/dog-breed-identification/train/83f0bb565b2186dbcc6a9d009cb26ff2.jpg \n", + " inflating: /data/dog-breed-identification/train/83fad0718581a696132c96c166472627.jpg \n", + " inflating: /data/dog-breed-identification/train/83fbbcc9a612e3f712b1ba199da61f20.jpg \n", + " inflating: /data/dog-breed-identification/train/8403d8936430c2f05ab7d74d23c2c0cb.jpg \n", + " inflating: /data/dog-breed-identification/train/8406d837b2d7fac1c3cd621abb4c4f9e.jpg \n", + " inflating: /data/dog-breed-identification/train/840b67d26e5e43f8eb6430f62d4ba1ac.jpg \n", + " inflating: /data/dog-breed-identification/train/840db91ba4600148f3dcb06ec419b421.jpg \n", + " inflating: /data/dog-breed-identification/train/840dbad5a691c22611d85b2488bf4cbb.jpg \n", + " inflating: /data/dog-breed-identification/train/8410ced9ebc1759a7ebce5c42bfb5222.jpg \n", + " inflating: 
/data/dog-breed-identification/train/841463629c4833816e216cbb041c2778.jpg \n", + " inflating: /data/dog-breed-identification/train/8429dcca4ae91c4e0345e4ba48b0d69f.jpg \n", + " inflating: /data/dog-breed-identification/train/842e3c6e44fda4102fe83d07dac72b3e.jpg \n", + " inflating: /data/dog-breed-identification/train/8431a6ce7c70e5e36698e821eedf24b5.jpg \n", + " inflating: /data/dog-breed-identification/train/8434b6c3cee87e28395197d6fc7d3489.jpg \n", + " inflating: /data/dog-breed-identification/train/8436be99589db6a99cfac1b894421ea6.jpg \n", + " inflating: /data/dog-breed-identification/train/843cbc1fc239d24534859bd272c3bc16.jpg \n", + " inflating: /data/dog-breed-identification/train/843d766d92a7b6d6a85a81e56a99c51f.jpg \n", + " inflating: /data/dog-breed-identification/train/84421c01900b34e3e1ba42f2424fbd33.jpg \n", + " inflating: /data/dog-breed-identification/train/844dde39a9e8987e510e8d46ec4da714.jpg \n", + " inflating: /data/dog-breed-identification/train/8452a26d7243a299ea782a7ba4036f1f.jpg \n", + " inflating: /data/dog-breed-identification/train/8454b5e6546f04871561de8f10d868c7.jpg \n", + " inflating: /data/dog-breed-identification/train/84564a69c0d0fa36e0810188943683a1.jpg \n", + " inflating: /data/dog-breed-identification/train/84605f5fc5ad89a66b9b277e1223e962.jpg \n", + " inflating: /data/dog-breed-identification/train/8463aa43d88bee057082434ccc806bb0.jpg \n", + " inflating: /data/dog-breed-identification/train/8467fbd75a8fe64da70df5410b6c4f09.jpg \n", + " inflating: /data/dog-breed-identification/train/846d6384787fff8dc17d488e6b86c209.jpg \n", + " inflating: /data/dog-breed-identification/train/8470a6fdf4db9b088494aaa9384ba9d0.jpg \n", + " inflating: /data/dog-breed-identification/train/84728e78632c0910a69d33f82e62638c.jpg \n", + " inflating: /data/dog-breed-identification/train/8477ac111ca6a9f11c2edfa43a933cad.jpg \n", + " inflating: /data/dog-breed-identification/train/8480ad94841309fc4ce874c4b1afc90c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/848133f97b3e97b1b0fab0402e572d98.jpg \n", + " inflating: /data/dog-breed-identification/train/8485bc3f3fd64b90be74d7f020c61f54.jpg \n", + " inflating: /data/dog-breed-identification/train/8486e8159f169e8c3d4697e5c859760f.jpg \n", + " inflating: /data/dog-breed-identification/train/848f7a0b665b118e4a3b85029b1794e0.jpg \n", + " inflating: /data/dog-breed-identification/train/8490222d4744064aa7a8621a1c274965.jpg \n", + " inflating: /data/dog-breed-identification/train/8494afd34e3a2e81bec37e4dfdc67f8d.jpg \n", + " inflating: /data/dog-breed-identification/train/84aaf49fb53d423d4aed05ab79559b0c.jpg \n", + " inflating: /data/dog-breed-identification/train/84ab21940432e5b42cfacc58cd84c861.jpg \n", + " inflating: /data/dog-breed-identification/train/84accc2dc9f5bb3ebee89fe1bf23639c.jpg \n", + " inflating: /data/dog-breed-identification/train/84adb2cc13b65cf25418cde969b9bb0e.jpg \n", + " inflating: /data/dog-breed-identification/train/84b612a8e43c6debbc9951cb24ec9ba0.jpg \n", + " inflating: /data/dog-breed-identification/train/84b62d2def32fc85092cabe2c722c135.jpg \n", + " inflating: /data/dog-breed-identification/train/84bcd47e09b0ef3f0b6e3f47f232a77c.jpg \n", + " inflating: /data/dog-breed-identification/train/84be9b9f59aa586f1b188781b2c47a3e.jpg \n", + " inflating: /data/dog-breed-identification/train/84c6bdd4bb818edd4c088f27312d028f.jpg \n", + " inflating: /data/dog-breed-identification/train/84d2dd9eff021b6095a4b1e2ba3c1c0c.jpg \n", + " inflating: /data/dog-breed-identification/train/84de398dd5408d91b133e2e95628120a.jpg \n", + " inflating: /data/dog-breed-identification/train/84dfe42ce71204b367c2b4000eb6ba5c.jpg \n", + " inflating: /data/dog-breed-identification/train/84e567b15311f0c891858f56f0175867.jpg \n", + " inflating: /data/dog-breed-identification/train/84f5f076b0b951d68f88c8b795b7135e.jpg \n", + " inflating: /data/dog-breed-identification/train/850732e3c9dae7a108dbe8740359fb41.jpg \n", + " inflating: 
/data/dog-breed-identification/train/850f1d90b81ff1a51af16c752c0b328f.jpg \n", + " inflating: /data/dog-breed-identification/train/8516b9f462f35ffa2d1d7f8abe860630.jpg \n", + " inflating: /data/dog-breed-identification/train/852b9c2a1af49cea2fed90f8eafbf0de.jpg \n", + " inflating: /data/dog-breed-identification/train/85311c814fa4f4bc4e4eb532dcc53e48.jpg \n", + " inflating: /data/dog-breed-identification/train/8533fb040dba89302f213f6c5cfa9367.jpg \n", + " inflating: /data/dog-breed-identification/train/8534a47edb618437a4ceb0047f83cc7d.jpg \n", + " inflating: /data/dog-breed-identification/train/8537c0a9ca6bda02ac953dd23e188755.jpg \n", + " inflating: /data/dog-breed-identification/train/85435798dee261580b06866dcb693210.jpg \n", + " inflating: /data/dog-breed-identification/train/85454a97c4e34a1249e0b8830a29c73d.jpg \n", + " inflating: /data/dog-breed-identification/train/854b2a257e9a5f72ff696941ad8164ea.jpg \n", + " inflating: /data/dog-breed-identification/train/854b95f91a0f65435da679bcdd3994b0.jpg \n", + " inflating: /data/dog-breed-identification/train/85555f0d533f33bf8d38e1490cabb05b.jpg \n", + " inflating: /data/dog-breed-identification/train/8562aa06d4e739391a4bdcb49a3eebeb.jpg \n", + " inflating: /data/dog-breed-identification/train/85662233bce63d5ac7e5e9fdbf49c654.jpg \n", + " inflating: /data/dog-breed-identification/train/856724e3772971c35b3a119208886e25.jpg \n", + " inflating: /data/dog-breed-identification/train/856cb8fa7ff3f2387b66f4119c800af5.jpg \n", + " inflating: /data/dog-breed-identification/train/8573ec2d5e4700ce8b74091d95b50603.jpg \n", + " inflating: /data/dog-breed-identification/train/857a4db72e722ecd29b900da9cee6036.jpg \n", + " inflating: /data/dog-breed-identification/train/858771bb039b3317f92b698874307126.jpg \n", + " inflating: /data/dog-breed-identification/train/8587cc1bb05b7997b4190485d07107a5.jpg \n", + " inflating: /data/dog-breed-identification/train/858e7823ed319c10e78d9df1f456ff7a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/859433a164cbb202cfcf57ee6b24217d.jpg \n", + " inflating: /data/dog-breed-identification/train/859deae5a4eb97d0d32c94eb936228ed.jpg \n", + " inflating: /data/dog-breed-identification/train/85a3169034208ff803238c6eab4e4bd3.jpg \n", + " inflating: /data/dog-breed-identification/train/85a7359fd3bb01982912a73603acd2db.jpg \n", + " inflating: /data/dog-breed-identification/train/85a80f0663e1373dc17a9251a32a3d5f.jpg \n", + " inflating: /data/dog-breed-identification/train/85aa7704009ba5e05f4b73fc96602b7c.jpg \n", + " inflating: /data/dog-breed-identification/train/85af448e04bc04d107502330ff98ca65.jpg \n", + " inflating: /data/dog-breed-identification/train/85b160a087cbc927454fe01c52605112.jpg \n", + " inflating: /data/dog-breed-identification/train/85b52132e0d56881b16d301fa09f1092.jpg \n", + " inflating: /data/dog-breed-identification/train/85bb4a7d4ace6c2464e7969b99403b0c.jpg \n", + " inflating: /data/dog-breed-identification/train/85be6409c3f25aad1a2df99407e80278.jpg \n", + " inflating: /data/dog-breed-identification/train/85bfb0ee0c6071dcafd54d343a64a5d5.jpg \n", + " inflating: /data/dog-breed-identification/train/85c581c27e5087f96a06de686e97dc88.jpg \n", + " inflating: /data/dog-breed-identification/train/85cbe2f580485f7ac1df7821c3e9c467.jpg \n", + " inflating: /data/dog-breed-identification/train/85d2ea535f85e45ec3b027599da2f920.jpg \n", + " inflating: /data/dog-breed-identification/train/85d3972b2b9a373993bb17e8767b3dd4.jpg \n", + " inflating: /data/dog-breed-identification/train/85d4347373886842b7eb31e9e4598442.jpg \n", + " inflating: /data/dog-breed-identification/train/85d6ace4a0c1ffdbaf80479b9368ecdf.jpg \n", + " inflating: /data/dog-breed-identification/train/85e2b89f5760c7958305b446607be91d.jpg \n", + " inflating: /data/dog-breed-identification/train/85ee2259befe2e52c2200c7ca75f35e1.jpg \n", + " inflating: /data/dog-breed-identification/train/85ef8ce1e2e798ad6ce5289c42b9055b.jpg \n", + " inflating: 
/data/dog-breed-identification/train/85f08de63891d73f87e76f2763ab771e.jpg \n", + " inflating: /data/dog-breed-identification/train/85f0e9f401e46e30a86c5bc7fb7df968.jpg \n", + " inflating: /data/dog-breed-identification/train/85fd373647c7df4fe43c4f7f0e6c727f.jpg \n", + " inflating: /data/dog-breed-identification/train/85fde01acabf9dce3f4bb3bc3b5a5f28.jpg \n", + " inflating: /data/dog-breed-identification/train/86021dbc333b4e38bacdc24e3b5edea5.jpg \n", + " inflating: /data/dog-breed-identification/train/86066e2fd96394d8f10dc65e98becaaa.jpg \n", + " inflating: /data/dog-breed-identification/train/860e42bbe81a5d30d00bdb77e2c38a80.jpg \n", + " inflating: /data/dog-breed-identification/train/860f7e954b6e0a7dfbb39ac1d6d1cfb9.jpg \n", + " inflating: /data/dog-breed-identification/train/861a9b6f692cb41c875130c525f7a17e.jpg \n", + " inflating: /data/dog-breed-identification/train/861dee51881dc7a69672f63d5370f4a7.jpg \n", + " inflating: /data/dog-breed-identification/train/8620ce74bccbbdae679f6db87264c233.jpg \n", + " inflating: /data/dog-breed-identification/train/8630ae207e436f43f8d2c07a705f55fa.jpg \n", + " inflating: /data/dog-breed-identification/train/863424a9bef5ed69d17eb337cbc7d92c.jpg \n", + " inflating: /data/dog-breed-identification/train/8636932d5c5f80c9c53e49a6676213af.jpg \n", + " inflating: /data/dog-breed-identification/train/864dd68d1079dd10a784dbd2ff2c3995.jpg \n", + " inflating: /data/dog-breed-identification/train/864eccda6110b2021d0b68f781689f46.jpg \n", + " inflating: /data/dog-breed-identification/train/8650b152539571456f690c4f5a7ce77d.jpg \n", + " inflating: /data/dog-breed-identification/train/8665323b1e654eaa5a77736c5aca1516.jpg \n", + " inflating: /data/dog-breed-identification/train/8665d7a2c337fc178c60f0a7733594e7.jpg \n", + " inflating: /data/dog-breed-identification/train/866965dab54e1f3c641ed3e18b2f3313.jpg \n", + " inflating: /data/dog-breed-identification/train/8673461ceba52fd3b303f5825f9a0348.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8677413d3a09837dcceea509c0e581ed.jpg \n", + " inflating: /data/dog-breed-identification/train/868395c7a1c99a52a059b5c52824a8bc.jpg \n", + " inflating: /data/dog-breed-identification/train/86870cfcbbd37fdc446f2371ba087e3b.jpg \n", + " inflating: /data/dog-breed-identification/train/8687dd4fbc2b4ec87aef79775c177c27.jpg \n", + " inflating: /data/dog-breed-identification/train/868decd906bb483bac17a005a3f06bc3.jpg \n", + " inflating: /data/dog-breed-identification/train/8693c4778a1689333d01a5e505fa7993.jpg \n", + " inflating: /data/dog-breed-identification/train/8693f425d12011e396db417311b3ad2e.jpg \n", + " inflating: /data/dog-breed-identification/train/86afe41dc1054163e2f6430f2bc009ff.jpg \n", + " inflating: /data/dog-breed-identification/train/86b9f437a4969f42f38b20ac4d7d7e7c.jpg \n", + " inflating: /data/dog-breed-identification/train/86bd30760385bad95c9c03f97619eb99.jpg \n", + " inflating: /data/dog-breed-identification/train/86cbfe570d713b9d5752874137532c6b.jpg \n", + " inflating: /data/dog-breed-identification/train/86da61ef74981730ff766ed0c61e8ffc.jpg \n", + " inflating: /data/dog-breed-identification/train/86e6525561e78e1b20494bd750f6ab0c.jpg \n", + " inflating: /data/dog-breed-identification/train/86ead5380434660cdd8a6625ad6fe341.jpg \n", + " inflating: /data/dog-breed-identification/train/86eb66186b5f7ddb93ada8b4cb4065b3.jpg \n", + " inflating: /data/dog-breed-identification/train/86f0e4abee677119258764eadc368b9d.jpg \n", + " inflating: /data/dog-breed-identification/train/87001075188b63c8edeb2ffaf4cdfca4.jpg \n", + " inflating: /data/dog-breed-identification/train/8700e299601a8aea94ad3e5e6094d2f0.jpg \n", + " inflating: /data/dog-breed-identification/train/87028ba151a48e01ea87c063e420798b.jpg \n", + " inflating: /data/dog-breed-identification/train/87079abb765755089eb393e884192e02.jpg \n", + " inflating: /data/dog-breed-identification/train/871477f33574a10e01a0b5b625f35b63.jpg \n", + " inflating: 
/data/dog-breed-identification/train/871d3264aad7e126ad6179f9c8242756.jpg \n", + " inflating: /data/dog-breed-identification/train/872838cb6ec0c4edb6faf5d5f9e5c1f3.jpg \n", + " inflating: /data/dog-breed-identification/train/872f6452dd5ff6458179fa3ba0a37cbe.jpg \n", + " inflating: /data/dog-breed-identification/train/8732e2cbe93e9d2fc5a1a11b33631c2e.jpg \n", + " inflating: /data/dog-breed-identification/train/873aa2ad565ea70f3a49ec993e323fa1.jpg \n", + " inflating: /data/dog-breed-identification/train/8748a52fde80c1b69f6a6af8a522bd2e.jpg \n", + " inflating: /data/dog-breed-identification/train/874cacae6ec28f300b601919625d6724.jpg \n", + " inflating: /data/dog-breed-identification/train/874ce47f8d4a7da0e205bf8f5f45d8da.jpg \n", + " inflating: /data/dog-breed-identification/train/875c993f6f71781ae8ba11e686669cee.jpg \n", + " inflating: /data/dog-breed-identification/train/87713edeb72187de21b9b3fca4fc5bc3.jpg \n", + " inflating: /data/dog-breed-identification/train/8781a1822fe617f9a70abf9d349a0c8d.jpg \n", + " inflating: /data/dog-breed-identification/train/878499737e214678c2947b8b4c82a6f7.jpg \n", + " inflating: /data/dog-breed-identification/train/87a2b416edf49358251c500097885887.jpg \n", + " inflating: /data/dog-breed-identification/train/87a8f6b9a0e618486e67662a133e9e61.jpg \n", + " inflating: /data/dog-breed-identification/train/87b33815ce34f2c2d98b2374c32ce3a5.jpg \n", + " inflating: /data/dog-breed-identification/train/87c3089fb8c7c00ae4ce78b939530f84.jpg \n", + " inflating: /data/dog-breed-identification/train/87c391fb6e83d6c2c8c6c0028e6726eb.jpg \n", + " inflating: /data/dog-breed-identification/train/87c76585ab973f2ae3e6f20ba061db10.jpg \n", + " inflating: /data/dog-breed-identification/train/87ce562699605ac7ef87b38dc1ae6f2a.jpg \n", + " inflating: /data/dog-breed-identification/train/87dfa4ae9b1b4e399e8819ad19a1cd7d.jpg \n", + " inflating: /data/dog-breed-identification/train/87dfb129c12c369824f5425980158386.jpg \n", + " inflating: 
/data/dog-breed-identification/train/87e198d332493454d0c5645945050f8e.jpg \n", + " inflating: /data/dog-breed-identification/train/87f0a743d0bec944c4bfae7ff5f5c0b2.jpg \n", + " inflating: /data/dog-breed-identification/train/8801d91c9e4a26fd20adccecf75291dd.jpg \n", + " inflating: /data/dog-breed-identification/train/880379ffeea97e48cf12349753838a54.jpg \n", + " inflating: /data/dog-breed-identification/train/8806c582754ce2fa439c410e1a2dd8ef.jpg \n", + " inflating: /data/dog-breed-identification/train/8810923d4134c41b7ca0e8543c31ffeb.jpg \n", + " inflating: /data/dog-breed-identification/train/88110fa3bbc63e4a6451f8b94f1c16ad.jpg \n", + " inflating: /data/dog-breed-identification/train/8811d2ca19ec7a6cefbb76097c6f7e4c.jpg \n", + " inflating: /data/dog-breed-identification/train/8815b374db13f9992beb2491dd70f9c0.jpg \n", + " inflating: /data/dog-breed-identification/train/881c91c19ebe23fca49d78e887be9efd.jpg \n", + " inflating: /data/dog-breed-identification/train/881ce596dbeb26cd8a74474e4f51eb0e.jpg \n", + " inflating: /data/dog-breed-identification/train/881ceaa386838493d2f42d756fbb7597.jpg \n", + " inflating: /data/dog-breed-identification/train/881d49b759a56cdf1956f0015099675d.jpg \n", + " inflating: /data/dog-breed-identification/train/881ff9eff7bcf5bb5cd5bc2e39b22a2c.jpg \n", + " inflating: /data/dog-breed-identification/train/88234d321053611f136c1ee67d3ee3ff.jpg \n", + " inflating: /data/dog-breed-identification/train/8825e914555803f4c67b26593c9d5aff.jpg \n", + " inflating: /data/dog-breed-identification/train/8825e9c2f509720b41ec9ae1866ae953.jpg \n", + " inflating: /data/dog-breed-identification/train/8831a246cd93a78fe26304f76d5dcbdb.jpg \n", + " inflating: /data/dog-breed-identification/train/88435e91be34908f134a5ef10ac80dd0.jpg \n", + " inflating: /data/dog-breed-identification/train/8847eac09561675bf7e3d6d9632b8865.jpg \n", + " inflating: /data/dog-breed-identification/train/88564167e9ff649f5dc9e8bde3a82734.jpg \n", + " inflating: 
/data/dog-breed-identification/train/88664fc7a27b5828860ef5bae18b5309.jpg \n", + " inflating: /data/dog-breed-identification/train/886c611904fc29caee5f9cd908b90dc4.jpg \n", + " inflating: /data/dog-breed-identification/train/887c620f29ed39bc5d8702cae6d47dd3.jpg \n", + " inflating: /data/dog-breed-identification/train/887e80bc7e5854b3d876822be7720ceb.jpg \n", + " inflating: /data/dog-breed-identification/train/88831230c898c7e4c56ce73ed285c9c3.jpg \n", + " inflating: /data/dog-breed-identification/train/888631e6931830b648622873fc5f6339.jpg \n", + " inflating: /data/dog-breed-identification/train/888dcbdc6f70220a8174befc34266a16.jpg \n", + " inflating: /data/dog-breed-identification/train/88928ad88da921d74a26acb27fb3028a.jpg \n", + " inflating: /data/dog-breed-identification/train/889623dfc6b119e79a931d34a1ec081b.jpg \n", + " inflating: /data/dog-breed-identification/train/8898c34ad05d2c95319ff2a2f326e69c.jpg \n", + " inflating: /data/dog-breed-identification/train/889c421c15dfaa61b6d5ede5c2b31d17.jpg \n", + " inflating: /data/dog-breed-identification/train/88a3cc5a66105089522f6fd894e65a55.jpg \n", + " inflating: /data/dog-breed-identification/train/88a9c4a74b1663c9b30ab62e3573ee75.jpg \n", + " inflating: /data/dog-breed-identification/train/88bdef57f5f510afd04e1452c4495e4a.jpg \n", + " inflating: /data/dog-breed-identification/train/88c0df28f7f27a13e48632be87111a53.jpg \n", + " inflating: /data/dog-breed-identification/train/88c2229b86b2fecb775bd4a6b2c863d4.jpg \n", + " inflating: /data/dog-breed-identification/train/88c8698176d181d5c6c7bd7f3acb1736.jpg \n", + " inflating: /data/dog-breed-identification/train/88d6b55d29157070ce6ab55a6040c7b3.jpg \n", + " inflating: /data/dog-breed-identification/train/88d848a23380b8a5f0140a47ff514331.jpg \n", + " inflating: /data/dog-breed-identification/train/88dccc33cd5ea95900a170623800ee6d.jpg \n", + " inflating: /data/dog-breed-identification/train/88f1fbdc15e7da1c28dfaa6b8cc01ac7.jpg \n", + " inflating: 
/data/dog-breed-identification/train/88f31da888f9a8007165cc1830b63fd7.jpg \n", + " inflating: /data/dog-breed-identification/train/88fa6d0c6f8ce60ea125a86b183c5a15.jpg \n", + " inflating: /data/dog-breed-identification/train/88fb448fb7d7fc731a8b2af2884fdbb0.jpg \n", + " inflating: /data/dog-breed-identification/train/8900485bc481a5e95a1930ee7e7c4adc.jpg \n", + " inflating: /data/dog-breed-identification/train/89031b60e3aea5f7c320997ee441bc60.jpg \n", + " inflating: /data/dog-breed-identification/train/890727594626c0fa7092624bb9261387.jpg \n", + " inflating: /data/dog-breed-identification/train/8907b3f7a888cbc5e6ac21c47922df2b.jpg \n", + " inflating: /data/dog-breed-identification/train/890efbec7147c2887c460be0af763381.jpg \n", + " inflating: /data/dog-breed-identification/train/89130ad7e00c707d912af935195cbc34.jpg \n", + " inflating: /data/dog-breed-identification/train/8918ef02d44ca4d62e1893aa590b384a.jpg \n", + " inflating: /data/dog-breed-identification/train/89259775667d598ec37018874b494220.jpg \n", + " inflating: /data/dog-breed-identification/train/892e56e6999bac353ddedf9162b04959.jpg \n", + " inflating: /data/dog-breed-identification/train/8935b2cad46c4030aa0801ae1415cb90.jpg \n", + " inflating: /data/dog-breed-identification/train/89374e5120de8f3daa32c55306039022.jpg \n", + " inflating: /data/dog-breed-identification/train/89380eddb8ec8802ec7e19f5dc65a74a.jpg \n", + " inflating: /data/dog-breed-identification/train/895266d2f46fb569b66268155bf28aa5.jpg \n", + " inflating: /data/dog-breed-identification/train/8966b48825cf5490e084a3ce71d17095.jpg \n", + " inflating: /data/dog-breed-identification/train/896ae4eda6310c5fa223131ac7574494.jpg \n", + " inflating: /data/dog-breed-identification/train/89714899135c134b326fecdd2c2647b0.jpg \n", + " inflating: /data/dog-breed-identification/train/89719dbbf15a9f468a620353781abe75.jpg \n", + " inflating: /data/dog-breed-identification/train/8977f6e2feaf60efc628ff91440ff64e.jpg \n", + " inflating: 
/data/dog-breed-identification/train/897ffbc20e2ee537a3bf595d4cb0895f.jpg \n", + " inflating: /data/dog-breed-identification/train/898ecc1f6e25205be2a465e338ff06ae.jpg \n", + " inflating: /data/dog-breed-identification/train/8992306d8e9bd6b844ebbdcec53d9925.jpg \n", + " inflating: /data/dog-breed-identification/train/89962510e8e4ad50a1de642bf49945df.jpg \n", + " inflating: /data/dog-breed-identification/train/899f284bbdd762992629dc31a20a7d49.jpg \n", + " inflating: /data/dog-breed-identification/train/89b539e909f580be06923c40ec11c477.jpg \n", + " inflating: /data/dog-breed-identification/train/89bdd9a0e2f6869c8c19785838f9c030.jpg \n", + " inflating: /data/dog-breed-identification/train/89c100870ed663f0136cd385dcc10e76.jpg \n", + " inflating: /data/dog-breed-identification/train/89c19bac9df678011ab39df96c7b6328.jpg \n", + " inflating: /data/dog-breed-identification/train/89c587a6a853c816d3cea9337a8e814a.jpg \n", + " inflating: /data/dog-breed-identification/train/89c5baab87b86eff18560be8f58f481c.jpg \n", + " inflating: /data/dog-breed-identification/train/89cbfc8c3745a9c14c2b73765f30fc9a.jpg \n", + " inflating: /data/dog-breed-identification/train/89d2d5852bccf43f7cc15cc1cf7ee5d7.jpg \n", + " inflating: /data/dog-breed-identification/train/89e04e8b173e1bd50911a94ddd6f1dd9.jpg \n", + " inflating: /data/dog-breed-identification/train/89e486c54ca7c2537abe3522142a3537.jpg \n", + " inflating: /data/dog-breed-identification/train/89e653251ebdb4125224d3af5230587e.jpg \n", + " inflating: /data/dog-breed-identification/train/89f1b9d773cca9a3adbf536a62947262.jpg \n", + " inflating: /data/dog-breed-identification/train/89f38923a2427b596553c4382841a640.jpg \n", + " inflating: /data/dog-breed-identification/train/89f6482c56119d9031029346746fc17a.jpg \n", + " inflating: /data/dog-breed-identification/train/8a066dcad24606cf4a5e1ada81093989.jpg \n", + " inflating: /data/dog-breed-identification/train/8a0717f2adf56db70bf360f6464f29b6.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8a1129e4fd587efdc8ef8c8cc08664e4.jpg \n", + " inflating: /data/dog-breed-identification/train/8a16b61df5b24c926d2e79267a0d62e6.jpg \n", + " inflating: /data/dog-breed-identification/train/8a1b3f9f3ff9fef70962ac68704e629a.jpg \n", + " inflating: /data/dog-breed-identification/train/8a1c01869f6354dc1e4f237db70ef7be.jpg \n", + " inflating: /data/dog-breed-identification/train/8a248f8b155f9a7f65ac7a5c9090cc5a.jpg \n", + " inflating: /data/dog-breed-identification/train/8a2519eb2577ce76ccedd4283f44354c.jpg \n", + " inflating: /data/dog-breed-identification/train/8a2a22dd80d59c330c3995f49b74abc5.jpg \n", + " inflating: /data/dog-breed-identification/train/8a2fb8dc69a446d004d113ad8bbd0366.jpg \n", + " inflating: /data/dog-breed-identification/train/8a31a73b980d8f8350f1990c9391c60f.jpg \n", + " inflating: /data/dog-breed-identification/train/8a35cb0b51977007c8f812204b556883.jpg \n", + " inflating: /data/dog-breed-identification/train/8a3a39dfc2cf81434f2eec84790ac62e.jpg \n", + " inflating: /data/dog-breed-identification/train/8a415118e76252555a6ae5aa9c86bf62.jpg \n", + " inflating: /data/dog-breed-identification/train/8a4830e8d2f106d5a0c46ef82be96b0c.jpg \n", + " inflating: /data/dog-breed-identification/train/8a495c14b27b0b2da5bbfe03b2fe1a0f.jpg \n", + " inflating: /data/dog-breed-identification/train/8a5349f4b15dddd7472b569e9cdd3f39.jpg \n", + " inflating: /data/dog-breed-identification/train/8a5a7518881748d0c53cbe7f4d9d67c1.jpg \n", + " inflating: /data/dog-breed-identification/train/8a5ba5ac735272c8f85b0af39f8d7b29.jpg \n", + " inflating: /data/dog-breed-identification/train/8a5d03cdf0c886d1693ad8deca7fda5c.jpg \n", + " inflating: /data/dog-breed-identification/train/8a5dd8f626b274950bd3e92d832ea2c9.jpg \n", + " inflating: /data/dog-breed-identification/train/8a6113f1ca33960c10bd98ce0968d644.jpg \n", + " inflating: /data/dog-breed-identification/train/8a6334cbeca54f9d56b83a34ac491aa8.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8a6b1a95cce4538252dbc28cfcebb8ad.jpg \n", + " inflating: /data/dog-breed-identification/train/8a766975e00b9ce3b75899ecebde5a71.jpg \n", + " inflating: /data/dog-breed-identification/train/8a7db9bfc4a377e316a1acb16403137b.jpg \n", + " inflating: /data/dog-breed-identification/train/8a80f532bf6321fcfd2580319c6eb775.jpg \n", + " inflating: /data/dog-breed-identification/train/8a81a9b6ef044de6bb40506e0db1d92e.jpg \n", + " inflating: /data/dog-breed-identification/train/8aa1bde6dcb15816ebaeb23b55f9c325.jpg \n", + " inflating: /data/dog-breed-identification/train/8aa65793a60f8feb59925d6ece493bf0.jpg \n", + " inflating: /data/dog-breed-identification/train/8ab11c1387ac94ef0d09bf39a96473b9.jpg \n", + " inflating: /data/dog-breed-identification/train/8ab20216cab9e31dc00d41cf65ac0db6.jpg \n", + " inflating: /data/dog-breed-identification/train/8ab260e1bd12a31b64d2e197033bce20.jpg \n", + " inflating: /data/dog-breed-identification/train/8ab5f027cc6bd007a106485540ee3147.jpg \n", + " inflating: /data/dog-breed-identification/train/8ab6438f29eef71dcce83617e2c73157.jpg \n", + " inflating: /data/dog-breed-identification/train/8ad3b583aae6f2cd17f89e6384bba799.jpg \n", + " inflating: /data/dog-breed-identification/train/8ae123d42a7fa0a086271728dc7e9226.jpg \n", + " inflating: /data/dog-breed-identification/train/8ae1475b493ceb4dc93924ccdf1f98cd.jpg \n", + " inflating: /data/dog-breed-identification/train/8aee90949d4c1edc9ae570287a53e6f1.jpg \n", + " inflating: /data/dog-breed-identification/train/8af6c2ecc9088cb2a6f84681124c0da7.jpg \n", + " inflating: /data/dog-breed-identification/train/8af9cc09eef85c49a23057412b329e1b.jpg \n", + " inflating: /data/dog-breed-identification/train/8afa296352be6e60b467b958654cb482.jpg \n", + " inflating: /data/dog-breed-identification/train/8afbc403c8f96b6dcb9fa0a34ce39ea4.jpg \n", + " inflating: /data/dog-breed-identification/train/8b0b1c5823015dc73d323643a559be31.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8b21290d463f620d940bbe1ac32aae95.jpg \n", + " inflating: /data/dog-breed-identification/train/8b22f7121f8f487e43eca6d7b1e27dc6.jpg \n", + " inflating: /data/dog-breed-identification/train/8b27fd53a34c667b41ec5cf13be22de6.jpg \n", + " inflating: /data/dog-breed-identification/train/8b29269d1fa6760127a7dc14f50596ae.jpg \n", + " inflating: /data/dog-breed-identification/train/8b2f7389a0de18773edadeb6962e5c5c.jpg \n", + " inflating: /data/dog-breed-identification/train/8b30663909c1adfdca18f26300046ce0.jpg \n", + " inflating: /data/dog-breed-identification/train/8b332e46e140af54feecbdff46cad87b.jpg \n", + " inflating: /data/dog-breed-identification/train/8b3bd046cdd3d7562396aa63d0185628.jpg \n", + " inflating: /data/dog-breed-identification/train/8b3ed0409cab6259bd3f444bdb99bc38.jpg \n", + " inflating: /data/dog-breed-identification/train/8b477eed0b8ca385a5008c20753d16df.jpg \n", + " inflating: /data/dog-breed-identification/train/8b692b59001659cc57f63fa621c1b9b2.jpg \n", + " inflating: /data/dog-breed-identification/train/8b6e36d51e75cf0b1b81a94f33d5f5c3.jpg \n", + " inflating: /data/dog-breed-identification/train/8b757446986fba8aa687d6106f339831.jpg \n", + " inflating: /data/dog-breed-identification/train/8b77681bdddba868aac232c323dc6b01.jpg \n", + " inflating: /data/dog-breed-identification/train/8b7b0f3b6474962448c419ed8c46712a.jpg \n", + " inflating: /data/dog-breed-identification/train/8b7c503438843feb509ac38f505f77e1.jpg \n", + " inflating: /data/dog-breed-identification/train/8b80780bc559890236d06a312eb7b889.jpg \n", + " inflating: /data/dog-breed-identification/train/8b8d64d47df5dfa4fdd0581e476103be.jpg \n", + " inflating: /data/dog-breed-identification/train/8b9484507627b9aa40a2ed98d8023d45.jpg \n", + " inflating: /data/dog-breed-identification/train/8b9bb5dbe62e19115abfe6e13590c148.jpg \n", + " inflating: /data/dog-breed-identification/train/8baf3971eb1fd962a9752a24f4ce0a2b.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8baf4fd87d634ccdcb7965b2b55c9639.jpg \n", + " inflating: /data/dog-breed-identification/train/8bba6c2e3b2994c4e313d2c489e29880.jpg \n", + " inflating: /data/dog-breed-identification/train/8bbb5cc1bb607fe461d5dd90052496b8.jpg \n", + " inflating: /data/dog-breed-identification/train/8bbdac6dc170956cbf2e73c55b611a12.jpg \n", + " inflating: /data/dog-breed-identification/train/8bd89fdacf4254cbb1739c541b654351.jpg \n", + " inflating: /data/dog-breed-identification/train/8bd905838263efd8c2cac790b5f17912.jpg \n", + " inflating: /data/dog-breed-identification/train/8bd9f03c437e44fc390e90d05b1af1bf.jpg \n", + " inflating: /data/dog-breed-identification/train/8bea04061f396a71de9ad8c45810ad0d.jpg \n", + " inflating: /data/dog-breed-identification/train/8bef88134879b7794217700319b25561.jpg \n", + " inflating: /data/dog-breed-identification/train/8befc822b56b744d72872428a0ef4851.jpg \n", + " inflating: /data/dog-breed-identification/train/8befd30dd8d44c30e9346b04a525b410.jpg \n", + " inflating: /data/dog-breed-identification/train/8bf58e563f62837df002040db7d84311.jpg \n", + " inflating: /data/dog-breed-identification/train/8bf5bd677a71e574a08dfd4fe7af1461.jpg \n", + " inflating: /data/dog-breed-identification/train/8c02f4026ef8f3595879db865c7b9f0d.jpg \n", + " inflating: /data/dog-breed-identification/train/8c061cc94ba2801a2c5f85af5653ee58.jpg \n", + " inflating: /data/dog-breed-identification/train/8c1304c22e356290cae941c3b29cddff.jpg \n", + " inflating: /data/dog-breed-identification/train/8c170e9b6122bf2d003e109bef89e42f.jpg \n", + " inflating: /data/dog-breed-identification/train/8c18c7bf930d8fcee918db12c9635bd9.jpg \n", + " inflating: /data/dog-breed-identification/train/8c1b4527b9d6fa6dce004f83325240b6.jpg \n", + " inflating: /data/dog-breed-identification/train/8c1c53f05c07eb4f67c809855bc44ac9.jpg \n", + " inflating: /data/dog-breed-identification/train/8c1f3220d32d79207534a062bd04fd85.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8c36d92a7e002977441fbaa2dbfc32bd.jpg \n", + " inflating: /data/dog-breed-identification/train/8c396c0bdf28e07c823c5fcbb9d6840a.jpg \n", + " inflating: /data/dog-breed-identification/train/8c3c5a2ab2212dae4745f271854101b9.jpg \n", + " inflating: /data/dog-breed-identification/train/8c3e7fd14b975242c61afc05228b0e50.jpg \n", + " inflating: /data/dog-breed-identification/train/8c547b5df1aff986fa67f9efc97459b9.jpg \n", + " inflating: /data/dog-breed-identification/train/8c5b6ce48c6129af802c7a2b8c6b6a00.jpg \n", + " inflating: /data/dog-breed-identification/train/8c6264cf21c69e5fb58ba291c45e7049.jpg \n", + " inflating: /data/dog-breed-identification/train/8c66150fd3c6c144ce2cf79496b4a6e1.jpg \n", + " inflating: /data/dog-breed-identification/train/8c68af1aa5657527026a50081ac0bd48.jpg \n", + " inflating: /data/dog-breed-identification/train/8c6cd1ae33852154187c467495911011.jpg \n", + " inflating: /data/dog-breed-identification/train/8c6d252c208a9564a72522e6592b12d1.jpg \n", + " inflating: /data/dog-breed-identification/train/8c75d52decdc0749d1de591095752ce8.jpg \n", + " inflating: /data/dog-breed-identification/train/8c77ac7beb31548766f7965ae9b245fe.jpg \n", + " inflating: /data/dog-breed-identification/train/8c7e23bd90d01d924555b942be79844b.jpg \n", + " inflating: /data/dog-breed-identification/train/8c91464904ba9bfee08c4af8091b10e3.jpg \n", + " inflating: /data/dog-breed-identification/train/8c96a702b0a8fc525956f780ec87a28d.jpg \n", + " inflating: /data/dog-breed-identification/train/8c99f1d9dde16550e002b8ea3fc342cd.jpg \n", + " inflating: /data/dog-breed-identification/train/8ca7a862f8419c130ea73917d12dfce5.jpg \n", + " inflating: /data/dog-breed-identification/train/8caee24979a5ad8887ceba75d93f0ab6.jpg \n", + " inflating: /data/dog-breed-identification/train/8cb6a74c34257a22665589648d827aad.jpg \n", + " inflating: /data/dog-breed-identification/train/8cd3fb35c72d692d4c717b6052465f55.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8cd4a3412502ee82680eb4761d68d0dc.jpg \n", + " inflating: /data/dog-breed-identification/train/8cd51950e3f5d3d119d195bde1eea9ad.jpg \n", + " inflating: /data/dog-breed-identification/train/8cd5ae0663d2d5fbc39f291a99f3ecd2.jpg \n", + " inflating: /data/dog-breed-identification/train/8cd7532d405da73d94dedca7e887ff92.jpg \n", + " inflating: /data/dog-breed-identification/train/8ce5703ad6a594ab9ba7540964789fe4.jpg \n", + " inflating: /data/dog-breed-identification/train/8ce94c7eec80364765b1c21f440a7f9f.jpg \n", + " inflating: /data/dog-breed-identification/train/8ced482ceedeccca651a81a15ebe47bd.jpg \n", + " inflating: /data/dog-breed-identification/train/8cfc893064157fb61fc90bf9a5ab14dd.jpg \n", + " inflating: /data/dog-breed-identification/train/8d06f684b12bf2c64375acf001cd8285.jpg \n", + " inflating: /data/dog-breed-identification/train/8d081a85379005be35b81747dc8873fa.jpg \n", + " inflating: /data/dog-breed-identification/train/8d0f47344bd67acf7ca366190aac064e.jpg \n", + " inflating: /data/dog-breed-identification/train/8d19a2d1a7027646ed367ec35bfe6b8c.jpg \n", + " inflating: /data/dog-breed-identification/train/8d19b5c9f212ccbc4df8546c7a572919.jpg \n", + " inflating: /data/dog-breed-identification/train/8d2dc9668eadcd1eec60c27a9680c45f.jpg \n", + " inflating: /data/dog-breed-identification/train/8d2eba220c4199ebd37db6bceabf4969.jpg \n", + " inflating: /data/dog-breed-identification/train/8d32ca9b3faddc11fa5488ab18b4cefe.jpg \n", + " inflating: /data/dog-breed-identification/train/8d469d98dc5b185f7dbe736389ed6234.jpg \n", + " inflating: /data/dog-breed-identification/train/8d4b28ae56c225f870139cdfeff5131c.jpg \n", + " inflating: /data/dog-breed-identification/train/8d545ea0a05cfbdc5b0ba95a3290bf48.jpg \n", + " inflating: /data/dog-breed-identification/train/8d5737b4275e379c36e3797400862f6e.jpg \n", + " inflating: /data/dog-breed-identification/train/8d59450a985ebb3790c70d672e616c68.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8d60fa3151beba31a2d32ae462166d83.jpg \n", + " inflating: /data/dog-breed-identification/train/8d6215f66e6d426a8c8f34d3f626d97e.jpg \n", + " inflating: /data/dog-breed-identification/train/8d622ccfee16bb45cfd55aeebea08687.jpg \n", + " inflating: /data/dog-breed-identification/train/8d628ba3a953baa57b98f46a18ae9f4b.jpg \n", + " inflating: /data/dog-breed-identification/train/8d636c0e0e64295df6bbd2e434b47459.jpg \n", + " inflating: /data/dog-breed-identification/train/8d64b701b234458957a62af9b81944dd.jpg \n", + " inflating: /data/dog-breed-identification/train/8d66efb7040095b0397cf73761d3bf41.jpg \n", + " inflating: /data/dog-breed-identification/train/8d7374555bffed40c4f77a55d348f8ef.jpg \n", + " inflating: /data/dog-breed-identification/train/8d77611798e17dc31a663fae87f46a14.jpg \n", + " inflating: /data/dog-breed-identification/train/8d86decc51df3d4730dd01ea23d531ec.jpg \n", + " inflating: /data/dog-breed-identification/train/8d871e0d1106088b8cdb9f62bd7de91b.jpg \n", + " inflating: /data/dog-breed-identification/train/8d8cc902a0f1277b78d1a8b376083999.jpg \n", + " inflating: /data/dog-breed-identification/train/8d91d75c382b8685accb5aa89e5ba489.jpg \n", + " inflating: /data/dog-breed-identification/train/8d934bef01917d2b5a60fe59ef755740.jpg \n", + " inflating: /data/dog-breed-identification/train/8d93e1171042807702d5e4cb4073b2ed.jpg \n", + " inflating: /data/dog-breed-identification/train/8d940744e1e8a9cc5c5bdd11333b261c.jpg \n", + " inflating: /data/dog-breed-identification/train/8daabe9ded307c802b5f84492bc14d69.jpg \n", + " inflating: /data/dog-breed-identification/train/8dbade75e5d93537341de6a378beecbb.jpg \n", + " inflating: /data/dog-breed-identification/train/8dbe0cc0a87f2bdebbe93f9a62a5c6eb.jpg \n", + " inflating: /data/dog-breed-identification/train/8dbe74d29b6c3839cf69f03a40d397dd.jpg \n", + " inflating: /data/dog-breed-identification/train/8dc0b8413494816aff0d87e9f64ae4fc.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8dc1697a8eb8eb23f3585ee1705fa4d2.jpg \n", + " inflating: /data/dog-breed-identification/train/8dc51b503e88122ad71dd0c7569fd3bb.jpg \n", + " inflating: /data/dog-breed-identification/train/8dc980c9f136b41bf36d0490cdad2ce8.jpg \n", + " inflating: /data/dog-breed-identification/train/8dd4e667d9ba987fe4c42fbb09206e60.jpg \n", + " inflating: /data/dog-breed-identification/train/8ddb0c01060fa25b3df031c1f8eaf11a.jpg \n", + " inflating: /data/dog-breed-identification/train/8ddbcd3564fa5bc83c0054c2d15ad30a.jpg \n", + " inflating: /data/dog-breed-identification/train/8de72f0f1be9df62944c9ed2f7841ccd.jpg \n", + " inflating: /data/dog-breed-identification/train/8de94658ff000fd09d40e16038ce8fae.jpg \n", + " inflating: /data/dog-breed-identification/train/8deb39440ce06c0c54895334e556e421.jpg \n", + " inflating: /data/dog-breed-identification/train/8df3d9a938eb532873ad6fbd67e89598.jpg \n", + " inflating: /data/dog-breed-identification/train/8df4b1fb271a526391593679992f9060.jpg \n", + " inflating: /data/dog-breed-identification/train/8df66073740f76663abf8b5b13b34eeb.jpg \n", + " inflating: /data/dog-breed-identification/train/8dfc43ad52aae79f3a05d46fce633692.jpg \n", + " inflating: /data/dog-breed-identification/train/8e007c1c798c69de28267843b9894539.jpg \n", + " inflating: /data/dog-breed-identification/train/8e06a294fc3138b2db92db326d017967.jpg \n", + " inflating: /data/dog-breed-identification/train/8e09f934070402cd81a77cd07a148960.jpg \n", + " inflating: /data/dog-breed-identification/train/8e0e9d43edac86c555d515a2a9afd252.jpg \n", + " inflating: /data/dog-breed-identification/train/8e10d1f33b37f98f019c97d77f5d3721.jpg \n", + " inflating: /data/dog-breed-identification/train/8e11ed7d590db5f624fc248051dfa485.jpg \n", + " inflating: /data/dog-breed-identification/train/8e17a8cac3752b5341c951fcba07ece9.jpg \n", + " inflating: /data/dog-breed-identification/train/8e1863bda935ce99536f77a786e414f8.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8e1aad62d6bd523796d7c47ef8f020a4.jpg \n", + " inflating: /data/dog-breed-identification/train/8e28f759e7eb06080d1ae4cd12cdf31c.jpg \n", + " inflating: /data/dog-breed-identification/train/8e29cf48d5da70aca6548d8b51526dd7.jpg \n", + " inflating: /data/dog-breed-identification/train/8e2a385becd796ab5997a171f5cbfecd.jpg \n", + " inflating: /data/dog-breed-identification/train/8e36fbb03c79117c3c7ff2962d255c91.jpg \n", + " inflating: /data/dog-breed-identification/train/8e39b8c4de4944adf3ab7edcc51357d4.jpg \n", + " inflating: /data/dog-breed-identification/train/8e3b7695d93b91002370bdc9d01bfcfb.jpg \n", + " inflating: /data/dog-breed-identification/train/8e43f265452b2ff6c99e69c27f590614.jpg \n", + " inflating: /data/dog-breed-identification/train/8e45f42d744b4a48ffe7ea8520a96f65.jpg \n", + " inflating: /data/dog-breed-identification/train/8e4a535a93b830de0cf9a2ed46bcd010.jpg \n", + " inflating: /data/dog-breed-identification/train/8e4d2ac20aa4433988f8a0037fae8c31.jpg \n", + " inflating: /data/dog-breed-identification/train/8e6091bd33ecf18beaf1e2ee9f362463.jpg \n", + " inflating: /data/dog-breed-identification/train/8e68f99a0188facb43e8d767d3918c81.jpg \n", + " inflating: /data/dog-breed-identification/train/8e6c4873efd059a52cc41084b6824878.jpg \n", + " inflating: /data/dog-breed-identification/train/8e709d582d2c6e443ea7237848a3a083.jpg \n", + " inflating: /data/dog-breed-identification/train/8e7256b23446acbd33967122787c1eb3.jpg \n", + " inflating: /data/dog-breed-identification/train/8e78aa56df01e3e6c29a6fa1f0b19575.jpg \n", + " inflating: /data/dog-breed-identification/train/8e78fd35123e6824f396ca237700f853.jpg \n", + " inflating: /data/dog-breed-identification/train/8e7d364f77c61d6721e1f574edc0d4e5.jpg \n", + " inflating: /data/dog-breed-identification/train/8e7e04072f8a9398f668e0a4e16b0734.jpg \n", + " inflating: /data/dog-breed-identification/train/8e802d5459b5e857e832c721d6cd3a35.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8e8e8e089554f1c73883a0172649a0f2.jpg \n", + " inflating: /data/dog-breed-identification/train/8e94ccf10370e5d5d49972353ade1f2f.jpg \n", + " inflating: /data/dog-breed-identification/train/8ea40cd8c90ff5e7d388b6712b955aa4.jpg \n", + " inflating: /data/dog-breed-identification/train/8eaafbba23d55cfe9422651b32523ad8.jpg \n", + " inflating: /data/dog-breed-identification/train/8eaef67a7269e3e766cce9ce9a1a9ebe.jpg \n", + " inflating: /data/dog-breed-identification/train/8eb9c258aea79be2d03f7218a6458b4b.jpg \n", + " inflating: /data/dog-breed-identification/train/8ec7f3cb80b55638e2054032cef8374c.jpg \n", + " inflating: /data/dog-breed-identification/train/8ece811c1c9f81737fc824116b70f51a.jpg \n", + " inflating: /data/dog-breed-identification/train/8ed5e9327986e8759a13ad6b6544cd39.jpg \n", + " inflating: /data/dog-breed-identification/train/8edad149506278be0d351b3d2a4aaea7.jpg \n", + " inflating: /data/dog-breed-identification/train/8ee7ec8ec21f4f1dd3374d496da41989.jpg \n", + " inflating: /data/dog-breed-identification/train/8eed4ce43508497ca2fb97b5052c719b.jpg \n", + " inflating: /data/dog-breed-identification/train/8ef2c118289d0d5a142ae28651a46ab4.jpg \n", + " inflating: /data/dog-breed-identification/train/8ef5879e23d4903e557e1d8d523b19c8.jpg \n", + " inflating: /data/dog-breed-identification/train/8ef8122b39dff2ed803b2c8b5e267612.jpg \n", + " inflating: /data/dog-breed-identification/train/8efabc4160e30aaa54562478a9cca2c9.jpg \n", + " inflating: /data/dog-breed-identification/train/8f0411b4916e9345b4007c3245eff24d.jpg \n", + " inflating: /data/dog-breed-identification/train/8f049f42311282d60ce3599d34a47a04.jpg \n", + " inflating: /data/dog-breed-identification/train/8f0ccd6491456a35075e0e35af6fdd56.jpg \n", + " inflating: /data/dog-breed-identification/train/8f15c00b8e73fd9afc8f6ebd7ea44c80.jpg \n", + " inflating: /data/dog-breed-identification/train/8f1c26440b26139072c1b6d10c4be2cd.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8f1e25e2485ecbce36b765b5a992549e.jpg \n", + " inflating: /data/dog-breed-identification/train/8f2aebc12f23322b1bf9bbc7f710772e.jpg \n", + " inflating: /data/dog-breed-identification/train/8f366598018cc9e785cfbb461b3b01da.jpg \n", + " inflating: /data/dog-breed-identification/train/8f3854aaa58f727a9ca017c25595a345.jpg \n", + " inflating: /data/dog-breed-identification/train/8f3bd64f0547f57bd227bbc2aff8f770.jpg \n", + " inflating: /data/dog-breed-identification/train/8f3e10fab6ea57479f91a5c6efc11351.jpg \n", + " inflating: /data/dog-breed-identification/train/8f73fe936d2477bdbef77ebb0deaef78.jpg \n", + " inflating: /data/dog-breed-identification/train/8f7a434e4b09857f30c6a35056cfd5d1.jpg \n", + " inflating: /data/dog-breed-identification/train/8f801a4ef6ba82252569289352a152d2.jpg \n", + " inflating: /data/dog-breed-identification/train/8f8daa0da029ecf2ac47fbe8d19eef39.jpg \n", + " inflating: /data/dog-breed-identification/train/8f8ea6a2db06234fcd7368112be96c47.jpg \n", + " inflating: /data/dog-breed-identification/train/8f93451056594e3730feb5a7b831d94b.jpg \n", + " inflating: /data/dog-breed-identification/train/8f98905c8a1010074eb939c26734ef5e.jpg \n", + " inflating: /data/dog-breed-identification/train/8f9b37ee8f502bd3750f0e7573702f13.jpg \n", + " inflating: /data/dog-breed-identification/train/8f9c699fe464bdb7aa7f2edf7661d379.jpg \n", + " inflating: /data/dog-breed-identification/train/8f9f48b4ad92e8f3082d2ab40d9f5e51.jpg \n", + " inflating: /data/dog-breed-identification/train/8fa8a4c1355b991d8641c8fcd471e9b3.jpg \n", + " inflating: /data/dog-breed-identification/train/8fac9aa474ecefc9b60dbd185f6450d8.jpg \n", + " inflating: /data/dog-breed-identification/train/8fb070664c0597ae883da9195c281a66.jpg \n", + " inflating: /data/dog-breed-identification/train/8fb29d378d44d3f4af216cf4912ed789.jpg \n", + " inflating: /data/dog-breed-identification/train/8fb2be84faae8faae043077b5508bcfb.jpg \n", + " inflating: 
/data/dog-breed-identification/train/8fb95a14aa29a8649502eb0f26f2215f.jpg \n", + " inflating: /data/dog-breed-identification/train/8fbd1187590a9b65878e467e96705637.jpg \n", + " inflating: /data/dog-breed-identification/train/8fc54285359ddda909d9059cfa13ff27.jpg \n", + " inflating: /data/dog-breed-identification/train/8fc79f8c31da438d2ace564999e943e9.jpg \n", + " inflating: /data/dog-breed-identification/train/8fc88385d3033eccfcc88736e5f0cfa8.jpg \n", + " inflating: /data/dog-breed-identification/train/8fd5567056096a09aad51d3518b95b49.jpg \n", + " inflating: /data/dog-breed-identification/train/8fd62f7306a0e8260bd2ceec5d46ff82.jpg \n", + " inflating: /data/dog-breed-identification/train/8fde47266eba1478489404c5b5282b4b.jpg \n", + " inflating: /data/dog-breed-identification/train/8fe632cb1840ea68cb6305578196dc96.jpg \n", + " inflating: /data/dog-breed-identification/train/8fea7f05411053a85da10c447c1691bb.jpg \n", + " inflating: /data/dog-breed-identification/train/8feac68f15de7bf1965b2de4f821ec14.jpg \n", + " inflating: /data/dog-breed-identification/train/8fef339017219eb3967f930c062f9dba.jpg \n", + " inflating: /data/dog-breed-identification/train/8ff0a26e39535edfde6fb02bdd83e9ad.jpg \n", + " inflating: /data/dog-breed-identification/train/8ff5946c8a2962c47caa9110fe56f91b.jpg \n", + " inflating: /data/dog-breed-identification/train/9000dac1467ed4a3fff4d7658bbbf3e4.jpg \n", + " inflating: /data/dog-breed-identification/train/90109c343f43458deb83972cbd552a0f.jpg \n", + " inflating: /data/dog-breed-identification/train/9012a23092a021327763c4227cfa2f62.jpg \n", + " inflating: /data/dog-breed-identification/train/901543bd8e1efb32027ccb06378dda4c.jpg \n", + " inflating: /data/dog-breed-identification/train/901efa01e94e8601a4a1755a47377748.jpg \n", + " inflating: /data/dog-breed-identification/train/902a7fc56a4422af481df08912a4e62f.jpg \n", + " inflating: /data/dog-breed-identification/train/902f55bfe2b33a66ba183de39a453d5e.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9034ce4ee16750acd935da10c4eb1fa5.jpg \n", + " inflating: /data/dog-breed-identification/train/904af4de88a7146b714dac3475bf5ca9.jpg \n", + " inflating: /data/dog-breed-identification/train/904f13f221653c8389dc5fa88d68541b.jpg \n", + " inflating: /data/dog-breed-identification/train/905a5a1957e65d2fb2ec199eedfd3dea.jpg \n", + " inflating: /data/dog-breed-identification/train/9064e11f8694a2af667baa8ea997f6b1.jpg \n", + " inflating: /data/dog-breed-identification/train/90668f46101549651824cb05e2224269.jpg \n", + " inflating: /data/dog-breed-identification/train/9067e5010d2694616f1507de5c5185fd.jpg \n", + " inflating: /data/dog-breed-identification/train/9069cb5ba1da1f40788406128b05a53b.jpg \n", + " inflating: /data/dog-breed-identification/train/906dce11c81ed107495bbbf1e96891b7.jpg \n", + " inflating: /data/dog-breed-identification/train/907ac782dcb11020894b547df9142223.jpg \n", + " inflating: /data/dog-breed-identification/train/9083b6eabd4555704ba900d3bd163145.jpg \n", + " inflating: /data/dog-breed-identification/train/909714da5eb9fdf003b571843a820cb9.jpg \n", + " inflating: /data/dog-breed-identification/train/90ba51fd14179ef0f16d537cba990103.jpg \n", + " inflating: /data/dog-breed-identification/train/90ce42cb4fa91a9e5709c799021e0e38.jpg \n", + " inflating: /data/dog-breed-identification/train/90d3bc63d8ac5e4cd614b35459b469ea.jpg \n", + " inflating: /data/dog-breed-identification/train/90d74633492b3e4ec00767dc695e80eb.jpg \n", + " inflating: /data/dog-breed-identification/train/90e5311b4bc74fd9b668b1c728ec1bca.jpg \n", + " inflating: /data/dog-breed-identification/train/90fc6e34d3ab3e7c6fff4f854df6d3a4.jpg \n", + " inflating: /data/dog-breed-identification/train/91008238e223348a72f36eb1e042a466.jpg \n", + " inflating: /data/dog-breed-identification/train/910571cb39d0ea06634d05a2b22e6334.jpg \n", + " inflating: /data/dog-breed-identification/train/9108f7ab07ed5d12dd618f604867ed75.jpg \n", + " inflating: 
/data/dog-breed-identification/train/910bd1cbf310a280b98777fda080a929.jpg \n", + " inflating: /data/dog-breed-identification/train/9115dfce9d3cffa5e2c7866c0d4bc176.jpg \n", + " inflating: /data/dog-breed-identification/train/911a822b55c0de0e85d8839ef61b329c.jpg \n", + " inflating: /data/dog-breed-identification/train/911abd1a39cdc943dc4b0b5d3c19d86d.jpg \n", + " inflating: /data/dog-breed-identification/train/911c794b5c4cde06d8eefa654c246351.jpg \n", + " inflating: /data/dog-breed-identification/train/912a0d50758238e3be7ee4089f529a25.jpg \n", + " inflating: /data/dog-breed-identification/train/9130e19e00eb59baece5d77695016eb5.jpg \n", + " inflating: /data/dog-breed-identification/train/9132fd0d9cd69b9940ab4fceaf52f872.jpg \n", + " inflating: /data/dog-breed-identification/train/9133a4283e1921dbda180a11e9e147a7.jpg \n", + " inflating: /data/dog-breed-identification/train/9150cd31b05a8a2be35dc642ed08d889.jpg \n", + " inflating: /data/dog-breed-identification/train/915bef1332462fb6e5c60f2ef93371d5.jpg \n", + " inflating: /data/dog-breed-identification/train/9165ca35661772654f3b8f851d28d830.jpg \n", + " inflating: /data/dog-breed-identification/train/916a6975c9b4c50fe51a37475b1e6189.jpg \n", + " inflating: /data/dog-breed-identification/train/916acab38c9b1ef122851c0815c6826a.jpg \n", + " inflating: /data/dog-breed-identification/train/91769e1a1f3d0701d3b2d947d87dec0a.jpg \n", + " inflating: /data/dog-breed-identification/train/917a2bfef2b7949073a246443592ee50.jpg \n", + " inflating: /data/dog-breed-identification/train/917c71c8931c03751320006ba9deeada.jpg \n", + " inflating: /data/dog-breed-identification/train/917ce8504f2a5347de1686975842b4bf.jpg \n", + " inflating: /data/dog-breed-identification/train/91849dc79b956a72d9e43e299ca52de5.jpg \n", + " inflating: /data/dog-breed-identification/train/918541385e6e7bed1300d95b79d638c9.jpg \n", + " inflating: /data/dog-breed-identification/train/91892e88b37536ba2356b582eb2cdff5.jpg \n", + " inflating: 
/data/dog-breed-identification/train/919275bc732b771a349d6515bacad30f.jpg \n", + " inflating: /data/dog-breed-identification/train/9194630fa4708d66fb2c5b114dac0786.jpg \n", + " inflating: /data/dog-breed-identification/train/91984e95858f522d6a29150813453e7e.jpg \n", + " inflating: /data/dog-breed-identification/train/919e8336fe2c2834454c6690155a0620.jpg \n", + " inflating: /data/dog-breed-identification/train/919f861b7bd4a4514a0b4b9994ce4aa1.jpg \n", + " inflating: /data/dog-breed-identification/train/91a41fe9c86b0fc3cd70a72fb29018a2.jpg \n", + " inflating: /data/dog-breed-identification/train/91a5e8db15bccfb6cfa2df5e8b95ec03.jpg \n", + " inflating: /data/dog-breed-identification/train/91ac3d5ac0af34da508d0e53466f91be.jpg \n", + " inflating: /data/dog-breed-identification/train/91af3f0d18e5bd9b09a145ff5db9cb65.jpg \n", + " inflating: /data/dog-breed-identification/train/91af963a2f4cc1b5ab4f7e5d257b4830.jpg \n", + " inflating: /data/dog-breed-identification/train/91b435e846b2a1ebda379174a8ecc755.jpg \n", + " inflating: /data/dog-breed-identification/train/91b620611eb4e0fed6093251f3d4c1f8.jpg \n", + " inflating: /data/dog-breed-identification/train/91baafad0bfabce8819dd375963677d3.jpg \n", + " inflating: /data/dog-breed-identification/train/91c25bb4539f68d68c7a23b8aa5d04b9.jpg \n", + " inflating: /data/dog-breed-identification/train/91d2a1f1fed71f812361976b22e2dfd4.jpg \n", + " inflating: /data/dog-breed-identification/train/91d8c78f0201eb7620fe1cea512ab108.jpg \n", + " inflating: /data/dog-breed-identification/train/91e21edb68e80e0cee4bf8c11f0026db.jpg \n", + " inflating: /data/dog-breed-identification/train/91e2c8d330465c87720a92984675dc4a.jpg \n", + " inflating: /data/dog-breed-identification/train/91f9e0ea60dfdc91939efcb892ee7d94.jpg \n", + " inflating: /data/dog-breed-identification/train/91fcceacb76b43385198e96d1f82240f.jpg \n", + " inflating: /data/dog-breed-identification/train/9215151844a0caedca69036cc1f574fe.jpg \n", + " inflating: 
/data/dog-breed-identification/train/921f88563193b64bcdba94784751cc82.jpg \n", + " inflating: /data/dog-breed-identification/train/92362bf7e6ef7dffff3b8fd449b528aa.jpg \n", + " inflating: /data/dog-breed-identification/train/9236bd07cce91361f798145b5edb8a78.jpg \n", + " inflating: /data/dog-breed-identification/train/923ae97ae8d714eaa8a566c8a0370f22.jpg \n", + " inflating: /data/dog-breed-identification/train/9249992dfe9dd7276764d40a5175f8cf.jpg \n", + " inflating: /data/dog-breed-identification/train/924a21b260bedc69e175e60713fe841d.jpg \n", + " inflating: /data/dog-breed-identification/train/924d9891d144fe3df9c06cc281bc5e86.jpg \n", + " inflating: /data/dog-breed-identification/train/925b2891c064453b9b2b94cd4e0d6307.jpg \n", + " inflating: /data/dog-breed-identification/train/925b64154ebed0a44e1f19d7586e7a25.jpg \n", + " inflating: /data/dog-breed-identification/train/925b987166c0bac0a7ed760473bf7e65.jpg \n", + " inflating: /data/dog-breed-identification/train/92668066dcba05ab51143ba413aaaef7.jpg \n", + " inflating: /data/dog-breed-identification/train/92684aa71e785e6348baeb491e53797d.jpg \n", + " inflating: /data/dog-breed-identification/train/926f5368e7a665fe7aa25286cb0e8af4.jpg \n", + " inflating: /data/dog-breed-identification/train/926f9499d8e4f113d2e9bc788c564f54.jpg \n", + " inflating: /data/dog-breed-identification/train/927137299c6bfa514208a03212bbeb5f.jpg \n", + " inflating: /data/dog-breed-identification/train/92786aef192c6738c05da3014f4fd913.jpg \n", + " inflating: /data/dog-breed-identification/train/927d59ba35da645ee6b17bb17ac6fe31.jpg \n", + " inflating: /data/dog-breed-identification/train/928f281ca2a484573ffa2d63813454f3.jpg \n", + " inflating: /data/dog-breed-identification/train/929558678c0d624ae801d9935b469ff1.jpg \n", + " inflating: /data/dog-breed-identification/train/9297419afb8c6dfea90bfaf2b2dfb81c.jpg \n", + " inflating: /data/dog-breed-identification/train/929a85fb96525c2cccdc40529a8c75ba.jpg \n", + " inflating: 
/data/dog-breed-identification/train/92a8d330ae081b4514acb5c28fc3c2c2.jpg \n", + " inflating: /data/dog-breed-identification/train/92a9243497a51e86ad3a842f829c1270.jpg \n", + " inflating: /data/dog-breed-identification/train/92b1992357ae55d1e0a1b5e1fb13764e.jpg \n", + " inflating: /data/dog-breed-identification/train/92b51694f777ef7f2cf473fe4591d2a1.jpg \n", + " inflating: /data/dog-breed-identification/train/92c2c86dc5ef0824884d25c81077da5c.jpg \n", + " inflating: /data/dog-breed-identification/train/92cd9b84d46f409747f172060b45200d.jpg \n", + " inflating: /data/dog-breed-identification/train/92cef15bbcfeb99daf148dfdd7587d61.jpg \n", + " inflating: /data/dog-breed-identification/train/92d23591a857f9adb0024d587683a1cd.jpg \n", + " inflating: /data/dog-breed-identification/train/92d61143bceb0f64e4630eec688bebc9.jpg \n", + " inflating: /data/dog-breed-identification/train/92d66c4265d4697642d27fceceea8dd4.jpg \n", + " inflating: /data/dog-breed-identification/train/92dc766692d8771417ada42a2bb4fc5b.jpg \n", + " inflating: /data/dog-breed-identification/train/92e945f27cf8202cfb225e74dfddfda1.jpg \n", + " inflating: /data/dog-breed-identification/train/92ea22d7f8dad0ac186507fb6fb529c4.jpg \n", + " inflating: /data/dog-breed-identification/train/92eac03e18067b690a54a0e301f4448c.jpg \n", + " inflating: /data/dog-breed-identification/train/92f9b5279deaab33fe384021a05be7b1.jpg \n", + " inflating: /data/dog-breed-identification/train/9302083c61e3af329ca6143f6fae43e2.jpg \n", + " inflating: /data/dog-breed-identification/train/9305df8ac8d5c0744b124ae260d55099.jpg \n", + " inflating: /data/dog-breed-identification/train/9308c8fb4de942e11368bce562e92d7d.jpg \n", + " inflating: /data/dog-breed-identification/train/9316df7ef6098a60baf4223290765df2.jpg \n", + " inflating: /data/dog-breed-identification/train/931ed9a15a347fd467fc081006da00cf.jpg \n", + " inflating: /data/dog-breed-identification/train/9326b74fa6ec50134e10d05437be338a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/932a3f628a53b53847bb86e17a8e9534.jpg \n", + " inflating: /data/dog-breed-identification/train/93363408e03721a55c0e179e9540d048.jpg \n", + " inflating: /data/dog-breed-identification/train/933985d766d5a834262127f0d6f5d8fd.jpg \n", + " inflating: /data/dog-breed-identification/train/933e245251bc667ce6ff74f12dc998fe.jpg \n", + " inflating: /data/dog-breed-identification/train/9347a734a6475b2cd88c24700260881f.jpg \n", + " inflating: /data/dog-breed-identification/train/934f0466bfe1d0e3f19540cee4cb35f4.jpg \n", + " inflating: /data/dog-breed-identification/train/9357ef3fae73f47e2446f4d39e8af919.jpg \n", + " inflating: /data/dog-breed-identification/train/935823502911fd76718a9f8eedf7037f.jpg \n", + " inflating: /data/dog-breed-identification/train/935a2fe4c6068678737d39564e45de66.jpg \n", + " inflating: /data/dog-breed-identification/train/935f0487ee57424b20afdf7d48b56849.jpg \n", + " inflating: /data/dog-breed-identification/train/93660caa9d048421c2ce42040b1b3e7d.jpg \n", + " inflating: /data/dog-breed-identification/train/936dad2f17cd6e7a47bc4a25ec0e0bb7.jpg \n", + " inflating: /data/dog-breed-identification/train/93769b0544775870913200306eaecd18.jpg \n", + " inflating: /data/dog-breed-identification/train/937af3a67d2b55afb480c7c624f470d6.jpg \n", + " inflating: /data/dog-breed-identification/train/937e122b29817a9aad7786d14b713cf3.jpg \n", + " inflating: /data/dog-breed-identification/train/93806df837d1bf71567f511c6cb19a52.jpg \n", + " inflating: /data/dog-breed-identification/train/9384b167387012b3e80326656c3acd55.jpg \n", + " inflating: /data/dog-breed-identification/train/93870e5bf9d1a2efe65d010ab08173f1.jpg \n", + " inflating: /data/dog-breed-identification/train/9389fa9ac75b25ce54da351cfcf0b784.jpg \n", + " inflating: /data/dog-breed-identification/train/9391d2d55670ff680c588dbf12e26105.jpg \n", + " inflating: /data/dog-breed-identification/train/9398217aeb0aa9604b9ac9ef9f681cff.jpg \n", + " inflating: 
/data/dog-breed-identification/train/93998d7d1338168e6bf005ebd83dce3e.jpg \n", + " inflating: /data/dog-breed-identification/train/939a394d7e18424e4412bce67ecb9ced.jpg \n", + " inflating: /data/dog-breed-identification/train/939eb0d3ec4ba70f8d9dde67b9ddd7c7.jpg \n", + " inflating: /data/dog-breed-identification/train/939f51df57282705c7bbf31e385c4ae2.jpg \n", + " inflating: /data/dog-breed-identification/train/93a1e38d497f452883eae18ab2b11467.jpg \n", + " inflating: /data/dog-breed-identification/train/93a2eb58762c2ace7604975ae4a5c4c6.jpg \n", + " inflating: /data/dog-breed-identification/train/93a364a9da700ae22e87f9666b5365dc.jpg \n", + " inflating: /data/dog-breed-identification/train/93a4a72783af7b60f3a00bb32fc6443f.jpg \n", + " inflating: /data/dog-breed-identification/train/93ab7f7688f5f974cab9bd3c43d5d726.jpg \n", + " inflating: /data/dog-breed-identification/train/93afc2044e1f3e9ab0134f569a1c11b7.jpg \n", + " inflating: /data/dog-breed-identification/train/93c8ecbaadb89b66147f239f743cae6a.jpg \n", + " inflating: /data/dog-breed-identification/train/93ca132582022982cc79d99dd77a3953.jpg \n", + " inflating: /data/dog-breed-identification/train/93cb4701772622a7932c99114338a034.jpg \n", + " inflating: /data/dog-breed-identification/train/93da5672c36d4aa8c89b9a526f4a33fc.jpg \n", + " inflating: /data/dog-breed-identification/train/93e85b8ae6bbee24861f9d80449230e5.jpg \n", + " inflating: /data/dog-breed-identification/train/93e99e11f97f9f31f0a68f91e955fbe1.jpg \n", + " inflating: /data/dog-breed-identification/train/93ed4d236ca388844089c0ea9cc935fe.jpg \n", + " inflating: /data/dog-breed-identification/train/93ef939778971c7e2892360ff61eec17.jpg \n", + " inflating: /data/dog-breed-identification/train/93f174c2b71d2b7680a7162f00d79da4.jpg \n", + " inflating: /data/dog-breed-identification/train/9410454220ae7b13ce2aab4323184032.jpg \n", + " inflating: /data/dog-breed-identification/train/94134ebebe54ced088ef9f4d602018d3.jpg \n", + " inflating: 
/data/dog-breed-identification/train/941bda07379c291b78adb2813b96bad3.jpg \n", + " inflating: /data/dog-breed-identification/train/941c912029f030c71f748d7affd35fbe.jpg \n", + " inflating: /data/dog-breed-identification/train/941dff46bad4b57495199709b7ab04b5.jpg \n", + " inflating: /data/dog-breed-identification/train/9424861acb9b6a9f645cd3a8da87bee6.jpg \n", + " inflating: /data/dog-breed-identification/train/9427f3bf8510ab7da8ee0b7e232b1a05.jpg \n", + " inflating: /data/dog-breed-identification/train/942d5a6f10384d47e38cab1b52bdcd75.jpg \n", + " inflating: /data/dog-breed-identification/train/94350fdc51b84578d0548f0321ac1149.jpg \n", + " inflating: /data/dog-breed-identification/train/94375abe500e8a5797b25db22ec1de06.jpg \n", + " inflating: /data/dog-breed-identification/train/94407c29657a0c84ce6c7176999b63b0.jpg \n", + " inflating: /data/dog-breed-identification/train/944776650f21c3fc9cc1410ad6054989.jpg \n", + " inflating: /data/dog-breed-identification/train/94592671cf58b50eb1f8ab70dbebdee3.jpg \n", + " inflating: /data/dog-breed-identification/train/945adc048fc9ca656180736fd60cf8d1.jpg \n", + " inflating: /data/dog-breed-identification/train/94620cc5c87cc1c630d9e76391d151c8.jpg \n", + " inflating: /data/dog-breed-identification/train/9463bed0f3812eee19c6bf929e679638.jpg \n", + " inflating: /data/dog-breed-identification/train/9468380b65de0c9bcdaa5921305fe362.jpg \n", + " inflating: /data/dog-breed-identification/train/946ad160e437edff6204c8260fb9704b.jpg \n", + " inflating: /data/dog-breed-identification/train/9478b7532067872cbdfaa180484c3064.jpg \n", + " inflating: /data/dog-breed-identification/train/947a24f75382955fcaba54d61b83cb96.jpg \n", + " inflating: /data/dog-breed-identification/train/9484e1dd83e7c87396813004aab18326.jpg \n", + " inflating: /data/dog-breed-identification/train/949ab3e56fca7aee2a63d9648f5f9320.jpg \n", + " inflating: /data/dog-breed-identification/train/94a0cbd51eebabc4e51bd32520298f58.jpg \n", + " inflating: 
/data/dog-breed-identification/train/94a513cd73ba792bc888b644541121f0.jpg \n", + " inflating: /data/dog-breed-identification/train/94a59b2c3c297306245312eda9c83f98.jpg \n", + " inflating: /data/dog-breed-identification/train/94a6b39e3ee0f670929b89a5496ae9fc.jpg \n", + " inflating: /data/dog-breed-identification/train/94ae24ccc10fbb6a0e9f9102527434db.jpg \n", + " inflating: /data/dog-breed-identification/train/94af603ff6f83dbc8079a1d315981f5a.jpg \n", + " inflating: /data/dog-breed-identification/train/94ba4bdcf98c9ff1a7f7be7c02786874.jpg \n", + " inflating: /data/dog-breed-identification/train/94bb794f3d3c30cf570938a121b1805d.jpg \n", + " inflating: /data/dog-breed-identification/train/94c4a769c0b1944dcaefbf2588952a44.jpg \n", + " inflating: /data/dog-breed-identification/train/94c4d59a7e93326c11db4bb1d0ca8a23.jpg \n", + " inflating: /data/dog-breed-identification/train/94d9e41e7e54cab9fad00dd7d9f18ff9.jpg \n", + " inflating: /data/dog-breed-identification/train/94df662b75abda1cbe203738a2b22148.jpg \n", + " inflating: /data/dog-breed-identification/train/94e6a6c033f6181f22b4642bf661ff5e.jpg \n", + " inflating: /data/dog-breed-identification/train/94e9f738b011bc5258d0593fbd9d4bb6.jpg \n", + " inflating: /data/dog-breed-identification/train/94ed2c37498d2189871f18e377843b1d.jpg \n", + " inflating: /data/dog-breed-identification/train/94f64671fa5c87ded83a0b1915f82d71.jpg \n", + " inflating: /data/dog-breed-identification/train/951117cb0e8bea7251480a08f6833686.jpg \n", + " inflating: /data/dog-breed-identification/train/9513287b371865067bc3448ab4da646e.jpg \n", + " inflating: /data/dog-breed-identification/train/95144edf427950c3430cebdf43676661.jpg \n", + " inflating: /data/dog-breed-identification/train/9525457649263d9043bf96495394eceb.jpg \n", + " inflating: /data/dog-breed-identification/train/9527fc067fec743019a444465358c23e.jpg \n", + " inflating: /data/dog-breed-identification/train/9529b8093aeca32d8828004347667c44.jpg \n", + " inflating: 
/data/dog-breed-identification/train/952ee6aa4050c9593c2bcb2baf54b266.jpg \n", + " inflating: /data/dog-breed-identification/train/953a85c624c95ff9c6f59d4518533883.jpg \n", + " inflating: /data/dog-breed-identification/train/953aad8d1b43596857b4b558804e751b.jpg \n", + " inflating: /data/dog-breed-identification/train/953fc878b8830cb73c0cb9c58a0ce2e1.jpg \n", + " inflating: /data/dog-breed-identification/train/9547e29629c4d7f523eb1366a1ab4def.jpg \n", + " inflating: /data/dog-breed-identification/train/955677ee4ce177492ce886426271f831.jpg \n", + " inflating: /data/dog-breed-identification/train/955aa14970c097c30fec72f106e4cb44.jpg \n", + " inflating: /data/dog-breed-identification/train/955be2ac08466bd001b285ee907670b6.jpg \n", + " inflating: /data/dog-breed-identification/train/955fdc8f29b6b78c8888d0dfdd1c6a8d.jpg \n", + " inflating: /data/dog-breed-identification/train/95660f727282e9db785ea010b0dd4160.jpg \n", + " inflating: /data/dog-breed-identification/train/957e390e19baa6d163079835a28155b9.jpg \n", + " inflating: /data/dog-breed-identification/train/9584bf3b986648ffcfd08f27b16d3c1f.jpg \n", + " inflating: /data/dog-breed-identification/train/958695b0c7323ac786ec86f6f1222eac.jpg \n", + " inflating: /data/dog-breed-identification/train/958f26d170394045f98ab3278a4f2f8c.jpg \n", + " inflating: /data/dog-breed-identification/train/95a35be5b90604e39e5177723ca0b348.jpg \n", + " inflating: /data/dog-breed-identification/train/95b04ef45b853922685ba6d257fc2f0c.jpg \n", + " inflating: /data/dog-breed-identification/train/95ba2022991e02fe0cfea89fe8ae6f5f.jpg \n", + " inflating: /data/dog-breed-identification/train/95c246b63eb7f6c5312181d954d5d01c.jpg \n", + " inflating: /data/dog-breed-identification/train/95c9870d3e06e1e640e6e39daf65e25b.jpg \n", + " inflating: /data/dog-breed-identification/train/95cafdd4b2f6be2ba46c472c2dedb98f.jpg \n", + " inflating: /data/dog-breed-identification/train/95cbfe6dad49dbaec7230f270e968603.jpg \n", + " inflating: 
/data/dog-breed-identification/train/95d1224b21964f4fd5a4033e3bf3384e.jpg \n", + " inflating: /data/dog-breed-identification/train/95debc3253f72a005a8473afdd06e730.jpg \n", + " inflating: /data/dog-breed-identification/train/95e04c4e07aff8b120404927e30ffecc.jpg \n", + " inflating: /data/dog-breed-identification/train/95e82d8ad15c46cb1bb9d4fb439ef62a.jpg \n", + " inflating: /data/dog-breed-identification/train/95eb7716870f9b11cecfe57851d1eeb4.jpg \n", + " inflating: /data/dog-breed-identification/train/95ec164b9719385742168962be4eda40.jpg \n", + " inflating: /data/dog-breed-identification/train/95f5d4bb20b7d03dd1c3577586fa460d.jpg \n", + " inflating: /data/dog-breed-identification/train/95fddaeff7bee1d481cff457b5401c7f.jpg \n", + " inflating: /data/dog-breed-identification/train/960291f7df24618b6f643bdb7db726f5.jpg \n", + " inflating: /data/dog-breed-identification/train/9606853b9f54adf4537c061d7c9023ad.jpg \n", + " inflating: /data/dog-breed-identification/train/96139f25790911cf399af1aef2e81b6c.jpg \n", + " inflating: /data/dog-breed-identification/train/961aab1d30044142287d7ce30c45b65b.jpg \n", + " inflating: /data/dog-breed-identification/train/96269bea463a7b189a636ef082c4e324.jpg \n", + " inflating: /data/dog-breed-identification/train/9629f46ec0bd1eae63121877c7d6787e.jpg \n", + " inflating: /data/dog-breed-identification/train/962c60cd9ad2e083547caa891ca0d466.jpg \n", + " inflating: /data/dog-breed-identification/train/96304329e537c8202c65e246198559a9.jpg \n", + " inflating: /data/dog-breed-identification/train/96332d61312a248ebe3a3007090000d5.jpg \n", + " inflating: /data/dog-breed-identification/train/96484b44c4a40f180bfb4333fe16fae5.jpg \n", + " inflating: /data/dog-breed-identification/train/96502473e0aa30557885892ed8a3e1aa.jpg \n", + " inflating: /data/dog-breed-identification/train/9653df0f5da50d6d0c1f71f356ada7f3.jpg \n", + " inflating: /data/dog-breed-identification/train/9653fe4984785611b21b367cdaa6e050.jpg \n", + " inflating: 
/data/dog-breed-identification/train/965565e9bb4647b962f56d128a81c37f.jpg \n", + " inflating: /data/dog-breed-identification/train/965ab0fa7629ecac86f1ebc2623fbedd.jpg \n", + " inflating: /data/dog-breed-identification/train/965ec39e068162e6dc15618377e01571.jpg \n", + " inflating: /data/dog-breed-identification/train/965f39ec77e59f042714aed7ea826bb9.jpg \n", + " inflating: /data/dog-breed-identification/train/96620010901dd4aa4768f5371ebcae4a.jpg \n", + " inflating: /data/dog-breed-identification/train/9663beed651287e84f9ec04dfdb1aa1e.jpg \n", + " inflating: /data/dog-breed-identification/train/9664790f704ca0796bf8c790ca276e9b.jpg \n", + " inflating: /data/dog-breed-identification/train/966552158b2bd4831994618a1b8aca0e.jpg \n", + " inflating: /data/dog-breed-identification/train/9672be8c46f615dd8afa075e93960e90.jpg \n", + " inflating: /data/dog-breed-identification/train/968319ab218a721298197d0819f519ab.jpg \n", + " inflating: /data/dog-breed-identification/train/968d6f3b48f707e6e22341bcc8dea9e6.jpg \n", + " inflating: /data/dog-breed-identification/train/968ea716b790aa6aeeaa6cd74bf6634a.jpg \n", + " inflating: /data/dog-breed-identification/train/9691721b06aaa21c4caa8e0cfd3aced0.jpg \n", + " inflating: /data/dog-breed-identification/train/969263eab216a0ead070d0f3736ce9d4.jpg \n", + " inflating: /data/dog-breed-identification/train/96977713e768b5ebf16e3b8b8734736c.jpg \n", + " inflating: /data/dog-breed-identification/train/969b7c7525b379c6e86c124f94f99d17.jpg \n", + " inflating: /data/dog-breed-identification/train/96a7a15ba427e697067d62c8cef5a23a.jpg \n", + " inflating: /data/dog-breed-identification/train/96b8968289174e8bc89c905b3e44a858.jpg \n", + " inflating: /data/dog-breed-identification/train/96bb7481a8b240dba59c8b8a21befa90.jpg \n", + " inflating: /data/dog-breed-identification/train/96c040dad1ec2e3305419e088a4fe17d.jpg \n", + " inflating: /data/dog-breed-identification/train/96c63d961030dfeebc8814b1e406fb0a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/96d2d27cf5fe8af265e35e0cf2179fd8.jpg \n", + " inflating: /data/dog-breed-identification/train/96d6f4caa2b832efb51870c55c534275.jpg \n", + " inflating: /data/dog-breed-identification/train/96e02cc2cf7e3327f3cf1bf3eea1640d.jpg \n", + " inflating: /data/dog-breed-identification/train/96e05a4c7dde1737370357973d146709.jpg \n", + " inflating: /data/dog-breed-identification/train/96e158af695d168ded068b4b115b69ab.jpg \n", + " inflating: /data/dog-breed-identification/train/96e2ca5c370468190471ddf41514170c.jpg \n", + " inflating: /data/dog-breed-identification/train/96e54cfe8a3b74a495df81dba3ac566e.jpg \n", + " inflating: /data/dog-breed-identification/train/96e7a7493dcf1bbcdc6be986fc04a71c.jpg \n", + " inflating: /data/dog-breed-identification/train/96eb04c5fc2efe0701732dc775e33ef3.jpg \n", + " inflating: /data/dog-breed-identification/train/96f04a330af8b03da147644ccb75b4a0.jpg \n", + " inflating: /data/dog-breed-identification/train/96f83ed9c2c64f469c85edd8a808f398.jpg \n", + " inflating: /data/dog-breed-identification/train/96ff29c52552cd25bb3b49b77a2fe32a.jpg \n", + " inflating: /data/dog-breed-identification/train/970206514684e8482b46c6f1ee6c4b43.jpg \n", + " inflating: /data/dog-breed-identification/train/970444352377502f01c93fe527fd965d.jpg \n", + " inflating: /data/dog-breed-identification/train/9708b549ad1a33a5635220927e200fe5.jpg \n", + " inflating: /data/dog-breed-identification/train/970df71375e17e998d03900b2e3557bb.jpg \n", + " inflating: /data/dog-breed-identification/train/970e70ffb6f0f950ef2e7b5978522da7.jpg \n", + " inflating: /data/dog-breed-identification/train/97130695095c34cf19887e986923979a.jpg \n", + " inflating: /data/dog-breed-identification/train/9714847d51d4a74b57857be28f13dd24.jpg \n", + " inflating: /data/dog-breed-identification/train/971558f84a2f4325f0d7460798f65331.jpg \n", + " inflating: /data/dog-breed-identification/train/971cc3bd35960509cb1b1a51930a67a0.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9730dfff0af50b9bbbed93220b440005.jpg \n", + " inflating: /data/dog-breed-identification/train/973156751af162e836c62bf2634d0220.jpg \n", + " inflating: /data/dog-breed-identification/train/97338b5e572b8154cf9aa1e7fc507c92.jpg \n", + " inflating: /data/dog-breed-identification/train/9738de1cd9864b573a97c6936956d720.jpg \n", + " inflating: /data/dog-breed-identification/train/9741efdd134f5b90d268e671b02f5a3a.jpg \n", + " inflating: /data/dog-breed-identification/train/974e68e6708b5c382bda7ed0c68656ee.jpg \n", + " inflating: /data/dog-breed-identification/train/974eb81110fd75acbc0a9aeb489439dc.jpg \n", + " inflating: /data/dog-breed-identification/train/9768ee7f14491c11917288326ab31fb0.jpg \n", + " inflating: /data/dog-breed-identification/train/976982b85d3f8c7b7297ffbaf8a78d55.jpg \n", + " inflating: /data/dog-breed-identification/train/976c7c90a2ee23ee68322c943d8b8ccf.jpg \n", + " inflating: /data/dog-breed-identification/train/9777f569b6116476eeade6c12a234d8e.jpg \n", + " inflating: /data/dog-breed-identification/train/9781621d2b39b78b12cd1023e6bb1e69.jpg \n", + " inflating: /data/dog-breed-identification/train/9787f0345521092b6921a518609bc5d3.jpg \n", + " inflating: /data/dog-breed-identification/train/978a9eee19b794afb59da56af20e8714.jpg \n", + " inflating: /data/dog-breed-identification/train/97b4eef9edb80dd5e6abd60ebdb9a80b.jpg \n", + " inflating: /data/dog-breed-identification/train/97b801f08019ef3ba8c974e5f6e2fb46.jpg \n", + " inflating: /data/dog-breed-identification/train/97be986f65ee4567ecf15cc72d777e9c.jpg \n", + " inflating: /data/dog-breed-identification/train/97c18e836c38efdcdaf33648c17b2d78.jpg \n", + " inflating: /data/dog-breed-identification/train/97c1a3631c969b4489ede8ec9c80b4fc.jpg \n", + " inflating: /data/dog-breed-identification/train/97c863ca10051ae4822b13264c40589e.jpg \n", + " inflating: /data/dog-breed-identification/train/97d24551ed20f55b7323e37e85adb4b6.jpg \n", + " inflating: 
/data/dog-breed-identification/train/97d712e7a54a5c89ebb6c41947128fb1.jpg \n", + " inflating: /data/dog-breed-identification/train/97e8c5fb46a894b5817e752259315886.jpg \n", + " inflating: /data/dog-breed-identification/train/97e940e416301389fb1c3eacd424ef35.jpg \n", + " inflating: /data/dog-breed-identification/train/97eaf551c872ffc382aacc77fd2766e9.jpg \n", + " inflating: /data/dog-breed-identification/train/97f66937029b254fcf94be88cdffaf56.jpg \n", + " inflating: /data/dog-breed-identification/train/9802d488e7a0b8b4fb04bfe2af2361f9.jpg \n", + " inflating: /data/dog-breed-identification/train/9806378e6363a0c93fa66c152fa094b2.jpg \n", + " inflating: /data/dog-breed-identification/train/980657a2afaf0fdd549e166fa899407e.jpg \n", + " inflating: /data/dog-breed-identification/train/980ecf1bdb0d8d48093f6a83a9b349a0.jpg \n", + " inflating: /data/dog-breed-identification/train/9813633e973fbc891a9bc031f8ec6d08.jpg \n", + " inflating: /data/dog-breed-identification/train/9825dfdb2952299a99a2d049a0451636.jpg \n", + " inflating: /data/dog-breed-identification/train/98398963f090dae624c421e6b6a0ebe5.jpg \n", + " inflating: /data/dog-breed-identification/train/9839d076759d03fb33db62f180efeaa6.jpg \n", + " inflating: /data/dog-breed-identification/train/983d4d40106bbaee0404d242221acb6e.jpg \n", + " inflating: /data/dog-breed-identification/train/983ea5e5fda9b67b419bd10ab8828835.jpg \n", + " inflating: /data/dog-breed-identification/train/983efc3c13e21aa72a5bf89bf5fbeb99.jpg \n", + " inflating: /data/dog-breed-identification/train/985bbacc9509d794dde32e0619b3264e.jpg \n", + " inflating: /data/dog-breed-identification/train/985e9ae5e9382a61fc14d9ed109f2cff.jpg \n", + " inflating: /data/dog-breed-identification/train/986202ae18232b607c3f7a815914e4e7.jpg \n", + " inflating: /data/dog-breed-identification/train/9862df0bfdc859432fea16cd147ca02e.jpg \n", + " inflating: /data/dog-breed-identification/train/98653befb4ff74e23ee1a0a9f2a91df2.jpg \n", + " inflating: 
/data/dog-breed-identification/train/987370a40fea9e37765009a6592823be.jpg \n", + " inflating: /data/dog-breed-identification/train/987a026f2789da6e4a88cba40f4347e3.jpg \n", + " inflating: /data/dog-breed-identification/train/987cfc296938c20a95949c759e4ffb7e.jpg \n", + " inflating: /data/dog-breed-identification/train/987de7672a009521f6d8c7cb6f679255.jpg \n", + " inflating: /data/dog-breed-identification/train/988a8a6e3f780ae0da254fad869af84b.jpg \n", + " inflating: /data/dog-breed-identification/train/98948082d4faa052198208a4a8081ef1.jpg \n", + " inflating: /data/dog-breed-identification/train/9895cd89bf45084a10e95f9d57eb6b4b.jpg \n", + " inflating: /data/dog-breed-identification/train/989ad181026cc26dc11dd6f31306f0c1.jpg \n", + " inflating: /data/dog-breed-identification/train/98a58bbf292fb12b78698e7458e6b570.jpg \n", + " inflating: /data/dog-breed-identification/train/98a75a5cafbc3855fb37474cb755f0f6.jpg \n", + " inflating: /data/dog-breed-identification/train/98b6235a39ffbd772524aae2c4e1b3ab.jpg \n", + " inflating: /data/dog-breed-identification/train/98c1ebcef878897b2424774aecddcdbe.jpg \n", + " inflating: /data/dog-breed-identification/train/98c91e68395f3ecd645b5b21987a1a4e.jpg \n", + " inflating: /data/dog-breed-identification/train/98ca3754dedce74f75155973f1018a93.jpg \n", + " inflating: /data/dog-breed-identification/train/98d93f25c790be32356634ef856a738c.jpg \n", + " inflating: /data/dog-breed-identification/train/98dbdc1a5152948522cace80b264b3fd.jpg \n", + " inflating: /data/dog-breed-identification/train/98e15ae4dc543409d0cb6a573ac306fc.jpg \n", + " inflating: /data/dog-breed-identification/train/98e71279e0d96b32ec28498e630aa069.jpg \n", + " inflating: /data/dog-breed-identification/train/98e98242b7e57c718a0d1a643d40146d.jpg \n", + " inflating: /data/dog-breed-identification/train/98f128b7373bf66e0379e7ab488dc599.jpg \n", + " inflating: /data/dog-breed-identification/train/98f9c9d78fbcbf204e6f0d90c5dca63a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/990103331becc3d72956ba681d15e9bd.jpg \n", + " inflating: /data/dog-breed-identification/train/99165d7393c0160026d50e609872ff78.jpg \n", + " inflating: /data/dog-breed-identification/train/99182fba5c37873fe94ebbe9e5b7e298.jpg \n", + " inflating: /data/dog-breed-identification/train/991e84fa081bd97caec9f23672a3bf0a.jpg \n", + " inflating: /data/dog-breed-identification/train/992985d9287168502fa34d6de994e946.jpg \n", + " inflating: /data/dog-breed-identification/train/9931b86cd8146a12ab93965606f71309.jpg \n", + " inflating: /data/dog-breed-identification/train/993d092d60fbdf285c20183c5dcf1832.jpg \n", + " inflating: /data/dog-breed-identification/train/993d2f8fb3e675c362d4f502899cb260.jpg \n", + " inflating: /data/dog-breed-identification/train/993f57792daa5783e0f4c93b32ca1fec.jpg \n", + " inflating: /data/dog-breed-identification/train/9951edcf2d8205b7ddc7933f93b3bb24.jpg \n", + " inflating: /data/dog-breed-identification/train/99562538c149ae254d2ba0172c145e66.jpg \n", + " inflating: /data/dog-breed-identification/train/995b2cd115180d93f87a72f7b09715ce.jpg \n", + " inflating: /data/dog-breed-identification/train/995b2eea7f725108196ad726553ceb41.jpg \n", + " inflating: /data/dog-breed-identification/train/995f929ee21cd978ccb5d6dd823b88e9.jpg \n", + " inflating: /data/dog-breed-identification/train/9967da09835c2259a11196be6783c2f3.jpg \n", + " inflating: /data/dog-breed-identification/train/996b4b46fab484460373f80a1780babd.jpg \n", + " inflating: /data/dog-breed-identification/train/996d48c6dae4e17bc8e08f6f451f59d8.jpg \n", + " inflating: /data/dog-breed-identification/train/997673ede472a2fbb056bb4803d577e7.jpg \n", + " inflating: /data/dog-breed-identification/train/997c98bfceb94c21d1bf1831cd181de5.jpg \n", + " inflating: /data/dog-breed-identification/train/99848a37db92c99f49ae4d3b3aa440f3.jpg \n", + " inflating: /data/dog-breed-identification/train/99882fbaaa35f4e6864616b9e83b999f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9989a2dea7f209379685902c71ddc048.jpg \n", + " inflating: /data/dog-breed-identification/train/998a01cafea7ff6d709f967e65e50ede.jpg \n", + " inflating: /data/dog-breed-identification/train/998aa2b3f9d6379b4ef051025443601c.jpg \n", + " inflating: /data/dog-breed-identification/train/99931be4da09f3b2aeb396027fa75b16.jpg \n", + " inflating: /data/dog-breed-identification/train/999bb3a6d00cc6456d4e71bebae48f78.jpg \n", + " inflating: /data/dog-breed-identification/train/99a04a61814333706a0e70197535b9b8.jpg \n", + " inflating: /data/dog-breed-identification/train/99a3cbafa74640729139a6cb80a1c513.jpg \n", + " inflating: /data/dog-breed-identification/train/99a553328787ec86117af34870867ca3.jpg \n", + " inflating: /data/dog-breed-identification/train/99b0f0ff409732318fbf7d2cf8c8d01c.jpg \n", + " inflating: /data/dog-breed-identification/train/99b31cac1b20997c30d49cd6a2d9cde4.jpg \n", + " inflating: /data/dog-breed-identification/train/99b5c534b1d362f73ec2e2d3ec9b9530.jpg \n", + " inflating: /data/dog-breed-identification/train/99c80aaf5bc0531ed9b177b0c2b696a9.jpg \n", + " inflating: /data/dog-breed-identification/train/99cc6ef02b4814f30f487c358d318c49.jpg \n", + " inflating: /data/dog-breed-identification/train/99ce3a403e4d0a7f57c2c520603aad50.jpg \n", + " inflating: /data/dog-breed-identification/train/99cf80317e4a9abd484fce74c47941ce.jpg \n", + " inflating: /data/dog-breed-identification/train/99d562e317f52acd1f86a5674b473146.jpg \n", + " inflating: /data/dog-breed-identification/train/99dcd0466a2200c131cd225f4fb60e95.jpg \n", + " inflating: /data/dog-breed-identification/train/99e0616fbf8014e22cfd09ec55fd448a.jpg \n", + " inflating: /data/dog-breed-identification/train/99e2a7f2e8b397cea5ebb113a1fd8438.jpg \n", + " inflating: /data/dog-breed-identification/train/99e6015fa51cce6a285648fe9ae75cea.jpg \n", + " inflating: /data/dog-breed-identification/train/99e92f87e797b24911992e019c3f940a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/99f66bd73ac12814fd9bd2c98997a73a.jpg \n", + " inflating: /data/dog-breed-identification/train/99fbeea51938834952e2bd18d3244dff.jpg \n", + " inflating: /data/dog-breed-identification/train/9a0268daff4ef2ad1f0bf88784ca3882.jpg \n", + " inflating: /data/dog-breed-identification/train/9a03d170d51aa6ea1a07a9fca4e0428f.jpg \n", + " inflating: /data/dog-breed-identification/train/9a04b5f3f77c5513227587f6fa70b7a1.jpg \n", + " inflating: /data/dog-breed-identification/train/9a0b645559ef189a2677dbe98e70b08a.jpg \n", + " inflating: /data/dog-breed-identification/train/9a0bd02ffe451fc028e8e9a4ecef22d4.jpg \n", + " inflating: /data/dog-breed-identification/train/9a0de42a29dcb892cf72a9e7a00c1f0b.jpg \n", + " inflating: /data/dog-breed-identification/train/9a109338c196f95c4000a7c1b6478b06.jpg \n", + " inflating: /data/dog-breed-identification/train/9a10d6f7fb0c766648b1ce1374fa72fa.jpg \n", + " inflating: /data/dog-breed-identification/train/9a18a07e133f715bee957cfa12ae92c9.jpg \n", + " inflating: /data/dog-breed-identification/train/9a19bbf958fc3f25d247eb6b60524d79.jpg \n", + " inflating: /data/dog-breed-identification/train/9a1f6626718348e63497ed0279d1dd6f.jpg \n", + " inflating: /data/dog-breed-identification/train/9a1fe5b5e1cfb439566e412634730a27.jpg \n", + " inflating: /data/dog-breed-identification/train/9a27688062adf7750b4e8a71cdd240b7.jpg \n", + " inflating: /data/dog-breed-identification/train/9a2b8517016e7afcdabb489395d27f49.jpg \n", + " inflating: /data/dog-breed-identification/train/9a2bee2420ddad2275d50ece888ee34b.jpg \n", + " inflating: /data/dog-breed-identification/train/9a303c60ced446a367364eeb122eea3c.jpg \n", + " inflating: /data/dog-breed-identification/train/9a368640c7d925f041990f6e2e050b2c.jpg \n", + " inflating: /data/dog-breed-identification/train/9a3b8444244e433a6c28ed49e9f847b2.jpg \n", + " inflating: /data/dog-breed-identification/train/9a3e73ec5c6f18eec87d936cf73dbcdc.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9a48d6dbf65a53d46b9e0b4557d7f193.jpg \n", + " inflating: /data/dog-breed-identification/train/9a4c68bc356af529465603fc23a5ff28.jpg \n", + " inflating: /data/dog-breed-identification/train/9a4cdf5ff66e73284b449dcc7efc2f51.jpg \n", + " inflating: /data/dog-breed-identification/train/9a549f3e214768132fed22aaf9ce34f8.jpg \n", + " inflating: /data/dog-breed-identification/train/9a563b73f97246e5c3f8a214b54c5300.jpg \n", + " inflating: /data/dog-breed-identification/train/9a5cb9330f48119412b0dbeeb7d092c2.jpg \n", + " inflating: /data/dog-breed-identification/train/9a6857664466f542a510819a1a5bc98b.jpg \n", + " inflating: /data/dog-breed-identification/train/9a686becc6719a6539719a0f80dc4c8b.jpg \n", + " inflating: /data/dog-breed-identification/train/9a6e0802e2fca380f693595c04047fa5.jpg \n", + " inflating: /data/dog-breed-identification/train/9a80f8a8f25112a363b0456d756de93e.jpg \n", + " inflating: /data/dog-breed-identification/train/9a94571c3563a04915b5b4e4d6d448fb.jpg \n", + " inflating: /data/dog-breed-identification/train/9a99a6fb820491cb1bd4a0d1277f7df5.jpg \n", + " inflating: /data/dog-breed-identification/train/9aa1487dc5bbb2c34758d1a608f09bef.jpg \n", + " inflating: /data/dog-breed-identification/train/9aa1d1b17f8ddf539f968adea55120b8.jpg \n", + " inflating: /data/dog-breed-identification/train/9aad83b24fe76204e25cd5f7341586fe.jpg \n", + " inflating: /data/dog-breed-identification/train/9aadfae56fcec73fb13b370ed98599ef.jpg \n", + " inflating: /data/dog-breed-identification/train/9aafe6c0c807161f1c31af871603e9e4.jpg \n", + " inflating: /data/dog-breed-identification/train/9abebc3957dfab87b9166fe10750869b.jpg \n", + " inflating: /data/dog-breed-identification/train/9ac3aa31819e0fbc75bb8323127a2941.jpg \n", + " inflating: /data/dog-breed-identification/train/9ac83c1117b6e08b1da506bece346e4c.jpg \n", + " inflating: /data/dog-breed-identification/train/9acdf755109d8d01521a0c8c440749e4.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9ad048af1f80e335fd8e3e719b0269d2.jpg \n", + " inflating: /data/dog-breed-identification/train/9ad0c52436b1b6c1a34be0b308dd0887.jpg \n", + " inflating: /data/dog-breed-identification/train/9ad725a03c572df37763c08c9ac0c545.jpg \n", + " inflating: /data/dog-breed-identification/train/9ad7b3b7280073476c05c62c83fdea19.jpg \n", + " inflating: /data/dog-breed-identification/train/9ad7b84e62adb614a516e742d2df0f1b.jpg \n", + " inflating: /data/dog-breed-identification/train/9adbe43816a43b4e4510c7ebdf88d1fc.jpg \n", + " inflating: /data/dog-breed-identification/train/9ae65ac25eebb9df589c09eaf01ea818.jpg \n", + " inflating: /data/dog-breed-identification/train/9af046cbe8971e1105a44271ef91cd00.jpg \n", + " inflating: /data/dog-breed-identification/train/9af4a4306365fe6e74e26c726de05c2f.jpg \n", + " inflating: /data/dog-breed-identification/train/9afd17ba252823662440863d6c0e6a5c.jpg \n", + " inflating: /data/dog-breed-identification/train/9aff86672f9ee1acf0efaa7c1c9e368e.jpg \n", + " inflating: /data/dog-breed-identification/train/9b01ea1c6fd64b0e1b6184904391d3e2.jpg \n", + " inflating: /data/dog-breed-identification/train/9b05f77bd3c04ef6e7006fe07d1697c9.jpg \n", + " inflating: /data/dog-breed-identification/train/9b0e6b008295897e9904d80b03fa4647.jpg \n", + " inflating: /data/dog-breed-identification/train/9b1dcfe05fa204f6f8ced6494d8ffcac.jpg \n", + " inflating: /data/dog-breed-identification/train/9b22718e4ee1e343608045d65af33566.jpg \n", + " inflating: /data/dog-breed-identification/train/9b2472991fe3434d0b36f83e8cf5a03b.jpg \n", + " inflating: /data/dog-breed-identification/train/9b26dfa18347cdb0aaafad0baa5522b4.jpg \n", + " inflating: /data/dog-breed-identification/train/9b29a9cfec9981e81d0aba83dc0be40a.jpg \n", + " inflating: /data/dog-breed-identification/train/9b2b87ec7679c63db2d4061987e732f4.jpg \n", + " inflating: /data/dog-breed-identification/train/9b3580ec11fa3e3064cb65d47215eba2.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9b36711d6b30d9e868622cb2524cd4cf.jpg \n", + " inflating: /data/dog-breed-identification/train/9b370c5ad5420941ed36037665a2ab00.jpg \n", + " inflating: /data/dog-breed-identification/train/9b390d74110d57d254eb6d2683d5b000.jpg \n", + " inflating: /data/dog-breed-identification/train/9b43109fbcae180376e10ab60c522f48.jpg \n", + " inflating: /data/dog-breed-identification/train/9b4fb689d5adf1f5996613704c038519.jpg \n", + " inflating: /data/dog-breed-identification/train/9b50462f24fad5970b2dd973e69b1d87.jpg \n", + " inflating: /data/dog-breed-identification/train/9b566d6d96b9f29361065f0d72df4857.jpg \n", + " inflating: /data/dog-breed-identification/train/9b596f56c3f3bc8058d35eb1944f60b5.jpg \n", + " inflating: /data/dog-breed-identification/train/9b5a4262c5417a1b725dc1fb1c97b195.jpg \n", + " inflating: /data/dog-breed-identification/train/9b5c596b85007cc6c4d5d4648bb40bdf.jpg \n", + " inflating: /data/dog-breed-identification/train/9b650543f76bcbdbdfe14118fcc4bc33.jpg \n", + " inflating: /data/dog-breed-identification/train/9b68b5724c63b5127703bce31dd1fa65.jpg \n", + " inflating: /data/dog-breed-identification/train/9b68e62359ae82765612cc902d7a83d3.jpg \n", + " inflating: /data/dog-breed-identification/train/9b78211f25e38517e749658e8355c024.jpg \n", + " inflating: /data/dog-breed-identification/train/9b7abb2d62395fe947683a33eee54d43.jpg \n", + " inflating: /data/dog-breed-identification/train/9ba6ef3e015eb8637565361a6218c65a.jpg \n", + " inflating: /data/dog-breed-identification/train/9baf45e1dffbf945d7390de180a631da.jpg \n", + " inflating: /data/dog-breed-identification/train/9bc7f0d3198ab08ba7b7de306f4058db.jpg \n", + " inflating: /data/dog-breed-identification/train/9bce03c60ab62bf945ed6005ef81c1a1.jpg \n", + " inflating: /data/dog-breed-identification/train/9bd01fec0fe84bf86b25219968808abc.jpg \n", + " inflating: /data/dog-breed-identification/train/9bd6bfe9d0f21fbe059596962a49313f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9bdbaa146c1c71e9db759348e9f5f56a.jpg \n", + " inflating: /data/dog-breed-identification/train/9be08e8cc72c67bd3a0cb9dfbab9fd3b.jpg \n", + " inflating: /data/dog-breed-identification/train/9bef849130e5759f4e77d9cdbd72034a.jpg \n", + " inflating: /data/dog-breed-identification/train/9bf204c0add25dd912699fedc1f25aac.jpg \n", + " inflating: /data/dog-breed-identification/train/9bf3d79a743c864430ddc69fcc4e4c0d.jpg \n", + " inflating: /data/dog-breed-identification/train/9bf88e3c2f361f5c93acf7b277546375.jpg \n", + " inflating: /data/dog-breed-identification/train/9bf8d209bc22d6bd10ca8a53c3b25c4b.jpg \n", + " inflating: /data/dog-breed-identification/train/9bf9f1a4c841a1812524e218e121c048.jpg \n", + " inflating: /data/dog-breed-identification/train/9c010ac92006c01fd68602e4a896ba28.jpg \n", + " inflating: /data/dog-breed-identification/train/9c03345af7afb548c2d3fa17116eb825.jpg \n", + " inflating: /data/dog-breed-identification/train/9c0d4cee4acefbd21b643266a22aab7d.jpg \n", + " inflating: /data/dog-breed-identification/train/9c10668d7a4038549c8349f9037bf33d.jpg \n", + " inflating: /data/dog-breed-identification/train/9c1bb0db2b3239a8d7e44c3fa01feba8.jpg \n", + " inflating: /data/dog-breed-identification/train/9c2498a498e9635d76a6ad66f9d81a1a.jpg \n", + " inflating: /data/dog-breed-identification/train/9c255970bc0a106ad9b031c1a5cf97d1.jpg \n", + " inflating: /data/dog-breed-identification/train/9c2989138fd806ed66a8fbb74f02fd0c.jpg \n", + " inflating: /data/dog-breed-identification/train/9c4f3a8d5d506f5f368de386a3015c81.jpg \n", + " inflating: /data/dog-breed-identification/train/9c5ab09758f935bc9e3d5670d1f14cd6.jpg \n", + " inflating: /data/dog-breed-identification/train/9c673a702cd1ea728c1d693b2f684df3.jpg \n", + " inflating: /data/dog-breed-identification/train/9c692e77df5e3a9dfd38f487624d8053.jpg \n", + " inflating: /data/dog-breed-identification/train/9c6e6453cf7792a3d8b9ebb78f543b31.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9c6f8f10027e0076ba75d10bfc3232a4.jpg \n", + " inflating: /data/dog-breed-identification/train/9c82a714a5a61104e2fbfb255a7e13ba.jpg \n", + " inflating: /data/dog-breed-identification/train/9c831e62e16d7764fd04178342af0f72.jpg \n", + " inflating: /data/dog-breed-identification/train/9c88de8027c36fc397daf2ca403e14ef.jpg \n", + " inflating: /data/dog-breed-identification/train/9c8bab83db2da5aad21f41e44643de64.jpg \n", + " inflating: /data/dog-breed-identification/train/9ca47f914b1d7b8ad9d59297e02d2963.jpg \n", + " inflating: /data/dog-breed-identification/train/9ca510930ba5f61cfdbba63d9fa99128.jpg \n", + " inflating: /data/dog-breed-identification/train/9ca75c6889227cb3beffe2e4f929bcce.jpg \n", + " inflating: /data/dog-breed-identification/train/9ca7fa5c1ec7340c2ede88de2a2c55cc.jpg \n", + " inflating: /data/dog-breed-identification/train/9cb7daea9f8c91fbcb0c021b80e30092.jpg \n", + " inflating: /data/dog-breed-identification/train/9cbdcc4ff6fc0daf8e32caa3ca818aef.jpg \n", + " inflating: /data/dog-breed-identification/train/9cc4a7f129eca8f31387c624ada69790.jpg \n", + " inflating: /data/dog-breed-identification/train/9cce6280e5e2de7c87fc71995dd092c4.jpg \n", + " inflating: /data/dog-breed-identification/train/9cce979c602d678cb93a97f8e69a6070.jpg \n", + " inflating: /data/dog-breed-identification/train/9cd270ddb9ada1f6457938f6844baed1.jpg \n", + " inflating: /data/dog-breed-identification/train/9cd7a36bc18f40588ece5be4189536c6.jpg \n", + " inflating: /data/dog-breed-identification/train/9cdc2e80866af2cf68b972489fad988c.jpg \n", + " inflating: /data/dog-breed-identification/train/9cde15dd34bbf752d6ac654295938a1e.jpg \n", + " inflating: /data/dog-breed-identification/train/9ce19c4796bc350837ee8c46df56adfb.jpg \n", + " inflating: /data/dog-breed-identification/train/9ce66abbf81462439d6dd6805af1d9a1.jpg \n", + " inflating: /data/dog-breed-identification/train/9ce9349cf6b83af44056f47fc340372c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9d137534989cab3200f2abd6d6409902.jpg \n", + " inflating: /data/dog-breed-identification/train/9d13cfdd4fc7d0f3c3c5fa488facb853.jpg \n", + " inflating: /data/dog-breed-identification/train/9d171901854b046234bde698045b1044.jpg \n", + " inflating: /data/dog-breed-identification/train/9d1c8c760384f8d02e4d32c93bd77110.jpg \n", + " inflating: /data/dog-breed-identification/train/9d2451f69ee64a6d605739f98cb627d7.jpg \n", + " inflating: /data/dog-breed-identification/train/9d267017b07f6eac408bad624f205484.jpg \n", + " inflating: /data/dog-breed-identification/train/9d33d4af3070c9676140336bf7bdca72.jpg \n", + " inflating: /data/dog-breed-identification/train/9d345c3e0500664c5bb03e99c0f1dcdc.jpg \n", + " inflating: /data/dog-breed-identification/train/9d3ecae65dee9723c4df0845aef7f6ca.jpg \n", + " inflating: /data/dog-breed-identification/train/9d53f7360ed130a897dbaf560aa0650a.jpg \n", + " inflating: /data/dog-breed-identification/train/9d54d3d97ce7f394dd8c08b332e03cbe.jpg \n", + " inflating: /data/dog-breed-identification/train/9d58e010fca3218581f0771b8981ee04.jpg \n", + " inflating: /data/dog-breed-identification/train/9d59793a5c249214baf030d2f6cf107b.jpg \n", + " inflating: /data/dog-breed-identification/train/9d682c186c174363cdd6c82d299bb634.jpg \n", + " inflating: /data/dog-breed-identification/train/9d8e975079a6d0327e22f0fecbdc8bbe.jpg \n", + " inflating: /data/dog-breed-identification/train/9db59a1ca4943ade583f67c96652fdcf.jpg \n", + " inflating: /data/dog-breed-identification/train/9dc1b89d30ce214f27c8a3327c2f4484.jpg \n", + " inflating: /data/dog-breed-identification/train/9dc6ab764c94bd20c63fd4efb2bb8aac.jpg \n", + " inflating: /data/dog-breed-identification/train/9dc749a8498efff9f9e81bb6bb159451.jpg \n", + " inflating: /data/dog-breed-identification/train/9dd77f5acf4a62c7c978bcce4affe2c6.jpg \n", + " inflating: /data/dog-breed-identification/train/9ddfdc3d8e6fd346ce146cf2aaef7a9c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9de493aa10c4c94b2130bbabd57c6456.jpg \n", + " inflating: /data/dog-breed-identification/train/9de88b5e7341f561dd3b69330b96d854.jpg \n", + " inflating: /data/dog-breed-identification/train/9dece95cba6069a919447dfbb19f75e2.jpg \n", + " inflating: /data/dog-breed-identification/train/9df5e6ab4c12f8cddd6bc3ed343251f8.jpg \n", + " inflating: /data/dog-breed-identification/train/9dfc1168e6366d5db462efa79c8774ec.jpg \n", + " inflating: /data/dog-breed-identification/train/9dfc8ee9665e1f2645eb7752db110ba6.jpg \n", + " inflating: /data/dog-breed-identification/train/9e03036e569f2a305e264b97c1aaab84.jpg \n", + " inflating: /data/dog-breed-identification/train/9e0aeb0f36433ddddcbf36154e51e62a.jpg \n", + " inflating: /data/dog-breed-identification/train/9e0ca729fb426bff43493894b35e9915.jpg \n", + " inflating: /data/dog-breed-identification/train/9e0da33733ad5b0272ac43663a8b0672.jpg \n", + " inflating: /data/dog-breed-identification/train/9e0f478fa1403405ab209051e4e0afa3.jpg \n", + " inflating: /data/dog-breed-identification/train/9e13beffb5b1aeda1aceeec82648b058.jpg \n", + " inflating: /data/dog-breed-identification/train/9e14a1584d8ee8447ff5f71896fca8a8.jpg \n", + " inflating: /data/dog-breed-identification/train/9e18f1df8e71e373a30a04036c4c8f9c.jpg \n", + " inflating: /data/dog-breed-identification/train/9e2e3c2bbbb8cfa1baabd8ade4ed7c80.jpg \n", + " inflating: /data/dog-breed-identification/train/9e31cc4d5a9f9e240ee8dcd15b2b537d.jpg \n", + " inflating: /data/dog-breed-identification/train/9e3229a03c3ce5b187028fcf35feba3e.jpg \n", + " inflating: /data/dog-breed-identification/train/9e33dfdea9e96ca060f24c6c5cdd3e37.jpg \n", + " inflating: /data/dog-breed-identification/train/9e3639d3fb9c25fab7b95a5166ee75e3.jpg \n", + " inflating: /data/dog-breed-identification/train/9e39ca2c3e9caba90877f30f864035fa.jpg \n", + " inflating: /data/dog-breed-identification/train/9e42f02b9fb363b7ea477484713f2615.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9e486b33df53bfef19c178ecc95f3e4d.jpg \n", + " inflating: /data/dog-breed-identification/train/9e49dfded30a2e9401a149f72d9c7ca0.jpg \n", + " inflating: /data/dog-breed-identification/train/9e53073f721a3c189bcb8acd5b411e38.jpg \n", + " inflating: /data/dog-breed-identification/train/9e577dd7c29bc6074a80fed5ade75d0d.jpg \n", + " inflating: /data/dog-breed-identification/train/9e57b14c4cda56646341c8c7305a4739.jpg \n", + " inflating: /data/dog-breed-identification/train/9e59cf6d7e916083b3178522091e5318.jpg \n", + " inflating: /data/dog-breed-identification/train/9e5fd58282bfd7649fbabc090d578287.jpg \n", + " inflating: /data/dog-breed-identification/train/9e64e1f3ace7361fbbee8b685552012c.jpg \n", + " inflating: /data/dog-breed-identification/train/9e6a69607bd3e25091075b59b49dd0a2.jpg \n", + " inflating: /data/dog-breed-identification/train/9e6f089109b1365d940bfe93b17fb72b.jpg \n", + " inflating: /data/dog-breed-identification/train/9e7ceb9ae79ad60616a6bdfbbc92c963.jpg \n", + " inflating: /data/dog-breed-identification/train/9e7db8711f479dfa968b2297bee33104.jpg \n", + " inflating: /data/dog-breed-identification/train/9e81e7884652773e257986e45df70cb5.jpg \n", + " inflating: /data/dog-breed-identification/train/9e876636758732bae4a7e870e699bf74.jpg \n", + " inflating: /data/dog-breed-identification/train/9e8d139d9f2bd0cd22abefda056cdbc6.jpg \n", + " inflating: /data/dog-breed-identification/train/9e90e30a460f9c139984766dd957e62a.jpg \n", + " inflating: /data/dog-breed-identification/train/9e9573c91725e4399224b623dc48beaf.jpg \n", + " inflating: /data/dog-breed-identification/train/9e98898921d8ac40effaa332fe44257a.jpg \n", + " inflating: /data/dog-breed-identification/train/9e9fe3cf8953b8f7c237bd44a688986d.jpg \n", + " inflating: /data/dog-breed-identification/train/9ea6bbd4ff2c706480a2abbe12a8e358.jpg \n", + " inflating: /data/dog-breed-identification/train/9ea6c71d8497241ae9bc892e9aa2d36d.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9ea7f8ae85ab9b6c2e48175978dbaf3e.jpg \n", + " inflating: /data/dog-breed-identification/train/9eadcc5e6b5c3b8b0f7f191126255089.jpg \n", + " inflating: /data/dog-breed-identification/train/9eb2557572aff149c5143e287799b48d.jpg \n", + " inflating: /data/dog-breed-identification/train/9eb927fcd2caba4b3d0dfc193a1dfbb4.jpg \n", + " inflating: /data/dog-breed-identification/train/9ebb8fa8327664b97a42a36e153a79d9.jpg \n", + " inflating: /data/dog-breed-identification/train/9ec8d364e910ec64201b6e7e9ff534b1.jpg \n", + " inflating: /data/dog-breed-identification/train/9ed07b7a65a1ed54b2b22211e26b09b1.jpg \n", + " inflating: /data/dog-breed-identification/train/9ee987cd2242f87a036d495ef6dfab60.jpg \n", + " inflating: /data/dog-breed-identification/train/9ef0b7e985d38526d6f600a0efd0d39b.jpg \n", + " inflating: /data/dog-breed-identification/train/9ef443276459fc1a2a45ea2f3456375a.jpg \n", + " inflating: /data/dog-breed-identification/train/9efaa25fd1823395c4d8bb023a059e62.jpg \n", + " inflating: /data/dog-breed-identification/train/9f134ebe862c8fcd77bf283f89510fda.jpg \n", + " inflating: /data/dog-breed-identification/train/9f16e8d651033662302441057134f4ec.jpg \n", + " inflating: /data/dog-breed-identification/train/9f29b8becdfc5ad6202efe43d6c7b2eb.jpg \n", + " inflating: /data/dog-breed-identification/train/9f39f256dfc427cad1f89094369e6d3b.jpg \n", + " inflating: /data/dog-breed-identification/train/9f3dcc950c8457c87b0176db2ffe00c3.jpg \n", + " inflating: /data/dog-breed-identification/train/9f3e89f1166b8a5b9161d934c8ea79c4.jpg \n", + " inflating: /data/dog-breed-identification/train/9f402226e7a3cc98cd08d1e508826cc2.jpg \n", + " inflating: /data/dog-breed-identification/train/9f4d736dd20e1d3e13b60119f4aafc2c.jpg \n", + " inflating: /data/dog-breed-identification/train/9f4fe127f5ee381c9c38660c08c71971.jpg \n", + " inflating: /data/dog-breed-identification/train/9f540b678368faa65bedecc417ff0d16.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9f59f466dec517cbec6a32a201891156.jpg \n", + " inflating: /data/dog-breed-identification/train/9f66890fc55f6ea42abf3e926056d54b.jpg \n", + " inflating: /data/dog-breed-identification/train/9f66b4be4892cdec9c0d53f646195b04.jpg \n", + " inflating: /data/dog-breed-identification/train/9f682747d236dbaf8c530933e5dfcb44.jpg \n", + " inflating: /data/dog-breed-identification/train/9f68d045a396679a778eb54c5ed29038.jpg \n", + " inflating: /data/dog-breed-identification/train/9f6a267ebf271f72f91ca0a97d60a90f.jpg \n", + " inflating: /data/dog-breed-identification/train/9f6c4d16ab3154ba2f57cb9d7f2af40b.jpg \n", + " inflating: /data/dog-breed-identification/train/9f6f16bf36bb2d9a144f70ce09404020.jpg \n", + " inflating: /data/dog-breed-identification/train/9f70486e63ac01868d87763df94226ec.jpg \n", + " inflating: /data/dog-breed-identification/train/9f705b2db2425e112e4ceefcbaea7e03.jpg \n", + " inflating: /data/dog-breed-identification/train/9f784b4b261cfb7b398126e2077a997b.jpg \n", + " inflating: /data/dog-breed-identification/train/9f7a6cc0713a2984947db2463b999133.jpg \n", + " inflating: /data/dog-breed-identification/train/9f7d5eb41b3bfb57db764b6d6049da3f.jpg \n", + " inflating: /data/dog-breed-identification/train/9f7d62f1f74eab82149843a32ff8ec9d.jpg \n", + " inflating: /data/dog-breed-identification/train/9f80e62e65d0cdbde6647f054f12bbef.jpg \n", + " inflating: /data/dog-breed-identification/train/9f82e181388d76f44066c05f3bf6c67a.jpg \n", + " inflating: /data/dog-breed-identification/train/9f89f0711c1781582859ff9aa3b12906.jpg \n", + " inflating: /data/dog-breed-identification/train/9f998d6174571017fdc6e8c0ee14cadc.jpg \n", + " inflating: /data/dog-breed-identification/train/9f9c35c4628713c82de8499119c06850.jpg \n", + " inflating: /data/dog-breed-identification/train/9f9c667f8eb625df654915087b9746ee.jpg \n", + " inflating: /data/dog-breed-identification/train/9fadb3a7aa97e0c652a99c7939d92875.jpg \n", + " inflating: 
/data/dog-breed-identification/train/9fb14d359862987b31fee92145566e70.jpg \n", + " inflating: /data/dog-breed-identification/train/9fb9ec152d4262348922fbf4c03f6083.jpg \n", + " inflating: /data/dog-breed-identification/train/9fbbb409d665cd228436cb2c7cfff39e.jpg \n", + " inflating: /data/dog-breed-identification/train/9fbfd7b551343c7e18cb75fd2a8c6409.jpg \n", + " inflating: /data/dog-breed-identification/train/9fc04bb29966246b78c79add744c5581.jpg \n", + " inflating: /data/dog-breed-identification/train/9fc3f540a8973e5d4a212b0a2b5ffa6b.jpg \n", + " inflating: /data/dog-breed-identification/train/9fc5dfe2f42c2ffae1361da2414addf0.jpg \n", + " inflating: /data/dog-breed-identification/train/9fc7c30c893bdbb6d0973062b73b91a6.jpg \n", + " inflating: /data/dog-breed-identification/train/9fc89093fbd02be1c4a6e390ed413c97.jpg \n", + " inflating: /data/dog-breed-identification/train/9fc99626f294cd981ef5bdf6fca91b4a.jpg \n", + " inflating: /data/dog-breed-identification/train/9fcde195fbd3c4d12f94e54346fab6a1.jpg \n", + " inflating: /data/dog-breed-identification/train/9fcfe0a2711b77135d622190be6e8773.jpg \n", + " inflating: /data/dog-breed-identification/train/9fd140da24aebf0273ba365ef3e28a19.jpg \n", + " inflating: /data/dog-breed-identification/train/9fddcca650e796157ab90ccebcfe36a4.jpg \n", + " inflating: /data/dog-breed-identification/train/9fe57edea044ecd8bfa235f3644cf05c.jpg \n", + " inflating: /data/dog-breed-identification/train/9fe7b8f0306f2baf466abb878d11576c.jpg \n", + " inflating: /data/dog-breed-identification/train/9fe860ddf6ea9ca602dc6da23fb10b74.jpg \n", + " inflating: /data/dog-breed-identification/train/9ff162b5cdfe5b67a6d7bb684d36795e.jpg \n", + " inflating: /data/dog-breed-identification/train/9ff9b9fc9fe47a3b3d0bf54ce40bcf5e.jpg \n", + " inflating: /data/dog-breed-identification/train/a004e452a808e75b654d898cf776c973.jpg \n", + " inflating: /data/dog-breed-identification/train/a00af55eaca223de36018867b5cf729f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a00b7dc2280db885250308a5fb1d33ed.jpg \n", + " inflating: /data/dog-breed-identification/train/a0112c3175f463aa11e37ac21a6400ed.jpg \n", + " inflating: /data/dog-breed-identification/train/a0149e2a9b0584d29be0a68207899b9e.jpg \n", + " inflating: /data/dog-breed-identification/train/a019849fb9cd376e41fca11bd5808eae.jpg \n", + " inflating: /data/dog-breed-identification/train/a01ae294072cdf98548d45d1b60f200a.jpg \n", + " inflating: /data/dog-breed-identification/train/a02657e33f6b2895506aa317783b9d09.jpg \n", + " inflating: /data/dog-breed-identification/train/a02a029a33acd0278e134b0176d54a84.jpg \n", + " inflating: /data/dog-breed-identification/train/a03f21fc41f09a6bbb0332c7874e726c.jpg \n", + " inflating: /data/dog-breed-identification/train/a045be196aef2d7f4ffec520900d6a01.jpg \n", + " inflating: /data/dog-breed-identification/train/a048795ed9051cdcf322e407293ef855.jpg \n", + " inflating: /data/dog-breed-identification/train/a0581987fa74f323e695331abfd4537f.jpg \n", + " inflating: /data/dog-breed-identification/train/a05954e6a0ed3423ec8b4eb09352032b.jpg \n", + " inflating: /data/dog-breed-identification/train/a061fd7e669c4382b56c3290abaf8a38.jpg \n", + " inflating: /data/dog-breed-identification/train/a064e19920ce6da2dd7cca371ff9efd4.jpg \n", + " inflating: /data/dog-breed-identification/train/a0743e9ac0f0222bd7593cdbf3f3fc86.jpg \n", + " inflating: /data/dog-breed-identification/train/a0753c0a9da1276e7e05249733425150.jpg \n", + " inflating: /data/dog-breed-identification/train/a07e99186fe1459309d6b81c971313f7.jpg \n", + " inflating: /data/dog-breed-identification/train/a089101ee54c8a8dade827acdd75e90c.jpg \n", + " inflating: /data/dog-breed-identification/train/a08dd49cec96abdc1c1cf20f20f1c0eb.jpg \n", + " inflating: /data/dog-breed-identification/train/a08fc31fdfab13d99e36a64abb4b6dfc.jpg \n", + " inflating: /data/dog-breed-identification/train/a09920def5c696a562a2dd7ca6feedc5.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a0a53d7b5d4f9d9004d9e2b21785f88c.jpg \n", + " inflating: /data/dog-breed-identification/train/a0b56777ca68afe292994d124ffd23b1.jpg \n", + " inflating: /data/dog-breed-identification/train/a0bd99771f2dc6b7512f0db9612534e0.jpg \n", + " inflating: /data/dog-breed-identification/train/a0bdc9dbefdfa54b40423728f1f3627e.jpg \n", + " inflating: /data/dog-breed-identification/train/a0df51e0873ea98da80780648a8fa805.jpg \n", + " inflating: /data/dog-breed-identification/train/a0e4125ece19a96d65f26615219bdab8.jpg \n", + " inflating: /data/dog-breed-identification/train/a0ec3fd448700805fb60c3b9f3d2b3bc.jpg \n", + " inflating: /data/dog-breed-identification/train/a0ecff6d821562026d1200804d3f9cb1.jpg \n", + " inflating: /data/dog-breed-identification/train/a0f2fcabc1e9c57a62ed2b546619966b.jpg \n", + " inflating: /data/dog-breed-identification/train/a0f32a4e0fcb8bcb6a38d41782fa8f13.jpg \n", + " inflating: /data/dog-breed-identification/train/a0f4a310216c7d377f90f7cd812c0c3e.jpg \n", + " inflating: /data/dog-breed-identification/train/a0f900fe444c2c09f03d5170960e2825.jpg \n", + " inflating: /data/dog-breed-identification/train/a105cee000c3cc2c68b476b69cbea205.jpg \n", + " inflating: /data/dog-breed-identification/train/a10abeea4e3b11aad2b005fdb3aefa79.jpg \n", + " inflating: /data/dog-breed-identification/train/a10ad3bc9874ac7defb37abd9aa4826e.jpg \n", + " inflating: /data/dog-breed-identification/train/a11dc1f6219527ba2aa258e9389a75a5.jpg \n", + " inflating: /data/dog-breed-identification/train/a12274937a2a953932ca67db079b5816.jpg \n", + " inflating: /data/dog-breed-identification/train/a126d857dc3a612bba02ae1d27122d41.jpg \n", + " inflating: /data/dog-breed-identification/train/a12ad579f50f6615214f1f8fe800a3ee.jpg \n", + " inflating: /data/dog-breed-identification/train/a12cea27cf82ace00454429e62a93d4b.jpg \n", + " inflating: /data/dog-breed-identification/train/a13095bdd2938c8666addc680b84589a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a13585067b7a9149f3dbb2ea87fa9955.jpg \n", + " inflating: /data/dog-breed-identification/train/a13b56a0dbeea4d70e52d48e8e9a7710.jpg \n", + " inflating: /data/dog-breed-identification/train/a1463d84598b869be8da2b7ff3f59070.jpg \n", + " inflating: /data/dog-breed-identification/train/a157149eee9f9262b497534d5f05742a.jpg \n", + " inflating: /data/dog-breed-identification/train/a15a34f665a19be5fff0011601e41e14.jpg \n", + " inflating: /data/dog-breed-identification/train/a161acd4b2820bc68e196614ed669ce9.jpg \n", + " inflating: /data/dog-breed-identification/train/a169ca16d659e93c93bb68a34183f0de.jpg \n", + " inflating: /data/dog-breed-identification/train/a16e005757d8c4f1e9bcce0050a35e41.jpg \n", + " inflating: /data/dog-breed-identification/train/a16f16b480a2147d0f7598d246a63406.jpg \n", + " inflating: /data/dog-breed-identification/train/a1891d8dc13175e926eeb5a217098755.jpg \n", + " inflating: /data/dog-breed-identification/train/a1895c6e41a09df9be74fd64a65d4c78.jpg \n", + " inflating: /data/dog-breed-identification/train/a18a913cb80fdd9aec49672d08a297ea.jpg \n", + " inflating: /data/dog-breed-identification/train/a18a9dbf77084d1cc202ded555db7c77.jpg \n", + " inflating: /data/dog-breed-identification/train/a194b577d699f7c47359db2692e3b464.jpg \n", + " inflating: /data/dog-breed-identification/train/a198692ee75d8466fd1e3b4691dcb6d3.jpg \n", + " inflating: /data/dog-breed-identification/train/a198dfcff6e7b12eb4427dbdfe74349e.jpg \n", + " inflating: /data/dog-breed-identification/train/a19e0dd329d5e18a91a1d82775f00490.jpg \n", + " inflating: /data/dog-breed-identification/train/a1a08aed21aafc7d2a271b1653639fd1.jpg \n", + " inflating: /data/dog-breed-identification/train/a1a164808a61bbf0dd05ad49d6ff90d8.jpg \n", + " inflating: /data/dog-breed-identification/train/a1a5779dd322d8c2ba98670331686837.jpg \n", + " inflating: /data/dog-breed-identification/train/a1c0d9dbf4f0bee7763cc869881bde58.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a1c1bcf72785306063ae603cb70fc500.jpg \n", + " inflating: /data/dog-breed-identification/train/a1d4504294f8435423c56b96c4a76c18.jpg \n", + " inflating: /data/dog-breed-identification/train/a1d50b778141d7a13c07a7f366524f1b.jpg \n", + " inflating: /data/dog-breed-identification/train/a1d611d5b9c3ed31ecfe766edb018892.jpg \n", + " inflating: /data/dog-breed-identification/train/a1d9956e2e50c9f909342d26e1d32016.jpg \n", + " inflating: /data/dog-breed-identification/train/a1dfedd16f94ca093e79f93f3eb6e03d.jpg \n", + " inflating: /data/dog-breed-identification/train/a1e14ddc9300d0cad934c3db77f37f62.jpg \n", + " inflating: /data/dog-breed-identification/train/a1e4ab6fe9d1255bac0157273d64e18a.jpg \n", + " inflating: /data/dog-breed-identification/train/a1eb2b04c498844ec38bf5ac5b6184e7.jpg \n", + " inflating: /data/dog-breed-identification/train/a1f247bf7e85c9b23b5361dab02e253a.jpg \n", + " inflating: /data/dog-breed-identification/train/a1fe7ef738fcd920e4d521d5fc4092d4.jpg \n", + " inflating: /data/dog-breed-identification/train/a204ac96b29ca9f6af5571194e14ea8e.jpg \n", + " inflating: /data/dog-breed-identification/train/a21caab32c00011d38f1e409fbf65d19.jpg \n", + " inflating: /data/dog-breed-identification/train/a2231ba7d07b51253f5c443540896d16.jpg \n", + " inflating: /data/dog-breed-identification/train/a224f59b846b137d254f6a5ad0d79039.jpg \n", + " inflating: /data/dog-breed-identification/train/a2257c79556052121735b812de9ddbbb.jpg \n", + " inflating: /data/dog-breed-identification/train/a228f3f366b74618daec68dcbf0d6ec4.jpg \n", + " inflating: /data/dog-breed-identification/train/a229cee8b4845e9ed1597bdc524eeeaa.jpg \n", + " inflating: /data/dog-breed-identification/train/a22e9ba8ca7760673290efe27bea6dea.jpg \n", + " inflating: /data/dog-breed-identification/train/a2319a41db4e46818b7e9785cc9debd9.jpg \n", + " inflating: /data/dog-breed-identification/train/a234febaeeba77417fd731d9c5d3a692.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a239c6a040d2d7ceb27e45ed664ba41c.jpg \n", + " inflating: /data/dog-breed-identification/train/a23f0d69b478a31672abde229ed0f4dd.jpg \n", + " inflating: /data/dog-breed-identification/train/a23f53a45ee2d6d6218c7e3b70234091.jpg \n", + " inflating: /data/dog-breed-identification/train/a242dabba9d7b42d9bff1cd28eac19d4.jpg \n", + " inflating: /data/dog-breed-identification/train/a24baf3b524b3c8c8678c784ff213a5e.jpg \n", + " inflating: /data/dog-breed-identification/train/a2507f5290cd1e8a60f6565870a39832.jpg \n", + " inflating: /data/dog-breed-identification/train/a250d15f21001cdd2fcda1d8a0d61920.jpg \n", + " inflating: /data/dog-breed-identification/train/a25ddbe69eddafe9bbf2e542dc8cb487.jpg \n", + " inflating: /data/dog-breed-identification/train/a26037c6c5c90dbc2cf50d641e366e19.jpg \n", + " inflating: /data/dog-breed-identification/train/a2702282275212144b1683a434651d7f.jpg \n", + " inflating: /data/dog-breed-identification/train/a2711924a604f08ad42fc221db25f5bd.jpg \n", + " inflating: /data/dog-breed-identification/train/a27283e8fe735bf0127fed8bf0807b84.jpg \n", + " inflating: /data/dog-breed-identification/train/a276c2a66007b922fa5148d5ea29da84.jpg \n", + " inflating: /data/dog-breed-identification/train/a27b81688b013e16d73dd1cf7539d063.jpg \n", + " inflating: /data/dog-breed-identification/train/a27dc59c0bc6b4358473fb3ed45a1f8c.jpg \n", + " inflating: /data/dog-breed-identification/train/a27f76f364d7e757197d5f6dc4f0b033.jpg \n", + " inflating: /data/dog-breed-identification/train/a27fc9c9f35579d2ff1c7f2a2c6fae60.jpg \n", + " inflating: /data/dog-breed-identification/train/a2834a578d8a2ec1823e940c9efa1458.jpg \n", + " inflating: /data/dog-breed-identification/train/a290e911f2c3fe117944ef7e0b42cb03.jpg \n", + " inflating: /data/dog-breed-identification/train/a293948f43c84eeb43055bc78ecfdc5d.jpg \n", + " inflating: /data/dog-breed-identification/train/a2974cd29095193d21bad9d4429a0146.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a29b6e5dc3618aa9ed97ed75ba73e5a0.jpg \n", + " inflating: /data/dog-breed-identification/train/a2a210e72f5c6c1ff5a9bcf5fe9442e7.jpg \n", + " inflating: /data/dog-breed-identification/train/a2abeac260a7efb447af530cf5ce70c9.jpg \n", + " inflating: /data/dog-breed-identification/train/a2ac195c257b97664fcafbe23b060205.jpg \n", + " inflating: /data/dog-breed-identification/train/a2b19aba1a7dd25dca3437cc2bd769e6.jpg \n", + " inflating: /data/dog-breed-identification/train/a2b9bd0ee3360ec1a174daf76f385d21.jpg \n", + " inflating: /data/dog-breed-identification/train/a2bd873b70daa34386f0d9dea9a8ecd8.jpg \n", + " inflating: /data/dog-breed-identification/train/a2be9ffea5e79e2e1d5d9bc50523dc8a.jpg \n", + " inflating: /data/dog-breed-identification/train/a2c45749d89597425da581670dde0160.jpg \n", + " inflating: /data/dog-breed-identification/train/a2c5cd0d85ebbd6679bd8320d90ecfba.jpg \n", + " inflating: /data/dog-breed-identification/train/a2cf649e9d08ee2289ca1dd90e089cc3.jpg \n", + " inflating: /data/dog-breed-identification/train/a2d1737311988806e991f908de9fb088.jpg \n", + " inflating: /data/dog-breed-identification/train/a2d67239a3ca734a0d30fecf7047e649.jpg \n", + " inflating: /data/dog-breed-identification/train/a2e01339b8f644a81cf73c0506193d25.jpg \n", + " inflating: /data/dog-breed-identification/train/a2e6a489417c806e02b39b8f1d9fa17d.jpg \n", + " inflating: /data/dog-breed-identification/train/a2f617b60a5bd5b459f43a76a5bc3961.jpg \n", + " inflating: /data/dog-breed-identification/train/a2fcf508524f0601a122d705ea8a34b5.jpg \n", + " inflating: /data/dog-breed-identification/train/a300b9437a786533fbac5331bcb6996d.jpg \n", + " inflating: /data/dog-breed-identification/train/a300d63d9d5277cb11969fb24f0061a7.jpg \n", + " inflating: /data/dog-breed-identification/train/a307fca579f19edcc2311fe216679c37.jpg \n", + " inflating: /data/dog-breed-identification/train/a30c0b4c6b412361c1196216d071dc71.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a30d63bd1995db9ae684a9ceb1d4d603.jpg \n", + " inflating: /data/dog-breed-identification/train/a319f3dc68e86ef3d4fb40dcbcc4de99.jpg \n", + " inflating: /data/dog-breed-identification/train/a31f36d3aa5d81004b0d454902e40889.jpg \n", + " inflating: /data/dog-breed-identification/train/a326caecceeed4705ba786f1d8504430.jpg \n", + " inflating: /data/dog-breed-identification/train/a32893132f6fbb3635df77c157c7d8e0.jpg \n", + " inflating: /data/dog-breed-identification/train/a32faea2f67d4949761454e07437f9ac.jpg \n", + " inflating: /data/dog-breed-identification/train/a3396e27b0b6999be8bfff4d094392a8.jpg \n", + " inflating: /data/dog-breed-identification/train/a33b2afe121439623baa051f2c396c01.jpg \n", + " inflating: /data/dog-breed-identification/train/a33c68a84afbc249ab37d98425693d0b.jpg \n", + " inflating: /data/dog-breed-identification/train/a341328ad1bca600b3a8a0bb0cdde1bc.jpg \n", + " inflating: /data/dog-breed-identification/train/a3497ac3214a11cfe377ddf69fb2b727.jpg \n", + " inflating: /data/dog-breed-identification/train/a34ece95db67dc41281553e082f2e52e.jpg \n", + " inflating: /data/dog-breed-identification/train/a35063740a0bcbcb1b6a60b418284015.jpg \n", + " inflating: /data/dog-breed-identification/train/a35abb795e84cbe8ca7813ce4b28a756.jpg \n", + " inflating: /data/dog-breed-identification/train/a368664bc35a0afdb7963bd5bff0b603.jpg \n", + " inflating: /data/dog-breed-identification/train/a379cc5509118133fc0c40ad495e95cb.jpg \n", + " inflating: /data/dog-breed-identification/train/a37ff46b9d664e5f9174d4d03a06fbef.jpg \n", + " inflating: /data/dog-breed-identification/train/a382982d276e0c26d96dfad7d8c7d696.jpg \n", + " inflating: /data/dog-breed-identification/train/a38a1bd2bbae7dedd6d5f78abaad5e4a.jpg \n", + " inflating: /data/dog-breed-identification/train/a38d0965c4820c618931106e66ce167f.jpg \n", + " inflating: /data/dog-breed-identification/train/a3929452e7341352c0263072c15f37ea.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a3964ad4e38e080dfddbededf5e189f1.jpg \n", + " inflating: /data/dog-breed-identification/train/a3b14c411be2d29bb47940c509d53236.jpg \n", + " inflating: /data/dog-breed-identification/train/a3b7269c99889adde5b102ef461583bd.jpg \n", + " inflating: /data/dog-breed-identification/train/a3b99336fb9ff5164a23876567a4f7b4.jpg \n", + " inflating: /data/dog-breed-identification/train/a3be5dfa7460f431c01946b9d1f78df1.jpg \n", + " inflating: /data/dog-breed-identification/train/a3c86eed8bd50060f0d3e8d03c600087.jpg \n", + " inflating: /data/dog-breed-identification/train/a3c934375f373943efe9fbec8af54fff.jpg \n", + " inflating: /data/dog-breed-identification/train/a3d1004edc7a8c4db6f5fa3923d1983e.jpg \n", + " inflating: /data/dog-breed-identification/train/a3d1e3d009024dd7fe66921f90937c6d.jpg \n", + " inflating: /data/dog-breed-identification/train/a3d4baf88f32073eb9fda83ff629728e.jpg \n", + " inflating: /data/dog-breed-identification/train/a3d66583008cf1e8eff924620f6e982c.jpg \n", + " inflating: /data/dog-breed-identification/train/a3d7422ab0cc1dd638c56759d5f278b1.jpg \n", + " inflating: /data/dog-breed-identification/train/a3ee76dea7dadac0ae8c6f62740fbf9b.jpg \n", + " inflating: /data/dog-breed-identification/train/a3ff0fcdbd1e66768366ce73fa98083c.jpg \n", + " inflating: /data/dog-breed-identification/train/a409d83b4e22c12c931b6cf1544e0310.jpg \n", + " inflating: /data/dog-breed-identification/train/a4104f53330c1da9e99e8764af1d2a04.jpg \n", + " inflating: /data/dog-breed-identification/train/a410a7107470eae5f8d6ebf741e6c5a8.jpg \n", + " inflating: /data/dog-breed-identification/train/a412134bb9e841155922b89d5a261d26.jpg \n", + " inflating: /data/dog-breed-identification/train/a413faf2ad1a024839f2f9968a23aebb.jpg \n", + " inflating: /data/dog-breed-identification/train/a420274ff9508fa213b15344d19122b0.jpg \n", + " inflating: /data/dog-breed-identification/train/a42071349ea1285dc2de87a8148a3dac.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a42494c29d0cd8099e0da27de9cc9373.jpg \n", + " inflating: /data/dog-breed-identification/train/a4263bf0c3841ddd5ed4b0c40cbd6cf9.jpg \n", + " inflating: /data/dog-breed-identification/train/a4268c554ba77905c520dd76d061dc35.jpg \n", + " inflating: /data/dog-breed-identification/train/a4297f62afc356fe42979cb11a8b0ba9.jpg \n", + " inflating: /data/dog-breed-identification/train/a429f16af4f529fe398b90b740761d73.jpg \n", + " inflating: /data/dog-breed-identification/train/a42ab2df772522cbc2d81d7d7541f670.jpg \n", + " inflating: /data/dog-breed-identification/train/a42e354335eb827257a3fe1cb83180d5.jpg \n", + " inflating: /data/dog-breed-identification/train/a431ffd2168b523a85e4f17d94678bd8.jpg \n", + " inflating: /data/dog-breed-identification/train/a43429987cc5877ff55c09b0f3d08a6a.jpg \n", + " inflating: /data/dog-breed-identification/train/a43b07a54777ba55c95736a7f8af1b2d.jpg \n", + " inflating: /data/dog-breed-identification/train/a44001de312c09b1e8d14099c7553cf8.jpg \n", + " inflating: /data/dog-breed-identification/train/a44e6838a3b66411426d3f2961408b29.jpg \n", + " inflating: /data/dog-breed-identification/train/a45d0ddaca7309b47f2b47bedfd1914b.jpg \n", + " inflating: /data/dog-breed-identification/train/a45fbf6af618701f6dc93eba5729689a.jpg \n", + " inflating: /data/dog-breed-identification/train/a473b2b889c5c3292ad9937d5303593f.jpg \n", + " inflating: /data/dog-breed-identification/train/a47abd556e3d7780891cac8fd822d72f.jpg \n", + " inflating: /data/dog-breed-identification/train/a47b26a9e5be27130dbaff2ab901e5c0.jpg \n", + " inflating: /data/dog-breed-identification/train/a49151672825eab94674d8dff6639f72.jpg \n", + " inflating: /data/dog-breed-identification/train/a4a278e6cbadb8c0b65d808d0ba0e3cf.jpg \n", + " inflating: /data/dog-breed-identification/train/a4a6b8038f571b8399e916a1b08c41fc.jpg \n", + " inflating: /data/dog-breed-identification/train/a4b5011bbbc0b9e1689c34d335fae49a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a4b52955a828aa7ecb06081718ad89f5.jpg \n", + " inflating: /data/dog-breed-identification/train/a4b629ce39bca6206ee3841fea8c557f.jpg \n", + " inflating: /data/dog-breed-identification/train/a4b87a548f1122f4f1aefbbb7a25994c.jpg \n", + " inflating: /data/dog-breed-identification/train/a4bb3277153f52493ccda42857eeefc7.jpg \n", + " inflating: /data/dog-breed-identification/train/a4c3902d1ebb959d51e2488804b43c99.jpg \n", + " inflating: /data/dog-breed-identification/train/a4c70912e3ec79a20c670bfb9fcfc6df.jpg \n", + " inflating: /data/dog-breed-identification/train/a4c75ababee1602560c6783244a06274.jpg \n", + " inflating: /data/dog-breed-identification/train/a4c8a08f7738cc7e2eda28aeb44e3468.jpg \n", + " inflating: /data/dog-breed-identification/train/a4ca7f2c41ef46180fddf84d300a6341.jpg \n", + " inflating: /data/dog-breed-identification/train/a4cb52d4fb0d6cba912707fc15fa095c.jpg \n", + " inflating: /data/dog-breed-identification/train/a4dcc34cf079e1d7312b8375c715bdd4.jpg \n", + " inflating: /data/dog-breed-identification/train/a4e604d242c562547365f464e38e4b13.jpg \n", + " inflating: /data/dog-breed-identification/train/a4ec1fa929c97464f92a88d876e1bdb5.jpg \n", + " inflating: /data/dog-breed-identification/train/a4f26075839c36bb615bf347615e02dc.jpg \n", + " inflating: /data/dog-breed-identification/train/a4f8ba6aadd7017751b56ad0be5b96b3.jpg \n", + " inflating: /data/dog-breed-identification/train/a4fc81951216891e7017a51b8dfee270.jpg \n", + " inflating: /data/dog-breed-identification/train/a506c9fe7bdde1f53ae0e5c495b3efc0.jpg \n", + " inflating: /data/dog-breed-identification/train/a50ddcba1998c1d2b87c2c0cf4df1409.jpg \n", + " inflating: /data/dog-breed-identification/train/a5151c64e12e4b8bb173e357af6e5ccb.jpg \n", + " inflating: /data/dog-breed-identification/train/a5153e32a00c1461d9a2f778cb590ef4.jpg \n", + " inflating: /data/dog-breed-identification/train/a5202d48a73088408e25a7b22be33922.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a524d48e3678d93cba520d128889a84f.jpg \n", + " inflating: /data/dog-breed-identification/train/a526a377295cce6c0f10867f3fc6057d.jpg \n", + " inflating: /data/dog-breed-identification/train/a5282db9b821559c9136bc875f724bde.jpg \n", + " inflating: /data/dog-breed-identification/train/a53d7471bef882d869f898da801e4eab.jpg \n", + " inflating: /data/dog-breed-identification/train/a53eeaf39233bacbcae5f70c7fe4b9af.jpg \n", + " inflating: /data/dog-breed-identification/train/a54dda20b015dde42e80a339109fc79c.jpg \n", + " inflating: /data/dog-breed-identification/train/a5545758fa85ed04ac848736e0eea7c0.jpg \n", + " inflating: /data/dog-breed-identification/train/a555e9940227d9526928a8e0384ad910.jpg \n", + " inflating: /data/dog-breed-identification/train/a55866a734b915d94b260d2a5bc642a8.jpg \n", + " inflating: /data/dog-breed-identification/train/a55afed96114f7dcc48ee9ed9731f7da.jpg \n", + " inflating: /data/dog-breed-identification/train/a560b3e2b9403610346568f640278c89.jpg \n", + " inflating: /data/dog-breed-identification/train/a5621787bbc9210a919ccbf5832426ef.jpg \n", + " inflating: /data/dog-breed-identification/train/a569d8cd0ef1095b9712c07995bad65e.jpg \n", + " inflating: /data/dog-breed-identification/train/a56acb2a3904093efc2d935e2dd024a6.jpg \n", + " inflating: /data/dog-breed-identification/train/a56fc127e3c0b271e37873d3ad6c9e59.jpg \n", + " inflating: /data/dog-breed-identification/train/a5714270823ccc88ee078810752a9707.jpg \n", + " inflating: /data/dog-breed-identification/train/a57247bc5d572abd95f1aea215b7de77.jpg \n", + " inflating: /data/dog-breed-identification/train/a57766c60a23dedbc29ade3517cf544b.jpg \n", + " inflating: /data/dog-breed-identification/train/a579a1802c57cfbc31b79781f6f37a39.jpg \n", + " inflating: /data/dog-breed-identification/train/a584737e67ddd6ec1e6bd7ff618581e1.jpg \n", + " inflating: /data/dog-breed-identification/train/a5883bb0395e9a69929a7d9585e406be.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a592a87290c75a2fb22a9e4a02573057.jpg \n", + " inflating: /data/dog-breed-identification/train/a59959c6df2a2b9903d5c7bcfaa8f1e7.jpg \n", + " inflating: /data/dog-breed-identification/train/a5a310eddec7e170634569244ad70d42.jpg \n", + " inflating: /data/dog-breed-identification/train/a5a53eff680b48fedf9352844c094335.jpg \n", + " inflating: /data/dog-breed-identification/train/a5a76e7db524d8e11728cb77e5e32f8b.jpg \n", + " inflating: /data/dog-breed-identification/train/a5af5ef59eed5c50a0aeea94c8f016f2.jpg \n", + " inflating: /data/dog-breed-identification/train/a5b1a215d12786204c5323415433df9b.jpg \n", + " inflating: /data/dog-breed-identification/train/a5b4a427b06d56a3d3aeebda27b5639b.jpg \n", + " inflating: /data/dog-breed-identification/train/a5b5b960900c7e0d7c96d2a88f44e84a.jpg \n", + " inflating: /data/dog-breed-identification/train/a5b9a138dcc77a871472e226604c7584.jpg \n", + " inflating: /data/dog-breed-identification/train/a5bf14a3b9f161ce9c458e82d823bea3.jpg \n", + " inflating: /data/dog-breed-identification/train/a5c070dca3f40179283251f1a1184bf2.jpg \n", + " inflating: /data/dog-breed-identification/train/a5c2986a3e2416a6ed58b842fe0e5d68.jpg \n", + " inflating: /data/dog-breed-identification/train/a5c6ca31780472bc6297dcb230ba8d0d.jpg \n", + " inflating: /data/dog-breed-identification/train/a5d812a813274bd656337ad3a76947e2.jpg \n", + " inflating: /data/dog-breed-identification/train/a5da71c4259e319c5f66bc170f751350.jpg \n", + " inflating: /data/dog-breed-identification/train/a5daf7e90363737374840aee6cbf6ab5.jpg \n", + " inflating: /data/dog-breed-identification/train/a5e12305354299b560665bffbd811f8d.jpg \n", + " inflating: /data/dog-breed-identification/train/a5e57f1238951796094e0bf72409e90b.jpg \n", + " inflating: /data/dog-breed-identification/train/a5e8b55e8a408c9e63bd2986b56b9911.jpg \n", + " inflating: /data/dog-breed-identification/train/a5f391ecec6ead7ff92785232f7c1d1d.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a60cee9ebe302922195274c34d55e989.jpg \n", + " inflating: /data/dog-breed-identification/train/a614fc9031217ffdb1707fa4f8a48dc7.jpg \n", + " inflating: /data/dog-breed-identification/train/a61c439d669536fe7c93e8df65e06798.jpg \n", + " inflating: /data/dog-breed-identification/train/a6230f82a77ecf899cdcc1ba62073084.jpg \n", + " inflating: /data/dog-breed-identification/train/a6247763d4791f9bf3cb080311fe8f3f.jpg \n", + " inflating: /data/dog-breed-identification/train/a628532fb55e1d2bdf8a3958df74de35.jpg \n", + " inflating: /data/dog-breed-identification/train/a62b21ed79d822fc6428fddce9c2a954.jpg \n", + " inflating: /data/dog-breed-identification/train/a63484a418084bbfd3158505b0bf6f9e.jpg \n", + " inflating: /data/dog-breed-identification/train/a63becb08aa9d4c47d5e4a948ae3ddf5.jpg \n", + " inflating: /data/dog-breed-identification/train/a63e32a12656d930387940f9eb31e535.jpg \n", + " inflating: /data/dog-breed-identification/train/a6418161b67df372016f510bfd71ddb6.jpg \n", + " inflating: /data/dog-breed-identification/train/a647794848794ad6265f069e56753dee.jpg \n", + " inflating: /data/dog-breed-identification/train/a654a3b3e443be0d5ceb0aa369e26be1.jpg \n", + " inflating: /data/dog-breed-identification/train/a65b55ba1e520c585f53ce1b8fb90303.jpg \n", + " inflating: /data/dog-breed-identification/train/a66612b2661939992858740f88452cd8.jpg \n", + " inflating: /data/dog-breed-identification/train/a669f4da695562c34d6498cf1bc92066.jpg \n", + " inflating: /data/dog-breed-identification/train/a66c39c49a97afc680ed4da5cbaffe47.jpg \n", + " inflating: /data/dog-breed-identification/train/a67104d39ad3f52c822698444a2dda9d.jpg \n", + " inflating: /data/dog-breed-identification/train/a671803d1eed0aada084f65eb084ba28.jpg \n", + " inflating: /data/dog-breed-identification/train/a67241098d832eed2413c37dd4f42624.jpg \n", + " inflating: /data/dog-breed-identification/train/a67ec52536eb7e8c71c9c9d41b9f1de2.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a68057c18b0977b5bedacbb3225a15f3.jpg \n", + " inflating: /data/dog-breed-identification/train/a680e3773b4adc043c95180315e73e94.jpg \n", + " inflating: /data/dog-breed-identification/train/a6827689ba680fa5d3ce835f62ed1b04.jpg \n", + " inflating: /data/dog-breed-identification/train/a68e6419efb8c5675fead034aadae947.jpg \n", + " inflating: /data/dog-breed-identification/train/a694bb8740fe5042c5d1b1a19445a250.jpg \n", + " inflating: /data/dog-breed-identification/train/a6951e4f0cff9c000f2f112043abb085.jpg \n", + " inflating: /data/dog-breed-identification/train/a69d3f94caab2e0854a7bc80fe7f2a90.jpg \n", + " inflating: /data/dog-breed-identification/train/a69dbc3bb27b3b0dd9b74b7f2da13113.jpg \n", + " inflating: /data/dog-breed-identification/train/a6abba1d01ba07a581b0e1ae2e17e99e.jpg \n", + " inflating: /data/dog-breed-identification/train/a6aeb00d5829c0e2a5c0c4998900bd8d.jpg \n", + " inflating: /data/dog-breed-identification/train/a6c40bfb565f1f99d400ed89f332db75.jpg \n", + " inflating: /data/dog-breed-identification/train/a6c8e21983dec5202535bc6ce06272de.jpg \n", + " inflating: /data/dog-breed-identification/train/a6c988709c96bc166247eabb115c2d24.jpg \n", + " inflating: /data/dog-breed-identification/train/a6c9f19bfb2b8e23d5e73294704d842c.jpg \n", + " inflating: /data/dog-breed-identification/train/a6cf3f47adc06eb5671790454b562b88.jpg \n", + " inflating: /data/dog-breed-identification/train/a6cfd1852f7821cd0ecf7cf92aff06b3.jpg \n", + " inflating: /data/dog-breed-identification/train/a6d2985d1428a3cfe6e4aa66bd9ee314.jpg \n", + " inflating: /data/dog-breed-identification/train/a6d5caaaef25e159036ed2de0e69b2df.jpg \n", + " inflating: /data/dog-breed-identification/train/a6d7c6cc8162c58d6f75d6f46cd6e0d4.jpg \n", + " inflating: /data/dog-breed-identification/train/a6d97d2087ad8627c78f88f619b91a92.jpg \n", + " inflating: /data/dog-breed-identification/train/a6eb4c19bbe8efd5d4fc9ac41e0cba4b.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a6fbcb0ef8c766183f56fb005bda285d.jpg \n", + " inflating: /data/dog-breed-identification/train/a6ff0cf6aaa1194be83bb73d5eaecf81.jpg \n", + " inflating: /data/dog-breed-identification/train/a7035c27920c29332a8b12cf0d03aa90.jpg \n", + " inflating: /data/dog-breed-identification/train/a705cbd42cf04daaef1961ebb9b50d2b.jpg \n", + " inflating: /data/dog-breed-identification/train/a7167022b4c41ddd1d4aa8e5c18162d2.jpg \n", + " inflating: /data/dog-breed-identification/train/a717381e64525267a879ea44f0b76a00.jpg \n", + " inflating: /data/dog-breed-identification/train/a72448bdbaa5e486245d38f50ca37e42.jpg \n", + " inflating: /data/dog-breed-identification/train/a7295fb63568fc0b1bfe4900835fc159.jpg \n", + " inflating: /data/dog-breed-identification/train/a72e256f9b65b1b0ba527237f204d77f.jpg \n", + " inflating: /data/dog-breed-identification/train/a72e49a69c8409fed67c47bba4c4f1c6.jpg \n", + " inflating: /data/dog-breed-identification/train/a72fac01497e296e32fab4192df57531.jpg \n", + " inflating: /data/dog-breed-identification/train/a730da7fcd2f9699ba71745e8b5256e5.jpg \n", + " inflating: /data/dog-breed-identification/train/a73d4f5c3ae0eb8dc94321d0d342609c.jpg \n", + " inflating: /data/dog-breed-identification/train/a73f41b13b33cb177001a55a940505fc.jpg \n", + " inflating: /data/dog-breed-identification/train/a744f11051daf6c9bddc47fd757d0bb6.jpg \n", + " inflating: /data/dog-breed-identification/train/a745bd8479c64408093c2116a58e38f6.jpg \n", + " inflating: /data/dog-breed-identification/train/a7478e4233a1b25de97d72c9b48f0c0b.jpg \n", + " inflating: /data/dog-breed-identification/train/a7488d6640ff1fe321e06bf0559f3bbf.jpg \n", + " inflating: /data/dog-breed-identification/train/a74b32169c1b42a218a9f72f954ec9a4.jpg \n", + " inflating: /data/dog-breed-identification/train/a7551603593a58e352ecc73d487fc7af.jpg \n", + " inflating: /data/dog-breed-identification/train/a755566f4f69456d4a5a192511bfd744.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a75a772a44c46e03333878c9cef8d81c.jpg \n", + " inflating: /data/dog-breed-identification/train/a75dc89b84b4986afa0365ae9f734681.jpg \n", + " inflating: /data/dog-breed-identification/train/a768d7fc2b42dc15018ba9641b401db5.jpg \n", + " inflating: /data/dog-breed-identification/train/a7702c83da79d54e27d2e9b1f01fe64e.jpg \n", + " inflating: /data/dog-breed-identification/train/a77619f67680f34bb77a80d67e6efa70.jpg \n", + " inflating: /data/dog-breed-identification/train/a77bfd1136d119f59fc87eae105874c2.jpg \n", + " inflating: /data/dog-breed-identification/train/a78e2a1cc253ed02b273f41b20a2d117.jpg \n", + " inflating: /data/dog-breed-identification/train/a7911b0e539a90bf79a9352161a37e68.jpg \n", + " inflating: /data/dog-breed-identification/train/a7927966a651971484cadea0dc318de6.jpg \n", + " inflating: /data/dog-breed-identification/train/a7930bb879d8475d3dc1816817a88a3d.jpg \n", + " inflating: /data/dog-breed-identification/train/a7961824578e3da96551005ff58fa51b.jpg \n", + " inflating: /data/dog-breed-identification/train/a7a260558be026398395591fdae8fb23.jpg \n", + " inflating: /data/dog-breed-identification/train/a7a3ada2b02b42c729fea4f19bc83091.jpg \n", + " inflating: /data/dog-breed-identification/train/a7a3e358844d4ae4d423b2754c1d54a4.jpg \n", + " inflating: /data/dog-breed-identification/train/a7ac233c1ffbcf8ceac2ed36ee0f2426.jpg \n", + " inflating: /data/dog-breed-identification/train/a7b0055c47417e89c9769f28b668b04f.jpg \n", + " inflating: /data/dog-breed-identification/train/a7b2a87a394fd17c848d2bf21cb7fe59.jpg \n", + " inflating: /data/dog-breed-identification/train/a7c46b81fc529182e513f2328b1262b8.jpg \n", + " inflating: /data/dog-breed-identification/train/a7c84235852fd06e7f3d4ec70b2b65a9.jpg \n", + " inflating: /data/dog-breed-identification/train/a7cb736f47dfad51eaea833d6bd45e85.jpg \n", + " inflating: /data/dog-breed-identification/train/a7ce55dc85d2f4dbf62a4ffa82bf5df4.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a7d5a944bec21c0d6f570cd04b8a661b.jpg \n", + " inflating: /data/dog-breed-identification/train/a7db31e112e983e61b6d7c880abbef7f.jpg \n", + " inflating: /data/dog-breed-identification/train/a7dc3f5d620fd4ce214f7a043372d485.jpg \n", + " inflating: /data/dog-breed-identification/train/a7ede45f2bcc3b5f918cf6626da1be91.jpg \n", + " inflating: /data/dog-breed-identification/train/a7f36feff3c6fc2406d316f75381c491.jpg \n", + " inflating: /data/dog-breed-identification/train/a7f7f75a029078eee61b15d44acfaa0f.jpg \n", + " inflating: /data/dog-breed-identification/train/a7f9839ac0a874686eb418d317eae284.jpg \n", + " inflating: /data/dog-breed-identification/train/a7f9a20d9d6e79fc41eff2ea61ad8ce8.jpg \n", + " inflating: /data/dog-breed-identification/train/a7fb600cfa14ef2e1667d34e087befd7.jpg \n", + " inflating: /data/dog-breed-identification/train/a7fb820b4ca5f9dc6ba8e1a3823cb882.jpg \n", + " inflating: /data/dog-breed-identification/train/a801bed4876123334ec0514da4642b50.jpg \n", + " inflating: /data/dog-breed-identification/train/a8040246b5632253425a04dbb3cce104.jpg \n", + " inflating: /data/dog-breed-identification/train/a805b37f7b8cf08cc6853792c2bd9f82.jpg \n", + " inflating: /data/dog-breed-identification/train/a80e292d5a6eaaa6b3114a555bc9f221.jpg \n", + " inflating: /data/dog-breed-identification/train/a813fb413eec1274376cbddf9061fed5.jpg \n", + " inflating: /data/dog-breed-identification/train/a819b8f7e2df438ecb22e1566eb5fc32.jpg \n", + " inflating: /data/dog-breed-identification/train/a81befb939cd1857c616da33034268bc.jpg \n", + " inflating: /data/dog-breed-identification/train/a81d6c9e0b71d1e8b156c530a4c45816.jpg \n", + " inflating: /data/dog-breed-identification/train/a83eb7262a110f944bdf2281b55ec374.jpg \n", + " inflating: /data/dog-breed-identification/train/a8477825497bebe7ee3fa6c816f6d499.jpg \n", + " inflating: /data/dog-breed-identification/train/a84b22ba806665c7cfa4a63e59ed81bf.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a84c3f417d8674d3a1cc861e6f6b6564.jpg \n", + " inflating: /data/dog-breed-identification/train/a85157e452c6d284d7f3d5aeb8e5c0ce.jpg \n", + " inflating: /data/dog-breed-identification/train/a85648ed4bd628838f5294085495293c.jpg \n", + " inflating: /data/dog-breed-identification/train/a874515996b484c8cd2864b4ae7fcd85.jpg \n", + " inflating: /data/dog-breed-identification/train/a877b4bdeeb0b02ef99eb24f3faf6213.jpg \n", + " inflating: /data/dog-breed-identification/train/a8859e94273a3a1d558d13d2b5d04c47.jpg \n", + " inflating: /data/dog-breed-identification/train/a889842a9cdd447f2fbbf91e8856106d.jpg \n", + " inflating: /data/dog-breed-identification/train/a88c75e47ae3388cbc66e4863428b753.jpg \n", + " inflating: /data/dog-breed-identification/train/a8933b557e35f2ee5405febf10fa0e30.jpg \n", + " inflating: /data/dog-breed-identification/train/a8965b5b9649335721b516e0e6e67122.jpg \n", + " inflating: /data/dog-breed-identification/train/a8a60e1becc6e317f9244f60c16b9e03.jpg \n", + " inflating: /data/dog-breed-identification/train/a8a87c46f4bf134ffd7a72204c83c865.jpg \n", + " inflating: /data/dog-breed-identification/train/a8a8c1484c0dbf38493dcc941907984a.jpg \n", + " inflating: /data/dog-breed-identification/train/a8aa5d6471ed25275dc461acca233221.jpg \n", + " inflating: /data/dog-breed-identification/train/a8adb0bda1b90f78bc23eb9a320ac8a3.jpg \n", + " inflating: /data/dog-breed-identification/train/a8b00ee3288f5be5fd11334ce610a7de.jpg \n", + " inflating: /data/dog-breed-identification/train/a8b4a3acf8141175c75f5491576e450e.jpg \n", + " inflating: /data/dog-breed-identification/train/a8b996d708cd39d11a9368a4d0e3da5e.jpg \n", + " inflating: /data/dog-breed-identification/train/a8b9ff7ca06057bca49a48efe93487ce.jpg \n", + " inflating: /data/dog-breed-identification/train/a8c5dfc993593fe2b14489d5fd38bde3.jpg \n", + " inflating: /data/dog-breed-identification/train/a8cb1a353bf80b0c55919bf5d929c694.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a8ce1ed48da6f4291eafc8e61cd7d06a.jpg \n", + " inflating: /data/dog-breed-identification/train/a8da1804e683a16557ef4dc51b281399.jpg \n", + " inflating: /data/dog-breed-identification/train/a8dc5a206ac4407615c91f522fceaece.jpg \n", + " inflating: /data/dog-breed-identification/train/a8dcd9886518ec9c154e3a1bee625f9a.jpg \n", + " inflating: /data/dog-breed-identification/train/a8de8a9499558fb26f78edbf8a8f1311.jpg \n", + " inflating: /data/dog-breed-identification/train/a8e7f8791dcfa0211fb001860aa35179.jpg \n", + " inflating: /data/dog-breed-identification/train/a8eeac0072bd96ade5bec1cf3bd974b5.jpg \n", + " inflating: /data/dog-breed-identification/train/a8f10fb1589b332c43d101b318cf1c6d.jpg \n", + " inflating: /data/dog-breed-identification/train/a8f63475284ea8b1e903465cac81a92a.jpg \n", + " inflating: /data/dog-breed-identification/train/a8faf31b8ae83769e228f999294b40fc.jpg \n", + " inflating: /data/dog-breed-identification/train/a9028e14b63eaa3745d34c48e2fa2d21.jpg \n", + " inflating: /data/dog-breed-identification/train/a9054d27b758b05eac6e483eba0f9f78.jpg \n", + " inflating: /data/dog-breed-identification/train/a9091140a1c2246d094da61d00fbd58e.jpg \n", + " inflating: /data/dog-breed-identification/train/a90914a01e1ce4788b914b25c1a647a4.jpg \n", + " inflating: /data/dog-breed-identification/train/a90c8510c0d41d4e40efa58e4c93a996.jpg \n", + " inflating: /data/dog-breed-identification/train/a912ccb24c2ceb88f29f63a424805a09.jpg \n", + " inflating: /data/dog-breed-identification/train/a914a6e12f40b6ff0945f84ae4b3d662.jpg \n", + " inflating: /data/dog-breed-identification/train/a923f859ec7b8c0f74439a2058917297.jpg \n", + " inflating: /data/dog-breed-identification/train/a92a34eb1cd3fd3197ff0f2147251606.jpg \n", + " inflating: /data/dog-breed-identification/train/a930ca18e5688b01f1cb223383faf9bf.jpg \n", + " inflating: /data/dog-breed-identification/train/a93570762a77a6692af5f770bddd04d8.jpg \n", + " inflating: 
/data/dog-breed-identification/train/a93e82d3fe942c7e871a8fe7202d4be6.jpg \n", + " inflating: /data/dog-breed-identification/train/a93e86c093b1dc0c4070c9418cf48db7.jpg \n", + " inflating: /data/dog-breed-identification/train/a93f2659ef71bcaf4859c8f8395f258e.jpg \n", + " inflating: /data/dog-breed-identification/train/a93f96fb5fe9d5f0242a98656465c261.jpg \n", + " inflating: /data/dog-breed-identification/train/a95e44d25232d5584356a86b894049f1.jpg \n", + " inflating: /data/dog-breed-identification/train/a9607591357d1f93bfbab4cabbe73c7c.jpg \n", + " inflating: /data/dog-breed-identification/train/a96413fe2be3eca592182fa811663976.jpg \n", + " inflating: /data/dog-breed-identification/train/a97178b47e90071a3f5cef3f746b665e.jpg \n", + " inflating: /data/dog-breed-identification/train/a9797886f1e5b8097e4c27c98e662c54.jpg \n", + " inflating: /data/dog-breed-identification/train/a9883ac08095cdbd08c2a36f836ebde0.jpg \n", + " inflating: /data/dog-breed-identification/train/a9a4ef737174d6169ea7be588f6c71be.jpg \n", + " inflating: /data/dog-breed-identification/train/a9afa61628fa4a149e8254778a682efe.jpg \n", + " inflating: /data/dog-breed-identification/train/a9b4ea005a892de1cddb16e297917426.jpg \n", + " inflating: /data/dog-breed-identification/train/a9bcfb541a3436b82ddf3cc9f6f408a5.jpg \n", + " inflating: /data/dog-breed-identification/train/a9cc695bcbdf1b0ae3784149fcffd8ff.jpg \n", + " inflating: /data/dog-breed-identification/train/a9cfa457bbf648e22241dbdee7ba126f.jpg \n", + " inflating: /data/dog-breed-identification/train/a9ddd222a196bfdf07f52b227b27bf1d.jpg \n", + " inflating: /data/dog-breed-identification/train/a9e1530fa58de136e797b36bc16514b4.jpg \n", + " inflating: /data/dog-breed-identification/train/a9f3be811452a710a8c390a9b2590f32.jpg \n", + " inflating: /data/dog-breed-identification/train/a9fafe79f816b5032af49b12fa0cebae.jpg \n", + " inflating: /data/dog-breed-identification/train/aa199e6564bd8d92f50f83275f628389.jpg \n", + " inflating: 
/data/dog-breed-identification/train/aa27bc4e1acd52b224cdfddde476face.jpg \n", + " inflating: /data/dog-breed-identification/train/aa2d66804ee69b60bc090da5f2ff37da.jpg \n", + " inflating: /data/dog-breed-identification/train/aa39f6a8d629483e5c56e86a7434229b.jpg \n", + " inflating: /data/dog-breed-identification/train/aa3fc38f922112659b9a2b566ba3dc8e.jpg \n", + " inflating: /data/dog-breed-identification/train/aa45548a844d26700e149d7692847625.jpg \n", + " inflating: /data/dog-breed-identification/train/aa4b8e10696a7e2f8d07a7b13c1f5c6b.jpg \n", + " inflating: /data/dog-breed-identification/train/aa4b973e445141931ec097aafa317395.jpg \n", + " inflating: /data/dog-breed-identification/train/aa4c9b1ce189af4fd67141b2a0bb353b.jpg \n", + " inflating: /data/dog-breed-identification/train/aa52399aa598792ef63fe14828660bc3.jpg \n", + " inflating: /data/dog-breed-identification/train/aa5be862598201f0b88ce2146b99102d.jpg \n", + " inflating: /data/dog-breed-identification/train/aa5fb238f634ac164f021d17058c594c.jpg \n", + " inflating: /data/dog-breed-identification/train/aa61fa4ceb51d5c6d954237f4cedad7d.jpg \n", + " inflating: /data/dog-breed-identification/train/aa66749e1fef393bd14b27e0e305b82a.jpg \n", + " inflating: /data/dog-breed-identification/train/aa68b2a139183a747f9555814280e7ae.jpg \n", + " inflating: /data/dog-breed-identification/train/aa709b13301acf8e0410f08edf4aa44e.jpg \n", + " inflating: /data/dog-breed-identification/train/aa738e200b779d28af87b388f1dd309b.jpg \n", + " inflating: /data/dog-breed-identification/train/aa76a766aad6753f32298c00a0cb92df.jpg \n", + " inflating: /data/dog-breed-identification/train/aa7b7278b4337d65cf392cd2124ebf17.jpg \n", + " inflating: /data/dog-breed-identification/train/aa7c04434a8f718b639c4b73030349e2.jpg \n", + " inflating: /data/dog-breed-identification/train/aa7d9f08fc9ac1d45117848137adce9c.jpg \n", + " inflating: /data/dog-breed-identification/train/aa7db357433752780cdcc0d0d65473fd.jpg \n", + " inflating: 
/data/dog-breed-identification/train/aa899804eafe2c221f72b6dae066a55f.jpg \n", + " inflating: /data/dog-breed-identification/train/aa91acec6c98b310559872490609fbe7.jpg \n", + " inflating: /data/dog-breed-identification/train/aa94045f0221be29ee6508361b4913fc.jpg \n", + " inflating: /data/dog-breed-identification/train/aa9482ec3a28d952eac962c174e45024.jpg \n", + " inflating: /data/dog-breed-identification/train/aaa2f6f88d506cc598f33e8f682939de.jpg \n", + " inflating: /data/dog-breed-identification/train/aab072b369a558ceb6baed590eaa29e6.jpg \n", + " inflating: /data/dog-breed-identification/train/aac11706ef19a09f6884615c4352a0b0.jpg \n", + " inflating: /data/dog-breed-identification/train/aac57b0a7685289a66eb1fac7ec2ed2d.jpg \n", + " inflating: /data/dog-breed-identification/train/aac6e8cf0ca09868f9906aa70854dce3.jpg \n", + " inflating: /data/dog-breed-identification/train/aaca42b24d63226fe1dda8e6863def96.jpg \n", + " inflating: /data/dog-breed-identification/train/aad0ede6b29383e88c475623f13864ba.jpg \n", + " inflating: /data/dog-breed-identification/train/aad973458e8698378678df548ee4c4fe.jpg \n", + " inflating: /data/dog-breed-identification/train/aadf0358c789a5b5aa558c66f580e92d.jpg \n", + " inflating: /data/dog-breed-identification/train/aaefb2067fac2f6714f77de222b13cc7.jpg \n", + " inflating: /data/dog-breed-identification/train/aaf069f36b79c6499c6e746454c91f64.jpg \n", + " inflating: /data/dog-breed-identification/train/ab004601229c51044696419bf3e682fe.jpg \n", + " inflating: /data/dog-breed-identification/train/ab04c82dada7f9657782b29695a7c36b.jpg \n", + " inflating: /data/dog-breed-identification/train/ab0dd924f95990e2db5f4b65c606e4b8.jpg \n", + " inflating: /data/dog-breed-identification/train/ab15179d96c2fbc0c16c46683f92f001.jpg \n", + " inflating: /data/dog-breed-identification/train/ab16b0a2feea44546a293be0cf233261.jpg \n", + " inflating: /data/dog-breed-identification/train/ab1715c4f68734a7fecb8901e9baf72a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ab193bb06012552321a1abb5ef2556e7.jpg \n", + " inflating: /data/dog-breed-identification/train/ab1b5ed3e3894eb6caa70609add09443.jpg \n", + " inflating: /data/dog-breed-identification/train/ab23707f7d469c51b198ef69e75c990e.jpg \n", + " inflating: /data/dog-breed-identification/train/ab35094f483eaf915536835a4df9d46e.jpg \n", + " inflating: /data/dog-breed-identification/train/ab3aec7ac57f86ba1298d43d6fd3613d.jpg \n", + " inflating: /data/dog-breed-identification/train/ab455404c9b1970f6bb4957a2b92f868.jpg \n", + " inflating: /data/dog-breed-identification/train/ab45ee0ee453c130b9f66e8a41659c6a.jpg \n", + " inflating: /data/dog-breed-identification/train/ab46c386186878210a528e7b5f7d822f.jpg \n", + " inflating: /data/dog-breed-identification/train/ab52a394113d5f6964070d2dd98d845a.jpg \n", + " inflating: /data/dog-breed-identification/train/ab573a84b77fc415c8c37ca9e4413c1d.jpg \n", + " inflating: /data/dog-breed-identification/train/ab588fe6350567256eaee1f93e9e065d.jpg \n", + " inflating: /data/dog-breed-identification/train/ab5c5ee4b0431f391fde0ba59a253907.jpg \n", + " inflating: /data/dog-breed-identification/train/ab6555cb3eadd631940d914021d8bd76.jpg \n", + " inflating: /data/dog-breed-identification/train/ab72425d2a8e4cc69689f80560018616.jpg \n", + " inflating: /data/dog-breed-identification/train/ab7b1ed1d8c171e0e491b7fcde86ef16.jpg \n", + " inflating: /data/dog-breed-identification/train/ab86df93beaba665a8e517bd02307f9a.jpg \n", + " inflating: /data/dog-breed-identification/train/ab904c74f49f5740e7f42f18ada3a72e.jpg \n", + " inflating: /data/dog-breed-identification/train/ab961388b6368660077a9e1dbb4f6b30.jpg \n", + " inflating: /data/dog-breed-identification/train/ab999d8dfd944bd5450eb6d8e1432d43.jpg \n", + " inflating: /data/dog-breed-identification/train/ab9e3dead24985735ae40b5bba7cb4af.jpg \n", + " inflating: /data/dog-breed-identification/train/abac28bf9ab101121411cc1e77590afe.jpg \n", + " inflating: 
/data/dog-breed-identification/train/abb048471253a57c27ede4e9ff15c896.jpg \n", + " inflating: /data/dog-breed-identification/train/abb194f697f5e00bc8be1e29c54ec3d7.jpg \n", + " inflating: /data/dog-breed-identification/train/abb198660ee09c413223848381046ace.jpg \n", + " inflating: /data/dog-breed-identification/train/abb9fefaa637485a468bac9d825fd5c3.jpg \n", + " inflating: /data/dog-breed-identification/train/abbb0aedbef9d47257f3577ac3d1771e.jpg \n", + " inflating: /data/dog-breed-identification/train/abbbd390647afb9e206005d01ab92316.jpg \n", + " inflating: /data/dog-breed-identification/train/abbdb3f746a69c747d42b9b13a01c28d.jpg \n", + " inflating: /data/dog-breed-identification/train/abc4dec9183b686aceb15f4de7c99c7d.jpg \n", + " inflating: /data/dog-breed-identification/train/abc50b5c5809ee3dd97022c2e1b21db0.jpg \n", + " inflating: /data/dog-breed-identification/train/abc6347441eb3b1d42e208a6aa8ac05f.jpg \n", + " inflating: /data/dog-breed-identification/train/abcb98b8f6840bd5563b037ba85a4d06.jpg \n", + " inflating: /data/dog-breed-identification/train/abd1d27a731a793608ba05ad1fd1487b.jpg \n", + " inflating: /data/dog-breed-identification/train/abd2b21b2ff694e62448318b14ca752e.jpg \n", + " inflating: /data/dog-breed-identification/train/abd43fd42eee6c32c7d95fc0993603b6.jpg \n", + " inflating: /data/dog-breed-identification/train/abe55e39cafb1abc85bb6b00376222f0.jpg \n", + " inflating: /data/dog-breed-identification/train/abf00a2af02c171d55821ddbec2bd36a.jpg \n", + " inflating: /data/dog-breed-identification/train/abf12f0fef8c729da906177c9d2bc47e.jpg \n", + " inflating: /data/dog-breed-identification/train/ac09435914fe79107bf31ed3d436a798.jpg \n", + " inflating: /data/dog-breed-identification/train/ac0daba3a690ae7e0aa3ceaee2cb41a6.jpg \n", + " inflating: /data/dog-breed-identification/train/ac1e8ca8dcdcf845060365dc00631aaf.jpg \n", + " inflating: /data/dog-breed-identification/train/ac1f0dae1f65f34824afb54cc6d658b8.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ac21b78f89d6939f36effb79104e7758.jpg \n", + " inflating: /data/dog-breed-identification/train/ac225d146cd712799a4581aad3ac1267.jpg \n", + " inflating: /data/dog-breed-identification/train/ac2e27fa8891e1d7a2ee88196a2a4223.jpg \n", + " inflating: /data/dog-breed-identification/train/ac38f7a08bd51ef93c400e5d2098db92.jpg \n", + " inflating: /data/dog-breed-identification/train/ac44e2f1e64ee853996b772fb1f703aa.jpg \n", + " inflating: /data/dog-breed-identification/train/ac4ea6b4fce3058dc84db8ee7c56fb81.jpg \n", + " inflating: /data/dog-breed-identification/train/ac50e3733b58fdecc1eaba7ee8efdb84.jpg \n", + " inflating: /data/dog-breed-identification/train/ac54633fb411ba5f4a403cb2cef10e9b.jpg \n", + " inflating: /data/dog-breed-identification/train/ac5789fadc9b8a6691c70a6ff62a25a7.jpg \n", + " inflating: /data/dog-breed-identification/train/ac597b01b21a68f14b1d57e326cc9184.jpg \n", + " inflating: /data/dog-breed-identification/train/ac5bb7ba4513320fb91f52a8190a55b4.jpg \n", + " inflating: /data/dog-breed-identification/train/ac5dc2c927e0c8ba959dfd1938d288b2.jpg \n", + " inflating: /data/dog-breed-identification/train/ac63a26c445b5e7c92bbd09b1abe1673.jpg \n", + " inflating: /data/dog-breed-identification/train/ac6445debf13b337a2ae5c26cc63f1e5.jpg \n", + " inflating: /data/dog-breed-identification/train/ac73b824dd06d8d6527257edd6147c5f.jpg \n", + " inflating: /data/dog-breed-identification/train/ac7499729040d6037a2aac17f0466451.jpg \n", + " inflating: /data/dog-breed-identification/train/ac81bd5676488e9523fba872efbe383f.jpg \n", + " inflating: /data/dog-breed-identification/train/ac85c489cb78fce3b455bfbba0d02094.jpg \n", + " inflating: /data/dog-breed-identification/train/ac880daeb1f7203245c2c9e47b0e3db7.jpg \n", + " inflating: /data/dog-breed-identification/train/ac8869ca8acc2334a3e034c4ad8d0b68.jpg \n", + " inflating: /data/dog-breed-identification/train/ac936b37580301b42c4f3f1b6baf4142.jpg \n", + " inflating: 
/data/dog-breed-identification/train/aca5fb5688fbbd2949a369534af84c6b.jpg \n", + " inflating: /data/dog-breed-identification/train/acc563b94d6d975020a4fdd036bb1aa7.jpg \n", + " inflating: /data/dog-breed-identification/train/acc860884c9db42453495a4b330d1cff.jpg \n", + " inflating: /data/dog-breed-identification/train/acca62cebd23de23384fd8c95d381b0f.jpg \n", + " inflating: /data/dog-breed-identification/train/acd32654070ec86fa6d5fd2145327563.jpg \n", + " inflating: /data/dog-breed-identification/train/acd58f9a3178e1e0acb60bd00d314e96.jpg \n", + " inflating: /data/dog-breed-identification/train/acd70a662678da5020f8053ba2557c96.jpg \n", + " inflating: /data/dog-breed-identification/train/acd9f4e9537a1a4474024393ecfbe456.jpg \n", + " inflating: /data/dog-breed-identification/train/acda07106c58300bc0dfd4b8391bde20.jpg \n", + " inflating: /data/dog-breed-identification/train/acf27cad5544fbed6baa345666f87700.jpg \n", + " inflating: /data/dog-breed-identification/train/acfe87af21ad64e518f9ca6a64d728bd.jpg \n", + " inflating: /data/dog-breed-identification/train/ad019e394b55406f412b9a03899d5705.jpg \n", + " inflating: /data/dog-breed-identification/train/ad0e1974ddd6973dd600f9055def0a5a.jpg \n", + " inflating: /data/dog-breed-identification/train/ad111fe98c5802b0e61e23cd857ce509.jpg \n", + " inflating: /data/dog-breed-identification/train/ad15d290cc87d4c3afbe21061c71e465.jpg \n", + " inflating: /data/dog-breed-identification/train/ad17371333dc89057c529972489ad416.jpg \n", + " inflating: /data/dog-breed-identification/train/ad1e1dcfe83a0925bb27ab720e6b28ff.jpg \n", + " inflating: /data/dog-breed-identification/train/ad26c36ca5249ded86644ef1f01edd91.jpg \n", + " inflating: /data/dog-breed-identification/train/ad282d2da0cc82233dbc487e8ca80aed.jpg \n", + " inflating: /data/dog-breed-identification/train/ad28b267501ce726440c9d5cae4e6f33.jpg \n", + " inflating: /data/dog-breed-identification/train/ad2dfa0202d8ea3766fea1e743cd5166.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ad33e4e1f411d0730d962a6a87ee9154.jpg \n", + " inflating: /data/dog-breed-identification/train/ad33f1ae1ec279951f9fc2664119fd2c.jpg \n", + " inflating: /data/dog-breed-identification/train/ad344f41cef114ad6a1705fa6dc5dde2.jpg \n", + " inflating: /data/dog-breed-identification/train/ad3f5c4c67bf388eaf1eb180e46fccf6.jpg \n", + " inflating: /data/dog-breed-identification/train/ad43804fc40b176421fea125c43aa056.jpg \n", + " inflating: /data/dog-breed-identification/train/ad467c591ef2892ba9a450613ece9097.jpg \n", + " inflating: /data/dog-breed-identification/train/ad4d50d47a14cd8c68a7b474491429cd.jpg \n", + " inflating: /data/dog-breed-identification/train/ad4d7770f3228e65faeb968c1e0465c1.jpg \n", + " inflating: /data/dog-breed-identification/train/ad531a8d9389f0dea4b1224a10243e85.jpg \n", + " inflating: /data/dog-breed-identification/train/ad54e9a8bcdda2309332009dc4873384.jpg \n", + " inflating: /data/dog-breed-identification/train/ad60895e22b73a4e96040d82af3d2b15.jpg \n", + " inflating: /data/dog-breed-identification/train/ad60a6fe919d2b63b96cddcf0b1641b2.jpg \n", + " inflating: /data/dog-breed-identification/train/ad61ba2639301bda9acd01999a7eb80f.jpg \n", + " inflating: /data/dog-breed-identification/train/ad63bee15bb862f4de889d5bcbde12b2.jpg \n", + " inflating: /data/dog-breed-identification/train/ad65b6e2e9ab5a215f931155b4ca27f8.jpg \n", + " inflating: /data/dog-breed-identification/train/ad6b4e69f343d5b08ecc8f983bf2ce86.jpg \n", + " inflating: /data/dog-breed-identification/train/ad73ccb66bea3c236de86fb950593173.jpg \n", + " inflating: /data/dog-breed-identification/train/ad7d271bedbe576ccbf5c646e4ddcbc2.jpg \n", + " inflating: /data/dog-breed-identification/train/ad8ef2b2772064312bda4a188b7b5fd8.jpg \n", + " inflating: /data/dog-breed-identification/train/ad9104f9ae466e303c1b01c0a356852d.jpg \n", + " inflating: /data/dog-breed-identification/train/ad9678a1da7b9a05266ee1b0d7316711.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ad9c639bb2516d7433d0f0bc10487116.jpg \n", + " inflating: /data/dog-breed-identification/train/ada32a5a413db0763e807e86562f2695.jpg \n", + " inflating: /data/dog-breed-identification/train/adbc0c1e5c3c13e91701d064239c1eb9.jpg \n", + " inflating: /data/dog-breed-identification/train/adc32f768a2cd01a5fcf01bf49fa024a.jpg \n", + " inflating: /data/dog-breed-identification/train/adc3d70faa55039ac9fbc49ba68e758a.jpg \n", + " inflating: /data/dog-breed-identification/train/adc5daa413e246287aacf9b7f6e16d36.jpg \n", + " inflating: /data/dog-breed-identification/train/add47db070492c3ed9d3b76f1886d684.jpg \n", + " inflating: /data/dog-breed-identification/train/addf2f5e7426173ffb795fa485edae63.jpg \n", + " inflating: /data/dog-breed-identification/train/ade0242e0bfe15d202e398c2380efb3c.jpg \n", + " inflating: /data/dog-breed-identification/train/adeb01e891e95b347ea3132451fab8ec.jpg \n", + " inflating: /data/dog-breed-identification/train/adede2c5e311633f1b6d6b3109c733db.jpg \n", + " inflating: /data/dog-breed-identification/train/adf50131b2c975bc32fdf57194fc3cff.jpg \n", + " inflating: /data/dog-breed-identification/train/adfe10ae8d4b90ba0abb56a0648dac8e.jpg \n", + " inflating: /data/dog-breed-identification/train/ae041dda85d54e44e581ab76f536208a.jpg \n", + " inflating: /data/dog-breed-identification/train/ae0aa7d59b4d311e57fd7fc47ba65c4b.jpg \n", + " inflating: /data/dog-breed-identification/train/ae0cf4a8076e52a04632de2b8e9a911b.jpg \n", + " inflating: /data/dog-breed-identification/train/ae160437df0d4def173e66c76bb2ab56.jpg \n", + " inflating: /data/dog-breed-identification/train/ae162648a4b30714dca7d86364237c63.jpg \n", + " inflating: /data/dog-breed-identification/train/ae16b7a7f905244eba139f90f48d3171.jpg \n", + " inflating: /data/dog-breed-identification/train/ae1a8d64cea83e9f1bda35351d2174d7.jpg \n", + " inflating: /data/dog-breed-identification/train/ae1aeb3bb01b156aad5db0bbdd7cf6d9.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ae1ceabc7573a73c2f020afdc42aef50.jpg \n", + " inflating: /data/dog-breed-identification/train/ae1d6243e4bc8dcc48cca2aea2367b94.jpg \n", + " inflating: /data/dog-breed-identification/train/ae1eb096845f419a35c1f3f5396c9ec2.jpg \n", + " inflating: /data/dog-breed-identification/train/ae2099cd620c148c433235f85019e151.jpg \n", + " inflating: /data/dog-breed-identification/train/ae2486ae8082c661055682333874cafd.jpg \n", + " inflating: /data/dog-breed-identification/train/ae2ea774187ab7e25dc6597390b15765.jpg \n", + " inflating: /data/dog-breed-identification/train/ae321243a4dfba310868a20dfaba514b.jpg \n", + " inflating: /data/dog-breed-identification/train/ae379664238894ccaaaa789b988b9b4e.jpg \n", + " inflating: /data/dog-breed-identification/train/ae4431c09466aee8c31a707b6aac0666.jpg \n", + " inflating: /data/dog-breed-identification/train/ae4c32628d35d498c9fadc9b56137d52.jpg \n", + " inflating: /data/dog-breed-identification/train/ae52363c0fbb11eb3b15b067e4b4c4ff.jpg \n", + " inflating: /data/dog-breed-identification/train/ae588184cfac8fb06f9ba96b44b63e4d.jpg \n", + " inflating: /data/dog-breed-identification/train/ae68592523bcc268bdbeec99665b44c8.jpg \n", + " inflating: /data/dog-breed-identification/train/ae68c66840d47f2c0619aabee7729ac9.jpg \n", + " inflating: /data/dog-breed-identification/train/ae6c48de2e67185c62fd8bfb1311bc9d.jpg \n", + " inflating: /data/dog-breed-identification/train/ae7d5f98febfb5761f789e0642ab6cbc.jpg \n", + " inflating: /data/dog-breed-identification/train/ae7e1799c6476eadab886c2e3caddf62.jpg \n", + " inflating: /data/dog-breed-identification/train/ae7ea3acbc17ea070b3da00d6d52be11.jpg \n", + " inflating: /data/dog-breed-identification/train/ae829def01226daffb29306308a2f2a0.jpg \n", + " inflating: /data/dog-breed-identification/train/ae8693b4868b1744be1fefb328a8a486.jpg \n", + " inflating: /data/dog-breed-identification/train/ae94adcb87dcad2bc5e4db5b71012441.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ae9566fed2f45407a4282a6677652c0f.jpg \n", + " inflating: /data/dog-breed-identification/train/ae9eb83de4161c01bb3e1fa53fad719f.jpg \n", + " inflating: /data/dog-breed-identification/train/ae9ef42cbf311e628355ca4ad5116e5c.jpg \n", + " inflating: /data/dog-breed-identification/train/aea2976886dd8b79a498cf6328d53d65.jpg \n", + " inflating: /data/dog-breed-identification/train/aeab7ea2dca5c298c13e87eab4a654cd.jpg \n", + " inflating: /data/dog-breed-identification/train/aeb846fd220051321d1295eb7388fe23.jpg \n", + " inflating: /data/dog-breed-identification/train/aec0ee307cbabff63516b55f3a605144.jpg \n", + " inflating: /data/dog-breed-identification/train/aec5d7550b6fb110ce6b1adda05dffb1.jpg \n", + " inflating: /data/dog-breed-identification/train/aed2415f3d996819d68ea82acaca4654.jpg \n", + " inflating: /data/dog-breed-identification/train/aed285c5eae61e3e7ddb5f78e6a7a977.jpg \n", + " inflating: /data/dog-breed-identification/train/aed87189e6b60b02c3f16566afe8cbd6.jpg \n", + " inflating: /data/dog-breed-identification/train/aedc70a30016c85485369ea541058b2e.jpg \n", + " inflating: /data/dog-breed-identification/train/aee2775bb66001a6b3b48840710c907d.jpg \n", + " inflating: /data/dog-breed-identification/train/aee6daee7cf47f6daa6ee98c4ceb15a0.jpg \n", + " inflating: /data/dog-breed-identification/train/aee76a0c645d9e2d728ed283c142d04a.jpg \n", + " inflating: /data/dog-breed-identification/train/aeedf5985e1a8b0b4dc9f2e43f389150.jpg \n", + " inflating: /data/dog-breed-identification/train/aef0d510ecb238da32b77e26da8ff327.jpg \n", + " inflating: /data/dog-breed-identification/train/aef239bc996a3b364aba20025acebc3c.jpg \n", + " inflating: /data/dog-breed-identification/train/aef36e28ef84d9c3833b3896e27c98ba.jpg \n", + " inflating: /data/dog-breed-identification/train/aef57a16eab44e62bc3a68cc9b2e7ec4.jpg \n", + " inflating: /data/dog-breed-identification/train/aef587933252b2077f53be2227bcf9c7.jpg \n", + " inflating: 
/data/dog-breed-identification/train/af18dee7a5a66fd6c6b454058b57167b.jpg \n", + " inflating: /data/dog-breed-identification/train/af253aa8c133175a74e66df6017f44a3.jpg \n", + " inflating: /data/dog-breed-identification/train/af25571106623e3c34d260cda4bd3096.jpg \n", + " inflating: /data/dog-breed-identification/train/af27e830588a16b0a0ae74d1170c2a30.jpg \n", + " inflating: /data/dog-breed-identification/train/af2a164d245c2b4e92e073e2227dfc39.jpg \n", + " inflating: /data/dog-breed-identification/train/af2b739748b90d1d3741171a9414ae59.jpg \n", + " inflating: /data/dog-breed-identification/train/af2d0af36d82e3e97c9182772cdc9ae6.jpg \n", + " inflating: /data/dog-breed-identification/train/af2da8ee3367dd69472b5540750e7afb.jpg \n", + " inflating: /data/dog-breed-identification/train/af39c8e69b35029608e2c600e40fc515.jpg \n", + " inflating: /data/dog-breed-identification/train/af3a495fd0f9f520ecf3be769a28a23b.jpg \n", + " inflating: /data/dog-breed-identification/train/af408dfdb162d16e74828ac11f784cd1.jpg \n", + " inflating: /data/dog-breed-identification/train/af45f5f49cc096591d8481706fc1f44d.jpg \n", + " inflating: /data/dog-breed-identification/train/af465eb9b2371bb3639518f0daa8529e.jpg \n", + " inflating: /data/dog-breed-identification/train/af57c552420b045bb5e15c402389eb0c.jpg \n", + " inflating: /data/dog-breed-identification/train/af5ea2c0ba78ce06af14acc92b0c8803.jpg \n", + " inflating: /data/dog-breed-identification/train/af5f51e5e660d9e0459c81d621dbab6e.jpg \n", + " inflating: /data/dog-breed-identification/train/af679c36a907c9a5006df4504ba67991.jpg \n", + " inflating: /data/dog-breed-identification/train/af6fb94b286f01effa542e8ea69c1322.jpg \n", + " inflating: /data/dog-breed-identification/train/af71c5531def7f83df30ca3990050d66.jpg \n", + " inflating: /data/dog-breed-identification/train/af750c18726a7dd02f0880a9bf662e31.jpg \n", + " inflating: /data/dog-breed-identification/train/af7ffd88192a31d29936e4c000519d48.jpg \n", + " inflating: 
/data/dog-breed-identification/train/af87254221899ae2347592b26f23a449.jpg \n", + " inflating: /data/dog-breed-identification/train/af8c60446292fcf5a0d4a98d24db6ed5.jpg \n", + " inflating: /data/dog-breed-identification/train/af9ac866a70d87a1072f6b6f7ac03394.jpg \n", + " inflating: /data/dog-breed-identification/train/af9e1633f7fddfcaaa0c5a5488b29e2c.jpg \n", + " inflating: /data/dog-breed-identification/train/afa9e8ba3e59af33fb61a068e4fceab8.jpg \n", + " inflating: /data/dog-breed-identification/train/afadd4327b01b62e6e98289c8f72e1ee.jpg \n", + " inflating: /data/dog-breed-identification/train/afb8f485dad4a02658dbc1ea57312ddc.jpg \n", + " inflating: /data/dog-breed-identification/train/afbf6843f56d14242c3393850a5cb167.jpg \n", + " inflating: /data/dog-breed-identification/train/afc5597e39a5eb338afde2bf75c6a0b2.jpg \n", + " inflating: /data/dog-breed-identification/train/afcc31a22f2ef6c2a70c7e0c64cfd495.jpg \n", + " inflating: /data/dog-breed-identification/train/afcd1d553c48f0d434b873920dd6d4a4.jpg \n", + " inflating: /data/dog-breed-identification/train/afd5babaf9bffa52667c40b78375e7e4.jpg \n", + " inflating: /data/dog-breed-identification/train/afedc4c3692a91ee44ef8d4d62c475db.jpg \n", + " inflating: /data/dog-breed-identification/train/aff4222a0f4fb441d627f7ad396caf3d.jpg \n", + " inflating: /data/dog-breed-identification/train/aff4ce7424acc69087a8aad14dfe30db.jpg \n", + " inflating: /data/dog-breed-identification/train/aff9e4c944e3e7b24b8166590d7ca95c.jpg \n", + " inflating: /data/dog-breed-identification/train/b0000c33070ce6ccf2ab25e10b7affde.jpg \n", + " inflating: /data/dog-breed-identification/train/b0039cd9822380c7181e74653cecb7b0.jpg \n", + " inflating: /data/dog-breed-identification/train/b008606ddfb5f48c2a68891fe861acce.jpg \n", + " inflating: /data/dog-breed-identification/train/b00cff12a22ac141efc9b630d76740be.jpg \n", + " inflating: /data/dog-breed-identification/train/b01372ac131079a990d1ba093b9c6311.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b0209bfa37c3a298bc2c660740e970a4.jpg \n", + " inflating: /data/dog-breed-identification/train/b0235fffba60fb4e4fa87cafe79983e8.jpg \n", + " inflating: /data/dog-breed-identification/train/b02a0ed4c12a5cfa5be497f548de3c2b.jpg \n", + " inflating: /data/dog-breed-identification/train/b02fdd22c35b59fc509d27e1f5d02373.jpg \n", + " inflating: /data/dog-breed-identification/train/b03a73190ffd9c423257b94927599460.jpg \n", + " inflating: /data/dog-breed-identification/train/b0405b307aaf230933f30af5f4b52bf5.jpg \n", + " inflating: /data/dog-breed-identification/train/b04060a5afa67689678ed8459b6a2236.jpg \n", + " inflating: /data/dog-breed-identification/train/b047a2436ec6f415212fe82f4eb5bfa2.jpg \n", + " inflating: /data/dog-breed-identification/train/b049ea9287e3c5f20af982e1977df9fd.jpg \n", + " inflating: /data/dog-breed-identification/train/b04dde81f2e6c0b0a37ecf99714d716e.jpg \n", + " inflating: /data/dog-breed-identification/train/b05d49698b7426ac9420983f0b8cacf8.jpg \n", + " inflating: /data/dog-breed-identification/train/b05d8eb1d33475e05dd986dd0dd2554b.jpg \n", + " inflating: /data/dog-breed-identification/train/b069dbe6b93ea0082792e2e5160d8574.jpg \n", + " inflating: /data/dog-breed-identification/train/b075c74041a14bc388192ebabf092084.jpg \n", + " inflating: /data/dog-breed-identification/train/b076ead1c800594f7aad2ff784bc3427.jpg \n", + " inflating: /data/dog-breed-identification/train/b07f9490b150ada099c5c09004c9751f.jpg \n", + " inflating: /data/dog-breed-identification/train/b084d252acbffe3520120c45314ffeb7.jpg \n", + " inflating: /data/dog-breed-identification/train/b08e4205506dc41bdc7ae8546677bb68.jpg \n", + " inflating: /data/dog-breed-identification/train/b091cbf14b893d3c13cea2b1a8cb503f.jpg \n", + " inflating: /data/dog-breed-identification/train/b092be3d14897804d887ba436f9ddcb7.jpg \n", + " inflating: /data/dog-breed-identification/train/b0a63d52383934ed0caec038dbbe7d92.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b0ab7bf62ba55c1c4a6b037132576cd4.jpg \n", + " inflating: /data/dog-breed-identification/train/b0b2a18852c2380285649efd5947f408.jpg \n", + " inflating: /data/dog-breed-identification/train/b0b2f68210c947e1365d9d8687bdec0e.jpg \n", + " inflating: /data/dog-breed-identification/train/b0b7aaaa673295ace0ae4d9f248b71f1.jpg \n", + " inflating: /data/dog-breed-identification/train/b0ba782b54de66ab1c3a340b5cfd9d9a.jpg \n", + " inflating: /data/dog-breed-identification/train/b0bbf2fba6147815e11b198902e35aa4.jpg \n", + " inflating: /data/dog-breed-identification/train/b0c2de19f94d48e6d0c05b2c61effe74.jpg \n", + " inflating: /data/dog-breed-identification/train/b0cbfe76d07597b948782dc3c74cdf66.jpg \n", + " inflating: /data/dog-breed-identification/train/b0ee2f2ae56110de3409f31e26a8704a.jpg \n", + " inflating: /data/dog-breed-identification/train/b0f80934881f210a6cfc50e425660ac8.jpg \n", + " inflating: /data/dog-breed-identification/train/b103316ae94567dd01890e7fbbee90e2.jpg \n", + " inflating: /data/dog-breed-identification/train/b106bb7f5c07cfdd55f979ae7a83a41c.jpg \n", + " inflating: /data/dog-breed-identification/train/b106df46d6b5fa931d0913dce8a46362.jpg \n", + " inflating: /data/dog-breed-identification/train/b120195ff2d099b37987f076a5c1078b.jpg \n", + " inflating: /data/dog-breed-identification/train/b12779f9c69a049eaa53a01919942d55.jpg \n", + " inflating: /data/dog-breed-identification/train/b12ca57dbc800fd7caf1327516b4fa1b.jpg \n", + " inflating: /data/dog-breed-identification/train/b13299a4da33368f9d14b3ff6ac0eb02.jpg \n", + " inflating: /data/dog-breed-identification/train/b13e4a67a6592e0088b088d1cc2aa9ec.jpg \n", + " inflating: /data/dog-breed-identification/train/b141511bd1e9f3010faa8582dd85303a.jpg \n", + " inflating: /data/dog-breed-identification/train/b141d970534f603326c0d3bf1a60765b.jpg \n", + " inflating: /data/dog-breed-identification/train/b14cc2a47bd6d42a14b982c8da211aab.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b14fba6e4c376bbb8b7ca5c803816551.jpg \n", + " inflating: /data/dog-breed-identification/train/b15de8f0e944266ddfadbf190d98532e.jpg \n", + " inflating: /data/dog-breed-identification/train/b15e7ec2ce8795370ceb95d99843a85c.jpg \n", + " inflating: /data/dog-breed-identification/train/b15f22df40ec97250147e9daadba50ab.jpg \n", + " inflating: /data/dog-breed-identification/train/b16549549ac51020efa4ab86923c37b3.jpg \n", + " inflating: /data/dog-breed-identification/train/b16c46c98def02584cb4a3f7c493f61e.jpg \n", + " inflating: /data/dog-breed-identification/train/b16cd5f2778788a00ed00c93d15143f1.jpg \n", + " inflating: /data/dog-breed-identification/train/b185661d8d5b7c934b66f63d813cb407.jpg \n", + " inflating: /data/dog-breed-identification/train/b18a0f00efc8c6e741bf392c4fc34a2b.jpg \n", + " inflating: /data/dog-breed-identification/train/b18fcbb4e08227a471b4dc88d902dca7.jpg \n", + " inflating: /data/dog-breed-identification/train/b19813c11f94f561e45242347d8da73a.jpg \n", + " inflating: /data/dog-breed-identification/train/b1a81d15f7628ab8d88dbe0ed24924fa.jpg \n", + " inflating: /data/dog-breed-identification/train/b1b4adaf22d7927653f4188725d6ce27.jpg \n", + " inflating: /data/dog-breed-identification/train/b1b60ca1c61fef3650e276634cba7e9c.jpg \n", + " inflating: /data/dog-breed-identification/train/b1cc94466334a8caebf966aad4f50484.jpg \n", + " inflating: /data/dog-breed-identification/train/b1d2238899cfa91f4da91994b304e090.jpg \n", + " inflating: /data/dog-breed-identification/train/b1d69c0d60af2cc45ca95a3136e9cf7c.jpg \n", + " inflating: /data/dog-breed-identification/train/b1e04e72134f93f445ebb10a1aa7c20f.jpg \n", + " inflating: /data/dog-breed-identification/train/b1e148a8ac377ba2148d061db9fa35da.jpg \n", + " inflating: /data/dog-breed-identification/train/b1e5e40a1122d30cf96febd2fa974938.jpg \n", + " inflating: /data/dog-breed-identification/train/b1e89362737f51b64095b10b1bae93d2.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b1e8dfb27a65a838de14079947684fb8.jpg \n", + " inflating: /data/dog-breed-identification/train/b1eb6462f0c3801b72d5b0246e9b748e.jpg \n", + " inflating: /data/dog-breed-identification/train/b1f61f24517fba0fd284ded11f5ea9dd.jpg \n", + " inflating: /data/dog-breed-identification/train/b1f80a20da3b0bc83d6e5d526592ef92.jpg \n", + " inflating: /data/dog-breed-identification/train/b1fcbc9b6ba9c36d53eef5229e54f815.jpg \n", + " inflating: /data/dog-breed-identification/train/b1fd800efb2e36febe4ac7df73b07038.jpg \n", + " inflating: /data/dog-breed-identification/train/b2004690624be6cb621bd557815e7638.jpg \n", + " inflating: /data/dog-breed-identification/train/b200db1a2dc5f3d5a6521aac2130713c.jpg \n", + " inflating: /data/dog-breed-identification/train/b205121465ba657ca587e8c0764f3a6b.jpg \n", + " inflating: /data/dog-breed-identification/train/b20ac3f8cf850665723bb13cc31cc316.jpg \n", + " inflating: /data/dog-breed-identification/train/b20fd8f849b2460dec6c505d2c342ddc.jpg \n", + " inflating: /data/dog-breed-identification/train/b2100c289c13d12ffd4161716f2bb646.jpg \n", + " inflating: /data/dog-breed-identification/train/b210e9dec268a9d616f29fe91a2420be.jpg \n", + " inflating: /data/dog-breed-identification/train/b21f1277b79851fc6a196c4c872dc921.jpg \n", + " inflating: /data/dog-breed-identification/train/b220bfa6780f5e36871117680bed8b03.jpg \n", + " inflating: /data/dog-breed-identification/train/b2260bbc0d9a999d6e886bc271da6733.jpg \n", + " inflating: /data/dog-breed-identification/train/b228a8cc82df53d3725566931a9f2f82.jpg \n", + " inflating: /data/dog-breed-identification/train/b22e3e2987ce4e7b599d109df6ef9d98.jpg \n", + " inflating: /data/dog-breed-identification/train/b23af128a502079098556ca7ea2f981e.jpg \n", + " inflating: /data/dog-breed-identification/train/b23d0aef01e57245cdcd4af901a04ae9.jpg \n", + " inflating: /data/dog-breed-identification/train/b24cc2975f49a209ebc40c94ef88cb42.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b25f133c4dffc6d1eba2ae381ecb6d18.jpg \n", + " inflating: /data/dog-breed-identification/train/b261916c6a46fab4c1b14a7f89e072d1.jpg \n", + " inflating: /data/dog-breed-identification/train/b262b17b84dd3887a6e3619e82474b53.jpg \n", + " inflating: /data/dog-breed-identification/train/b27075fa27a6c795ac1299a7a8bdd06a.jpg \n", + " inflating: /data/dog-breed-identification/train/b276d7894879af88185c200ac301d8a3.jpg \n", + " inflating: /data/dog-breed-identification/train/b279dd6df6740a08aa98ea68cce2396b.jpg \n", + " inflating: /data/dog-breed-identification/train/b27ac1eebf09e74f5a627c10bcd3b198.jpg \n", + " inflating: /data/dog-breed-identification/train/b27ccad9788e1868e258a96f8538830a.jpg \n", + " inflating: /data/dog-breed-identification/train/b281302557af3f809a2beda37bd5a73b.jpg \n", + " inflating: /data/dog-breed-identification/train/b2873c30dc20a77365b071d7dc6dec79.jpg \n", + " inflating: /data/dog-breed-identification/train/b28dd4ec5ad70c3901a54447ef7ec5ab.jpg \n", + " inflating: /data/dog-breed-identification/train/b29300592161bba7893ceb5fc9dbc75f.jpg \n", + " inflating: /data/dog-breed-identification/train/b297a6f06bcc920e872fff4eeec4f9bf.jpg \n", + " inflating: /data/dog-breed-identification/train/b2b05fba50969ecd8ae5a8351dc273b6.jpg \n", + " inflating: /data/dog-breed-identification/train/b2bab263aaa473785d7c88eec236acd8.jpg \n", + " inflating: /data/dog-breed-identification/train/b2bac9338d6156ee0ee73990c32ee3ed.jpg \n", + " inflating: /data/dog-breed-identification/train/b2bc11ef2ab31c65e49f6f03c82cb279.jpg \n", + " inflating: /data/dog-breed-identification/train/b2bebb5f6d7db2784ddded8a2728625e.jpg \n", + " inflating: /data/dog-breed-identification/train/b2c2f184cea6a16eb97267e1eae4bc04.jpg \n", + " inflating: /data/dog-breed-identification/train/b2c525d80d4a5434a538ccb0b07b9e71.jpg \n", + " inflating: /data/dog-breed-identification/train/b2c7527d2688dc1eef6c31ff9e6e2395.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b2e1a5fd7eedab2fad957eed10d66733.jpg \n", + " inflating: /data/dog-breed-identification/train/b2e3a24c98822656f3db28f6591b2b14.jpg \n", + " inflating: /data/dog-breed-identification/train/b2e57528bdc9a6a67868320e9a56a492.jpg \n", + " inflating: /data/dog-breed-identification/train/b2ecb9fc9cfb36041e60d03f44fc181d.jpg \n", + " inflating: /data/dog-breed-identification/train/b2fad5b45f040f4bd58c907fc5b9983f.jpg \n", + " inflating: /data/dog-breed-identification/train/b2fd316532f45ac165fba77eb5ce44fa.jpg \n", + " inflating: /data/dog-breed-identification/train/b3040f625c658c5e5e41716a837941b9.jpg \n", + " inflating: /data/dog-breed-identification/train/b3076ec0548248586d37e38a10026b4a.jpg \n", + " inflating: /data/dog-breed-identification/train/b31052594390ff1ca89670dda5884859.jpg \n", + " inflating: /data/dog-breed-identification/train/b31735ecd2e53d59cdbf2f668aab027b.jpg \n", + " inflating: /data/dog-breed-identification/train/b31fe7457dd912584bf932eeb40f4664.jpg \n", + " inflating: /data/dog-breed-identification/train/b3211bbd40fb727bdb50fdad2b6c2638.jpg \n", + " inflating: /data/dog-breed-identification/train/b327e9b91d0698807067f416d161afe7.jpg \n", + " inflating: /data/dog-breed-identification/train/b333b37df9ebb167887a6f7a2f25dccb.jpg \n", + " inflating: /data/dog-breed-identification/train/b336c665f071a1b9d0a8020569c4b2c4.jpg \n", + " inflating: /data/dog-breed-identification/train/b33ebbb55eb1b3ff1587deb055529e00.jpg \n", + " inflating: /data/dog-breed-identification/train/b340dcf23d4edfcf93b00c400decb824.jpg \n", + " inflating: /data/dog-breed-identification/train/b34b9cfdd2240ef9ae82e399f6887933.jpg \n", + " inflating: /data/dog-breed-identification/train/b35c6faadffc04f701df7b07c7944c29.jpg \n", + " inflating: /data/dog-breed-identification/train/b36026757545d392c8becd69b87bd908.jpg \n", + " inflating: /data/dog-breed-identification/train/b3644d932aa482a37603623c0792eb1d.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b36db4c42629a8fda912011e967d9f8a.jpg \n", + " inflating: /data/dog-breed-identification/train/b36ef116ba297a4e2564aa9f9924d8bb.jpg \n", + " inflating: /data/dog-breed-identification/train/b375c3eabf59a3d0e071b8b0343f2d4d.jpg \n", + " inflating: /data/dog-breed-identification/train/b37a0e859b10171d6e76615454ad7efb.jpg \n", + " inflating: /data/dog-breed-identification/train/b37bd77fd4cf8db40bfe52340ffbe969.jpg \n", + " inflating: /data/dog-breed-identification/train/b37eeeae39be339eb04a3705bfcac2b3.jpg \n", + " inflating: /data/dog-breed-identification/train/b384e91febd6cee8aff2771939794777.jpg \n", + " inflating: /data/dog-breed-identification/train/b38a2b69d7a9addcb61ebfd0507ee770.jpg \n", + " inflating: /data/dog-breed-identification/train/b38b639670034f8ef9bd87c17ce29b56.jpg \n", + " inflating: /data/dog-breed-identification/train/b38ca23d57777786a6e641b7bfb18329.jpg \n", + " inflating: /data/dog-breed-identification/train/b38d84aa581bb69d437ce2b128198012.jpg \n", + " inflating: /data/dog-breed-identification/train/b39551fc6a24b76d219c3244b587a2e2.jpg \n", + " inflating: /data/dog-breed-identification/train/b39b851ef02720a4fc4ac85dc1ade7b9.jpg \n", + " inflating: /data/dog-breed-identification/train/b39c3d11e16d3432246aceac3e4ab3fe.jpg \n", + " inflating: /data/dog-breed-identification/train/b39fd55dd45e4f1066b80960c8f39a84.jpg \n", + " inflating: /data/dog-breed-identification/train/b3b149700702da3865004745ef4f6001.jpg \n", + " inflating: /data/dog-breed-identification/train/b3b5eb135bc850b54a04d2c43b83a237.jpg \n", + " inflating: /data/dog-breed-identification/train/b3bcaf1fea645fbad288f546fd18cc64.jpg \n", + " inflating: /data/dog-breed-identification/train/b3beb6bab59975305146110d51fc7120.jpg \n", + " inflating: /data/dog-breed-identification/train/b3bf158af5af1610595320d4c78441c9.jpg \n", + " inflating: /data/dog-breed-identification/train/b3c04085f4a5d7f8dd5165efea027eb8.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b3c360cdc204d571b5287268fa4b4c25.jpg \n", + " inflating: /data/dog-breed-identification/train/b3ca18e3ea2d722f3ab9b62cbde4bad5.jpg \n", + " inflating: /data/dog-breed-identification/train/b3cbe3c848a691f2b94838723ffbfb4b.jpg \n", + " inflating: /data/dog-breed-identification/train/b3ce58830523907af62e0069e081bc71.jpg \n", + " inflating: /data/dog-breed-identification/train/b3ce89788191e0f62e14820abd176628.jpg \n", + " inflating: /data/dog-breed-identification/train/b3d40aabb2e2514c0443d7d91c5c6696.jpg \n", + " inflating: /data/dog-breed-identification/train/b3dd2291baf6f3eebaf839331393cabb.jpg \n", + " inflating: /data/dog-breed-identification/train/b3df2b20e52b1a2b87506b565bdea231.jpg \n", + " inflating: /data/dog-breed-identification/train/b3df9627c5df05fb38ae5990e3b47a35.jpg \n", + " inflating: /data/dog-breed-identification/train/b3e2de0e9d1a5c6c180277b87b85971f.jpg \n", + " inflating: /data/dog-breed-identification/train/b3e3556e19d2d067314dcbfecf2ab7b7.jpg \n", + " inflating: /data/dog-breed-identification/train/b3eb6305969fbae09c13ef3b9e5c2dc4.jpg \n", + " inflating: /data/dog-breed-identification/train/b3f01d9493c18a2a4a41c7a4458970a3.jpg \n", + " inflating: /data/dog-breed-identification/train/b3f1413df57dc23d8dfaa2a342c6a6ff.jpg \n", + " inflating: /data/dog-breed-identification/train/b3f7b7db3d7a9bd040eef5c4bdb15f13.jpg \n", + " inflating: /data/dog-breed-identification/train/b402c34356b9488d0e162f9d29260fd4.jpg \n", + " inflating: /data/dog-breed-identification/train/b41399cf3293bc2b53a677529ba5a8c1.jpg \n", + " inflating: /data/dog-breed-identification/train/b4173f95948cd008831eeea46ace8498.jpg \n", + " inflating: /data/dog-breed-identification/train/b422718878d017e73752b3765d0c361b.jpg \n", + " inflating: /data/dog-breed-identification/train/b4265284e8c57252ce24cb305b95b241.jpg \n", + " inflating: /data/dog-breed-identification/train/b4387fb303b18cf2c7e49248f3bc27f8.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b43971979a549110b9670bfdcd8f037a.jpg \n", + " inflating: /data/dog-breed-identification/train/b43adc80f87f77949a64118df86bbeff.jpg \n", + " inflating: /data/dog-breed-identification/train/b43ec84e89e07bd43395777a5dcc0bc1.jpg \n", + " inflating: /data/dog-breed-identification/train/b44040bb52e1d8c2c32534abd2581cbf.jpg \n", + " inflating: /data/dog-breed-identification/train/b442ed5d4d240a8204a63d5849d8d07a.jpg \n", + " inflating: /data/dog-breed-identification/train/b4446e631a52265229fd439b18a2df4d.jpg \n", + " inflating: /data/dog-breed-identification/train/b44d4785a27a7b5a0f793ff15956df20.jpg \n", + " inflating: /data/dog-breed-identification/train/b44ddd50615e6526250b84754798c47d.jpg \n", + " inflating: /data/dog-breed-identification/train/b45eace2c8ee482473ab91f6ed2f3105.jpg \n", + " inflating: /data/dog-breed-identification/train/b45ed1800440f35413bd0f2ffaec5003.jpg \n", + " inflating: /data/dog-breed-identification/train/b46db7fdd97b79ea62592e62cc695197.jpg \n", + " inflating: /data/dog-breed-identification/train/b486c3b6151cad246e272cccb05a9f2d.jpg \n", + " inflating: /data/dog-breed-identification/train/b48a7b5aca76a96057c135f5f968ded7.jpg \n", + " inflating: /data/dog-breed-identification/train/b48e1881fe48313231e5163b57bed9d9.jpg \n", + " inflating: /data/dog-breed-identification/train/b497f0cc2b97e9286e947de0bb883a2c.jpg \n", + " inflating: /data/dog-breed-identification/train/b49cdf4de3c6a0807520fce130559a27.jpg \n", + " inflating: /data/dog-breed-identification/train/b49ec4188baa0d5dc7eebf45e7bb9ac1.jpg \n", + " inflating: /data/dog-breed-identification/train/b4a0d7fe23e53513df499adf9ee04bb9.jpg \n", + " inflating: /data/dog-breed-identification/train/b4a8871365faccfa51e33217c4fd1291.jpg \n", + " inflating: /data/dog-breed-identification/train/b4add6ef5fb345821a5da77c4b87e745.jpg \n", + " inflating: /data/dog-breed-identification/train/b4b57dfabb7b3c94ee7a081492c9345b.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b4b902c850a291cdfbf36335429f5230.jpg \n", + " inflating: /data/dog-breed-identification/train/b4b94f2b42428a8260e67127bdeb4341.jpg \n", + " inflating: /data/dog-breed-identification/train/b4c4ee94372525c264fa01ecefb6d597.jpg \n", + " inflating: /data/dog-breed-identification/train/b4d673a70a77504b16d2f975a941dbbf.jpg \n", + " inflating: /data/dog-breed-identification/train/b4d893d6a05c58cbbd9b8b5a0173bfce.jpg \n", + " inflating: /data/dog-breed-identification/train/b4e20187a9b91d20f7e55615eb710785.jpg \n", + " inflating: /data/dog-breed-identification/train/b4e88d85a18d11b8543c8fb53feab48c.jpg \n", + " inflating: /data/dog-breed-identification/train/b4ef4f738cb055e2da9dc32281485e44.jpg \n", + " inflating: /data/dog-breed-identification/train/b4f39398a413d13590cef95dd5a67915.jpg \n", + " inflating: /data/dog-breed-identification/train/b4f8d8eff4f0b4d93b8416a87daa3b93.jpg \n", + " inflating: /data/dog-breed-identification/train/b4f8fcd6d01a2b5e79b3920f8fc8804f.jpg \n", + " inflating: /data/dog-breed-identification/train/b5026720fe5e6ad751c68f3138fc3e7f.jpg \n", + " inflating: /data/dog-breed-identification/train/b5109fc2e1a22542a8fe60e56b378b20.jpg \n", + " inflating: /data/dog-breed-identification/train/b511a56821ea5208ca924754056298aa.jpg \n", + " inflating: /data/dog-breed-identification/train/b51261d75a427148d502ecd9108312db.jpg \n", + " inflating: /data/dog-breed-identification/train/b5184cf2fb46e3bdc8a3d4910713da92.jpg \n", + " inflating: /data/dog-breed-identification/train/b51b8b30da4501c9907d100df382b87c.jpg \n", + " inflating: /data/dog-breed-identification/train/b51e9d68eb9a95484d2e64ac7a606384.jpg \n", + " inflating: /data/dog-breed-identification/train/b520efcc428dc8666a138d1f6eb65853.jpg \n", + " inflating: /data/dog-breed-identification/train/b522b9ad41145386ae378fdffaf009e7.jpg \n", + " inflating: /data/dog-breed-identification/train/b527d1075cc07fc8375378ca990ee241.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b52bb521afa1dfe802a27a8939a8c7d0.jpg \n", + " inflating: /data/dog-breed-identification/train/b532e2e6f68876649639aa216ea4cddf.jpg \n", + " inflating: /data/dog-breed-identification/train/b53646b931972d951b7306584a00cfc4.jpg \n", + " inflating: /data/dog-breed-identification/train/b54c71c47f3d46a3bf72fbd9301de6fa.jpg \n", + " inflating: /data/dog-breed-identification/train/b54cef53d681db71d03a4e4230acefc8.jpg \n", + " inflating: /data/dog-breed-identification/train/b556b3f4cfbf4775dc52b0a62aa21e3d.jpg \n", + " inflating: /data/dog-breed-identification/train/b55c04ec49499cc1383468ac5620ae63.jpg \n", + " inflating: /data/dog-breed-identification/train/b55cbc70edb5bbcb907ab4969283d9fa.jpg \n", + " inflating: /data/dog-breed-identification/train/b55dc1e83b83d21795dd1b2b5c551780.jpg \n", + " inflating: /data/dog-breed-identification/train/b57f4d8d01aec87fb4e257b1e265ec38.jpg \n", + " inflating: /data/dog-breed-identification/train/b58abe0f8254c308bb3334e5784caf91.jpg \n", + " inflating: /data/dog-breed-identification/train/b5982ad65efe3045bccfad767bfae41f.jpg \n", + " inflating: /data/dog-breed-identification/train/b5a15162ac1421377d827ecc1ec0859f.jpg \n", + " inflating: /data/dog-breed-identification/train/b5a628e9eb023913a2f9c138676ec455.jpg \n", + " inflating: /data/dog-breed-identification/train/b5aa440f4f161b2f3b97ec382c68cf4a.jpg \n", + " inflating: /data/dog-breed-identification/train/b5adc561d92d4804993d0ad88910f2e8.jpg \n", + " inflating: /data/dog-breed-identification/train/b5adfdc1436b06e71caf363fa8d4d4b9.jpg \n", + " inflating: /data/dog-breed-identification/train/b5b5058c122d5e340187b6d4ecd8e3b3.jpg \n", + " inflating: /data/dog-breed-identification/train/b5b618aec816a9dd7d7b9588630fd4e3.jpg \n", + " inflating: /data/dog-breed-identification/train/b5bc5edb725fa2c3405a5e61b4bd4318.jpg \n", + " inflating: /data/dog-breed-identification/train/b5be8566b6f12c926f5bf6d8cfbc67c2.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b5bee2902e2e1a80367e023d0d2a43fd.jpg \n", + " inflating: /data/dog-breed-identification/train/b5c30f3bd5c0335cc02d923c31fb606b.jpg \n", + " inflating: /data/dog-breed-identification/train/b5c8acb11bc2aee5a8fd04afb99a28ab.jpg \n", + " inflating: /data/dog-breed-identification/train/b5cb07aa4fca883205dd6d9b05d0689c.jpg \n", + " inflating: /data/dog-breed-identification/train/b5d64452ea01f52960c0a7d0966fa736.jpg \n", + " inflating: /data/dog-breed-identification/train/b5dfca72919172ea1aba496636537e1a.jpg \n", + " inflating: /data/dog-breed-identification/train/b5ea84ce627b5c808e2e408512ec71bb.jpg \n", + " inflating: /data/dog-breed-identification/train/b5eaed19ce0231fc6157d6e8ac6239a2.jpg \n", + " inflating: /data/dog-breed-identification/train/b5ef2df715af6077ea126ac35855670c.jpg \n", + " inflating: /data/dog-breed-identification/train/b5f44c74eb649127aaedb0d0ddc6afd5.jpg \n", + " inflating: /data/dog-breed-identification/train/b5f9c89e10da55d264acc6396aac39c7.jpg \n", + " inflating: /data/dog-breed-identification/train/b60b61fec62e723569a084cbb13e5132.jpg \n", + " inflating: /data/dog-breed-identification/train/b619ae437b7a66ec716f3a34d7b40965.jpg \n", + " inflating: /data/dog-breed-identification/train/b61df6dac68a083446aea7738c74e5d9.jpg \n", + " inflating: /data/dog-breed-identification/train/b62225a14aa1f870dd9b33c4ff4c9de4.jpg \n", + " inflating: /data/dog-breed-identification/train/b6253dbeac027cb99c69109c87cb7c42.jpg \n", + " inflating: /data/dog-breed-identification/train/b626c49f46a7685b9b4c124db95cb4fa.jpg \n", + " inflating: /data/dog-breed-identification/train/b6281e28d445000d206c5d89af27335c.jpg \n", + " inflating: /data/dog-breed-identification/train/b62c6746fb6911627bf62e0e312e79f4.jpg \n", + " inflating: /data/dog-breed-identification/train/b62f1c768b5eb401fdf27a4fc2b64cf5.jpg \n", + " inflating: /data/dog-breed-identification/train/b632eedd6378ef5376813c11f0d454bf.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b63b0200ddbb97df81972b26574959ab.jpg \n", + " inflating: /data/dog-breed-identification/train/b6441584ff99c3bb376c67e0bc411c47.jpg \n", + " inflating: /data/dog-breed-identification/train/b646b8f2863d2e5ccf87b79024f4d517.jpg \n", + " inflating: /data/dog-breed-identification/train/b64fc34ac26b60c15706e82abef11ba9.jpg \n", + " inflating: /data/dog-breed-identification/train/b6561ca0cb1acc71733564cab65fe6b2.jpg \n", + " inflating: /data/dog-breed-identification/train/b6587ec99aba10ea24f850dfbf2f1654.jpg \n", + " inflating: /data/dog-breed-identification/train/b6591fe0f1e14cf4aadc533c97eaf558.jpg \n", + " inflating: /data/dog-breed-identification/train/b65a39808830bf4c7ac7a2144f17161c.jpg \n", + " inflating: /data/dog-breed-identification/train/b65dfb7122f819dc9bdaa9b53e6880a6.jpg \n", + " inflating: /data/dog-breed-identification/train/b663fe4bee1907aac5c11313307f328c.jpg \n", + " inflating: /data/dog-breed-identification/train/b66b6ff2ff16c6e746af3ef624e471f1.jpg \n", + " inflating: /data/dog-breed-identification/train/b693577aa9e7c6669457395d2aad073a.jpg \n", + " inflating: /data/dog-breed-identification/train/b69a73af56dec1a60e4251b1f000b414.jpg \n", + " inflating: /data/dog-breed-identification/train/b69c61b430c0eb2c2044992afce42556.jpg \n", + " inflating: /data/dog-breed-identification/train/b6a1ede9dc7235b08abad10968740033.jpg \n", + " inflating: /data/dog-breed-identification/train/b6a6bf17290dcccfb399b78cfeb2524d.jpg \n", + " inflating: /data/dog-breed-identification/train/b6ac6d8b9923fc4441457305959ef92d.jpg \n", + " inflating: /data/dog-breed-identification/train/b6b07f0325935daac54d34495e96dc80.jpg \n", + " inflating: /data/dog-breed-identification/train/b6bc2e417e8a8144c5de1822b5fef901.jpg \n", + " inflating: /data/dog-breed-identification/train/b6c15ba2271aae59b55443cc46ebab60.jpg \n", + " inflating: /data/dog-breed-identification/train/b6c2bb1848f3063b69bf5e6b74e25582.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b6c51bc40c0c8e3a618e3f8470f25314.jpg \n", + " inflating: /data/dog-breed-identification/train/b6c630af05e14b4b707bef3e9dcba09f.jpg \n", + " inflating: /data/dog-breed-identification/train/b6cdce834b0eb3c01bf03ffa0be2b973.jpg \n", + " inflating: /data/dog-breed-identification/train/b6dbcd16a92a5ae62bae15dbfd06c889.jpg \n", + " inflating: /data/dog-breed-identification/train/b6df66f656a99e3965d858e7252dd073.jpg \n", + " inflating: /data/dog-breed-identification/train/b6e2543f961683afcedb006ba8a54d04.jpg \n", + " inflating: /data/dog-breed-identification/train/b6fc90a99b4489f067852fb2b38e641b.jpg \n", + " inflating: /data/dog-breed-identification/train/b6fe6d1a08a8766f5db00a06e635bbc2.jpg \n", + " inflating: /data/dog-breed-identification/train/b703bbc774d79659d9890a1bab2eeb4a.jpg \n", + " inflating: /data/dog-breed-identification/train/b7123d5d7de79459bb6a1f62fafd7ed8.jpg \n", + " inflating: /data/dog-breed-identification/train/b71ba77f52b28c5171c20415a25dd6b9.jpg \n", + " inflating: /data/dog-breed-identification/train/b7210485096e88407ad8bd62d1969e02.jpg \n", + " inflating: /data/dog-breed-identification/train/b72130fb5921d7a8711029fcd1f49e88.jpg \n", + " inflating: /data/dog-breed-identification/train/b7341d35618784cf8f9844796df7f249.jpg \n", + " inflating: /data/dog-breed-identification/train/b734f78ac374507744a9c25610b5784a.jpg \n", + " inflating: /data/dog-breed-identification/train/b737fc5d09de76ab8b5778cc5cc77dcc.jpg \n", + " inflating: /data/dog-breed-identification/train/b73a0d54dbdf69a271b117756ed53c9b.jpg \n", + " inflating: /data/dog-breed-identification/train/b73c2a5a4f1bd70716a1da4b5e53081c.jpg \n", + " inflating: /data/dog-breed-identification/train/b73d5a749a07c8e998c1a016a48ed170.jpg \n", + " inflating: /data/dog-breed-identification/train/b73df1ea118d708edd2dc6dd2bfa6b8d.jpg \n", + " inflating: /data/dog-breed-identification/train/b7450c7d0e5dd2e6a700db767364ef53.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b74e428a933c308646bf769d309e7a8b.jpg \n", + " inflating: /data/dog-breed-identification/train/b75100433bfac9fe49acb18d3dd934dd.jpg \n", + " inflating: /data/dog-breed-identification/train/b768b02deecca8391d2b4034cebf527c.jpg \n", + " inflating: /data/dog-breed-identification/train/b76ab109cca8b7170e7b8983dfc1c9db.jpg \n", + " inflating: /data/dog-breed-identification/train/b76c4352046031a43a0c4d20821579eb.jpg \n", + " inflating: /data/dog-breed-identification/train/b771da5baf383aa0af9d7da9e1d6e087.jpg \n", + " inflating: /data/dog-breed-identification/train/b778fcbea5a3d039632be558a404cfc9.jpg \n", + " inflating: /data/dog-breed-identification/train/b77df5bcb04c0bf4381a2ace3adbf2b0.jpg \n", + " inflating: /data/dog-breed-identification/train/b77f9a819d00738b165c8055bd2378f9.jpg \n", + " inflating: /data/dog-breed-identification/train/b7831e4626212d1b11d5a98c9eaeabdb.jpg \n", + " inflating: /data/dog-breed-identification/train/b7859bc0940bcff3bcd02155742ec4be.jpg \n", + " inflating: /data/dog-breed-identification/train/b78eb9db784ba1849ae8e21a8df79a05.jpg \n", + " inflating: /data/dog-breed-identification/train/b79c9bf7f5a375c12e6495be0013faff.jpg \n", + " inflating: /data/dog-breed-identification/train/b7a1bcb92b15f5bbb11bd81d41115b3d.jpg \n", + " inflating: /data/dog-breed-identification/train/b7a588fd384c51e3cc07b16caa84239d.jpg \n", + " inflating: /data/dog-breed-identification/train/b7a84afcc388d91869ec8bf85cf0449e.jpg \n", + " inflating: /data/dog-breed-identification/train/b7b17e7649e3c85654d9e79411de0730.jpg \n", + " inflating: /data/dog-breed-identification/train/b7bcf988b4dff199ec966696225a3aa0.jpg \n", + " inflating: /data/dog-breed-identification/train/b7c6ac4395621347dd6203f421a5ac28.jpg \n", + " inflating: /data/dog-breed-identification/train/b7cae62d0e5082ed3a7c05a578303b4f.jpg \n", + " inflating: /data/dog-breed-identification/train/b7cc2933c599642388ff6d446bbef59f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b7e925345d316dc4b79a5307f978ef05.jpg \n", + " inflating: /data/dog-breed-identification/train/b7ee7633af99443dcad5d24677f36404.jpg \n", + " inflating: /data/dog-breed-identification/train/b7f1bfdd7a26c4eb24edd61bbc842167.jpg \n", + " inflating: /data/dog-breed-identification/train/b7f69b255149ea94edf291e0b6bf1bc8.jpg \n", + " inflating: /data/dog-breed-identification/train/b7f8b7abb1d81857420081be4d087785.jpg \n", + " inflating: /data/dog-breed-identification/train/b7fce1387fe5122ffa26a00b8fa0e3c8.jpg \n", + " inflating: /data/dog-breed-identification/train/b80f2392885a0bc5bfc3aa79a849de93.jpg \n", + " inflating: /data/dog-breed-identification/train/b81200d1831287bbe732fc74fe485cf7.jpg \n", + " inflating: /data/dog-breed-identification/train/b81fd8eafb7a41381e66e8d42053c007.jpg \n", + " inflating: /data/dog-breed-identification/train/b8241dace56c5a7ead41326a6417ab65.jpg \n", + " inflating: /data/dog-breed-identification/train/b82bc22d0ca3d81abafe53f12994d203.jpg \n", + " inflating: /data/dog-breed-identification/train/b833838e583a0f38bf8be0971074be52.jpg \n", + " inflating: /data/dog-breed-identification/train/b83be4ce6598d3d8db843fb1256f7b18.jpg \n", + " inflating: /data/dog-breed-identification/train/b84a7d0ce8f87e7f4eb1aead58cdef24.jpg \n", + " inflating: /data/dog-breed-identification/train/b84b1bae39abc7937145592fc00ec809.jpg \n", + " inflating: /data/dog-breed-identification/train/b84e0a99e4cd34f5dcce539c7ed3f91d.jpg \n", + " inflating: /data/dog-breed-identification/train/b851f508d91b68fefb42bb8ab57535bd.jpg \n", + " inflating: /data/dog-breed-identification/train/b851f6cbfd20d707f7427a0278913045.jpg \n", + " inflating: /data/dog-breed-identification/train/b8536d60999368470b9fb64c4ad3ed6a.jpg \n", + " inflating: /data/dog-breed-identification/train/b85bf8ae41f4d46370b3bada2b7d1acd.jpg \n", + " inflating: /data/dog-breed-identification/train/b85ebe9e29c2ae04e09ca480722e778c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b863750398c8fcf41aba873f884e5a7e.jpg \n", + " inflating: /data/dog-breed-identification/train/b8653d62ba8e324fc284a1a9defc91c4.jpg \n", + " inflating: /data/dog-breed-identification/train/b86820556af40d864baf167f9dacc9ca.jpg \n", + " inflating: /data/dog-breed-identification/train/b8876a1aae766190b66f0aec39530bdc.jpg \n", + " inflating: /data/dog-breed-identification/train/b88d4e63f9151739573ed538c8dc999e.jpg \n", + " inflating: /data/dog-breed-identification/train/b89557cf7a288ef15aafc5251ff58765.jpg \n", + " inflating: /data/dog-breed-identification/train/b8956103fc289ab5fe375163c4282228.jpg \n", + " inflating: /data/dog-breed-identification/train/b895b457d62b5ed6a98883d114e92cd8.jpg \n", + " inflating: /data/dog-breed-identification/train/b897b5c77e688a12adc103d5d3a85c2a.jpg \n", + " inflating: /data/dog-breed-identification/train/b8a6ef891fb512189f283b38922e7ab8.jpg \n", + " inflating: /data/dog-breed-identification/train/b8a954a3139ca98cc9dd5b2f7dee20ad.jpg \n", + " inflating: /data/dog-breed-identification/train/b8abefab7031d960bf6a9d5dba34856e.jpg \n", + " inflating: /data/dog-breed-identification/train/b8ae8c78f07933037c525c57c659ba04.jpg \n", + " inflating: /data/dog-breed-identification/train/b8b7240e9e1872e04ed62c7182b88f4c.jpg \n", + " inflating: /data/dog-breed-identification/train/b8bbc984982b63f97de5015a53806ef4.jpg \n", + " inflating: /data/dog-breed-identification/train/b8bea4b3a40836ea881062273abd805f.jpg \n", + " inflating: /data/dog-breed-identification/train/b8bf2c8ec3a5da83eb19f9eb918bcb44.jpg \n", + " inflating: /data/dog-breed-identification/train/b8bf653c56a4e2b4f3cf67a4690f73b1.jpg \n", + " inflating: /data/dog-breed-identification/train/b8c56a7e9f658d94c6d07b8496fc6411.jpg \n", + " inflating: /data/dog-breed-identification/train/b8cc9c5226ad59f53538f1f04b2b426e.jpg \n", + " inflating: /data/dog-breed-identification/train/b8d85bd3233f9337fd9b683365cd9707.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b8dcf30dd6d0b661fbdc892c8047e179.jpg \n", + " inflating: /data/dog-breed-identification/train/b8e09f10cfea58a7bdd11c9280f0690a.jpg \n", + " inflating: /data/dog-breed-identification/train/b8e233e679db8a5e7bcdab32c3d891ec.jpg \n", + " inflating: /data/dog-breed-identification/train/b8e73b52fb1d13a6c4b978358c911806.jpg \n", + " inflating: /data/dog-breed-identification/train/b8e77ec7272a78a3340dab5513917a85.jpg \n", + " inflating: /data/dog-breed-identification/train/b8ebdd7228a8cc54e064aedfc5860757.jpg \n", + " inflating: /data/dog-breed-identification/train/b8ed5c550941c7fd275f1aa07e242f16.jpg \n", + " inflating: /data/dog-breed-identification/train/b8f03e7c0df51e41d8ef79d9d90df144.jpg \n", + " inflating: /data/dog-breed-identification/train/b8f1b0d8fdb130de46b61a34ef0278bd.jpg \n", + " inflating: /data/dog-breed-identification/train/b8fb19ddc7d9973fc1bcccb5abf8ff22.jpg \n", + " inflating: /data/dog-breed-identification/train/b8fd495e7565e47123ef1500113942c2.jpg \n", + " inflating: /data/dog-breed-identification/train/b901180f89399824b9bd0dc8a2eb3a30.jpg \n", + " inflating: /data/dog-breed-identification/train/b902b671e9b94bab23b5f559194b6787.jpg \n", + " inflating: /data/dog-breed-identification/train/b905b9008e830033dedadbda5102f409.jpg \n", + " inflating: /data/dog-breed-identification/train/b92326736c446df97a64932c02113b47.jpg \n", + " inflating: /data/dog-breed-identification/train/b92385d243ab068cccd12337210af6b7.jpg \n", + " inflating: /data/dog-breed-identification/train/b925a34373763d846c1438768efec3b5.jpg \n", + " inflating: /data/dog-breed-identification/train/b941a00fdd9bdab17af68eaf935a1690.jpg \n", + " inflating: /data/dog-breed-identification/train/b9442ec825000b9b061cd71ed5eac36a.jpg \n", + " inflating: /data/dog-breed-identification/train/b94652e2a663106e16ea174e5797b49e.jpg \n", + " inflating: /data/dog-breed-identification/train/b9469cd11471bab7e748e5fa5bf4f157.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b947c09bf1e90896085a59299eeb8f2f.jpg \n", + " inflating: /data/dog-breed-identification/train/b9480efcc378fcc84c6e939dbfa59414.jpg \n", + " inflating: /data/dog-breed-identification/train/b950f361d3be81bdc9cc509dd059155a.jpg \n", + " inflating: /data/dog-breed-identification/train/b957aa0e8dc0628b09708213e9640a2f.jpg \n", + " inflating: /data/dog-breed-identification/train/b963a1d5bba4328707c2266a1296f33b.jpg \n", + " inflating: /data/dog-breed-identification/train/b967c474a6d472cc101e94ed9c43301b.jpg \n", + " inflating: /data/dog-breed-identification/train/b9703b559578d34c832cab8b43887289.jpg \n", + " inflating: /data/dog-breed-identification/train/b9757ae6aae924b0b5771d9dcd7590ca.jpg \n", + " inflating: /data/dog-breed-identification/train/b97ab6811b1db9b236a4c01c9fd35a38.jpg \n", + " inflating: /data/dog-breed-identification/train/b97b359479d37c9e95a6071d9e5ac59d.jpg \n", + " inflating: /data/dog-breed-identification/train/b97c4e63dc686e98bd5095e21dd7680f.jpg \n", + " inflating: /data/dog-breed-identification/train/b993bbf484887455232d9954827b0cee.jpg \n", + " inflating: /data/dog-breed-identification/train/b99c64276e8233986be5e93a6f803c67.jpg \n", + " inflating: /data/dog-breed-identification/train/b99f4f25c568301b36e3746070012f15.jpg \n", + " inflating: /data/dog-breed-identification/train/b9ac2372389f74c8fe9c381c7f97273a.jpg \n", + " inflating: /data/dog-breed-identification/train/b9ae33d10433281e7cf30ce09f61df37.jpg \n", + " inflating: /data/dog-breed-identification/train/b9b54494a2ed02ea74f0ef26a8cca728.jpg \n", + " inflating: /data/dog-breed-identification/train/b9c46e0f2c351d5d1d02eb2c7d062bbf.jpg \n", + " inflating: /data/dog-breed-identification/train/b9c9aa20744c64e4c0d1e8dc0c2c2bc9.jpg \n", + " inflating: /data/dog-breed-identification/train/b9cc361e5493db5effda92f09ab61f92.jpg \n", + " inflating: /data/dog-breed-identification/train/b9cc7a9a3060ada2527fb0f7499d1107.jpg \n", + " inflating: 
/data/dog-breed-identification/train/b9cdf1e7b56c671e985c6910ee5a4ce0.jpg \n", + " inflating: /data/dog-breed-identification/train/b9d2bc5ec2a7ef1468378711e0cb6bc1.jpg \n", + " inflating: /data/dog-breed-identification/train/b9d7fe12aaa39ae416f1d1bbab67e800.jpg \n", + " inflating: /data/dog-breed-identification/train/b9e9ce2495d96099fc799022fc074d0b.jpg \n", + " inflating: /data/dog-breed-identification/train/b9f0ec3a1b55e4e56d7f1b489fe4cac0.jpg \n", + " inflating: /data/dog-breed-identification/train/b9f6625029445139bd765939c4901cc6.jpg \n", + " inflating: /data/dog-breed-identification/train/b9f961b985f5ac62cd781584cb96aec1.jpg \n", + " inflating: /data/dog-breed-identification/train/b9f96dd0c9f3dc7e755d9b8cbb124f3b.jpg \n", + " inflating: /data/dog-breed-identification/train/ba07e1dd2bd25cbd3cd985d9842f4056.jpg \n", + " inflating: /data/dog-breed-identification/train/ba1b7aa01e1c871c5e3acd08b65516e7.jpg \n", + " inflating: /data/dog-breed-identification/train/ba1d2dc55bc4d91d554ba2258684c7f0.jpg \n", + " inflating: /data/dog-breed-identification/train/ba26b73f1246de107a9d05840f360c01.jpg \n", + " inflating: /data/dog-breed-identification/train/ba387af6a979f26d1b75b50631567a12.jpg \n", + " inflating: /data/dog-breed-identification/train/ba4521b608d80f576b9301d3ab132d77.jpg \n", + " inflating: /data/dog-breed-identification/train/ba6eda43b8cf8607e9f7af6dbfaf9538.jpg \n", + " inflating: /data/dog-breed-identification/train/ba778f63824492b71f2325ae1ced3649.jpg \n", + " inflating: /data/dog-breed-identification/train/ba9616b899da4c0623a21eb2404ab1b5.jpg \n", + " inflating: /data/dog-breed-identification/train/ba9d74baca668da5f8f7451fe3734cc1.jpg \n", + " inflating: /data/dog-breed-identification/train/ba9f69563776b93721095a6550c22021.jpg \n", + " inflating: /data/dog-breed-identification/train/baa740b07dfb86b187041dd39a2eef1b.jpg \n", + " inflating: /data/dog-breed-identification/train/bab0b97cc5b817c669e10fe2c3a5a457.jpg \n", + " inflating: 
/data/dog-breed-identification/train/bac0e06a0b3416506eade8daa9aa3810.jpg \n", + " inflating: /data/dog-breed-identification/train/bac48c1d1224be5945c7a0b823f919b6.jpg \n", + " inflating: /data/dog-breed-identification/train/badfe4c762c03efd40c9a50423ccbf60.jpg \n", + " inflating: /data/dog-breed-identification/train/bae3d77665746cd31c98a27c42163b92.jpg \n", + " inflating: /data/dog-breed-identification/train/bb13c838ee59df4d8d7db7e1b62bcfb2.jpg \n", + " inflating: /data/dog-breed-identification/train/bb18d67bd3491334e88519ae81586b9c.jpg \n", + " inflating: /data/dog-breed-identification/train/bb2a76124ba043a98b39ee37d033d746.jpg \n", + " inflating: /data/dog-breed-identification/train/bb2f95b99c054cf43996501e14bce4e2.jpg \n", + " inflating: /data/dog-breed-identification/train/bb33d498880146058bd331d7cb6e7fda.jpg \n", + " inflating: /data/dog-breed-identification/train/bb3e7d4682a0cbebee6393cb435a2cf9.jpg \n", + " inflating: /data/dog-breed-identification/train/bb498a83a13172068481ab3513469efa.jpg \n", + " inflating: /data/dog-breed-identification/train/bb4a10d3e3a0e0a476d49553be9f9983.jpg \n", + " inflating: /data/dog-breed-identification/train/bb4aa91f61fa6e06dbc1a3869738b889.jpg \n", + " inflating: /data/dog-breed-identification/train/bb5b6572e6a673c36165fb4f9950f1a4.jpg \n", + " inflating: /data/dog-breed-identification/train/bb5b7382b5f016d064ae37337bc6ac58.jpg \n", + " inflating: /data/dog-breed-identification/train/bb65d628f5a121b781f411e8598f8d60.jpg \n", + " inflating: /data/dog-breed-identification/train/bb7004e9b1710030c71ea14df7c79598.jpg \n", + " inflating: /data/dog-breed-identification/train/bb724c8973031fa308fdfe81183c1ede.jpg \n", + " inflating: /data/dog-breed-identification/train/bb725eb9bf95ceeb3d1fdf12b7b9dfb6.jpg \n", + " inflating: /data/dog-breed-identification/train/bb7fdde5ce18544f51b1091f8f14533f.jpg \n", + " inflating: /data/dog-breed-identification/train/bb85276dbd61f284f520733f76e96b3b.jpg \n", + " inflating: 
/data/dog-breed-identification/train/bb8d85a0318c0fd0269f60b1cf579baa.jpg \n", + " inflating: /data/dog-breed-identification/train/bb96cd63e11e044ab9051e578b6332a4.jpg \n", + " inflating: /data/dog-breed-identification/train/bb9bab1e3645d068b4a52f86a63e4bce.jpg \n", + " inflating: /data/dog-breed-identification/train/bb9e85b9dee23f7f9c4e7fdd10d2121d.jpg \n", + " inflating: /data/dog-breed-identification/train/bba2a170e2f2cf3c0ff4eef378fdfed5.jpg \n", + " inflating: /data/dog-breed-identification/train/bbaf88be76553b3314045f057b94e0af.jpg \n", + " inflating: /data/dog-breed-identification/train/bbafb16f97edbdeb9225f7eecfa91ae4.jpg \n", + " inflating: /data/dog-breed-identification/train/bbb2ba350d76eda23bb65ebd4f2c9925.jpg \n", + " inflating: /data/dog-breed-identification/train/bbb300ef4fd6a59dbf8bd3a041547ced.jpg \n", + " inflating: /data/dog-breed-identification/train/bbb3573b1f9a2f3ab1b1d62cc26a7b88.jpg \n", + " inflating: /data/dog-breed-identification/train/bbb4831f4cddf33b5ca25152dba1963f.jpg \n", + " inflating: /data/dog-breed-identification/train/bbb7767c16389ec8cb94245ec952db3e.jpg \n", + " inflating: /data/dog-breed-identification/train/bbb8f439fd4a3563bfd4adf9a13f613d.jpg \n", + " inflating: /data/dog-breed-identification/train/bbc6ea23db33da6d67f5b4ad9fcce758.jpg \n", + " inflating: /data/dog-breed-identification/train/bbc95880ac02d4f5c83728c81ace158f.jpg \n", + " inflating: /data/dog-breed-identification/train/bbcc126ee5c5105308e461de70709905.jpg \n", + " inflating: /data/dog-breed-identification/train/bbce1f2dcd7160bf39c92d9ace657bb2.jpg \n", + " inflating: /data/dog-breed-identification/train/bbd33c81755e1243855a1e11f44f4db6.jpg \n", + " inflating: /data/dog-breed-identification/train/bbd9727d650ceb70bc57c35fe13f3cda.jpg \n", + " inflating: /data/dog-breed-identification/train/bbdcf5e0c3c76f8ce4e1edf534a1545f.jpg \n", + " inflating: /data/dog-breed-identification/train/bbdd2335f51e0d7f30b0c68fe942e3cb.jpg \n", + " inflating: 
/data/dog-breed-identification/train/bbdf0182c894bda733805993317a6e3d.jpg \n", + " inflating: /data/dog-breed-identification/train/bbe07700afcf58f521ef1df0c122fc2b.jpg \n", + " inflating: /data/dog-breed-identification/train/bbe48b3332c05b09c649869e4e024275.jpg \n", + " inflating: /data/dog-breed-identification/train/bbe814c589a59d64da00fc4939ee09ba.jpg \n", + " inflating: /data/dog-breed-identification/train/bbe96d21f3aafe38ab3b3ce24a498d77.jpg \n", + " inflating: /data/dog-breed-identification/train/bbf29c0682dc5b3ff78c810053aea0ec.jpg \n", + " inflating: /data/dog-breed-identification/train/bbf5a9514379e83803f3c27ffdd8869a.jpg \n", + " inflating: /data/dog-breed-identification/train/bbf5b722c6535dbcd4f47e9b938a1f8b.jpg \n", + " inflating: /data/dog-breed-identification/train/bbf96da4759da24dbd84cce870b0f9b1.jpg \n", + " inflating: /data/dog-breed-identification/train/bbfb5af237211ef355c277d696f6db8c.jpg \n", + " inflating: /data/dog-breed-identification/train/bbfe23a9f9ac256d07179643aba1212f.jpg \n", + " inflating: /data/dog-breed-identification/train/bbff9ef9066d365c1042805e1179f0d9.jpg \n", + " inflating: /data/dog-breed-identification/train/bc02f269c7365631689b00cccce4c4d5.jpg \n", + " inflating: /data/dog-breed-identification/train/bc0430c756b298a58b9aa5618e2629ca.jpg \n", + " inflating: /data/dog-breed-identification/train/bc0672a9e4c3f516c2cfb164fa9ae579.jpg \n", + " inflating: /data/dog-breed-identification/train/bc0ee8087d31eef274598d4af706bcf9.jpg \n", + " inflating: /data/dog-breed-identification/train/bc166a0cc82d1445d71396c2ee5ee42f.jpg \n", + " inflating: /data/dog-breed-identification/train/bc198e31d0c17f265239d4cf515104cf.jpg \n", + " inflating: /data/dog-breed-identification/train/bc1e529af6d62fca70f79f9f498f8d26.jpg \n", + " inflating: /data/dog-breed-identification/train/bc203c17a2664449384299ab1e19773d.jpg \n", + " inflating: /data/dog-breed-identification/train/bc218eeb424cf93092b0a0b23035bbad.jpg \n", + " inflating: 
/data/dog-breed-identification/train/bc32a77ad912e807ae82846de95bbc9c.jpg \n", + " inflating: /data/dog-breed-identification/train/bc3f7ee629b6ecbc058f6e24b823ba28.jpg \n", + " inflating: /data/dog-breed-identification/train/bc3fb193916e517a86bc908cdac872c6.jpg \n", + " inflating: /data/dog-breed-identification/train/bc408c0d39c5c9c8e184d83c45ff891a.jpg \n", + " inflating: /data/dog-breed-identification/train/bc44f873187f4d02264c3a2c8d49ad6f.jpg \n", + " inflating: /data/dog-breed-identification/train/bc574f3640d7a63308a51ecd05f076ac.jpg \n", + " inflating: /data/dog-breed-identification/train/bc5ca8ccf9ff7861528ad80ae89aafa8.jpg \n", + " inflating: /data/dog-breed-identification/train/bc72a2f3d71b51dc3e8adaca9dcd0dbf.jpg \n", + " inflating: /data/dog-breed-identification/train/bc76cd960369039bf7837408d2c50fb4.jpg \n", + " inflating: /data/dog-breed-identification/train/bc7d13f323cfc48854ed90ca52078553.jpg \n", + " inflating: /data/dog-breed-identification/train/bc8404a3b9d96fb5626360e0011e20b8.jpg \n", + " inflating: /data/dog-breed-identification/train/bc8503aaeb6b36c91fe82fa0cbe1dfc2.jpg \n", + " inflating: /data/dog-breed-identification/train/bc8fa54fc7dba62820098ab089ceddaa.jpg \n", + " inflating: /data/dog-breed-identification/train/bc92c4d60a03505c5c2696bc5e0fe974.jpg \n", + " inflating: /data/dog-breed-identification/train/bc9ead907720f9b02024a23283c1c9c3.jpg \n", + " inflating: /data/dog-breed-identification/train/bca1bc19e90029f7361eafd43fd28247.jpg \n", + " inflating: /data/dog-breed-identification/train/bca1feecf7cb3a75340527fe79152a3f.jpg \n", + " inflating: /data/dog-breed-identification/train/bca7dc1437ea1ba4bfb4739a99520492.jpg \n", + " inflating: /data/dog-breed-identification/train/bca88d42e4fc84b3169b13a615f5fdbf.jpg \n", + " inflating: /data/dog-breed-identification/train/bca8c43f2dd4898cd5d17b99f5a17305.jpg \n", + " inflating: /data/dog-breed-identification/train/bcad32fcb84cd4e40579f762cc0f62ac.jpg \n", + " inflating: 
/data/dog-breed-identification/train/bcb2c121cbdd74ef9e4959490463eae1.jpg \n", + " inflating: /data/dog-breed-identification/train/bcbf5ec7a0cddffc15917d416a90c6f7.jpg \n", + " inflating: /data/dog-breed-identification/train/bcc300209b7bef71fcdc9d5aa75462f1.jpg \n", + " inflating: /data/dog-breed-identification/train/bcd801a92281322094b9f7a3bd55450b.jpg \n", + " inflating: /data/dog-breed-identification/train/bcda35496d68378c027646b90cd49e4a.jpg \n", + " inflating: /data/dog-breed-identification/train/bce16a980008e997450eb48c3cdc7581.jpg \n", + " inflating: /data/dog-breed-identification/train/bce9d20fe4d4a165d03d542c14f7b2e4.jpg \n", + " inflating: /data/dog-breed-identification/train/bced7d651d024b5a55b9062f4edca875.jpg \n", + " inflating: /data/dog-breed-identification/train/bced88130af56fd6d8ae65564e3dbaba.jpg \n", + " inflating: /data/dog-breed-identification/train/bcf1184ebdd1766a71ca793e5fa03e44.jpg \n", + " inflating: /data/dog-breed-identification/train/bcf3f1b3f1424edece51c1275a226fd4.jpg \n", + " inflating: /data/dog-breed-identification/train/bcfeb7f43c4cde358910c70b8afb0d3b.jpg \n", + " inflating: /data/dog-breed-identification/train/bd25f56e9d30f2659f639c8b520053bf.jpg \n", + " inflating: /data/dog-breed-identification/train/bd29e47b1d4dfb779b5a31df235ffecb.jpg \n", + " inflating: /data/dog-breed-identification/train/bd2a4492ae1a0fd8b77dab961de0f71f.jpg \n", + " inflating: /data/dog-breed-identification/train/bd3086afcdf8d1571c615fe7c7472e11.jpg \n", + " inflating: /data/dog-breed-identification/train/bd38394c6fcd00d108b4f84eab91a9b9.jpg \n", + " inflating: /data/dog-breed-identification/train/bd3dfb3eef2ca1708cd9120a217f72ed.jpg \n", + " inflating: /data/dog-breed-identification/train/bd44350905957efade85f098ee2e2b33.jpg \n", + " inflating: /data/dog-breed-identification/train/bd4bf652ce079538e6bc3de2612c1f03.jpg \n", + " inflating: /data/dog-breed-identification/train/bd4dc4f436e88c6b6418140b33264130.jpg \n", + " inflating: 
/data/dog-breed-identification/train/bd53e2ce5f76dee81d7c6879521ae8cc.jpg \n", + " inflating: /data/dog-breed-identification/train/bd58517f201e88d3726593341f2482e4.jpg \n", + " inflating: /data/dog-breed-identification/train/bd5e32eeb9eaf11979d45530818d2f86.jpg \n", + " inflating: /data/dog-breed-identification/train/bd6320171c62d5a9b1ff60c4e2575fd3.jpg \n", + " inflating: /data/dog-breed-identification/train/bd64158308bbc5e2584e89f6e2ea2ff4.jpg \n", + " inflating: /data/dog-breed-identification/train/bd66845e2afbc4fa641eb5fde30aa294.jpg \n", + " inflating: /data/dog-breed-identification/train/bd67952d62b23e87a420c8c80fd99756.jpg \n", + " inflating: /data/dog-breed-identification/train/bd6927dadc87d3e7c5beb8009ecac5e2.jpg \n", + " inflating: /data/dog-breed-identification/train/bd69b10022556a234eabafbbc5348bba.jpg \n", + " inflating: /data/dog-breed-identification/train/bd6a14ec09f86f3fef46f9433db8c445.jpg \n", + " inflating: /data/dog-breed-identification/train/bd77320f65faa0645aeaca3939aeb856.jpg \n", + " inflating: /data/dog-breed-identification/train/bd7ae2fb53c5d792b3a7bcf942fd935e.jpg \n", + " inflating: /data/dog-breed-identification/train/bd7f701cd4f8e66db0eff2218e763f73.jpg \n", + " inflating: /data/dog-breed-identification/train/bd80e8056b70b7af3ae012fafe57d7f4.jpg \n", + " inflating: /data/dog-breed-identification/train/bd89c1185696801213b6e641fae6186e.jpg \n", + " inflating: /data/dog-breed-identification/train/bd8cb94a46ae36e1bf2f0952a3e4a79e.jpg \n", + " inflating: /data/dog-breed-identification/train/bd8ce4891d932fa8762fbfc064129b6c.jpg \n", + " inflating: /data/dog-breed-identification/train/bd9334441c2067dfc01dfca5eedf7f21.jpg \n", + " inflating: /data/dog-breed-identification/train/bd93e6dd836bfa50db1bbb071efdd641.jpg \n", + " inflating: /data/dog-breed-identification/train/bd990d4fc279bbfcf6bf8cc832101b39.jpg \n", + " inflating: /data/dog-breed-identification/train/bd9b29f039b5ecf255916a26662070c0.jpg \n", + " inflating: 
/data/dog-breed-identification/train/bd9b41a4737f840a6fe4580dcb6e5aaf.jpg \n", + " inflating: /data/dog-breed-identification/train/bd9cd8814f227af4c9aab9b9db57e3b9.jpg \n", + " inflating: /data/dog-breed-identification/train/bda8225dd3edf5e413f18e52fdcf5050.jpg \n", + " inflating: /data/dog-breed-identification/train/bdcb7a341f67363503f974d7596780db.jpg \n", + " inflating: /data/dog-breed-identification/train/bdd0ac143814012a86b6c23ef16a3094.jpg \n", + " inflating: /data/dog-breed-identification/train/bdd3210e76208a56f3b9a0505e44296b.jpg \n", + " inflating: /data/dog-breed-identification/train/bddc026d7aa9d642f64af402e7b39c43.jpg \n", + " inflating: /data/dog-breed-identification/train/bddd13e14c54a5ca227fddcf968d99c3.jpg \n", + " inflating: /data/dog-breed-identification/train/bdec813dff4bb79ff07cbe0440f217a2.jpg \n", + " inflating: /data/dog-breed-identification/train/bdf2a1c73fa4fe7fd2a26356a30cb25c.jpg \n", + " inflating: /data/dog-breed-identification/train/bdf6bdb0c025a71d9860ec28c4521b92.jpg \n", + " inflating: /data/dog-breed-identification/train/be0dff173da9c8e0b222ddac13b8f2e9.jpg \n", + " inflating: /data/dog-breed-identification/train/be0e673fcd23b2d4248d7c982635fe99.jpg \n", + " inflating: /data/dog-breed-identification/train/be1032723cc8abb85c6cf81e129e6161.jpg \n", + " inflating: /data/dog-breed-identification/train/be188d4334b4954ea69e16290c396fa1.jpg \n", + " inflating: /data/dog-breed-identification/train/be1a5fa3080c11a75880d3c57bc634c1.jpg \n", + " inflating: /data/dog-breed-identification/train/be1eb6914aded0282c88d6218ca94769.jpg \n", + " inflating: /data/dog-breed-identification/train/be32d9afea6b7ddb96724df1e617873d.jpg \n", + " inflating: /data/dog-breed-identification/train/be33b4ae29b609b6d3b005e9b7b298a0.jpg \n", + " inflating: /data/dog-breed-identification/train/be38310e94a728b390eb5b3e23af74bf.jpg \n", + " inflating: /data/dog-breed-identification/train/be390f190d4ab118992e2784b8522d50.jpg \n", + " inflating: 
/data/dog-breed-identification/train/be3b1217d5ab68fd6866055e498a432d.jpg \n", + " inflating: /data/dog-breed-identification/train/be3ede4734509f6c5d5de4f40b867f9c.jpg \n", + " inflating: /data/dog-breed-identification/train/be4a591611ba024547744245db075c98.jpg \n", + " inflating: /data/dog-breed-identification/train/be5304acd0cf03c1a64f12622b8605b6.jpg \n", + " inflating: /data/dog-breed-identification/train/be575caa5b993cf44f40ac8193db9597.jpg \n", + " inflating: /data/dog-breed-identification/train/be5893c02de466a9b1eed0ed4d66a87b.jpg \n", + " inflating: /data/dog-breed-identification/train/be78c940a00fda5f1822c630c75f1ee9.jpg \n", + " inflating: /data/dog-breed-identification/train/be7ae0578e96cd21c69b54b5252c1443.jpg \n", + " inflating: /data/dog-breed-identification/train/be7e3d9f900baa5f8b62c3a909ba5a67.jpg \n", + " inflating: /data/dog-breed-identification/train/be80e33520a60fa111dacff844efe848.jpg \n", + " inflating: /data/dog-breed-identification/train/be851757fed72c4ce6d20188962c0b19.jpg \n", + " inflating: /data/dog-breed-identification/train/be8b04c1ad3228932fb0ebb9d103dbd0.jpg \n", + " inflating: /data/dog-breed-identification/train/be8c5599351bfc3946cd302da9496d99.jpg \n", + " inflating: /data/dog-breed-identification/train/be9686a8b005707ab13d8f5dff1d7431.jpg \n", + " inflating: /data/dog-breed-identification/train/be9a01353a44e997ffab49c6d9d3a309.jpg \n", + " inflating: /data/dog-breed-identification/train/be9a79b75ab68064bbcecbd4eb4ef959.jpg \n", + " inflating: /data/dog-breed-identification/train/be9c349a5a1ebba4ac4653f6316cfb9c.jpg \n", + " inflating: /data/dog-breed-identification/train/bea765154749ffbccdc2e4321027e3a8.jpg \n", + " inflating: /data/dog-breed-identification/train/bea8d132fc8d6264c6fa1c2eb10e4482.jpg \n", + " inflating: /data/dog-breed-identification/train/beaa155f4a9f24e2fd3cf4b6e94dc1ad.jpg \n", + " inflating: /data/dog-breed-identification/train/beab3428d92da3040ffb94bfe6355205.jpg \n", + " inflating: 
/data/dog-breed-identification/train/beac84b40247dca35a77409a001f274c.jpg \n", + " inflating: /data/dog-breed-identification/train/beb42ec1708f471d474bb4de9bf2968b.jpg \n", + " inflating: /data/dog-breed-identification/train/beb47fe72b012b653b5618e546838fa2.jpg \n", + " inflating: /data/dog-breed-identification/train/beb5d7cd3bf499086ec61617dd8ae871.jpg \n", + " inflating: /data/dog-breed-identification/train/bebd0cb48ae4b4d3cf9ceff4e70ef1f9.jpg \n", + " inflating: /data/dog-breed-identification/train/bec2055925d4bf2902604df521bcec76.jpg \n", + " inflating: /data/dog-breed-identification/train/becbfec66b1e2d7c5ac05cd9d9682931.jpg \n", + " inflating: /data/dog-breed-identification/train/becdcf664798e240a8eb4fea44de4f94.jpg \n", + " inflating: /data/dog-breed-identification/train/bed0cf9b069065faf7250a922812db5e.jpg \n", + " inflating: /data/dog-breed-identification/train/bedae99cee6a6dea3726f299e26e5ae4.jpg \n", + " inflating: /data/dog-breed-identification/train/bedcd4451681407aca58ea40c6ac1af3.jpg \n", + " inflating: /data/dog-breed-identification/train/bede571095680ca06bea8b3f243de826.jpg \n", + " inflating: /data/dog-breed-identification/train/beed8b7605a53ea93ca1fa8acfd13112.jpg \n", + " inflating: /data/dog-breed-identification/train/bef015f8b2a3225aa0ab4009bf3fd6bc.jpg \n", + " inflating: /data/dog-breed-identification/train/bef1e6124f9c413e20e24e40d50276f9.jpg \n", + " inflating: /data/dog-breed-identification/train/befc8bac9dc5e6e64635a50602aec2fb.jpg \n", + " inflating: /data/dog-breed-identification/train/befd3156c627a87eed0e34c68d89ed5b.jpg \n", + " inflating: /data/dog-breed-identification/train/bf03d141ce99683a817108ea9cea17f7.jpg \n", + " inflating: /data/dog-breed-identification/train/bf0944bf0ec05af3d018b40c9c5e1a3c.jpg \n", + " inflating: /data/dog-breed-identification/train/bf10ca18e06a1a6a49388d4209556ec9.jpg \n", + " inflating: /data/dog-breed-identification/train/bf121d8d8af31728d8882d30242289e3.jpg \n", + " inflating: 
/data/dog-breed-identification/train/bf17d4425cc75eff00389103be0edbb9.jpg \n", + " inflating: /data/dog-breed-identification/train/bf18acf12d970f9e93a414bce6315a4c.jpg \n", + " inflating: /data/dog-breed-identification/train/bf25cf910748150b47bbf727a0dd6ef7.jpg \n", + " inflating: /data/dog-breed-identification/train/bf2a9b332d6d6ab0ab0b3e1a2d31e51a.jpg \n", + " inflating: /data/dog-breed-identification/train/bf350ef1b1f2c61c5312bbc3d7896a98.jpg \n", + " inflating: /data/dog-breed-identification/train/bf376d80e8752194a70b88cba386b4f2.jpg \n", + " inflating: /data/dog-breed-identification/train/bf38a009aaf46963526ce4a35d9535ee.jpg \n", + " inflating: /data/dog-breed-identification/train/bf461ad78efdb24bc7d30a062b3d82de.jpg \n", + " inflating: /data/dog-breed-identification/train/bf495079b850ccf36a2868ba788bc53d.jpg \n", + " inflating: /data/dog-breed-identification/train/bf4bfefe73ca9e7b22c7af5dd75e8e36.jpg \n", + " inflating: /data/dog-breed-identification/train/bf4cee97c84ad782f0bcb59b9d42e022.jpg \n", + " inflating: /data/dog-breed-identification/train/bf4efb3a9c94b5a92ef3751faa091121.jpg \n", + " inflating: /data/dog-breed-identification/train/bf5a837a9050a1dc1fd43027f99c9b7b.jpg \n", + " inflating: /data/dog-breed-identification/train/bf6cf509b2ee89fcc7f1b627ee1ed7e7.jpg \n", + " inflating: /data/dog-breed-identification/train/bf7004de0f825049194139051f7a8025.jpg \n", + " inflating: /data/dog-breed-identification/train/bf78b7ca10cf7d1b7769d5538374a524.jpg \n", + " inflating: /data/dog-breed-identification/train/bf8a2477f71e61185aff00a6925b5173.jpg \n", + " inflating: /data/dog-breed-identification/train/bf904a3e0ad73be75221a5fc271bece6.jpg \n", + " inflating: /data/dog-breed-identification/train/bfa0490063b75ca9fa1b022dc711d88e.jpg \n", + " inflating: /data/dog-breed-identification/train/bfa50acc112f4d8b89dd212eceb9447c.jpg \n", + " inflating: /data/dog-breed-identification/train/bfab7814cbe737f504129714fdc61e4f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/bfbc2d849ad444ce3c7d4f7b0e4ea4c0.jpg \n", + " inflating: /data/dog-breed-identification/train/bfbc332f78159db3fa558392311ffed8.jpg \n", + " inflating: /data/dog-breed-identification/train/bfbe2ffaeca6aa155e09d75b750d1849.jpg \n", + " inflating: /data/dog-breed-identification/train/bfcbd77df4e01f4c01bb8c23c857ad27.jpg \n", + " inflating: /data/dog-breed-identification/train/bfd383676d64ac4b280719a0ef6e48b4.jpg \n", + " inflating: /data/dog-breed-identification/train/bfd5b637965b0594c9b115d4ad3426d6.jpg \n", + " inflating: /data/dog-breed-identification/train/bfd6b583a0f6de822f0bce0477f856c4.jpg \n", + " inflating: /data/dog-breed-identification/train/bfdd93b11d5e4e0df2566e4344838488.jpg \n", + " inflating: /data/dog-breed-identification/train/bfe7835703f7ccfe18cb2bd837d61742.jpg \n", + " inflating: /data/dog-breed-identification/train/bfe8e8b7d66e75c0af82b6e0d136a7a5.jpg \n", + " inflating: /data/dog-breed-identification/train/bfec42604e41393fed1e5a2946dfb221.jpg \n", + " inflating: /data/dog-breed-identification/train/bfee9396063ff719130a3a61569325d5.jpg \n", + " inflating: /data/dog-breed-identification/train/bff3b4988983b8dfc0376e010a68dd12.jpg \n", + " inflating: /data/dog-breed-identification/train/bffc7036e3c90a13e85e540837dfa9df.jpg \n", + " inflating: /data/dog-breed-identification/train/bffe7f9c91a8af465f928a33aa8beda8.jpg \n", + " inflating: /data/dog-breed-identification/train/c013b74dd463214e272a09400d1ba088.jpg \n", + " inflating: /data/dog-breed-identification/train/c01ab33476ca5cac3c38942b2f860a9c.jpg \n", + " inflating: /data/dog-breed-identification/train/c01fb7605bebd817eab818ec264fce09.jpg \n", + " inflating: /data/dog-breed-identification/train/c023eaf6c091913c9e6fd4732bc049b7.jpg \n", + " inflating: /data/dog-breed-identification/train/c02707afcb697c42ee9e15028d5ef685.jpg \n", + " inflating: /data/dog-breed-identification/train/c02d063b4d8dd7dc77d22deac788318c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c02f83947002646c705681ef6433294a.jpg \n", + " inflating: /data/dog-breed-identification/train/c03241c397a1159f6fb358ef9d1874bb.jpg \n", + " inflating: /data/dog-breed-identification/train/c040498df5ee68644331b9049c55a320.jpg \n", + " inflating: /data/dog-breed-identification/train/c04178bd174b64611ad72477b8126921.jpg \n", + " inflating: /data/dog-breed-identification/train/c043f726c52f93c83fa0100a8a776481.jpg \n", + " inflating: /data/dog-breed-identification/train/c04bf0ad26971adb6613737f4b557e5b.jpg \n", + " inflating: /data/dog-breed-identification/train/c04d5d982202a31296b7da98b6ec06b2.jpg \n", + " inflating: /data/dog-breed-identification/train/c05756fc992ab5863853dafe9cf50675.jpg \n", + " inflating: /data/dog-breed-identification/train/c064f334a8216234c4ffe6566fc9ca8a.jpg \n", + " inflating: /data/dog-breed-identification/train/c06b704f3223bc02e29d762d5e0f7c47.jpg \n", + " inflating: /data/dog-breed-identification/train/c06db3b81882812395ed0b5babada638.jpg \n", + " inflating: /data/dog-breed-identification/train/c0725c69b3af65f3e884c2b9bb41f5cd.jpg \n", + " inflating: /data/dog-breed-identification/train/c072c6274a441701d005c42d17cdacf0.jpg \n", + " inflating: /data/dog-breed-identification/train/c075c594a9483e562e8a2a198f384c6b.jpg \n", + " inflating: /data/dog-breed-identification/train/c07ed68e4c73960aecc258a445ae8b14.jpg \n", + " inflating: /data/dog-breed-identification/train/c080a6d7d5bdba6e23c0f854a67493ad.jpg \n", + " inflating: /data/dog-breed-identification/train/c081155c1c31116f8accbe4c0bef6e0a.jpg \n", + " inflating: /data/dog-breed-identification/train/c08217328f5635e74fc0c0886bac26e6.jpg \n", + " inflating: /data/dog-breed-identification/train/c098acce3ddd9919cb04d6f47f450891.jpg \n", + " inflating: /data/dog-breed-identification/train/c098cd2465fd0a1f709b522078986b34.jpg \n", + " inflating: /data/dog-breed-identification/train/c099b358d9f75595c1de41b5b27bfd36.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c0a34f1fd1f8ccc767a5fcf9a5763e1d.jpg \n", + " inflating: /data/dog-breed-identification/train/c0a4e82e33d14987683715ef833e8401.jpg \n", + " inflating: /data/dog-breed-identification/train/c0abba0e417be48e778809d94dad57a7.jpg \n", + " inflating: /data/dog-breed-identification/train/c0b53533209ff39a4620a5959a1c5c98.jpg \n", + " inflating: /data/dog-breed-identification/train/c0c3cd7054d43a350f772b8242cd0217.jpg \n", + " inflating: /data/dog-breed-identification/train/c0c60bf6e9807b71e15c63309ed58e36.jpg \n", + " inflating: /data/dog-breed-identification/train/c0ca40ffa35be4d2772870794fa298cf.jpg \n", + " inflating: /data/dog-breed-identification/train/c0d10ee669b1ccc5bc60950efd19cd53.jpg \n", + " inflating: /data/dog-breed-identification/train/c0d13d17657300bfe361ad0f8511fe38.jpg \n", + " inflating: /data/dog-breed-identification/train/c0d456fd289a6f0f9a2e40192a37251f.jpg \n", + " inflating: /data/dog-breed-identification/train/c0de582157dbd2320e406cdfea59ef11.jpg \n", + " inflating: /data/dog-breed-identification/train/c0df63bcc4d3e7d829307a331bcd6abc.jpg \n", + " inflating: /data/dog-breed-identification/train/c0e21b16483abecd9449bce9122cb82d.jpg \n", + " inflating: /data/dog-breed-identification/train/c0e6e8660b28cd6f91bee7e75f63cde1.jpg \n", + " inflating: /data/dog-breed-identification/train/c0ec60ef57bce7f784d8a6f1615d169d.jpg \n", + " inflating: /data/dog-breed-identification/train/c0eed44536c5f094235cb78635fa01ae.jpg \n", + " inflating: /data/dog-breed-identification/train/c0f1af3542270b1e4f1be3403d17ec35.jpg \n", + " inflating: /data/dog-breed-identification/train/c0f28a8976414ad02d4d4c256a45711b.jpg \n", + " inflating: /data/dog-breed-identification/train/c0f768f1265e116dba127a8f904ee376.jpg \n", + " inflating: /data/dog-breed-identification/train/c1191ce52692dce2e821849c5445fb6d.jpg \n", + " inflating: /data/dog-breed-identification/train/c12741ccd98df347498106679a65fee2.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c133de12833bef664547ffe9d596b760.jpg \n", + " inflating: /data/dog-breed-identification/train/c13427fc3db9194dac02b1721bd10fe4.jpg \n", + " inflating: /data/dog-breed-identification/train/c138a69989ef76b3a5720df81f592f92.jpg \n", + " inflating: /data/dog-breed-identification/train/c13ba8b85ba2f68af5b6104e18b22994.jpg \n", + " inflating: /data/dog-breed-identification/train/c142993056fbd776aace8217acc45267.jpg \n", + " inflating: /data/dog-breed-identification/train/c1486d60b3c8c6b1b02b6e13363f5cef.jpg \n", + " inflating: /data/dog-breed-identification/train/c148beb6d5877985701256584d104b6a.jpg \n", + " inflating: /data/dog-breed-identification/train/c14d79e9e4e88583fcb603aad4ca2493.jpg \n", + " inflating: /data/dog-breed-identification/train/c1510b474b3893197ac87c869e853c1d.jpg \n", + " inflating: /data/dog-breed-identification/train/c1535e422539ba3f3b953baf2b527058.jpg \n", + " inflating: /data/dog-breed-identification/train/c155eec0acaeded243319bc1401bc783.jpg \n", + " inflating: /data/dog-breed-identification/train/c15d94d9d6a8167fbcf4b81c6938bc93.jpg \n", + " inflating: /data/dog-breed-identification/train/c17b1d9d7c5ce38464836cf2e2eb5243.jpg \n", + " inflating: /data/dog-breed-identification/train/c182aec9722d11b6cd64933aa33b932d.jpg \n", + " inflating: /data/dog-breed-identification/train/c18aff6cabebcd673aadc864781f31c8.jpg \n", + " inflating: /data/dog-breed-identification/train/c1905697b10e53b26a2445cbb94f7227.jpg \n", + " inflating: /data/dog-breed-identification/train/c19177c750afffa00a7ba5f59ace92be.jpg \n", + " inflating: /data/dog-breed-identification/train/c1954af7d1cbae4f23232c06a8b66351.jpg \n", + " inflating: /data/dog-breed-identification/train/c1a5f4fb96ec93193d7d105bcf62fd45.jpg \n", + " inflating: /data/dog-breed-identification/train/c1b1a26065c28ee43c7bfdc1ec7dfdeb.jpg \n", + " inflating: /data/dog-breed-identification/train/c1b6ec8d72008f10ca5bbd928b41ba97.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c1b7e93e3a09c54c89ea5b9fc543ff14.jpg \n", + " inflating: /data/dog-breed-identification/train/c1ca7ec9bb04a6b33e8780cf944d3184.jpg \n", + " inflating: /data/dog-breed-identification/train/c1ce9942a8cc03491275cef3d696ac40.jpg \n", + " inflating: /data/dog-breed-identification/train/c1e67e0e0bc4e3670d89511cf0484795.jpg \n", + " inflating: /data/dog-breed-identification/train/c1e854371a2316fc28ccb4cb5527c478.jpg \n", + " inflating: /data/dog-breed-identification/train/c1eadd472eb3727e82e69fe3f0170f10.jpg \n", + " inflating: /data/dog-breed-identification/train/c1f782c71d46690a156d8bbb6c5dca22.jpg \n", + " inflating: /data/dog-breed-identification/train/c1f814e2be9f657c2713d56bd8be59b0.jpg \n", + " inflating: /data/dog-breed-identification/train/c1f81cabd7d33a6f434af359226c191e.jpg \n", + " inflating: /data/dog-breed-identification/train/c1fb8ad3d13c74f56571af4fbf5ac0b5.jpg \n", + " inflating: /data/dog-breed-identification/train/c1fe9cb194aa867a96b1db81ef01bfdf.jpg \n", + " inflating: /data/dog-breed-identification/train/c1ffa2df86776926d86514a0921a9dcc.jpg \n", + " inflating: /data/dog-breed-identification/train/c20aa2f09a18bcbf98b62c0bffb3bf0e.jpg \n", + " inflating: /data/dog-breed-identification/train/c210e7dcbc9438e0bf04e8e0d15bee04.jpg \n", + " inflating: /data/dog-breed-identification/train/c21404efbc13ac4523f6f6a09c7ed703.jpg \n", + " inflating: /data/dog-breed-identification/train/c21453385ea62b14fca225f79f863520.jpg \n", + " inflating: /data/dog-breed-identification/train/c21dfca70ed4f6098881a2687c229f3d.jpg \n", + " inflating: /data/dog-breed-identification/train/c21ebf600c0136815b16143e205638ff.jpg \n", + " inflating: /data/dog-breed-identification/train/c226fabe1302bfeff9898aa02a6d7623.jpg \n", + " inflating: /data/dog-breed-identification/train/c2270e9f348520fda98ed82026fcf8a3.jpg \n", + " inflating: /data/dog-breed-identification/train/c228896e03be709a8950d9d839fbba31.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c22aec850a32b0963304741102b70cc8.jpg \n", + " inflating: /data/dog-breed-identification/train/c23585d9731052eb35ccf6308aba585b.jpg \n", + " inflating: /data/dog-breed-identification/train/c23a7d8e922a941a6c487c6da30aea8a.jpg \n", + " inflating: /data/dog-breed-identification/train/c247f70883cacbb611d73df2c12339ca.jpg \n", + " inflating: /data/dog-breed-identification/train/c24811c315db62982a49b87b1e95d6e4.jpg \n", + " inflating: /data/dog-breed-identification/train/c24c3298ec501f8f7960ea76fdd317e9.jpg \n", + " inflating: /data/dog-breed-identification/train/c24eb78ce2156f05ec5b2736bfaa5e18.jpg \n", + " inflating: /data/dog-breed-identification/train/c2510b34cfce799715d87a1ab1516e92.jpg \n", + " inflating: /data/dog-breed-identification/train/c259c58c68675139297a4351963628ca.jpg \n", + " inflating: /data/dog-breed-identification/train/c25b1b2e1919a58239ac16ee53bdd44f.jpg \n", + " inflating: /data/dog-breed-identification/train/c262517d318bf15ef67b2e6164d1d665.jpg \n", + " inflating: /data/dog-breed-identification/train/c26f118f94d84ffd5ced4669a29b4898.jpg \n", + " inflating: /data/dog-breed-identification/train/c2722ca437529f5cfa8f2147dfdc165d.jpg \n", + " inflating: /data/dog-breed-identification/train/c2849be96ad98ded20e336b20d519f35.jpg \n", + " inflating: /data/dog-breed-identification/train/c28801dd739c5070a19461bb1d6a72b0.jpg \n", + " inflating: /data/dog-breed-identification/train/c28e2f22672805efa4d1bbba93a09662.jpg \n", + " inflating: /data/dog-breed-identification/train/c28f1f46775257d6830693702657390e.jpg \n", + " inflating: /data/dog-breed-identification/train/c2914cb7fb33bb683e148a568f1e7492.jpg \n", + " inflating: /data/dog-breed-identification/train/c2956f64221e5f8ab9aef1540f47f7ac.jpg \n", + " inflating: /data/dog-breed-identification/train/c2985294aa6f6566578f945986107757.jpg \n", + " inflating: /data/dog-breed-identification/train/c29939ec3329ccbed3b7f3094c48ec85.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c2a094b3b0217960355c60901cd9894a.jpg \n", + " inflating: /data/dog-breed-identification/train/c2a48517799dedbde16fa299a88e8e13.jpg \n", + " inflating: /data/dog-breed-identification/train/c2a5c44a3cb6d8e5fc36ff0bb8640776.jpg \n", + " inflating: /data/dog-breed-identification/train/c2ae87dae29b344f92d82983d521d5d5.jpg \n", + " inflating: /data/dog-breed-identification/train/c2b343504429a8015d78fdff2642e01b.jpg \n", + " inflating: /data/dog-breed-identification/train/c2b94313a6ba125f1b7839484e08ba00.jpg \n", + " inflating: /data/dog-breed-identification/train/c2c0f72ae94cdf9bddb23b3708568f61.jpg \n", + " inflating: /data/dog-breed-identification/train/c2c60183f18666aaa714efeff54a808a.jpg \n", + " inflating: /data/dog-breed-identification/train/c2d352fbc92dd1fd1a043634676fada1.jpg \n", + " inflating: /data/dog-breed-identification/train/c2d9baee3714a8f3a6acecf68dfab93e.jpg \n", + " inflating: /data/dog-breed-identification/train/c2e139591c5752222dd5f2026b0bdda1.jpg \n", + " inflating: /data/dog-breed-identification/train/c2eb9f929f297504755132d03e74e009.jpg \n", + " inflating: /data/dog-breed-identification/train/c2f628d5ddd930809925bba8492c3d70.jpg \n", + " inflating: /data/dog-breed-identification/train/c2f82b0c18aba9f04803f72e97126fbf.jpg \n", + " inflating: /data/dog-breed-identification/train/c2f9dfaab06ef5f7c1a0336d01250854.jpg \n", + " inflating: /data/dog-breed-identification/train/c2fe78e1b91e678b65d2a403dcef64ec.jpg \n", + " inflating: /data/dog-breed-identification/train/c3008b4470c7c53a12d4490e51643548.jpg \n", + " inflating: /data/dog-breed-identification/train/c3093c45a6da6089d47e6e0f21b9a66a.jpg \n", + " inflating: /data/dog-breed-identification/train/c30c5194bffad8dfbad7e8db70ad449e.jpg \n", + " inflating: /data/dog-breed-identification/train/c30fa1c2e03315c58df49b74e9268b93.jpg \n", + " inflating: /data/dog-breed-identification/train/c31a79ecf4c0280d60c0c99d9fb9710c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c3236a5718f80fdd3227477bfbf84c40.jpg \n", + " inflating: /data/dog-breed-identification/train/c32970356e71b965d203a0617fe1ecde.jpg \n", + " inflating: /data/dog-breed-identification/train/c32fb0c78bfc35f176ae7090155ef2c9.jpg \n", + " inflating: /data/dog-breed-identification/train/c3352727287c486b9620890d961ec458.jpg \n", + " inflating: /data/dog-breed-identification/train/c339148268da677c54ce6b4e7a697391.jpg \n", + " inflating: /data/dog-breed-identification/train/c33b87c9fb7199316fe5182751ca38f1.jpg \n", + " inflating: /data/dog-breed-identification/train/c33eda1d0346155ca7a6c8f03121945c.jpg \n", + " inflating: /data/dog-breed-identification/train/c34297e6db453a12e69c2ff3a107125e.jpg \n", + " inflating: /data/dog-breed-identification/train/c3487bbf17be7def166026f875edbdc0.jpg \n", + " inflating: /data/dog-breed-identification/train/c34f899f92484db897374e9567eda5b3.jpg \n", + " inflating: /data/dog-breed-identification/train/c351c6388350e97f13e22a482b91c6d2.jpg \n", + " inflating: /data/dog-breed-identification/train/c356c52c408f1099adb4a106718a2935.jpg \n", + " inflating: /data/dog-breed-identification/train/c35ca6430da03f82df091ced49f0b07e.jpg \n", + " inflating: /data/dog-breed-identification/train/c36626ef165cd73783a2e760aaa2fc72.jpg \n", + " inflating: /data/dog-breed-identification/train/c3664530805157a7230319d164ceee95.jpg \n", + " inflating: /data/dog-breed-identification/train/c37ce9e2e471477f053b69eb39fb5535.jpg \n", + " inflating: /data/dog-breed-identification/train/c37db8384c30b6da21ee2879eb54ffee.jpg \n", + " inflating: /data/dog-breed-identification/train/c37f52e1a8288a9bf678b19f1e4f9bfb.jpg \n", + " inflating: /data/dog-breed-identification/train/c37f92c33db324963a0a00725b8e7172.jpg \n", + " inflating: /data/dog-breed-identification/train/c3811d9749d6526bb0c415e3a1476a06.jpg \n", + " inflating: /data/dog-breed-identification/train/c382eb18b911729652590dd12628dc24.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c38369fbdcdd27f1cc723181df8b6330.jpg \n", + " inflating: /data/dog-breed-identification/train/c388a84f93ba4fb28c7b1a7a880474f1.jpg \n", + " inflating: /data/dog-breed-identification/train/c38e77ac6a0ee0c8ff5fdcd1df42f6f0.jpg \n", + " inflating: /data/dog-breed-identification/train/c3912710a89f0e88320b8fe07578a972.jpg \n", + " inflating: /data/dog-breed-identification/train/c397eb87347ad681ec1b72f9cf5b5007.jpg \n", + " inflating: /data/dog-breed-identification/train/c39c3cb599426bfa1cd2ba7e9604d463.jpg \n", + " inflating: /data/dog-breed-identification/train/c3a0166a2d77b608de86c14739d84ba6.jpg \n", + " inflating: /data/dog-breed-identification/train/c3a7c85f61db74978cb692931639024b.jpg \n", + " inflating: /data/dog-breed-identification/train/c3aa3163aef4f0762c61ed7c49bccea0.jpg \n", + " inflating: /data/dog-breed-identification/train/c3b19b9e1ce0c005511fa14e2d9a89ec.jpg \n", + " inflating: /data/dog-breed-identification/train/c3b7f884bf97c8caa64377672521a7b0.jpg \n", + " inflating: /data/dog-breed-identification/train/c3bf9ce1db1a3fc772bdff96055032e4.jpg \n", + " inflating: /data/dog-breed-identification/train/c3cda65cbca60fd5c82f4a4914260560.jpg \n", + " inflating: /data/dog-breed-identification/train/c3d2f20dbe5f10c1cab69cc01c87cf37.jpg \n", + " inflating: /data/dog-breed-identification/train/c3d41b0f8c76eda86e44c2dfaa654907.jpg \n", + " inflating: /data/dog-breed-identification/train/c3dba0dcd9c74e05a71d351108568f3e.jpg \n", + " inflating: /data/dog-breed-identification/train/c3e1b303c9f7cfc15f57038400548af8.jpg \n", + " inflating: /data/dog-breed-identification/train/c3ffe489a6dd65d192c0d839ded36e94.jpg \n", + " inflating: /data/dog-breed-identification/train/c40118c553a50c1cef797460f297a4f5.jpg \n", + " inflating: /data/dog-breed-identification/train/c4015267ca5b03b24e25932d4555d5bd.jpg \n", + " inflating: /data/dog-breed-identification/train/c4030a679815a7bf4f9087165155730f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c417aa964d70adaa3cd866b7b1bc9e92.jpg \n", + " inflating: /data/dog-breed-identification/train/c42b1d589af04e9184bfa3adae83518b.jpg \n", + " inflating: /data/dog-breed-identification/train/c42b9482728498a9e76ba5d1908e5406.jpg \n", + " inflating: /data/dog-breed-identification/train/c437bfb85769b34e46d50dafea7fd716.jpg \n", + " inflating: /data/dog-breed-identification/train/c438ce4eba5cce0bc6d11d077ae0e758.jpg \n", + " inflating: /data/dog-breed-identification/train/c43f8987dc2992971f4316cecc4cad73.jpg \n", + " inflating: /data/dog-breed-identification/train/c44069838d7f06552dbbd8dea615d901.jpg \n", + " inflating: /data/dog-breed-identification/train/c441a06c1b1d3969ee1654d4a63b6c22.jpg \n", + " inflating: /data/dog-breed-identification/train/c4438c44317572f7a432469a63203819.jpg \n", + " inflating: /data/dog-breed-identification/train/c443f3ba58dd6a986c974b56b1f1cbb6.jpg \n", + " inflating: /data/dog-breed-identification/train/c44756c419398a3f3a06e814ac73a3bc.jpg \n", + " inflating: /data/dog-breed-identification/train/c45ca06210977c966dfb8e7fa0cd706d.jpg \n", + " inflating: /data/dog-breed-identification/train/c45f15470d6627726ecf5f80dc299a76.jpg \n", + " inflating: /data/dog-breed-identification/train/c4649d3948446cf3cd90da2ee922b3c1.jpg \n", + " inflating: /data/dog-breed-identification/train/c46e2bc23aa32ce2fed1fe08beba88c6.jpg \n", + " inflating: /data/dog-breed-identification/train/c474bbdf3334f107a1d6ca5c9af86df8.jpg \n", + " inflating: /data/dog-breed-identification/train/c47bd2467bbecfdd7ebb15abcdd9ba69.jpg \n", + " inflating: /data/dog-breed-identification/train/c47eac4ddf7273a67b7b6984052d4303.jpg \n", + " inflating: /data/dog-breed-identification/train/c47edf5a8b9abcf72e0579a001b83c1b.jpg \n", + " inflating: /data/dog-breed-identification/train/c484f496ccb2608c07e9706f567e3546.jpg \n", + " inflating: /data/dog-breed-identification/train/c495c76907568ee68be1b098d5a04f6f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c4979ed313483b6b64f04385ff6453b7.jpg \n", + " inflating: /data/dog-breed-identification/train/c4a294f606c6e01dcdeb25348a2a972a.jpg \n", + " inflating: /data/dog-breed-identification/train/c4a70cc9be80f67614e4cb0a756f28a5.jpg \n", + " inflating: /data/dog-breed-identification/train/c4a98c78f34950aa9e0efb1e52d8c826.jpg \n", + " inflating: /data/dog-breed-identification/train/c4adaa3d82e0b13abb349cd00324c85f.jpg \n", + " inflating: /data/dog-breed-identification/train/c4b4a3adcb838be6d9d9fb5fd9304101.jpg \n", + " inflating: /data/dog-breed-identification/train/c4bf9248192b875822e30f5e2a240c19.jpg \n", + " inflating: /data/dog-breed-identification/train/c4c7028d5b4e3908836261eaab862230.jpg \n", + " inflating: /data/dog-breed-identification/train/c4ceeed5a3de99701f516c39181ce974.jpg \n", + " inflating: /data/dog-breed-identification/train/c4db7de5ca4286cebbf924a1af21bdd7.jpg \n", + " inflating: /data/dog-breed-identification/train/c4dbdd6201127e4f5b5ce08f6186834b.jpg \n", + " inflating: /data/dog-breed-identification/train/c4f33705a88c63dc7c40c64f0c94febb.jpg \n", + " inflating: /data/dog-breed-identification/train/c4fad81571f6010bcf6948738346ad16.jpg \n", + " inflating: /data/dog-breed-identification/train/c4ffb7da3faa4c07dc7fe120f6b00127.jpg \n", + " inflating: /data/dog-breed-identification/train/c5030860ba094c86d3bef88c659448f0.jpg \n", + " inflating: /data/dog-breed-identification/train/c506d9d6ebc35e17296e5311d7ecc4f3.jpg \n", + " inflating: /data/dog-breed-identification/train/c50f5dd19ef8e5e4e81ce1daf6736638.jpg \n", + " inflating: /data/dog-breed-identification/train/c513ff37e33e660598dee5257956ac2b.jpg \n", + " inflating: /data/dog-breed-identification/train/c52852b1d9ab94c9ebb948c38dfd1bf3.jpg \n", + " inflating: /data/dog-breed-identification/train/c531e6e15b507dc9b26ea1036f9317c4.jpg \n", + " inflating: /data/dog-breed-identification/train/c54aa01f89ff36bcf5426cde29fdc944.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c54c1132c4f2322347fe7e0ae3062c86.jpg \n", + " inflating: /data/dog-breed-identification/train/c552deb03796d5f050d9d2cfd1323d73.jpg \n", + " inflating: /data/dog-breed-identification/train/c56ca357ff472cb75ae1bb0fb0f16f6f.jpg \n", + " inflating: /data/dog-breed-identification/train/c56d360b7601ce3882d5016abdc83157.jpg \n", + " inflating: /data/dog-breed-identification/train/c56ff3790f57858458eebd14050f51b1.jpg \n", + " inflating: /data/dog-breed-identification/train/c5762a5e8323239afefd077786270113.jpg \n", + " inflating: /data/dog-breed-identification/train/c57758b4e77717dbb2cdcb876b2b13ca.jpg \n", + " inflating: /data/dog-breed-identification/train/c577c620fc1c49c52706683485f9d147.jpg \n", + " inflating: /data/dog-breed-identification/train/c579b9716e6d0e777010642d40757752.jpg \n", + " inflating: /data/dog-breed-identification/train/c57a69507bcb399ada4d8cd7f6ecaee7.jpg \n", + " inflating: /data/dog-breed-identification/train/c58a5e34509845ddf019edd6d896c6b1.jpg \n", + " inflating: /data/dog-breed-identification/train/c58b4fb8d7666df20038fa6fa106ae33.jpg \n", + " inflating: /data/dog-breed-identification/train/c590f731ed54997d03c8b42b565f06a4.jpg \n", + " inflating: /data/dog-breed-identification/train/c591c9885407dd541e72a86e3d95785b.jpg \n", + " inflating: /data/dog-breed-identification/train/c5928c863685a331b3fc2c16193a72ae.jpg \n", + " inflating: /data/dog-breed-identification/train/c599b943919e28315cfe2cd4c0e4f51c.jpg \n", + " inflating: /data/dog-breed-identification/train/c59a3e9d84640ab0cfdd1a8898e35a41.jpg \n", + " inflating: /data/dog-breed-identification/train/c59c717f3f699529820090cd02563faa.jpg \n", + " inflating: /data/dog-breed-identification/train/c59c7960ec3d5ca79757cb5f10e8cffb.jpg \n", + " inflating: /data/dog-breed-identification/train/c59df3c5d6b7f12d5086f1b7cf64858b.jpg \n", + " inflating: /data/dog-breed-identification/train/c5b05b2bc80eeb044bd3cb7b9ea93037.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c5b1bb2153ca0ac74e7049f5264518a0.jpg \n", + " inflating: /data/dog-breed-identification/train/c5b6d08c694425175f1c85be059a98ea.jpg \n", + " inflating: /data/dog-breed-identification/train/c5b95a6c73427b6901110e1f11bc5569.jpg \n", + " inflating: /data/dog-breed-identification/train/c5c26998c5ea34b06846b514c2418814.jpg \n", + " inflating: /data/dog-breed-identification/train/c5c9f454cbdedf855efc0f5104e4f1fb.jpg \n", + " inflating: /data/dog-breed-identification/train/c5ca525e7477a7cd05bacd9544995397.jpg \n", + " inflating: /data/dog-breed-identification/train/c5cb2d2a9038b1181c7820ef05e838db.jpg \n", + " inflating: /data/dog-breed-identification/train/c5d485d5c12306d060bba17295d3d435.jpg \n", + " inflating: /data/dog-breed-identification/train/c5d73c1cd8adf2c822cb79b61ed9c9c3.jpg \n", + " inflating: /data/dog-breed-identification/train/c5dd9170863cc99c078f0d4298b0a352.jpg \n", + " inflating: /data/dog-breed-identification/train/c5e1c138abd4747d63e52253263904b0.jpg \n", + " inflating: /data/dog-breed-identification/train/c5e2482dd2a845ab95cc77b138c23c36.jpg \n", + " inflating: /data/dog-breed-identification/train/c5e50854bf07fc3014c42c6a94a6af51.jpg \n", + " inflating: /data/dog-breed-identification/train/c5ea038cd31e3b6a37fee4b1338a3ae3.jpg \n", + " inflating: /data/dog-breed-identification/train/c5f7de37074cbb2b02e5fa4fc3185b83.jpg \n", + " inflating: /data/dog-breed-identification/train/c6010d9e2561ec1dde10fa6e9e3e8244.jpg \n", + " inflating: /data/dog-breed-identification/train/c602f7842271c85e8d779246746c8350.jpg \n", + " inflating: /data/dog-breed-identification/train/c60b2faebc57760644f3ad3556b521ed.jpg \n", + " inflating: /data/dog-breed-identification/train/c60eb85ec2a2e4e90c8e6a0d7fc36436.jpg \n", + " inflating: /data/dog-breed-identification/train/c615fd2e426968372fa662c94dd868a7.jpg \n", + " inflating: /data/dog-breed-identification/train/c61a74191ee3f5a485c17f69c6f9ad12.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c61ad9cd1f695d4f5d3c5ad3998ac67c.jpg \n", + " inflating: /data/dog-breed-identification/train/c621e63e70cf610d0998508443e613e7.jpg \n", + " inflating: /data/dog-breed-identification/train/c62ab8bce010c72b8b783472e44fa6a7.jpg \n", + " inflating: /data/dog-breed-identification/train/c62d6c163cbe1109462aa729bc4f425c.jpg \n", + " inflating: /data/dog-breed-identification/train/c648c75a66ac06d71242e46e628ad3cc.jpg \n", + " inflating: /data/dog-breed-identification/train/c6490dd60b2c31345e74f93b1c4231e5.jpg \n", + " inflating: /data/dog-breed-identification/train/c64d89570426c2165226520c9041eb8d.jpg \n", + " inflating: /data/dog-breed-identification/train/c66887200fcb9a487dfa29f7fa1a0c9f.jpg \n", + " inflating: /data/dog-breed-identification/train/c668cb4cfab62856f6520c4c3358fe08.jpg \n", + " inflating: /data/dog-breed-identification/train/c66942afde3aebfbc462ec9466c946b1.jpg \n", + " inflating: /data/dog-breed-identification/train/c66bea53a3b615683a2248d664fd3813.jpg \n", + " inflating: /data/dog-breed-identification/train/c66c6cfc9bb48d1093d24583f8722f55.jpg \n", + " inflating: /data/dog-breed-identification/train/c66d073129b40067e523e013fd9fcdd6.jpg \n", + " inflating: /data/dog-breed-identification/train/c66eb3e1eb11ce2482beb25b2a4095c5.jpg \n", + " inflating: /data/dog-breed-identification/train/c680c7aa880ab72ae585f401a223470f.jpg \n", + " inflating: /data/dog-breed-identification/train/c687909cd22b408a8e8fb16dffd1d6b2.jpg \n", + " inflating: /data/dog-breed-identification/train/c693a2bf26360d8b25e44a9e4fc42cf1.jpg \n", + " inflating: /data/dog-breed-identification/train/c693ed75220ef6de098208d7361fa5d6.jpg \n", + " inflating: /data/dog-breed-identification/train/c696ef33b6a562d11f581867cf19042e.jpg \n", + " inflating: /data/dog-breed-identification/train/c6a27dd1098ea7b08973b9199072050c.jpg \n", + " inflating: /data/dog-breed-identification/train/c6a38c57908b41c162bff2889280896d.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c6ab645e969263618b547ff66fd19d1f.jpg \n", + " inflating: /data/dog-breed-identification/train/c6ae68d5e1e8d543b2cabc4c76097cbf.jpg \n", + " inflating: /data/dog-breed-identification/train/c6c3eb8fd9a955d17e57b562fc34e3ce.jpg \n", + " inflating: /data/dog-breed-identification/train/c6ca642c97eddb5c3d206d0c473f3e51.jpg \n", + " inflating: /data/dog-breed-identification/train/c6ca92fae93fe8522b240a663c9065d8.jpg \n", + " inflating: /data/dog-breed-identification/train/c6cdcfdb8ba3b460bc07e63e8d057fe5.jpg \n", + " inflating: /data/dog-breed-identification/train/c6d8ab3e57c1bd07ca582bc46f8ec74b.jpg \n", + " inflating: /data/dog-breed-identification/train/c6dbaa6aac1d6c5186dbdc19ec043f5f.jpg \n", + " inflating: /data/dog-breed-identification/train/c6ed146231120aec6fe2dd8004d0e3cd.jpg \n", + " inflating: /data/dog-breed-identification/train/c6ed696d18a65e388fb7050430469d1a.jpg \n", + " inflating: /data/dog-breed-identification/train/c6f47117753a8f83c93ad62369c7266b.jpg \n", + " inflating: /data/dog-breed-identification/train/c708a817e8bbc05d9287c611d96d061a.jpg \n", + " inflating: /data/dog-breed-identification/train/c70d4ad5aee0d56f5a8bc63c0079380d.jpg \n", + " inflating: /data/dog-breed-identification/train/c70e2005c583c20713fb481dd72fb30c.jpg \n", + " inflating: /data/dog-breed-identification/train/c713df3041d37a0e82f266fc2bfaf1e6.jpg \n", + " inflating: /data/dog-breed-identification/train/c71b9f1812873e44361051814cf2f3a3.jpg \n", + " inflating: /data/dog-breed-identification/train/c72a277989f49b475ebbe4fd18906370.jpg \n", + " inflating: /data/dog-breed-identification/train/c72af1c49f764ef8150bd1b1a096e5c6.jpg \n", + " inflating: /data/dog-breed-identification/train/c72cad3c532886e10001f99c4a954156.jpg \n", + " inflating: /data/dog-breed-identification/train/c72d3f068d6fa7929104de1c6f308c1c.jpg \n", + " inflating: /data/dog-breed-identification/train/c73a535187af35a25faf061ce0739b21.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c7441fba1f18864b59b1d474936def91.jpg \n", + " inflating: /data/dog-breed-identification/train/c746f8858f83b2be8d985b152ebe1303.jpg \n", + " inflating: /data/dog-breed-identification/train/c750ad78e8e6ac798ef2b9c648abfab7.jpg \n", + " inflating: /data/dog-breed-identification/train/c750f80ab143fc7e82843db3722be53c.jpg \n", + " inflating: /data/dog-breed-identification/train/c752f1673409acb094792c428025b94e.jpg \n", + " inflating: /data/dog-breed-identification/train/c761baad1c531847cf53b7e48c802601.jpg \n", + " inflating: /data/dog-breed-identification/train/c76fed6a1fde742765e9e78347dcf664.jpg \n", + " inflating: /data/dog-breed-identification/train/c77266278126119fe02a3550dd0c3c1c.jpg \n", + " inflating: /data/dog-breed-identification/train/c773d2a49bbaa7ed09e64946d34ebcd3.jpg \n", + " inflating: /data/dog-breed-identification/train/c7741797d824bfc430494923974daa33.jpg \n", + " inflating: /data/dog-breed-identification/train/c78846e0622e79e1ccd749bfe6081c8e.jpg \n", + " inflating: /data/dog-breed-identification/train/c78dbf1038e7923b9cf0227890cbc93f.jpg \n", + " inflating: /data/dog-breed-identification/train/c78deaa29212d7866ae0b619fce37f53.jpg \n", + " inflating: /data/dog-breed-identification/train/c790a45b98d586f77f428b07f1fb1f17.jpg \n", + " inflating: /data/dog-breed-identification/train/c79f6aef73a97b81be61abf487cd4215.jpg \n", + " inflating: /data/dog-breed-identification/train/c7a166b06d765dc146e516a3ae3aaadd.jpg \n", + " inflating: /data/dog-breed-identification/train/c7ac8dc219cb3bba12deee6e9778f5ba.jpg \n", + " inflating: /data/dog-breed-identification/train/c7ad83b9c257921de61d0e43d814c31b.jpg \n", + " inflating: /data/dog-breed-identification/train/c7b05bd8992c6c7653e219d3242c01c4.jpg \n", + " inflating: /data/dog-breed-identification/train/c7b107dd35e6cfde679cc1be795f11f5.jpg \n", + " inflating: /data/dog-breed-identification/train/c7b614f768b8f540ee3b77964f13852b.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c7b6194c7f653f9c38ce8dddbecc2e38.jpg \n", + " inflating: /data/dog-breed-identification/train/c7c6ff42d5b63555ec23fe5170fdcd90.jpg \n", + " inflating: /data/dog-breed-identification/train/c7c952acb35ee96c30f4e6dd14241efc.jpg \n", + " inflating: /data/dog-breed-identification/train/c7d3cc378059bdea113257eec7302f32.jpg \n", + " inflating: /data/dog-breed-identification/train/c7d8a86998c9970e0d398192a016eefe.jpg \n", + " inflating: /data/dog-breed-identification/train/c7f915c8e4ced5a955be4679b389b83a.jpg \n", + " inflating: /data/dog-breed-identification/train/c7fc05c790e7edf1b37df9f1356698c9.jpg \n", + " inflating: /data/dog-breed-identification/train/c805222ea9c389d5223c08e30cfefc6b.jpg \n", + " inflating: /data/dog-breed-identification/train/c80a85e009935022c6401517fb1cd02a.jpg \n", + " inflating: /data/dog-breed-identification/train/c80beeef5084f7c27882c939669060a4.jpg \n", + " inflating: /data/dog-breed-identification/train/c81748117aecc58678b1677528fdab97.jpg \n", + " inflating: /data/dog-breed-identification/train/c81778e436da96e7a89d9ef84b367525.jpg \n", + " inflating: /data/dog-breed-identification/train/c825a17e20a29f767bf4b915d036c502.jpg \n", + " inflating: /data/dog-breed-identification/train/c829799f837a358ca95191c065d3406b.jpg \n", + " inflating: /data/dog-breed-identification/train/c83eed4ccffa2bee4dc6098ba09a15e7.jpg \n", + " inflating: /data/dog-breed-identification/train/c847b641c1424f39725470c28bc93262.jpg \n", + " inflating: /data/dog-breed-identification/train/c849f027fa1ae5c4925d24cf50111b1e.jpg \n", + " inflating: /data/dog-breed-identification/train/c84ec775ef1865e81a7552825982900f.jpg \n", + " inflating: /data/dog-breed-identification/train/c85fdb661bce3147ba6560f1f72cbbdd.jpg \n", + " inflating: /data/dog-breed-identification/train/c86661fc69fdc300f6bfb5f4e9e4fde3.jpg \n", + " inflating: /data/dog-breed-identification/train/c86c7ff482a6f2c10b48695b090519e5.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c86f8b08a4dcdb08f1f27a7b288d5af4.jpg \n", + " inflating: /data/dog-breed-identification/train/c870c6e336ba6fe2e10a3d16d5731c87.jpg \n", + " inflating: /data/dog-breed-identification/train/c872f67d6cc59584cf643dab086a6371.jpg \n", + " inflating: /data/dog-breed-identification/train/c87665c6e5606b6aaa100ce89367fae5.jpg \n", + " inflating: /data/dog-breed-identification/train/c87d6967fb9aec26a6ca305368088610.jpg \n", + " inflating: /data/dog-breed-identification/train/c87d7b6aa6ad07a6af1fde957779c1c8.jpg \n", + " inflating: /data/dog-breed-identification/train/c87f27b3b22233dbd9529604630a7767.jpg \n", + " inflating: /data/dog-breed-identification/train/c87fa55ed185f89542a7ab8d9e92b52f.jpg \n", + " inflating: /data/dog-breed-identification/train/c8819f17b98148a141429f21ff3753c4.jpg \n", + " inflating: /data/dog-breed-identification/train/c8b104dba92b39bc262ab7929e6bd5d1.jpg \n", + " inflating: /data/dog-breed-identification/train/c8b1cb661bab73b5493f7ddee137564e.jpg \n", + " inflating: /data/dog-breed-identification/train/c8b321b43e68c11b6f9b9154de7fe1f4.jpg \n", + " inflating: /data/dog-breed-identification/train/c8c1b76b12acf31fbaef3b420099391e.jpg \n", + " inflating: /data/dog-breed-identification/train/c8c5771ea4778f608d8674e0edf48fac.jpg \n", + " inflating: /data/dog-breed-identification/train/c8c63044e46f67dc58bc676e99ac8a4d.jpg \n", + " inflating: /data/dog-breed-identification/train/c8c69933448197f54c7a03325fbadf80.jpg \n", + " inflating: /data/dog-breed-identification/train/c8c8daf81110ff42e849399940b46caa.jpg \n", + " inflating: /data/dog-breed-identification/train/c8c9a0e17722c096e117408c137c251b.jpg \n", + " inflating: /data/dog-breed-identification/train/c8caed58369e7cbd1af1d57ed8499220.jpg \n", + " inflating: /data/dog-breed-identification/train/c8ccf79994355ac29be50dd98eca91b6.jpg \n", + " inflating: /data/dog-breed-identification/train/c8ce22f51941dc011dc544ee53076600.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c8d106f81eb85d7166224a0d947304f2.jpg \n", + " inflating: /data/dog-breed-identification/train/c8d4d8654df681140522be0f3b1ba422.jpg \n", + " inflating: /data/dog-breed-identification/train/c8d9160a26b275e85fb06b5c0e9fefd8.jpg \n", + " inflating: /data/dog-breed-identification/train/c8d9ff63c134fdac4967815868841f55.jpg \n", + " inflating: /data/dog-breed-identification/train/c8e3dc121315607f4460e92f8ca6da03.jpg \n", + " inflating: /data/dog-breed-identification/train/c8e967a80849bedc46693fa1b10d3aa2.jpg \n", + " inflating: /data/dog-breed-identification/train/c8eab8d5c97dfaf0bd993d6ad59895a6.jpg \n", + " inflating: /data/dog-breed-identification/train/c8edd4f20e6fee048cd1a61067a1a4a3.jpg \n", + " inflating: /data/dog-breed-identification/train/c8fca164680fb72050b8756b451873df.jpg \n", + " inflating: /data/dog-breed-identification/train/c90d5026d84f6f6ff79d8e4e0c503826.jpg \n", + " inflating: /data/dog-breed-identification/train/c90e3c991d6f2c79fdfd233a09f64f99.jpg \n", + " inflating: /data/dog-breed-identification/train/c90f18795a295fddaa50312408b49d1c.jpg \n", + " inflating: /data/dog-breed-identification/train/c91b930254bb79924d90086f3353a8ab.jpg \n", + " inflating: /data/dog-breed-identification/train/c91ca333b6b3dddab742a63d75a135ee.jpg \n", + " inflating: /data/dog-breed-identification/train/c91e77e8bed1a9245dc124408704b3cc.jpg \n", + " inflating: /data/dog-breed-identification/train/c9225fdea1d82387cd5b8bcf30ed9df5.jpg \n", + " inflating: /data/dog-breed-identification/train/c925c6050b9846ea6c7f8a9e1314c07c.jpg \n", + " inflating: /data/dog-breed-identification/train/c928acdd369336b14d87c2846770cac1.jpg \n", + " inflating: /data/dog-breed-identification/train/c92c0415e2b9686f3a5627e050ed856e.jpg \n", + " inflating: /data/dog-breed-identification/train/c93866c94f2fe0faee7f8aefe1145e2c.jpg \n", + " inflating: /data/dog-breed-identification/train/c93de7c42883d7819fbfc95914ba6493.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c940c8808aefc1ad992d341273bf4050.jpg \n", + " inflating: /data/dog-breed-identification/train/c9467a941ab7f1e761918f65d287f557.jpg \n", + " inflating: /data/dog-breed-identification/train/c9579c8fd920a1e698c696168946a83b.jpg \n", + " inflating: /data/dog-breed-identification/train/c962302a0176ab136617cac958822928.jpg \n", + " inflating: /data/dog-breed-identification/train/c965e44d0fd6782c734883897d4be493.jpg \n", + " inflating: /data/dog-breed-identification/train/c9687bdd527037eaa6874a97dfa7c0d1.jpg \n", + " inflating: /data/dog-breed-identification/train/c96b81edd143405fc0ab3a6d7b2fda00.jpg \n", + " inflating: /data/dog-breed-identification/train/c97a4453b7247d68bb2f12c30dac1676.jpg \n", + " inflating: /data/dog-breed-identification/train/c97b5df44a2ce52af36cfbdb2482c680.jpg \n", + " inflating: /data/dog-breed-identification/train/c97c9eb7d372827cb1b67b0a8afe8f83.jpg \n", + " inflating: /data/dog-breed-identification/train/c984aa03d799d4235c311e5bd373cf1f.jpg \n", + " inflating: /data/dog-breed-identification/train/c9937a53e259b14126445b6e9f02bee7.jpg \n", + " inflating: /data/dog-breed-identification/train/c9988677fd4cba56d55d2da439c882a4.jpg \n", + " inflating: /data/dog-breed-identification/train/c99d1d9fea3e12ee989956c0ba3d617f.jpg \n", + " inflating: /data/dog-breed-identification/train/c9ac02c96b1acb4c8365195cdd319204.jpg \n", + " inflating: /data/dog-breed-identification/train/c9ae288957522206168096ca00048f64.jpg \n", + " inflating: /data/dog-breed-identification/train/c9ae8cad952c04be0da3d2f9da5c61e5.jpg \n", + " inflating: /data/dog-breed-identification/train/c9bbc4ce586c0d73e14bee1b1e674ba9.jpg \n", + " inflating: /data/dog-breed-identification/train/c9be1b14a664fab3d09d0ca4b8a79cbf.jpg \n", + " inflating: /data/dog-breed-identification/train/c9bf9e02568ef44c0f8ab9e4b59bf3d4.jpg \n", + " inflating: /data/dog-breed-identification/train/c9c0e4d197397dc8f210a397a063a42b.jpg \n", + " inflating: 
/data/dog-breed-identification/train/c9c4af524521a8ff906fa592574da9d8.jpg \n", + " inflating: /data/dog-breed-identification/train/c9ccbbb14fc3ac97fddcf20b95ff121a.jpg \n", + " inflating: /data/dog-breed-identification/train/c9d2cef73ce84a96095da1e79ff88f21.jpg \n", + " inflating: /data/dog-breed-identification/train/c9d8d9ef676e6b94c671599a28a63d29.jpg \n", + " inflating: /data/dog-breed-identification/train/c9df21697a5726fd015a9dc7fa8a9829.jpg \n", + " inflating: /data/dog-breed-identification/train/c9e7fe5e9da49b87e149cb8e317570e5.jpg \n", + " inflating: /data/dog-breed-identification/train/c9ea2b424b0074a33ec7a879b1cb25ca.jpg \n", + " inflating: /data/dog-breed-identification/train/c9f0a7765a138e2850c6ed6538c2134f.jpg \n", + " inflating: /data/dog-breed-identification/train/c9f66277a7804111d9491019cb3ec2ab.jpg \n", + " inflating: /data/dog-breed-identification/train/c9f93440c107a5fdf54b9652670c26df.jpg \n", + " inflating: /data/dog-breed-identification/train/c9fcc35552d72c6141aaf637a3fe394a.jpg \n", + " inflating: /data/dog-breed-identification/train/ca0125e172368e6807dd69f69534851a.jpg \n", + " inflating: /data/dog-breed-identification/train/ca01a96cd44e7add3961d9684332449d.jpg \n", + " inflating: /data/dog-breed-identification/train/ca03b339263e225c1d8ad51c7bfdd9e9.jpg \n", + " inflating: /data/dog-breed-identification/train/ca1bd57de0d5d1e08786b44ec7806e19.jpg \n", + " inflating: /data/dog-breed-identification/train/ca1cbf8bf7f799fcbbb0b63307238910.jpg \n", + " inflating: /data/dog-breed-identification/train/ca24aa6f316f6b2f28727421f933d3e6.jpg \n", + " inflating: /data/dog-breed-identification/train/ca27b14025a918c8f2964c6e945993db.jpg \n", + " inflating: /data/dog-breed-identification/train/ca2b3c1fd1ff7fb5e8c714359ac512d4.jpg \n", + " inflating: /data/dog-breed-identification/train/ca2c1407d0d53938af210268af290b09.jpg \n", + " inflating: /data/dog-breed-identification/train/ca2d7e7f3a7e7458cd396e18cf53a08e.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ca329bde7d07c1e7d968f9b376906edc.jpg \n", + " inflating: /data/dog-breed-identification/train/ca3394ca53c450c10e688cdf985567ad.jpg \n", + " inflating: /data/dog-breed-identification/train/ca39d409bccf8034d2d11b7b44d8e0a1.jpg \n", + " inflating: /data/dog-breed-identification/train/ca5157b5031f9f164fb35284c7fbfca4.jpg \n", + " inflating: /data/dog-breed-identification/train/ca555743c121a3e8bdb06a83b8d17751.jpg \n", + " inflating: /data/dog-breed-identification/train/ca5fd2cb1f6e028918fc570cf74f539c.jpg \n", + " inflating: /data/dog-breed-identification/train/ca61b56aa74ae742e9022874b8676112.jpg \n", + " inflating: /data/dog-breed-identification/train/ca69a6c77030ae811f52acdad1bfb780.jpg \n", + " inflating: /data/dog-breed-identification/train/ca6d8156597b78fbf96f9e73b0319f28.jpg \n", + " inflating: /data/dog-breed-identification/train/ca6ef65ae71b197543c69095ff46f100.jpg \n", + " inflating: /data/dog-breed-identification/train/ca7befe874fe442e48d5cf546b2e5dcb.jpg \n", + " inflating: /data/dog-breed-identification/train/ca824e6b9c055e2d42c33b8c62db93d9.jpg \n", + " inflating: /data/dog-breed-identification/train/ca8478ac3e05a4374e0e02a2e89ac599.jpg \n", + " inflating: /data/dog-breed-identification/train/ca862f2b2d77f940c2a934acca05dfe1.jpg \n", + " inflating: /data/dog-breed-identification/train/ca8a5cca09a2043bf2abf727fb6587f7.jpg \n", + " inflating: /data/dog-breed-identification/train/ca8e824e2e6fd1321271321ea5034fe7.jpg \n", + " inflating: /data/dog-breed-identification/train/ca9cc76eca157a803c3459b34e722351.jpg \n", + " inflating: /data/dog-breed-identification/train/caa1cc6ebf435bc9f43d8cf32d67d48f.jpg \n", + " inflating: /data/dog-breed-identification/train/cac84e1b6d9ca1545dc7f6647b988952.jpg \n", + " inflating: /data/dog-breed-identification/train/cad5ac8153a50161c292a7ea2328bf96.jpg \n", + " inflating: /data/dog-breed-identification/train/cada57465146623e34fffbd0cf86cf0f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/cae0ea3bdbbccefa728f9322981ed734.jpg \n", + " inflating: /data/dog-breed-identification/train/cae43050689de01cc048e8f15230312e.jpg \n", + " inflating: /data/dog-breed-identification/train/caf49640a8436f3ed39c56b1c5e447db.jpg \n", + " inflating: /data/dog-breed-identification/train/caf609b8bda665e04c40c7bdd706a4d3.jpg \n", + " inflating: /data/dog-breed-identification/train/caf73739fc74fdb9f27db7f14a913ec3.jpg \n", + " inflating: /data/dog-breed-identification/train/cafb4038e60e4806b0f6beeff7a61b51.jpg \n", + " inflating: /data/dog-breed-identification/train/cb00be608d3c2559eb807386e3c84e1d.jpg \n", + " inflating: /data/dog-breed-identification/train/cb0716607f4e66bc5bba7532bedf293b.jpg \n", + " inflating: /data/dog-breed-identification/train/cb0d458235fb16d22dd03d9886cf50e0.jpg \n", + " inflating: /data/dog-breed-identification/train/cb1a68e1ac430535ea3b072bf98fcb88.jpg \n", + " inflating: /data/dog-breed-identification/train/cb2355a42af96267976f7f0ecf0f0de7.jpg \n", + " inflating: /data/dog-breed-identification/train/cb26e46dc1578cb8816f6e5e24a070e0.jpg \n", + " inflating: /data/dog-breed-identification/train/cb3121245ba0e86572d9f46f10ca0e5f.jpg \n", + " inflating: /data/dog-breed-identification/train/cb3b74d4880e9cd8fca333daa7f2aa73.jpg \n", + " inflating: /data/dog-breed-identification/train/cb43f233915a147d538eaaa7f4b45068.jpg \n", + " inflating: /data/dog-breed-identification/train/cb50b731cb69db275c4e5a3dbf04c9d8.jpg \n", + " inflating: /data/dog-breed-identification/train/cb555b3a480130c2803ead9c1336bda5.jpg \n", + " inflating: /data/dog-breed-identification/train/cb6089de7b0550c129c6366272b90b03.jpg \n", + " inflating: /data/dog-breed-identification/train/cb64a5454e405b6eef6061fda3433fe5.jpg \n", + " inflating: /data/dog-breed-identification/train/cb6e8b90c3769f074a74f27296cad88f.jpg \n", + " inflating: /data/dog-breed-identification/train/cb7ab99df4dd35a0a966791c71bd9ed7.jpg \n", + " inflating: 
/data/dog-breed-identification/train/cb7fb54008ef21a8b55da46d5145acb3.jpg \n", + " inflating: /data/dog-breed-identification/train/cb862ba8dcd6fc9ab542e3cdb10992ee.jpg \n", + " inflating: /data/dog-breed-identification/train/cb87385d8e14973cf2a66f956874acd0.jpg \n", + " inflating: /data/dog-breed-identification/train/cb8a9dd977947ae3fd1c257eb047686b.jpg \n", + " inflating: /data/dog-breed-identification/train/cb901f7d812d25844d524bbe2f64f31f.jpg \n", + " inflating: /data/dog-breed-identification/train/cb907bbc5a0142a64c15dbcb07894637.jpg \n", + " inflating: /data/dog-breed-identification/train/cb981a6f87f8d8dd645c41710297fdbd.jpg \n", + " inflating: /data/dog-breed-identification/train/cba32ddfd44f83ed34d88326b2416937.jpg \n", + " inflating: /data/dog-breed-identification/train/cba6d337615d1b4b9e6a037de1652023.jpg \n", + " inflating: /data/dog-breed-identification/train/cbaf3cc8691cac3a27cdce406a7c5dcb.jpg \n", + " inflating: /data/dog-breed-identification/train/cbb65152c8e5dfbc10ec40421d65956f.jpg \n", + " inflating: /data/dog-breed-identification/train/cbb942c46449834575961dcf0d2c27f0.jpg \n", + " inflating: /data/dog-breed-identification/train/cbbdb384a6fb8b9b96b598ca1d4675b4.jpg \n", + " inflating: /data/dog-breed-identification/train/cbc3a36f42d9c1a97577354bcde3716d.jpg \n", + " inflating: /data/dog-breed-identification/train/cbc467503ed345317daf97f6f5536195.jpg \n", + " inflating: /data/dog-breed-identification/train/cbc950eb607d8744ee953bfa1221a895.jpg \n", + " inflating: /data/dog-breed-identification/train/cbcd2646088a01dce9e5dbabce5aca3d.jpg \n", + " inflating: /data/dog-breed-identification/train/cbd3c747a40068089dfd7036ff3c62c0.jpg \n", + " inflating: /data/dog-breed-identification/train/cbd8e6c8f98e12a8b336ecedfd874707.jpg \n", + " inflating: /data/dog-breed-identification/train/cbdc00279f7aa7424d7034ed5463ec79.jpg \n", + " inflating: /data/dog-breed-identification/train/cbe1e05f5c6a9cad9e5afa6e86dda4c0.jpg \n", + " inflating: 
/data/dog-breed-identification/train/cbe3751367be368a090cb0b160bd7bba.jpg \n", + " inflating: /data/dog-breed-identification/train/cbe63371eb2c2c9fb234a64f8bc149a4.jpg \n", + " inflating: /data/dog-breed-identification/train/cbe74cdfb13fdd3748c52629e38b1588.jpg \n", + " inflating: /data/dog-breed-identification/train/cc08970afdec42e3c35ecdcb82b88343.jpg \n", + " inflating: /data/dog-breed-identification/train/cc09a186fc07dc960dbbd5afb464c2f7.jpg \n", + " inflating: /data/dog-breed-identification/train/cc0ad277cc5f3474c3f337015643c6e6.jpg \n", + " inflating: /data/dog-breed-identification/train/cc0b22a383993745090aeb391272cc77.jpg \n", + " inflating: /data/dog-breed-identification/train/cc12832a69a89000c395c4398b72d0e2.jpg \n", + " inflating: /data/dog-breed-identification/train/cc1e070f808bc193b981e3b1b89d7173.jpg \n", + " inflating: /data/dog-breed-identification/train/cc315850f2853fd4e63220e774672ed9.jpg \n", + " inflating: /data/dog-breed-identification/train/cc3aae66ef2abe212cc5f682e71612ff.jpg \n", + " inflating: /data/dog-breed-identification/train/cc3ad59842c48f70b1ca9b323e5f09d2.jpg \n", + " inflating: /data/dog-breed-identification/train/cc3ebdab9d7f7a331090e55ea2549dec.jpg \n", + " inflating: /data/dog-breed-identification/train/cc4e63a104a2466da58a5f81550c37a4.jpg \n", + " inflating: /data/dog-breed-identification/train/cc761b9fd55bdd6ab132b44a9b77a7da.jpg \n", + " inflating: /data/dog-breed-identification/train/cc77de33c6b6f0ed97200803e09e1275.jpg \n", + " inflating: /data/dog-breed-identification/train/cc791d33102c4dd55ef715f98be1ff23.jpg \n", + " inflating: /data/dog-breed-identification/train/cc7ae3da3bebcc4acb10128078cdf29a.jpg \n", + " inflating: /data/dog-breed-identification/train/cc7af1c37c65fee4e036571c7f5b55f9.jpg \n", + " inflating: /data/dog-breed-identification/train/cc8ec46c8c1ff5337ac636a9c21abcc2.jpg \n", + " inflating: /data/dog-breed-identification/train/cc93915e06bc55626a02af95006a48c2.jpg \n", + " inflating: 
/data/dog-breed-identification/train/cc964d3bf1e317c9fbb0c0d4c8bc6b8f.jpg \n", + " inflating: /data/dog-breed-identification/train/cc97041986abdb8566a3ed4317f40c27.jpg \n", + " inflating: /data/dog-breed-identification/train/cc99de39a169a9aebaf34d4a514e266b.jpg \n", + " inflating: /data/dog-breed-identification/train/cc9b4190a7063f8e92dd21ff25152643.jpg \n", + " inflating: /data/dog-breed-identification/train/cca773094173965bbd04f829eea6eec7.jpg \n", + " inflating: /data/dog-breed-identification/train/ccb296c8257649527e45affde75d331d.jpg \n", + " inflating: /data/dog-breed-identification/train/ccb75b5d00281575fe98f1d56d23d7a9.jpg \n", + " inflating: /data/dog-breed-identification/train/ccbf2d7da8e85a3b60eb0ff8a87af58f.jpg \n", + " inflating: /data/dog-breed-identification/train/ccc369e93d792e44329a5f13ae6ae582.jpg \n", + " inflating: /data/dog-breed-identification/train/ccc49b7d2c895b6d6c9ce27845faf622.jpg \n", + " inflating: /data/dog-breed-identification/train/ccc5516a7db6d1d409ed66be35b6deeb.jpg \n", + " inflating: /data/dog-breed-identification/train/ccced0593b697192b5e5b7f9ded1edfa.jpg \n", + " inflating: /data/dog-breed-identification/train/ccd5d80be42329634437ae59df20bb02.jpg \n", + " inflating: /data/dog-breed-identification/train/ccd804f2bb1d8b97dd6d0ef6c0feb054.jpg \n", + " inflating: /data/dog-breed-identification/train/ccd907aaac91d10f9a5ad8f9391e0db5.jpg \n", + " inflating: /data/dog-breed-identification/train/ccd99c16e1a964e86dc5ee5ba5b2854f.jpg \n", + " inflating: /data/dog-breed-identification/train/ccde54b34bfa24e9fecdc9ad53ff4a37.jpg \n", + " inflating: /data/dog-breed-identification/train/cce1d5b417e2aa8afade8664a84d4576.jpg \n", + " inflating: /data/dog-breed-identification/train/cce6c1a924e06dd07bb86d76d6d705d6.jpg \n", + " inflating: /data/dog-breed-identification/train/cceda69318da7cff1dadf98a150d567c.jpg \n", + " inflating: /data/dog-breed-identification/train/ccf68bda9760941e198554e32b02025e.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ccf7f33997edc88e2a25ebbbdc825105.jpg \n", + " inflating: /data/dog-breed-identification/train/cd00a72c75a1cb3a121a032560b47969.jpg \n", + " inflating: /data/dog-breed-identification/train/cd047489ca9d316cc15fe8b3c52d3090.jpg \n", + " inflating: /data/dog-breed-identification/train/cd0587a34594afd8b79387cf137c66e7.jpg \n", + " inflating: /data/dog-breed-identification/train/cd067bf6b895c629180fdf08f7a0b96e.jpg \n", + " inflating: /data/dog-breed-identification/train/cd12c6313e40ab559c00fda493c324d5.jpg \n", + " inflating: /data/dog-breed-identification/train/cd147766deb06c9eb2e43a24c0645fc2.jpg \n", + " inflating: /data/dog-breed-identification/train/cd17e777cdce3939575a66479ccdc790.jpg \n", + " inflating: /data/dog-breed-identification/train/cd19644fa2fcfeee4f92cabe64d3c324.jpg \n", + " inflating: /data/dog-breed-identification/train/cd1d661e9023be9095a28ab1d005b502.jpg \n", + " inflating: /data/dog-breed-identification/train/cd2365170b2191c7a5df0c9531f2af70.jpg \n", + " inflating: /data/dog-breed-identification/train/cd2467e4da2dc73c38e6f7bc24b0e6e9.jpg \n", + " inflating: /data/dog-breed-identification/train/cd31a52754616411d0484aa309c66b2a.jpg \n", + " inflating: /data/dog-breed-identification/train/cd3c21425c633a2d1c7d0df930245851.jpg \n", + " inflating: /data/dog-breed-identification/train/cd406cc58c3a6542293a947e017a7bc6.jpg \n", + " inflating: /data/dog-breed-identification/train/cd40acc3fbfd0ca5a1985eea44d8493b.jpg \n", + " inflating: /data/dog-breed-identification/train/cd53fc1890f5979b0ed9d2c8b69a93a6.jpg \n", + " inflating: /data/dog-breed-identification/train/cd5c1f1ec93244a933f377ce42b1b3dd.jpg \n", + " inflating: /data/dog-breed-identification/train/cd6faa4e0dac0ac5ec7a17d789b2547d.jpg \n", + " inflating: /data/dog-breed-identification/train/cd714df1f1977466537bbf2fcbbcf0d9.jpg \n", + " inflating: /data/dog-breed-identification/train/cd7ad93d62286e4d6ecd2c2c61e0bdaa.jpg \n", + " inflating: 
/data/dog-breed-identification/train/cd806882f79da1043e04c70888865c7e.jpg \n", + " inflating: /data/dog-breed-identification/train/cd9045d7c082dc4427bfd026d03c0e69.jpg \n", + " inflating: /data/dog-breed-identification/train/cd9755d122a2b1cba1155e1612923986.jpg \n", + " inflating: /data/dog-breed-identification/train/cd980b3f39d9ae566444c3ebb9096acf.jpg \n", + " inflating: /data/dog-breed-identification/train/cd9a0ccbb1b02224208062f3778444ef.jpg \n", + " inflating: /data/dog-breed-identification/train/cdb32ac86d0475b5d32667582acaf267.jpg \n", + " inflating: /data/dog-breed-identification/train/cdb97461991bf6d92b771a0c83076b69.jpg \n", + " inflating: /data/dog-breed-identification/train/cdba761dfa000859fb4a89f56730e513.jpg \n", + " inflating: /data/dog-breed-identification/train/cdc52aa70527d1ce57c1483f6a61c482.jpg \n", + " inflating: /data/dog-breed-identification/train/cdd066de3fd206038ac23ea487ad8246.jpg \n", + " inflating: /data/dog-breed-identification/train/cdd1ac938055dd3956dd17b17d941716.jpg \n", + " inflating: /data/dog-breed-identification/train/cdd98aebbc986af83308c15269c14e66.jpg \n", + " inflating: /data/dog-breed-identification/train/cdee721aacdca2d44cd02200f2e1e62c.jpg \n", + " inflating: /data/dog-breed-identification/train/cdf01b0e87faf32363ef0c77a67c249a.jpg \n", + " inflating: /data/dog-breed-identification/train/cdffff7690054e91eab8e7237df12406.jpg \n", + " inflating: /data/dog-breed-identification/train/ce0b7b688e294ae5ae0620354c946f0d.jpg \n", + " inflating: /data/dog-breed-identification/train/ce101a2ad530221d3fd8b252bc4eb682.jpg \n", + " inflating: /data/dog-breed-identification/train/ce125f9866c84532e94bfc1fb7998df8.jpg \n", + " inflating: /data/dog-breed-identification/train/ce1921bbb6d6cbe1ec4750a073bf956e.jpg \n", + " inflating: /data/dog-breed-identification/train/ce202f5b214ad2646bc52398fdef426b.jpg \n", + " inflating: /data/dog-breed-identification/train/ce2255d0df0d65119a0ac484a18d0275.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ce293768d21d78ee39f43642af69a674.jpg \n", + " inflating: /data/dog-breed-identification/train/ce31f66a9991f1f2bb45b84b5a7bd6e7.jpg \n", + " inflating: /data/dog-breed-identification/train/ce3366408fc2cad7559b24f14c074655.jpg \n", + " inflating: /data/dog-breed-identification/train/ce35256ac863f1a40f62b52dfc107a20.jpg \n", + " inflating: /data/dog-breed-identification/train/ce384b69c955c293c02edb3888c03de0.jpg \n", + " inflating: /data/dog-breed-identification/train/ce3f73cdc0a7797ef56afa3745af7985.jpg \n", + " inflating: /data/dog-breed-identification/train/ce4758176f99386df7fde8e2b55fba98.jpg \n", + " inflating: /data/dog-breed-identification/train/ce4d01bfae7a2486db3f55813347b095.jpg \n", + " inflating: /data/dog-breed-identification/train/ce525c673cc344e8cb8f5a5265689910.jpg \n", + " inflating: /data/dog-breed-identification/train/ce549a7ebea582dd4ac694d6b1a92c53.jpg \n", + " inflating: /data/dog-breed-identification/train/ce5e29677c46c613e9831bd801f2d907.jpg \n", + " inflating: /data/dog-breed-identification/train/ce616db871b255cd660b2f845e7b75e9.jpg \n", + " inflating: /data/dog-breed-identification/train/ce7a719bf215d73fcf0d19f554bed5ba.jpg \n", + " inflating: /data/dog-breed-identification/train/ce823aba64bddcca96b7c3c7f8862f36.jpg \n", + " inflating: /data/dog-breed-identification/train/ce8aafb174173caa826cf76e0469d56f.jpg \n", + " inflating: /data/dog-breed-identification/train/ce92370c283c12eb64271cdec81fc1d5.jpg \n", + " inflating: /data/dog-breed-identification/train/cea803dc044c609ccab57ec3820bcc16.jpg \n", + " inflating: /data/dog-breed-identification/train/ceba42b006bf3f03f643d641d253f25d.jpg \n", + " inflating: /data/dog-breed-identification/train/ceca987bff2a3776e5ebc1e2ca54a9da.jpg \n", + " inflating: /data/dog-breed-identification/train/cecc7ef1ebe7682f38c497bbdcf8b391.jpg \n", + " inflating: /data/dog-breed-identification/train/cece87e72f771598e2c4d401d41a63e9.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ced3ca13937555e20f9193e7c4ff865a.jpg \n", + " inflating: /data/dog-breed-identification/train/cedaa14045b1bea7a383f5bdf588db89.jpg \n", + " inflating: /data/dog-breed-identification/train/cedcd40ba220aafc622ebc603c2cdf25.jpg \n", + " inflating: /data/dog-breed-identification/train/cede92d889f449e6bb5ad7fe10e5f887.jpg \n", + " inflating: /data/dog-breed-identification/train/cef7b5d4e9d1d08d16611fbe6ca26440.jpg \n", + " inflating: /data/dog-breed-identification/train/cf006bb3f31c015f513071e9067f8ee0.jpg \n", + " inflating: /data/dog-breed-identification/train/cf01fdc8b8deede04eb88f0aee27b2bf.jpg \n", + " inflating: /data/dog-breed-identification/train/cf0973fa8d11be069b23769e781daf9d.jpg \n", + " inflating: /data/dog-breed-identification/train/cf0b75d1f10ab79d91069820c99dc4fa.jpg \n", + " inflating: /data/dog-breed-identification/train/cf16389be0d418812e47ae7694b441b4.jpg \n", + " inflating: /data/dog-breed-identification/train/cf1b216674bc2d431405ef57d01e6a3e.jpg \n", + " inflating: /data/dog-breed-identification/train/cf269794a592d3b6ab3a52d59830d763.jpg \n", + " inflating: /data/dog-breed-identification/train/cf27299362ada8a5e99e4368c9f8de35.jpg \n", + " inflating: /data/dog-breed-identification/train/cf2851761c1af3015e67d12681c40f11.jpg \n", + " inflating: /data/dog-breed-identification/train/cf2ab1c0c1009cceb951ca7d74bf13ca.jpg \n", + " inflating: /data/dog-breed-identification/train/cf2eae68033af7a625dcb3a2565482f2.jpg \n", + " inflating: /data/dog-breed-identification/train/cf3697f8f3ee67b50cedaa63904ab5e8.jpg \n", + " inflating: /data/dog-breed-identification/train/cf3b0064d1834e9184645d870021f148.jpg \n", + " inflating: /data/dog-breed-identification/train/cf44ff5245f6b6c1ea2ad82930bfb237.jpg \n", + " inflating: /data/dog-breed-identification/train/cf67eaba9efa7e9af92f8b0356ba02d1.jpg \n", + " inflating: /data/dog-breed-identification/train/cf6e0ec97666cf8ddc3de90f1273ff4d.jpg \n", + " inflating: 
/data/dog-breed-identification/train/cf82709a00a4f2168555b8f77c0d815d.jpg \n", + " inflating: /data/dog-breed-identification/train/cf835d1c4bc696f14bc631156c9a770f.jpg \n", + " inflating: /data/dog-breed-identification/train/cf853fd31d054b54395a43469d1f49a8.jpg \n", + " inflating: /data/dog-breed-identification/train/cf8646d1cee707152f2590d8a7c2c82f.jpg \n", + " inflating: /data/dog-breed-identification/train/cf8c9065ce9fe68a218371634529babc.jpg \n", + " inflating: /data/dog-breed-identification/train/cf954483c0d2e3004083144ffc432092.jpg \n", + " inflating: /data/dog-breed-identification/train/cf9628b5959c97e2be6a63e1e4335c24.jpg \n", + " inflating: /data/dog-breed-identification/train/cf98d353326b72966fe9e738b344b3cc.jpg \n", + " inflating: /data/dog-breed-identification/train/cf9a778a10670a978a44a7cdfb9dd12a.jpg \n", + " inflating: /data/dog-breed-identification/train/cfa922cad858484df5a928fe0db40b61.jpg \n", + " inflating: /data/dog-breed-identification/train/cfaf6ea651f31e1ab10d384b84cebff1.jpg \n", + " inflating: /data/dog-breed-identification/train/cfb1ca7dc6db8a6d3e0cd15b4a7be229.jpg \n", + " inflating: /data/dog-breed-identification/train/cfb25532ce2c35cebd553938f261e666.jpg \n", + " inflating: /data/dog-breed-identification/train/cfb2e02aafbed417593e849cbf9c30fd.jpg \n", + " inflating: /data/dog-breed-identification/train/cfb9775c071c3c447d1e7f132bc5b48e.jpg \n", + " inflating: /data/dog-breed-identification/train/cfbca2f86b0f5cac31ed9b41d9482661.jpg \n", + " inflating: /data/dog-breed-identification/train/cfc1665abbfdf9262f6d452769db5acc.jpg \n", + " inflating: /data/dog-breed-identification/train/cfc4f71ed016e4104dca21bac935047d.jpg \n", + " inflating: /data/dog-breed-identification/train/cfcab9b961c4dca7804e7159d8bd3d60.jpg \n", + " inflating: /data/dog-breed-identification/train/cfcc1612346870da991980196d34e106.jpg \n", + " inflating: /data/dog-breed-identification/train/cfcc3d9c2e589b87e90a06a3857f955c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/cfd115aa91d1b341574869fd5b9a1afd.jpg \n", + " inflating: /data/dog-breed-identification/train/cfd1cdada50aee75c59957bb8ac00183.jpg \n", + " inflating: /data/dog-breed-identification/train/cfd768af4ad2872016ec398f91153fbd.jpg \n", + " inflating: /data/dog-breed-identification/train/cff9465a6b201d7ba4d83ef0ebab8147.jpg \n", + " inflating: /data/dog-breed-identification/train/cffca67e3dd2a1254c356c55bc84c322.jpg \n", + " inflating: /data/dog-breed-identification/train/d003186c0ba9adb9fd4407a2f1eb9510.jpg \n", + " inflating: /data/dog-breed-identification/train/d00642800e4ca6d84887a93e3430ae21.jpg \n", + " inflating: /data/dog-breed-identification/train/d00a08d162750e970a53c096f0e5c63a.jpg \n", + " inflating: /data/dog-breed-identification/train/d015ddbcda957741a34c705200094e5e.jpg \n", + " inflating: /data/dog-breed-identification/train/d019f029b4480dfcd04b0fb07249a9aa.jpg \n", + " inflating: /data/dog-breed-identification/train/d020d0461312aa24bb8eb462701a1088.jpg \n", + " inflating: /data/dog-breed-identification/train/d02aace37d23ed5e1bc81621bd1a6014.jpg \n", + " inflating: /data/dog-breed-identification/train/d02db74d4d275327c1d470372fdce80e.jpg \n", + " inflating: /data/dog-breed-identification/train/d03cc5012ecadd760e8e119e10ac98e1.jpg \n", + " inflating: /data/dog-breed-identification/train/d03d4df9ecba147112407209140e0f78.jpg \n", + " inflating: /data/dog-breed-identification/train/d0424ce62287e709fcad3cd5bf978378.jpg \n", + " inflating: /data/dog-breed-identification/train/d0441db03224b15c47422e6bdf351a52.jpg \n", + " inflating: /data/dog-breed-identification/train/d04b64a9c43afe278479d36e0f196bc1.jpg \n", + " inflating: /data/dog-breed-identification/train/d05051d644e85bde41fb021d0ed17a63.jpg \n", + " inflating: /data/dog-breed-identification/train/d05a666072e8d8734124a31e727f23da.jpg \n", + " inflating: /data/dog-breed-identification/train/d060e6fe84298ef6a37e3a0b9775c0c9.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d06dfe63b8aa87927542f830cd7c5879.jpg \n", + " inflating: /data/dog-breed-identification/train/d07c7895817c3baa1ac25753d63dfc53.jpg \n", + " inflating: /data/dog-breed-identification/train/d09d661f984be8af06debd9ad7e4ec6c.jpg \n", + " inflating: /data/dog-breed-identification/train/d0a1c46e655213d0e856f51715a5248a.jpg \n", + " inflating: /data/dog-breed-identification/train/d0a79a274e32f238ee0a14781fab1f60.jpg \n", + " inflating: /data/dog-breed-identification/train/d0a8d73306d4ff25856bfd8a62748979.jpg \n", + " inflating: /data/dog-breed-identification/train/d0abd5df53b9a41a506ea217a70a961a.jpg \n", + " inflating: /data/dog-breed-identification/train/d0c3c1622518b8d9d26b4c71a43b3a85.jpg \n", + " inflating: /data/dog-breed-identification/train/d0e2d316e8e1a6d951340d0894b9f26e.jpg \n", + " inflating: /data/dog-breed-identification/train/d0ea05a2828daf94ed74664b0081d026.jpg \n", + " inflating: /data/dog-breed-identification/train/d0f6c63bf17f25be16f2671c0ac7b7b6.jpg \n", + " inflating: /data/dog-breed-identification/train/d10569ff8fb93c2ad2636f2e1e7e9d72.jpg \n", + " inflating: /data/dog-breed-identification/train/d10f9ba331c5f66331a449463c06cd87.jpg \n", + " inflating: /data/dog-breed-identification/train/d110e189aff527674e08285e7ff86508.jpg \n", + " inflating: /data/dog-breed-identification/train/d1158535e2b0ee1db603ce2b12411f9f.jpg \n", + " inflating: /data/dog-breed-identification/train/d11ef31e3d37fd218db0fcc76e416241.jpg \n", + " inflating: /data/dog-breed-identification/train/d120834472b4ef2520e5860b0f3b0500.jpg \n", + " inflating: /data/dog-breed-identification/train/d13131f5b2771e917e58fd49cb710cad.jpg \n", + " inflating: /data/dog-breed-identification/train/d132a0af92ff56fe35d1811b8eae95f0.jpg \n", + " inflating: /data/dog-breed-identification/train/d1337daa2bbfbfab34a937f0ff1cedec.jpg \n", + " inflating: /data/dog-breed-identification/train/d146d9004a5895ed0efd91636c751d74.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d146f0cc0ce9bc0cb3514618affbd028.jpg \n", + " inflating: /data/dog-breed-identification/train/d153a102953bac7ee4dca7b2919111ee.jpg \n", + " inflating: /data/dog-breed-identification/train/d155359376470f2e707ac73e636eb5b2.jpg \n", + " inflating: /data/dog-breed-identification/train/d15663dbbbcbde9517ae67284c664897.jpg \n", + " inflating: /data/dog-breed-identification/train/d15c761ba986dc194be3170ded786d3b.jpg \n", + " inflating: /data/dog-breed-identification/train/d1676f45ee40b1c585ddc56b2a63c402.jpg \n", + " inflating: /data/dog-breed-identification/train/d1718b1895cc6a7e9ed92087d87f29b2.jpg \n", + " inflating: /data/dog-breed-identification/train/d187dac89d006d0fad4aa697dc49e3d7.jpg \n", + " inflating: /data/dog-breed-identification/train/d19209b37780178ee6b51afdf2536583.jpg \n", + " inflating: /data/dog-breed-identification/train/d194f1de2eb1f5037481388b04452b6b.jpg \n", + " inflating: /data/dog-breed-identification/train/d198549ea46f9f624b71fcffbcbe8e31.jpg \n", + " inflating: /data/dog-breed-identification/train/d198ac3fcba64d6a3996f42cc13b6465.jpg \n", + " inflating: /data/dog-breed-identification/train/d19bfb2a55eaa24aa45dafe1d04b7eb1.jpg \n", + " inflating: /data/dog-breed-identification/train/d19c2c775d85c2f50e1519ce67d5a5d7.jpg \n", + " inflating: /data/dog-breed-identification/train/d1b1bb6de02d5b193e61e018cda53e47.jpg \n", + " inflating: /data/dog-breed-identification/train/d1d430da93a267cec3abd75d99d91fc4.jpg \n", + " inflating: /data/dog-breed-identification/train/d1d46e8cce0706c19b41bc88b18139ae.jpg \n", + " inflating: /data/dog-breed-identification/train/d1e3d07958a55a2818cffbd52bc090ca.jpg \n", + " inflating: /data/dog-breed-identification/train/d1f38890c027d2cb93d2207446e83b10.jpg \n", + " inflating: /data/dog-breed-identification/train/d1f7ce7e656abd32521c790c04f768d1.jpg \n", + " inflating: /data/dog-breed-identification/train/d1f946eb5c6aa8da5be6f74bf98c4ea3.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d1f9538eeb43a97a6299b56f44dfb503.jpg \n", + " inflating: /data/dog-breed-identification/train/d20381f965804795464f2e1d77dabcd1.jpg \n", + " inflating: /data/dog-breed-identification/train/d20e2c1d02d16b00f9859e4c7e4d954d.jpg \n", + " inflating: /data/dog-breed-identification/train/d2148e7b0ddc6d8eb6f5a4794e74c1dd.jpg \n", + " inflating: /data/dog-breed-identification/train/d216b973bd75fdf96a9566e159f5f286.jpg \n", + " inflating: /data/dog-breed-identification/train/d2181ad8aa2f5fe3f6ef63b0bae7cc52.jpg \n", + " inflating: /data/dog-breed-identification/train/d21c01d3de8cc6d3c6dd3efaba226412.jpg \n", + " inflating: /data/dog-breed-identification/train/d222748a1eea1d4fd0dc0c7e95c7a312.jpg \n", + " inflating: /data/dog-breed-identification/train/d22fd89f9ec70da2f38fc495812e9324.jpg \n", + " inflating: /data/dog-breed-identification/train/d2358aef08b791249aab787b25c6bee2.jpg \n", + " inflating: /data/dog-breed-identification/train/d23a9459022b02140e847eea64e05440.jpg \n", + " inflating: /data/dog-breed-identification/train/d23b5a08ba6eee9c78379bbe7ed4d64e.jpg \n", + " inflating: /data/dog-breed-identification/train/d2463d0b1c60260ad616be9cc1367493.jpg \n", + " inflating: /data/dog-breed-identification/train/d24723e50b59d53ef53aeb0189c2a73c.jpg \n", + " inflating: /data/dog-breed-identification/train/d247349a06da617a7d7c54281121cebe.jpg \n", + " inflating: /data/dog-breed-identification/train/d247fcdc9fe99f9d151e32fbd0df6d73.jpg \n", + " inflating: /data/dog-breed-identification/train/d248481e21f44ffe66f969af4206b45d.jpg \n", + " inflating: /data/dog-breed-identification/train/d25477a0de377042bccdd6d3dd4c15d0.jpg \n", + " inflating: /data/dog-breed-identification/train/d25a5f80d6980ae97d2941913fc1ddd6.jpg \n", + " inflating: /data/dog-breed-identification/train/d26227912cf7189f6974908b19e0445f.jpg \n", + " inflating: /data/dog-breed-identification/train/d265584fd8255cf0ffb477cdfddd32f9.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d26797ba2b145963df704ac647e06564.jpg \n", + " inflating: /data/dog-breed-identification/train/d26c44803bcb77cc1d4d4d5c1565a561.jpg \n", + " inflating: /data/dog-breed-identification/train/d26ebabd606703ea526163568193fb4d.jpg \n", + " inflating: /data/dog-breed-identification/train/d273dff1609509570c7c9bf78dd3cd82.jpg \n", + " inflating: /data/dog-breed-identification/train/d2740bec1cf993472115c0d40ce6c759.jpg \n", + " inflating: /data/dog-breed-identification/train/d2792a2d53ef85403311ba8210cc4903.jpg \n", + " inflating: /data/dog-breed-identification/train/d27ba79a407c9cf9aeefb026fc965e12.jpg \n", + " inflating: /data/dog-breed-identification/train/d2800b5baf5bf85e6adda6f776d8829a.jpg \n", + " inflating: /data/dog-breed-identification/train/d286e40e002940218197b5251d47e96f.jpg \n", + " inflating: /data/dog-breed-identification/train/d28daf5f2461b81183cfd3dc489e0524.jpg \n", + " inflating: /data/dog-breed-identification/train/d299001b203489dc596f75dd865ab24e.jpg \n", + " inflating: /data/dog-breed-identification/train/d29b0c5038e14572446722d1aacf5200.jpg \n", + " inflating: /data/dog-breed-identification/train/d2a7bd18d1cf5d798648f2a51fab39da.jpg \n", + " inflating: /data/dog-breed-identification/train/d2b547e36b74750fec91ea6e086802eb.jpg \n", + " inflating: /data/dog-breed-identification/train/d2b6a8effca9bffbc4194e752cc37cbb.jpg \n", + " inflating: /data/dog-breed-identification/train/d2cd0fbf8d10eee50d53e8147000015d.jpg \n", + " inflating: /data/dog-breed-identification/train/d2e331df7b048de77bd85ca69288c568.jpg \n", + " inflating: /data/dog-breed-identification/train/d2e61a4e7c51fc88d45245f096df1745.jpg \n", + " inflating: /data/dog-breed-identification/train/d2fa72633103a5c503b4365563bfd3a9.jpg \n", + " inflating: /data/dog-breed-identification/train/d2fb39916010a712b342631075981c23.jpg \n", + " inflating: /data/dog-breed-identification/train/d30bd9a646de2550045bdba1c2f96c0c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d30c9f0e2f1a37fd9e44dfd4d35934d6.jpg \n", + " inflating: /data/dog-breed-identification/train/d3165e9b6451bd8ed6c1377994c81451.jpg \n", + " inflating: /data/dog-breed-identification/train/d31fe375dbfb1591ac809415b81495b1.jpg \n", + " inflating: /data/dog-breed-identification/train/d327e8ca5085a224a03dfcf4a6e984d7.jpg \n", + " inflating: /data/dog-breed-identification/train/d33a9df2a23a1ff3ef29ac6694cc7e79.jpg \n", + " inflating: /data/dog-breed-identification/train/d34ca643d6c19c9943b40af456e62cd3.jpg \n", + " inflating: /data/dog-breed-identification/train/d34fba7b6362358f5bd02827ce6389e8.jpg \n", + " inflating: /data/dog-breed-identification/train/d35eee4b5a67847371ffc082a25e2f03.jpg \n", + " inflating: /data/dog-breed-identification/train/d36c9b5e0bc2aa4c68a3f80892244af5.jpg \n", + " inflating: /data/dog-breed-identification/train/d3706963ae5c533b509872a98542a930.jpg \n", + " inflating: /data/dog-breed-identification/train/d3781e17b2296322306eda8141f6de86.jpg \n", + " inflating: /data/dog-breed-identification/train/d37b46b07275ff05d76b127f046c433e.jpg \n", + " inflating: /data/dog-breed-identification/train/d37b5544840f1aa6a63de67f18043829.jpg \n", + " inflating: /data/dog-breed-identification/train/d37e94590bdeb885b76c81518c8f6ea9.jpg \n", + " inflating: /data/dog-breed-identification/train/d3991dae442c0ff13cb18004a85ebfb5.jpg \n", + " inflating: /data/dog-breed-identification/train/d39daa01c97c879a2cc7e842519c4b37.jpg \n", + " inflating: /data/dog-breed-identification/train/d39ece223b9f8681bc23853cfc6412bc.jpg \n", + " inflating: /data/dog-breed-identification/train/d3a8cdfa1c884e4a2557ca711bcfcfe2.jpg \n", + " inflating: /data/dog-breed-identification/train/d3ab6b3d328b2cd91bed99aee99fa6ed.jpg \n", + " inflating: /data/dog-breed-identification/train/d3b93c2ca76232e541a73083f6f04332.jpg \n", + " inflating: /data/dog-breed-identification/train/d3c5a926ccc8dcbdc1a584a7acdecb61.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d3c6e03d7b80ee6c253b1f3b0ac7c74c.jpg \n", + " inflating: /data/dog-breed-identification/train/d3ca4542578659abdb8a112baac00e99.jpg \n", + " inflating: /data/dog-breed-identification/train/d3cb2264c98b79ae0d65c84f84c4f85d.jpg \n", + " inflating: /data/dog-breed-identification/train/d3ccd246d07f2714cfbb28bd0e44ae30.jpg \n", + " inflating: /data/dog-breed-identification/train/d3d6ad637b0cc2e5830c435b8cad6f2e.jpg \n", + " inflating: /data/dog-breed-identification/train/d3db2b7203e86288b3222d137c45f679.jpg \n", + " inflating: /data/dog-breed-identification/train/d3efb07d4a0dc2d9d1824ca79964a222.jpg \n", + " inflating: /data/dog-breed-identification/train/d3f3306d5a62c333895fa98edfd9a028.jpg \n", + " inflating: /data/dog-breed-identification/train/d40130f4125a8058c4092f1a1e62426c.jpg \n", + " inflating: /data/dog-breed-identification/train/d404d11cf948b5683e066ec50477887f.jpg \n", + " inflating: /data/dog-breed-identification/train/d40d6d4e0f03bf31449179b106ac009d.jpg \n", + " inflating: /data/dog-breed-identification/train/d41481d28f1972447339510abcb6ef62.jpg \n", + " inflating: /data/dog-breed-identification/train/d414d4fc978a4129a6230b03c99333f3.jpg \n", + " inflating: /data/dog-breed-identification/train/d424c291a9c749ad09769d4f721bb54b.jpg \n", + " inflating: /data/dog-breed-identification/train/d425e6d733d2384385d00606307742b5.jpg \n", + " inflating: /data/dog-breed-identification/train/d4282be98a29612cbe5b01173e99f704.jpg \n", + " inflating: /data/dog-breed-identification/train/d42994838dea65d1516398fe86ff80be.jpg \n", + " inflating: /data/dog-breed-identification/train/d4327728d89413c88bcea2b1b5efb706.jpg \n", + " inflating: /data/dog-breed-identification/train/d4345a369e8853520659f0dfca8b92d3.jpg \n", + " inflating: /data/dog-breed-identification/train/d43b3de6be56e23c4894453f12a8179b.jpg \n", + " inflating: /data/dog-breed-identification/train/d43cfcfe55d2b37ec37e1ad1e679a059.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d43d6256b3af171cda8b432de000afb0.jpg \n", + " inflating: /data/dog-breed-identification/train/d4430442fe5cda78d7e482714f7e5ca4.jpg \n", + " inflating: /data/dog-breed-identification/train/d446ebae35501cfb48832e81db2e49d9.jpg \n", + " inflating: /data/dog-breed-identification/train/d44a665b4936c6de124c864feb339f53.jpg \n", + " inflating: /data/dog-breed-identification/train/d4560db788eb0eff61623d3f5e2b58cb.jpg \n", + " inflating: /data/dog-breed-identification/train/d459a1fe285bc112bfe00615390e2632.jpg \n", + " inflating: /data/dog-breed-identification/train/d4602ae68c721a6c7a0839b7f1f69af7.jpg \n", + " inflating: /data/dog-breed-identification/train/d4673be6357164335165192fab6d97e7.jpg \n", + " inflating: /data/dog-breed-identification/train/d468a3e96a0a7249a32e614ab7a4d8fa.jpg \n", + " inflating: /data/dog-breed-identification/train/d476405a5ee545d4acc5d6840d7b2054.jpg \n", + " inflating: /data/dog-breed-identification/train/d47ae5d9f7c17055130be743362f4bfd.jpg \n", + " inflating: /data/dog-breed-identification/train/d47fb5b7a8097959d87cffa7affad58c.jpg \n", + " inflating: /data/dog-breed-identification/train/d47fcfb7f8b4de12cb78b54eb1c62dcb.jpg \n", + " inflating: /data/dog-breed-identification/train/d480f8d24f1479f7669c5def4e3af118.jpg \n", + " inflating: /data/dog-breed-identification/train/d48181ff46af9773a0b91dcc67bf701f.jpg \n", + " inflating: /data/dog-breed-identification/train/d482fb5576279496d6f8d72bf6b93d9f.jpg \n", + " inflating: /data/dog-breed-identification/train/d485c0dcb6754606694968b8b8957f59.jpg \n", + " inflating: /data/dog-breed-identification/train/d48a8aa150e8ec0f97868af26bcc0a93.jpg \n", + " inflating: /data/dog-breed-identification/train/d48ea539bfde72872b5deda0f543030e.jpg \n", + " inflating: /data/dog-breed-identification/train/d49881b37b1528c52dc418d03d81823f.jpg \n", + " inflating: /data/dog-breed-identification/train/d4a1b393012e3aef048be05d784c5158.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d4a4869618c541244f4018aaac00a0b6.jpg \n", + " inflating: /data/dog-breed-identification/train/d4a4c720a052e2004879b0177473d53a.jpg \n", + " inflating: /data/dog-breed-identification/train/d4c244952ac15e6cceebdd7214e31167.jpg \n", + " inflating: /data/dog-breed-identification/train/d4c2dfd1b7aa5af65cd89481fa150ad5.jpg \n", + " inflating: /data/dog-breed-identification/train/d4c40837d07aca3085024b7c160d755b.jpg \n", + " inflating: /data/dog-breed-identification/train/d4c7ab7811b534b7038fadf986d45395.jpg \n", + " inflating: /data/dog-breed-identification/train/d4cc7be2d2a5a82d840c5d0d6438dab4.jpg \n", + " inflating: /data/dog-breed-identification/train/d4d90d481dbbff9729349b3e270a1d37.jpg \n", + " inflating: /data/dog-breed-identification/train/d4dbe4468560bb227aac6627237ab9ec.jpg \n", + " inflating: /data/dog-breed-identification/train/d4e186bdca24be150fa257283f5fa5fb.jpg \n", + " inflating: /data/dog-breed-identification/train/d4f4484540f2edfea1df2c36b536ba48.jpg \n", + " inflating: /data/dog-breed-identification/train/d4f88679b7b7600db5922139d3038d9b.jpg \n", + " inflating: /data/dog-breed-identification/train/d5098803fe8e8ae632cc11df0ec4f399.jpg \n", + " inflating: /data/dog-breed-identification/train/d50ae93eb9d8d56d3b52cf83e991839d.jpg \n", + " inflating: /data/dog-breed-identification/train/d51343763fcca542f1bdc24a1faac202.jpg \n", + " inflating: /data/dog-breed-identification/train/d515076900624c15de25a9b5e45c2ee7.jpg \n", + " inflating: /data/dog-breed-identification/train/d5264d9c70065f4f32ce88bfa519d2c3.jpg \n", + " inflating: /data/dog-breed-identification/train/d529e6b7a8f0191fe43dd6ea8db0c497.jpg \n", + " inflating: /data/dog-breed-identification/train/d52ee32a4263a7c0924a0abc49f91acb.jpg \n", + " inflating: /data/dog-breed-identification/train/d530703b353991efcf62e7e10b8f7ee2.jpg \n", + " inflating: /data/dog-breed-identification/train/d535bf3a2946268c98b27fe05ed7f2ed.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d5367a9c81635759c63cddc526edf3ad.jpg \n", + " inflating: /data/dog-breed-identification/train/d5470b760dc855b84add98748e03bd77.jpg \n", + " inflating: /data/dog-breed-identification/train/d54cbe5a12642bcbef0a24130b926342.jpg \n", + " inflating: /data/dog-breed-identification/train/d554743c028b0ec7d91739cd47e3f33f.jpg \n", + " inflating: /data/dog-breed-identification/train/d559926a39b741a19969b9e634ed1933.jpg \n", + " inflating: /data/dog-breed-identification/train/d55f0b1e0b474c678e1c828211e119e0.jpg \n", + " inflating: /data/dog-breed-identification/train/d56ed1a1f310a47d3144cff906869f6d.jpg \n", + " inflating: /data/dog-breed-identification/train/d5729b46847e1ccef0f318395b0975cd.jpg \n", + " inflating: /data/dog-breed-identification/train/d583fbee2a4c69e9af446ff6939ed478.jpg \n", + " inflating: /data/dog-breed-identification/train/d587009bd0b5c64c7fdb52c9317978c2.jpg \n", + " inflating: /data/dog-breed-identification/train/d58b5b82b7d9ee79e4206e2be1c49528.jpg \n", + " inflating: /data/dog-breed-identification/train/d58d8ea5531a3326f4467cb47bb05b35.jpg \n", + " inflating: /data/dog-breed-identification/train/d599dd3288d6145c299d8b31c92be4be.jpg \n", + " inflating: /data/dog-breed-identification/train/d59ca43ea02ebe2a0b5e626a12cd3df3.jpg \n", + " inflating: /data/dog-breed-identification/train/d5a421e33b9db2f03598af1760c71501.jpg \n", + " inflating: /data/dog-breed-identification/train/d5a4beb5576f71087d740d1137e4513f.jpg \n", + " inflating: /data/dog-breed-identification/train/d5a99328bc26378c55e4bf77fcaea3b1.jpg \n", + " inflating: /data/dog-breed-identification/train/d5a9fac2fa046e3649c845aa9d286e98.jpg \n", + " inflating: /data/dog-breed-identification/train/d5ac49bf70fd512c9a38270fbe768177.jpg \n", + " inflating: /data/dog-breed-identification/train/d5b01922a98bd60cd867e7b6d62039f2.jpg \n", + " inflating: /data/dog-breed-identification/train/d5b91a7590c1510b727e1cf4925687ab.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d5b98aa5af9ecf0987b25921ee09ccaa.jpg \n", + " inflating: /data/dog-breed-identification/train/d5bfebf92804b7fc76ad6d7190d2be2e.jpg \n", + " inflating: /data/dog-breed-identification/train/d5c742090f2d1bd616784d96a59bf941.jpg \n", + " inflating: /data/dog-breed-identification/train/d5c9f2708cfc84beb1f5af5b74a5b92d.jpg \n", + " inflating: /data/dog-breed-identification/train/d5d1b3e622ddca79975b7d90708cd7d8.jpg \n", + " inflating: /data/dog-breed-identification/train/d5d856e5a6b16e448dcbdd057e280e62.jpg \n", + " inflating: /data/dog-breed-identification/train/d5db9bd4d022fc34ac15a27bd99b8e02.jpg \n", + " inflating: /data/dog-breed-identification/train/d5dc59aae5cd233f9d03e10ec567ac08.jpg \n", + " inflating: /data/dog-breed-identification/train/d5f0bb0a30174fb308fc79df04ad5ead.jpg \n", + " inflating: /data/dog-breed-identification/train/d5f0de591ddb85ac62ae785fb3e316e9.jpg \n", + " inflating: /data/dog-breed-identification/train/d5fb95476c9b8333bc06ecb81fa60511.jpg \n", + " inflating: /data/dog-breed-identification/train/d5fc64a6e484b5ea765358bb74aee4b2.jpg \n", + " inflating: /data/dog-breed-identification/train/d600da0fd3fa1bde3ed69e1442aed8a1.jpg \n", + " inflating: /data/dog-breed-identification/train/d612ce8febeb6a1d74add4669a002530.jpg \n", + " inflating: /data/dog-breed-identification/train/d61a270df934b36a3d04fa329ffedc89.jpg \n", + " inflating: /data/dog-breed-identification/train/d62be3aa1b627562ab1b85ce73f9fede.jpg \n", + " inflating: /data/dog-breed-identification/train/d630a84482a95f1f4789b816c268ab1a.jpg \n", + " inflating: /data/dog-breed-identification/train/d63348c8d835cd499da54d8de6a11803.jpg \n", + " inflating: /data/dog-breed-identification/train/d63508200d0c38766216f3ef91cc59d5.jpg \n", + " inflating: /data/dog-breed-identification/train/d639b33dd3a399d201f995b8d9915a62.jpg \n", + " inflating: /data/dog-breed-identification/train/d63acb5d222016e636daaf98d6b2c810.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d6525a312a447edce56a6a10c8741afc.jpg \n", + " inflating: /data/dog-breed-identification/train/d654df8cbee8ba4336355a692a3b8468.jpg \n", + " inflating: /data/dog-breed-identification/train/d6574a7c2c804a33cf3d29d917923349.jpg \n", + " inflating: /data/dog-breed-identification/train/d65bc49d1cf6be1fe8fd5798c4a28e08.jpg \n", + " inflating: /data/dog-breed-identification/train/d66104727a2a8cdfd3f3a0fb17bdedc9.jpg \n", + " inflating: /data/dog-breed-identification/train/d66ec4c83a620cca6ebf05ab9d162fcd.jpg \n", + " inflating: /data/dog-breed-identification/train/d67195583fae5a5cce6fd767ccddede1.jpg \n", + " inflating: /data/dog-breed-identification/train/d6857f130c251f01ab973358cbfccce1.jpg \n", + " inflating: /data/dog-breed-identification/train/d686e2bf8438d2a5c29924efca616708.jpg \n", + " inflating: /data/dog-breed-identification/train/d68803c168dd707b81df84496ea2f59b.jpg \n", + " inflating: /data/dog-breed-identification/train/d68aa9ca432fb8c9b97b756ac1d5e4d2.jpg \n", + " inflating: /data/dog-breed-identification/train/d68bf61158dfabeb1bd97cdec84fe9c6.jpg \n", + " inflating: /data/dog-breed-identification/train/d68f1be8128bae9979f58ceda3c76c50.jpg \n", + " inflating: /data/dog-breed-identification/train/d69249ffd14a08732369495e7ec37eff.jpg \n", + " inflating: /data/dog-breed-identification/train/d694275d5ba5d57320e250535dd984fe.jpg \n", + " inflating: /data/dog-breed-identification/train/d694d01ba01c34fcf8b27aa93d0a2708.jpg \n", + " inflating: /data/dog-breed-identification/train/d6978271a12a1019bcb9fb57350b015e.jpg \n", + " inflating: /data/dog-breed-identification/train/d6a17a79cc8407190ba724064186bb45.jpg \n", + " inflating: /data/dog-breed-identification/train/d6a2e5bacf224fcbb6fafff451bd1bd2.jpg \n", + " inflating: /data/dog-breed-identification/train/d6ab190c782baa4655046bf1c8542973.jpg \n", + " inflating: /data/dog-breed-identification/train/d6b7723ebfd80e90faf4890affb89252.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d6bcaac6c121bd89f127f6b839959510.jpg \n", + " inflating: /data/dog-breed-identification/train/d6caade773e0ae660917883ac8520fa4.jpg \n", + " inflating: /data/dog-breed-identification/train/d6d26cf4c152fefe3d4515fdb814cb40.jpg \n", + " inflating: /data/dog-breed-identification/train/d6d7678b8bddeb7a37789b17535d0b57.jpg \n", + " inflating: /data/dog-breed-identification/train/d6f09113efee7ca0f4903b4fdf42a02b.jpg \n", + " inflating: /data/dog-breed-identification/train/d6f37091a690b403411cc4066a4a83e3.jpg \n", + " inflating: /data/dog-breed-identification/train/d6fb643ed203d7d01db9a4744526f2ea.jpg \n", + " inflating: /data/dog-breed-identification/train/d6fbd70900969f939deaa4f101408858.jpg \n", + " inflating: /data/dog-breed-identification/train/d700f7428d738a40cff8266b4810e494.jpg \n", + " inflating: /data/dog-breed-identification/train/d7011c4c503f8ae150594ac9d3e1d757.jpg \n", + " inflating: /data/dog-breed-identification/train/d70233cd858bc0dd50ed351ae30a5432.jpg \n", + " inflating: /data/dog-breed-identification/train/d7113712b0ecaf9ba397d7bb06624397.jpg \n", + " inflating: /data/dog-breed-identification/train/d717947b987bf5d60e19ac963c548b74.jpg \n", + " inflating: /data/dog-breed-identification/train/d71bfc3b16d6e39fc726f47a93d4cebb.jpg \n", + " inflating: /data/dog-breed-identification/train/d71e26c0af558afe29efb7f41ec33900.jpg \n", + " inflating: /data/dog-breed-identification/train/d726e3be3e78245d2ea2cd5bc82d8ddc.jpg \n", + " inflating: /data/dog-breed-identification/train/d7323663e7f5bac3ca0e1f1fa2f6aaba.jpg \n", + " inflating: /data/dog-breed-identification/train/d73482c6d7e15da74be6477536e7619c.jpg \n", + " inflating: /data/dog-breed-identification/train/d739b294a3652ef517b86d9b79770910.jpg \n", + " inflating: /data/dog-breed-identification/train/d73ca370d7d89f706f09120845d7c283.jpg \n", + " inflating: /data/dog-breed-identification/train/d7413008ce8567c15e05cb6894ec49a2.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d7442f05504d23c1707d059a9aedf36a.jpg \n", + " inflating: /data/dog-breed-identification/train/d744d79f41606dbad323b3880ded77cb.jpg \n", + " inflating: /data/dog-breed-identification/train/d7563303ddc600a8bb95b7065634d41f.jpg \n", + " inflating: /data/dog-breed-identification/train/d7613d9f96bd0d5e7e1fd38a3f614d06.jpg \n", + " inflating: /data/dog-breed-identification/train/d7621d1161f69c95b4c527618db4f234.jpg \n", + " inflating: /data/dog-breed-identification/train/d763d3a2442894c122da5598ed6b7b7a.jpg \n", + " inflating: /data/dog-breed-identification/train/d76542b37390263a18c2c328e9d35125.jpg \n", + " inflating: /data/dog-breed-identification/train/d7654b32fa71931ebffc7508f85d25f8.jpg \n", + " inflating: /data/dog-breed-identification/train/d7667fb74a7d171c5254c531ec53e7ae.jpg \n", + " inflating: /data/dog-breed-identification/train/d76883e92e76966fe382daa2ff57afd6.jpg \n", + " inflating: /data/dog-breed-identification/train/d76a1cf6c6577f55114001bc8fa1d028.jpg \n", + " inflating: /data/dog-breed-identification/train/d7750c85b3e5d0b3cb7adaffa5474a11.jpg \n", + " inflating: /data/dog-breed-identification/train/d77cbf53aedd8776340977e523f61080.jpg \n", + " inflating: /data/dog-breed-identification/train/d787b05937539cb43d0403e4a0bc3ccc.jpg \n", + " inflating: /data/dog-breed-identification/train/d78b236c7c0016b7e6327e77f3728a42.jpg \n", + " inflating: /data/dog-breed-identification/train/d793a68896bf50ec6b19adbf2770b450.jpg \n", + " inflating: /data/dog-breed-identification/train/d7a98b492b4567df962762fb7ff3cdee.jpg \n", + " inflating: /data/dog-breed-identification/train/d7aaddb8f9b021acc82989d6f4144710.jpg \n", + " inflating: /data/dog-breed-identification/train/d7b8e8b5851e49502dffc5f9654743be.jpg \n", + " inflating: /data/dog-breed-identification/train/d7c47fe9ebf2860b0483d76818c1d000.jpg \n", + " inflating: /data/dog-breed-identification/train/d7c69184308bb29224ef9499292abb09.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d7c8a92ed39f40c397ec8fa7ddb3fbe3.jpg \n", + " inflating: /data/dog-breed-identification/train/d7cee5c0a1a30d94c7bb60b47c699d90.jpg \n", + " inflating: /data/dog-breed-identification/train/d7db04ffefefce6e6a1ddedbd350a38f.jpg \n", + " inflating: /data/dog-breed-identification/train/d7f3da9914b26cccb9f9e86a977545f5.jpg \n", + " inflating: /data/dog-breed-identification/train/d804a278c01fff883a53104d1aabe307.jpg \n", + " inflating: /data/dog-breed-identification/train/d820e2f5f916d74a31c1689e00e5311d.jpg \n", + " inflating: /data/dog-breed-identification/train/d8213d9a6c78738f4136606db0b88362.jpg \n", + " inflating: /data/dog-breed-identification/train/d82595dca7ce016827454328539a5bb7.jpg \n", + " inflating: /data/dog-breed-identification/train/d82604c1aa6728c546eaf1c3a85f980d.jpg \n", + " inflating: /data/dog-breed-identification/train/d8275152fe9027835bec0c405d131a2b.jpg \n", + " inflating: /data/dog-breed-identification/train/d82b5016c8001998b9b9a1cf22806350.jpg \n", + " inflating: /data/dog-breed-identification/train/d83b16a2a950d7a6b137c0a6fea98e27.jpg \n", + " inflating: /data/dog-breed-identification/train/d83c437073ab4eaa53b733a1c1615131.jpg \n", + " inflating: /data/dog-breed-identification/train/d8488f933f021216d7a39f6e8f1bc9c5.jpg \n", + " inflating: /data/dog-breed-identification/train/d84cd331a97cb5ee7d08974724f7055f.jpg \n", + " inflating: /data/dog-breed-identification/train/d85f51038f76b7ac987b34aeb89e99f3.jpg \n", + " inflating: /data/dog-breed-identification/train/d85f6489eea87d3a09bb93c2e55b661d.jpg \n", + " inflating: /data/dog-breed-identification/train/d8611e2c4409411ea1cea007e2c456ce.jpg \n", + " inflating: /data/dog-breed-identification/train/d8618518fa937be5f5754a6941398edf.jpg \n", + " inflating: /data/dog-breed-identification/train/d8620b5e2fe5d187ac1cec20f14e25ff.jpg \n", + " inflating: /data/dog-breed-identification/train/d8689e70969ba9533bcabee3cfbf697c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d875ddeeafd44e1404b4ce4f376a3922.jpg \n", + " inflating: /data/dog-breed-identification/train/d8785cb92c09d3810e4ea86b5386316e.jpg \n", + " inflating: /data/dog-breed-identification/train/d879e7d42ca6540a86ec8c5c42b8e167.jpg \n", + " inflating: /data/dog-breed-identification/train/d88d9d949d676cd61c9bd5fd7e50acf0.jpg \n", + " inflating: /data/dog-breed-identification/train/d88fe1dcc5296a6aae4528f6fa6ad52b.jpg \n", + " inflating: /data/dog-breed-identification/train/d891f61cd3b9c7e32a6a5ddbdc497744.jpg \n", + " inflating: /data/dog-breed-identification/train/d8951830ecd4b5898a6484327dc6f294.jpg \n", + " inflating: /data/dog-breed-identification/train/d8959181b38109cbbba0da2db235f47b.jpg \n", + " inflating: /data/dog-breed-identification/train/d89849a8b1519f2b26597dc4b405c4c3.jpg \n", + " inflating: /data/dog-breed-identification/train/d89a7d0ac27039b8dbadc1cafe221079.jpg \n", + " inflating: /data/dog-breed-identification/train/d8b283801fa52bc167acb47c554e18a5.jpg \n", + " inflating: /data/dog-breed-identification/train/d8b4d06e0021ce684c1682011cc2687d.jpg \n", + " inflating: /data/dog-breed-identification/train/d8b93a3e7f2635d7487dd2749c194e22.jpg \n", + " inflating: /data/dog-breed-identification/train/d8ba24f3c900cbc769a3b1771eff8251.jpg \n", + " inflating: /data/dog-breed-identification/train/d8c56d35f5c43e257ca91b64d3d86fb7.jpg \n", + " inflating: /data/dog-breed-identification/train/d8caec177609c9c618028a270d3fa85c.jpg \n", + " inflating: /data/dog-breed-identification/train/d8dcac39ea96e817cbaa716e31f58e26.jpg \n", + " inflating: /data/dog-breed-identification/train/d8e25785ba02df22a6235ed9fbafd6a9.jpg \n", + " inflating: /data/dog-breed-identification/train/d8e39c28a398072fb76193c402f20405.jpg \n", + " inflating: /data/dog-breed-identification/train/d8e4509a786dc5d844ebb20d45d66f05.jpg \n", + " inflating: /data/dog-breed-identification/train/d8e49c71d93569fffe00086e6998cce4.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d8e9eadd8cdaa3a14df6c8351a09eb00.jpg \n", + " inflating: /data/dog-breed-identification/train/d8eee7c7a25bdb29844024d3451cd6ec.jpg \n", + " inflating: /data/dog-breed-identification/train/d8f48a0fd6c36ac06eb1fe4f1b8d5a83.jpg \n", + " inflating: /data/dog-breed-identification/train/d8fa88133b8415afdf9de6fe1e2fef94.jpg \n", + " inflating: /data/dog-breed-identification/train/d9028d963306ad99dc5f55b94bc47f5b.jpg \n", + " inflating: /data/dog-breed-identification/train/d9051c00e8c0436e6055f77d845b64a9.jpg \n", + " inflating: /data/dog-breed-identification/train/d906ce11e75fd245403be6c907bf6d3b.jpg \n", + " inflating: /data/dog-breed-identification/train/d91689717c8333406434d3511990958d.jpg \n", + " inflating: /data/dog-breed-identification/train/d91761b9302c37487710a4d8f890ba97.jpg \n", + " inflating: /data/dog-breed-identification/train/d9195974648b224678fb0ef0b86007d7.jpg \n", + " inflating: /data/dog-breed-identification/train/d91c7e2d76abccec81c24a556dd54435.jpg \n", + " inflating: /data/dog-breed-identification/train/d92166adb57cd563a2a9bc92be3ddec0.jpg \n", + " inflating: /data/dog-breed-identification/train/d9232340fa1f7da3da085dfbb9485dca.jpg \n", + " inflating: /data/dog-breed-identification/train/d934720e6736c27c3983574b5d70e4fe.jpg \n", + " inflating: /data/dog-breed-identification/train/d935132a02a3a6057dabe4ab571cc3f9.jpg \n", + " inflating: /data/dog-breed-identification/train/d93bed54dcc81a02081dc07acaddfe0f.jpg \n", + " inflating: /data/dog-breed-identification/train/d9483ad58c5e1ae2e73ac0de09aac90d.jpg \n", + " inflating: /data/dog-breed-identification/train/d94d0ab0f504e4ff8315d02259deade9.jpg \n", + " inflating: /data/dog-breed-identification/train/d94ed3afc66a657545fb5eadaa1e0e63.jpg \n", + " inflating: /data/dog-breed-identification/train/d953e41d5f3e8ff39a48a48db4d146a3.jpg \n", + " inflating: /data/dog-breed-identification/train/d958476b5a7e9699c55c0d3c9af0043c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d959917d3824b2d501d32b17055d88f8.jpg \n", + " inflating: /data/dog-breed-identification/train/d95e7320281728b297f6ef83284dfddc.jpg \n", + " inflating: /data/dog-breed-identification/train/d96559602badf0d764001d19336a27b0.jpg \n", + " inflating: /data/dog-breed-identification/train/d9686219c1124bc134f0117db1996a6d.jpg \n", + " inflating: /data/dog-breed-identification/train/d96c7a1efc7455d533bb2beb4da27896.jpg \n", + " inflating: /data/dog-breed-identification/train/d96f4ca79cee3713af5a08b5d1aeea17.jpg \n", + " inflating: /data/dog-breed-identification/train/d971e8cc02852ea0576cf9ecb295c782.jpg \n", + " inflating: /data/dog-breed-identification/train/d974d73918177dd97349f07bff519f3e.jpg \n", + " inflating: /data/dog-breed-identification/train/d979688962729fa2cf50af1c9b478dca.jpg \n", + " inflating: /data/dog-breed-identification/train/d97ed8623808e6d34bab76b758fa14f7.jpg \n", + " inflating: /data/dog-breed-identification/train/d99530d6a2f522178e3dd6acfcfe1e75.jpg \n", + " inflating: /data/dog-breed-identification/train/d99cc713b1bfe4b3786e462884c303aa.jpg \n", + " inflating: /data/dog-breed-identification/train/d99e075b2610dd70e27a95c5f16e3b8d.jpg \n", + " inflating: /data/dog-breed-identification/train/d9ab74f0d0afc57158e44c6956883a2d.jpg \n", + " inflating: /data/dog-breed-identification/train/d9ba83cfc833f82120d0ca495c3cd06d.jpg \n", + " inflating: /data/dog-breed-identification/train/d9bc4f2038e12bf6cba88291d59e7307.jpg \n", + " inflating: /data/dog-breed-identification/train/d9c2cee7a77e1415dc97042e2c5458de.jpg \n", + " inflating: /data/dog-breed-identification/train/d9c93635483f6537ab13d13ac5871f93.jpg \n", + " inflating: /data/dog-breed-identification/train/d9caf3d8e28c9ccbd2989483875c0ce4.jpg \n", + " inflating: /data/dog-breed-identification/train/d9dfa1ef019b07ccbf09deeed7093cd3.jpg \n", + " inflating: /data/dog-breed-identification/train/d9e02a65ba3b640f29e3e0bbf9a70ecc.jpg \n", + " inflating: 
/data/dog-breed-identification/train/d9e7afce80b4ae156de2cf76307872db.jpg \n", + " inflating: /data/dog-breed-identification/train/d9f0ac78472ce66dd48364521d396835.jpg \n", + " inflating: /data/dog-breed-identification/train/d9f6be0ded9152906b4b5dfdaf588941.jpg \n", + " inflating: /data/dog-breed-identification/train/d9f80faa882c53ef381fbbb1ebbf5a3a.jpg \n", + " inflating: /data/dog-breed-identification/train/d9fec2e53835b302c7c5d1d6803eb4f2.jpg \n", + " inflating: /data/dog-breed-identification/train/da06e7e70ec6a779f1faff2e40bae448.jpg \n", + " inflating: /data/dog-breed-identification/train/da1f66793891e95d2790e4b73170f71d.jpg \n", + " inflating: /data/dog-breed-identification/train/da2967ce6869976d50ff6dfcb70e1381.jpg \n", + " inflating: /data/dog-breed-identification/train/da37b8c9ea48f3db36bd638b290980ed.jpg \n", + " inflating: /data/dog-breed-identification/train/da396ce6d0e1c98f796c8ef8f3c7fcd4.jpg \n", + " inflating: /data/dog-breed-identification/train/da39ac1c77a8bc5ee24f592495e9f885.jpg \n", + " inflating: /data/dog-breed-identification/train/da438857d2816db182869858f4f3efbc.jpg \n", + " inflating: /data/dog-breed-identification/train/da45ea76560481c8c47b6c8f80507eee.jpg \n", + " inflating: /data/dog-breed-identification/train/da51d38f2103b60200efa73c6ec5bdac.jpg \n", + " inflating: /data/dog-breed-identification/train/da5f71e1cf0d1750a0c3929310083a30.jpg \n", + " inflating: /data/dog-breed-identification/train/da63e1fafa687aec77bc81463cfe9a55.jpg \n", + " inflating: /data/dog-breed-identification/train/da6914ff0697969a97c54571be43d561.jpg \n", + " inflating: /data/dog-breed-identification/train/da78fe89c55869ac6d1e1bbbd62c72c6.jpg \n", + " inflating: /data/dog-breed-identification/train/da7f53bbd8a025e32269552e648afe43.jpg \n", + " inflating: /data/dog-breed-identification/train/da82351431ac8992359ffca87bca053d.jpg \n", + " inflating: /data/dog-breed-identification/train/da879ae0affd9dc7ffa761df0a16a2b6.jpg \n", + " inflating: 
/data/dog-breed-identification/train/daa59e97cb75d22b9691d48bdee59f53.jpg \n", + " inflating: /data/dog-breed-identification/train/dab2023fc149d0c42f905ffaa78b2fa6.jpg \n", + " inflating: /data/dog-breed-identification/train/dab33799bceb2387a3daea652bfd8773.jpg \n", + " inflating: /data/dog-breed-identification/train/dab5753cbc9cc7452e1d69077193e65b.jpg \n", + " inflating: /data/dog-breed-identification/train/dabb3c1034978cadfabf60eadcf08818.jpg \n", + " inflating: /data/dog-breed-identification/train/dabc6c904d23d787b5ef8667a68ef200.jpg \n", + " inflating: /data/dog-breed-identification/train/dabd93944d34d72b6719c3e99ae2694a.jpg \n", + " inflating: /data/dog-breed-identification/train/dabf6f388c11227eb5d64b984e6d14b8.jpg \n", + " inflating: /data/dog-breed-identification/train/dac080b6ac2c9d3013b3762e45296955.jpg \n", + " inflating: /data/dog-breed-identification/train/dac0943bf45a8f3f1a8f374cf72dc50e.jpg \n", + " inflating: /data/dog-breed-identification/train/dac29948d89054e2ede1b957b53d3ec6.jpg \n", + " inflating: /data/dog-breed-identification/train/dac34d5005d8ca61121ee074936ecc65.jpg \n", + " inflating: /data/dog-breed-identification/train/dac79f436cc24744d951ab759c35a59f.jpg \n", + " inflating: /data/dog-breed-identification/train/daccbd24a803594b1860570d839ae3bf.jpg \n", + " inflating: /data/dog-breed-identification/train/dad1410de6bd70cba79abb5dbfda17af.jpg \n", + " inflating: /data/dog-breed-identification/train/dad425aa6e2d08d0743e6ffc4bfc407b.jpg \n", + " inflating: /data/dog-breed-identification/train/dad5389cf2e65a822da16313144d78dd.jpg \n", + " inflating: /data/dog-breed-identification/train/dad80a5e51ac5266dda64b922118e6b4.jpg \n", + " inflating: /data/dog-breed-identification/train/daddd59cf41b76381b8433d6d0f5659b.jpg \n", + " inflating: /data/dog-breed-identification/train/dade054c9c79ab86ba523927c89d9858.jpg \n", + " inflating: /data/dog-breed-identification/train/dae1b84026d288c5299d28297814045f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/dae3c0bf3f109087f31b89a7e5328e56.jpg \n", + " inflating: /data/dog-breed-identification/train/daef9ef628860dee42415373fa2357a3.jpg \n", + " inflating: /data/dog-breed-identification/train/daf1dde58cb4cb4d55bf4e4874e76153.jpg \n", + " inflating: /data/dog-breed-identification/train/daf211eab9f3b51eadc03817f41ae9ec.jpg \n", + " inflating: /data/dog-breed-identification/train/daf40879d5b5ac9c66bc6cde2983ab43.jpg \n", + " inflating: /data/dog-breed-identification/train/daf558fa86ca539f975a8af9e2f6ae11.jpg \n", + " inflating: /data/dog-breed-identification/train/daf771230632470aa69c9da7deb92a57.jpg \n", + " inflating: /data/dog-breed-identification/train/db0110977a7e472f26d3559c363cb91f.jpg \n", + " inflating: /data/dog-breed-identification/train/db05ee54cbc55c69f155cf90cab5cddf.jpg \n", + " inflating: /data/dog-breed-identification/train/db14ac3935698082d5e1424f1f52f0e4.jpg \n", + " inflating: /data/dog-breed-identification/train/db19763a49f08e0e887b874be8c88e99.jpg \n", + " inflating: /data/dog-breed-identification/train/db1ef03ff78b53a0aff5d2642bdbc885.jpg \n", + " inflating: /data/dog-breed-identification/train/db22ca12428ebe70eb4e176973ee3cbb.jpg \n", + " inflating: /data/dog-breed-identification/train/db24ecad88218324139f9e2f65cfbc85.jpg \n", + " inflating: /data/dog-breed-identification/train/db2834327d66fde21b236b5506c521d4.jpg \n", + " inflating: /data/dog-breed-identification/train/db2d64f588c6c0b2394dde9db440daae.jpg \n", + " inflating: /data/dog-breed-identification/train/db2ff2c534e4e60b659e2d579f93b327.jpg \n", + " inflating: /data/dog-breed-identification/train/db319f1771b92bfc20d4156fd05760a2.jpg \n", + " inflating: /data/dog-breed-identification/train/db357e6642c684e75ee8dbf7305c9f47.jpg \n", + " inflating: /data/dog-breed-identification/train/db3659a1c09ec64e6b39b53b9b5ac30f.jpg \n", + " inflating: /data/dog-breed-identification/train/db39d09f070ad4584c599dbf76909dcd.jpg \n", + " inflating: 
/data/dog-breed-identification/train/db3cb1060eb49e6fbdbe08082b8626d1.jpg \n", + " inflating: /data/dog-breed-identification/train/db3e4f1de38bee86c0c1aba6f847d6db.jpg \n", + " inflating: /data/dog-breed-identification/train/db431821761a83efec50967b2a04e289.jpg \n", + " inflating: /data/dog-breed-identification/train/db466cbb0c6f243fa38cbd7c1ff6c7b4.jpg \n", + " inflating: /data/dog-breed-identification/train/db4755219032571f2faed853d0dded0a.jpg \n", + " inflating: /data/dog-breed-identification/train/db4e4609382f8d359d30f758d9abc9df.jpg \n", + " inflating: /data/dog-breed-identification/train/db51937bad556c0de62dddc039b49dba.jpg \n", + " inflating: /data/dog-breed-identification/train/db57320475dd90d5e070882d0804a629.jpg \n", + " inflating: /data/dog-breed-identification/train/db5b24455f0bc4c78ecb69771eb3e7a5.jpg \n", + " inflating: /data/dog-breed-identification/train/db5cfed638eaaa1e627f620830129952.jpg \n", + " inflating: /data/dog-breed-identification/train/db5d30c5c1c47af92c4fdfe59282cfbb.jpg \n", + " inflating: /data/dog-breed-identification/train/db5dfb60b06747b115402c9562959e12.jpg \n", + " inflating: /data/dog-breed-identification/train/db62f4b3d619612dbbade4be83a24a14.jpg \n", + " inflating: /data/dog-breed-identification/train/db7aa8aa2ec0ffecda4127bad65ceac4.jpg \n", + " inflating: /data/dog-breed-identification/train/db7d4420a51a8928e82bcb1d9a6c4167.jpg \n", + " inflating: /data/dog-breed-identification/train/db80c33cdf2c6723380d77e0f7bc7abe.jpg \n", + " inflating: /data/dog-breed-identification/train/db84277a5adca6b4a349396f6070287f.jpg \n", + " inflating: /data/dog-breed-identification/train/db8cdf3de2c08f37e50c29b89a0bfa2d.jpg \n", + " inflating: /data/dog-breed-identification/train/db906850433ed033317baa34a0905fb4.jpg \n", + " inflating: /data/dog-breed-identification/train/db953c4fb2ef6767dd6c817574b4e447.jpg \n", + " inflating: /data/dog-breed-identification/train/db9f0d772b4b3b2452c7d6f92bb35f9c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/dba4ae5fe8b6c1651111ce96c6fbacb4.jpg \n", + " inflating: /data/dog-breed-identification/train/dbb4eaefd7d8c11637d1ee5c2c819427.jpg \n", + " inflating: /data/dog-breed-identification/train/dbb75e40f88b9703dbfe196f6ec57b3b.jpg \n", + " inflating: /data/dog-breed-identification/train/dbbdbfbe8184ff95c6f9f6838b0aefb8.jpg \n", + " inflating: /data/dog-breed-identification/train/dbc7b27136e58946665a82f4ff6209ba.jpg \n", + " inflating: /data/dog-breed-identification/train/dbd006d2d983a1b52238dc293d5139c0.jpg \n", + " inflating: /data/dog-breed-identification/train/dbde111456535431c6caedfa5b8c8dea.jpg \n", + " inflating: /data/dog-breed-identification/train/dbe9a2868d227a12197fb6c44b93d85a.jpg \n", + " inflating: /data/dog-breed-identification/train/dbea4a468416494d088a8854218b099c.jpg \n", + " inflating: /data/dog-breed-identification/train/dbf58e6de2a26b9f6f26c1c8723af964.jpg \n", + " inflating: /data/dog-breed-identification/train/dbfff28982e6d9f71c4491d3f9246fac.jpg \n", + " inflating: /data/dog-breed-identification/train/dc068bd9527c4b3d7cf62fa73ae23bbe.jpg \n", + " inflating: /data/dog-breed-identification/train/dc07c140b26ba8f2c75770f00229270b.jpg \n", + " inflating: /data/dog-breed-identification/train/dc144b931fbfbd2cefa8718b9a2cd5f2.jpg \n", + " inflating: /data/dog-breed-identification/train/dc17313b78b555caab983c9270dd3ab9.jpg \n", + " inflating: /data/dog-breed-identification/train/dc17875272a541c7c8afa2a8451e801d.jpg \n", + " inflating: /data/dog-breed-identification/train/dc19a7f22b2f78763083633f21db6602.jpg \n", + " inflating: /data/dog-breed-identification/train/dc1c430fe26e231d4c8e13d2ee14c5ad.jpg \n", + " inflating: /data/dog-breed-identification/train/dc226c922a709fd866d25d7d93b859c9.jpg \n", + " inflating: /data/dog-breed-identification/train/dc28c131838c21c74b77e2c4f411fd4b.jpg \n", + " inflating: /data/dog-breed-identification/train/dc3d4d214533950f6465ec2483af39c0.jpg \n", + " inflating: 
/data/dog-breed-identification/train/dc44465b8caedd8d06f52a7482036a02.jpg \n", + " inflating: /data/dog-breed-identification/train/dc56a2b217661f2b3b521d8f58c1504b.jpg \n", + " inflating: /data/dog-breed-identification/train/dc58e98a94ff4d49ff7242b5b860f167.jpg \n", + " inflating: /data/dog-breed-identification/train/dc5ae45eebbefb3cb9dc42550487aaeb.jpg \n", + " inflating: /data/dog-breed-identification/train/dc5ce2648529adf29ed15b7180e0850f.jpg \n", + " inflating: /data/dog-breed-identification/train/dc73bc42f35f710db421ad16a066d846.jpg \n", + " inflating: /data/dog-breed-identification/train/dc893fffb650488d1d9afa5fa70a7b73.jpg \n", + " inflating: /data/dog-breed-identification/train/dc99929daa2c14970c44afaf16ff502f.jpg \n", + " inflating: /data/dog-breed-identification/train/dc99d12a7cabf5d737be5a74bf56c3a9.jpg \n", + " inflating: /data/dog-breed-identification/train/dc9e5324fa4d1b4cdeede9b031d3fdf1.jpg \n", + " inflating: /data/dog-breed-identification/train/dca86fc4e9461bb4fc110bf1e13274ed.jpg \n", + " inflating: /data/dog-breed-identification/train/dcaa788b4a781d17cfa724b27d7c51fd.jpg \n", + " inflating: /data/dog-breed-identification/train/dcb46e9b28b12225c4897b86bea16dbe.jpg \n", + " inflating: /data/dog-breed-identification/train/dcc19db6b4e117d5e4f7d1b00fbe3982.jpg \n", + " inflating: /data/dog-breed-identification/train/dcc865c3a438f9d0f8588dd7d1e0aa09.jpg \n", + " inflating: /data/dog-breed-identification/train/dccc38e7901939c3181a71da57105c46.jpg \n", + " inflating: /data/dog-breed-identification/train/dccd2dd384090bfdf9105015167aed23.jpg \n", + " inflating: /data/dog-breed-identification/train/dcd8ee5311136967d14a2b4592963da1.jpg \n", + " inflating: /data/dog-breed-identification/train/dcd960c964daef0971659471de7b3992.jpg \n", + " inflating: /data/dog-breed-identification/train/dcda81d6b22e37e4c0fc36a383a61e73.jpg \n", + " inflating: /data/dog-breed-identification/train/dcec7c0803824f23b3faa75257d0daa4.jpg \n", + " inflating: 
/data/dog-breed-identification/train/dcf58c0dac2664e94f476fdc56aa7ffc.jpg \n", + " inflating: /data/dog-breed-identification/train/dcf5b9d3ab80c6829b10d20b72b67746.jpg \n", + " inflating: /data/dog-breed-identification/train/dcf871b987529e8b80b6e058e6150691.jpg \n", + " inflating: /data/dog-breed-identification/train/dcf879aab841a13c41194ec90bc0fd7c.jpg \n", + " inflating: /data/dog-breed-identification/train/dcfa82b89aa735341c1d5dc52f331b89.jpg \n", + " inflating: /data/dog-breed-identification/train/dcfb91097cb730519db11709821e3afb.jpg \n", + " inflating: /data/dog-breed-identification/train/dcfe4f0f7d7c091e8488bff8662582be.jpg \n", + " inflating: /data/dog-breed-identification/train/dd0834325da4d5500f22614308e76d9f.jpg \n", + " inflating: /data/dog-breed-identification/train/dd10fca78ab9624a15f55293c36ad292.jpg \n", + " inflating: /data/dog-breed-identification/train/dd126e42b474c3831f8fda33052428c1.jpg \n", + " inflating: /data/dog-breed-identification/train/dd1306492b2087d880af68186b145604.jpg \n", + " inflating: /data/dog-breed-identification/train/dd19ba0779065dbab0c28842fdf5851f.jpg \n", + " inflating: /data/dog-breed-identification/train/dd1b3f07d67fc801884d25d085441f06.jpg \n", + " inflating: /data/dog-breed-identification/train/dd1d181a7224fa5a1a7c1fae05eec93d.jpg \n", + " inflating: /data/dog-breed-identification/train/dd1ec05369df96f7b53cf3a7db056e99.jpg \n", + " inflating: /data/dog-breed-identification/train/dd2026cafded2318431c3c78f5026fc9.jpg \n", + " inflating: /data/dog-breed-identification/train/dd273d04d5bd6da804f278fc767c6ca8.jpg \n", + " inflating: /data/dog-breed-identification/train/dd27b4e1da87a6db84d1aa08f908c95d.jpg \n", + " inflating: /data/dog-breed-identification/train/dd28cda8dfc9a97642d3333722a520af.jpg \n", + " inflating: /data/dog-breed-identification/train/dd3509cbfcfc094e5e4726b648a26048.jpg \n", + " inflating: /data/dog-breed-identification/train/dd4449d7083cbce6c5dbc4d66091652d.jpg \n", + " inflating: 
/data/dog-breed-identification/train/dd48373a8d9f30daf5f7b51a3beb1325.jpg \n", + " inflating: /data/dog-breed-identification/train/dd52583a6a9bfdcc5278c5d61a57b7e1.jpg \n", + " inflating: /data/dog-breed-identification/train/dd5a3c84fb3c388ecf0b4da61eaef352.jpg \n", + " inflating: /data/dog-breed-identification/train/dd6482115d974cab92c9428c9a6fe29f.jpg \n", + " inflating: /data/dog-breed-identification/train/dd664bf798e6b767704a9da2e0d26cbe.jpg \n", + " inflating: /data/dog-breed-identification/train/dd68215c1ac5b073fc42202f229bc283.jpg \n", + " inflating: /data/dog-breed-identification/train/dd694fe7f5b46a6dd7c545d31004e1f6.jpg \n", + " inflating: /data/dog-breed-identification/train/dd79ba1404da7aa15471494d11657b68.jpg \n", + " inflating: /data/dog-breed-identification/train/dd7bd28f48de7a9dd72dcc4fbfe50741.jpg \n", + " inflating: /data/dog-breed-identification/train/dd826e6990638acaff24142f0d4b0b4b.jpg \n", + " inflating: /data/dog-breed-identification/train/dd82e748363c70f532e630c5197dd789.jpg \n", + " inflating: /data/dog-breed-identification/train/dd8e6927bc7beb0b332ecd443c4f9a42.jpg \n", + " inflating: /data/dog-breed-identification/train/dd9aae7acb992593e07aa905ff4e5ca4.jpg \n", + " inflating: /data/dog-breed-identification/train/ddbc4983d977bc4e0f4e622da3dd747b.jpg \n", + " inflating: /data/dog-breed-identification/train/ddc8d051c9b476c1eef9d35566490e59.jpg \n", + " inflating: /data/dog-breed-identification/train/ddcaf8e4f88b5c86a622cd62cfadfd5f.jpg \n", + " inflating: /data/dog-breed-identification/train/ddcd72819025d48d4a205f0876e432a5.jpg \n", + " inflating: /data/dog-breed-identification/train/ddcf9049e60ad86952d8298ee6ab7f58.jpg \n", + " inflating: /data/dog-breed-identification/train/ddd8c3182bae599b884efb8cf4112f6c.jpg \n", + " inflating: /data/dog-breed-identification/train/ddde5939ee7175be941a1ae0b8fb404f.jpg \n", + " inflating: /data/dog-breed-identification/train/ddde7d44c5c478092358387baee20cfb.jpg \n", + " inflating: 
/data/dog-breed-identification/train/dddece0d4f1ff6795a125836e4a88e07.jpg \n", + " inflating: /data/dog-breed-identification/train/dde1f99c744c9c5f3bd471bd390314d6.jpg \n", + " inflating: /data/dog-breed-identification/train/ddf23184178185362a7f99031328aae8.jpg \n", + " inflating: /data/dog-breed-identification/train/ddf85ff807c96d56f25b420fcaa121fe.jpg \n", + " inflating: /data/dog-breed-identification/train/de01c11dfd88085d19b336a812a1795d.jpg \n", + " inflating: /data/dog-breed-identification/train/de0966a23cecbb9f481714b059c8bcd3.jpg \n", + " inflating: /data/dog-breed-identification/train/de14c220f31820f5c9e13ee876d95a8a.jpg \n", + " inflating: /data/dog-breed-identification/train/de1be063c475547284487f18f738dea9.jpg \n", + " inflating: /data/dog-breed-identification/train/de1da53a478bd391889a98ee9063c2d7.jpg \n", + " inflating: /data/dog-breed-identification/train/de1f5f90eed51d8229d197a91d35f4d4.jpg \n", + " inflating: /data/dog-breed-identification/train/de2b37e3a9b09388de6fac2872acfac4.jpg \n", + " inflating: /data/dog-breed-identification/train/de2c9754f0687fc8d0922b990d274777.jpg \n", + " inflating: /data/dog-breed-identification/train/de2db91c990838b3951a72ad21988e31.jpg \n", + " inflating: /data/dog-breed-identification/train/de33550ba1323a0376fc1292a1d25d71.jpg \n", + " inflating: /data/dog-breed-identification/train/de37d314715709b183d88ec82184330a.jpg \n", + " inflating: /data/dog-breed-identification/train/de3ee90f9042b3cdae9aa9f1867eb751.jpg \n", + " inflating: /data/dog-breed-identification/train/de41f775f1b52a040d0965e0923bc983.jpg \n", + " inflating: /data/dog-breed-identification/train/de477e14f0f55d3b3f591f137d13a698.jpg \n", + " inflating: /data/dog-breed-identification/train/de48dcd04693d44183fcd50cd329d2e5.jpg \n", + " inflating: /data/dog-breed-identification/train/de4cf4fcb0bed47267572b694a77060d.jpg \n", + " inflating: /data/dog-breed-identification/train/de51f05ddadfb52e97fbc8799891ea8c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/de578be532caa0a0cfecd1a553b7e373.jpg \n", + " inflating: /data/dog-breed-identification/train/de5e511f50c8e04008da3de616f254fc.jpg \n", + " inflating: /data/dog-breed-identification/train/de8095b35aec0e2a0d07ed11feb11a28.jpg \n", + " inflating: /data/dog-breed-identification/train/de8c32b71a43eef41eb1af93c1baf150.jpg \n", + " inflating: /data/dog-breed-identification/train/de93a60e4b3dd5518460dafda4a1c8b5.jpg \n", + " inflating: /data/dog-breed-identification/train/de93e2daa7d4993c8e531eb71110ce46.jpg \n", + " inflating: /data/dog-breed-identification/train/de9543af5bb528ea9ef93832856b6d4f.jpg \n", + " inflating: /data/dog-breed-identification/train/de9c66567290fc6574e102d21dc3a957.jpg \n", + " inflating: /data/dog-breed-identification/train/deaba13cbf116d0dda2868a55c697d0b.jpg \n", + " inflating: /data/dog-breed-identification/train/deb5ab49dab71dea5d3a076f00eb4820.jpg \n", + " inflating: /data/dog-breed-identification/train/deb6be36f63461d4f85c2233f0f25c6a.jpg \n", + " inflating: /data/dog-breed-identification/train/deb88665ad11440ea55963b57e9ad79c.jpg \n", + " inflating: /data/dog-breed-identification/train/debd010c990b4217cf5a3d6235f123a7.jpg \n", + " inflating: /data/dog-breed-identification/train/dec0bb81afcfc4c35c7c6790975e5633.jpg \n", + " inflating: /data/dog-breed-identification/train/decaf524b73836c58c7eb371cce2b980.jpg \n", + " inflating: /data/dog-breed-identification/train/ded35f1917f66ed6cb3179e163406aec.jpg \n", + " inflating: /data/dog-breed-identification/train/dedbc436c840a97feb7b2f3fa1175fa5.jpg \n", + " inflating: /data/dog-breed-identification/train/dee5db7f0834ca1db80615a9d5fb6334.jpg \n", + " inflating: /data/dog-breed-identification/train/dee8384897c606a6301be26f1af90d67.jpg \n", + " inflating: /data/dog-breed-identification/train/dee927ec2ed2efb5ea8bda227d0be2b5.jpg \n", + " inflating: /data/dog-breed-identification/train/deec532e2bc9651c24928ead1d10b199.jpg \n", + " inflating: 
/data/dog-breed-identification/train/deee559315624c49a59358335660ce37.jpg \n", + " inflating: /data/dog-breed-identification/train/deeffdd5ee6aac9f7fa2f31a89e07498.jpg \n", + " inflating: /data/dog-breed-identification/train/def4b391140f0633501190070ffd81e5.jpg \n", + " inflating: /data/dog-breed-identification/train/def67b643b92ee3d6cdec1128df0a9b6.jpg \n", + " inflating: /data/dog-breed-identification/train/df08c075e337bb17c6dd66096f3dff4f.jpg \n", + " inflating: /data/dog-breed-identification/train/df0b926ab57cef7c005b47cd2dc3f44d.jpg \n", + " inflating: /data/dog-breed-identification/train/df12a66b9b154c8bbe922846944e2ef2.jpg \n", + " inflating: /data/dog-breed-identification/train/df1f2dd6e993e8d1bbc325768806be5a.jpg \n", + " inflating: /data/dog-breed-identification/train/df2b16112524fe0c873530ac2357368c.jpg \n", + " inflating: /data/dog-breed-identification/train/df3ba5eee0c008a3284158bb89350673.jpg \n", + " inflating: /data/dog-breed-identification/train/df3e21df71f9c3bd57b3583e34734cc7.jpg \n", + " inflating: /data/dog-breed-identification/train/df464ebe3f311744241cb3d9258a5000.jpg \n", + " inflating: /data/dog-breed-identification/train/df46a6469c374b869375ef18a34bfe35.jpg \n", + " inflating: /data/dog-breed-identification/train/df49b6b0d9d61736047884253bffbf46.jpg \n", + " inflating: /data/dog-breed-identification/train/df4d0576d927992df06c9bb08bb9bac7.jpg \n", + " inflating: /data/dog-breed-identification/train/df6731703d42db260e3b070fed033e8f.jpg \n", + " inflating: /data/dog-breed-identification/train/df775558457f8fc42f50a1b944d89328.jpg \n", + " inflating: /data/dog-breed-identification/train/df780bd081dc94caf4d1b08583fcfb99.jpg \n", + " inflating: /data/dog-breed-identification/train/df823d73ef4bdeda8b61e3435db0ee6b.jpg \n", + " inflating: /data/dog-breed-identification/train/df83ac04bc6a6891fe79d6e9b56c391b.jpg \n", + " inflating: /data/dog-breed-identification/train/df878b8c7162cab57b3f9e32c595b05f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/df8895a945ab874f4a9cdd451aebc011.jpg \n", + " inflating: /data/dog-breed-identification/train/dfbeabe922f09fa99ce84c6a94210279.jpg \n", + " inflating: /data/dog-breed-identification/train/dfc12676704c8931010ff087e117d2f2.jpg \n", + " inflating: /data/dog-breed-identification/train/dfc362b94e5653a508ceaf63d6b1ffa5.jpg \n", + " inflating: /data/dog-breed-identification/train/dfc726ff7000561805073a11d0d64178.jpg \n", + " inflating: /data/dog-breed-identification/train/dfc97c3d7a59ad4d89e791e6ab14c49f.jpg \n", + " inflating: /data/dog-breed-identification/train/dfd42ce694fd4af24a71392b94176b56.jpg \n", + " inflating: /data/dog-breed-identification/train/dfdb153c5100737c782c5f9578717594.jpg \n", + " inflating: /data/dog-breed-identification/train/dfdd8b613bbe040dcd583ff89d71aeda.jpg \n", + " inflating: /data/dog-breed-identification/train/dfe45c3b288b8224eb17dbdaf1706496.jpg \n", + " inflating: /data/dog-breed-identification/train/dfed11d5d68bd13d8587bd3047ef8e5c.jpg \n", + " inflating: /data/dog-breed-identification/train/dff884afad091397ca26465ffaa75084.jpg \n", + " inflating: /data/dog-breed-identification/train/e0010a83e9302a3abef2ca9b57f9c38b.jpg \n", + " inflating: /data/dog-breed-identification/train/e00130d97b9b4073e79e0aed53fedc3c.jpg \n", + " inflating: /data/dog-breed-identification/train/e001b6f5092b2b90501111b23ab30d12.jpg \n", + " inflating: /data/dog-breed-identification/train/e0032d2cfc32a75f448d9f931f6bca19.jpg \n", + " inflating: /data/dog-breed-identification/train/e00443152fb5951922730b21ba08f8f5.jpg \n", + " inflating: /data/dog-breed-identification/train/e03cdc6533bf441f8bdb7467b1039996.jpg \n", + " inflating: /data/dog-breed-identification/train/e03f2a3e636ea5900f5c57ca7c7af68c.jpg \n", + " inflating: /data/dog-breed-identification/train/e0417404576aa3117ab1a0c646743ed9.jpg \n", + " inflating: /data/dog-breed-identification/train/e05db2e9a15a9af373794a95efee44df.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e05e54a99b170e04cd266a8e90aedb1e.jpg \n", + " inflating: /data/dog-breed-identification/train/e0629e36f6f2f4708db4b4d68bf1a75e.jpg \n", + " inflating: /data/dog-breed-identification/train/e062d6b642e375e2b3d15057f441163a.jpg \n", + " inflating: /data/dog-breed-identification/train/e06564ae7d0a60d41052c61076014a95.jpg \n", + " inflating: /data/dog-breed-identification/train/e06773bd0666b3b9597b93a54e302c55.jpg \n", + " inflating: /data/dog-breed-identification/train/e07277bc6a6a0da06598b3ad43a011b2.jpg \n", + " inflating: /data/dog-breed-identification/train/e073b458a33cbfe9b733764a1246f52e.jpg \n", + " inflating: /data/dog-breed-identification/train/e0764f7b42ad2336c96d414b419af335.jpg \n", + " inflating: /data/dog-breed-identification/train/e07e46622e95d95ed698113415aaeda6.jpg \n", + " inflating: /data/dog-breed-identification/train/e08af1f586d3150e4cecb3c42668b0e0.jpg \n", + " inflating: /data/dog-breed-identification/train/e08c321bfa4141ecfc3dc6c5488ec770.jpg \n", + " inflating: /data/dog-breed-identification/train/e09cf0d6051273932911650565c30e0f.jpg \n", + " inflating: /data/dog-breed-identification/train/e0a59c9a8625f5ab3015c4ee460e5f1f.jpg \n", + " inflating: /data/dog-breed-identification/train/e0acf08c678ede145c9824dbb7c3718e.jpg \n", + " inflating: /data/dog-breed-identification/train/e0af99a3fd26fa2f9c601d782c5e7b6c.jpg \n", + " inflating: /data/dog-breed-identification/train/e0afc52cda56b96803e099a0b69fb173.jpg \n", + " inflating: /data/dog-breed-identification/train/e0ba27b6d9250ab716da71da96b3bca0.jpg \n", + " inflating: /data/dog-breed-identification/train/e0c2b69fc53eeb74dfbf31a1ce54b11a.jpg \n", + " inflating: /data/dog-breed-identification/train/e0c759008208229026ae7aeb04da7a75.jpg \n", + " inflating: /data/dog-breed-identification/train/e0c78ec8516f29e1b0a94ba500bcefbc.jpg \n", + " inflating: /data/dog-breed-identification/train/e0cfa8c273838e25d74970683b285391.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e0d0ab2c3c2a2c157c34889ca09e2dd7.jpg \n", + " inflating: /data/dog-breed-identification/train/e0e54245b5dc9d5f66cb1012bae1a10c.jpg \n", + " inflating: /data/dog-breed-identification/train/e0ed88eecdd9e2bbe6317ce9f5eb0847.jpg \n", + " inflating: /data/dog-breed-identification/train/e0f1273a2ed4e1790bd92880d0651893.jpg \n", + " inflating: /data/dog-breed-identification/train/e0f559dd162599f6bd888d53626d3c6d.jpg \n", + " inflating: /data/dog-breed-identification/train/e0fd9680fc1183c6c2de18e711192fe5.jpg \n", + " inflating: /data/dog-breed-identification/train/e0fe902cde4c16de2dd1bd8c5b6dc08e.jpg \n", + " inflating: /data/dog-breed-identification/train/e1064093f30092d63747f9674d6b00e7.jpg \n", + " inflating: /data/dog-breed-identification/train/e1109172b47d152684b6e54b97ae6758.jpg \n", + " inflating: /data/dog-breed-identification/train/e121342b5b88fd01c5c0a6b9406bccf2.jpg \n", + " inflating: /data/dog-breed-identification/train/e125be9ab7aa556c82f6b33e316fb523.jpg \n", + " inflating: /data/dog-breed-identification/train/e12986379661597e7c287008c89f4ea6.jpg \n", + " inflating: /data/dog-breed-identification/train/e12b685cebb695b56fd1168dd4386582.jpg \n", + " inflating: /data/dog-breed-identification/train/e12c6bc13e4ab1de4dd12d21ce74eefe.jpg \n", + " inflating: /data/dog-breed-identification/train/e139e3a836bd9c57af5355847b8eb805.jpg \n", + " inflating: /data/dog-breed-identification/train/e13bcf54507eaba884de1faea9af4dd9.jpg \n", + " inflating: /data/dog-breed-identification/train/e13d468a13740374b13ad373d508c58c.jpg \n", + " inflating: /data/dog-breed-identification/train/e1466949f06f43556943189409054b1c.jpg \n", + " inflating: /data/dog-breed-identification/train/e146cabda18bbb2cb402dece1a7dd4ef.jpg \n", + " inflating: /data/dog-breed-identification/train/e146cd8f43b445baab08b03548c425c4.jpg \n", + " inflating: /data/dog-breed-identification/train/e153e4725d7323d1833817d7cc1c9b01.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e15b8ff62b03b844e352014b16881251.jpg \n", + " inflating: /data/dog-breed-identification/train/e15dfa0f3c2c47fec01f048312ca8bb7.jpg \n", + " inflating: /data/dog-breed-identification/train/e167b2451fa1a46c231f4a628800b5db.jpg \n", + " inflating: /data/dog-breed-identification/train/e17462ffe48dffec8ff70325a6047957.jpg \n", + " inflating: /data/dog-breed-identification/train/e17f3e7d3d1468c34643e60cb622e935.jpg \n", + " inflating: /data/dog-breed-identification/train/e189ef4583d4b2187abaa63f9a4fbd81.jpg \n", + " inflating: /data/dog-breed-identification/train/e19ef13146562537f75ddf4c447d9697.jpg \n", + " inflating: /data/dog-breed-identification/train/e1a7959ca4b8da181cf109794cf2ce19.jpg \n", + " inflating: /data/dog-breed-identification/train/e1af6b48fbc089b3704ee26ade7b4f6c.jpg \n", + " inflating: /data/dog-breed-identification/train/e1b04d497388246d0305c01f8d01fa1a.jpg \n", + " inflating: /data/dog-breed-identification/train/e1b7f7010a5f6162a99fffc720f3259b.jpg \n", + " inflating: /data/dog-breed-identification/train/e1be68b2576ddfceabb23d33ee9e07ef.jpg \n", + " inflating: /data/dog-breed-identification/train/e1c77280c29861d14cd2c891ff8a362a.jpg \n", + " inflating: /data/dog-breed-identification/train/e1cacd82bce4a66313debf5e883fa806.jpg \n", + " inflating: /data/dog-breed-identification/train/e1d0619c08c523d87097109784a1d0dd.jpg \n", + " inflating: /data/dog-breed-identification/train/e1d53fa88060cde37192028ee3ca4629.jpg \n", + " inflating: /data/dog-breed-identification/train/e1e156d500e30cf73f53970a09af93c9.jpg \n", + " inflating: /data/dog-breed-identification/train/e1e8cefa88b84062d11722537ec61214.jpg \n", + " inflating: /data/dog-breed-identification/train/e1efa8c07decd2fe9c807cbfe157b8bf.jpg \n", + " inflating: /data/dog-breed-identification/train/e1f5774f729e887a7314dfaa8fcb0b8a.jpg \n", + " inflating: /data/dog-breed-identification/train/e1fa947ecb1ca186470952a971376bc6.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e1fe81af46e109cc63e17616dc630fb7.jpg \n", + " inflating: /data/dog-breed-identification/train/e201fdbcd92e3d0a10d612d5e0b77b35.jpg \n", + " inflating: /data/dog-breed-identification/train/e208bfa575753207f240ed5619f93c41.jpg \n", + " inflating: /data/dog-breed-identification/train/e20e32bf114141e20a1af854ca4d0ecc.jpg \n", + " inflating: /data/dog-breed-identification/train/e212cce60c302d24e8aaf0fb87ee0b2c.jpg \n", + " inflating: /data/dog-breed-identification/train/e2164f0cb48f319ce4e7ca70d82d8754.jpg \n", + " inflating: /data/dog-breed-identification/train/e21bbd2082fe9e256d856c9482aaac8c.jpg \n", + " inflating: /data/dog-breed-identification/train/e220d27b48b78c1b4445273ecd03c0cc.jpg \n", + " inflating: /data/dog-breed-identification/train/e224c29118a187dccba77b4aaa355767.jpg \n", + " inflating: /data/dog-breed-identification/train/e229e4628facf3fb9cd030e0ac484722.jpg \n", + " inflating: /data/dog-breed-identification/train/e236f0b66a5a700022dbed8458e81d26.jpg \n", + " inflating: /data/dog-breed-identification/train/e2379f60b78b0939a84ebd641ff3bb4a.jpg \n", + " inflating: /data/dog-breed-identification/train/e237f2c2da59abcb4d622106db8eef11.jpg \n", + " inflating: /data/dog-breed-identification/train/e23950a981b81588f00133a2c0c5cfb7.jpg \n", + " inflating: /data/dog-breed-identification/train/e2399ad3385db144d54b63986e8247f1.jpg \n", + " inflating: /data/dog-breed-identification/train/e24af0affe6c7a51b3e8ed9c30b090b7.jpg \n", + " inflating: /data/dog-breed-identification/train/e251a767dc8d07dcb4d33cbccb25cb90.jpg \n", + " inflating: /data/dog-breed-identification/train/e25accfed48409094b7c56c6a92395ad.jpg \n", + " inflating: /data/dog-breed-identification/train/e25fb7b550a55bfb47e3805fc00292bb.jpg \n", + " inflating: /data/dog-breed-identification/train/e267360efadbd9da278fde791fc03f20.jpg \n", + " inflating: /data/dog-breed-identification/train/e26c0ce9c297f9abc3b16c37cd48a133.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e26c1f3142aca0c02cac1d766e87db1d.jpg \n", + " inflating: /data/dog-breed-identification/train/e270622b5ffec8294d7e7628c4ff6c1e.jpg \n", + " inflating: /data/dog-breed-identification/train/e278e285726d3ec4b737d80cc891d266.jpg \n", + " inflating: /data/dog-breed-identification/train/e288a162bfb38659a7ca6809546b6dff.jpg \n", + " inflating: /data/dog-breed-identification/train/e29768ede01098a68e5021b453a94ee8.jpg \n", + " inflating: /data/dog-breed-identification/train/e297c765ac7e6c1d7c4657c7e72b1544.jpg \n", + " inflating: /data/dog-breed-identification/train/e2ac9b607458c25b1422271887f27e35.jpg \n", + " inflating: /data/dog-breed-identification/train/e2b2636c291ebf5649ba403b9592f51e.jpg \n", + " inflating: /data/dog-breed-identification/train/e2b39eb9cb8b34303cc3527f8f8df9d6.jpg \n", + " inflating: /data/dog-breed-identification/train/e2bba60478d1600a8b85b768196e7965.jpg \n", + " inflating: /data/dog-breed-identification/train/e2bbe6eedea8520efe8c709d091be39a.jpg \n", + " inflating: /data/dog-breed-identification/train/e2d770fe32772edb9dba937c009f3e4d.jpg \n", + " inflating: /data/dog-breed-identification/train/e2dd7700aa63c7982130ec9087086bbb.jpg \n", + " inflating: /data/dog-breed-identification/train/e2e10aeca2bcd6e9d9d91cd64efc13a8.jpg \n", + " inflating: /data/dog-breed-identification/train/e2e46a91de179a79ab3b0ec7f813df27.jpg \n", + " inflating: /data/dog-breed-identification/train/e308be2d0ae640f51c08166b2e3ce396.jpg \n", + " inflating: /data/dog-breed-identification/train/e30db8f52d5fe88e128697d8fbd163e9.jpg \n", + " inflating: /data/dog-breed-identification/train/e3104080c13200a90467f712dd2abd08.jpg \n", + " inflating: /data/dog-breed-identification/train/e316925eb1cf7cdeb1ffaab7424e231d.jpg \n", + " inflating: /data/dog-breed-identification/train/e31fe999723e849139c1931ddd9f82b0.jpg \n", + " inflating: /data/dog-breed-identification/train/e32461da9c81edd9bd5a6cc2d7183129.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e33873fb8e4f52734ab6b584dd6ede0e.jpg \n", + " inflating: /data/dog-breed-identification/train/e33ebba7c3621778acdd362e12d35660.jpg \n", + " inflating: /data/dog-breed-identification/train/e342333b4dd5d53c4720bb69b72cfe4e.jpg \n", + " inflating: /data/dog-breed-identification/train/e343d8cb8acee3f75ad304a68ec68d6a.jpg \n", + " inflating: /data/dog-breed-identification/train/e346c77b51b03999cbf7d89ae776e696.jpg \n", + " inflating: /data/dog-breed-identification/train/e3485777ce8e1a08de482429cd38a420.jpg \n", + " inflating: /data/dog-breed-identification/train/e34ee9dce9546a1b520c1c8a3badb605.jpg \n", + " inflating: /data/dog-breed-identification/train/e35eb70a03e9f7f190a45a820295a671.jpg \n", + " inflating: /data/dog-breed-identification/train/e3674d52b8788561fc9e21430c9bcd67.jpg \n", + " inflating: /data/dog-breed-identification/train/e36b6d896092fa3f2b3298ce7adb2d97.jpg \n", + " inflating: /data/dog-breed-identification/train/e36c116cdff32db5d5b87b3c789ce46c.jpg \n", + " inflating: /data/dog-breed-identification/train/e36cf92886e752cdd8be2101b3090a2c.jpg \n", + " inflating: /data/dog-breed-identification/train/e36f90b1dd7921dceb536c79989fe69f.jpg \n", + " inflating: /data/dog-breed-identification/train/e3728ac64318af2803ea1774459b2df0.jpg \n", + " inflating: /data/dog-breed-identification/train/e37cf6ff2d694f48defa422d6a6a7ef8.jpg \n", + " inflating: /data/dog-breed-identification/train/e381b4aaa19c31ef6765ab0d0af205c9.jpg \n", + " inflating: /data/dog-breed-identification/train/e387ac130247a8a48e1a136d9a4e013c.jpg \n", + " inflating: /data/dog-breed-identification/train/e399ac36963facd47d52aa56df4e6e83.jpg \n", + " inflating: /data/dog-breed-identification/train/e39c995703aaed29660f904fa5bce520.jpg \n", + " inflating: /data/dog-breed-identification/train/e3a3ebddca40ff58d0cb9b8205c46066.jpg \n", + " inflating: /data/dog-breed-identification/train/e3a5ce3bc399b7b6c68a29a9cc5600d4.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e3a6203b61fa9cd2ffad92ebe31334ca.jpg \n", + " inflating: /data/dog-breed-identification/train/e3a8e45ac0004fb87d2ac40bad70114e.jpg \n", + " inflating: /data/dog-breed-identification/train/e3aa1999ea72d13102a3810d312b49ba.jpg \n", + " inflating: /data/dog-breed-identification/train/e3b0ae3127ce33aecce9f512db0051e1.jpg \n", + " inflating: /data/dog-breed-identification/train/e3b794cfccf2ed02689f046aebe7f650.jpg \n", + " inflating: /data/dog-breed-identification/train/e3b8f3589a7286ebe017300bf780c279.jpg \n", + " inflating: /data/dog-breed-identification/train/e3cc185776379b55e23a9973bce785b7.jpg \n", + " inflating: /data/dog-breed-identification/train/e3d9ae0a1ff03eeb37b6ca4bce7bad97.jpg \n", + " inflating: /data/dog-breed-identification/train/e3dac9d6f0bf998d55b310d37f5d4a27.jpg \n", + " inflating: /data/dog-breed-identification/train/e3e2cb3f0677ec569e74474b7e6a236d.jpg \n", + " inflating: /data/dog-breed-identification/train/e3eb50726f2104cbf86db02d443e04be.jpg \n", + " inflating: /data/dog-breed-identification/train/e3ee6250e25e90f6501f854b27fce20c.jpg \n", + " inflating: /data/dog-breed-identification/train/e3f184140f68b3502dfb048607556438.jpg \n", + " inflating: /data/dog-breed-identification/train/e401d9046968cb01f5bdb97f92e61428.jpg \n", + " inflating: /data/dog-breed-identification/train/e4067d97184c1acb63af99e1e30945fe.jpg \n", + " inflating: /data/dog-breed-identification/train/e41d436e4f76b4c13a92abde03ae2721.jpg \n", + " inflating: /data/dog-breed-identification/train/e4245709e4060e08146b5fe1af72385d.jpg \n", + " inflating: /data/dog-breed-identification/train/e42d24760fcb9e6273a29e2f083ba9b6.jpg \n", + " inflating: /data/dog-breed-identification/train/e438c6b845a8498620dc14f2f25bf603.jpg \n", + " inflating: /data/dog-breed-identification/train/e44455c6c55cd0ca746c3d992e25e672.jpg \n", + " inflating: /data/dog-breed-identification/train/e450c72e96ce47b6c327c671ea8e85c0.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e4524b9d4614cb5aa68c7dc3e26cb42c.jpg \n", + " inflating: /data/dog-breed-identification/train/e466e0a3b43403c7bffd943fffc92ced.jpg \n", + " inflating: /data/dog-breed-identification/train/e467353f2da5b49fc7a8e9835b22fe0e.jpg \n", + " inflating: /data/dog-breed-identification/train/e47a26a0b4510b25250f654577207ed3.jpg \n", + " inflating: /data/dog-breed-identification/train/e47cac448a3250262bee5660d393da9a.jpg \n", + " inflating: /data/dog-breed-identification/train/e483418ed3331b5bdbd39cf4539fbfb5.jpg \n", + " inflating: /data/dog-breed-identification/train/e485830e23ef663878896f45f85751f4.jpg \n", + " inflating: /data/dog-breed-identification/train/e4913c578cba8c59d842663fa734d337.jpg \n", + " inflating: /data/dog-breed-identification/train/e49b6c0fb5b8bb8b86fe2a1f48c65746.jpg \n", + " inflating: /data/dog-breed-identification/train/e49f8aaa63a2ad36d11ff50fd53e25cf.jpg \n", + " inflating: /data/dog-breed-identification/train/e4a159a9611f2025cecf53bf8e09699b.jpg \n", + " inflating: /data/dog-breed-identification/train/e4a2121c2de5de095855677d4e6eca71.jpg \n", + " inflating: /data/dog-breed-identification/train/e4a43542e9d62fff9a6a95e3ce39942d.jpg \n", + " inflating: /data/dog-breed-identification/train/e4a4cb1f4ab166d24cc5215abeb327b9.jpg \n", + " inflating: /data/dog-breed-identification/train/e4a669bdb348065f7760d43fa3efa731.jpg \n", + " inflating: /data/dog-breed-identification/train/e4b586f1a120bba42545d866d45d0602.jpg \n", + " inflating: /data/dog-breed-identification/train/e4b99d348bb5eec10e433c02173d057e.jpg \n", + " inflating: /data/dog-breed-identification/train/e4bb015a94aec342233058e7bf430704.jpg \n", + " inflating: /data/dog-breed-identification/train/e4c7a29165989410af12b0e3fc6bef0d.jpg \n", + " inflating: /data/dog-breed-identification/train/e4ca855e09eabf518ed39a0a545da981.jpg \n", + " inflating: /data/dog-breed-identification/train/e4cb3bc8fc0dd15216e26c00a7b17d81.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e4d8b52bd1e9f425f227da0ab7c0d7bd.jpg \n", + " inflating: /data/dog-breed-identification/train/e4e3868ac4ef2dae776dcb129e152e5f.jpg \n", + " inflating: /data/dog-breed-identification/train/e4eea0877800039013d0b17a4663bd16.jpg \n", + " inflating: /data/dog-breed-identification/train/e4f17a9e5ee1ed5385744cd6e8916a4e.jpg \n", + " inflating: /data/dog-breed-identification/train/e4f4ecefab54cba3356b0c8bf7ce4fee.jpg \n", + " inflating: /data/dog-breed-identification/train/e4f5d391d0eab2c83493f2110a743da3.jpg \n", + " inflating: /data/dog-breed-identification/train/e4fe0acc8e0372aae5d0b0817e70bdef.jpg \n", + " inflating: /data/dog-breed-identification/train/e505e918813f8b9e076307e6b884064c.jpg \n", + " inflating: /data/dog-breed-identification/train/e5064451d5ce1b406452427679281bbc.jpg \n", + " inflating: /data/dog-breed-identification/train/e50655050c46e29c5d61527f391475b4.jpg \n", + " inflating: /data/dog-breed-identification/train/e507c47a34012fb435804f89797b6abb.jpg \n", + " inflating: /data/dog-breed-identification/train/e5128f2c854ffaa8e53b917edd0741e9.jpg \n", + " inflating: /data/dog-breed-identification/train/e51781dd757f5355fdf5f0b5872b6ae1.jpg \n", + " inflating: /data/dog-breed-identification/train/e517cb6c5a2942b6a1b545e84551372f.jpg \n", + " inflating: /data/dog-breed-identification/train/e520916265354a8ba4b27fb074795c6a.jpg \n", + " inflating: /data/dog-breed-identification/train/e5271262c8e4fd79dff994f9ed711dfc.jpg \n", + " inflating: /data/dog-breed-identification/train/e52be5cc43cf78690312e1098422ea7d.jpg \n", + " inflating: /data/dog-breed-identification/train/e53b517c2d19cc71b91546deb4013864.jpg \n", + " inflating: /data/dog-breed-identification/train/e53db2fd75c0dd5e75e4d6d2ccd80e6e.jpg \n", + " inflating: /data/dog-breed-identification/train/e5470144eceb4e454681874ffe3e7e78.jpg \n", + " inflating: /data/dog-breed-identification/train/e54ed50db3e1079db9284bd30494de76.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e550eb60eb9f908f253a79bb75c3b82e.jpg \n", + " inflating: /data/dog-breed-identification/train/e552addd2461f8a62e9691fa26b21370.jpg \n", + " inflating: /data/dog-breed-identification/train/e553589d94e2d8e84f0e20c33a8bb17b.jpg \n", + " inflating: /data/dog-breed-identification/train/e562d56214836fc2621f01d53f25c44f.jpg \n", + " inflating: /data/dog-breed-identification/train/e5679014512835a825578b7b15676df2.jpg \n", + " inflating: /data/dog-breed-identification/train/e568fcc1564833b782490800a507099f.jpg \n", + " inflating: /data/dog-breed-identification/train/e56ce32aeccac2bfbbcd6eb80ab337da.jpg \n", + " inflating: /data/dog-breed-identification/train/e5757684859b44aa875efd4d67661399.jpg \n", + " inflating: /data/dog-breed-identification/train/e57c7aad6b6851575b78e7a692df7ca3.jpg \n", + " inflating: /data/dog-breed-identification/train/e57ca37ac086752c46e40a90139f9b26.jpg \n", + " inflating: /data/dog-breed-identification/train/e5831fcb399c887cb0ac1e3a3d229f7b.jpg \n", + " inflating: /data/dog-breed-identification/train/e5851276fd2652397d1b3ebfc345589a.jpg \n", + " inflating: /data/dog-breed-identification/train/e5861b5d5b08d0e03ca3e3785e44cc88.jpg \n", + " inflating: /data/dog-breed-identification/train/e58621674644d3729585675ecf09c091.jpg \n", + " inflating: /data/dog-breed-identification/train/e58763e5a8e37bcede344260eb20f26d.jpg \n", + " inflating: /data/dog-breed-identification/train/e593a51964b8dbf67e3a8724667c1102.jpg \n", + " inflating: /data/dog-breed-identification/train/e593c0438ce45ae8abc32a8ad154e776.jpg \n", + " inflating: /data/dog-breed-identification/train/e5955cf16bd1c54a1e80f5a031851482.jpg \n", + " inflating: /data/dog-breed-identification/train/e59ba8576716c823f7731385d802fd49.jpg \n", + " inflating: /data/dog-breed-identification/train/e59d5676c3b4910024b164265ace3217.jpg \n", + " inflating: /data/dog-breed-identification/train/e5afb48f945656686984a28974cfca73.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e5be21baaf39b50fca94e56c3b29416d.jpg \n", + " inflating: /data/dog-breed-identification/train/e5c20fbe4702370b3dc2690bb72a8cd7.jpg \n", + " inflating: /data/dog-breed-identification/train/e5ceecb78014cf7eb9bed9a6ce23dbf0.jpg \n", + " inflating: /data/dog-breed-identification/train/e5d36dcf26d2f478531b1702508b13aa.jpg \n", + " inflating: /data/dog-breed-identification/train/e5d7f92c5a70ddd58cf5dd32d573bd2b.jpg \n", + " inflating: /data/dog-breed-identification/train/e5da6d29569d6d93ae3e0d04f35ab9b9.jpg \n", + " inflating: /data/dog-breed-identification/train/e5e49ed50cc1f2a58f1fca768a977391.jpg \n", + " inflating: /data/dog-breed-identification/train/e5ec89512b1aa4594fffa72867874a27.jpg \n", + " inflating: /data/dog-breed-identification/train/e5eed37393749b2aabc60aef5cb0cf27.jpg \n", + " inflating: /data/dog-breed-identification/train/e5f1325af86723ad733000ca2adbd226.jpg \n", + " inflating: /data/dog-breed-identification/train/e5f1d18d03878ce30dc24eb945652698.jpg \n", + " inflating: /data/dog-breed-identification/train/e5f8c5b22071d0a266d80e3f9edc1cd0.jpg \n", + " inflating: /data/dog-breed-identification/train/e5fb57e9d1239cd72c7855be2947b0c9.jpg \n", + " inflating: /data/dog-breed-identification/train/e5fe6ce882c440a45354722376fdb770.jpg \n", + " inflating: /data/dog-breed-identification/train/e60889840323eb4be2ff9498c79c1409.jpg \n", + " inflating: /data/dog-breed-identification/train/e614058019fbb49489ee4f9090cf25ef.jpg \n", + " inflating: /data/dog-breed-identification/train/e620a0807519a406a20dbe50ebeb1b1c.jpg \n", + " inflating: /data/dog-breed-identification/train/e62262c8f83d4803009619aeb1e8a45d.jpg \n", + " inflating: /data/dog-breed-identification/train/e623bb54da50bddad89dbe36582ef530.jpg \n", + " inflating: /data/dog-breed-identification/train/e62630fa77dea4ee5a100e23edc6d061.jpg \n", + " inflating: /data/dog-breed-identification/train/e62e94130670395491f57ca19cc540e3.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e63a11b5cf02c211a6b92145fa3cca1a.jpg \n", + " inflating: /data/dog-breed-identification/train/e63afe537a7d02e36846ca681e89f198.jpg \n", + " inflating: /data/dog-breed-identification/train/e63ce7912e8a260513f3d4d9db3eaa62.jpg \n", + " inflating: /data/dog-breed-identification/train/e640b0064c429c2cbad0788219b57303.jpg \n", + " inflating: /data/dog-breed-identification/train/e644ef9e5c4240f7d1d87305fef7aff4.jpg \n", + " inflating: /data/dog-breed-identification/train/e6462e5ebede53be91c8ff4c5022c3f2.jpg \n", + " inflating: /data/dog-breed-identification/train/e64f734a28b021ded23cb8a25ebd5613.jpg \n", + " inflating: /data/dog-breed-identification/train/e65087fe4a5ef4495d30e454f281b06f.jpg \n", + " inflating: /data/dog-breed-identification/train/e655b500159688691e025a1177936df0.jpg \n", + " inflating: /data/dog-breed-identification/train/e6689ca3adab4609b12a24868b8d3bef.jpg \n", + " inflating: /data/dog-breed-identification/train/e66b165e767d45ac2ef046873a27ed29.jpg \n", + " inflating: /data/dog-breed-identification/train/e673bb6ca717b8dffe3b5ba35da07a83.jpg \n", + " inflating: /data/dog-breed-identification/train/e67eb5ac576a272a62033127bf4e6570.jpg \n", + " inflating: /data/dog-breed-identification/train/e67eb5ae1ff58374b0e07398d5a0da5a.jpg \n", + " inflating: /data/dog-breed-identification/train/e6817d4d0b53ec2dc5339166bcb167c0.jpg \n", + " inflating: /data/dog-breed-identification/train/e681969d4b5b730e9c93ce5e9d9ec9ff.jpg \n", + " inflating: /data/dog-breed-identification/train/e6898f5cc43bb4a30970f2d2210fdb9a.jpg \n", + " inflating: /data/dog-breed-identification/train/e68b9b319d716e7c3d927a823c99f6db.jpg \n", + " inflating: /data/dog-breed-identification/train/e6a382c2cca7aa606dda1d5f7109cddc.jpg \n", + " inflating: /data/dog-breed-identification/train/e6a3e08febb3d2d5803769ee624392f3.jpg \n", + " inflating: /data/dog-breed-identification/train/e6a62f69da66da8492120ac8107f68c1.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e6b47a2f62d0956fae1f3970f310b64a.jpg \n", + " inflating: /data/dog-breed-identification/train/e6c13efafeb9ead1ba224c0fe9edf59f.jpg \n", + " inflating: /data/dog-breed-identification/train/e6cdf82e11fd0ee6f73c55c0b4944616.jpg \n", + " inflating: /data/dog-breed-identification/train/e6d9085fa22d76e3c089cfaea8f39479.jpg \n", + " inflating: /data/dog-breed-identification/train/e6e89b7a7579de5c5ced94253491953e.jpg \n", + " inflating: /data/dog-breed-identification/train/e6eb11571ef963fac4d2300df3122f85.jpg \n", + " inflating: /data/dog-breed-identification/train/e6f837210a5b4c4f171b21f3a43ae12b.jpg \n", + " inflating: /data/dog-breed-identification/train/e6ff9171c960a31363798343982338c4.jpg \n", + " inflating: /data/dog-breed-identification/train/e716ae8410966853094bd28b0829feff.jpg \n", + " inflating: /data/dog-breed-identification/train/e728ffc9ca204977f2cc3c502c68d823.jpg \n", + " inflating: /data/dog-breed-identification/train/e734678e0c18e103c41023e11b92e012.jpg \n", + " inflating: /data/dog-breed-identification/train/e73d792517b5e717709ed11782adddf8.jpg \n", + " inflating: /data/dog-breed-identification/train/e748f61d39da506904f531c7732df512.jpg \n", + " inflating: /data/dog-breed-identification/train/e74d4ff0bbbaed7ed27b0dc3a27aadb1.jpg \n", + " inflating: /data/dog-breed-identification/train/e74f33ae3d78d3d2f8c75b8fc847f5ad.jpg \n", + " inflating: /data/dog-breed-identification/train/e7518def797fd4193bebc97d52ecd66f.jpg \n", + " inflating: /data/dog-breed-identification/train/e75f2316c5dd47b87a22bbe95cf8b8da.jpg \n", + " inflating: /data/dog-breed-identification/train/e76687c549303fb57aae9866739f44b8.jpg \n", + " inflating: /data/dog-breed-identification/train/e7678fac0b8f1d97e742644ee75a8aa2.jpg \n", + " inflating: /data/dog-breed-identification/train/e76a6bb8fdb21c7fcbb4f9f06bbda574.jpg \n", + " inflating: /data/dog-breed-identification/train/e76ab0b6cd07a1583ef84a2d4dc98aca.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e779a2c3d07afb0056a1e33298e9eecf.jpg \n", + " inflating: /data/dog-breed-identification/train/e77a27e8f24034facbfae4b2dffd476b.jpg \n", + " inflating: /data/dog-breed-identification/train/e78fc6d10f9d4646a50004e4ac74ea5c.jpg \n", + " inflating: /data/dog-breed-identification/train/e78ff6b0a2d3a58234760d45e984eccf.jpg \n", + " inflating: /data/dog-breed-identification/train/e79011daac807552f798aa1effb60ee4.jpg \n", + " inflating: /data/dog-breed-identification/train/e792459978c605f740a2b553ec1608f6.jpg \n", + " inflating: /data/dog-breed-identification/train/e79578de293a0b46e92448ce8f8bb69f.jpg \n", + " inflating: /data/dog-breed-identification/train/e795a13bbfa8a72daeb315646f83068a.jpg \n", + " inflating: /data/dog-breed-identification/train/e7997562415c62141d022cfa8cae1c60.jpg \n", + " inflating: /data/dog-breed-identification/train/e79a196a6e1772b9fa6b008b597d3a08.jpg \n", + " inflating: /data/dog-breed-identification/train/e79a5176fe0322a36d09f057d64aec54.jpg \n", + " inflating: /data/dog-breed-identification/train/e79c6a88cf8da397ee67235a04741f2d.jpg \n", + " inflating: /data/dog-breed-identification/train/e79cc376c53b8333546437ca6e5b8759.jpg \n", + " inflating: /data/dog-breed-identification/train/e79f822b26adace4455db1fcd94ab47e.jpg \n", + " inflating: /data/dog-breed-identification/train/e7a8a3681524405b2e987ffbaff6d173.jpg \n", + " inflating: /data/dog-breed-identification/train/e7ab11f849e895ced3cdddf4d90e5bd2.jpg \n", + " inflating: /data/dog-breed-identification/train/e7ab74acdc860dfb93271901da085ffd.jpg \n", + " inflating: /data/dog-breed-identification/train/e7af30693df78697b8541fb5e2e46941.jpg \n", + " inflating: /data/dog-breed-identification/train/e7af8f590b4fbdca0779f5e606ef91a1.jpg \n", + " inflating: /data/dog-breed-identification/train/e7bc17ef352ef576b73467ebe9a407f1.jpg \n", + " inflating: /data/dog-breed-identification/train/e7c5692d044250d303a839d989132bf1.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e7f2f67cb616281a89fc77a6bdea6eb0.jpg \n", + " inflating: /data/dog-breed-identification/train/e7f3274adaad369068602a422e9df192.jpg \n", + " inflating: /data/dog-breed-identification/train/e7f491cb903ff0a294eb63c625ef090e.jpg \n", + " inflating: /data/dog-breed-identification/train/e7f4f3c25291884f2dfe7c60051d6949.jpg \n", + " inflating: /data/dog-breed-identification/train/e7f6ec634384c07f7a0f5659e4f77867.jpg \n", + " inflating: /data/dog-breed-identification/train/e7faa5748d8320f604aeb3fd5bbe4ab0.jpg \n", + " inflating: /data/dog-breed-identification/train/e7fda54b5e96c6048dad68414568f142.jpg \n", + " inflating: /data/dog-breed-identification/train/e8013a8357f9eec2369ee4e3fab710a1.jpg \n", + " inflating: /data/dog-breed-identification/train/e8057e2f1e3bca4e9f4a2af48a9a9c46.jpg \n", + " inflating: /data/dog-breed-identification/train/e80dbb78fb1d763d01d73b2f367c6805.jpg \n", + " inflating: /data/dog-breed-identification/train/e818d5a7782350e72171c3dfa05af02a.jpg \n", + " inflating: /data/dog-breed-identification/train/e81b2d682dd2a2f5900b07551297f0a5.jpg \n", + " inflating: /data/dog-breed-identification/train/e81c8db07ad9b5291435e3aa3ac728bf.jpg \n", + " inflating: /data/dog-breed-identification/train/e81e8496a9826236ee5dfb24960aec6d.jpg \n", + " inflating: /data/dog-breed-identification/train/e81f7ae7181b7efe18eb433b0079bdc0.jpg \n", + " inflating: /data/dog-breed-identification/train/e81ff379277f5023333efed3beda7f64.jpg \n", + " inflating: /data/dog-breed-identification/train/e827550ea6ab19bbd4916d9952ee8765.jpg \n", + " inflating: /data/dog-breed-identification/train/e827e6439b32ea68d51e894a7350c699.jpg \n", + " inflating: /data/dog-breed-identification/train/e82a61cd6a3285ab9bba11fc8d752fa9.jpg \n", + " inflating: /data/dog-breed-identification/train/e82fa6e85fd570b2f766635eed62fdf4.jpg \n", + " inflating: /data/dog-breed-identification/train/e8489d90981775781ccff2f27f7f413a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e85f6d7af1405365c8e576d0a0aa7a2b.jpg \n", + " inflating: /data/dog-breed-identification/train/e86503273b282ab18bd09c6b6ef63de7.jpg \n", + " inflating: /data/dog-breed-identification/train/e866558cded733e2788c68b170752f59.jpg \n", + " inflating: /data/dog-breed-identification/train/e86a88391d017cc3807cc9653a4a7290.jpg \n", + " inflating: /data/dog-breed-identification/train/e86acb593c6718489134a45e8b936b90.jpg \n", + " inflating: /data/dog-breed-identification/train/e86be20abe3ab746179728fa6f19bc59.jpg \n", + " inflating: /data/dog-breed-identification/train/e86f7777b271c54f7d0d6a228637c8d3.jpg \n", + " inflating: /data/dog-breed-identification/train/e87a86f29c8709e81bbea9c8139d739c.jpg \n", + " inflating: /data/dog-breed-identification/train/e87e709987ed707393533e9d2429f585.jpg \n", + " inflating: /data/dog-breed-identification/train/e88140754690d2544c2684475cd4c6f7.jpg \n", + " inflating: /data/dog-breed-identification/train/e8827c1b0698e56e60f735b142d69d18.jpg \n", + " inflating: /data/dog-breed-identification/train/e88f4a70305e582965569c4ca91c34b8.jpg \n", + " inflating: /data/dog-breed-identification/train/e8919d9aad4b786433cde19e387df19e.jpg \n", + " inflating: /data/dog-breed-identification/train/e89b8245a8e0dec55c3e6aca21e9b374.jpg \n", + " inflating: /data/dog-breed-identification/train/e89e6cbb5d616ef9904675b9e6db06b3.jpg \n", + " inflating: /data/dog-breed-identification/train/e89f2129dc5830c7ba5114c7c79ee459.jpg \n", + " inflating: /data/dog-breed-identification/train/e8a321d602628f7efd19a567974d8771.jpg \n", + " inflating: /data/dog-breed-identification/train/e8a902080c150632c8b45b8013d0e540.jpg \n", + " inflating: /data/dog-breed-identification/train/e8afeaf121012b3abbac531f126c198f.jpg \n", + " inflating: /data/dog-breed-identification/train/e8b700408541ef85238927b90b33df67.jpg \n", + " inflating: /data/dog-breed-identification/train/e8b73dd9f11945ca1de70c3174c724e6.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e8baa7663b726cad8eefb319ca7d8195.jpg \n", + " inflating: /data/dog-breed-identification/train/e8c241889ceab17b1ee8ce6828477204.jpg \n", + " inflating: /data/dog-breed-identification/train/e8cd220184f16fde613b32d157699edf.jpg \n", + " inflating: /data/dog-breed-identification/train/e8d0712b9ca47f77b333858183a38ef7.jpg \n", + " inflating: /data/dog-breed-identification/train/e8d09f74216aeede7e77829e177de7c0.jpg \n", + " inflating: /data/dog-breed-identification/train/e8e113dfd19ec90776c74b9105f3c29a.jpg \n", + " inflating: /data/dog-breed-identification/train/e8e8ec39dce62227cdc94ce91d761363.jpg \n", + " inflating: /data/dog-breed-identification/train/e8e9ea1506e0ad9382665b2690ecb4e9.jpg \n", + " inflating: /data/dog-breed-identification/train/e8f36fbe544d5c3329bc9e51606abcef.jpg \n", + " inflating: /data/dog-breed-identification/train/e8f5dd1ad67209c064965691030a07e5.jpg \n", + " inflating: /data/dog-breed-identification/train/e904a004a53aab0ff5902239f538afe1.jpg \n", + " inflating: /data/dog-breed-identification/train/e90868c5948296e603f26b2d2696f9c7.jpg \n", + " inflating: /data/dog-breed-identification/train/e915b76d7c4c65ad14a3b808c5f6c9a4.jpg \n", + " inflating: /data/dog-breed-identification/train/e9179ddabcd26e81d3c3604a40e97ea3.jpg \n", + " inflating: /data/dog-breed-identification/train/e917bb5669e0c273eb16304af1f69e1d.jpg \n", + " inflating: /data/dog-breed-identification/train/e9192e92049f545405dfbb8a2a05a8c7.jpg \n", + " inflating: /data/dog-breed-identification/train/e91b2802897234af87d09702a6b23b16.jpg \n", + " inflating: /data/dog-breed-identification/train/e92c996c9454434af5a9e702076338dc.jpg \n", + " inflating: /data/dog-breed-identification/train/e93489f9d52d63c373264b27daabf420.jpg \n", + " inflating: /data/dog-breed-identification/train/e93aa36e6a6951470a703ffb9020e9c4.jpg \n", + " inflating: /data/dog-breed-identification/train/e94234a15721850c8aaf3a94d2363f4f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e949de04a40965ca11bf4bc387cbcaf7.jpg \n", + " inflating: /data/dog-breed-identification/train/e95bd9ab625ad1c16b513a13a27c447d.jpg \n", + " inflating: /data/dog-breed-identification/train/e95daa31113477bd93955ae4852dd46a.jpg \n", + " inflating: /data/dog-breed-identification/train/e960fc18473c0998c1ecbda887aa6bea.jpg \n", + " inflating: /data/dog-breed-identification/train/e96a5909f47824c1410fd00e3bc23f4d.jpg \n", + " inflating: /data/dog-breed-identification/train/e970fc0099be12ed7544a6eee8e3a9db.jpg \n", + " inflating: /data/dog-breed-identification/train/e9711a024b141baa00e640231bc461a9.jpg \n", + " inflating: /data/dog-breed-identification/train/e97344d98c999f4f9bfa5dc33ca09ecb.jpg \n", + " inflating: /data/dog-breed-identification/train/e97b337794ab4b95db5e2622156de547.jpg \n", + " inflating: /data/dog-breed-identification/train/e97e598576bbc9adf772f244870d4c7b.jpg \n", + " inflating: /data/dog-breed-identification/train/e98505488dfe161e0894e1bce7b5de04.jpg \n", + " inflating: /data/dog-breed-identification/train/e993788050a70ef538c396f582c3a04c.jpg \n", + " inflating: /data/dog-breed-identification/train/e994ab08b7d68114beb65a3784c08be6.jpg \n", + " inflating: /data/dog-breed-identification/train/e996402cf7fa66f23a8415c9007e06db.jpg \n", + " inflating: /data/dog-breed-identification/train/e996b051ab78dd47c2ca379c70b6f27b.jpg \n", + " inflating: /data/dog-breed-identification/train/e997eaa38f75d7660fd6c488e3c76d72.jpg \n", + " inflating: /data/dog-breed-identification/train/e99886bf590f8e0cb3394f91e8920ae2.jpg \n", + " inflating: /data/dog-breed-identification/train/e9a3813892432cb6d5c771156b4bb3c5.jpg \n", + " inflating: /data/dog-breed-identification/train/e9aaa8964f450f990a7a3c4228bb844d.jpg \n", + " inflating: /data/dog-breed-identification/train/e9ac785eb7c5e7c31dd54be18e40bff2.jpg \n", + " inflating: /data/dog-breed-identification/train/e9b8e25755fcc201168fdf2d299e5350.jpg \n", + " inflating: 
/data/dog-breed-identification/train/e9c694db39c5b8c904bf36c385fb171b.jpg \n", + " inflating: /data/dog-breed-identification/train/e9d31fe3a42451dc01c6d2e15ba6ef8d.jpg \n", + " inflating: /data/dog-breed-identification/train/e9d78bc07d863ba45ccee1a6c872182d.jpg \n", + " inflating: /data/dog-breed-identification/train/e9d910ff8abc407c1536201210b40888.jpg \n", + " inflating: /data/dog-breed-identification/train/e9da345a17d3e21041214e6cef9556a4.jpg \n", + " inflating: /data/dog-breed-identification/train/e9efc5edbe14eb6375a64b37db8b36df.jpg \n", + " inflating: /data/dog-breed-identification/train/e9f19fbf059a074a0b40c5b088f4fa6f.jpg \n", + " inflating: /data/dog-breed-identification/train/e9f77a21c65963a5e82079536a05c811.jpg \n", + " inflating: /data/dog-breed-identification/train/e9fc49ac28d4f84579f5621bccc78470.jpg \n", + " inflating: /data/dog-breed-identification/train/e9fc775bd40d6d7273ff093fa12a0574.jpg \n", + " inflating: /data/dog-breed-identification/train/ea06fc45fe1d39398d6ad82c2f741742.jpg \n", + " inflating: /data/dog-breed-identification/train/ea0d557dd0acd9191f45e703d18be9b3.jpg \n", + " inflating: /data/dog-breed-identification/train/ea113a236df0ae1d6875259ed2d2bcde.jpg \n", + " inflating: /data/dog-breed-identification/train/ea12edc949e2572580fa237d3daea938.jpg \n", + " inflating: /data/dog-breed-identification/train/ea1527f051be904e70fab5877bbc0341.jpg \n", + " inflating: /data/dog-breed-identification/train/ea15dfc5ed49598f8ea14a3be59164d7.jpg \n", + " inflating: /data/dog-breed-identification/train/ea15f35659972aef8ce3472d65037c4d.jpg \n", + " inflating: /data/dog-breed-identification/train/ea18d9dba21ed0af962a29cbca958bb8.jpg \n", + " inflating: /data/dog-breed-identification/train/ea1a1b02e734d1130c80e45880c106d3.jpg \n", + " inflating: /data/dog-breed-identification/train/ea1c23bc0a6d8c057a12d2a68b44c7c2.jpg \n", + " inflating: /data/dog-breed-identification/train/ea28a7ca3ddea1be196b48623e1ecd4e.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ea426774d331299b8e12f53955e0fe99.jpg \n", + " inflating: /data/dog-breed-identification/train/ea42de31f9b6e2c8f44a48ab53bc5fd4.jpg \n", + " inflating: /data/dog-breed-identification/train/ea43ab0fec05595317c008853e9798ef.jpg \n", + " inflating: /data/dog-breed-identification/train/ea43c69e5c1321a6d05d0caaceb69318.jpg \n", + " inflating: /data/dog-breed-identification/train/ea57f2db5aca0955ff2eb8ba7ea8acc2.jpg \n", + " inflating: /data/dog-breed-identification/train/ea5be84db93c42752547df879813ce47.jpg \n", + " inflating: /data/dog-breed-identification/train/ea5c48cc2c2d348add796a1530eb27f5.jpg \n", + " inflating: /data/dog-breed-identification/train/ea607b36bd3391a5b4fe4c547f5dd7c7.jpg \n", + " inflating: /data/dog-breed-identification/train/ea668eb722f6167ae2669b6f1ddeb3d6.jpg \n", + " inflating: /data/dog-breed-identification/train/ea6c7f6e749d8aa9512054c47585e298.jpg \n", + " inflating: /data/dog-breed-identification/train/ea6f0fba4d83f7225c539d44cf28392d.jpg \n", + " inflating: /data/dog-breed-identification/train/ea77bbe28a68e90a21bbef677493ad65.jpg \n", + " inflating: /data/dog-breed-identification/train/ea7a7a80860e733c670e387bfe93bf08.jpg \n", + " inflating: /data/dog-breed-identification/train/ea7f0c67bf41d5f2afe2e8e5a9c08774.jpg \n", + " inflating: /data/dog-breed-identification/train/ea81932e294837391a437fd513a87e89.jpg \n", + " inflating: /data/dog-breed-identification/train/ea829d433019aedcdeb0215c10ea905b.jpg \n", + " inflating: /data/dog-breed-identification/train/ea8a9c29c83b5f3d4b917922a0ce9066.jpg \n", + " inflating: /data/dog-breed-identification/train/ea8bb75412610ee545d8c026ae789f4d.jpg \n", + " inflating: /data/dog-breed-identification/train/ea906db2c833f5140b009eabba209eaa.jpg \n", + " inflating: /data/dog-breed-identification/train/ea9134791fb20bce0b72caabfa0947a6.jpg \n", + " inflating: /data/dog-breed-identification/train/ea9ae902f4ca9241c1187992c51db9bb.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ea9e3ca803a028a17317345e4fa75135.jpg \n", + " inflating: /data/dog-breed-identification/train/eaa2b8ce4fd5320fdeceecbe804f08bb.jpg \n", + " inflating: /data/dog-breed-identification/train/eaa305d6f31664b513c89946b5193ca4.jpg \n", + " inflating: /data/dog-breed-identification/train/eaa3b3bff09bb80c64937996a1be2d92.jpg \n", + " inflating: /data/dog-breed-identification/train/eaadc51c7c1a507fb380909113036428.jpg \n", + " inflating: /data/dog-breed-identification/train/eab1276af0a54dfdf9d0311916ea813d.jpg \n", + " inflating: /data/dog-breed-identification/train/eabda7b78d4f4c71ad5686ca1589e8ad.jpg \n", + " inflating: /data/dog-breed-identification/train/eac45b011f55db4480995fb0643c54d2.jpg \n", + " inflating: /data/dog-breed-identification/train/ead4174291a75b3567c459ec847c87e9.jpg \n", + " inflating: /data/dog-breed-identification/train/eada9c7e7e1d9fde5df22b28e0ca9341.jpg \n", + " inflating: /data/dog-breed-identification/train/eadb3e22a37eb409670a69c6ad4de19a.jpg \n", + " inflating: /data/dog-breed-identification/train/eadb9e961a7835d4da014f7de12e7660.jpg \n", + " inflating: /data/dog-breed-identification/train/eae07088a41c3d064b7d099b2fe63f11.jpg \n", + " inflating: /data/dog-breed-identification/train/eaea761df48d7adef74af9c66a0fbf4c.jpg \n", + " inflating: /data/dog-breed-identification/train/eafb5a5a9adb3e595eba451f571bfd3d.jpg \n", + " inflating: /data/dog-breed-identification/train/eb0dd89eae4856af66b34d736e66849c.jpg \n", + " inflating: /data/dog-breed-identification/train/eb14b97d9dc86f30f3e12d8c6da185d7.jpg \n", + " inflating: /data/dog-breed-identification/train/eb1c2f4061d66878d2de2ce45a2382e5.jpg \n", + " inflating: /data/dog-breed-identification/train/eb1f87344865dfe1e89717a7e82ae18b.jpg \n", + " inflating: /data/dog-breed-identification/train/eb24ed36b0bc25db7e64a22763b3d20d.jpg \n", + " inflating: /data/dog-breed-identification/train/eb2f7c626e517b63a1fc07577d8db675.jpg \n", + " inflating: 
/data/dog-breed-identification/train/eb3d4ad6a8883206ec91271f983350fd.jpg \n", + " inflating: /data/dog-breed-identification/train/eb40629be9b4a1676e2277d19c43e4b0.jpg \n", + " inflating: /data/dog-breed-identification/train/eb44fc73de4f0d0f247c84284ae3c70d.jpg \n", + " inflating: /data/dog-breed-identification/train/eb4ddd17cbdda67c5bab81f6407fc1ba.jpg \n", + " inflating: /data/dog-breed-identification/train/eb53813edf96c446e5acaa06c0deca9e.jpg \n", + " inflating: /data/dog-breed-identification/train/eb5635af7d2f315b0f256aee4f41524e.jpg \n", + " inflating: /data/dog-breed-identification/train/eb5feb2ca80e43548e0cc0d0b4232bd8.jpg \n", + " inflating: /data/dog-breed-identification/train/eb62cd1de3f75c74bac9941e7f6e8a19.jpg \n", + " inflating: /data/dog-breed-identification/train/eb64056a3ab7ac1e47bfde7c2e8bd518.jpg \n", + " inflating: /data/dog-breed-identification/train/eb64bbda1cedd44da3a37c65e4eacb9a.jpg \n", + " inflating: /data/dog-breed-identification/train/eb68d9469a3925897dd1c06bbc5a40d9.jpg \n", + " inflating: /data/dog-breed-identification/train/eb7beb7875b92b0d8861826a420c8844.jpg \n", + " inflating: /data/dog-breed-identification/train/eb83b7e5d8515928cc6990d019cfdac7.jpg \n", + " inflating: /data/dog-breed-identification/train/eb909c348925d451cdcee84eeb21d15c.jpg \n", + " inflating: /data/dog-breed-identification/train/eb921996f4cc09c35970d0c18a415a2f.jpg \n", + " inflating: /data/dog-breed-identification/train/eb9dedfc29135debc82e82f159c23fa1.jpg \n", + " inflating: /data/dog-breed-identification/train/eba058daca887257dc71bfd2a0267d29.jpg \n", + " inflating: /data/dog-breed-identification/train/ebb21cae1e6090f99253d83ea5fbbdc9.jpg \n", + " inflating: /data/dog-breed-identification/train/ebb2555fb23a8fe20c44050380154456.jpg \n", + " inflating: /data/dog-breed-identification/train/ebb32c71dd92a3ad87d16006507fe779.jpg \n", + " inflating: /data/dog-breed-identification/train/ebb8c99c50ca5b48e010f0bda9a62c85.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ebc1adf5291f968954104e7d2c7e540a.jpg \n", + " inflating: /data/dog-breed-identification/train/ebc378abeb610e6d4f2a8ad0d731ccfb.jpg \n", + " inflating: /data/dog-breed-identification/train/ebc4076f8944a2451771bc2b84859dc4.jpg \n", + " inflating: /data/dog-breed-identification/train/ebd981f1b06aed2a15a1de27d9f2a5d2.jpg \n", + " inflating: /data/dog-breed-identification/train/ebdf65f440e3f572374c3fd8a0e6ad08.jpg \n", + " inflating: /data/dog-breed-identification/train/ebe9487f88c13d27fec7db2592adf044.jpg \n", + " inflating: /data/dog-breed-identification/train/ebe9a419fc7d2c67a934411960b88913.jpg \n", + " inflating: /data/dog-breed-identification/train/ebeb2198b1392407fc54e13a9aa7cd0c.jpg \n", + " inflating: /data/dog-breed-identification/train/ebefff12595565753410459216e604a9.jpg \n", + " inflating: /data/dog-breed-identification/train/ebf9a3eab9607d1ef1aaaec90de886c9.jpg \n", + " inflating: /data/dog-breed-identification/train/ebfc57d72a4167ec2f738f2adec52e31.jpg \n", + " inflating: /data/dog-breed-identification/train/ebff7c03c37ce6907387b62c68a1c138.jpg \n", + " inflating: /data/dog-breed-identification/train/ec01f50fdb5ca8749c5ee350232eceba.jpg \n", + " inflating: /data/dog-breed-identification/train/ec02eb6e9f6814f7d0f9dda0f642260a.jpg \n", + " inflating: /data/dog-breed-identification/train/ec0f39cdcc3647246f44ac890a39a7a7.jpg \n", + " inflating: /data/dog-breed-identification/train/ec14673c74fc8c62ee54ab67c9daf010.jpg \n", + " inflating: /data/dog-breed-identification/train/ec1654e55864cef34ec43800744f8ac2.jpg \n", + " inflating: /data/dog-breed-identification/train/ec1b8f7b1a012af943ca2ec8efc58fb8.jpg \n", + " inflating: /data/dog-breed-identification/train/ec1f2e671f8b4531b203642664173ffd.jpg \n", + " inflating: /data/dog-breed-identification/train/ec23516f1da6f2fa32048c0920a8ef7e.jpg \n", + " inflating: /data/dog-breed-identification/train/ec3180f25c4860682350127a1a0c3c4a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ec3445c0c4db2d219b2377cb0eb8f3dd.jpg \n", + " inflating: /data/dog-breed-identification/train/ec34bb628f628ac8f0c5cd9cef6dc391.jpg \n", + " inflating: /data/dog-breed-identification/train/ec3985e4104dbdb2b4628ed9379d1a56.jpg \n", + " inflating: /data/dog-breed-identification/train/ec3ca2186332da9d6bcd96b0caab2699.jpg \n", + " inflating: /data/dog-breed-identification/train/ec3ea54e26c17014faf12c362f9463d5.jpg \n", + " inflating: /data/dog-breed-identification/train/ec3fd4eea9a6a2c88908c33737442e4a.jpg \n", + " inflating: /data/dog-breed-identification/train/ec483170d4a9c12f9f7bd0d691de7c6d.jpg \n", + " inflating: /data/dog-breed-identification/train/ec4ef1518f6b572f23b9bf298c0b6081.jpg \n", + " inflating: /data/dog-breed-identification/train/ec53887c5887ec7be3693459ad3ba4ed.jpg \n", + " inflating: /data/dog-breed-identification/train/ec6b36707749098ea596f0e97a33f0c9.jpg \n", + " inflating: /data/dog-breed-identification/train/ec760d9e97fdc7816c3cb44ea9aa33ed.jpg \n", + " inflating: /data/dog-breed-identification/train/ec8ae6ea9408c47092ca95031ebc6acd.jpg \n", + " inflating: /data/dog-breed-identification/train/ec8f1e4f39702089c985261d50c0c29f.jpg \n", + " inflating: /data/dog-breed-identification/train/ec981093099a693748c575e418e7a0d2.jpg \n", + " inflating: /data/dog-breed-identification/train/eca1b1928e00e376ac39717ea933b57b.jpg \n", + " inflating: /data/dog-breed-identification/train/eca90f594e91b6689c0908e365cac5ce.jpg \n", + " inflating: /data/dog-breed-identification/train/ecaec07d20ed3a12fc32314cf83c9370.jpg \n", + " inflating: /data/dog-breed-identification/train/ecb279d5c137b233683c6e0b8d7c1b88.jpg \n", + " inflating: /data/dog-breed-identification/train/ecc1a9dcc73f00726ce6a7e79ef10a1c.jpg \n", + " inflating: /data/dog-breed-identification/train/ecca49fac07d1511d00f2e8a669b1b09.jpg \n", + " inflating: /data/dog-breed-identification/train/eccb17b497007b28217bda18db73cce1.jpg \n", + " inflating: 
/data/dog-breed-identification/train/eccc1f78f99e1f5fc1fa618be4ae7f67.jpg \n", + " inflating: /data/dog-breed-identification/train/ecccfab92b35ee51d1896eb35f5379a7.jpg \n", + " inflating: /data/dog-breed-identification/train/ecd2e5133a9dc2877d9d7d77581edb87.jpg \n", + " inflating: /data/dog-breed-identification/train/ecd514c6eefa5bb8e0b1f71724483e05.jpg \n", + " inflating: /data/dog-breed-identification/train/ecda5afc51397fea7339c68c7bdaf3c9.jpg \n", + " inflating: /data/dog-breed-identification/train/ece034fecdb47d3e30115e7b70dd36d1.jpg \n", + " inflating: /data/dog-breed-identification/train/ece252ae0a9e27590c6f30936f4166f6.jpg \n", + " inflating: /data/dog-breed-identification/train/ece3ba1b205d68f8860612f993ade240.jpg \n", + " inflating: /data/dog-breed-identification/train/ece92625596397f614310352584d7b74.jpg \n", + " inflating: /data/dog-breed-identification/train/ece94a0e987ed8316c4b3e4ed9e49477.jpg \n", + " inflating: /data/dog-breed-identification/train/ecec3a80a97ce33d7c6a25c96421a26d.jpg \n", + " inflating: /data/dog-breed-identification/train/ecefbe28f3b43d4394e6052802bcd042.jpg \n", + " inflating: /data/dog-breed-identification/train/ecf239b27efded2e309b147ebd5a032d.jpg \n", + " inflating: /data/dog-breed-identification/train/ecf4a6af2c19e363bca3324a99cfecff.jpg \n", + " inflating: /data/dog-breed-identification/train/ecffcf50e9df121652758b9371ca3792.jpg \n", + " inflating: /data/dog-breed-identification/train/ed119bc36eb02fbc0ed75e4d04440e0a.jpg \n", + " inflating: /data/dog-breed-identification/train/ed142580e8aef16f8bae1e29381d807a.jpg \n", + " inflating: /data/dog-breed-identification/train/ed154eec8a5e855fabc7c6e37edbac1c.jpg \n", + " inflating: /data/dog-breed-identification/train/ed1957de19321d3f09348c0c1e3321a8.jpg \n", + " inflating: /data/dog-breed-identification/train/ed1e8f8f6231a658f173cc2e4c74af86.jpg \n", + " inflating: /data/dog-breed-identification/train/ed25d380c4d647930c2d9322c78a05ea.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ed28715e3609b8bc3674a2191310eed1.jpg \n", + " inflating: /data/dog-breed-identification/train/ed38d69eab73e9a66526ee16231f687a.jpg \n", + " inflating: /data/dog-breed-identification/train/ed38d865e5ff611d6d1529652cc6464d.jpg \n", + " inflating: /data/dog-breed-identification/train/ed432c00b3109a6ce7abce09d98be1e2.jpg \n", + " inflating: /data/dog-breed-identification/train/ed46eed049a4fc710697517359b5a95c.jpg \n", + " inflating: /data/dog-breed-identification/train/ed52e8685e7befe1a346651c0f9aaeaf.jpg \n", + " inflating: /data/dog-breed-identification/train/ed563a008568fe9ca11f9dc662826c0c.jpg \n", + " inflating: /data/dog-breed-identification/train/ed5920d873da0891d86d9f3a25f2c2fa.jpg \n", + " inflating: /data/dog-breed-identification/train/ed61a3a5a304bd2f4e7038d8f95248ea.jpg \n", + " inflating: /data/dog-breed-identification/train/ed650c72271b58dbac0fd5d91ebd206b.jpg \n", + " inflating: /data/dog-breed-identification/train/ed6e570e10f027ff20f0ab3598153193.jpg \n", + " inflating: /data/dog-breed-identification/train/ed756805b14606580f9740ba00feb0fe.jpg \n", + " inflating: /data/dog-breed-identification/train/ed87717fd2068ed2c909afa65a2cfe2e.jpg \n", + " inflating: /data/dog-breed-identification/train/ed894588cade79d0253082f96f2a6310.jpg \n", + " inflating: /data/dog-breed-identification/train/ed89cbbf3f0c9383a7514962cd263aa9.jpg \n", + " inflating: /data/dog-breed-identification/train/ed8a0ac4b8566dec190f7308933a1d62.jpg \n", + " inflating: /data/dog-breed-identification/train/ed8bd115216fc760fd0ae45471791f7f.jpg \n", + " inflating: /data/dog-breed-identification/train/ed8fd9eef589a6fe4a0e74bf7ea77bd6.jpg \n", + " inflating: /data/dog-breed-identification/train/ed9c22ad21413691f548f5d72e7a76d9.jpg \n", + " inflating: /data/dog-breed-identification/train/eda44fc4fded6ff594aeb833634ea44c.jpg \n", + " inflating: /data/dog-breed-identification/train/eda47249346bbb28d89a524e3ff0c66b.jpg \n", + " inflating: 
/data/dog-breed-identification/train/eda5b27feb982dce425c1731a1174e1a.jpg \n", + " inflating: /data/dog-breed-identification/train/eda5e03fc28a7fd651cd1a7b864ae749.jpg \n", + " inflating: /data/dog-breed-identification/train/eda840df78ede0c000cd97b337b3d0db.jpg \n", + " inflating: /data/dog-breed-identification/train/edaba2592430f96c84ef0ae0817c4328.jpg \n", + " inflating: /data/dog-breed-identification/train/edafef97d647ec7c4b4e00ed5fd3dde5.jpg \n", + " inflating: /data/dog-breed-identification/train/edb50bbbbf53fb184cbc04f21dd97b81.jpg \n", + " inflating: /data/dog-breed-identification/train/edc0b13c478a6e50e9e8c3acd181d885.jpg \n", + " inflating: /data/dog-breed-identification/train/edc923ef3b9724d44481d959573b3afd.jpg \n", + " inflating: /data/dog-breed-identification/train/edccf5a23ce769b435817274226fe61f.jpg \n", + " inflating: /data/dog-breed-identification/train/edce5253f5d09d42011e5b0f84d1831b.jpg \n", + " inflating: /data/dog-breed-identification/train/edcf20f5ebcab09f0db88d0358b3ab56.jpg \n", + " inflating: /data/dog-breed-identification/train/edcfee2eaca6697d103ad03531aec0c3.jpg \n", + " inflating: /data/dog-breed-identification/train/edd3d0f1175568806a1a052ba2051fd7.jpg \n", + " inflating: /data/dog-breed-identification/train/edd8796ad2ee790f6ab20f65329007c3.jpg \n", + " inflating: /data/dog-breed-identification/train/ede38d1fbd47fcc1c408fbd7f0084fef.jpg \n", + " inflating: /data/dog-breed-identification/train/edeb63f8216445c1fd9572929e5e2557.jpg \n", + " inflating: /data/dog-breed-identification/train/edeb88b340e0b46a7c04161adbede2f0.jpg \n", + " inflating: /data/dog-breed-identification/train/edf5071a8b480af1212710874ad2c05e.jpg \n", + " inflating: /data/dog-breed-identification/train/edf5a678ccc0861f55342166e3190f1e.jpg \n", + " inflating: /data/dog-breed-identification/train/ee024b9ec41146caca0fd51b4acad8c0.jpg \n", + " inflating: /data/dog-breed-identification/train/ee08cef430080eb10b1388795f5576ad.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ee10d8e27f4333c12c821c0584c02fdc.jpg \n", + " inflating: /data/dog-breed-identification/train/ee1ce2280494dc07f516970abee34d55.jpg \n", + " inflating: /data/dog-breed-identification/train/ee2567e0e8424a216c45b34682a80472.jpg \n", + " inflating: /data/dog-breed-identification/train/ee307ebe9e98bac8b8a47be0d4803ba1.jpg \n", + " inflating: /data/dog-breed-identification/train/ee3e8a3091a0171973d8a9ae0faa5d28.jpg \n", + " inflating: /data/dog-breed-identification/train/ee3ffc332086a4e74209a97b7b1330da.jpg \n", + " inflating: /data/dog-breed-identification/train/ee41243497e7715fe3094324b5aceeda.jpg \n", + " inflating: /data/dog-breed-identification/train/ee4f333b5dbfab72ccd4225f81920e8f.jpg \n", + " inflating: /data/dog-breed-identification/train/ee5eb948999e9f9de1ee4497faa0ffff.jpg \n", + " inflating: /data/dog-breed-identification/train/ee6479969035687eb6decab728718656.jpg \n", + " inflating: /data/dog-breed-identification/train/ee64bb93cf0e12fc0549b89083f9347b.jpg \n", + " inflating: /data/dog-breed-identification/train/ee6712eed8607d6d5efd13276dcd1056.jpg \n", + " inflating: /data/dog-breed-identification/train/ee6b47bfe941098216982c8e42f2912b.jpg \n", + " inflating: /data/dog-breed-identification/train/ee6bc7f8018f819ec1a458ea10be820e.jpg \n", + " inflating: /data/dog-breed-identification/train/ee8315e3cef238ea380d197ceb26a476.jpg \n", + " inflating: /data/dog-breed-identification/train/ee88ac6bfed3abb8713bbc988b87eb80.jpg \n", + " inflating: /data/dog-breed-identification/train/ee8ed8bcfdf48a835e39bb8deca77249.jpg \n", + " inflating: /data/dog-breed-identification/train/ee92839020f1795e7eab77358e2528f1.jpg \n", + " inflating: /data/dog-breed-identification/train/eea74111b12a1cc83201158a72cc141a.jpg \n", + " inflating: /data/dog-breed-identification/train/eeaccf7f12c1e959de200bf2849ec101.jpg \n", + " inflating: /data/dog-breed-identification/train/eeb4aa1fdbefcc783845be89b94bdab9.jpg \n", + " inflating: 
/data/dog-breed-identification/train/eebf13123af2e0241f64d35f8cddea86.jpg \n", + " inflating: /data/dog-breed-identification/train/eec0067218af5d0cca32a504501b98f1.jpg \n", + " inflating: /data/dog-breed-identification/train/eec01ae653b0373466a2719f8d56d8d1.jpg \n", + " inflating: /data/dog-breed-identification/train/eec7468f173bcd82691172de4f8233d8.jpg \n", + " inflating: /data/dog-breed-identification/train/eeca7236c506c6c3e4405c959944f3c1.jpg \n", + " inflating: /data/dog-breed-identification/train/eecab13ff6ae86429d808115d161d455.jpg \n", + " inflating: /data/dog-breed-identification/train/eecbc546943281453c0e598587ec674a.jpg \n", + " inflating: /data/dog-breed-identification/train/eed0a81e48da6883e1f3f248b3d398f2.jpg \n", + " inflating: /data/dog-breed-identification/train/eee79170e91e4fc80e2faa8b0d4321ee.jpg \n", + " inflating: /data/dog-breed-identification/train/eeeb378e507ec043c3a35ddeb05c8536.jpg \n", + " inflating: /data/dog-breed-identification/train/eeef7ff1d9b702cc5eb3befca986f723.jpg \n", + " inflating: /data/dog-breed-identification/train/ef029489fc2941792a5460afd7345fcc.jpg \n", + " inflating: /data/dog-breed-identification/train/ef0baf24724a1e0f87543909d5c7dafb.jpg \n", + " inflating: /data/dog-breed-identification/train/ef0ca2e519acbcd5f7f03a97acdf0ef1.jpg \n", + " inflating: /data/dog-breed-identification/train/ef0cecbeaee4a08448d449a7c1e65f17.jpg \n", + " inflating: /data/dog-breed-identification/train/ef1129cb127bff04fd3940d94f2a259b.jpg \n", + " inflating: /data/dog-breed-identification/train/ef14fa3af6b0cfb89b4e0ec98f75e586.jpg \n", + " inflating: /data/dog-breed-identification/train/ef16bf875e06977bfe0d4059326afffc.jpg \n", + " inflating: /data/dog-breed-identification/train/ef1bdfc864035ed213fd676c24ba14f0.jpg \n", + " inflating: /data/dog-breed-identification/train/ef22e67b6d82c04f2d17249e42769d35.jpg \n", + " inflating: /data/dog-breed-identification/train/ef29bc9846ee0f3599ee6c3791e5eb30.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ef3dc6a3af2210c9cc59e2bbdc9e9ae1.jpg \n", + " inflating: /data/dog-breed-identification/train/ef4efe7980d873475501b56804670c6a.jpg \n", + " inflating: /data/dog-breed-identification/train/ef50a04a3363a88a9ab1c8e5f2bfd687.jpg \n", + " inflating: /data/dog-breed-identification/train/ef519fe81769185abaf3bf9615f31b02.jpg \n", + " inflating: /data/dog-breed-identification/train/ef608e79d3e91f00896e2584c66a1709.jpg \n", + " inflating: /data/dog-breed-identification/train/ef68f5e897135b01ea02853efd8e8344.jpg \n", + " inflating: /data/dog-breed-identification/train/ef71aaad573e1e289ec9ce92d844f96a.jpg \n", + " inflating: /data/dog-breed-identification/train/ef761ccf005b7bb9bd6a094df7e07df5.jpg \n", + " inflating: /data/dog-breed-identification/train/ef80922725c7fb9865005222ef30714e.jpg \n", + " inflating: /data/dog-breed-identification/train/ef8639d4717248c8db31398c26580537.jpg \n", + " inflating: /data/dog-breed-identification/train/ef8c805d9168bdcde66239d35834dd88.jpg \n", + " inflating: /data/dog-breed-identification/train/ef91df1385e44b81c78dd345b1a17f14.jpg \n", + " inflating: /data/dog-breed-identification/train/efb995a30e2672d00cd70273e911590f.jpg \n", + " inflating: /data/dog-breed-identification/train/efbabde6fc97bb48c8c8b6b75bfaea59.jpg \n", + " inflating: /data/dog-breed-identification/train/efd5885fbb8243c327c98eb9505742bd.jpg \n", + " inflating: /data/dog-breed-identification/train/efd68fb69d154ed0ff2dfea8f782f0d3.jpg \n", + " inflating: /data/dog-breed-identification/train/efe33a3e4c939748548a052f5f6d83bd.jpg \n", + " inflating: /data/dog-breed-identification/train/efe80b984a40901f78132386e1ac3c4b.jpg \n", + " inflating: /data/dog-breed-identification/train/efec2f66576df7aa036d77ad1fafc8bf.jpg \n", + " inflating: /data/dog-breed-identification/train/efec77ba29d2407d82f808cf7ca79eb9.jpg \n", + " inflating: /data/dog-breed-identification/train/efeeefcc693ae837060c1c103195c4a6.jpg \n", + " inflating: 
/data/dog-breed-identification/train/eff315e6d8b855fc2166ab4fc2a192d5.jpg \n", + " inflating: /data/dog-breed-identification/train/eff6f9f023d63b83056ee3cabd204d9a.jpg \n", + " inflating: /data/dog-breed-identification/train/eff87e94b7ba6bc2c8ddfc5e135c1892.jpg \n", + " inflating: /data/dog-breed-identification/train/f0019323b5b8b321160c8199bea41118.jpg \n", + " inflating: /data/dog-breed-identification/train/f008c04f6d72140e1c40b8cf6bfa21d1.jpg \n", + " inflating: /data/dog-breed-identification/train/f019e943f7b7267ecd2665eeea73a4ce.jpg \n", + " inflating: /data/dog-breed-identification/train/f01dce3264ed34d681815a9d0b03f7fc.jpg \n", + " inflating: /data/dog-breed-identification/train/f0216b542b2feaf7fea3412ce949536e.jpg \n", + " inflating: /data/dog-breed-identification/train/f02c7f7b30bf8535b9feed3accacccb4.jpg \n", + " inflating: /data/dog-breed-identification/train/f04043819ed833d9257fa14f4d39c91f.jpg \n", + " inflating: /data/dog-breed-identification/train/f0424f62ba3e580e9f34de645fb7cad4.jpg \n", + " inflating: /data/dog-breed-identification/train/f055e34fcd3c7a941868a371821feb14.jpg \n", + " inflating: /data/dog-breed-identification/train/f056299c0c659aa73947f76837369a9f.jpg \n", + " inflating: /data/dog-breed-identification/train/f056ee6bc913fba2697dabf37c1c8531.jpg \n", + " inflating: /data/dog-breed-identification/train/f056f4655ec9b7b942c1fcd6d4ea61f5.jpg \n", + " inflating: /data/dog-breed-identification/train/f0634d128e68061c0220f224c9ba1228.jpg \n", + " inflating: /data/dog-breed-identification/train/f0649324ca9ee431d8c37ef361a0d9fd.jpg \n", + " inflating: /data/dog-breed-identification/train/f065f6e63169c7d2ec46174000a2d87e.jpg \n", + " inflating: /data/dog-breed-identification/train/f069ed4df76d788ff43403f1ba4fa4a8.jpg \n", + " inflating: /data/dog-breed-identification/train/f06d9112ed14edb4038daa54f1b1abd5.jpg \n", + " inflating: /data/dog-breed-identification/train/f0779cbe759a82f4927f60e47c9f1c81.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f079fbbb8684c81747227bef5303c56a.jpg \n", + " inflating: /data/dog-breed-identification/train/f07d7513b336f4cf1ac5be3b46eecd66.jpg \n", + " inflating: /data/dog-breed-identification/train/f08adfc00c474e9296e74e8cc0c3bf5c.jpg \n", + " inflating: /data/dog-breed-identification/train/f09536e93c84e70c1a6e144452ed98fb.jpg \n", + " inflating: /data/dog-breed-identification/train/f09b337e8e9424b208854b361f28e39e.jpg \n", + " inflating: /data/dog-breed-identification/train/f0aa6b58cc701bbf31d090426cb4ecb4.jpg \n", + " inflating: /data/dog-breed-identification/train/f0b08d4f04e66b73526313b7cf0a6b08.jpg \n", + " inflating: /data/dog-breed-identification/train/f0dafbc65d9ddbb847863d8d510b3948.jpg \n", + " inflating: /data/dog-breed-identification/train/f0e2319cf6cf322ea6f5849eb4102fac.jpg \n", + " inflating: /data/dog-breed-identification/train/f0ede8a20fcaded1594e274d98670e3b.jpg \n", + " inflating: /data/dog-breed-identification/train/f0efab499435ecd4e634b72f539fde9b.jpg \n", + " inflating: /data/dog-breed-identification/train/f10aebe770d5533a5f7224a0bd816716.jpg \n", + " inflating: /data/dog-breed-identification/train/f111b639af2c7bd5e7e91e7595913894.jpg \n", + " inflating: /data/dog-breed-identification/train/f12ffab8a865e6b4d8afcb0537a746ad.jpg \n", + " inflating: /data/dog-breed-identification/train/f13e0ce50756b5eb78404b8ee5626c87.jpg \n", + " inflating: /data/dog-breed-identification/train/f14e83eb54dc159f17ec0a676214a514.jpg \n", + " inflating: /data/dog-breed-identification/train/f14f1200ba75d75293d9e1ca432f90bf.jpg \n", + " inflating: /data/dog-breed-identification/train/f1512dff4effc6df01f50baa1135c139.jpg \n", + " inflating: /data/dog-breed-identification/train/f16a87262b19602f017d48f050aed0de.jpg \n", + " inflating: /data/dog-breed-identification/train/f1710a76fbc4ac9d47561e2b48dccd11.jpg \n", + " inflating: /data/dog-breed-identification/train/f1842cfc3bdbf485847574cb1b133d29.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f184f4ea0aad4cac48670bede5868054.jpg \n", + " inflating: /data/dog-breed-identification/train/f193aef29f983b44a4458c03ec309ca1.jpg \n", + " inflating: /data/dog-breed-identification/train/f194e48d99e5e0f279eb8cd5e6872e36.jpg \n", + " inflating: /data/dog-breed-identification/train/f19b045bcf373d6fcf05c80247850d50.jpg \n", + " inflating: /data/dog-breed-identification/train/f19ca47ee303f9e770a298b26c993ec3.jpg \n", + " inflating: /data/dog-breed-identification/train/f1a18e29959bc5af735ebb5045f91b25.jpg \n", + " inflating: /data/dog-breed-identification/train/f1a94cfd3e3c67736b961587e1795a1a.jpg \n", + " inflating: /data/dog-breed-identification/train/f1a9a63f0ec70494ec2050de0fd39402.jpg \n", + " inflating: /data/dog-breed-identification/train/f1b4c01dfbe424368a7d9404e1f0f6c3.jpg \n", + " inflating: /data/dog-breed-identification/train/f1ca2ebd997146e85932ce3cc6e8f064.jpg \n", + " inflating: /data/dog-breed-identification/train/f1e4f7a6a490bd1bb2bfcb5586902e9e.jpg \n", + " inflating: /data/dog-breed-identification/train/f1e82eac53a1fc3f2e3e4b151537c5ed.jpg \n", + " inflating: /data/dog-breed-identification/train/f1f03709a6fb351e6255cabc277c5fc4.jpg \n", + " inflating: /data/dog-breed-identification/train/f1f8d1fad725a613f6e841d3f49ee415.jpg \n", + " inflating: /data/dog-breed-identification/train/f2039c11f872cbd5e01c2bbb959f7d45.jpg \n", + " inflating: /data/dog-breed-identification/train/f20709a67b3a33528b56cfbb4ef3237b.jpg \n", + " inflating: /data/dog-breed-identification/train/f20c9c183552b734bc3c5904bfa32d09.jpg \n", + " inflating: /data/dog-breed-identification/train/f21952d038bd3c41c89def1a1d92385e.jpg \n", + " inflating: /data/dog-breed-identification/train/f220628a83618e6cc13e78361e833361.jpg \n", + " inflating: /data/dog-breed-identification/train/f2215c687883ed200475bc1bdc448622.jpg \n", + " inflating: /data/dog-breed-identification/train/f234c1a8c84a833c77a7774da09cbb1a.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f23dffefcd4ae2dee8a5dbf67e41b699.jpg \n", + " inflating: /data/dog-breed-identification/train/f243cb6ec376a19f7de80e9c1a248518.jpg \n", + " inflating: /data/dog-breed-identification/train/f24f230f4a39f5411546ab1da48cc196.jpg \n", + " inflating: /data/dog-breed-identification/train/f2580ac6cf1bb3317c661133fc3bc7b6.jpg \n", + " inflating: /data/dog-breed-identification/train/f259fce0c617f40d26b75947f71f489c.jpg \n", + " inflating: /data/dog-breed-identification/train/f25a4184e33741e745175fd1d7c6d172.jpg \n", + " inflating: /data/dog-breed-identification/train/f25b4df00f4afe17ad37fb49a86c9ec1.jpg \n", + " inflating: /data/dog-breed-identification/train/f27909f3037a7b5443193d7101277fb6.jpg \n", + " inflating: /data/dog-breed-identification/train/f27b4323bae39abf810bcd145d8de276.jpg \n", + " inflating: /data/dog-breed-identification/train/f28c40401e84e41f5aa4c60fea02a575.jpg \n", + " inflating: /data/dog-breed-identification/train/f28df9c49c705281298bc288ffab24b2.jpg \n", + " inflating: /data/dog-breed-identification/train/f28efce22982ed7867a67df7d0762c58.jpg \n", + " inflating: /data/dog-breed-identification/train/f293c3e2fcc58e1d539aa71c56be8d55.jpg \n", + " inflating: /data/dog-breed-identification/train/f2ab1692e3ef27cffb8cbb7ad6d99092.jpg \n", + " inflating: /data/dog-breed-identification/train/f2ad1868e6784b428fdccffe0314867f.jpg \n", + " inflating: /data/dog-breed-identification/train/f2b4247f7397d329557b0356b5645e70.jpg \n", + " inflating: /data/dog-breed-identification/train/f2b5ba1f32494b942731fb5248e114a8.jpg \n", + " inflating: /data/dog-breed-identification/train/f2b70abdaeaae871fba924ab80d3253e.jpg \n", + " inflating: /data/dog-breed-identification/train/f2caac437e0ad55c130321aac6ba375b.jpg \n", + " inflating: /data/dog-breed-identification/train/f2cb6f6dfc542d47bfcea63b0dd78c6e.jpg \n", + " inflating: /data/dog-breed-identification/train/f2d229962b94228f494c9cb3c0ffe740.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f2d2443edb0bffec560ed1339613503d.jpg \n", + " inflating: /data/dog-breed-identification/train/f2d5c11258efb9e8de45a81a6982f8f2.jpg \n", + " inflating: /data/dog-breed-identification/train/f2da0cd3a1f77cc96505d387d9dd731b.jpg \n", + " inflating: /data/dog-breed-identification/train/f2dd5a812a7126c65887f87b7d0c0601.jpg \n", + " inflating: /data/dog-breed-identification/train/f2ef034a335cdd15cda34408d3e89998.jpg \n", + " inflating: /data/dog-breed-identification/train/f2f05d15bf695d5bb45ca909887230f9.jpg \n", + " inflating: /data/dog-breed-identification/train/f2f49abc5f9f5a0f1f5d25807e76d909.jpg \n", + " inflating: /data/dog-breed-identification/train/f2fb3f693de68e2564fbff2890f7ad40.jpg \n", + " inflating: /data/dog-breed-identification/train/f2fd565d275c740f2f4dd91f0759b9df.jpg \n", + " inflating: /data/dog-breed-identification/train/f312a698c57b976f011a6d54607e81b7.jpg \n", + " inflating: /data/dog-breed-identification/train/f316697874ee29b9ee1897979a58e90c.jpg \n", + " inflating: /data/dog-breed-identification/train/f31950a05e0c02c69042b6d3bcb3682d.jpg \n", + " inflating: /data/dog-breed-identification/train/f320c79c2f0df7a61cdfcf689f8dc91b.jpg \n", + " inflating: /data/dog-breed-identification/train/f32e24c17e03d5eb499875387f049ca9.jpg \n", + " inflating: /data/dog-breed-identification/train/f33b851311543ba4197c7bcb0ea5757c.jpg \n", + " inflating: /data/dog-breed-identification/train/f33c92eca07a03a242b4a9986fc66d9b.jpg \n", + " inflating: /data/dog-breed-identification/train/f340906abda537fa75a9b73ba2742a88.jpg \n", + " inflating: /data/dog-breed-identification/train/f343a8b0d587a5f79c7da06732fcb347.jpg \n", + " inflating: /data/dog-breed-identification/train/f34628fdce6eb2c69ad9c65d0f44f2f7.jpg \n", + " inflating: /data/dog-breed-identification/train/f35fd9d0193122c919f159e41769a4ca.jpg \n", + " inflating: /data/dog-breed-identification/train/f36a3d05e27b5e206ac89b4a7e133e57.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f375e6363bc21dcd3cb65637c7855e9c.jpg \n", + " inflating: /data/dog-breed-identification/train/f3784274b0248ee82c3eba0ef02f5191.jpg \n", + " inflating: /data/dog-breed-identification/train/f37af9dee180ed1195985decf4ef7111.jpg \n", + " inflating: /data/dog-breed-identification/train/f37bcde663c39b566fb9ce69c074a7e8.jpg \n", + " inflating: /data/dog-breed-identification/train/f382107c98a4aa6d80a5b6a6df18e669.jpg \n", + " inflating: /data/dog-breed-identification/train/f3831b2287b3858b44885ec97b37c2c5.jpg \n", + " inflating: /data/dog-breed-identification/train/f38dc765277ae9dc0bcc677dd8685c3c.jpg \n", + " inflating: /data/dog-breed-identification/train/f3929a0a7c3fede11ceb5d9e379778db.jpg \n", + " inflating: /data/dog-breed-identification/train/f39409a147cf719a7e34712034b41625.jpg \n", + " inflating: /data/dog-breed-identification/train/f3972dd0068dd6e3b9b2da13c71c5af3.jpg \n", + " inflating: /data/dog-breed-identification/train/f3973f4c9756bbad4b3b6d9ea459a15d.jpg \n", + " inflating: /data/dog-breed-identification/train/f39fbe4774a9b7662cedca6f9759e023.jpg \n", + " inflating: /data/dog-breed-identification/train/f3aa61d28f81531b81f15ee65d91ecec.jpg \n", + " inflating: /data/dog-breed-identification/train/f3afd4f69038b249042cd481f0b5813a.jpg \n", + " inflating: /data/dog-breed-identification/train/f3b343a1d0a394e5d29a74f31f9922c8.jpg \n", + " inflating: /data/dog-breed-identification/train/f3b47556eea0151354f1ee1b40762fce.jpg \n", + " inflating: /data/dog-breed-identification/train/f3c0501b67da21916640efb617967dd4.jpg \n", + " inflating: /data/dog-breed-identification/train/f3c1bb36d86a8fd1436bf3ff219c8686.jpg \n", + " inflating: /data/dog-breed-identification/train/f3c2b2523885755ccfad67ff51308c98.jpg \n", + " inflating: /data/dog-breed-identification/train/f3c31d575505827557a5fe80f7bba070.jpg \n", + " inflating: /data/dog-breed-identification/train/f3c645a41e15598e326c368253d0703f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f3cf90e3a8a78d31850eca467122183a.jpg \n", + " inflating: /data/dog-breed-identification/train/f3d517cae12f545fd445a0cfaf94b505.jpg \n", + " inflating: /data/dog-breed-identification/train/f3d77f8157d981c44a1290b57cc03ac0.jpg \n", + " inflating: /data/dog-breed-identification/train/f3d98aab47dfabd6c98750f45f6c4038.jpg \n", + " inflating: /data/dog-breed-identification/train/f3e172b6c77d1d5b191722de535bc726.jpg \n", + " inflating: /data/dog-breed-identification/train/f3e1cc6183fe3457dd644320d730b3a4.jpg \n", + " inflating: /data/dog-breed-identification/train/f3e5d0f516c56df7b58b4488f13bb00f.jpg \n", + " inflating: /data/dog-breed-identification/train/f3e83444819548799b1f77ac363a4d4a.jpg \n", + " inflating: /data/dog-breed-identification/train/f3f01549c30b4d1ae3e6701b729ab080.jpg \n", + " inflating: /data/dog-breed-identification/train/f3f295d33149977b1d67b8a3b6484ea7.jpg \n", + " inflating: /data/dog-breed-identification/train/f3ffda74c6fcd4a8ff546bac62060276.jpg \n", + " inflating: /data/dog-breed-identification/train/f4045ccfa988393752a265e086979e6c.jpg \n", + " inflating: /data/dog-breed-identification/train/f40a0d368a62d9e2d2fb2d7583368538.jpg \n", + " inflating: /data/dog-breed-identification/train/f411d5b28c9174da6ec9c0a85837fbfb.jpg \n", + " inflating: /data/dog-breed-identification/train/f41a4709ea45d334e7fd4f61f4ca01c0.jpg \n", + " inflating: /data/dog-breed-identification/train/f41a7b3079bbeda29028c5919e0d84af.jpg \n", + " inflating: /data/dog-breed-identification/train/f41b28e57b9838e049dcaa3e3f11d9c9.jpg \n", + " inflating: /data/dog-breed-identification/train/f42938973bccdb8f6f556ea160a9357f.jpg \n", + " inflating: /data/dog-breed-identification/train/f42afcc1fabac064a07c6a5b7ae2766e.jpg \n", + " inflating: /data/dog-breed-identification/train/f430cdcc2b98a6e3566e71b27ae60e71.jpg \n", + " inflating: /data/dog-breed-identification/train/f441dc37af37eb01c9fa3e0950228642.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f45060c4cac8b690aaa61a510e1d30c5.jpg \n", + " inflating: /data/dog-breed-identification/train/f4520fd1cb3d76b7d36acdb63dd964ca.jpg \n", + " inflating: /data/dog-breed-identification/train/f46f1e81fe9224c6ee6206ff24aa6b31.jpg \n", + " inflating: /data/dog-breed-identification/train/f471dbea5a623c425c8d14b885616ba0.jpg \n", + " inflating: /data/dog-breed-identification/train/f48432fa098e002cc0d21d1258633697.jpg \n", + " inflating: /data/dog-breed-identification/train/f485a9d5afe66b74092e8178f67d34c0.jpg \n", + " inflating: /data/dog-breed-identification/train/f485d0ea64f8950b38954c17cfd03d8f.jpg \n", + " inflating: /data/dog-breed-identification/train/f48ebf5f79e746455810091b884e8eb8.jpg \n", + " inflating: /data/dog-breed-identification/train/f4a20b0c57e1fc7214548b4556a467f5.jpg \n", + " inflating: /data/dog-breed-identification/train/f4a331375dc879c5bfc682221d143fb0.jpg \n", + " inflating: /data/dog-breed-identification/train/f4a9095dabad1799e9747f623bbe0e86.jpg \n", + " inflating: /data/dog-breed-identification/train/f4aa5ed5ebff85d0f8b4d1ed111831df.jpg \n", + " inflating: /data/dog-breed-identification/train/f4c3afa01a61b3471656b42f8d5cb1e1.jpg \n", + " inflating: /data/dog-breed-identification/train/f4c570b8d49c119bbb2e988b240c95a4.jpg \n", + " inflating: /data/dog-breed-identification/train/f4d535251be4d6195c3fc5d6a2c31e03.jpg \n", + " inflating: /data/dog-breed-identification/train/f4e8482c69e4e562d86149b45da8565e.jpg \n", + " inflating: /data/dog-breed-identification/train/f4f070765892e18ce99343779e9058cb.jpg \n", + " inflating: /data/dog-breed-identification/train/f4f3f707907977e9f619540a51e6d9fa.jpg \n", + " inflating: /data/dog-breed-identification/train/f5020914618ca6445b1bb1ca2e0a4d51.jpg \n", + " inflating: /data/dog-breed-identification/train/f5047cc73f0f6691dc0e08c93ebe26da.jpg \n", + " inflating: /data/dog-breed-identification/train/f5091fcbb9e639158ad406a3d793ff62.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f50c3a2915a6744f66237534db3d7779.jpg \n", + " inflating: /data/dog-breed-identification/train/f5171a2251d123f01566ace3a1107754.jpg \n", + " inflating: /data/dog-breed-identification/train/f523e20f7fac8e1d9909d0956a468467.jpg \n", + " inflating: /data/dog-breed-identification/train/f52a0aaff41a517be216bf41c967a751.jpg \n", + " inflating: /data/dog-breed-identification/train/f52e47ff39ef157d700d51d62bea79d0.jpg \n", + " inflating: /data/dog-breed-identification/train/f5373dbe567183432408067cb9dc0957.jpg \n", + " inflating: /data/dog-breed-identification/train/f5379f2502e90983e6361d4acdf0a56e.jpg \n", + " inflating: /data/dog-breed-identification/train/f547c95af25d09dc444943724396a68a.jpg \n", + " inflating: /data/dog-breed-identification/train/f550e44352e4eeb99932441678a8af34.jpg \n", + " inflating: /data/dog-breed-identification/train/f55d584110183b570d7b9d03e355a403.jpg \n", + " inflating: /data/dog-breed-identification/train/f56093b4b1f055c76701c12620ace957.jpg \n", + " inflating: /data/dog-breed-identification/train/f56a10d2912140974338c6330e97e466.jpg \n", + " inflating: /data/dog-breed-identification/train/f56e24e008d532831fa5fa006618025b.jpg \n", + " inflating: /data/dog-breed-identification/train/f573d2958310513ce7113099598c7707.jpg \n", + " inflating: /data/dog-breed-identification/train/f58364e9a181f9d7e149f4cec3e9ec80.jpg \n", + " inflating: /data/dog-breed-identification/train/f58531d21504bc086aa5d0e2c945f988.jpg \n", + " inflating: /data/dog-breed-identification/train/f588238227e5822b0c83a8355eccd067.jpg \n", + " inflating: /data/dog-breed-identification/train/f589bf0f5d13407f1729e8ee342834f4.jpg \n", + " inflating: /data/dog-breed-identification/train/f589e77c9b0facc8ece5f07e3c04fc46.jpg \n", + " inflating: /data/dog-breed-identification/train/f58c4c4e6670e74fac63514cd9ac8ab9.jpg \n", + " inflating: /data/dog-breed-identification/train/f591763666758b806f71459819ce8602.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f5940c13d959f10561fda7afad8510c2.jpg \n", + " inflating: /data/dog-breed-identification/train/f597a565d8b2e0d4ff1717a7d042472f.jpg \n", + " inflating: /data/dog-breed-identification/train/f59c6ac65a3b996404ccce16dad310e0.jpg \n", + " inflating: /data/dog-breed-identification/train/f59d3ddb3652e870bea7cb3b15894708.jpg \n", + " inflating: /data/dog-breed-identification/train/f5a88560838b1fa960f2bdf8bcc33621.jpg \n", + " inflating: /data/dog-breed-identification/train/f5aae143adf81f97da05095d78d73350.jpg \n", + " inflating: /data/dog-breed-identification/train/f5b981b89e40c702cbaaedb8b9fc7739.jpg \n", + " inflating: /data/dog-breed-identification/train/f5b9b43b95fbf49626ede41a02f6cf1c.jpg \n", + " inflating: /data/dog-breed-identification/train/f5bc69deded716bae5e2134c81d9a606.jpg \n", + " inflating: /data/dog-breed-identification/train/f5bf73d1e0ef05b8c5fba19d051fc9f1.jpg \n", + " inflating: /data/dog-breed-identification/train/f5c21557b2fdf621b48720357ea35df2.jpg \n", + " inflating: /data/dog-breed-identification/train/f5ced876dc38fce8230cb1c721e29973.jpg \n", + " inflating: /data/dog-breed-identification/train/f5cef8edf2725dc143ead4774ddff74f.jpg \n", + " inflating: /data/dog-breed-identification/train/f5d05878affab9747f86aa1f13c52bd2.jpg \n", + " inflating: /data/dog-breed-identification/train/f5d4b4e4770d3922b6b26dc46bdc57af.jpg \n", + " inflating: /data/dog-breed-identification/train/f5d4d250dfe2f60b155ec2f40df73936.jpg \n", + " inflating: /data/dog-breed-identification/train/f5dcfc042f10a6a57a54d42a4a1f2406.jpg \n", + " inflating: /data/dog-breed-identification/train/f5fa365252241c8be7b95d04444d5bed.jpg \n", + " inflating: /data/dog-breed-identification/train/f5feb8aae47a107bc2e8ca4075286ae5.jpg \n", + " inflating: /data/dog-breed-identification/train/f6082c4fcbaec253ffe964f13aba012d.jpg \n", + " inflating: /data/dog-breed-identification/train/f60ad1508afefc6ce94cd896ff1cdb3f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f610363215fd1f00edef4bbf9b3caba9.jpg \n", + " inflating: /data/dog-breed-identification/train/f61250c6e9263857befe7f256f9e909a.jpg \n", + " inflating: /data/dog-breed-identification/train/f61517f58aa8a9d4248174bd75d891b1.jpg \n", + " inflating: /data/dog-breed-identification/train/f6175a0ef289ef111b225a84240f2e4e.jpg \n", + " inflating: /data/dog-breed-identification/train/f61d861726155de0ebd1a8e1e892f5af.jpg \n", + " inflating: /data/dog-breed-identification/train/f6209cf3313e3368c376dbfa13f335cc.jpg \n", + " inflating: /data/dog-breed-identification/train/f62145318e69a5061563eca2eb806d6e.jpg \n", + " inflating: /data/dog-breed-identification/train/f62f6d147bb05a257c05ffe418ae4e45.jpg \n", + " inflating: /data/dog-breed-identification/train/f63ccfe3730fabe9537060a5e9ccbd24.jpg \n", + " inflating: /data/dog-breed-identification/train/f641553e92efc6824ca4b9cef837ba46.jpg \n", + " inflating: /data/dog-breed-identification/train/f6430905d7328cb19856d0fe633f6493.jpg \n", + " inflating: /data/dog-breed-identification/train/f644ec58aab5f57925bd55969f4b925b.jpg \n", + " inflating: /data/dog-breed-identification/train/f64d8dfc5544c9fb4bdc578a7756dfd7.jpg \n", + " inflating: /data/dog-breed-identification/train/f6505af99611986f0141aa986e71e766.jpg \n", + " inflating: /data/dog-breed-identification/train/f6575543e0456836d1fc6871e586a6e0.jpg \n", + " inflating: /data/dog-breed-identification/train/f662df0beaf91c254157863bf00ac62e.jpg \n", + " inflating: /data/dog-breed-identification/train/f6652a6cf81ed972d896b7c51e6aa39f.jpg \n", + " inflating: /data/dog-breed-identification/train/f66c888ffa1ff06fcc2eddafb3e13aaf.jpg \n", + " inflating: /data/dog-breed-identification/train/f66de1bea3790719f70ef37779889a02.jpg \n", + " inflating: /data/dog-breed-identification/train/f679cd9c45865bd983920f79a2d85de3.jpg \n", + " inflating: /data/dog-breed-identification/train/f67c86c7429bfe3a576c2468d243b29f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f684cc75fb7996f315832975a3e07f67.jpg \n", + " inflating: /data/dog-breed-identification/train/f690468c2bf0020c6e4ee135278cf71c.jpg \n", + " inflating: /data/dog-breed-identification/train/f69078e04e3d5e8d7689820de78f69ff.jpg \n", + " inflating: /data/dog-breed-identification/train/f692fcf95607b6e82528b0e90d705725.jpg \n", + " inflating: /data/dog-breed-identification/train/f69495e6688f3e331ecddb474367e923.jpg \n", + " inflating: /data/dog-breed-identification/train/f6a32d6b0c663f2f949520149232a329.jpg \n", + " inflating: /data/dog-breed-identification/train/f6b08ab15d3448bcfa29aa59a552a742.jpg \n", + " inflating: /data/dog-breed-identification/train/f6b88d6bb18f14a24c8cd24b941cf0bf.jpg \n", + " inflating: /data/dog-breed-identification/train/f6ba94d33566242b8f4abdfcbd8a6bcb.jpg \n", + " inflating: /data/dog-breed-identification/train/f6bdbf94ffea753f4f7638784faf01cc.jpg \n", + " inflating: /data/dog-breed-identification/train/f6c0c3d4f34faf5caa5de6e81305de3b.jpg \n", + " inflating: /data/dog-breed-identification/train/f6c308322ba8e80c9ad20f9b292a8c4c.jpg \n", + " inflating: /data/dog-breed-identification/train/f6c6e52ad7508381c9b545e7e4f77d8d.jpg \n", + " inflating: /data/dog-breed-identification/train/f6d0770d25962d54d0cfedbfe14949a0.jpg \n", + " inflating: /data/dog-breed-identification/train/f6d25f988a2c379fe7c7ffc52fc39035.jpg \n", + " inflating: /data/dog-breed-identification/train/f6e186742c1885a3c4a66396195d8a18.jpg \n", + " inflating: /data/dog-breed-identification/train/f6e3a909254785d410b2418647034a5a.jpg \n", + " inflating: /data/dog-breed-identification/train/f6eed8d840086c27a17d7bc4f276dd56.jpg \n", + " inflating: /data/dog-breed-identification/train/f6f4398177ec0db196ab4ae92d79c6ea.jpg \n", + " inflating: /data/dog-breed-identification/train/f6f603600c231d6dd529c54e45a7b2c5.jpg \n", + " inflating: /data/dog-breed-identification/train/f706682a30021cc74cd9416dac25e943.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f7070114b6f8cb965ba2dfb23b511b18.jpg \n", + " inflating: /data/dog-breed-identification/train/f708b2f18826cc754451e07de4ba148c.jpg \n", + " inflating: /data/dog-breed-identification/train/f70f325bf6fc8aad05ed7a99212660fc.jpg \n", + " inflating: /data/dog-breed-identification/train/f71e0bbcb7b9d348986393612e1b6800.jpg \n", + " inflating: /data/dog-breed-identification/train/f72555509a6ffd85394b1e6417c728df.jpg \n", + " inflating: /data/dog-breed-identification/train/f7266a5b52cf22b11274b5fe66a52979.jpg \n", + " inflating: /data/dog-breed-identification/train/f72944994eecb182f41456a820149950.jpg \n", + " inflating: /data/dog-breed-identification/train/f72df3c3daa677aa76027366ad55721f.jpg \n", + " inflating: /data/dog-breed-identification/train/f738d8472928f06c5a3743914ac5d458.jpg \n", + " inflating: /data/dog-breed-identification/train/f74471ba376e55adf638f527eb794c2a.jpg \n", + " inflating: /data/dog-breed-identification/train/f74b4a01eae15a3d909c643e116bcfca.jpg \n", + " inflating: /data/dog-breed-identification/train/f750daa88a8129fd8af9e6b686698147.jpg \n", + " inflating: /data/dog-breed-identification/train/f751bb00d954b8e4e132c958e7117011.jpg \n", + " inflating: /data/dog-breed-identification/train/f7564622d2ebbcebdb2eb1f150dbaa95.jpg \n", + " inflating: /data/dog-breed-identification/train/f75b13d14f950f2a6acf1f907bdfc636.jpg \n", + " inflating: /data/dog-breed-identification/train/f75b457706d15d23fb803e2ad7c66c40.jpg \n", + " inflating: /data/dog-breed-identification/train/f7627680c56c5d3acc4f7eae93124459.jpg \n", + " inflating: /data/dog-breed-identification/train/f76f9724c66f6b62edc1ac44fecaa27d.jpg \n", + " inflating: /data/dog-breed-identification/train/f776961d52ba3a7112506389623c8586.jpg \n", + " inflating: /data/dog-breed-identification/train/f77d5b8a287c1395356e23849a2205fb.jpg \n", + " inflating: /data/dog-breed-identification/train/f7850cfce0e0d79627dfb63d71d97b72.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f7886847e4058293245db300e17928fc.jpg \n", + " inflating: /data/dog-breed-identification/train/f78c6a61f77eae6388af7f56c8b4aea7.jpg \n", + " inflating: /data/dog-breed-identification/train/f78e04ee7d70acdb10b2079b25471a46.jpg \n", + " inflating: /data/dog-breed-identification/train/f78e0f62a1b5aad9da3a769c59121ed7.jpg \n", + " inflating: /data/dog-breed-identification/train/f7914989bd1e633c30a76054df77266d.jpg \n", + " inflating: /data/dog-breed-identification/train/f7933e6c90808d5bbbade7bde7cce6dd.jpg \n", + " inflating: /data/dog-breed-identification/train/f7a44b90a798d72d636b7f1003c8c3e4.jpg \n", + " inflating: /data/dog-breed-identification/train/f7a5fbcc43b2e0e09fb25cb78a37094f.jpg \n", + " inflating: /data/dog-breed-identification/train/f7a8a885e2b28630634c3fb513277f27.jpg \n", + " inflating: /data/dog-breed-identification/train/f7a9426876f70330f92f55cca08b19cd.jpg \n", + " inflating: /data/dog-breed-identification/train/f7be7c1d6af9e654f16e6d74ea63a8a5.jpg \n", + " inflating: /data/dog-breed-identification/train/f7c22e91e22c5a97abb2b7d9bab98809.jpg \n", + " inflating: /data/dog-breed-identification/train/f7c998238377fda6bb1d8622c5d65c45.jpg \n", + " inflating: /data/dog-breed-identification/train/f7cd9846e7aa0c163ad98ad00ce51cf8.jpg \n", + " inflating: /data/dog-breed-identification/train/f7d18ca76e6b4e359a27955726e9a656.jpg \n", + " inflating: /data/dog-breed-identification/train/f7de9a7b53f6b0b40a61ea56c55943d9.jpg \n", + " inflating: /data/dog-breed-identification/train/f7df3e61cec435ef6066e9d18b5c17b1.jpg \n", + " inflating: /data/dog-breed-identification/train/f7f60f8bad2179cf4b55b8800497eed8.jpg \n", + " inflating: /data/dog-breed-identification/train/f7f73ba72679f35d552ef1af03c63bf5.jpg \n", + " inflating: /data/dog-breed-identification/train/f7fdd6d141e45d148eaf6ba595a9c45b.jpg \n", + " inflating: /data/dog-breed-identification/train/f8009b210e1fca906bbfd55a17fcb224.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f80541bbc3592999ccc617bdcbd9fde9.jpg \n", + " inflating: /data/dog-breed-identification/train/f8068ff990794cc54cd2d647387ecf6d.jpg \n", + " inflating: /data/dog-breed-identification/train/f80ad50586c794ea8555e3bed23e0c0e.jpg \n", + " inflating: /data/dog-breed-identification/train/f80f575bb32f2cc1958ef092681b9ea4.jpg \n", + " inflating: /data/dog-breed-identification/train/f811a137cac1489f074529064f79acfe.jpg \n", + " inflating: /data/dog-breed-identification/train/f811a192eb721accaa495e6722d9acff.jpg \n", + " inflating: /data/dog-breed-identification/train/f8169d735ce64c6b8e4c421d61b2d3b5.jpg \n", + " inflating: /data/dog-breed-identification/train/f817cad476ff2a52fbe5b56a422dd577.jpg \n", + " inflating: /data/dog-breed-identification/train/f819a72296da2b1dc4f21d8145c40f27.jpg \n", + " inflating: /data/dog-breed-identification/train/f81ec41ce2cc1acb16597fdd231c2406.jpg \n", + " inflating: /data/dog-breed-identification/train/f821311972e25d3dc8e8cf76a64e53e1.jpg \n", + " inflating: /data/dog-breed-identification/train/f824e27367e8475401b62b24111c467f.jpg \n", + " inflating: /data/dog-breed-identification/train/f8271e8e638b9bab6a0d3e164874dd53.jpg \n", + " inflating: /data/dog-breed-identification/train/f827aadcd7537def18615dc19f650456.jpg \n", + " inflating: /data/dog-breed-identification/train/f82a2b46267b6d4edc62c76ebdb0fc8b.jpg \n", + " inflating: /data/dog-breed-identification/train/f831a464cc42602094575fbb18ff1143.jpg \n", + " inflating: /data/dog-breed-identification/train/f8346f0bac3aae2f49113b3d7dabb812.jpg \n", + " inflating: /data/dog-breed-identification/train/f835a7cc45610f664b91d252d413ff65.jpg \n", + " inflating: /data/dog-breed-identification/train/f83683931d1c95c7501b11a3edb4acbe.jpg \n", + " inflating: /data/dog-breed-identification/train/f8387071cbb4e77a274ab6035c3bb687.jpg \n", + " inflating: /data/dog-breed-identification/train/f842f546bc73b3c40698bbf9f94928dd.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f843b7f86256f4d603b1768c8b16adcc.jpg \n", + " inflating: /data/dog-breed-identification/train/f84472e35734f1c9eb3be0372ba522aa.jpg \n", + " inflating: /data/dog-breed-identification/train/f84bcbdda7152edced4b693e1ccaa888.jpg \n", + " inflating: /data/dog-breed-identification/train/f84d455704632ab197b47cfe4061da36.jpg \n", + " inflating: /data/dog-breed-identification/train/f881a7569fa39a96ce68e8c5c8642b20.jpg \n", + " inflating: /data/dog-breed-identification/train/f888b4d0dac4f5b3faf7a05a6ad01cd4.jpg \n", + " inflating: /data/dog-breed-identification/train/f88b18233f76dcfb8dc49ab1820fa0dd.jpg \n", + " inflating: /data/dog-breed-identification/train/f8972355cef9e75502f9a79a26ba9798.jpg \n", + " inflating: /data/dog-breed-identification/train/f89759123d85d4135ed4bdce6923ad6a.jpg \n", + " inflating: /data/dog-breed-identification/train/f89aacf1761449fc76d665cce1399f33.jpg \n", + " inflating: /data/dog-breed-identification/train/f89b4055956d7574c6597a92e78ee149.jpg \n", + " inflating: /data/dog-breed-identification/train/f89bc5490cbc1b17dc0be9c5bdd4f224.jpg \n", + " inflating: /data/dog-breed-identification/train/f8a16c174f672a88dff675bf2aa5e41f.jpg \n", + " inflating: /data/dog-breed-identification/train/f8a46f8751e34aac4b262fed49f210c6.jpg \n", + " inflating: /data/dog-breed-identification/train/f8a5dcec60ad103637c39b31d6dadaef.jpg \n", + " inflating: /data/dog-breed-identification/train/f8a95fd0eee5042ed2b93e6abd032baa.jpg \n", + " inflating: /data/dog-breed-identification/train/f8a9f218dbc135010a7099efbb36a8ce.jpg \n", + " inflating: /data/dog-breed-identification/train/f8accfdc2c24ee99bca4433e33e2a975.jpg \n", + " inflating: /data/dog-breed-identification/train/f8b5c9e7e0aaa1412dcf01199e64c8d7.jpg \n", + " inflating: /data/dog-breed-identification/train/f8c2b71ba0ee8fb0b64a589f12c98618.jpg \n", + " inflating: /data/dog-breed-identification/train/f8c3ce8448eb8b1a4e5cb611595be906.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f8c92d371c9f07b52bb6292fe4bdcc3f.jpg \n", + " inflating: /data/dog-breed-identification/train/f8cc0c4e9be6827494a77260d383884f.jpg \n", + " inflating: /data/dog-breed-identification/train/f8ce0925091c9c81fec5ab16c1109f57.jpg \n", + " inflating: /data/dog-breed-identification/train/f8d289525782f3caa8cb51627063ef22.jpg \n", + " inflating: /data/dog-breed-identification/train/f8d48f89aaa55962d4beb853a128eac7.jpg \n", + " inflating: /data/dog-breed-identification/train/f8d6ec246da32576653776342d52cb69.jpg \n", + " inflating: /data/dog-breed-identification/train/f8df586a37584e859e252778a221391b.jpg \n", + " inflating: /data/dog-breed-identification/train/f8e24c6d9d34d762c90c02efb45a96a3.jpg \n", + " inflating: /data/dog-breed-identification/train/f8e530f56e73403c8d69d29fbc391a06.jpg \n", + " inflating: /data/dog-breed-identification/train/f8e53f2c3cdcdd5e42e7337d5b5156b4.jpg \n", + " inflating: /data/dog-breed-identification/train/f8e931fdc022ea586f2bcd50ca6de106.jpg \n", + " inflating: /data/dog-breed-identification/train/f8ed43ad1a94147b0a53b9cb3fb2d4bc.jpg \n", + " inflating: /data/dog-breed-identification/train/f8f529ee9da3f9edc72983ee020f3fbc.jpg \n", + " inflating: /data/dog-breed-identification/train/f901b7392fff2c1c3991bb8f7b31bfe3.jpg \n", + " inflating: /data/dog-breed-identification/train/f901c393eca491f1c03b88b31fa956fc.jpg \n", + " inflating: /data/dog-breed-identification/train/f9024336c267f4dd4de82cf5f617ffa0.jpg \n", + " inflating: /data/dog-breed-identification/train/f90e5fa3f7dabd3292fecf4312ba98c4.jpg \n", + " inflating: /data/dog-breed-identification/train/f918d591dda3b7c1490f70dc92a7fb8e.jpg \n", + " inflating: /data/dog-breed-identification/train/f921b60576b1055884307f1f6d42997f.jpg \n", + " inflating: /data/dog-breed-identification/train/f922fcb519e1f55ca99e8c3bb9e07619.jpg \n", + " inflating: /data/dog-breed-identification/train/f94a145a3a291fc2c535d135938aea72.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f955bc6cf429a3957922f4155689c857.jpg \n", + " inflating: /data/dog-breed-identification/train/f9561a534df6add1af1806bfa1558fde.jpg \n", + " inflating: /data/dog-breed-identification/train/f95ac23ee909194006c538c4fc6e6750.jpg \n", + " inflating: /data/dog-breed-identification/train/f9698ad44b009757cfacd9f171310651.jpg \n", + " inflating: /data/dog-breed-identification/train/f97b9b8848683ab240cffeda2c855502.jpg \n", + " inflating: /data/dog-breed-identification/train/f97da1d8c57873986183092ec4d5fc13.jpg \n", + " inflating: /data/dog-breed-identification/train/f98392bfae7d0aa35ecc4993ee2afbaf.jpg \n", + " inflating: /data/dog-breed-identification/train/f988271af1ca0e21b76b8cb318bcf4b3.jpg \n", + " inflating: /data/dog-breed-identification/train/f988f65f2b26404fa16698ed835ac08a.jpg \n", + " inflating: /data/dog-breed-identification/train/f9900b44f49e5075c5b3fb589d1aae4b.jpg \n", + " inflating: /data/dog-breed-identification/train/f99c0ede8bf16d5e94a3a69ffb97b341.jpg \n", + " inflating: /data/dog-breed-identification/train/f99cdde623294ec669ff9f2a31e17830.jpg \n", + " inflating: /data/dog-breed-identification/train/f9a1339b0ef12e47ae875a86cf91524b.jpg \n", + " inflating: /data/dog-breed-identification/train/f9a2077bcf32a5d9d33176f52cbcc65c.jpg \n", + " inflating: /data/dog-breed-identification/train/f9a2d5a36d32c2c1285cf46fe05972cb.jpg \n", + " inflating: /data/dog-breed-identification/train/f9a664aff90ef04521974670cbd19cfe.jpg \n", + " inflating: /data/dog-breed-identification/train/f9ac470f280837eda01169eafa5f0d23.jpg \n", + " inflating: /data/dog-breed-identification/train/f9b333457342a87a8321d8686035a7cf.jpg \n", + " inflating: /data/dog-breed-identification/train/f9b4e4ed6e77eca2ea940b31e58b408e.jpg \n", + " inflating: /data/dog-breed-identification/train/f9b6c793a3720605ed68242e73d228d9.jpg \n", + " inflating: /data/dog-breed-identification/train/f9b7736812c24f6de73012511303fb28.jpg \n", + " inflating: 
/data/dog-breed-identification/train/f9be292e5e04c9ff629ead6c17d5d87d.jpg \n", + " inflating: /data/dog-breed-identification/train/f9bf12ee2c9856d37d12b6299af87f8e.jpg \n", + " inflating: /data/dog-breed-identification/train/f9bf654be6ec7d1c40935aa397a3edbf.jpg \n", + " inflating: /data/dog-breed-identification/train/f9c6acac2e566ae605ed9c7c1ecd6450.jpg \n", + " inflating: /data/dog-breed-identification/train/f9c845c69830b3682fbbbcc3fc139b44.jpg \n", + " inflating: /data/dog-breed-identification/train/f9d4235a0740a33550852fe5393aa929.jpg \n", + " inflating: /data/dog-breed-identification/train/f9d6e1b8bc0906d38bab497a228af52e.jpg \n", + " inflating: /data/dog-breed-identification/train/f9dd329f6a9df4dbad0d100aa2df0fd8.jpg \n", + " inflating: /data/dog-breed-identification/train/f9dece751a4afe2330c02007681a7f81.jpg \n", + " inflating: /data/dog-breed-identification/train/f9eedeff3a54f28301468decc50e3def.jpg \n", + " inflating: /data/dog-breed-identification/train/f9f253c1bf65d4ad1907faec9328eef2.jpg \n", + " inflating: /data/dog-breed-identification/train/f9f2c52b196c9da8c7c98c68003d4f08.jpg \n", + " inflating: /data/dog-breed-identification/train/fa0561b43695b11a89447ae47afdbfeb.jpg \n", + " inflating: /data/dog-breed-identification/train/fa0a112efe3604938af42b371c6227da.jpg \n", + " inflating: /data/dog-breed-identification/train/fa0b4de9dbacd9faa3a5a4f2adc63195.jpg \n", + " inflating: /data/dog-breed-identification/train/fa1a19bd9f99b862cb0a986cbc0ea803.jpg \n", + " inflating: /data/dog-breed-identification/train/fa1ae7cd7d26a1dbc29149daa8b03ea6.jpg \n", + " inflating: /data/dog-breed-identification/train/fa1e9d2b34ac79ba9b2358b2c0d803f5.jpg \n", + " inflating: /data/dog-breed-identification/train/fa24ae09f6c25172009016b9dbec322d.jpg \n", + " inflating: /data/dog-breed-identification/train/fa26802c7a0ff2fc7dcabb9999a6c4b3.jpg \n", + " inflating: /data/dog-breed-identification/train/fa27c3d52a9c8a3c6f6801e92297963f.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fa289147f856e2525ee70529a6d0ac52.jpg \n", + " inflating: /data/dog-breed-identification/train/fa2a33c1dc8b39ad51738408b289a0de.jpg \n", + " inflating: /data/dog-breed-identification/train/fa3399d2f5241d807e0942b9807c2b4f.jpg \n", + " inflating: /data/dog-breed-identification/train/fa35207eea3c0d9939c1df7c86222853.jpg \n", + " inflating: /data/dog-breed-identification/train/fa3bc3e096a2967f26113992b29b23b5.jpg \n", + " inflating: /data/dog-breed-identification/train/fa3d5cedd5c63e5cc0c04acadf95cb35.jpg \n", + " inflating: /data/dog-breed-identification/train/fa4387315a7d0a81f033dc647546220a.jpg \n", + " inflating: /data/dog-breed-identification/train/fa4b9e80e2300c45a73f165fd34b3378.jpg \n", + " inflating: /data/dog-breed-identification/train/fa4fdbc06a4bf03494884f3dcb062db0.jpg \n", + " inflating: /data/dog-breed-identification/train/fa5054c5187c7171c546bd6a46b8346f.jpg \n", + " inflating: /data/dog-breed-identification/train/fa5296383aa39dd516c2f5610c6e71b0.jpg \n", + " inflating: /data/dog-breed-identification/train/fa537c93c484404490f0bdd71c79ab6c.jpg \n", + " inflating: /data/dog-breed-identification/train/fa5d72cb50d3ab87bac4bedbfd45d3b0.jpg \n", + " inflating: /data/dog-breed-identification/train/fa5df9801dec89450f56f373b074518c.jpg \n", + " inflating: /data/dog-breed-identification/train/fa607eaf5aa5a95f58a6c43df1147e07.jpg \n", + " inflating: /data/dog-breed-identification/train/fa6247111ff3f3cc66713b4bf7c7d1b9.jpg \n", + " inflating: /data/dog-breed-identification/train/fa69bf620cee03a93e32188ff3872c43.jpg \n", + " inflating: /data/dog-breed-identification/train/fa6e79b065e3f95cf98430ed54700301.jpg \n", + " inflating: /data/dog-breed-identification/train/fa6e79c819e06406740a88773b7f035c.jpg \n", + " inflating: /data/dog-breed-identification/train/fa72d780b780d1a8fda59852ab62738f.jpg \n", + " inflating: /data/dog-breed-identification/train/fa747f34cc4dc45dffd28135501c6688.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fa79fd049300f5082393fa99a142cd7f.jpg \n", + " inflating: /data/dog-breed-identification/train/fa7c0ebcade9a63facc59f32f96a5c17.jpg \n", + " inflating: /data/dog-breed-identification/train/fa7d15c8d452895566586913f123a947.jpg \n", + " inflating: /data/dog-breed-identification/train/fa8442a15275571b324a9e9ad6c7f3ed.jpg \n", + " inflating: /data/dog-breed-identification/train/fa85c1ccb2fa7ff39d78bf2b821bcdd5.jpg \n", + " inflating: /data/dog-breed-identification/train/fa96ff3a3b523de0b176e052843fd152.jpg \n", + " inflating: /data/dog-breed-identification/train/faa111bd776cdece79144aad36558eba.jpg \n", + " inflating: /data/dog-breed-identification/train/faa2c24c801b37aca93ac744da51c2c3.jpg \n", + " inflating: /data/dog-breed-identification/train/fab162b7c732afd2e0886e1fb158862f.jpg \n", + " inflating: /data/dog-breed-identification/train/fab313f7c055d30e79a8590ab05d54f3.jpg \n", + " inflating: /data/dog-breed-identification/train/fab341dadc50afc1a507a9910d5101b3.jpg \n", + " inflating: /data/dog-breed-identification/train/fab4a52198780919e77a541aec737484.jpg \n", + " inflating: /data/dog-breed-identification/train/fab62c77c1af3606f359f987ed9b844e.jpg \n", + " inflating: /data/dog-breed-identification/train/fab782d25875a7cf5298cd2e2aa01cd5.jpg \n", + " inflating: /data/dog-breed-identification/train/fabd0085e9b6b9c83b9fb419e0a2b5b7.jpg \n", + " inflating: /data/dog-breed-identification/train/facdc30ff148a0154121f1e17cac86d7.jpg \n", + " inflating: /data/dog-breed-identification/train/fad30cde8ff696cba83278a55a8de87b.jpg \n", + " inflating: /data/dog-breed-identification/train/fae43d1ff21d5a8847887f7c7695afa5.jpg \n", + " inflating: /data/dog-breed-identification/train/fae8e42fae61856a5bd13bd4ac88b8dd.jpg \n", + " inflating: /data/dog-breed-identification/train/fae9b4b30924265a3098d8e524fc47d2.jpg \n", + " inflating: /data/dog-breed-identification/train/faf615fd8d21d9906b8ab5091e4f5b82.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fb0568b6c25ef5bc9dded95154b1cfc9.jpg \n", + " inflating: /data/dog-breed-identification/train/fb0c9c4e476ef5deeb432e2326c1f3da.jpg \n", + " inflating: /data/dog-breed-identification/train/fb119fab818c9c46445105a0e05a1fbf.jpg \n", + " inflating: /data/dog-breed-identification/train/fb1cdd8ff249b6ee7c2af2e89ff644d4.jpg \n", + " inflating: /data/dog-breed-identification/train/fb1d0dffd97612ee562cc17a98b683e4.jpg \n", + " inflating: /data/dog-breed-identification/train/fb1f17d411a56a2740310b4c06595877.jpg \n", + " inflating: /data/dog-breed-identification/train/fb1ffb75a67173aa922f40c11c40264a.jpg \n", + " inflating: /data/dog-breed-identification/train/fb211e01f77eedac02e92d6bfeedb3ab.jpg \n", + " inflating: /data/dog-breed-identification/train/fb23c7c26cc49324b9081e2086b32409.jpg \n", + " inflating: /data/dog-breed-identification/train/fb2d408bfd49f5a720c3192f7a1cc519.jpg \n", + " inflating: /data/dog-breed-identification/train/fb2f2e9ccc7d47d475dcca8666d887f2.jpg \n", + " inflating: /data/dog-breed-identification/train/fb32a67c8f89942906d08973dbad87b0.jpg \n", + " inflating: /data/dog-breed-identification/train/fb4dbb9921a74bb2bab11c10a123971b.jpg \n", + " inflating: /data/dog-breed-identification/train/fb542f35faf7843778cb2c35d81a7a44.jpg \n", + " inflating: /data/dog-breed-identification/train/fb56acfbe4b95a0df7a4b9e6bddbafd0.jpg \n", + " inflating: /data/dog-breed-identification/train/fb5898e240410c7d736548bf938bbc0a.jpg \n", + " inflating: /data/dog-breed-identification/train/fb6a9896061f29a967d802e4f6dbbd74.jpg \n", + " inflating: /data/dog-breed-identification/train/fb6bca9a75ead518333fd3c98aa4dc40.jpg \n", + " inflating: /data/dog-breed-identification/train/fb7160e2b5699b6c1548f2f7232f4774.jpg \n", + " inflating: /data/dog-breed-identification/train/fb72489572110ed6d3e563c45f3ecb3e.jpg \n", + " inflating: /data/dog-breed-identification/train/fb73518d7dc9e11b00c978d23115b8c6.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fb7a49c3ed2c5f1a82d562fe0792c3fb.jpg \n", + " inflating: /data/dog-breed-identification/train/fb80f89c84c6c2649964b574918e02d6.jpg \n", + " inflating: /data/dog-breed-identification/train/fb888e2a6e2acca312352030cc8b24de.jpg \n", + " inflating: /data/dog-breed-identification/train/fb8c009186d200c2f49173d89f4a4a80.jpg \n", + " inflating: /data/dog-breed-identification/train/fb9ad84feb0f23f26247056b830f6e36.jpg \n", + " inflating: /data/dog-breed-identification/train/fb9c8b7e0b70b201898d930ed71163b6.jpg \n", + " inflating: /data/dog-breed-identification/train/fb9cbd60c09b65c1032fc7dab93c8354.jpg \n", + " inflating: /data/dog-breed-identification/train/fb9ee245256bc5c3e14f32c8c69cf6a8.jpg \n", + " inflating: /data/dog-breed-identification/train/fba676383a9e3dd93dc1c4d50c54f48f.jpg \n", + " inflating: /data/dog-breed-identification/train/fba73e53151cb751f9c22e63de669923.jpg \n", + " inflating: /data/dog-breed-identification/train/fbaabd8210413b2084743c1f00d8c0f4.jpg \n", + " inflating: /data/dog-breed-identification/train/fbb02fe9f6a18d5e59c93e957dccf4fb.jpg \n", + " inflating: /data/dog-breed-identification/train/fbb86c8f93d2a1068520c0363a2079bc.jpg \n", + " inflating: /data/dog-breed-identification/train/fbbae3a9f939903eb81b3d86ca4a8786.jpg \n", + " inflating: /data/dog-breed-identification/train/fbbe0a41f7fa5cd4b69f25d8693e55f9.jpg \n", + " inflating: /data/dog-breed-identification/train/fbc14efa3d420a5d5f4719a356446ad4.jpg \n", + " inflating: /data/dog-breed-identification/train/fbcc2af005aa6fa3c9cab4d1ac38bf06.jpg \n", + " inflating: /data/dog-breed-identification/train/fbcd86965ce247c0a44e50ff7b66a670.jpg \n", + " inflating: /data/dog-breed-identification/train/fbd5762e97073435f8b7942fc67a4d16.jpg \n", + " inflating: /data/dog-breed-identification/train/fbd70c8820a6fbcd21242284baa8ba27.jpg \n", + " inflating: /data/dog-breed-identification/train/fbd75ba5bf5ce7ef08a11a69c5ef263c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fbdeba9ec017fc43c814e2688ad62402.jpg \n", + " inflating: /data/dog-breed-identification/train/fbee719275e23f753f3cd1d9f83db21f.jpg \n", + " inflating: /data/dog-breed-identification/train/fbf3162c4df3f1a527cb0b26ef062704.jpg \n", + " inflating: /data/dog-breed-identification/train/fbf6ac0549525e721d3d3e48e27db4c5.jpg \n", + " inflating: /data/dog-breed-identification/train/fbf881c1bd9b236af37efdfeb4a1fb51.jpg \n", + " inflating: /data/dog-breed-identification/train/fbfb5da3096f8d12329dc9a02de83dd3.jpg \n", + " inflating: /data/dog-breed-identification/train/fc0020cc00e3b1c7ec453ec129e17838.jpg \n", + " inflating: /data/dog-breed-identification/train/fc0b8fd26e1db206b60074b7b2598465.jpg \n", + " inflating: /data/dog-breed-identification/train/fc0d67099704bcf210316e6c31d2f28b.jpg \n", + " inflating: /data/dog-breed-identification/train/fc11bd87c4d826650463f0ee79b8b4eb.jpg \n", + " inflating: /data/dog-breed-identification/train/fc11c7fdbef74c3a3d8ea731ec48861a.jpg \n", + " inflating: /data/dog-breed-identification/train/fc2049d582b3444ed99af4a5c13b49e5.jpg \n", + " inflating: /data/dog-breed-identification/train/fc2c6cb0b33fec818063b2840217c546.jpg \n", + " inflating: /data/dog-breed-identification/train/fc2d73fcdab269bde56ee0756e40d0c6.jpg \n", + " inflating: /data/dog-breed-identification/train/fc3100b4cc97f41f55e86668d47d26e2.jpg \n", + " inflating: /data/dog-breed-identification/train/fc338aa6f2ba965bb7be53584ca0b4e0.jpg \n", + " inflating: /data/dog-breed-identification/train/fc33f90570fc8502e6c3f83a6bf3b982.jpg \n", + " inflating: /data/dog-breed-identification/train/fc416bf2011ddccc885698a7ad19ba11.jpg \n", + " inflating: /data/dog-breed-identification/train/fc440d7e1cd648dd5dac1ef18c05a799.jpg \n", + " inflating: /data/dog-breed-identification/train/fc4afae2a9dcf48ee66253add0709191.jpg \n", + " inflating: /data/dog-breed-identification/train/fc4f1d94ed5191b9b7d1f24420bbb07b.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fc4fc2e64dc59dcff402fc281a42481d.jpg \n", + " inflating: /data/dog-breed-identification/train/fc518958bdeda396c5431d8a8ec319df.jpg \n", + " inflating: /data/dog-breed-identification/train/fc58ff2eb1f57943e151704a4f061d38.jpg \n", + " inflating: /data/dog-breed-identification/train/fc5ea05e270b116e5c033f0b9e3b6082.jpg \n", + " inflating: /data/dog-breed-identification/train/fc64ecf1ef29b56646e2480196877f7b.jpg \n", + " inflating: /data/dog-breed-identification/train/fc6abf69e1581b95734830af88c636a0.jpg \n", + " inflating: /data/dog-breed-identification/train/fc6df38f35361d2630568b993a596008.jpg \n", + " inflating: /data/dog-breed-identification/train/fc6e76acd21ed78f2c4c2153380cf63f.jpg \n", + " inflating: /data/dog-breed-identification/train/fc71c9e6334f37a9f42c00e599af8b02.jpg \n", + " inflating: /data/dog-breed-identification/train/fc7317da160bff89cd13aacc980adf26.jpg \n", + " inflating: /data/dog-breed-identification/train/fc77bf555c892344771a2c6714e72659.jpg \n", + " inflating: /data/dog-breed-identification/train/fc7cefece70681db1b55887a4b02a901.jpg \n", + " inflating: /data/dog-breed-identification/train/fc879f14ec130d86a2479e8d869908cd.jpg \n", + " inflating: /data/dog-breed-identification/train/fc94a4822e2d9427568b6d5427e903a7.jpg \n", + " inflating: /data/dog-breed-identification/train/fc992fdbd5af203a39e7c1868fc69090.jpg \n", + " inflating: /data/dog-breed-identification/train/fc9d449fb3c5be2e08839903d9405b42.jpg \n", + " inflating: /data/dog-breed-identification/train/fca2032135773108cb9811ab71d7f5c6.jpg \n", + " inflating: /data/dog-breed-identification/train/fca387fcb6630ae11536b3374d3b2234.jpg \n", + " inflating: /data/dog-breed-identification/train/fcab59cd421ec3d233da0027d664657e.jpg \n", + " inflating: /data/dog-breed-identification/train/fcac8d16408b0967a416431030a3510c.jpg \n", + " inflating: /data/dog-breed-identification/train/fcaff3d64f414b0a095d9a50cc29c401.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fcb2147b7999bfd687d4b87584bb8907.jpg \n", + " inflating: /data/dog-breed-identification/train/fcb23e9c5e5915e021fd916e3df64ff6.jpg \n", + " inflating: /data/dog-breed-identification/train/fcb911ca6411383f99bed4b745f4fede.jpg \n", + " inflating: /data/dog-breed-identification/train/fcbad8ad476b83b180c9497bba2dedba.jpg \n", + " inflating: /data/dog-breed-identification/train/fcc9c872e76f097b8fd02b56760e7d48.jpg \n", + " inflating: /data/dog-breed-identification/train/fcccabf55377e660accd9c8bf984026c.jpg \n", + " inflating: /data/dog-breed-identification/train/fccd6fe10febbc04d13470ec2aa516af.jpg \n", + " inflating: /data/dog-breed-identification/train/fce0a4c8bf2d6588ea7f6ecf731b2fbb.jpg \n", + " inflating: /data/dog-breed-identification/train/fce950ef7cf99efcb26297537bdf5dd4.jpg \n", + " inflating: /data/dog-breed-identification/train/fcec543b7b4dc1fc1e47d66a9305e87a.jpg \n", + " inflating: /data/dog-breed-identification/train/fcecf02b9cc36ab7c4db440058f331c0.jpg \n", + " inflating: /data/dog-breed-identification/train/fcee07f3f1190697b6ec35911dcdd8cb.jpg \n", + " inflating: /data/dog-breed-identification/train/fd00c84695d64324cc597937320cc0f2.jpg \n", + " inflating: /data/dog-breed-identification/train/fd01857177284110f02b52475dfeb9cd.jpg \n", + " inflating: /data/dog-breed-identification/train/fd03162f0a586bbeac13c33e74d24e73.jpg \n", + " inflating: /data/dog-breed-identification/train/fd05ad1ed6a60452b991ce65d735416a.jpg \n", + " inflating: /data/dog-breed-identification/train/fd082fe869dc90cf5fc21bfacc270265.jpg \n", + " inflating: /data/dog-breed-identification/train/fd0f827a0d0ad9a2ff60ddf30e5d50be.jpg \n", + " inflating: /data/dog-breed-identification/train/fd116b902bf7e614cdb22e725af87e58.jpg \n", + " inflating: /data/dog-breed-identification/train/fd12fd971ffa3d489990b6eeca1952b8.jpg \n", + " inflating: /data/dog-breed-identification/train/fd186806d3d7f123d9a568bcde794f6e.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fd21ff6aa4f72aa6c29a589b2d3042fa.jpg \n", + " inflating: /data/dog-breed-identification/train/fd229a951f5a9f2439d4f6c2c51595e3.jpg \n", + " inflating: /data/dog-breed-identification/train/fd22da43e0b930b27b38eb97a55f65ba.jpg \n", + " inflating: /data/dog-breed-identification/train/fd2478ca48b9ac774babc2cae65fca5c.jpg \n", + " inflating: /data/dog-breed-identification/train/fd2fbb4ff4bfe775d0267f4737c67521.jpg \n", + " inflating: /data/dog-breed-identification/train/fd3990bdd4b541a0efa8748ff8a901ac.jpg \n", + " inflating: /data/dog-breed-identification/train/fd3c4ef41e17c992e0c5aaa5101a39f3.jpg \n", + " inflating: /data/dog-breed-identification/train/fd411242df68b9da572f8d044afa3c5f.jpg \n", + " inflating: /data/dog-breed-identification/train/fd42b1ea571fbdd77c1c5ceeff8ebb76.jpg \n", + " inflating: /data/dog-breed-identification/train/fd4816b0b1bf94c4868d16da9ee87e10.jpg \n", + " inflating: /data/dog-breed-identification/train/fd5c9929ec93b09977a9565bd94b5672.jpg \n", + " inflating: /data/dog-breed-identification/train/fd64b07c6c3249ae625564fc111adff6.jpg \n", + " inflating: /data/dog-breed-identification/train/fd73ff7c6b24e7292701305e8c7c32c2.jpg \n", + " inflating: /data/dog-breed-identification/train/fd7eeee1c55efbb222223c2fc0b1bbbd.jpg \n", + " inflating: /data/dog-breed-identification/train/fd80cac3ffdcd6ddcfe4d2d6f8f617f2.jpg \n", + " inflating: /data/dog-breed-identification/train/fd8bd9ce34326d7fea7b85040510bdbb.jpg \n", + " inflating: /data/dog-breed-identification/train/fd8d47a35e1c84795b4a2a27b761eb7b.jpg \n", + " inflating: /data/dog-breed-identification/train/fd99c29a3ee91c7e9114e34be1ca0d1e.jpg \n", + " inflating: /data/dog-breed-identification/train/fd9aa3277a9635ecf6abbe637a78e521.jpg \n", + " inflating: /data/dog-breed-identification/train/fda872df827d4ad271ad139a891a665e.jpg \n", + " inflating: /data/dog-breed-identification/train/fda9aa966f3391e2508cdad4b51bbff9.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fdb58e4dd078f4343ce953f1c0bf2a51.jpg \n", + " inflating: /data/dog-breed-identification/train/fdb60e77166ba6aa999e5dab05c44dbc.jpg \n", + " inflating: /data/dog-breed-identification/train/fdc0556f9b32a714b56985eb99fbbbeb.jpg \n", + " inflating: /data/dog-breed-identification/train/fdc614c16f54555064a32bc94522b4a4.jpg \n", + " inflating: /data/dog-breed-identification/train/fdca49e41439a428f8f727115e2610a9.jpg \n", + " inflating: /data/dog-breed-identification/train/fdca814a411178296fe719d3c0be9049.jpg \n", + " inflating: /data/dog-breed-identification/train/fdcbedc65f600f81df181e9a46858cbb.jpg \n", + " inflating: /data/dog-breed-identification/train/fdccec2dc716306a12b773e7689887c0.jpg \n", + " inflating: /data/dog-breed-identification/train/fdce4d488a629164ee8a9f2a0b81905f.jpg \n", + " inflating: /data/dog-breed-identification/train/fdcedfa9f9ae621a4889e844b9e2940d.jpg \n", + " inflating: /data/dog-breed-identification/train/fdcf75632c624d8d03b37ddb1c6fc592.jpg \n", + " inflating: /data/dog-breed-identification/train/fdd4ab27e77fe219d73a83dbb5123f97.jpg \n", + " inflating: /data/dog-breed-identification/train/fdd70b075a651fb1a966e5a8735b9a34.jpg \n", + " inflating: /data/dog-breed-identification/train/fddb09d408084b1289bf572bd4071c9c.jpg \n", + " inflating: /data/dog-breed-identification/train/fde78cf59b95570111e6f851e06900cd.jpg \n", + " inflating: /data/dog-breed-identification/train/fdf83a2eed71f2c54c3e6f592e84b254.jpg \n", + " inflating: /data/dog-breed-identification/train/fdfcc3d2e40970fbfb8521bd29e9fb4d.jpg \n", + " inflating: /data/dog-breed-identification/train/fe03d2d88e9a68aeeac63f33a920557d.jpg \n", + " inflating: /data/dog-breed-identification/train/fe081bb43a6b0902d7ae9cb560053bc1.jpg \n", + " inflating: /data/dog-breed-identification/train/fe0beca881efd723510e8e859306a3a6.jpg \n", + " inflating: /data/dog-breed-identification/train/fe0ca31ba19fc97e9644ac4daafa7e36.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fe13d46f5920f0944e6c30e54ac0e2a5.jpg \n", + " inflating: /data/dog-breed-identification/train/fe3d08ee9e1aba1785391b42345c3fc0.jpg \n", + " inflating: /data/dog-breed-identification/train/fe3e760d763e186541e18f303cd7caca.jpg \n", + " inflating: /data/dog-breed-identification/train/fe426e0af99930c0ec3c9ab58b02f8dc.jpg \n", + " inflating: /data/dog-breed-identification/train/fe49341352549164ad921a67647507f1.jpg \n", + " inflating: /data/dog-breed-identification/train/fe4d298d682a42714f33085c9d241cc0.jpg \n", + " inflating: /data/dog-breed-identification/train/fe50bac6c389d137ea01c9cfc7346ca8.jpg \n", + " inflating: /data/dog-breed-identification/train/fe54e87e65fe0c68670c0dd1a923f1f0.jpg \n", + " inflating: /data/dog-breed-identification/train/fe5e4ee18529af1af1861efd550561a3.jpg \n", + " inflating: /data/dog-breed-identification/train/fe624532170510bd80627c0500bafc97.jpg \n", + " inflating: /data/dog-breed-identification/train/fe7171353417898022361453894adf94.jpg \n", + " inflating: /data/dog-breed-identification/train/fe76cbb5f172387f6a5b72739852d608.jpg \n", + " inflating: /data/dog-breed-identification/train/fe78fc42e32174c7178b572bdcf5a129.jpg \n", + " inflating: /data/dog-breed-identification/train/fe7ea4eb63ab5fddea120555790f9187.jpg \n", + " inflating: /data/dog-breed-identification/train/fe8d52ab96ff238ea7d234b508010ece.jpg \n", + " inflating: /data/dog-breed-identification/train/fe9e09be6594f626f0d711bfba10cfe0.jpg \n", + " inflating: /data/dog-breed-identification/train/fea60fdd28de5834520134d6dc77a9a2.jpg \n", + " inflating: /data/dog-breed-identification/train/feafd0730eae85e63a41bbc030755c59.jpg \n", + " inflating: /data/dog-breed-identification/train/feb16cf86c9dac6d476e3c372ba5c279.jpg \n", + " inflating: /data/dog-breed-identification/train/feb9d0ae525ca28aabff74b455e34c16.jpg \n", + " inflating: /data/dog-breed-identification/train/febcab8eb2da444bf83336cffec7eb92.jpg \n", + " inflating: 
/data/dog-breed-identification/train/fede60fb2acc02a2da0d0a05f760b7d5.jpg \n", + " inflating: /data/dog-breed-identification/train/fee1696ae6725863f84b0da2c05ad892.jpg \n", + " inflating: /data/dog-breed-identification/train/fee672d906b502642597ccbc6acff0bb.jpg \n", + " inflating: /data/dog-breed-identification/train/fee98c990f4d69c6a8467dd0f0668440.jpg \n", + " inflating: /data/dog-breed-identification/train/fef4a58219c8971820a85868a7b073f5.jpg \n", + " inflating: /data/dog-breed-identification/train/fef5d4cdaf50cf159102e803c7d6aa9c.jpg \n", + " inflating: /data/dog-breed-identification/train/fef9c3ab585ad3f778c549fda42c1856.jpg \n", + " inflating: /data/dog-breed-identification/train/fefb453e43ec5e840c323538261493bd.jpg \n", + " inflating: /data/dog-breed-identification/train/ff04baf19edbe449b39619d88da3633c.jpg \n", + " inflating: /data/dog-breed-identification/train/ff05f3976c17fef275cc0306965b3fe4.jpg \n", + " inflating: /data/dog-breed-identification/train/ff0931b1c82289dc2cf02f0b4a165139.jpg \n", + " inflating: /data/dog-breed-identification/train/ff0c4e0e856f1eddcc61facca64440c9.jpg \n", + " inflating: /data/dog-breed-identification/train/ff0d0773ee3eeb6eb90a172d6afd1ea1.jpg \n", + " inflating: /data/dog-breed-identification/train/ff0def9dafea6e633d0d7249554fcb2c.jpg \n", + " inflating: /data/dog-breed-identification/train/ff12508818823987d04e8fa4f5907efe.jpg \n", + " inflating: /data/dog-breed-identification/train/ff181f0d69202b0650e6e5d76e9c13cc.jpg \n", + " inflating: /data/dog-breed-identification/train/ff2523c07da7a6cbeeb7c8f8dafed24f.jpg \n", + " inflating: /data/dog-breed-identification/train/ff3b935868afb51b2d0b75ddc989d058.jpg \n", + " inflating: /data/dog-breed-identification/train/ff47baef46c5876eaf9a403cd6a54d72.jpg \n", + " inflating: /data/dog-breed-identification/train/ff4afeb51a1473f7ba18669a8ff48bc9.jpg \n", + " inflating: /data/dog-breed-identification/train/ff4bb57ce419cd637dd511a1b5474bff.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ff52a3909f5801a71161cec95d213107.jpg \n", + " inflating: /data/dog-breed-identification/train/ff54d45962b3123bb67052e8e29a60e7.jpg \n", + " inflating: /data/dog-breed-identification/train/ff63ed894f068da8e2bbdfda50a9a9f8.jpg \n", + " inflating: /data/dog-breed-identification/train/ff63fa05a58473138848f80840064d23.jpg \n", + " inflating: /data/dog-breed-identification/train/ff6f47aa8e181b6efa4d0be7b09b5628.jpg \n", + " inflating: /data/dog-breed-identification/train/ff7334b06cee8667a7f30eb00e0b93cf.jpg \n", + " inflating: /data/dog-breed-identification/train/ff7d9c08091acc3b18b869951feeb013.jpg \n", + " inflating: /data/dog-breed-identification/train/ff84992beff3edd99b72718bec9448d2.jpg \n", + " inflating: /data/dog-breed-identification/train/ff8e3fa7e04faca99af85195507ee54d.jpg \n", + " inflating: /data/dog-breed-identification/train/ff91c3c095a50d3d7f1ab52b60e93638.jpg \n", + " inflating: /data/dog-breed-identification/train/ffa0055ec324829882186bae29491645.jpg \n", + " inflating: /data/dog-breed-identification/train/ffa0ad682c6670db3defce2575a2587f.jpg \n", + " inflating: /data/dog-breed-identification/train/ffa16727a9ee462ee3f386be865b199e.jpg \n", + " inflating: /data/dog-breed-identification/train/ffa4e1bf959425bad9228b04af40ac76.jpg \n", + " inflating: /data/dog-breed-identification/train/ffa6a8d29ce57eb760d0f182abada4bf.jpg \n", + " inflating: /data/dog-breed-identification/train/ffbbf7536ba86dcef3f360bda41181b4.jpg \n", + " inflating: /data/dog-breed-identification/train/ffc1717fc5b5f7a6c76d0e4ea7c8f93a.jpg \n", + " inflating: /data/dog-breed-identification/train/ffc2b6b9133a6413c4a013cff29f9ed2.jpg \n", + " inflating: /data/dog-breed-identification/train/ffc532991d3cd7880d27a449ed1c4770.jpg \n", + " inflating: /data/dog-breed-identification/train/ffca1c97cea5fada05b8646998a5b788.jpg \n", + " inflating: /data/dog-breed-identification/train/ffcb610e811817766085054616551f9c.jpg \n", + " inflating: 
/data/dog-breed-identification/train/ffcde16e7da0872c357fbc7e2168c05f.jpg \n", + " inflating: /data/dog-breed-identification/train/ffcffab7e4beef9a9b8076ef2ca51909.jpg \n", + " inflating: /data/dog-breed-identification/train/ffd25009d635cfd16e793503ac5edef0.jpg \n", + " inflating: /data/dog-breed-identification/train/ffd3f636f7f379c51ba3648a9ff8254f.jpg \n", + " inflating: /data/dog-breed-identification/train/ffe2ca6c940cddfee68fa3cc6c63213f.jpg \n", + " inflating: /data/dog-breed-identification/train/ffe5f6d8e2bff356e9482a80a6e29aac.jpg \n", + " inflating: /data/dog-breed-identification/train/fff43b07992508bc822f33d8ffd902ae.jpg \n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "#狗的品种识别" + ], + "metadata": { + "id": "RnU5TA1cJ5CK" + } + }, + { + "cell_type": "code", + "source": [ + "import collections\n", + "import math\n", + "import os\n", + "import shutil\n", + "import pandas as pd\n", + "import torch\n", + "import torchvision\n", + "from torch import nn\n", + "from d2l import torch as d2l" + ], + "metadata": { + "id": "ie60MoTC-9-I" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "##读取数据路径" + ], + "metadata": { + "id": "ZlUKEbGuKERD" + } + }, + { + "cell_type": "code", + "source": [ + "d2l.DATA_HUB['dog_tiny'] = (d2l.DATA_URL + 'kaggle_dog_tiny.zip','0cb91d09b814ecdc07b50f31f8dcad3e81d6a86d')\n", + "\n", + "demo = False\n", + "if demo:\n", + " data_dir = d2l.download_extract('dog_tiny')\n", + "else:\n", + " data_dir = os.path.join('..','data','dog-breed-identification')" + ], + "metadata": { + "id": "IeK1QIXlyJbr" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#读取.csv类型的标签文件\n", + "def read_csv_labels(fname):\n", + " with open(fname,'r') as f:\n", + " lines = f.readlines()[1:]\n", + " tokens = [l.rstrip().split(',') for l in lines]\n", + " return dict(((name,label) for name,label in tokens))\n", + "\n", + "#复制文件\n", + "def 
copyfile(filename,target_dir):\n", + " os.makedirs(target_dir,exist_ok=True)\n", + " shutil.copy(filename,target_dir)\n", + "\n", + "#划分开发集\n", + "def reorg_train_valid(data_dir,labels,valid_ratio):\n", + " #训练数据集中样本数量最少的类别中的样本数\n", + " n = collections.Counter(labels.values()).most_common()[-1][1]\n", + " #开发集中每个类别的样本数\n", + " n_valid_per_label = max(1, math.floor(n*valid_ratio))\n", + " label_count = {}\n", + " for train_file in os.listdir(os.path.join(data_dir,'train')):\n", + " label = labels[train_file.split('.')[0]]\n", + " fname = os.path.join(data_dir,'train',train_file)\n", + " copyfile(fname,os.path.join(data_dir,'train_valid_test','train_valid',label))\n", + " if label not in label_count or label_count[label] < n_valid_per_label:\n", + " copyfile(fname,os.path.join(data_dir,'train_valid_test','valid',label))\n", + " label_count[label] = label_count.get(label,0)+1\n", + " else:\n", + " copyfile(fname,os.path.join(data_dir,'train_valid_test','train',label))\n", + " return n_valid_per_label\n", + "\n", + "def reorg_test(data_dir):\n", + " #重整测试集\n", + " for test_file in os.listdir(os.path.join(data_dir,'test')):\n", + " copyfile(os.path.join(data_dir,'test',test_file),os.path.join(data_dir,'train_valid_test','test','unknown'))" + ], + "metadata": { + "id": "aO2j0IUI0r-T" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#重整数据\n", + "def reorg_dog_data(data_dir,valid_ratio):\n", + " labels = read_csv_labels(os.path.join(data_dir,'labels.csv'))\n", + " reorg_train_valid(data_dir,labels,valid_ratio)\n", + " reorg_test(data_dir)\n", + "\n", + "#制定超参数\n", + "#完整数据集batch_size为128\n", + "batch_size = 32 if demo else 128\n", + "valid_ratio = 0.1\n", + "reorg_dog_data(data_dir,valid_ratio)" + ], + "metadata": { + "id": "eEjLdQuuykxs" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#图像增广\n", + "transform_train = torchvision.transforms.Compose([\n", + " 
#随机剪裁图像,所得图像为原始面积的0.08到1之间,高宽比在3/4和4/3之间\n", + " #然后,缩放图像以创建224x224的新图像\n", + " torchvision.transforms.RandomResizedCrop(224,scale=(0.08,1.0),\n", + " ratio=(3.0/4.0,4.0/3.0)),\n", + " torchvision.transforms.RandomHorizontalFlip(),\n", + " #随机更改亮度,对比度和饱和度\n", + " torchvision.transforms.ColorJitter(brightness=0.4,\n", + " contrast=0.4,\n", + " saturation=0.4),\n", + " #添加随机噪声\n", + " torchvision.transforms.ToTensor(),\n", + " #标准化图像的每个通道\n", + " torchvision.transforms.Normalize([0.485,0.456,0.406],\n", + " [0.229,0.224,0.225])])\n", + "transform_test = torchvision.transforms.Compose([\n", + " torchvision.transforms.Resize(256),\n", + " #从图像中心裁切224x224大小的图片\n", + " torchvision.transforms.CenterCrop(224),\n", + " torchvision.transforms.ToTensor(),\n", + " torchvision.transforms.Normalize([0.485,0.456,0.406],\n", + " [0.229,0.224,0.225])])" + ], + "metadata": { + "id": "r43yT5DJ7LYX" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#读取训练集、开发集和测试集\n", + "train_ds,train_valid_ds = [torchvision.datasets.ImageFolder(\n", + " os.path.join(data_dir,'train_valid_test',folder),\n", + " transform=transform_train) for folder in ['train','train_valid']]\n", + "\n", + "valid_ds,test_ds = [torchvision.datasets.ImageFolder(\n", + " os.path.join(data_dir,'train_valid_test',folder),\n", + " transform=transform_test) for folder in ['valid','test']]" + ], + "metadata": { + "id": "gimT8zzw-d2u" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "train_iter,train_valid_iter = [torch.utils.data.DataLoader(\n", + " dataset,batch_size,shuffle=True,drop_last=True) for dataset in (train_ds,train_valid_ds)]\n", + "\n", + "valid_iter = torch.utils.data.DataLoader(valid_ds,batch_size,shuffle=False,drop_last=True)\n", + "test_iter = torch.utils.data.DataLoader(test_ds,batch_size,shuffle=False,drop_last=False)" + ], + "metadata": { + "id": "BRU5m0PBA8F7" + }, + "execution_count": null, + "outputs": [] + }, 
+ { + "cell_type": "code", + "source": [ + "#搭建微调预训练模型\n", + "def get_net(devices):\n", + " finetune_net = nn.Sequential()\n", + " finetune_net.features = torchvision.models.resnet34(pretrained=True)\n", + " #定义一个新的输出网络,共有120个输出类别\n", + " finetune_net.output_new = nn.Sequential(nn.Linear(1000,256),\n", + " nn.ReLU(),\n", + " nn.Linear(256,120))\n", + " #将模型参数分配给用于计算的CPU或GPU\n", + " finetune_net = finetune_net.to(devices[0])\n", + " #冻结参数\n", + " for param in finetune_net.features.parameters():\n", + " param.requires_grad = False\n", + " return finetune_net" + ], + "metadata": { + "id": "eWwOhaKIBwfb" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "loss = nn.CrossEntropyLoss(reduction='none')\n", + "\n", + "def evaluate_loss(data_iter,net,devices):\n", + " l_sum,n = 0.0,0\n", + " for features,labels in data_iter:\n", + " features,labels = features.to(devices[0]),labels.to(devices[0])\n", + " outputs = net(features)\n", + " l = loss(outputs,labels)\n", + " l_sum += l.sum()\n", + " n += labels.numel()\n", + " return (l_sum / n).to('cpu')" + ], + "metadata": { + "id": "csI76GUYCv3_" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def train(net,train_iter,valid_iter,num_epochs,lr,wd,devices,lr_period,lr_decay):\n", + " net = nn.DataParallel(net,device_ids=devices).to(devices[0])\n", + " trainer = torch.optim.SGD((param for param in net.parameters() if param.requires_grad),lr=lr,momentum=0.9,weight_decay=wd)\n", + " scheduler = torch.optim.lr_scheduler.StepLR(trainer,lr_period,lr_decay)\n", + " num_batches,timer = len(train_iter),d2l.Timer()\n", + " legend = ['train_loss']\n", + " if valid_iter is not None:\n", + " legend.append('valid loss')\n", + " animator = d2l.Animator(xlabel='epoch',xlim=[1,num_epochs],legend=legend)\n", + " for epoch in range(num_epochs):\n", + " metric = d2l.Accumulator(2)\n", + " for i,(features,labels) in enumerate(train_iter):\n", + " 
timer.start()\n", + " features, labels = features.to(devices[0]),labels.to(devices[0])\n", + " trainer.zero_grad()\n", + " output = net(features)\n", + " l = loss(output,labels).sum()\n", + " l.backward()\n", + " trainer.step()\n", + " metric.add(l,labels.shape[0])\n", + " timer.stop()\n", + " if (i+1)%(num_batches // 5) == 0 or i == num_batches - 1:\n", + " animator.add(epoch + (i+1) / num_batches,(metric[0] / metric[1],None))\n", + " measures = f'train loss {metric[0] / metric[1]:.3f}'\n", + " if valid_iter is not None:\n", + " valid_loss = evaluate_loss(valid_iter,net,devices)\n", + " animator.add(epoch+1,(None,valid_loss.detach()))\n", + " scheduler.step()\n", + " if valid_iter is not None:\n", + " measures += f',valid loss {valid_loss:.3f}'\n", + " print(measures + f'\\n{metric[1] * num_epochs / timer.sum():.1f}' f' examples/sec on {str(devices)}')" + ], + "metadata": { + "id": "JSj77usZDfF0" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "devices,num_epochs,lr,wd = d2l.try_all_gpus(),10,1e-4,1e-4\n", + "lr_period,lr_decay,net = 2,0.9,get_net(devices)\n", + "train(net,train_iter,valid_iter,num_epochs,lr,wd,devices,lr_period,lr_decay)" + ], + "metadata": { + "id": "TEMwTePZG7GH", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 296 + }, + "outputId": "7abbc346-4d84-4196-cdf5-2746f9f8a1ee" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "train loss 0.973,valid loss 0.730\n", + "349.3 examples/sec on [device(type='cuda', index=0)]\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n" + }, + "metadata": { + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "code", + "source": [ + "net = get_net(devices)\n", + "train(net,train_valid_iter,None,num_epochs,lr,wd,devices,lr_period,lr_decay)\n", + "\n", + "preds = []\n", + "for data,label in test_iter:\n", + " output = torch.nn.functional.softmax(net(data.to(devices[0])),dim=0)\n", + " preds.extend(output.cpu().detach().numpy())\n", + "ids = sorted(os.listdir(os.path.join(data_dir,'train_valid_test','test','unknown')))\n", + "with open('submission.csv','w') as f:\n", + " f.write('id,'+','.join(train_valid_ds.classes)+'\\n')\n", + " for i,output in zip(ids,preds):\n", + " f.write(i.split('.')[0] + ',' + ','.join([str(num) for num in output]) + '\\n')" + ], + "metadata": { + "id": "iIM1XTmCImSc", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 296 + }, + "outputId": "62b9c2f9-bffd-4bc9-8cb4-364e7adc8ba6" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "train loss 0.987\n", + "348.7 examples/sec on [device(type='cuda', index=0)]\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n" + }, + "metadata": { + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "code", + "source": [ + "" + ], + "metadata": { + "id": "Fwb09takK2RJ" + }, + "execution_count": null, + "outputs": [] + } + ] +} \ No newline at end of file diff --git a/code/45-ssd/45-multiscale-object-detection.ipynb b/code/45-ssd/45-multiscale-object-detection.ipynb new file mode 100644 index 0000000..5da790e --- /dev/null +++ b/code/45-ssd/45-multiscale-object-detection.ipynb @@ -0,0 +1,1963 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "-" + } + }, + "source": [ + "# 多尺度目标检测\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(561, 728)" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%matplotlib inline\n", + "import torch\n", + "from d2l import torch as d2l\n", + "\n", + "img = d2l.plt.imread('../img/catdog.jpg')#读入图片\n", + "h, w = img.shape[:2]\n", + "h, w" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "在特征图(`fmap`)上生成锚框(`anchors`),每个单位(像素)作为锚框的中心" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 5, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + 
"source": [ + "def display_anchors(fmap_w, fmap_h, s):\n", + " d2l.set_figsize()\n", + " fmap = torch.zeros((1, 10, fmap_h, fmap_w))#四维,前两个维度不影响输出,主要关心高和宽\n", + " anchors = d2l.multibox_prior(fmap, sizes=s, ratios=[1, 2, 0.5])#给定fmap,size,高宽比\n", + " bbox_scale = torch.tensor((w, h, w, h))#给定图片的高宽\n", + " d2l.show_bboxes(d2l.plt.imshow(img).axes,\n", + " anchors[0] * bbox_scale)#显示锚框" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "探测小目标" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 7, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "E:\\ProgramData\\Anaconda3\\envs\\d2l_zh\\lib\\site-packages\\torch\\functional.py:445: UserWarning: torch.meshgrid: in an upcoming release, it will be required to pass the indexing argument. (Triggered internally at ..\\aten\\src\\ATen\\native\\TensorShape.cpp:2157.)\n", + " return _VF.meshgrid(tensors, **kwargs) # type: ignore[attr-defined]\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-06T20:58:41.339700\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib v3.3.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", 
+ " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 
\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "display_anchors(fmap_w=4, fmap_h=4, s=[0.15])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "将特征图的高度和宽度减小一半,然后使用较大的锚框来检测较大的目标" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 9, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-07T09:18:55.810226\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", 
+ " Matplotlib v3.3.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", 
+ " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "display_anchors(fmap_w=2, fmap_h=2, s=[0.4])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "将特征图的高度和宽度减小一半,然后将锚框的尺度增加到0.8" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 11, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-07T09:18:58.667333\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib v3.3.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + 
" \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", 
+ " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "display_anchors(fmap_w=1, fmap_h=1, s=[0.8])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "celltoolbar": "Slideshow", + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + }, + "rise": { + "autolaunch": true, + "enable_chalkboard": true, + "overlay": "", + "scroll": true + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/code/45-ssd/45-ssd.ipynb b/code/45-ssd/45-ssd.ipynb new file mode 100644 index 0000000..720a94d --- /dev/null +++ b/code/45-ssd/45-ssd.ipynb @@ -0,0 +1,3742 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "# 单发多框检测(SSD)\n", + "\n", + "类别预测层" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "%matplotlib inline\n", + "import torch\n", + "import torchvision\n", + "from torch import nn\n", + "from torch.nn import functional as F\n", + "from d2l import torch as d2l\n", + "\n", + "\n", + "def cls_predictor(num_inputs, num_anchors, num_classes):\n", + " return nn.Conv2d(num_inputs, num_anchors * (num_classes + 1),\n", + " kernel_size=3, padding=1)#类别预测,输出通道对应每个像素生成锚框的类别,图片高宽不变" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "-" + } + }, + "source": [ + "边界框预测层" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + 
"metadata": { + "origin_pos": 5, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def bbox_predictor(num_inputs, num_anchors):\n", + " return nn.Conv2d(num_inputs, num_anchors * 4, kernel_size=3, padding=1)#对每个边界框预测四个偏移量" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "连结多尺度的预测" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 8, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(torch.Size([2, 55, 20, 20]), torch.Size([2, 33, 10, 10]))" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def forward(x, block):#定义预测层\n", + " return block(x)\n", + "\n", + "Y1 = forward(torch.zeros((2, 8, 20, 20)), cls_predictor(8, 5, 10))#5个锚框,10类\n", + "Y2 = forward(torch.zeros((2, 16, 10, 10)), cls_predictor(16, 3, 10))#3个锚框,10类\n", + "Y1.shape, Y2.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 13, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 25300])" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def flatten_pred(pred):\n", + " return torch.flatten(pred.permute(0, 2, 3, 1), start_dim=1)#交换维度到0,2,3,1(把通道数,也就是预测结果放到最后一维)\n", + "#然后从一维展平,展平后为二维的张量,分别为batch_size和预测结果\n", + "def concat_preds(preds):\n", + " return torch.cat([flatten_pred(p) for p in preds], dim=1)\n", + "#将不同预测结果拼接在一起\n", + "concat_preds([Y1, Y2]).shape#55X20X20+33X10X10" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "高和宽减半块" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 19, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 10, 10, 10])" + ] + }, + 
"execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def down_sample_blk(in_channels, out_channels):\n", + " blk = []\n", + " for _ in range(2):\n", + " blk.append(nn.Conv2d(in_channels, out_channels,\n", + " kernel_size=3, padding=1))#3X3卷积,高宽不变\n", + " blk.append(nn.BatchNorm2d(out_channels))#batchnorm层\n", + " blk.append(nn.ReLU())#relu激活函数\n", + " in_channels = out_channels#通道数不变,再循环一遍\n", + " blk.append(nn.MaxPool2d(2))#最大池化层,高宽减半\n", + " return nn.Sequential(*blk)#两个3X3卷积,两个batchnorm,加一个2X2最大池化层,高宽减半\n", + "\n", + "forward(torch.zeros((2, 3, 20, 20)), down_sample_blk(3, 10)).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "基本网络块" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 22, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 64, 32, 32])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def base_net():\n", + " blk = []\n", + " num_filters = [3, 16, 32, 64]#通道数变化规律\n", + " for i in range(len(num_filters) - 1):\n", + " blk.append(down_sample_blk(num_filters[i], num_filters[i+1]))\n", + " return nn.Sequential(*blk)#构建基本网络,由三个高宽减半块构成\n", + "\n", + "forward(torch.zeros((2, 3, 256, 256)), base_net()).shape#通道数变为64,高宽均缩小八倍" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "完整的单发多框检测模型由五个模块组成" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "origin_pos": 25, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def get_blk(i):#不同的输入返回不同的网络\n", + " if i == 0:\n", + " blk = base_net()\n", + " elif i == 1:\n", + " blk = down_sample_blk(64, 128)\n", + " elif i == 4:\n", + " blk = nn.AdaptiveMaxPool2d((1,1))#自适应池化,输出(H,W)=(1,1)\n", + " else:\n", + " blk = down_sample_blk(128, 
128)\n", + " return blk" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "为每个块定义前向传播" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "origin_pos": 28, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def blk_forward(X, blk, size, ratio, cls_predictor, bbox_predictor):\n", + " Y = blk(X)#前向传播\n", + " anchors = d2l.multibox_prior(Y, sizes=size, ratios=ratio)#对Y给定锚框\n", + " cls_preds = cls_predictor(Y)#对Y类别预测\n", + " bbox_preds = bbox_predictor(Y)#对Y边界框预测\n", + " return (Y, anchors, cls_preds, bbox_preds)#返回" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "超参数" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "origin_pos": 30, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#给定了5组大小和比例,每组都是产生4个锚框,对应SSD中每个网络块要预测的超参数\n", + "sizes = [[0.2, 0.272], [0.37, 0.447], [0.54, 0.619], [0.71, 0.79],\n", + " [0.88, 0.961]]\n", + "ratios = [[1, 2, 0.5]] * 5\n", + "num_anchors = len(sizes[0]) + len(ratios[0]) - 1" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "定义完整的模型" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "origin_pos": 33, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "class TinySSD(nn.Module):\n", + " def __init__(self, num_classes, **kwargs):\n", + " super(TinySSD, self).__init__(**kwargs)\n", + " self.num_classes = num_classes\n", + " idx_to_in_channels = [64, 128, 128, 128, 128]#对应每个网络块输出通道数\n", + " for i in range(5):#定义每个网络块的类别和锚框预测\n", + " setattr(self, f'blk_{i}', get_blk(i))\n", + " setattr(self, f'cls_{i}', cls_predictor(idx_to_in_channels[i],\n", + " num_anchors, num_classes))\n", + " setattr(self, f'bbox_{i}', bbox_predictor(idx_to_in_channels[i],\n", + " num_anchors))\n", + "\n", + " def 
forward(self, X):\n", + " anchors, cls_preds, bbox_preds = [None] * 5, [None] * 5, [None] * 5\n", + " for i in range(5):\n", + " X, anchors[i], cls_preds[i], bbox_preds[i] = blk_forward(\n", + " X, getattr(self, f'blk_{i}'), sizes[i], ratios[i],\n", + " getattr(self, f'cls_{i}'), getattr(self, f'bbox_{i}'))\n", + " anchors = torch.cat(anchors, dim=1)#合并锚框\n", + " cls_preds = concat_preds(cls_preds)#合并预测\n", + " cls_preds = cls_preds.reshape(\n", + " cls_preds.shape[0], -1, self.num_classes + 1)#输出(batch,锚框数,类别)\n", + " bbox_preds = concat_preds(bbox_preds)#合并预测锚框\n", + " return anchors, cls_preds, bbox_preds" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "创建一个模型实例,然后使用它\n", + "执行前向传播" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "origin_pos": 36, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "output anchors: torch.Size([1, 5444, 4])\n", + "output class preds: torch.Size([32, 5444, 2])\n", + "output bbox preds: torch.Size([32, 21776])\n" + ] + } + ], + "source": [ + "net = TinySSD(num_classes=1)#一类,使用的是香蕉数据集\n", + "X = torch.zeros((32, 3, 256, 256))\n", + "anchors, cls_preds, bbox_preds = net(X)#预测\n", + "\n", + "print('output anchors:', anchors.shape)\n", + "print('output class preds:', cls_preds.shape)\n", + "print('output bbox preds:', bbox_preds.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "读取\n", + "香蕉检测数据集" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "origin_pos": 38, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "read 1000 training examples\n", + "read 100 validation examples\n" + ] + } + ], + "source": [ + "batch_size = 32\n", + "train_iter, _ = d2l.load_data_bananas(batch_size)#加载data_iter" + ] + }, + 
{ + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "-" + } + }, + "source": [ + "初始化其参数并定义优化算法" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "origin_pos": 41, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "device, net = d2l.try_gpu(), TinySSD(num_classes=1)#在GPU上训练\n", + "trainer = torch.optim.SGD(net.parameters(), lr=0.2, weight_decay=5e-4)#使用随机梯度下降" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "定义损失函数和评价函数" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "origin_pos": 47, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "cls_loss = nn.CrossEntropyLoss(reduction='none')#对类别预测,reduction='none'对batch中各样本的loss,返回列表\n", + "bbox_loss = nn.L1Loss(reduction='none')#锚框预测使用L1loss,因为锚框是回归问题\n", + "\n", + "def calc_loss(cls_preds, cls_labels, bbox_preds, bbox_labels, bbox_masks):\n", + " batch_size, num_classes = cls_preds.shape[0], cls_preds.shape[2]\n", + " cls = cls_loss(cls_preds.reshape(-1, num_classes),\n", + " cls_labels.reshape(-1)).reshape(batch_size, -1).mean(dim=1)#对每个样本的损失求平均\n", + " bbox = bbox_loss(bbox_preds * bbox_masks,\n", + " bbox_labels * bbox_masks).mean(dim=1)#只对预测含有物体的锚框计算损失\n", + " return cls + bbox#返回损失和\n", + "\n", + "def cls_eval(cls_preds, cls_labels):\n", + " return float((cls_preds.argmax(dim=-1).type(\n", + " cls_labels.dtype) == cls_labels).sum())#计算预测类别正确数\n", + "\n", + "def bbox_eval(bbox_preds, bbox_labels, bbox_masks):\n", + " return float((torch.abs((bbox_labels - bbox_preds) * bbox_masks)).sum())#计算预测锚框正确数" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "训练模型" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "origin_pos": 50, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ 
+ "class err 3.25e-03, bbox mae 3.13e-03\n", + "5044.0 examples/sec on cuda:0\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T15:00:45.920936\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "num_epochs, timer = 20, d2l.Timer()#迭代20次\n", + "animator = d2l.Animator(xlabel='epoch', xlim=[1, num_epochs],\n", + " legend=['class error', 'bbox mae'])#显示图像\n", + "net = net.to(device)#网络转移到GPU上\n", + "for epoch in range(num_epochs):\n", + " metric = d2l.Accumulator(4)\n", + " net.train()#训练模式,会更新梯度\n", + " for features, target in train_iter:\n", + " timer.start()\n", + " trainer.zero_grad()\n", + " X, Y = features.to(device), target.to(device)\n", + " anchors, cls_preds, bbox_preds = net(X)\n", + " bbox_labels, bbox_masks, cls_labels = d2l.multibox_target(anchors, Y)\n", + " l = 
calc_loss(cls_preds, cls_labels, bbox_preds, bbox_labels,\n", + " bbox_masks)\n", + " l.mean().backward()#反向传播\n", + " trainer.step()\n", + " metric.add(cls_eval(cls_preds, cls_labels), cls_labels.numel(),\n", + " bbox_eval(bbox_preds, bbox_labels, bbox_masks),\n", + " bbox_labels.numel())\n", + " cls_err, bbox_mae = 1 - metric[0] / metric[1], metric[2] / metric[3]\n", + " animator.add(epoch + 1, (cls_err, bbox_mae))\n", + "print(f'class err {cls_err:.2e}, bbox mae {bbox_mae:.2e}')\n", + "print(f'{len(train_iter.dataset) / timer.stop():.1f} examples/sec on '\n", + " f'{str(device)}')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "预测目标" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "origin_pos": 56, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "X = torchvision.io.read_image('../img/banana.jpg').unsqueeze(0).float()\n", + "img = X.squeeze(0).permute(1, 2, 0).long()\n", + "#使用下面的multibox_detection函数,我们可以根据锚框及其预测偏移量得到预测边界框。然后,通过非极大值抑制来移除相似的预测边界框。\n", + "def predict(X):\n", + " net.eval()\n", + " anchors, cls_preds, bbox_preds = net(X.to(device))#传入网络\n", + " cls_probs = F.softmax(cls_preds, dim=2).permute(0, 2, 1)\n", + " output = d2l.multibox_detection(cls_probs, bbox_preds, anchors)\n", + " idx = [i for i, row in enumerate(output[0]) if row[0] != -1]\n", + " return output[0, idx]\n", + "\n", + "output = predict(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "筛选所有置信度不低于0.9的边界框,做为最终输出" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "origin_pos": 59, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T15:00:46.821658\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "def display(img, output, threshold):\n", + " d2l.set_figsize((5, 5))\n", + " fig = d2l.plt.imshow(img)\n", + " for row in output:\n", + " score = float(row[1])\n", + " if score < threshold:#低于阈值,不输出\n", + " continue\n", + " h, w = img.shape[0:2]\n", + " bbox = [row[2:6] * torch.tensor((w, h, w, h), device=row.device)]\n", + " d2l.show_bboxes(fig.axes, bbox, '%.2f' % score, 'w')\n", + "\n", + "display(img, output.cpu(), threshold=0.9)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "origin_pos": 62, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T15:00:47.117448\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "def smooth_l1(data, scalar):#平滑L1范数损失\n", + " out = []\n", + " for i in data:\n", + " if abs(i) < 1 / (scalar ** 2):\n", + " out.append(((scalar * i) ** 2) / 2)\n", + " else:\n", + " out.append(abs(i) - 0.5 / (scalar ** 2))\n", + " return torch.tensor(out)\n", + "\n", + "sigmas = [10, 1, 0.5]\n", + "lines = ['-', '--', '-.']\n", + "x = torch.arange(-2, 2, 0.1)\n", + "d2l.set_figsize()\n", + "\n", + "for l, s in zip(lines, sigmas):\n", + " y = smooth_l1(x, scalar=s)\n", + " d2l.plt.plot(x, y, l, label='sigma=%.1f' % s)\n", + "d2l.plt.legend();" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "origin_pos": 65, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T15:00:47.314425\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + 
}, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "def focal_loss(gamma, x):\n", + " return -(1 - x) ** gamma * torch.log(x)\n", + "\n", + "x = torch.arange(0.01, 1, 0.01)\n", + "for l, gamma in zip(lines, [0, 1, 5]):\n", + " y = d2l.plt.plot(x, focal_loss(gamma, x), l, label='gamma=%.1f' % gamma)\n", + "d2l.plt.legend();" + ] + } + ], + "metadata": { + "celltoolbar": "Slideshow", + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + }, + "rise": { + "autolaunch": true, + "enable_chalkboard": true, + "overlay": "", + "scroll": true + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/46-\350\257\255\344\271\211\345\210\206\345\211\262.ipynb" "b/code/46-\350\257\255\344\271\211\345\210\206\345\211\262.ipynb" new file mode 100644 index 0000000..e85cc01 --- /dev/null +++ "b/code/46-\350\257\255\344\271\211\345\210\206\345\211\262.ipynb" @@ -0,0 +1,533 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "Untitled0.ipynb", + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "PWaz7EsiS44f", + "outputId": "61e5bcb2-bdf5-43ab-c40c-7c962f1244df" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Requirement already satisfied: torch in /usr/local/lib/python3.7/dist-packages (1.10.0+cu111)\n", + "Requirement already satisfied: typing-extensions in 
/usr/local/lib/python3.7/dist-packages (from torch) (3.10.0.2)\n", + "Collecting d2l\n", + " Downloading d2l-0.17.3-py3-none-any.whl (82 kB)\n", + "\u001b[K |████████████████████████████████| 82 kB 561 kB/s \n", + "\u001b[?25hCollecting matplotlib==3.3.3\n", + " Downloading matplotlib-3.3.3-cp37-cp37m-manylinux1_x86_64.whl (11.6 MB)\n", + "\u001b[K |████████████████████████████████| 11.6 MB 17.1 MB/s \n", + "\u001b[?25hCollecting requests==2.25.1\n", + " Downloading requests-2.25.1-py2.py3-none-any.whl (61 kB)\n", + "\u001b[K |████████████████████████████████| 61 kB 8.7 MB/s \n", + "\u001b[?25hCollecting pandas==1.2.2\n", + " Downloading pandas-1.2.2-cp37-cp37m-manylinux1_x86_64.whl (9.9 MB)\n", + "\u001b[K |████████████████████████████████| 9.9 MB 20.8 MB/s \n", + "\u001b[?25hCollecting numpy==1.18.5\n", + " Downloading numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)\n", + "\u001b[K |████████████████████████████████| 20.1 MB 1.3 MB/s \n", + "\u001b[?25hRequirement already satisfied: jupyter==1.0.0 in /usr/local/lib/python3.7/dist-packages (from d2l) (1.0.0)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.6.1)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (4.10.1)\n", + "Requirement already satisfied: jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.2.0)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (7.6.5)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.3.1)\n", + "Requirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.2.2)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (2.8.2)\n", + "Requirement 
already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (3.0.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (0.11.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (1.3.2)\n", + "Requirement already satisfied: pillow>=6.2.0 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (7.1.2)\n", + "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.7/dist-packages (from pandas==1.2.2->d2l) (2018.9)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (2.10)\n", + "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (1.24.3)\n", + "Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (3.0.4)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (2021.10.8)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib==3.3.3->d2l) (1.15.0)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.1.1)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.5.0)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.1.1)\n", + "Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.3.5)\n", + "Requirement already satisfied: pygments in 
/usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (2.6.1)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.8.1)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (4.4.2)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (57.4.0)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.7.5)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (1.0.18)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (4.8.0)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.2.5)\n", + "Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (0.2.0)\n", + "Requirement already satisfied: nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (5.1.3)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (1.0.2)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (3.5.2)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) 
(4.3.3)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.9.1)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.10.1)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (5.4.0)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (21.4.0)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (3.10.0.2)\n", + "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (0.18.1)\n", + "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (3.7.0)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (1.8.0)\n", + "Requirement already satisfied: terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (0.13.1)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (2.11.3)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter==1.0.0->d2l) (22.3.0)\n", + "Requirement already satisfied: ptyprocess in 
/usr/local/lib/python3.7/dist-packages (from terminado>=0.8.1->notebook->jupyter==1.0.0->d2l) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter==1.0.0->d2l) (2.0.1)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.7.1)\n", + "Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.8.4)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.4)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (1.5.0)\n", + "Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.5.0)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (4.1.0)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter==1.0.0->d2l) (21.3)\n", + "Requirement already satisfied: webencodings in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter==1.0.0->d2l) (0.5.1)\n", + "Requirement already satisfied: qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter==1.0.0->d2l) (2.0.1)\n", + "Installing collected packages: numpy, requests, pandas, matplotlib, d2l\n", + " Attempting uninstall: numpy\n", + " Found existing installation: numpy 1.19.5\n", + " Uninstalling numpy-1.19.5:\n", + " Successfully uninstalled numpy-1.19.5\n", + " Attempting uninstall: requests\n", + " Found existing installation: requests 2.23.0\n", + " Uninstalling requests-2.23.0:\n", + " Successfully uninstalled requests-2.23.0\n", + " Attempting uninstall: pandas\n", + " Found existing 
installation: pandas 1.3.5\n", + " Uninstalling pandas-1.3.5:\n", + " Successfully uninstalled pandas-1.3.5\n", + " Attempting uninstall: matplotlib\n", + " Found existing installation: matplotlib 3.2.2\n", + " Uninstalling matplotlib-3.2.2:\n", + " Successfully uninstalled matplotlib-3.2.2\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "google-colab 1.0.0 requires requests~=2.23.0, but you have requests 2.25.1 which is incompatible.\n", + "datascience 0.10.6 requires folium==0.2.1, but you have folium 0.8.3 which is incompatible.\n", + "albumentations 0.1.12 requires imgaug<0.2.7,>=0.2.5, but you have imgaug 0.2.9 which is incompatible.\u001b[0m\n", + "Successfully installed d2l-0.17.3 matplotlib-3.3.3 numpy-1.18.5 pandas-1.2.2 requests-2.25.1\n" + ] + }, + { + "output_type": "display_data", + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "matplotlib", + "mpl_toolkits", + "numpy", + "pandas" + ] + } + } + }, + "metadata": {} + } + ], + "source": [ + "!pip3 install torch\n", + "!pip3 install d2l" + ] + }, + { + "cell_type": "code", + "source": [ + "%matplotlib inline\n", + "import os\n", + "import torch\n", + "import torchvision\n", + "from d2l import torch as d2l\n", + "\n", + "#最重要的语义分割数据集之一是Pascal VOC2012,之后的VOC都是2012基础上的改动\n", + "d2l.DATA_HUB['voc2012'] = (d2l.DATA_URL + 'VOCtrainval_11-May-2012.tar',\n", + " '4e443f8a2eca6b1dac8a6c57641b67dd40621a49')\n", + "\n", + "voc_dir = d2l.download_extract('voc2012', 'VOCdevkit/VOC2012')" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "0151QjwoTelN", + "outputId": "5d3b8a24-3b50-48ec-c869-bc7f4aa0c993" + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Downloading ../data/VOCtrainval_11-May-2012.tar from 
http://d2l-data.s3-accelerate.amazonaws.com/VOCtrainval_11-May-2012.tar...\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "#把所有图片读入内存\n", + "def read_voc_images(voc_dir, is_train=True):\n", + " \"\"\"读取所有VOC图像并标注\"\"\"\n", + " txt_fname = os.path.join(voc_dir, 'ImageSets', 'Segmentation',\n", + " 'train.txt' if is_train else 'val.txt')\n", + " mode = torchvision.io.image.ImageReadMode.RGB\n", + " with open(txt_fname, 'r') as f:\n", + " images = f.read().split()\n", + " features, labels = [], []\n", + " for i, fname in enumerate(images):\n", + " features.append(torchvision.io.read_image(os.path.join(\n", + " voc_dir, 'JPEGImages', f'{fname}.jpg')))#根文件下JPEGImages为原始图片,用于训练\n", + " labels.append(torchvision.io.read_image(os.path.join(\n", + " voc_dir, 'SegmentationClass' ,f'{fname}.png'), mode))#语义分割需要对每个像素有label,也存成图片\n", + " return features, labels\n", + "\n", + "train_features, train_labels = read_voc_images(voc_dir, True)" + ], + "metadata": { + "id": "lO1Pjwb6Vsss" + }, + "execution_count": 3, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#展示一些图片和对应label\n", + "n = 5\n", + "imgs = train_features[0:n] + train_labels[0:n]\n", + "imgs = [img.permute(1,2,0) for img in imgs]\n", + "d2l.show_images(imgs, 2, n);" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 885 + }, + "id": "XFaYPwdPTa7-", + "outputId": "c0dbc2e1-6677-41de-d0e1-007948116fd9" + }, + "execution_count": 5, + "outputs": [ + { + "output_type": "error", + "ename": "ImportError", + "evalue": "ignored", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/formatters.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, obj)\u001b[0m\n\u001b[1;32m 332\u001b[0m 
\u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 333\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 334\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mprinter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 335\u001b[0m \u001b[0;31m# Finally look for special method names\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 336\u001b[0m \u001b[0mmethod\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_real_method\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_method\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/pylabtools.py\u001b[0m in \u001b[0;36m\u001b[0;34m(fig)\u001b[0m\n\u001b[1;32m 239\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 240\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m'png'\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 241\u001b[0;31m \u001b[0mpng_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'png'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 242\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m'retina'\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m \u001b[0;32mor\u001b[0m 
\u001b[0;34m'png2x'\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 243\u001b[0m \u001b[0mpng_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mretina_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/pylabtools.py\u001b[0m in \u001b[0;36mprint_figure\u001b[0;34m(fig, fmt, bbox_inches, **kwargs)\u001b[0m\n\u001b[1;32m 123\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[0mbytes_io\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mBytesIO\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 125\u001b[0;31m \u001b[0mfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcanvas\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbytes_io\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 126\u001b[0m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbytes_io\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgetvalue\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 127\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfmt\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'svg'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in 
\u001b[0;36mprint_figure\u001b[0;34m(self, filename, dpi, facecolor, edgecolor, orientation, format, bbox_inches, **kwargs)\u001b[0m\n\u001b[1;32m 2092\u001b[0m \u001b[0mhardcopy\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2093\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2094\u001b[0;31m \u001b[0mParameters\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2095\u001b[0m \u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2096\u001b[0m \u001b[0mfilename\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0mstr\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mpath\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mlike\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfile\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mlike\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in \u001b[0;36m_get_renderer\u001b[0;34m(figure, print_method)\u001b[0m\n\u001b[1;32m 1558\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mrenderer\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1559\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1560\u001b[0;31m raise RuntimeError(f\"{print_method} did not call Figure.draw, so \"\n\u001b[0m\u001b[1;32m 1561\u001b[0m f\"no renderer is available\")\n\u001b[1;32m 1562\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backends/backend_agg.py\u001b[0m in \u001b[0;36mprint_png\u001b[0;34m(self, filename_or_obj, metadata, pil_kwargs, *args, 
**kwargs)\u001b[0m\n\u001b[1;32m 503\u001b[0m \u001b[0mpil_kwargs\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moptional\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 504\u001b[0m \u001b[0mKeyword\u001b[0m \u001b[0marguments\u001b[0m \u001b[0mpassed\u001b[0m \u001b[0mto\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m`\u001b[0m\u001b[0mPIL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mImage\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mImage\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msave\u001b[0m\u001b[0;31m`\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 505\u001b[0;31m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 506\u001b[0m \u001b[0mIf\u001b[0m \u001b[0mthe\u001b[0m \u001b[0;34m'pnginfo'\u001b[0m \u001b[0mkey\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0mpresent\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mit\u001b[0m \u001b[0mcompletely\u001b[0m \u001b[0moverrides\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 507\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0mmetadata\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mincluding\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mdefault\u001b[0m \u001b[0;34m'Software'\u001b[0m \u001b[0mkey\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mImportError\u001b[0m: cannot import name '_png' from 'matplotlib' (/usr/local/lib/python3.7/dist-packages/matplotlib/__init__.py)" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "code", + "source": [ + "#用不同像素的颜色表示不同的label\n", + "VOC_COLORMAP = [[0, 0, 0], [128, 0, 0], [0, 128, 0], [128, 128, 0],\n", + " [0, 0, 128], [128, 0, 128], [0, 128, 128], [128, 128, 128],\n", + " [64, 0, 0], [192, 0, 0], [64, 128, 0], [192, 128, 0],\n", + " [64, 0, 128], [192, 0, 128], [64, 128, 128], [192, 128, 
128],\n", + " [0, 64, 0], [128, 64, 0], [0, 192, 0], [128, 192, 0],\n", + " [0, 64, 128]]\n", + "\n", + "\n", + "VOC_CLASSES = ['background', 'aeroplane', 'bicycle', 'bird', 'boat',\n", + " 'bottle', 'bus', 'car', 'cat', 'chair', 'cow',\n", + " 'diningtable', 'dog', 'horse', 'motorbike', 'person',\n", + " 'potted plant', 'sheep', 'sofa', 'train', 'tv/monitor']" + ], + "metadata": { + "id": "9C0e4K-ZVsbK" + }, + "execution_count": 6, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def voc_colormap2label():\n", + " \"\"\"构建从RGB到VOC类别索引的映射\"\"\"#把每个RGBlabel对应一个整数\n", + " colormap2label = torch.zeros(256 ** 3, dtype=torch.long)\n", + " for i, colormap in enumerate(VOC_COLORMAP):\n", + " colormap2label[\n", + " (colormap[0] * 256 + colormap[1]) * 256 + colormap[2]] = i\n", + " return colormap2label\n", + "\n", + "\n", + "def voc_label_indices(colormap, colormap2label):\n", + " \"\"\"将VOC标签中的RGB值映射到它们的类别索引\"\"\"\n", + " colormap = colormap.permute(1, 2, 0).numpy().astype('int32')\n", + " idx = ((colormap[:, :, 0] * 256 + colormap[:, :, 1]) * 256\n", + " + colormap[:, :, 2])\n", + " return colormap2label[idx]" + ], + "metadata": { + "id": "FXMKAP5YVxKX" + }, + "execution_count": 7, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "y = voc_label_indices(train_labels[0], voc_colormap2label())\n", + "y[105:115, 130:140], VOC_CLASSES[1]" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Ii8EvpdsV7xC", + "outputId": "4f6a2d73-f022-4f59-b2ab-ce13fd5ba044" + }, + "execution_count": 8, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "(tensor([[0, 0, 0, 0, 0, 0, 0, 0, 0, 1],\n", + " [0, 0, 0, 0, 0, 0, 0, 1, 1, 1],\n", + " [0, 0, 0, 0, 0, 0, 1, 1, 1, 1],\n", + " [0, 0, 0, 0, 0, 1, 1, 1, 1, 1],\n", + " [0, 0, 0, 0, 0, 1, 1, 1, 1, 1],\n", + " [0, 0, 0, 0, 1, 1, 1, 1, 1, 1],\n", + " [0, 0, 0, 0, 0, 1, 1, 1, 1, 1],\n", + " [0, 0, 0, 0, 0, 1, 1, 1, 1, 1],\n", + " [0, 0, 0, 0, 0, 
0, 1, 1, 1, 1],\n", + " [0, 0, 0, 0, 0, 0, 0, 0, 1, 1]]), 'aeroplane')" + ] + }, + "metadata": {}, + "execution_count": 8 + } + ] + }, + { + "cell_type": "code", + "source": [ + "#图片增广\n", + "def voc_rand_crop(feature, label, height, width):\n", + " \"\"\"随机裁剪特征和标签图像\"\"\"\n", + " rect = torchvision.transforms.RandomCrop.get_params(\n", + " feature, (height, width))#get_params()返回RandomCrop随机得到的框\n", + " feature = torchvision.transforms.functional.crop(feature, *rect)\n", + " label = torchvision.transforms.functional.crop(label, *rect)#用这个框分别切feature和label\n", + " return feature, label\n", + "\n", + "imgs = []\n", + "for _ in range(n):\n", + " imgs += voc_rand_crop(train_features[0], train_labels[0], 200, 300)\n", + "\n", + "imgs = [img.permute(1, 2, 0) for img in imgs]\n", + "d2l.show_images(imgs[::2] + imgs[1::2], 2, n);#展示效果" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 392 + }, + "id": "702H4XnBWApq", + "outputId": "7e782bd9-85d6-415c-e782-afd76324f37a" + }, + "execution_count": 9, + "outputs": [ + { + "output_type": "error", + "ename": "ImportError", + "evalue": "ignored", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/formatters.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, obj)\u001b[0m\n\u001b[1;32m 332\u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 333\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 334\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mprinter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 335\u001b[0m \u001b[0;31m# Finally look for special method 
names\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 336\u001b[0m \u001b[0mmethod\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_real_method\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_method\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/pylabtools.py\u001b[0m in \u001b[0;36m\u001b[0;34m(fig)\u001b[0m\n\u001b[1;32m 239\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 240\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m'png'\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 241\u001b[0;31m \u001b[0mpng_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'png'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 242\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m'retina'\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;34m'png2x'\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 243\u001b[0m \u001b[0mpng_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m 
\u001b[0mretina_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/pylabtools.py\u001b[0m in \u001b[0;36mprint_figure\u001b[0;34m(fig, fmt, bbox_inches, **kwargs)\u001b[0m\n\u001b[1;32m 123\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[0mbytes_io\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mBytesIO\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 125\u001b[0;31m \u001b[0mfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcanvas\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbytes_io\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 126\u001b[0m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbytes_io\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgetvalue\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 127\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfmt\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'svg'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in \u001b[0;36mprint_figure\u001b[0;34m(self, filename, dpi, facecolor, edgecolor, orientation, format, bbox_inches, **kwargs)\u001b[0m\n\u001b[1;32m 2092\u001b[0m \u001b[0mhardcopy\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2093\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2094\u001b[0;31m 
\u001b[0mParameters\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2095\u001b[0m \u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2096\u001b[0m \u001b[0mfilename\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0mstr\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mpath\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mlike\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfile\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mlike\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in \u001b[0;36m_get_renderer\u001b[0;34m(figure, print_method)\u001b[0m\n\u001b[1;32m 1558\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mrenderer\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1559\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1560\u001b[0;31m raise RuntimeError(f\"{print_method} did not call Figure.draw, so \"\n\u001b[0m\u001b[1;32m 1561\u001b[0m f\"no renderer is available\")\n\u001b[1;32m 1562\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backends/backend_agg.py\u001b[0m in \u001b[0;36mprint_png\u001b[0;34m(self, filename_or_obj, metadata, pil_kwargs, *args, **kwargs)\u001b[0m\n\u001b[1;32m 503\u001b[0m \u001b[0mpil_kwargs\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moptional\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 504\u001b[0m \u001b[0mKeyword\u001b[0m \u001b[0marguments\u001b[0m \u001b[0mpassed\u001b[0m 
\u001b[0mto\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m`\u001b[0m\u001b[0mPIL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mImage\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mImage\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msave\u001b[0m\u001b[0;31m`\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 505\u001b[0;31m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 506\u001b[0m \u001b[0mIf\u001b[0m \u001b[0mthe\u001b[0m \u001b[0;34m'pnginfo'\u001b[0m \u001b[0mkey\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0mpresent\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mit\u001b[0m \u001b[0mcompletely\u001b[0m \u001b[0moverrides\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 507\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0mmetadata\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mincluding\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mdefault\u001b[0m \u001b[0;34m'Software'\u001b[0m \u001b[0mkey\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mImportError\u001b[0m: cannot import name '_png' from 'matplotlib' (/usr/local/lib/python3.7/dist-packages/matplotlib/__init__.py)" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "code", + "source": [ + "class VOCSegDataset(torch.utils.data.Dataset):\n", + " \"\"\"一个用于加载VOC数据集的自定义数据集\"\"\"\n", + "\n", + " def __init__(self, is_train, crop_size, voc_dir):\n", + " self.transform = torchvision.transforms.Normalize(\n", + " mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n", + " self.crop_size = crop_size#用crop不用拉伸是因为拉伸需要插值而对label插值操作很麻烦\n", + " features, labels = read_voc_images(voc_dir, is_train=is_train)\n", + " self.features = [self.normalize_image(feature)\n", + " for feature in self.filter(features)]\n", + " self.labels = self.filter(labels)\n", + " self.colormap2label = voc_colormap2label()\n", + " 
print('read ' + str(len(self.features)) + ' examples')\n", + "\n", + " def normalize_image(self, img):\n", + " return self.transform(img.float() / 255)\n", + "\n", + " def filter(self, imgs):#如果图片尺寸比裁剪大小还小就舍弃掉(也可以做padding)\n", + " return [img for img in imgs if (\n", + " img.shape[1] >= self.crop_size[0] and\n", + " img.shape[2] >= self.crop_size[1])]\n", + "\n", + " def __getitem__(self, idx):\n", + " feature, label = voc_rand_crop(self.features[idx], self.labels[idx],\n", + " *self.crop_size)\n", + " return (feature, voc_label_indices(label, self.colormap2label))#RGBlabellabel转index label\n", + "\n", + " def __len__(self):\n", + " return len(self.features)" + ], + "metadata": { + "id": "FCL-lddxq9_A" + }, + "execution_count": 10, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#读取数据集\n", + "crop_size = (320, 480)\n", + "voc_train = VOCSegDataset(True, crop_size, voc_dir)\n", + "voc_test = VOCSegDataset(False, crop_size, voc_dir)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "M3KOdUMSr4Yz", + "outputId": "ff0077a1-4228-4f10-d56f-a403081c94ad" + }, + "execution_count": 11, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "read 1114 examples\n", + "read 1078 examples\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "batch_size = 64\n", + "train_iter = torch.utils.data.DataLoader(voc_train, batch_size, shuffle=True,\n", + " drop_last=True,\n", + " num_workers=d2l.get_dataloader_workers())\n", + "for X, Y in train_iter:\n", + " print(X.shape)\n", + " print(Y.shape)\n", + " break" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "cQOAOa6avo9A", + "outputId": "dc0e6ddf-8101-43f3-f865-c1e64e7a89e6" + }, + "execution_count": 12, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:481: UserWarning: This DataLoader will create 4 worker 
processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n" + ] + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "torch.Size([64, 3, 320, 480])\n", + "torch.Size([64, 320, 480])\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "#整合所有组件\n", + "def load_data_voc(batch_size, crop_size):\n", + " \"\"\"加载VOC语义分割数据集\"\"\"\n", + " voc_dir = d2l.download_extract('voc2012', os.path.join(\n", + " 'VOCdevkit', 'VOC2012'))\n", + " num_workers = d2l.get_dataloader_workers()\n", + " train_iter = torch.utils.data.DataLoader(\n", + " VOCSegDataset(True, crop_size, voc_dir), batch_size,\n", + " shuffle=True, drop_last=True, num_workers=num_workers)\n", + " test_iter = torch.utils.data.DataLoader(\n", + " VOCSegDataset(False, crop_size, voc_dir), batch_size,\n", + " drop_last=True, num_workers=num_workers)\n", + " return train_iter, test_iter" + ], + "metadata": { + "id": "Tgd9VvqSwMlt" + }, + "execution_count": null, + "outputs": [] + } + ] +} \ No newline at end of file diff --git "a/code/48-\345\205\250\345\215\267\347\247\257\347\245\236\347\273\217\347\275\221\347\273\234\357\274\210FCN\357\274\211/fcn.ipynb" "b/code/48-\345\205\250\345\215\267\347\247\257\347\245\236\347\273\217\347\275\221\347\273\234\357\274\210FCN\357\274\211/fcn.ipynb" new file mode 100644 index 0000000..0a01401 --- /dev/null +++ "b/code/48-\345\205\250\345\215\267\347\247\257\347\245\236\347\273\217\347\275\221\347\273\234\357\274\210FCN\357\274\211/fcn.ipynb" @@ -0,0 +1,990 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 全卷积网络\n", + ":label:`sec_fcn`\n", + "\n", + "如 :numref:`sec_semantic_segmentation`中所介绍的那样,语义分割是对图像中的每个像素分类。\n", + 
"*全卷积网络*(fully convolutional network,FCN)采用卷积神经网络实现了从图像像素到像素类别的变换 :cite:`Long.Shelhamer.Darrell.2015`。\n", + "与我们之前在图像分类或目标检测部分介绍的卷积神经网络不同,全卷积网络将中间层特征图的高和宽变换回输入图像的尺寸:这是通过在 :numref:`sec_transposed_conv`中引入的*转置卷积*(transposed convolution)实现的。\n", + "因此,输出的类别预测与输入图像在像素级别上具有一一对应关系:通道维的输出即该位置对应像素的类别预测。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "%matplotlib inline\n", + "import torch\n", + "import torchvision\n", + "from torch import nn\n", + "from torch.nn import functional as F\n", + "from d2l import torch as d2l" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 3 + }, + "source": [ + "## 构造模型\n", + "\n", + "下面我们了解一下全卷积网络模型最基本的设计。\n", + "如 :numref:`fig_fcn`所示,全卷积网络先使用卷积神经网络抽取图像特征,然后通过$1\\times 1$卷积层将通道数变换为类别个数,最后在 :numref:`sec_transposed_conv`中通过转置卷积层将特征图的高和宽变换为输入图像的尺寸。\n", + "因此,模型输出与输入图像的高和宽相同,且最终输出通道包含了该空间位置像素的类别预测。\n", + "\n", + "![全卷积网络](../img/fcn.svg)\n", + ":label:`fig_fcn`\n", + "\n", + "下面,我们[**使用在ImageNet数据集上预训练的ResNet-18模型来提取图像特征**],并将该网络记为`pretrained_net`。\n", + "ResNet-18模型的最后几层包括全局平均汇聚层和全连接层,然而全卷积网络中不需要它们。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 5, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[Sequential(\n", + " (0): BasicBlock(\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, 
track_running_stats=True)\n", + " )\n", + " )\n", + " (1): BasicBlock(\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " ),\n", + " AdaptiveAvgPool2d(output_size=(1, 1)),\n", + " Linear(in_features=512, out_features=1000, bias=True)]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pretrained_net = torchvision.models.resnet18(pretrained=True) \n", + "list(pretrained_net.children())[-3:] #列出来网络的所有的层children,列出来最后的三层" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 6 + }, + "source": [ + "接下来,我们[**创建一个全卷积网络`net`**]。\n", + "它复制了ResNet-18中大部分的预训练层,除了最后的全局平均汇聚层和最接近输出的全连接层。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 8, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "net = nn.Sequential(*list(pretrained_net.children())[:-2]) # 去掉最后的两层" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 9 + }, + "source": [ + "给定高度为320和宽度为480的输入,`net`的前向传播将输入的高和宽减小至原来的$1/32$,即10和15。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 11, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([1, 512, 10, 15])" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X = torch.rand(size=(1, 3, 320, 480))\n", + "net(X).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 12 + }, + "source": [ + "接下来,我们[**使用$1\\times1$卷积层将输出通道数转换为Pascal VOC2012数据集的类数(21类)。**]\n", + 
"最后,我们需要(**将特征图的高度和宽度增加32倍**),从而将其变回输入图像的高和宽。\n", + "回想一下 :numref:`sec_padding`中卷积层输出形状的计算方法:\n", + "由于$(320-64+16\\times2+32)/32=10$且$(480-64+16\\times2+32)/32=15$,我们构造一个步幅为$32$的转置卷积层,并将卷积核的高和宽设为$64$,填充为$16$。\n", + "我们可以看到如果步幅为$s$,填充为$s/2$(假设$s/2$是整数)且卷积核的高和宽为$2s$,转置卷积核会将输入的高和宽分别放大$s$倍。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 14, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "num_classes = 21\n", + "# out=num_classes 降低计算量\n", + "net.add_module('final_conv', nn.Conv2d(512, num_classes, kernel_size=1))\n", + "# stride=32 放大32倍 padding=16 高宽不变的最小值 kenel_size=64 每次跳一半\n", + "net.add_module('transpose_conv', nn.ConvTranspose2d(num_classes, num_classes,\n", + " kernel_size=64, padding=16, stride=32)) " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 15 + }, + "source": [ + "## [**初始化转置卷积层**]\n", + "\n", + "在图像处理中,我们有时需要将图像放大,即*上采样*(upsampling)。\n", + "*双线性插值*(bilinear interpolation)\n", + "是常用的上采样方法之一,它也经常用于初始化转置卷积层。\n", + "\n", + "为了解释双线性插值,假设给定输入图像,我们想要计算上采样输出图像上的每个像素。\n", + "首先,将输出图像的坐标$(x,y)$映射到输入图像的坐标$(x',y')$上。\n", + "例如,根据输入与输出的尺寸之比来映射。\n", + "请注意,映射后的$x′$和$y′$是实数。\n", + "然后,在输入图像上找到离坐标$(x',y')$最近的4个像素。\n", + "最后,输出图像在坐标$(x,y)$上的像素依据输入图像上这4个像素及其与$(x',y')$的相对距离来计算。\n", + "\n", + "双线性插值的上采样可以通过转置卷积层实现,内核由以下`bilinear_kernel`函数构造。\n", + "限于篇幅,我们只给出`bilinear_kernel`函数的实现,不讨论算法的原理。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 17, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def bilinear_kernel(in_channels, out_channels, kernel_size):\n", + " factor = (kernel_size + 1) // 2\n", + " if kernel_size % 2 == 1:\n", + " center = factor - 1\n", + " else:\n", + " center = factor - 0.5\n", + " og = (torch.arange(kernel_size).reshape(-1, 1),\n", + " torch.arange(kernel_size).reshape(1, -1))\n", + " filt = (1 - torch.abs(og[0] - center) / factor) * \\\n", + " (1 - torch.abs(og[1] - center) / factor)\n", + " weight 
= torch.zeros((in_channels, out_channels,\n", + " kernel_size, kernel_size))\n", + " weight[range(in_channels), range(out_channels), :, :] = filt\n", + " return weight" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 18 + }, + "source": [ + "让我们用[**双线性插值的上采样实验**]它由转置卷积层实现。\n", + "我们构造一个将输入的高和宽放大2倍的转置卷积层,并将其卷积核用`bilinear_kernel`函数初始化。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 20, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "conv_trans = nn.ConvTranspose2d(3, 3, kernel_size=4, padding=1, stride=2,\n", + " bias=False)\n", + "# 使用双线性插值初始化转置卷积层的参数\n", + "conv_trans.weight.data.copy_(bilinear_kernel(3, 3, 4));" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 21 + }, + "source": [ + "读取图像`X`,将上采样的结果记作`Y`。为了打印图像,我们需要调整通道维的位置。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 23, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "# 读取图像\n", + "img = torchvision.transforms.ToTensor()(d2l.Image.open('../img/catdog.jpg'))\n", + "X = img.unsqueeze(0)\n", + "# 转置卷积操作\n", + "Y = conv_trans(X)\n", + "out_img = Y[0].permute(1, 2, 0).detach()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 24 + }, + "source": [ + "可以看到,转置卷积层将图像的高和宽分别放大了2倍。\n", + "除了坐标刻度不同,双线性插值放大的图像和在 :numref:`sec_bbox`中打印出的原图看上去没什么两样。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "origin_pos": 26, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "input image shape: torch.Size([561, 728, 3])\n", + "output image shape: torch.Size([1122, 1456, 3])\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2022-02-07T17:28:50.433664\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "d2l.set_figsize()\n", + "print('input image shape:', 
img.permute(1, 2, 0).shape)\n", + "d2l.plt.imshow(img.permute(1, 2, 0));\n", + "print('output image shape:', out_img.shape)\n", + "d2l.plt.imshow(out_img);" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 27 + }, + "source": [ + "在全卷积网络中,我们[**用双线性插值的上采样初始化转置卷积层。对于$1\\times 1$卷积层,我们使用Xavier初始化参数。**]" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "origin_pos": 29, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "# 用双线性插值的上采样初始化转置卷积层\n", + "W = bilinear_kernel(num_classes, num_classes, 64)\n", + "net.transpose_conv.weight.data.copy_(W);" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 30 + }, + "source": [ + "## [**读取数据集**]\n", + "\n", + "我们用 :numref:`sec_semantic_segmentation`中介绍的语义分割读取数据集。\n", + "指定随机裁剪的输出图像的形状为$320\\times 480$:高和宽都可以被$32$整除。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "origin_pos": 31, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "read 1114 examples\n", + "read 1078 examples\n" + ] + } + ], + "source": [ + "batch_size, crop_size = 32, (320, 480)\n", + "train_iter, test_iter = d2l.load_data_voc(batch_size, crop_size)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 32 + }, + "source": [ + "## [**训练**]\n", + "\n", + "现在我们可以训练全卷积网络了。\n", + "这里的损失函数和准确率计算与图像分类中的并没有本质上的不同,因为我们使用转置卷积层的通道来预测像素的类别,所以需要在损失计算中指定通道维。\n", + "此外,模型基于每个像素的预测类别是否正确来计算准确率。\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "origin_pos": 34, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def loss(inputs, targets):\n", + " return F.cross_entropy(inputs, targets, reduction='none').mean(1).mean(1) # 矩阵取均值\n", + "\n", + "num_epochs, lr, wd, devices = 5, 0.001, 1e-3, d2l.try_all_gpus()\n", + "trainer = torch.optim.SGD(net.parameters(), lr=lr, weight_decay=wd)\n", + "d2l.train_ch13(net, train_iter, test_iter, 
loss, trainer, num_epochs, devices)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 35 + }, + "source": [ + "## [**预测**]\n", + "\n", + "在预测时,我们需要将输入图像在各个通道做标准化,并转成卷积神经网络所需要的四维输入格式。\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "origin_pos": 37, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def predict(img):\n", + " X = test_iter.dataset.normalize_image(img).unsqueeze(0)\n", + " pred = net(X.to(devices[0])).argmax(dim=1) # 通道纬度\n", + " return pred.reshape(pred.shape[1], pred.shape[2])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 38 + }, + "source": [ + "为了[**可视化预测的类别**]给每个像素,我们将预测类别映射回它们在数据集中的标注颜色。\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "origin_pos": 40, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def label2image(pred):\n", + " colormap = torch.tensor(d2l.VOC_COLORMAP, device=devices[0])\n", + " X = pred.long()\n", + " return colormap[X, :]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 41 + }, + "source": [ + "测试数据集中的图像大小和形状各异。\n", + "由于模型使用了步幅为32的转置卷积层,因此当输入图像的高或宽无法被32整除时,转置卷积层输出的高或宽会与输入图像的尺寸有偏差。\n", + "为了解决这个问题,我们可以在图像中截取多块高和宽为32的整数倍的矩形区域,并分别对这些区域中的像素做前向传播。\n", + "请注意,这些区域的并集需要完整覆盖输入图像。\n", + "当一个像素被多个区域所覆盖时,它在不同区域前向传播中转置卷积层输出的平均值可以作为`softmax`运算的输入,从而预测类别。\n", + "\n", + "为简单起见,我们只读取几张较大的测试图像,并从图像的左上角开始截取形状为$320\\times480$的区域用于预测。\n", + "对于这些测试图像,我们逐一打印它们截取的区域,再打印预测结果,最后打印标注的类别。\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "origin_pos": 43, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "voc_dir = d2l.download_extract('voc2012', 'VOCdevkit/VOC2012')\n", + "test_images, test_labels = d2l.read_voc_images(voc_dir, False)\n", + "n, imgs = 4, []\n", + "for i in range(n):\n", + " crop_rect = (0, 0, 320, 480)\n", + " X = torchvision.transforms.functional.crop(test_images[i], *crop_rect)\n", + " pred = 
label2image(predict(X))\n", + " imgs += [X.permute(1,2,0), pred.cpu(),\n", + " torchvision.transforms.functional.crop(\n", + " test_labels[i], *crop_rect).permute(1,2,0)]\n", + "d2l.show_images(imgs[::3] + imgs[1::3] + imgs[2::3], 3, n, scale=2);" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 44 + }, + "source": [ + "## 小结\n", + "\n", + "* 全卷积网络先使用卷积神经网络抽取图像特征,然后通过$1\\times 1$卷积层将通道数变换为类别个数,最后通过转置卷积层将特征图的高和宽变换为输入图像的尺寸。\n", + "* 在全卷积网络中,我们可以将转置卷积层初始化为双线性插值的上采样。\n", + "\n", + "## 练习\n", + "\n", + "1. 如果将转置卷积层改用Xavier随机初始化,结果有什么变化?\n", + "1. 调节超参数,能进一步提升模型的精度吗?\n", + "1. 预测测试图像中所有像素的类别。\n", + "1. 最初的全卷积网络的论文中 :cite:`Long.Shelhamer.Darrell.2015`还使用了某些卷积神经网络中间层的输出。试着实现这个想法。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 46, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/3297)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.2" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/49-\346\240\267\345\274\217\350\277\201\347\247\273.ipynb" "b/code/49-\346\240\267\345\274\217\350\277\201\347\247\273.ipynb" new file mode 100644 index 0000000..a2639ae --- /dev/null +++ "b/code/49-\346\240\267\345\274\217\350\277\201\347\247\273.ipynb" @@ -0,0 +1,2554 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 风格迁移\n", + "\n", + "如果你是一位摄影爱好者,你也许接触过滤波器。它能改变照片的颜色风格,从而使风景照更加锐利或者令人像更加美白。但一个滤波器通常只能改变照片的某个方面。如果要照片达到理想中的风格,你可能需要尝试大量不同的组合。这个过程的复杂程度不亚于模型调参。\n", + "\n", + "在本节中,我们将介绍如何使用卷积神经网络,自动将一个图像中的风格应用在另一图像之上,即*风格迁移*(style transfer) :cite:`Gatys.Ecker.Bethge.2016`。\n", + 
"这里我们需要两张输入图像:一张是*内容图像*,另一张是*风格图像*。\n", + "我们将使用神经网络修改内容图像,使其在风格上接近风格图像。\n", + "例如, :numref:`fig_style_transfer`中的内容图像为本书作者在西雅图郊区的雷尼尔山国家公园拍摄的风景照,而风格图像则是一幅主题为秋天橡树的油画。\n", + "最终输出的合成图像应用了风格图像的油画笔触让整体颜色更加鲜艳,同时保留了内容图像中物体主体的形状。\n", + "\n", + "![输入内容图像和风格图像,输出风格迁移后的合成图像](../img/style-transfer.svg)\n", + ":label:`fig_style_transfer`\n", + "\n", + "## 方法\n", + "\n", + " :numref:`fig_style_transfer_model`用简单的例子阐述了基于卷积神经网络的风格迁移方法。\n", + "首先,我们初始化合成图像,例如将其初始化为内容图像。\n", + "该合成图像是风格迁移过程中唯一需要更新的变量,即风格迁移所需迭代的模型参数。\n", + "然后,我们选择一个预训练的卷积神经网络来抽取图像的特征,其中的模型参数在训练中无须更新。\n", + "这个深度卷积神经网络凭借多个层逐级抽取图像的特征,我们可以选择其中某些层的输出作为内容特征或风格特征。\n", + "以 :numref:`fig_style_transfer_model`为例,这里选取的预训练的神经网络含有3个卷积层,其中第二层输出内容特征,第一层和第三层输出风格特征。\n", + "\n", + "![基于卷积神经网络的风格迁移。实线箭头和虚线箭头分别表示前向传播和反向传播](../img/neural-style.svg)\n", + ":label:`fig_style_transfer_model`\n", + "\n", + "接下来,我们通过前向传播(实线箭头方向)计算风格迁移的损失函数,并通过反向传播(虚线箭头方向)迭代模型参数,即不断更新合成图像。\n", + "风格迁移常用的损失函数由3部分组成:\n", + "(i)*内容损失*使合成图像与内容图像在内容特征上接近;\n", + "(ii)*风格损失*使合成图像与风格图像在风格特征上接近;\n", + "(iii)*全变分损失*则有助于减少合成图像中的噪点。\n", + "最后,当模型训练结束时,我们输出风格迁移的模型参数,即得到最终的合成图像。\n", + "\n", + "在下面,我们将通过代码来进一步了解风格迁移的技术细节。\n", + "\n", + "## [**阅读内容和风格图像**]\n", + "\n", + "首先,我们读取内容和风格图像。\n", + "从打印出的图像坐标轴可以看出,它们的尺寸并不一样。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T21:34:14.501005\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "import torch\n", + "import torchvision\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "\n", + "d2l.set_figsize()\n", + 
"content_img = d2l.Image.open('../img/rainier.jpg')\n", + "d2l.plt.imshow(content_img);" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 4, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T21:34:14.952502\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "style_img = d2l.Image.open('../img/autumn-oak.jpg')\n", + "d2l.plt.imshow(style_img);" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 5 + }, + "source": [ + "## [**预处理和后处理**]\n", + "\n", + "下面,定义图像的预处理函数和后处理函数。\n", + "预处理函数`preprocess`对输入图像在RGB三个通道分别做标准化,并将结果变换成卷积神经网络接受的输入格式。\n", + "后处理函数`postprocess`则将输出图像中的像素值还原回标准化之前的值。\n", + "由于图像打印函数要求每个像素的浮点数值在0到1之间,我们对小于0和大于1的值分别取0和1。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 7, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#VGG网络对数据进行了归一化,使用同样的参数对输入归一化使之更好的适配VGG网络\n", + "rgb_mean = torch.tensor([0.485, 0.456, 0.406])\n", + "rgb_std = torch.tensor([0.229, 0.224, 0.225])\n", + "\n", + "def preprocess(img, image_shape):\n", + " transforms = torchvision.transforms.Compose([\n", + " torchvision.transforms.Resize(image_shape),#变换大小\n", + " torchvision.transforms.ToTensor(),#转为Tensor\n", + " torchvision.transforms.Normalize(mean=rgb_mean, std=rgb_std)])#归一化\n", + " return transforms(img).unsqueeze(0)#加上batch_size维度\n", + "\n", + "def postprocess(img):\n", + " img = img[0].to(rgb_std.device)\n", + " img = torch.clamp(img.permute(1, 2, 0) * rgb_std + rgb_mean, 0, 1)#改变顺序为[h,w,3],使其与rgb_std后缘维度相同,利用广播机制\n", + " return torchvision.transforms.ToPILImage()(img.permute(2, 0, 1))#将顺序变回[3,h,w]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + 
"source": [ + "## [**抽取图像特征**]\n", + "\n", + "我们使用基于ImageNet数据集预训练的VGG-19模型来抽取图像特征 :cite:`Gatys.Ecker.Bethge.2016`。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 10, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "pretrained_net = torchvision.models.vgg19(pretrained=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 11 + }, + "source": [ + "为了抽取图像的内容特征和风格特征,我们可以选择VGG网络中某些层的输出。\n", + "一般来说,越靠近输入层,越容易抽取图像的细节信息;反之,则越容易抽取图像的全局信息。\n", + "为了避免合成图像过多保留内容图像的细节,我们选择VGG较靠近输出的层,即*内容层*,来输出图像的内容特征。\n", + "我们还从VGG中选择不同层的输出来匹配局部和全局的风格,这些图层也称为*风格层*。\n", + "正如 :numref:`sec_vgg`中所介绍的,VGG网络使用了5个卷积块。\n", + "实验中,我们选择第四卷积块的最后一个卷积层作为内容层,选择每个卷积块的第一个卷积层作为风格层。\n", + "这些层的索引可以通过打印`pretrained_net`实例获取。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 12, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "style_layers, content_layers = [0, 5, 10, 19, 28], [25]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 13 + }, + "source": [ + "使用VGG层抽取特征时,我们只需要用到从输入层到最靠近输出层的内容层或风格层之间的所有层。\n", + "下面构建一个新的网络`net`,它只保留需要用到的VGG的所有层。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 15, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "net = nn.Sequential(*[pretrained_net.features[i] for i in\n", + " range(max(content_layers + style_layers) + 1)])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 16 + }, + "source": [ + "给定输入`X`,如果我们简单地调用前向传播`net(X)`,只能获得最后一层的输出。\n", + "由于我们还需要中间层的输出,因此这里我们逐层计算,并保留内容层和风格层的输出。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 17, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def extract_features(X, content_layers, style_layers):\n", + " contents = []\n", + " styles = []\n", + " for i in range(len(net)):\n", + " X = net[i](X)\n", + " if i in 
style_layers:\n", + " styles.append(X)\n", + " if i in content_layers:\n", + " contents.append(X)\n", + " return contents, styles" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 18 + }, + "source": [ + "下面定义两个函数:`get_contents`函数对内容图像抽取内容特征;\n", + "`get_styles`函数对风格图像抽取风格特征。\n", + "因为在训练时无须改变预训练的VGG的模型参数,所以我们可以在训练开始之前就提取出内容特征和风格特征。\n", + "由于合成图像是风格迁移所需迭代的模型参数,我们只能在训练过程中通过调用`extract_features`函数来抽取合成图像的内容特征和风格特征。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 20, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def get_contents(image_shape, device):\n", + " content_X = preprocess(content_img, image_shape).to(device)\n", + " contents_Y, _ = extract_features(content_X, content_layers, style_layers)\n", + " return content_X, contents_Y\n", + "\n", + "def get_styles(image_shape, device):\n", + " style_X = preprocess(style_img, image_shape).to(device)\n", + " _, styles_Y = extract_features(style_X, content_layers, style_layers)\n", + " return style_X, styles_Y" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 21 + }, + "source": [ + "## [**定义损失函数**]\n", + "\n", + "下面我们来描述风格迁移的损失函数。\n", + "它由内容损失、风格损失和全变分损失3部分组成。\n", + "\n", + "### 内容损失\n", + "\n", + "与线性回归中的损失函数类似,内容损失通过平方误差函数衡量合成图像与内容图像在内容特征上的差异。\n", + "平方误差函数的两个输入均为`extract_features`函数计算所得到的内容层的输出。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "origin_pos": 23, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def content_loss(Y_hat, Y):\n", + " # 我们从动态计算梯度的树中分离目标:\n", + " # 这是一个规定的值,而不是一个变量。\n", + " return torch.square(Y_hat - Y.detach()).mean()#计算平方差损失,mean函数求平均值" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 24 + }, + "source": [ + "### 风格损失\n", + "\n", + "风格损失与内容损失类似,也通过平方误差函数衡量合成图像与风格图像在风格上的差异。\n", + "为了表达风格层输出的风格,我们先通过`extract_features`函数计算风格层的输出。\n", + 
"假设该输出的样本数为1,通道数为$c$,高和宽分别为$h$和$w$,我们可以将此输出转换为矩阵$\\mathbf{X}$,其有$c$行和$hw$列。\n", + "这个矩阵可以被看作是由$c$个长度为$hw$的向量$\\mathbf{x}_1, \\ldots, \\mathbf{x}_c$组合而成的。其中向量$\\mathbf{x}_i$代表了通道$i$上的风格特征。\n", + "\n", + "在这些向量的*格拉姆矩阵*$\\mathbf{X}\\mathbf{X}^\\top \\in \\mathbb{R}^{c \\times c}$中,$i$行$j$列的元素$x_{ij}$即向量$\\mathbf{x}_i$和$\\mathbf{x}_j$的内积。它表达了通道$i$和通道$j$上风格特征的相关性。我们用这样的格拉姆矩阵来表达风格层输出的风格。\n", + "需要注意的是,当$hw$的值较大时,格拉姆矩阵中的元素容易出现较大的值。\n", + "此外,格拉姆矩阵的高和宽皆为通道数$c$。\n", + "为了让风格损失不受这些值的大小影响,下面定义的`gram`函数将格拉姆矩阵除以了矩阵中元素的个数,即$chw$。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "origin_pos": 25, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def gram(X):\n", + " num_channels, n = X.shape[1], X.numel() // X.shape[1]#得到通道数和将要除以的比例chw\n", + " X = X.reshape((num_channels, n))\n", + " return torch.matmul(X, X.T) / (num_channels * n)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 26 + }, + "source": [ + "自然地,风格损失的平方误差函数的两个格拉姆矩阵输入分别基于合成图像与风格图像的风格层输出。这里假设基于风格图像的格拉姆矩阵`gram_Y`已经预先计算好了。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "origin_pos": 28, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def style_loss(Y_hat, gram_Y):\n", + " return torch.square(gram(Y_hat) - gram_Y.detach()).mean()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 29 + }, + "source": [ + "### 全变分损失\n", + "\n", + "有时候,我们学到的合成图像里面有大量高频噪点,即有特别亮或者特别暗的颗粒像素。\n", + "一种常见的去噪方法是*全变分去噪*(total variation denoising):\n", + "假设$x_{i, j}$表示坐标$(i, j)$处的像素值,降低全变分损失\n", + "\n", + "$$\\sum_{i, j} \\left|x_{i, j} - x_{i+1, j}\\right| + \\left|x_{i, j} - x_{i, j+1}\\right|$$\n", + "\n", + "能够尽可能使邻近的像素值相似。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "origin_pos": 30, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def tv_loss(Y_hat):\n", + " #边界处无需计算,可以通过切片的方式使相邻下标对应,直接做运算\n", + " return 0.5 * (torch.abs(Y_hat[:, :, 1:, :] - 
Y_hat[:, :, :-1, :]).mean() +\n", + " torch.abs(Y_hat[:, :, :, 1:] - Y_hat[:, :, :, :-1]).mean())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 31 + }, + "source": [ + "### 损失函数\n", + "\n", + "[**风格转移的损失函数是内容损失、风格损失和总变化损失的加权和**]。\n", + "通过调节这些权重超参数,我们可以权衡合成图像在保留内容、迁移风格以及去噪三方面的相对重要性。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "origin_pos": 32, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "content_weight, style_weight, tv_weight = 1, 1e3, 10\n", + "\n", + "def compute_loss(X, contents_Y_hat, styles_Y_hat, contents_Y, styles_Y_gram):\n", + " # 分别计算内容损失、风格损失和全变分损失\n", + " contents_l = [content_loss(Y_hat, Y) * content_weight for Y_hat, Y in zip(\n", + " contents_Y_hat, contents_Y)]\n", + " styles_l = [style_loss(Y_hat, Y) * style_weight for Y_hat, Y in zip(\n", + " styles_Y_hat, styles_Y_gram)]\n", + " tv_l = tv_loss(X) * tv_weight\n", + " # 对所有损失求和\n", + " l = sum(10 * styles_l + contents_l + [tv_l])\n", + " return contents_l, styles_l, tv_l, l" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 33 + }, + "source": [ + "## [**初始化合成图像**]\n", + "\n", + "在风格迁移中,合成的图像是训练期间唯一需要更新的变量。因此,我们可以定义一个简单的模型`SynthesizedImage`,并将合成的图像视为模型参数。模型的前向传播只需返回模型参数即可。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "origin_pos": 35, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "class SynthesizedImage(nn.Module):#此类保存变量,更方便调用pytorch的接口\n", + " def __init__(self, img_shape, **kwargs):\n", + " super(SynthesizedImage, self).__init__(**kwargs)\n", + " self.weight = nn.Parameter(torch.rand(*img_shape))\n", + "\n", + " def forward(self):#前向传播不会被调用\n", + " return self.weight" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 36 + }, + "source": [ + "下面,我们定义`get_inits`函数。该函数创建了合成图像的模型实例,并将其初始化为图像`X`。风格图像在各个风格层的格拉姆矩阵`styles_Y_gram`将在训练前预先计算好。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { 
+ "origin_pos": 38, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def get_inits(X, device, lr, styles_Y):\n", + " gen_img = SynthesizedImage(X.shape).to(device)\n", + " gen_img.weight.data.copy_(X.data)\n", + " trainer = torch.optim.Adam(gen_img.parameters(), lr=lr)\n", + " styles_Y_gram = [gram(Y) for Y in styles_Y]\n", + " return gen_img(), styles_Y_gram, trainer" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 39 + }, + "source": [ + "## [**训练模型**]\n", + "\n", + "在训练模型进行风格迁移时,我们不断抽取合成图像的内容特征和风格特征,然后计算损失函数。下面定义了训练循环。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "origin_pos": 41, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def train(X, contents_Y, styles_Y, device, lr, num_epochs, lr_decay_epoch):\n", + " X, styles_Y_gram, trainer = get_inits(X, device, lr, styles_Y)\n", + " scheduler = torch.optim.lr_scheduler.StepLR(trainer, lr_decay_epoch, 0.8)#使用学习率衰减\n", + " animator = d2l.Animator(xlabel='epoch', ylabel='loss',\n", + " xlim=[10, num_epochs],\n", + " legend=['content', 'style', 'TV'],\n", + " ncols=2, figsize=(7, 2.5))#ncols=2代表共有两列图片,左侧为损失图,右侧为当前合成结果\n", + " for epoch in range(num_epochs):\n", + " trainer.zero_grad()\n", + " contents_Y_hat, styles_Y_hat = extract_features(\n", + " X, content_layers, style_layers)\n", + " contents_l, styles_l, tv_l, l = compute_loss(\n", + " X, contents_Y_hat, styles_Y_hat, contents_Y, styles_Y_gram)\n", + " l.backward()\n", + " trainer.step()\n", + " scheduler.step()\n", + " if (epoch + 1) % 10 == 0:\n", + " animator.axes[1].imshow(postprocess(X))\n", + " animator.add(epoch + 1, [float(sum(contents_l)),\n", + " float(sum(styles_l)), float(tv_l)])\n", + " return X" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 42 + }, + "source": [ + "现在我们[**训练模型**]:\n", + "首先将内容图像和风格图像的高和宽分别调整为300和450像素,用内容图像来初始化合成图像。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "origin_pos": 
44, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T21:35:09.010360\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "device, image_shape = d2l.try_gpu(), (300, 450)\n", + "net = net.to(device)\n", + "content_X, contents_Y = get_contents(image_shape, device)#content_x即内容图片进行变换后的结果,用于初始化图片\n", + "_, styles_Y = get_styles(image_shape, device)\n", + "output = train(content_X, contents_Y, styles_Y, device, 0.3, 500, 50)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 45 + }, + "source": [ + "我们可以看到,合成图像保留了内容图像的风景和物体,并同时迁移了风格图像的色彩。例如,合成图像具有与风格图像中一样的色彩块,其中一些甚至具有画笔笔触的细微纹理。\n", + "\n", + "## 小结\n", + "\n", + "* 风格迁移常用的损失函数由3部分组成:(i)内容损失使合成图像与内容图像在内容特征上接近;(ii)风格损失令合成图像与风格图像在风格特征上接近;(iii)全变分损失则有助于减少合成图像中的噪点。\n", + "* 我们可以通过预训练的卷积神经网络来抽取图像的特征,并通过最小化损失函数来不断更新合成图像来作为模型参数。\n", + "* 我们使用格拉姆矩阵表达风格层输出的风格。\n", + "\n", + "## 练习\n", + "\n", + "1. 选择不同的内容和风格层,输出有什么变化?\n", + "1. 调整损失函数中的权重超参数。输出是否保留更多内容或减少更多噪点?\n", + "1. 替换实验中的内容图像和风格图像,你能创作出更有趣的合成图像吗?\n", + "1. 
我们可以对文本使用风格迁移吗?提示:你可以参阅调查报告 :cite:`Hu.Lee.Aggarwal.2020`。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 47, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/3300)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.11" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/52-\346\226\207\346\234\254\351\242\204\345\244\204\347\220\206.ipynb" "b/code/52-\346\226\207\346\234\254\351\242\204\345\244\204\347\220\206.ipynb" new file mode 100644 index 0000000..3a2eb2a --- /dev/null +++ "b/code/52-\346\226\207\346\234\254\351\242\204\345\244\204\347\220\206.ipynb" @@ -0,0 +1,566 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "pre-prosses.ipynb", + "provenance": [], + "collapsed_sections": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "#52-文本预处理\n", + "##本节目录\n", + "\n", + "* [处理步骤](https://colab.research.google.com/drive/1oQH-UYj-DhB-DiUJ3nO2dEprKcNSD4F0#scrollTo=ltSVAkJi-SVF&line=6&uniqifier=1)\n", + "* [读取数据集](https://colab.research.google.com/drive/1oQH-UYj-DhB-DiUJ3nO2dEprKcNSD4F0#scrollTo=BE1LFmRneFrp)\n", + "* [词元化](https://colab.research.google.com/drive/1oQH-UYj-DhB-DiUJ3nO2dEprKcNSD4F0#scrollTo=FyvsoyYqeMuW&line=6&uniqifier=1)\n", + "* [词表](https://colab.research.google.com/drive/1oQH-UYj-DhB-DiUJ3nO2dEprKcNSD4F0#scrollTo=aTVML0Pqeqb1&line=5&uniqifier=1)\n", + "* 
[整合所有功能](https://colab.research.google.com/drive/1oQH-UYj-DhB-DiUJ3nO2dEprKcNSD4F0#scrollTo=cKrAEBT7gplL&line=6&uniqifier=1)\n", + "\n", + "\n" + ], + "metadata": { + "id": "wTVkRfB2hnx8" + } + }, + { + "cell_type": "markdown", + "source": [ + "##启用GPU" + ], + "metadata": { + "id": "4F0oSvy211p_" + } + }, + { + "cell_type": "code", + "source": [ + "%tensorflow_version 2.x\n", + "import tensorflow as tf\n", + "device_name = tf.test.gpu_device_name()\n", + "if device_name != '/device:GPU:0':\n", + " raise SystemError('GPU device not found')\n", + "print('Found GPU at: {}'.format(device_name))" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "nkcC6rZz1xGH", + "outputId": "197e037a-b939-4fef-a19e-ca24b9f1b851" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Found GPU at: /device:GPU:0\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##安装pytorch和d2l" + ], + "metadata": { + "id": "byK-3lVy2BIU" + } + }, + { + "cell_type": "code", + "source": [ + "!pip3 install torch torchvision torchaudio" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "GiXMVWXv2IK4", + "outputId": "7b6d0598-2fcc-4b04-a347-87ecf6839065" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Requirement already satisfied: torch in /usr/local/lib/python3.7/dist-packages (1.10.0+cu111)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (0.11.1+cu111)\n", + "Requirement already satisfied: torchaudio in /usr/local/lib/python3.7/dist-packages (0.10.0+cu111)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch) (3.10.0.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from torchvision) (1.19.5)\n", + "Requirement already satisfied: pillow!=8.3.0,>=5.3.0 in 
/usr/local/lib/python3.7/dist-packages (from torchvision) (7.1.2)\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!pip3 install d2l==0.14" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "VZQ-iQON2KdN", + "outputId": "62c942aa-bf75-4277-d03c-1cbb83cbf5d1" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting d2l==0.14\n", + " Downloading d2l-0.14.0-py3-none-any.whl (48 kB)\n", + "\u001b[?25l\r\u001b[K |██████▊ | 10 kB 29.0 MB/s eta 0:00:01\r\u001b[K |█████████████▍ | 20 kB 35.7 MB/s eta 0:00:01\r\u001b[K |████████████████████ | 30 kB 19.3 MB/s eta 0:00:01\r\u001b[K |██████████████████████████▉ | 40 kB 16.7 MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 48 kB 4.7 MB/s \n", + "\u001b[?25hRequirement already satisfied: jupyter in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.0.0)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (3.2.2)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.3.5)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.19.5)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.6.1)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.3.1)\n", + "Requirement already satisfied: jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.0)\n", + "Requirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.2)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (7.6.5)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages 
(from jupyter->d2l==0.14) (4.10.1)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.5.0)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.3.5)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.8.0)\n", + "Requirement already satisfied: pygments in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (2.6.1)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (57.4.0)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.8.1)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.7.5)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.4.2)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.0.18)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.2.5)\n", + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.7/dist-packages (from 
prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.15.0)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (3.5.2)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (1.0.2)\n", + "Requirement already satisfied: nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (5.1.3)\n", + "Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (0.2.0)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.9.1)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.3.3)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (21.4.0)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.10.1)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (5.4.0)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.10.0.2)\n", + "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (0.18.1)\n", + "Requirement 
already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.7.0)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (2.11.3)\n", + "Requirement already satisfied: terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (0.13.1)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (1.8.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (2.8.2)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (22.3.0)\n", + "Requirement already satisfied: ptyprocess in /usr/local/lib/python3.7/dist-packages (from terminado>=0.8.1->notebook->jupyter->d2l==0.14) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter->d2l==0.14) (2.0.1)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (1.3.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (3.0.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (0.11.0)\n", + "Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.8.4)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.4)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in 
/usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (1.5.0)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (4.1.0)\n", + "Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.5.0)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.7.1)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (21.3)\n", + "Requirement already satisfied: webencodings in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (0.5.1)\n", + "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.7/dist-packages (from pandas->d2l==0.14) (2018.9)\n", + "Requirement already satisfied: qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter->d2l==0.14) (2.0.1)\n", + "Installing collected packages: d2l\n", + "Successfully installed d2l-0.14.0\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "#文本预处理\n", + "##处理步骤\n", + "\n", + "---\n", + "\n", + "\n", + "\n", + "1. 将文本作为字符串加载至内存\n", + "2. 将字符串拆分为词元(eg.单词和字符)\n", + "3. 建立词表,将拆分的词元映射到数字索引\n", + "4. 
将文本转换为数字索引序列,方便模型操作\n", + "\n" + ], + "metadata": { + "id": "ltSVAkJi-SVF" + } + }, + { + "cell_type": "code", + "source": [ + "import collections\n", + "import re\n", + "from d2l import torch as d2l" + ], + "metadata": { + "id": "DQBfRqj5-uVZ" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "##读取数据集" + ], + "metadata": { + "id": "BE1LFmRneFrp" + } + }, + { + "cell_type": "code", + "source": [ + "d2l.DATA_HUB['time_machine'] = (d2l.DATA_URL + 'timemachine.txt', '0cb91d09b814ecdc07b50f31f8dcad3e81d6a86d')\n", + "\n", + "#清洗数据\n", + "#消除无用文本,转换大小写\n", + "def read_time_machine():\n", + " \"\"\"将时间机器数据集加载到文本行的列表中\"\"\"\n", + " with open(d2l.download('time_machine'),'r') as f:\n", + " lines = f.readlines()\n", + " return [re.sub('[^A-Za-z]+',' ',line).strip().lower() for line in lines]\n", + "\n", + "lines = read_time_machine()\n", + "print(f'# 文本总行数:{len(lines)}')\n", + "print(lines[0])\n", + "print(lines[10])" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "U80crdJH-6EL", + "outputId": "d940c724-b41c-434b-bc44-cd8b0c1ebb80" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Downloading ../data/timemachine.txt from http://d2l-data.s3-accelerate.amazonaws.com/timemachine.txt...\n", + "# 文本总行数:3221\n", + "the time machine by h g wells\n", + "twinkled and his usually pale face was flushed and animated the\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##词元化\n", + "\n", + "\n", + "---\n", + "\n", + "\n", + "\n", + "> 文本行列表中的每个文本序列被转化成以词元为单位的词元列表\n", + "\n", + "\n", + "\n" + ], + "metadata": { + "id": "FyvsoyYqeMuW" + } + }, + { + "cell_type": "code", + "source": [ + "#拆分数据为单词或字母\n", + "def tokenize(lines,token='word'):\n", + " \"\"\"将文本行拆分为单词或字符词元\"\"\"\n", + " if token == 'word':\n", + " return [line.split() for line in lines]\n", + " elif token == 'char':\n", + " return [list(line) for line in 
lines]\n", + " else:\n", + " print('error: Unknown type:' + token)\n", + "\n", + "tokens = tokenize(lines)\n", + "for i in range(11):\n", + " print(tokens[i])" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "KCWY6qVr0i_b", + "outputId": "58ffaee1-97b3-48d8-a973-1db9795c007e" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "['the', 'time', 'machine', 'by', 'h', 'g', 'wells']\n", + "[]\n", + "[]\n", + "[]\n", + "[]\n", + "['i']\n", + "[]\n", + "[]\n", + "['the', 'time', 'traveller', 'for', 'so', 'it', 'will', 'be', 'convenient', 'to', 'speak', 'of', 'him']\n", + "['was', 'expounding', 'a', 'recondite', 'matter', 'to', 'us', 'his', 'grey', 'eyes', 'shone', 'and']\n", + "['twinkled', 'and', 'his', 'usually', 'pale', 'face', 'was', 'flushed', 'and', 'animated', 'the']\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##词表\n", + "\n", + "\n", + "---\n", + "\n", + "\n", + "> 词表(vocabulary):是一个能让词元和数字索引互相映射的字典\n", + "\n", + "\n", + "\n", + "> 语料(corpus):是文本中各个词元的出现频率的统计结果\n", + "\n", + "\n", + "\n", + "> 根据词元出现频率分配数字索引,同时移除较少的词元降低词表的稀疏程度\n", + "\n", + "\n", + "\n", + "> 特殊词元:\n", + "1. 未知词元\n", + "2. 填充词元\n", + "3. 序列开始词元\n", + "4. 
序列结束词元\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n" + ], + "metadata": { + "id": "aTVML0Pqeqb1" + } + }, + { + "cell_type": "code", + "source": [ + "class Vocab:\n", + " \"\"\"文本词表\"\"\"\n", + " def __init__(self,tokens=None,min_freq=0,reserved_tokens=None):\n", + " if tokens is None:\n", + " tokens = []\n", + " if reserved_tokens is None:\n", + " reserved_tokens = []\n", + " #按出现频率排序\n", + " counter = count_corpus(tokens)\n", + " self._token_freqs = sorted(counter.items(),key=lambda x: x[1],reverse=True)\n", + " #未知词元的索引为0\n", + " self.idx_to_token = ['<unk>'] + reserved_tokens\n", + " self.token_to_idx = {token: idx for idx,token in enumerate(self.idx_to_token)}\n", + " self.idx_to_token,self.token_to_idx = [],dict()\n", + " print(self.token_to_idx)\n", + " for token,freq in self._token_freqs:\n", + " if freq < min_freq:\n", + " break\n", + " if token not in self.token_to_idx:\n", + " self.idx_to_token.append(token)\n", + " self.token_to_idx[token] = len(self.idx_to_token) - 1\n", + "\n", + " def __len__(self):\n", + " return len(self.idx_to_token)\n", + "\n", + " def __getitem__(self,tokens):\n", + " if not isinstance(tokens,(list,tuple)):\n", + " return self.token_to_idx.get(tokens,self.unk)\n", + " return [self.__getitem__(token) for token in tokens]\n", + "\n", + " def to_tokens(self,indices):\n", + " if not isinstance(indices,(list,tuple)):\n", + " return self.idx_to_token[indices]\n", + " return [self.idx_to_token[index] for index in indices]\n", + "\n", + " @property\n", + " def unk(self): #未知词元的索引为0\n", + " return 0\n", + "\n", + " @property\n", + " def token_freqs(self):\n", + " return self._token_freqs\n", + "\n", + "def count_corpus(tokens):\n", + " \"\"\"统计词元的频率\"\"\"\n", + " #这里的tokens是1D列表或2D列表\n", + " if len(tokens) == 0 or isinstance(tokens[0],list):\n", + " #将词元列表展平成一个列表\n", + " tokens = [token for line in tokens for token in line]\n", + " return collections.Counter(tokens)" + ], + "metadata": { + "id": "carzgtSy7H4n" + }, 
"execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "vocab = Vocab(tokens)\n", + "print(list(vocab.token_to_idx.items())[:10])" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "-NskhH8ICP0b", + "outputId": "80e6d765-eb8a-47f8-ca73-b6a38cabee3f" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "{}\n", + "[('the', 0), ('i', 1), ('and', 2), ('of', 3), ('a', 4), ('to', 5), ('was', 6), ('in', 7), ('that', 8), ('my', 9)]\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "for i in [0,10]:\n", + " print('token:',tokens[i])\n", + " print('id:',vocab[tokens[i]])" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "zT9px1-TCZ4H", + "outputId": "16e6c9a6-58f5-4826-b6b9-4a2ce3857d66" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "token: ['the', 'time', 'machine', 'by', 'h', 'g', 'wells']\n", + "id: [0, 18, 49, 39, 2182, 2183, 399]\n", + "token: ['twinkled', 'and', 'his', 'usually', 'pale', 'face', 'was', 'flushed', 'and', 'animated', 'the']\n", + "id: [2185, 2, 24, 1043, 361, 112, 6, 1420, 2, 1044, 0]\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##整合所有功能\n", + "\n", + "\n", + "---\n", + "\n", + "\n", + "\n", + "1. 读取清洗后的文件\n", + "2. 词元化\n", + "3. 
建立词表\n", + "\n" + ], + "metadata": { + "id": "cKrAEBT7gplL" + } + }, + { + "cell_type": "code", + "source": [ + "def load_corpus_tine_machine(max_tokens=-1):\n", + " \"\"\"返回数据集的词元索引列表和词表\"\"\"\n", + " lines = read_time_machine()\n", + " tokens = tokenize(lines,'char')\n", + " vocab = Vocab(tokens)\n", + " \"\"\"将所有文本行展平到一个列表\"\"\"\n", + " corpus = [vocab[token] for line in tokens for token in line]\n", + " if max_tokens > 0 :\n", + " corpus = corpus[:max_tokens]\n", + " return corpus,vocab\n", + "\n", + "corpus,vocab = d2l.load_corpus_time_machine()\n", + "len(corpus),len(vocab)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "v1mgkKD6CvE_", + "outputId": "2fba3c07-3bb9-46c2-e1fc-e0c5fccba128" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Downloading ../data/timemachine.txt from http://d2l-data.s3-accelerate.amazonaws.com/timemachine.txt...\n" + ] + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "(171489, 28)" + ] + }, + "metadata": {}, + "execution_count": 16 + } + ] + }, + { + "cell_type": "code", + "source": [ + "" + ], + "metadata": { + "id": "t0m8D6NQFF8T" + }, + "execution_count": null, + "outputs": [] + } + ] +} \ No newline at end of file diff --git a/code/55-RNN.ipynb b/code/55-RNN.ipynb new file mode 100644 index 0000000..a90fea7 --- /dev/null +++ b/code/55-RNN.ipynb @@ -0,0 +1,3326 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "## 一、RNN从零开始实现" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "%matplotlib inline\n", + "import math\n", + "import torch\n", + "from torch import nn\n", + "from torch.nn import functional as F\n", + "from d2l import torch as d2l\n", + "\n", + "batch_size, num_steps = 32, 35\n", + "train_iter, vocab = d2l.load_data_time_machine(batch_size, num_steps)" 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 独热编码" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0],\n", + " [0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0]])" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "F.one_hot(torch.tensor([0, 2]), len(vocab)) #将[0, 2]展开为长度为len(vocab)大小的独热向量" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 小批量数据形状是 (批量大小, 时间步数)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([5, 2, 28])" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X = torch.arange(10).reshape((2, 5)) # (batch_size, n_step)\n", + "F.one_hot(X.T, 28).shape # (n_step, batch_size, n_features)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 初始化循环神经网络模型的模型参数" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "def get_params(vocab_size, num_hiddens, device):\n", + " num_inputs = num_outputs = vocab_size\n", + " \n", + " def normal(shape):\n", + " return torch.randn(size=shape, device=device) * 0.01\n", + " \n", + " #隐藏层参数\n", + " W_xh = normal((num_inputs, num_hiddens))\n", + " W_hh = normal((num_hiddens, num_hiddens))\n", + " b_h = torch.zeros(num_hiddens, device=device)\n", + " #输出参数\n", + " W_hq = normal((num_hiddens, num_outputs))\n", + " b_q = torch.zeros(num_outputs, device=device)\n", + " #附加梯度\n", + " params = [W_xh, W_hh, b_h, W_hq, 
b_q]\n", + " for param in params:\n", + " param.requires_grad_(True)\n", + " return params" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### init_rnn_state函数:在初始化时返回隐藏状态" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "def init_rnn_state(batch_size, num_hiddens, device): #返回初始隐层状态\n", + " return (torch.zeros((batch_size, num_hiddens), device=device), )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### rnn函数:定义**一个时间步内**计算隐藏状态和输出\n", + "更新隐藏状态: $$h_t = \\phi(W_{hh}h_{t-1}+W_{hx}x_{t-1}+b_{h})$$\n", + "输出: $$o_{t}=\\phi(W_{ho}h_{t}+b_{o})$$" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "def rnn(inputs, state, params):\n", + " W_xh, W_hh, b_h, W_hq, b_q = params\n", + " H, = state\n", + " outputs = [] #n_step个大小为(batch_size, n_outputs)的torch张量列表\n", + " # inputs: (n_step, batch_size, n_features)\n", + " for X in inputs: # 按时序遍历\n", + " H = torch.tanh(torch.mm(X, W_xh) + torch.mm(H, W_hh) + b_h) # (batch_size, n_hiddens)\n", + " Y = torch.mm(H, W_hq) + b_q # (batch_size, n_outputs)\n", + " outputs.append(Y) \n", + " return torch.cat(outputs, dim=0), (H,) #cat后维数(n_step * batch_size, n_outputs)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 创建一个类来包装这些函数" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "class RNNModelScratch:\n", + " \"\"\"从零开始实现的循环神经网络模型\"\"\"\n", + " def __init__(self, vocab_size, num_hiddens, device, get_params,\n", + " init_state, forward_fn):\n", + " self.vocab_size, self.num_hiddens = vocab_size, num_hiddens\n", + " self.params = get_params(vocab_size, num_hiddens, device) #获得模型初始参数\n", + " self.init_state, self.forward_fn = init_state, forward_fn #隐层初始函数, 前馈函数 \n", + " 
#注意前馈函数可以换成gru, lstm等\n", + " \n", + " def __call__(self, X, state):\n", + " #输入X: (batch_size, n_step)\n", + " #转置+onehot后 X:(n_step, batch_size, n_features)\n", + " X = F.one_hot(X.T, self.vocab_size).type(torch.float32)\n", + " return self.forward_fn(X, state, self.params)\n", + " \n", + " def begin_state(self, batch_size, device):\n", + " return self.init_state(batch_size, self.num_hiddens, device)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 检查输出是否具有正确的形状" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(torch.Size([10, 28]), 1, torch.Size([2, 512]))" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "num_hiddens = 512\n", + "net = RNNModelScratch(len(vocab), num_hiddens, d2l.try_gpu(), \n", + " get_params, init_rnn_state, rnn)\n", + "# X: (2, 5) 对应(batch_size, n_step)\n", + "state = net.begin_state(X.shape[0], d2l.try_gpu())\n", + "Y, new_state = net(X.to(d2l.try_gpu()), state)\n", + "#Y: (batch_size * n_step, n_outputs) \n", + "#new_state中一个torch张量(最后一个时间步的隐层)\n", + "#new_state[0].shape: (batch_size, n_hiddens)\n", + "Y.shape, len(new_state), new_state[0].shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 首先定义预测函数来生成prefix之后的新字符" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'time travellerrrrrrrrrrr'" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def predict_ch8(prefix, num_preds, net, vocab, device):\n", + " \"\"\"在`prefix`后面生成新字符\"\"\"\n", + " #生成初始隐藏状态\n", + " state = net.begin_state(batch_size=1, device=device) \n", + " outputs = [vocab[prefix[0]]] #第一个word的整型下标\n", + " #将最近预测的词做成tensor, batch_size=1, 
n_step=1\n", + " get_input = lambda: torch.tensor([outputs[-1]], device=device).reshape((1, 1))\n", + " for y in prefix[1:]: # 预热操作, 保存真值\n", + " _, state = net(get_input(), state)\n", + " outputs.append(vocab[y])\n", + " for _ in range(num_preds): # 预测num_preds步\n", + " y, state = net(get_input(), state)\n", + " outputs.append(int(y.argmax(dim=1).reshape(1)))\n", + " return ''.join([vocab.idx_to_token[i] for i in outputs])\n", + "\n", + "predict_ch8('time traveller', 10, net, vocab, d2l.try_gpu())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 梯度裁剪\n", + "$$\\mathbf{g}\\leftarrow min(1, \\frac{\\theta}{\\parallel \\mathbf{g} \\parallel}) \\mathbf{g}$$" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "def grad_clipping(net, theta):\n", + " \"\"\"裁剪梯度\"\"\"\n", + " if isinstance(net, nn.Module):#如果使用nn.Module来实现\n", + " params = [p for p in net.parameters() if p.requires_grad]\n", + " else:\n", + " params = net.params\n", + " norm = torch.sqrt(sum(torch.sum(\n", + " (p.grad**2)) for p in params))\n", + " if norm > theta:\n", + " for param in params:\n", + " param.grad[:] *= theta / norm" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 查看train_iter数据集" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([32, 35]) torch.Size([32, 35])\n", + "tensor([[ 1, 3, 5, ..., 2, 1, 15],\n", + " [ 4, 6, 11, ..., 5, 10, 8],\n", + " [ 3, 1, 4, ..., 2, 8, 8],\n", + " ...,\n", + " [15, 7, 6, ..., 21, 14, 3],\n", + " [10, 19, 8, ..., 14, 8, 3],\n", + " [ 1, 13, 2, ..., 10, 1, 4]])\n", + "tensor([[ 3, 5, 13, ..., 1, 15, 7],\n", + " [ 6, 11, 20, ..., 10, 8, 1],\n", + " [ 1, 4, 6, ..., 8, 8, 1],\n", + " ...,\n", + " [ 7, 6, 26, ..., 14, 3, 21],\n", + 
" [19, 8, 3, ..., 8, 3, 1],\n", + " [13, 2, 15, ..., 1, 4, 6]])\n", + " time traveller for so it will be c\n", + "time traveller for so it will be co\n", + "andpassed in our glasses our chairs\n", + "ndpassed in our glasses our chairs \n", + "\n", + "onvenient to speak of himwas expoun\n", + "nvenient to speak of himwas expound\n", + "8\n" + ] + } + ], + "source": [ + "count = 0\n", + "for X, Y in train_iter:\n", + " if count == 0:#第0个batch\n", + " print(X.shape, Y.shape)\n", + " print(X) # (batch_size(=32), n_step(=35))\n", + " print(Y) # (batch_size, n_step)\n", + " print(''.join([vocab.idx_to_token[i] for i in X[0]])) #打印第0个样本对应句子\n", + " print(''.join([vocab.idx_to_token[i] for i in Y[0]])) #打印第0个样本真值\n", + " print(''.join([vocab.idx_to_token[i] for i in X[1]])) #打印第1个样本对应句子\n", + " print(''.join([vocab.idx_to_token[i] for i in Y[1]])) #打印第1个样本真值\n", + " print()\n", + " if count == 1:#第1个batch, 内容和第0个batch上下承接(有时序关系)\n", + " print(''.join([vocab.idx_to_token[i] for i in X[0]]))\n", + " print(''.join([vocab.idx_to_token[i] for i in Y[0]]))\n", + " count += 1\n", + "print(count) #打印batch数量=8" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 定义一个函数在一个迭代周期内训练模型" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "def train_epoch_ch8(net, train_iter, loss, updater, device,\n", + " use_random_iter):\n", + " \"\"\"训练模型一个迭代周期(定义见第8章)\"\"\"\n", + " state, timer = None, d2l.Timer()\n", + " metric = d2l.Accumulator(2)\n", + " for X, Y in train_iter:\n", + " if state is None or use_random_iter:#为第一个batch 或者 batch之间时序上不连续\n", + " state = net.begin_state(batch_size=X.shape[0], device=device) #初始化state\n", + " else:\n", + " if isinstance(net, nn.Module) and not isinstance(state, tuple):\n", + " # state对于nn.GRU是个张量\n", + " state.detach_() # 对之前的部分取消梯度反向传播计算\n", + " else:\n", + " # state对于nn.LSTM或者对于我们从零开始实现的模型是个元组(张量构成)\n", + " for s in 
state:\n", + " s.detach_()\n", + " y = Y.T.reshape(-1) #reshape真值, 将n_step放在第一维之后拉成一维向量\n", + " X, y = X.to(device), y.to(device)\n", + " y_hat,state = net(X, state)\n", + " l = loss(y_hat, y.long()).mean()\n", + " if isinstance(updater, torch.optim.Optimizer):#调用torch优化函数实现\n", + " updater.zero_grad()\n", + " l.backward()\n", + " grad_clipping(net, 1)\n", + " updater.step()\n", + " else:\n", + " l.backward()\n", + " grad_clipping(net, 1)\n", + " updater(batch_size=1)\n", + " metric.add(l * y.numel(), y.numel())\n", + " return math.exp(metric[0] / metric[1]), metric[1] / timer.stop()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 循环神经网络模型的训练函数既支持从零开始实现,也可以使用高级API实现" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "def train_ch8(net, train_iter, vocab, lr, num_epochs, device, use_random_iter=False):\n", + " \"\"\"训练模型(定义见第8章)\"\"\"\n", + " loss = nn.CrossEntropyLoss()\n", + " animator = d2l.Animator(xlabel='epoch', ylabel='perplexity',\n", + " legend=['train'], xlim=[10,num_epochs])\n", + " #初始化优化器\n", + " if isinstance(net, nn.Module):\n", + " updater = torch.optim.SGD(net.parameters(), lr)\n", + " else:\n", + " updater = lambda batch_size: d2l.sgd(net.params, lr, batch_size)\n", + " predict = lambda prefix: predict_ch8(prefix, 50, net, vocab, device)\n", + " #训练和预测\n", + " for epoch in range(num_epochs):\n", + " ppl, speed = train_epoch_ch8(\n", + " net, train_iter, loss, updater, device, use_random_iter)\n", + " if (epoch + 1) % 10 == 0:\n", + " print(predict('time traveller'))\n", + " animator.add(epoch+1, [ppl])\n", + " print(f'困惑度 {ppl:.1f}, {speed:.1f} 词元/秒 {str(device)}')\n", + " print(predict('time traveller'))\n", + " print(predict('traveller'))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 训练循环神经网络模型(按序迭代batch)" + ] + }, + { + 
"cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "困惑度 1.0, 46320.2 词元/秒 cuda:0\n", + "time travelleryou can show black is white by argument said filby\n", + "travelleryou can show black is white by argument said filby\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-07T11:39:19.259185\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib v3.3.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 
\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + 
" \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "num_epochs, lr = 500, 1\n", + "train_ch8(net, train_iter, vocab, lr, num_epochs, d2l.try_gpu())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 训练循环神经网络模型(随机迭代batch)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "困惑度 1.3, 36524.6 词元/秒 cuda:0\n", + "time traveller held in his hand was a glitteringmetallic framewo\n", + "travellerit s against reason said filbycan a cube that does\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-07T11:42:07.023084\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib v3.3.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + 
" \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", 
+ " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "net = RNNModelScratch(len(vocab), num_hiddens, d2l.try_gpu(), get_params,\n", + " init_rnn_state, rnn)\n", + "train_ch8(net, train_iter, vocab, lr, num_epochs, d2l.try_gpu(),\n", + " use_random_iter=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**从零开始实现**上述循环神经网络模型, 虽然有指导意义,但是并不方便。 在下一节中,我们将学习如何改进循环神经网络模型。" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "## 二、循环神经网络的简洁实现" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from torch.nn import functional as F\n", + "from d2l import torch as d2l\n", + "\n", + "batch_size, num_steps = 32, 35\n", + "train_iter, vocab = d2l.load_data_time_machine(batch_size, num_steps) #加载数据" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 定义模型\n", + "nn.RNN(input_size, hidden_size, num_layers=1, nonlinearity=tanh, bias=True, batch_first=False, dropout=0, bidirectional=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "num_hiddens = 256\n", + "rnn_layer = nn.RNN(len(vocab), num_hiddens)" + ] + }, + { + "cell_type": 
"markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 使用张量来初始化隐藏状态" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([1, 32, 256])" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "state = torch.zeros((1, batch_size, num_hiddens))\n", + "state.shape #(D * num_layers(=1), batch_size, num_hiddens) " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 通过一个隐藏状态和一个输入,我们就可以用更新后的隐藏状态计算输出" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(torch.Size([35, 32, 256]), torch.Size([1, 32, 256]))" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X = torch.rand(size=(num_steps, batch_size, len(vocab)))# (n_step, batch_size, num_inputs)\n", + "Y, state_new = rnn_layer(X, state)\n", + "Y.shape, state_new.shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 定义RNNModel类:完整的循环神经网络模型" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "class RNNModel(nn.Module):\n", + " def __init__(self, rnn_layer, vocab_size, **kwargs):\n", + " super(RNNModel, self).__init__(**kwargs)\n", + " self.rnn = rnn_layer\n", + " self.vocab_size = vocab_size\n", + " self.num_hiddens = self.rnn.hidden_size\n", + " if not self.rnn.bidirectional: #如果是双向\n", + " self.num_directions = 1\n", + " self.linear = nn.Linear(self.num_hiddens, self.vocab_size) #线性层 /输出层\n", + " else:\n", + " self.num_directions = 2\n", + " self.linear = nn.Linear(self.num_hiddens, self.vocab_size)\n", + " \n", + " def forward(self, inputs, state):\n", + " X = F.one_hot(inputs.T.long(), 
self.vocab_size)\n", + " X = X.to(torch.float32)\n", + " Y, state = self.rnn(X, state)\n", + " output = self.linear(Y.reshape((-1, Y.shape[-1])))\n", + " return output, state\n", + " \n", + " def begin_state(self, device, batch_size=1):\n", + " if not isinstance(self.rnn, nn.LSTM):\n", + " # nn.GRU以张量作为隐状态\n", + " return torch.zeros((self.num_directions * self.rnn.num_layers, \n", + " batch_size, self.num_hiddens),\n", + " device = device)\n", + " else:\n", + " # nn.LSTM以元组作为隐状态\n", + " return (torch.zeros((\n", + " self.num_directions * self.rnn.num_layers,\n", + " batch_size, self.num_hiddens), device=device),\n", + " torch.zeros((\n", + " self.num_directions * self.rnn.num_layers,\n", + " batch_size, self.num_hiddens), device=device))#(h_n, c_n)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 训练与预测\n", + "在训练模型之前,让我们基于一个具有随机权重的模型进行预测。" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'time travelleridandand'" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "device = d2l.try_gpu()\n", + "net = RNNModel(rnn_layer, vocab_size=len(vocab))\n", + "net = net.to(device)\n", + "d2l.predict_ch8('time traveller', 10, net, vocab, device)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "perplexity 1.3, 152273.6 tokens/sec on cuda:0\n", + "time traveller coud and inn weridit so mimens of the pramithtred\n", + "traveller his fictses tor hime hal very is f enghas ow llow\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-07T14:08:12.978684\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib 
v3.3.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 
\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "num_epochs, lr = 500, 1\n", + "d2l.train_ch8(net, train_iter, vocab, lr, num_epochs, device)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "与上一节相比,由于深度学习框架的高级API对代码进行了更多的优化, 该模型在较短的时间内达到了较低的困惑度。" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "### 总结\n", + "- **深度学习框架**的高级API提供了循环神经网络层的实现。\n", + "\n", + "- 
高级API的循环神经网络层返回一个输出和一个更新后的隐状态,我们**还需要**计算整个模型的**输出层**。\n", + "\n", + "- 相比从零开始实现的循环神经网络,使用**高级API**实现可以**加速训练**。" + ] + } + ], + "metadata": { + "celltoolbar": "幻灯片", + "kernelspec": { + "display_name": "deep2learn", + "language": "python", + "name": "deep2learn" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/code/56-GRU.ipynb b/code/56-GRU.ipynb new file mode 100644 index 0000000..40d41c3 --- /dev/null +++ b/code/56-GRU.ipynb @@ -0,0 +1,1866 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 门控循环单元(GRU)\n", + "\n", + "### 重置门和更新门\n", + "\n", + "$\\mathbf{X}_t \\in \\mathbb{R}^{n \\times d}$\n", + "(样本个数:$n$,输入个数:$d$),\n", + "上一个时间步的隐状态是\n", + "$\\mathbf{H}_{t-1} \\in \\mathbb{R}^{n \\times h}$\n", + "(隐藏单元个数:$h$)。\n", + "那么,重置门$\\mathbf{R}_t \\in \\mathbb{R}^{n \\times h}$和\n", + "更新门$\\mathbf{Z}_t \\in \\mathbb{R}^{n \\times h}$的计算如下所示:\n", + "\n", + "$$\n", + "\\begin{aligned}\n", + "\\mathbf{R}_t = \\sigma(\\mathbf{X}_t \\mathbf{W}_{xr} + \\mathbf{H}_{t-1} \\mathbf{W}_{hr} + \\mathbf{b}_r),\\\\\n", + "\\mathbf{Z}_t = \\sigma(\\mathbf{X}_t \\mathbf{W}_{xz} + \\mathbf{H}_{t-1} \\mathbf{W}_{hz} + \\mathbf{b}_z),\n", + "\\end{aligned}\n", + "$$\n", + "\n", + "其中$\\mathbf{W}_{xr}, \\mathbf{W}_{xz} \\in \\mathbb{R}^{d \\times h}$\n", + "和$\\mathbf{W}_{hr}, \\mathbf{W}_{hz} \\in \\mathbb{R}^{h \\times h}$是权重参数,\n", + "$\\mathbf{b}_r, \\mathbf{b}_z \\in \\mathbb{R}^{1 \\times h}$是偏置参数。\n", + "请注意,在求和过程中会触发广播机制\n", + "(请参阅 :numref:`subsec_broadcasting`)。\n", + "我们使用sigmoid函数(如 :numref:`sec_mlp`中介绍的)\n", + "将输入值转换到区间$(0, 1)$。\n", + "\n", + "### 候选隐状态\n", + "\n", + "接下来,让我们将重置门$\\mathbf{R}_t$\n", + "与 :eqref:`rnn_h_with_state`\n", + 
"中的常规隐状态更新机制集成,\n", + "得到在时间步$t$的*候选隐状态*(candidate hidden state)\n", + "$\\tilde{\\mathbf{H}}_t \\in \\mathbb{R}^{n \\times h}$。\n", + "\n", + "$$\\tilde{\\mathbf{H}}_t = \\tanh(\\mathbf{X}_t \\mathbf{W}_{xh} + \\left(\\mathbf{R}_t \\odot \\mathbf{H}_{t-1}\\right) \\mathbf{W}_{hh} + \\mathbf{b}_h),$$\n", + ":eqlabel:`gru_tilde_H`\n", + "\n", + "其中$\\mathbf{W}_{xh} \\in \\mathbb{R}^{d \\times h}$\n", + "和$\\mathbf{W}_{hh} \\in \\mathbb{R}^{h \\times h}$是权重参数,\n", + "$\\mathbf{b}_h \\in \\mathbb{R}^{1 \\times h}$是偏置项,\n", + "符号$\\odot$是Hadamard积(按元素乘积)运算符。\n", + "在这里,我们使用tanh非线性激活函数来确保候选隐状态中的值保持在区间$(-1, 1)$中。\n", + "\n", + "\n", + "### 隐状态\n", + "\n", + "上述的计算结果只是候选隐状态,我们仍然需要结合更新门$\\mathbf{Z}_t$的效果。\n", + "这一步确定新的隐状态$\\mathbf{H}_t \\in \\mathbb{R}^{n \\times h}$\n", + "在多大程度上来自旧的状态$\\mathbf{H}_{t-1}$和\n", + "新的候选状态$\\tilde{\\mathbf{H}}_t$。\n", + "更新门$\\mathbf{Z}_t$仅需要在\n", + "$\\mathbf{H}_{t-1}$和$\\tilde{\\mathbf{H}}_t$\n", + "之间进行按元素的凸组合就可以实现这个目标。\n", + "这就得出了门控循环单元的最终更新公式:\n", + "\n", + "$$\\mathbf{H}_t = \\mathbf{Z}_t \\odot \\mathbf{H}_{t-1} + (1 - \\mathbf{Z}_t) \\odot \\tilde{\\mathbf{H}}_t.$$\n", + "\n", + "每当更新门$\\mathbf{Z}_t$接近$1$时,模型就倾向只保留旧状态。\n", + "此时,来自$\\mathbf{X}_t$的信息基本上被忽略,\n", + "从而有效地跳过了依赖链条中的时间步$t$。\n", + "相反,当$\\mathbf{Z}_t$接近$0$时,\n", + "新的隐状态$\\mathbf{H}_t$就会接近候选隐状态$\\tilde{\\mathbf{H}}_t$。\n", + "这些设计可以帮助我们处理循环神经网络中的梯度消失问题,\n", + "并更好地捕获时间步距离很长的序列的依赖关系。\n", + "例如,如果整个子序列的所有时间步的更新门都接近于$1$,\n", + "则无论序列的长度如何,在序列起始时间步的旧隐状态都将很容易保留并传递到序列结束。\n", + "\n", + "\n", + "## 从零开始实现\n", + "\n", + "为了更好地理解门控循环单元模型,我们从零开始实现它。\n", + "首先,我们读取 :numref:`sec_rnn_scratch`中使用的时间机器数据集:\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "\n", + "batch_size, num_steps = 32, 35\n", + "train_iter, vocab = d2l.load_data_time_machine(batch_size, num_steps)" + ] + }, + { + 
"cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "### [**初始化模型参数**]\n", + "\n", + "下一步是初始化模型参数。\n", + "我们从标准差为$0.01$的高斯分布中提取权重,\n", + "并将偏置项设为$0$,超参数`num_hiddens`定义隐藏单元的数量,\n", + "实例化与更新门、重置门、候选隐状态和输出层相关的所有权重和偏置。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def get_params(vocab_size, num_hiddens, device):\n", + " num_inputs = num_outputs = vocab_size\n", + "\n", + " def normal(shape):\n", + " return torch.randn(size=shape, device=device)*0.01\n", + "\n", + " def three():\n", + " return (normal((num_inputs, num_hiddens)),\n", + " normal((num_hiddens, num_hiddens)),\n", + " torch.zeros(num_hiddens, device=device))\n", + "\n", + " W_xz, W_hz, b_z = three() # 更新门参数\n", + " W_xr, W_hr, b_r = three() # 重置门参数\n", + " W_xh, W_hh, b_h = three() # 候选隐状态参数\n", + " # 输出层参数\n", + " W_hq = normal((num_hiddens, num_outputs))\n", + " b_q = torch.zeros(num_outputs, device=device)\n", + " # 附加梯度\n", + " params = [W_xz, W_hz, b_z, W_xr, W_hr, b_r, W_xh, W_hh, b_h, W_hq, b_q]\n", + " for param in params:\n", + " param.requires_grad_(True)\n", + " return params" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "### 定义模型\n", + "\n", + "现在我们将[**定义隐状态的初始化函数**]`init_gru_state`。\n", + "与 :numref:`sec_rnn_scratch`中定义的`init_rnn_state`函数一样,\n", + "此函数返回一个形状为(批量大小,隐藏单元个数)的张量,张量的值全部为零。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 10, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def init_gru_state(batch_size, num_hiddens, device):\n", + " return (torch.zeros((batch_size, num_hiddens), device=device), )" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 12 + }, + "source": [ + "现在我们准备[**定义门控循环单元模型**],\n", + "模型的架构与基本的循环神经网络单元是相同的,\n", + "只是权重更新公式更为复杂。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, 
+ "metadata": { + "origin_pos": 14, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def gru(inputs, state, params):\n", + " W_xz, W_hz, b_z, W_xr, W_hr, b_r, W_xh, W_hh, b_h, W_hq, b_q = params\n", + " H, = state\n", + " outputs = []\n", + " for X in inputs:\n", + " Z = torch.sigmoid((X @ W_xz) + (H @ W_hz) + b_z)\n", + " R = torch.sigmoid((X @ W_xr) + (H @ W_hr) + b_r)\n", + " H_tilda = torch.tanh((X @ W_xh) + ((R * H) @ W_hh) + b_h)\n", + " H = Z * H + (1 - Z) * H_tilda\n", + " Y = H @ W_hq + b_q\n", + " outputs.append(Y)\n", + " return torch.cat(outputs, dim=0), (H,)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 16 + }, + "source": [ + "### [**训练**]与预测\n", + "\n", + "训练和预测的工作方式与 :numref:`sec_rnn_scratch`完全相同。\n", + "训练结束后,我们分别打印输出训练集的困惑度,\n", + "以及前缀“time traveler”和“traveler”的预测序列上的困惑度。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 17, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "perplexity 1.1, 32652.0 tokens/sec on cuda:0\n", + "time traveller for so it will be convenient to speak of himwas e\n", + "travelleryou can show black is white by argument said filby\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-16T16:46:04.433519\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib v3.4.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 
\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + 
" \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "import os\n", + "os.environ[\"KMP_DUPLICATE_LIB_OK\"] = \"TRUE\"\n", + "\n", + "vocab_size, num_hiddens, device = len(vocab), 256, d2l.try_gpu()\n", + "num_epochs, lr = 500, 1\n", + "model = d2l.RNNModelScratch(len(vocab), num_hiddens, device, get_params,\n", + " init_gru_state, gru)\n", + "d2l.train_ch8(model, train_iter, vocab, lr, num_epochs, device)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 19 + }, + "source": [ + "## [**简洁实现**]\n", + "\n", + "高级API包含了前文介绍的所有配置细节,\n", + "所以我们可以直接实例化门控循环单元模型。\n", + "这段代码的运行速度要快得多,\n", + "因为它使用的是编译好的运算符而不是Python来处理之前阐述的许多细节。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 21, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "perplexity 1.0, 114803.3 tokens/sec on cuda:0\n", + "time travelleryou can show black is white by argument said filby\n", + "traveller with a slight accession ofcheerfulness really thi\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-16T16:48:26.545696\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib v3.4.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 
\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + 
" \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "num_inputs = vocab_size\n", + "gru_layer = nn.GRU(num_inputs, num_hiddens)\n", + "model = d2l.RNNModel(gru_layer, len(vocab))\n", + "model = model.to(device)\n", + "d2l.train_ch8(model, train_iter, vocab, lr, num_epochs, device)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 23 + }, + "source": [ + "## 小结\n", + "\n", + "* 门控循环神经网络可以更好地捕获时间步距离很长的序列上的依赖关系。\n", + "* 重置门有助于捕获序列中的短期依赖关系。\n", + "* 更新门有助于捕获序列中的长期依赖关系。\n", + "* 重置门打开时,门控循环单元包含基本循环神经网络;更新门打开时,门控循环单元可以跳过子序列。\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git 
"a/code/58-\346\267\261\345\261\202\345\276\252\347\216\257\347\245\236\347\273\217\347\275\221\347\273\234.ipynb" "b/code/58-\346\267\261\345\261\202\345\276\252\347\216\257\347\245\236\347\273\217\347\275\221\347\273\234.ipynb" new file mode 100644 index 0000000..2fcaa85 --- /dev/null +++ "b/code/58-\346\267\261\345\261\202\345\276\252\347\216\257\347\245\236\347\273\217\347\275\221\347\273\234.ipynb" @@ -0,0 +1,363 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "pOcWW6NKegj0", + "outputId": "3765c3cd-7e7b-4a1d-bda3-03fa003963ff" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting d2l\n", + " Downloading d2l-0.17.3-py3-none-any.whl (82 kB)\n", + "\u001b[K |████████████████████████████████| 82 kB 616 kB/s \n", + "\u001b[?25hCollecting numpy==1.18.5\n", + " Downloading numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)\n", + "\u001b[K |████████████████████████████████| 20.1 MB 52.4 MB/s \n", + "\u001b[?25hCollecting requests==2.25.1\n", + " Downloading requests-2.25.1-py2.py3-none-any.whl (61 kB)\n", + "\u001b[K |████████████████████████████████| 61 kB 8.4 MB/s \n", + "\u001b[?25hCollecting pandas==1.2.2\n", + " Downloading pandas-1.2.2-cp37-cp37m-manylinux1_x86_64.whl (9.9 MB)\n", + "\u001b[K |████████████████████████████████| 9.9 MB 34.1 MB/s \n", + "\u001b[?25hRequirement already satisfied: jupyter==1.0.0 in /usr/local/lib/python3.7/dist-packages (from d2l) (1.0.0)\n", + "Collecting matplotlib==3.3.3\n", + " Downloading matplotlib-3.3.3-cp37-cp37m-manylinux1_x86_64.whl (11.6 MB)\n", + "\u001b[K |████████████████████████████████| 11.6 MB 29.5 MB/s \n", + "\u001b[?25hRequirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.2.2)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages (from 
jupyter==1.0.0->d2l) (5.6.1)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (7.6.5)\n", + "Requirement already satisfied: jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.2.0)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.3.1)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (4.10.1)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (0.11.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (1.3.2)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (2.8.2)\n", + "Requirement already satisfied: pillow>=6.2.0 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (7.1.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (3.0.7)\n", + "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.7/dist-packages (from pandas==1.2.2->d2l) (2018.9)\n", + "Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (3.0.4)\n", + "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (1.24.3)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (2021.10.8)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages 
(from python-dateutil>=2.1->matplotlib==3.3.3->d2l) (1.15.0)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.1.1)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.1.1)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.5.0)\n", + "Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.3.5)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.7.5)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (4.8.0)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (57.4.0)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (4.4.2)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (1.0.18)\n", + "Requirement already satisfied: pygments in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (2.6.1)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.8.1)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.2.5)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in 
/usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (3.5.2)\n", + "Requirement already satisfied: nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (5.1.3)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (1.0.2)\n", + "Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (0.2.0)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.3.3)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.9.1)\n", + "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (0.18.1)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (5.4.0)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (21.4.0)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.10.1)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (3.10.0.2)\n", + "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from 
importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (3.7.0)\n", + "Requirement already satisfied: terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (0.13.1)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (1.8.0)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (2.11.3)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter==1.0.0->d2l) (22.3.0)\n", + "Requirement already satisfied: ptyprocess in /usr/local/lib/python3.7/dist-packages (from terminado>=0.8.1->notebook->jupyter==1.0.0->d2l) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter==1.0.0->d2l) (2.0.1)\n", + "Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.5.0)\n", + "Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.8.4)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.7.1)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (4.1.0)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (1.5.0)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.4)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter==1.0.0->d2l) (21.3)\n", + "Requirement already satisfied: 
webencodings in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter==1.0.0->d2l) (0.5.1)\n", + "Requirement already satisfied: qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter==1.0.0->d2l) (2.0.1)\n", + "Installing collected packages: numpy, requests, pandas, matplotlib, d2l\n", + " Attempting uninstall: numpy\n", + " Found existing installation: numpy 1.19.5\n", + " Uninstalling numpy-1.19.5:\n", + " Successfully uninstalled numpy-1.19.5\n", + " Attempting uninstall: requests\n", + " Found existing installation: requests 2.23.0\n", + " Uninstalling requests-2.23.0:\n", + " Successfully uninstalled requests-2.23.0\n", + " Attempting uninstall: pandas\n", + " Found existing installation: pandas 1.3.5\n", + " Uninstalling pandas-1.3.5:\n", + " Successfully uninstalled pandas-1.3.5\n", + " Attempting uninstall: matplotlib\n", + " Found existing installation: matplotlib 3.2.2\n", + " Uninstalling matplotlib-3.2.2:\n", + " Successfully uninstalled matplotlib-3.2.2\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n", + "tables 3.7.0 requires numpy>=1.19.0, but you have numpy 1.18.5 which is incompatible.\n", + "google-colab 1.0.0 requires requests~=2.23.0, but you have requests 2.25.1 which is incompatible.\n", + "datascience 0.10.6 requires folium==0.2.1, but you have folium 0.8.3 which is incompatible.\n", + "albumentations 0.1.12 requires imgaug<0.2.7,>=0.2.5, but you have imgaug 0.2.9 which is incompatible.\u001b[0m\n", + "Successfully installed d2l-0.17.3 matplotlib-3.3.3 numpy-1.18.5 pandas-1.2.2 requests-2.25.1\n" + ] + }, + { + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "matplotlib", + "mpl_toolkits", + "numpy", + "pandas", + "requests" + ] + } + } + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "!pip install d2l" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "6ymjpFMzhflK", + "outputId": "86f38d14-e603-45b2-b8b0-72507671deeb" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found existing installation: matplotlib 3.3.3\n", + "Uninstalling matplotlib-3.3.3:\n", + " Would remove:\n", + " /usr/local/lib/python3.7/dist-packages/matplotlib-3.3.3-py3.7-nspkg.pth\n", + " /usr/local/lib/python3.7/dist-packages/matplotlib-3.3.3.dist-info/*\n", + " /usr/local/lib/python3.7/dist-packages/matplotlib/*\n", + " /usr/local/lib/python3.7/dist-packages/mpl_toolkits/axes_grid/*\n", + " /usr/local/lib/python3.7/dist-packages/mpl_toolkits/axes_grid1/*\n", + " /usr/local/lib/python3.7/dist-packages/mpl_toolkits/axisartist/*\n", + " /usr/local/lib/python3.7/dist-packages/mpl_toolkits/mplot3d/*\n", + " /usr/local/lib/python3.7/dist-packages/mpl_toolkits/tests/*\n", + " /usr/local/lib/python3.7/dist-packages/pylab.py\n", + "Proceed (y/n)? 
y\n", + " Successfully uninstalled matplotlib-3.3.3\n", + "Collecting matplotlib==3.3.3\n", + " Using cached matplotlib-3.3.3-cp37-cp37m-manylinux1_x86_64.whl (11.6 MB)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3) (3.0.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3) (0.11.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3) (1.3.2)\n", + "Requirement already satisfied: numpy>=1.15 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3) (1.18.5)\n", + "Requirement already satisfied: pillow>=6.2.0 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3) (7.1.2)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3) (2.8.2)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib==3.3.3) (1.15.0)\n", + "Installing collected packages: matplotlib\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n", + "albumentations 0.1.12 requires imgaug<0.2.7,>=0.2.5, but you have imgaug 0.2.9 which is incompatible.\u001b[0m\n", + "Successfully installed matplotlib-3.3.3\n" + ] + }, + { + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "matplotlib", + "mpl_toolkits" + ] + } + } + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found existing installation: imgaug 0.2.9\n", + "Uninstalling imgaug-0.2.9:\n", + " Would remove:\n", + " /usr/local/lib/python3.7/dist-packages/imgaug-0.2.9.dist-info/*\n", + " /usr/local/lib/python3.7/dist-packages/imgaug/*\n", + "Proceed (y/n)? y\n", + " Successfully uninstalled imgaug-0.2.9\n", + "Collecting imgaug==0.2.5\n", + " Downloading imgaug-0.2.5.tar.gz (562 kB)\n", + "\u001b[K |████████████████████████████████| 562 kB 5.9 MB/s \n", + "\u001b[?25hRequirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from imgaug==0.2.5) (1.4.1)\n", + "Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from imgaug==0.2.5) (0.18.3)\n", + "Requirement already satisfied: numpy>=1.7.0 in /usr/local/lib/python3.7/dist-packages (from imgaug==0.2.5) (1.18.5)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from imgaug==0.2.5) (1.15.0)\n", + "Requirement already satisfied: pillow!=7.1.0,!=7.1.1,>=4.3.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) (7.1.2)\n", + "Requirement already satisfied: matplotlib!=3.0.0,>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) (3.3.3)\n", + "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) (2.6.3)\n", + "Requirement already satisfied: PyWavelets>=1.1.1 in 
/usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) (1.2.0)\n", + "Requirement already satisfied: imageio>=2.3.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) (2.4.1)\n", + "Requirement already satisfied: tifffile>=2019.7.26 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug==0.2.5) (2021.11.2)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.11.0->imgaug==0.2.5) (2.8.2)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.11.0->imgaug==0.2.5) (0.11.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.11.0->imgaug==0.2.5) (1.3.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3 in /usr/local/lib/python3.7/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.11.0->imgaug==0.2.5) (3.0.7)\n", + "Building wheels for collected packages: imgaug\n", + " Building wheel for imgaug (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for imgaug: filename=imgaug-0.2.5-py3-none-any.whl size=561438 sha256=a80b9b179cf8a4019a53cbb37fd3accd950ff0f3503e6aa325a359113b3e85bf\n", + " Stored in directory: /root/.cache/pip/wheels/60/dd/38/d1dc2cad2b6a66dc0249261004990bccb0f27985c74ba26e49\n", + "Successfully built imgaug\n", + "Installing collected packages: imgaug\n", + "Successfully installed imgaug-0.2.5\n" + ] + } + ], + "source": [ + "!pip uninstall matplotlib\n", + "!pip install matplotlib==3.3.3\n", + "!pip uninstall imgaug\n", + "!pip install imgaug==0.2.5" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "id": "Iicc3Nx_buRL" + }, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l\n", + "\n", + "batch_size, num_steps = 32, 35\n", + "train_iter, vocab = d2l.load_data_time_machine(batch_size, num_steps)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "id": "_hQcwbeee1Vr" + }, + "outputs": [], + "source": [ + "vocab_size, num_hiddens, num_layers = len(vocab), 256, 2 #num_layers为RNN层数,默认是1\n", + "num_inputs = vocab_size\n", + "device = d2l.try_gpu()\n", + "lstm_layer = nn.LSTM(num_inputs, num_hiddens, num_layers)\n", + "model = d2l.RNNModel(lstm_layer, len(vocab))\n", + "model = model.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 460 + }, + "id": "tgkH4BEVgcll", + "outputId": "9a930e95-1db1-4278-a63e-564be52c12ec" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "perplexity 1.0, 6430.2 tokens/sec on cpu\n", + "time travelleryou can show black is white by argument said filby\n", + "travelleryou can show black is white by argument said filby\n" + ] + }, + { + "ename": "ImportError", + "evalue": "ignored", + "output_type": "error", + "traceback": [ + 
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/formatters.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, obj)\u001b[0m\n\u001b[1;32m 332\u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 333\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 334\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mprinter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 335\u001b[0m \u001b[0;31m# Finally look for special method names\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 336\u001b[0m \u001b[0mmethod\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_real_method\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_method\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/pylabtools.py\u001b[0m in \u001b[0;36m\u001b[0;34m(fig)\u001b[0m\n\u001b[1;32m 245\u001b[0m \u001b[0mjpg_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'jpg'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 246\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m'svg'\u001b[0m 
\u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 247\u001b[0;31m \u001b[0msvg_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'svg'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 248\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m'pdf'\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 249\u001b[0m \u001b[0mpdf_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'pdf'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/pylabtools.py\u001b[0m in \u001b[0;36mprint_figure\u001b[0;34m(fig, fmt, bbox_inches, **kwargs)\u001b[0m\n\u001b[1;32m 123\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[0mbytes_io\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mBytesIO\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 125\u001b[0;31m 
\u001b[0mfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcanvas\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbytes_io\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 126\u001b[0m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbytes_io\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgetvalue\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 127\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfmt\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'svg'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in \u001b[0;36mprint_figure\u001b[0;34m(self, filename, dpi, facecolor, edgecolor, orientation, format, bbox_inches, **kwargs)\u001b[0m\n\u001b[1;32m 2057\u001b[0m \u001b[0mIf\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0mbackend\u001b[0m\u001b[0;34m*\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mthen\u001b[0m \u001b[0mdetermine\u001b[0m \u001b[0ma\u001b[0m \u001b[0msuitable\u001b[0m \u001b[0mcanvas\u001b[0m \u001b[0;32mclass\u001b[0m \u001b[0;32mfor\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2058\u001b[0m \u001b[0msaving\u001b[0m \u001b[0mto\u001b[0m \u001b[0mformat\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0mfmt\u001b[0m\u001b[0;34m*\u001b[0m \u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m \u001b[0meither\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mcurrent\u001b[0m \u001b[0mcanvas\u001b[0m \u001b[0;32mclass\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mit\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2059\u001b[0;31m \u001b[0msupports\u001b[0m 
\u001b[0;34m*\u001b[0m\u001b[0mfmt\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mwhatever\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m`\u001b[0m\u001b[0mget_registered_canvas_class\u001b[0m\u001b[0;31m`\u001b[0m \u001b[0mreturns\u001b[0m\u001b[0;34m;\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2060\u001b[0m \u001b[0mswitch\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mfigure\u001b[0m \u001b[0mcanvas\u001b[0m \u001b[0mto\u001b[0m \u001b[0mthat\u001b[0m \u001b[0mcanvas\u001b[0m \u001b[0;32mclass\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2061\u001b[0m \"\"\"\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in \u001b[0;36m_get_output_canvas\u001b[0;34m(self, fmt)\u001b[0m\n\u001b[1;32m 1991\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmouse_grabber\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0max\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1992\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmouse_grabber\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1993\u001b[0;31m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1994\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mdraw\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1995\u001b[0m \u001b[0;34m\"\"\"Render the `.Figure`.\"\"\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in 
\u001b[0;36mget_registered_canvas_class\u001b[0;34m(format)\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 125\u001b[0m \u001b[0mThe\u001b[0m \u001b[0mfollowing\u001b[0m \u001b[0mmethods\u001b[0m \u001b[0mmust\u001b[0m \u001b[0mbe\u001b[0m \u001b[0mimplemented\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mbackend\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfull\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 126\u001b[0;31m functionality (though just implementing :meth:`draw_path` alone would\n\u001b[0m\u001b[1;32m 127\u001b[0m give a highly capable backend):\n\u001b[1;32m 128\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/__init__.py\u001b[0m in \u001b[0;36mimport_module\u001b[0;34m(name, package)\u001b[0m\n\u001b[1;32m 125\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 126\u001b[0m \u001b[0mlevel\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 127\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0m_bootstrap\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_gcd_import\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mlevel\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpackage\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlevel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 128\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 129\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_gcd_import\u001b[0;34m(name, package, level)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_find_and_load\u001b[0;34m(name, import_)\u001b[0m\n", + 
"\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_find_and_load_unlocked\u001b[0;34m(name, import_)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_load_unlocked\u001b[0;34m(spec)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap_external.py\u001b[0m in \u001b[0;36mexec_module\u001b[0;34m(self, module)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_call_with_frames_removed\u001b[0;34m(f, *args, **kwds)\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backends/backend_svg.py\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 16\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mmatplotlib\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mmpl\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mmatplotlib\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mcbook\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 18\u001b[0;31m from matplotlib.backend_bases import (\n\u001b[0m\u001b[1;32m 19\u001b[0m \u001b[0m_Backend\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_check_savefig_extra_args\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mFigureCanvasBase\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mFigureManagerBase\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 20\u001b[0m RendererBase)\n", + "\u001b[0;31mImportError\u001b[0m: cannot import name '_check_savefig_extra_args' from 'matplotlib.backend_bases' (/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py)" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "num_epochs, lr = 500, 2\n", + "d2l.train_ch8(model, train_iter, vocab, lr, num_epochs, device)#colab上运行约十分钟" + ] + } + ], + "metadata": { + "colab": 
{ + "collapsed_sections": [], + "name": "Untitled1.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.0" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git "a/code/60_\346\234\272\345\231\250\347\277\273\350\257\221\344\270\216\346\225\260\346\215\256\351\233\206.ipynb" "b/code/60_\346\234\272\345\231\250\347\277\273\350\257\221\344\270\216\346\225\260\346\215\256\351\233\206.ipynb" new file mode 100644 index 0000000..8473035 --- /dev/null +++ "b/code/60_\346\234\272\345\231\250\347\277\273\350\257\221\344\270\216\346\225\260\346\215\256\351\233\206.ipynb" @@ -0,0 +1,538 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "60-机器翻译与数据集.ipynb", + "provenance": [], + "collapsed_sections": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "source": [ + "#机器翻译与数据集\n", + "机器翻译指的是将序列从一种语言自动翻译成另一种语言。" + ], + "metadata": { + "id": "8TduQRbKaAnl" + } + }, + { + "cell_type": "code", + "source": [ + "%tensorflow_version 2.x\n", + "import tensorflow as tf\n", + "device_name = tf.test.gpu_device_name()\n", + "if device_name != '/device:GPU:0':\n", + " raise SystemError('GPU device not found')\n", + "print('Found GPU at: {}'.format(device_name))" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "PTvbH-DneRvs", + "outputId": "f03dfe22-db59-4a6f-b54b-169080869755" + }, + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Found GPU at: /device:GPU:0\n" + ] + } + ] + }, + { 
+ "cell_type": "code", + "source": [ + "!pip3 install torch torchvision torchaudio" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "7u9bteZce0R-", + "outputId": "870f6238-ec9c-4175-f572-c4e0eab1d94e" + }, + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Requirement already satisfied: torch in /usr/local/lib/python3.7/dist-packages (1.10.0+cu111)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (0.11.1+cu111)\n", + "Requirement already satisfied: torchaudio in /usr/local/lib/python3.7/dist-packages (0.10.0+cu111)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch) (3.10.0.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from torchvision) (1.21.5)\n", + "Requirement already satisfied: pillow!=8.3.0,>=5.3.0 in /usr/local/lib/python3.7/dist-packages (from torchvision) (7.1.2)\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!pip3 install d2l==0.14" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Od7vqKAJe2TS", + "outputId": "ba1d1f81-5881-40b0-f54b-8a23814f0b7d" + }, + "execution_count": 9, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting d2l==0.14\n", + " Downloading d2l-0.14.0-py3-none-any.whl (48 kB)\n", + "\u001b[?25l\r\u001b[K |██████▊ | 10 kB 20.0 MB/s eta 0:00:01\r\u001b[K |█████████████▍ | 20 kB 22.9 MB/s eta 0:00:01\r\u001b[K |████████████████████ | 30 kB 10.8 MB/s eta 0:00:01\r\u001b[K |██████████████████████████▉ | 40 kB 8.7 MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 48 kB 2.6 MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.21.5)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) 
(3.2.2)\n", + "Requirement already satisfied: jupyter in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.0.0)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.3.5)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.6.1)\n", + "Requirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.2)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (7.6.5)\n", + "Requirement already satisfied: jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.0)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (4.10.1)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.3.1)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.5.0)\n", + "Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.3.5)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.4.2)\n", + "Requirement already satisfied: pygments in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (2.6.1)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) 
(0.8.1)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (57.4.0)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.0.18)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.7.5)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.8.0)\n", + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.15.0)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.2.5)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (1.0.2)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (3.5.2)\n", + "Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (0.2.0)\n", + "Requirement already satisfied: nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (5.1.3)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.3.3)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.9.2)\n", + "Requirement already satisfied: 
pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (0.18.1)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.11.1)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (21.4.0)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (5.4.0)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.10.0.2)\n", + "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.7.0)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (2.11.3)\n", + "Requirement already satisfied: terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (0.13.1)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (1.8.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (2.8.2)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (22.3.0)\n", + "Requirement already satisfied: ptyprocess in /usr/local/lib/python3.7/dist-packages (from 
terminado>=0.8.1->notebook->jupyter->d2l==0.14) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter->d2l==0.14) (2.0.1)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (1.3.2)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (0.11.0)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (3.0.7)\n", + "Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.8.4)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.7.1)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (1.5.0)\n", + "Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.5.0)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.4)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (4.1.0)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (21.3)\n", + "Requirement already satisfied: webencodings in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (0.5.1)\n", + "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.7/dist-packages (from pandas->d2l==0.14) (2018.9)\n", + "Requirement already satisfied: qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter->d2l==0.14) 
(2.0.1)\n", + "Installing collected packages: d2l\n", + "Successfully installed d2l-0.14.0\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "import os \n", + "import torch\n", + "from d2l import torch as d2l" + ], + "metadata": { + "id": "f255eLvgei3-" + }, + "execution_count": 10, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "##下载和预处理数据集\n", + "Tatoeba项目的双语句子对组成的“英-法”数据集,数据集中的每一行都是制表符分隔的文本序列对" + ], + "metadata": { + "id": "O6qBx3Xmgghh" + } + }, + { + "cell_type": "code", + "source": [ + "d2l.DATA_HUB['fra-eng'] = (d2l.DATA_URL + 'fra-eng.zip', '94646ad1522d915e7b0f9296181140edcf86a4f5')\n", + "\n", + "def read_data_nmt():\n", + " \"\"\"载入 \"英语-法语\" 数据集\"\"\"\n", + " data_dir = d2l.download_extract('fra-eng')\n", + " with open(os.path.join(data_dir,'fra.txt'), 'r', encoding='utf-8') as f:\n", + " return f.read()\n", + "\n", + "raw_text = read_data_nmt()\n", + "print(raw_text[:75])" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "ZD0NjCyvcwNw", + "outputId": "71056472-1b91-4ddc-b46f-51593c81f5bd" + }, + "execution_count": 11, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Downloading ../data/fra-eng.zip from http://d2l-data.s3-accelerate.amazonaws.com/fra-eng.zip...\n", + "Go.\tVa !\n", + "Hi.\tSalut !\n", + "Run!\tCours !\n", + "Run!\tCourez !\n", + "Who?\tQui ?\n", + "Wow!\tÇa alors !\n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "def preprocess_nmt(text):\n", + " \"\"\"预处理 \"英语-法语\" 数据集\"\"\"\n", + " def no_space(char, prev_char):\n", + " return char in set(',.!?') and prev_char != ' '\n", + "\n", + " # 使用空格替换不间断空格\n", + " # 使用小写字母替换大写字母\n", + " text = text.replace('\\u202f', ' ').replace('\\xa0', ' ').lower()\n", + " # 在单词和标点符号之间插入空格\n", + " out = [' ' + char if i > 0 and no_space(char, text[i - 1]) else char for i,char in enumerate(text)]\n", + " return ''.join(out)\n", + "\n", + "text = preprocess_nmt(raw_text)\n", + 
"print(text[:80])" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "_A_cSE-ud-M4", + "outputId": "1768fb9b-607c-4e0d-c8db-bb176ff4170f" + }, + "execution_count": 12, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "go .\tva !\n", + "hi .\tsalut !\n", + "run !\tcours !\n", + "run !\tcourez !\n", + "who ?\tqui ?\n", + "wow !\tça alors !\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##词元化" + ], + "metadata": { + "id": "dsLOTMPIg6EW" + } + }, + { + "cell_type": "code", + "source": [ + "def tokenize_nmt(text, num_examples=None):\n", + " \"\"\"词元化 \"英语-法语\" 数据数据集\"\"\"\n", + " source, target = [], []\n", + " for i, line in enumerate(text.split('\\n')):\n", + " if num_examples and i > num_examples:\n", + " break\n", + " parts = line.split('\\t')\n", + " if len(parts) == 2:\n", + " source.append(parts[0].split(' '))\n", + " target.append(parts[1].split(' '))\n", + " return source, target\n", + "\n", + "source, target = tokenize_nmt(text)" + ], + "metadata": { + "id": "tmA3VhfrgaoZ" + }, + "execution_count": 13, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "source[:6], target[:6]" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "bKYKK24rh-xA", + "outputId": "9145945e-81ac-44b9-c132-227dfd287ce4" + }, + "execution_count": 14, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "([['go', '.'],\n", + " ['hi', '.'],\n", + " ['run', '!'],\n", + " ['run', '!'],\n", + " ['who', '?'],\n", + " ['wow', '!']],\n", + " [['va', '!'],\n", + " ['salut', '!'],\n", + " ['cours', '!'],\n", + " ['courez', '!'],\n", + " ['qui', '?'],\n", + " ['ça', 'alors', '!']])" + ] + }, + "metadata": {}, + "execution_count": 14 + } + ] + }, + { + "cell_type": "code", + "source": [ + "def show_list_len_pair_hist(legend, xlabel, ylabel, xlist, ylist):\n", + " \"\"\"绘制列表长度对的直方图\"\"\"\n", + " d2l.set_figsize()\n", + " 
_,_,patches = d2l.plt.hist([[len(l) for l in xlist], [len(l) for l in ylist]])\n", + " d2l.plt.xlabel(xlabel)\n", + " d2l.plt.ylabel(ylabel)\n", + " for patch in patches[1].patches:\n", + " patch.set_hatch('/')\n", + " d2l.plt.legend(legend)\n", + "\n", + "show_list_len_pair_hist(['source','target'],'#tokens per sequence','count',source,target);" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 262 + }, + "id": "NCkfXYCfiCHu", + "outputId": "f073d017-4cc5-4b15-985c-c04ca49c1831" + }, + "execution_count": 15, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n" + }, + "metadata": { + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "##词表" + ], + "metadata": { + "id": "eG1iSOv0jKL2" + } + }, + { + "cell_type": "code", + "source": [ + "src_vocab 
= d2l.Vocab(source, min_freq=2, reserved_tokens=['<pad>', '<bos>', '<eos>'])\n", + "len(src_vocab)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "KpMT8RFXjHbA", + "outputId": "28693aaa-f684-41fb-dcd9-4658b482b627" + }, + "execution_count": 16, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "10012" + ] + }, + "metadata": {}, + "execution_count": 16 + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## 加载数据集" + ], + "metadata": { + "id": "THSZVlorjgDQ" + } + }, + { + "cell_type": "code", + "source": [ + "def truncate_pad(line, num_steps, padding_token):\n", + "    \"\"\"截断或填充文本序列\"\"\"\n", + "    if len(line) > num_steps:\n", + "        return line[:num_steps] # 截断\n", + "    return line + [padding_token] * (num_steps - len(line)) # 填充\n", + "\n", + "truncate_pad(src_vocab[source[0]], 10, src_vocab['<pad>'])" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "05Y0Jr3Aje15", + "outputId": "83f0773c-4f87-4133-ee3e-09c12e9f0cf8" + }, + "execution_count": 17, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "[47, 4, 1, 1, 1, 1, 1, 1, 1, 1]" + ] + }, + "metadata": {}, + "execution_count": 17 + } + ] + }, + { + "cell_type": "code", + "source": [ + "def build_array_nmt(lines,vocab,num_steps):\n", + "    \"\"\"将机器翻译的文本序列转换成小批量\"\"\"\n", + "    lines = [vocab[l] for l in lines]\n", + "    lines = [l + [vocab['<eos>']] for l in lines]\n", + "    array = torch.tensor([truncate_pad(l,num_steps,vocab['<pad>']) for l in lines])\n", + "    valid_len = (array != vocab['<pad>']).type(torch.int32).sum(1)\n", + "    return array, valid_len" + ], + "metadata": { + "id": "EixkhqVPkMkC" + }, + "execution_count": 18, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## 训练模型" + ], + "metadata": { + "id": "-35TzqErlGRc" + } + }, + { + "cell_type": "code", + "source": [ + "def load_data_nmt(batch_size,num_steps,num_examples=600):\n", + "    \"\"\"返回翻译数据集的迭代器和词表\"\"\"\n", + "    text = 
preprocess_nmt(read_data_nmt())\n", + " source, target = tokenize_nmt(text,num_examples)\n", + " src_vocab = d2l.Vocab(source,min_freq=2,reserved_tokens=['','',''])\n", + " tgt_vocab = d2l.Vocab(target,min_freq=2,reserved_tokens=['','',''])\n", + " src_array,src_valid_len = build_array_nmt(source,src_vocab,num_steps)\n", + " tgt_array,tgt_valid_len = build_array_nmt(target,tgt_vocab,num_steps)\n", + " data_arrays = (src_array,src_valid_len,tgt_array,tgt_valid_len)\n", + " data_iter = d2l.load_array(data_arrays,batch_size)\n", + " return data_iter,src_vocab,tgt_vocab" + ], + "metadata": { + "id": "ejpKvE2tlFWR" + }, + "execution_count": 19, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "train_iter,src_vocab,tgt_vocab = load_data_nmt(batch_size=2,num_steps=8)\n", + "for X,X_valid_len,Y,Y_valid_len in train_iter:\n", + " print('X:',X.type(torch.int32))\n", + " print('X的有效长度:',X_valid_len)\n", + " print('Y:',Y.type(torch.int32))\n", + " print('Y的有效长度:',Y_valid_len)\n", + " break" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "-qF-R81Mm1kx", + "outputId": "d74338a9-2984-4ace-92ad-7872576abf7b" + }, + "execution_count": 20, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "X: tensor([[ 13, 33, 4, 3, 1, 1, 1, 1],\n", + " [170, 8, 4, 3, 1, 1, 1, 1]], dtype=torch.int32)\n", + "X的有效长度: tensor([4, 4])\n", + "Y: tensor([[187, 4, 3, 1, 1, 1, 1, 1],\n", + " [ 0, 5, 3, 1, 1, 1, 1, 1]], dtype=torch.int32)\n", + "Y的有效长度: tensor([3, 3])\n" + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/code/62-Seq2Seq.ipynb b/code/62-Seq2Seq.ipynb new file mode 100644 index 0000000..acdc349 --- /dev/null +++ b/code/62-Seq2Seq.ipynb @@ -0,0 +1,1565 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 序列到序列学习(seq2seq)\n", + "\n", + "下面,我们动手构建 :numref:`fig_seq2seq`的设计,\n", + "并将基于 :numref:`sec_machine_translation`中\n", + 
"介绍的“英-法”数据集来训练这个机器翻译模型。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import collections\n", + "import math\n", + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "## 编码器\n", + "\n", + "从技术上讲,编码器将长度可变的输入序列转换成\n", + "形状固定的上下文变量$\\mathbf{c}$,\n", + "并且将输入序列的信息在该上下文变量中进行编码。\n", + "如 :numref:`fig_seq2seq`所示,可以使用循环神经网络来设计编码器。\n", + "\n", + "考虑由一个序列组成的样本(批量大小是$1$)。\n", + "假设输入序列是$x_1, \\ldots, x_T$,\n", + "其中$x_t$是输入文本序列中的第$t$个词元。\n", + "在时间步$t$,循环神经网络将词元$x_t$的输入特征向量\n", + "$\\mathbf{x}_t$和$\\mathbf{h} _{t-1}$(即上一时间步的隐状态)\n", + "转换为$\\mathbf{h}_t$(即当前步的隐状态)。\n", + "使用一个函数$f$来描述循环神经网络的循环层所做的变换:\n", + "\n", + "$$\\mathbf{h}_t = f(\\mathbf{x}_t, \\mathbf{h}_{t-1}). $$\n", + "\n", + "总之,编码器通过选定的函数$q$,\n", + "将所有时间步的隐状态转换为上下文变量:\n", + "\n", + "$$\\mathbf{c} = q(\\mathbf{h}_1, \\ldots, \\mathbf{h}_T).$$\n", + "\n", + "比如,当选择$q(\\mathbf{h}_1, \\ldots, \\mathbf{h}_T) = \\mathbf{h}_T$时\n", + "(就像 :numref:`fig_seq2seq`中一样),\n", + "上下文变量仅仅是输入序列在最后时间步的隐状态$\\mathbf{h}_T$。\n", + "\n", + "到目前为止,我们使用的是一个单向循环神经网络来设计编码器,\n", + "其中隐状态只依赖于输入子序列,\n", + "这个子序列是由输入序列的开始位置到隐状态所在的时间步的位置\n", + "(包括隐状态所在的时间步)组成。\n", + "我们也可以使用双向循环神经网络构造编码器,\n", + "其中隐状态依赖于两个输入子序列,\n", + "两个子序列是由隐状态所在的时间步的位置之前的序列和之后的序列\n", + "(包括隐状态所在的时间步),\n", + "因此隐状态对整个序列的信息都进行了编码。\n", + "\n", + "现在,让我们[**实现循环神经网络编码器**]。\n", + "注意,我们使用了*嵌入层*(embedding layer)\n", + "来获得输入序列中每个词元的特征向量。\n", + "嵌入层的权重是一个矩阵,\n", + "其行数等于输入词表的大小(`vocab_size`),\n", + "其列数等于特征向量的维度(`embed_size`)。\n", + "对于任意输入词元的索引$i$,\n", + "嵌入层获取权重矩阵的第$i$行(从$0$开始)以返回其特征向量。\n", + "另外,本文选择了一个多层门控循环单元来实现编码器。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "class Seq2SeqEncoder(d2l.Encoder):\n", + " 
\"\"\"用于序列到序列学习的循环神经网络编码器\"\"\"\n", + " def __init__(self, vocab_size, embed_size, num_hiddens, num_layers,\n", + " dropout=0, **kwargs):\n", + " super(Seq2SeqEncoder, self).__init__(**kwargs)\n", + " # 嵌入层\n", + " self.embedding = nn.Embedding(vocab_size, embed_size)\n", + " self.rnn = nn.GRU(embed_size, num_hiddens, num_layers,\n", + " dropout=dropout)\n", + "\n", + " def forward(self, X, *args):\n", + " # 输出'X'的形状:(batch_size,num_steps,embed_size)\n", + " X = self.embedding(X)\n", + " # 在循环神经网络模型中,第一个轴对应于时间步\n", + " X = X.permute(1, 0, 2)\n", + " # 如果未提及状态,则默认为0\n", + " output, state = self.rnn(X)\n", + " # output的形状:(num_steps,batch_size,num_hiddens)\n", + " # state[0]的形状:(num_layers,batch_size,num_hiddens)\n", + " return output, state" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "下面,我们实例化[**上述编码器的实现**]:\n", + "我们使用一个两层门控循环单元编码器,其隐藏单元数为$16$。\n", + "给定一小批量的输入序列`X`(批量大小为$4$,时间步为$7$)。\n", + "在完成所有时间步后,\n", + "最后一层的隐状态的输出是一个张量(`output`由编码器的循环层返回),\n", + "其形状为(时间步数,批量大小,隐藏单元数)。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 10, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([7, 4, 16])" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "encoder = Seq2SeqEncoder(vocab_size=10, embed_size=8, num_hiddens=16,\n", + " num_layers=2)\n", + "encoder.eval()\n", + "X = torch.zeros((4, 7), dtype=torch.long)\n", + "output, state = encoder(X)\n", + "output.shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 12 + }, + "source": [ + "由于这里使用的是门控循环单元,\n", + "所以在最后一个时间步的多层隐状态的形状是\n", + "(隐藏层的数量,批量大小,隐藏单元的数量)。\n", + "如果使用长短期记忆网络,`state`中还将包含记忆单元信息。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 14, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 4, 16])" + ] 
+ }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "state.shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 16 + }, + "source": [ + "## [解码器]\n", + ":label:`sec_seq2seq_decoder`\n", + "\n", + "正如上文提到的,编码器输出的上下文变量$\\mathbf{c}$\n", + "对整个输入序列$x_1, \\ldots, x_T$进行编码。\n", + "来自训练数据集的输出序列$y_1, y_2, \\ldots, y_{T'}$,\n", + "对于每个时间步$t'$(与输入序列或编码器的时间步$t$不同),\n", + "解码器输出$y_{t'}$的概率取决于先前的输出子序列\n", + "$y_1, \\ldots, y_{t'-1}$和上下文变量$\\mathbf{c}$,\n", + "即$P(y_{t'} \\mid y_1, \\ldots, y_{t'-1}, \\mathbf{c})$。\n", + "\n", + "为了在序列上模型化这种条件概率,\n", + "我们可以使用另一个循环神经网络作为解码器。\n", + "在输出序列上的任意时间步$t^\\prime$,\n", + "循环神经网络将来自上一时间步的输出$y_{t^\\prime-1}$\n", + "和上下文变量$\\mathbf{c}$作为其输入,\n", + "然后在当前时间步将它们和上一隐状态\n", + "$\\mathbf{s}_{t^\\prime-1}$转换为\n", + "隐状态$\\mathbf{s}_{t^\\prime}$。\n", + "因此,可以使用函数$g$来表示解码器的隐藏层的变换:\n", + "\n", + "$$\\mathbf{s}_{t^\\prime} = g(y_{t^\\prime-1}, \\mathbf{c}, \\mathbf{s}_{t^\\prime-1}).$$\n", + ":eqlabel:`eq_seq2seq_s_t`\n", + "\n", + "在获得解码器的隐状态之后,\n", + "我们可以使用输出层和softmax操作\n", + "来计算在时间步$t^\\prime$时输出$y_{t^\\prime}$的条件概率分布\n", + "$P(y_{t^\\prime} \\mid y_1, \\ldots, y_{t^\\prime-1}, \\mathbf{c})$。\n", + "\n", + "根据 :numref:`fig_seq2seq`,当实现解码器时,\n", + "我们直接使用编码器最后一个时间步的隐状态来初始化解码器的隐状态。\n", + "这就要求使用循环神经网络实现的编码器和解码器具有相同数量的层和隐藏单元。\n", + "为了进一步包含经过编码的输入序列的信息,\n", + "上下文变量在所有的时间步与解码器的输入进行拼接(concatenate)。\n", + "为了预测输出词元的概率分布,\n", + "在循环神经网络解码器的最后一层使用全连接层来变换隐状态。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 18, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "class Seq2SeqDecoder(d2l.Decoder):\n", + " \"\"\"用于序列到序列学习的循环神经网络解码器\"\"\"\n", + " def __init__(self, vocab_size, embed_size, num_hiddens, num_layers,\n", + " dropout=0, **kwargs):\n", + " super(Seq2SeqDecoder, self).__init__(**kwargs)\n", + " self.embedding = nn.Embedding(vocab_size, embed_size)\n", + " self.rnn = nn.GRU(embed_size + num_hiddens, 
num_hiddens, num_layers,\n", + " dropout=dropout)\n", + " self.dense = nn.Linear(num_hiddens, vocab_size)\n", + "\n", + " def init_state(self, enc_outputs, *args):\n", + " return enc_outputs[1]\n", + "\n", + " def forward(self, X, state):\n", + " # 输出'X'的形状:(batch_size,num_steps,embed_size)\n", + " X = self.embedding(X).permute(1, 0, 2)\n", + " # 广播context,使其具有与X相同的num_steps\n", + " context = state[-1].repeat(X.shape[0], 1, 1)\n", + " X_and_context = torch.cat((X, context), 2)\n", + " output, state = self.rnn(X_and_context, state)\n", + " output = self.dense(output).permute(1, 0, 2)\n", + " # output的形状:(batch_size,num_steps,vocab_size)\n", + " # state[0]的形状:(num_layers,batch_size,num_hiddens)\n", + " return output, state" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 20 + }, + "source": [ + "下面,我们用与前面提到的编码器中相同的超参数来[**实例化解码器**]。\n", + "如我们所见,解码器的输出形状变为(批量大小,时间步数,词表大小),\n", + "其中张量的最后一个维度存储预测的词元分布。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 22, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(torch.Size([4, 7, 10]), torch.Size([2, 4, 16]))" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "decoder = Seq2SeqDecoder(vocab_size=10, embed_size=8, num_hiddens=16,\n", + " num_layers=2)\n", + "decoder.eval()\n", + "state = decoder.init_state(encoder(X))\n", + "output, state = decoder(X, state)\n", + "output.shape, state.shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 24 + }, + "source": [ + "## 损失函数\n", + "\n", + "在每个时间步,解码器预测了输出词元的概率分布。\n", + "类似于语言模型,可以使用softmax来获得分布,\n", + "并通过计算交叉熵损失函数来进行优化。\n", + "回想一下 :numref:`sec_machine_translation`中,\n", + "特定的填充词元被添加到序列的末尾,\n", + "因此不同长度的序列可以以相同形状的小批量加载。\n", + "但是,我们应该将填充词元的预测排除在损失函数的计算之外。\n", + "\n", + "为此,我们可以使用下面的`sequence_mask`函数\n", + "[**通过零值化屏蔽不相关的项**],\n", + "以便后面任何不相关预测的计算都是与零的乘积,结果都等于零。\n", + 
"例如,如果两个序列的有效长度(不包括填充词元)分别为$1$和$2$,\n", + "则第一个序列的第一项和第二个序列的前两项之后的剩余项将被清除为零。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 26, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[1, 0, 0],\n", + " [4, 5, 0]])" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "#@save\n", + "def sequence_mask(X, valid_len, value=0):\n", + " \"\"\"在序列中屏蔽不相关的项\"\"\"\n", + " maxlen = X.size(1)\n", + " mask = torch.arange((maxlen), dtype=torch.float32,\n", + " device=X.device)[None, :] < valid_len[:, None]\n", + " X[~mask] = value\n", + " return X\n", + "\n", + "X = torch.tensor([[1, 2, 3], [4, 5, 6]])\n", + "sequence_mask(X, torch.tensor([1, 2]))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 28 + }, + "source": [ + "(**我们还可以使用此函数屏蔽最后几个轴上的所有项。**)如果愿意,也可以使用指定的非零值来替换这些项。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 30, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[ 1., 1., 1., 1.],\n", + " [-1., -1., -1., -1.],\n", + " [-1., -1., -1., -1.]],\n", + "\n", + " [[ 1., 1., 1., 1.],\n", + " [ 1., 1., 1., 1.],\n", + " [-1., -1., -1., -1.]]])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X = torch.ones(2, 3, 4)\n", + "sequence_mask(X, torch.tensor([1, 2]), value=-1)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 32 + }, + "source": [ + "现在,我们可以[**通过扩展softmax交叉熵损失函数来遮蔽不相关的预测**]。\n", + "最初,所有预测词元的掩码都设置为1。\n", + "一旦给定了有效长度,与填充词元对应的掩码将被设置为0。\n", + "最后,将所有词元的损失乘以掩码,以过滤掉损失中填充词元产生的不相关预测。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "origin_pos": 34, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "class MaskedSoftmaxCELoss(nn.CrossEntropyLoss):\n", + " 
\"\"\"带遮蔽的softmax交叉熵损失函数\"\"\"\n", + " # pred的形状:(batch_size,num_steps,vocab_size)\n", + " # label的形状:(batch_size,num_steps)\n", + " # valid_len的形状:(batch_size,)\n", + " def forward(self, pred, label, valid_len):\n", + " weights = torch.ones_like(label)\n", + " weights = sequence_mask(weights, valid_len)\n", + " self.reduction='none'\n", + " unweighted_loss = super(MaskedSoftmaxCELoss, self).forward(\n", + " pred.permute(0, 2, 1), label)\n", + " weighted_loss = (unweighted_loss * weights).mean(dim=1)\n", + " return weighted_loss" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 36 + }, + "source": [ + "我们可以创建三个相同的序列来进行[**代码健全性检查**],\n", + "然后分别指定这些序列的有效长度为$4$、$2$和$0$。\n", + "结果就是,第一个序列的损失应为第二个序列的两倍,而第三个序列的损失应为零。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "origin_pos": 38, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([2.3026, 1.1513, 0.0000])" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "loss = MaskedSoftmaxCELoss()\n", + "loss(torch.ones(3, 4, 10), torch.ones((3, 4), dtype=torch.long),\n", + " torch.tensor([4, 2, 0]))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 40 + }, + "source": [ + "## [**训练**]\n", + ":label:`sec_seq2seq_training`\n", + "\n", + "在下面的循环训练过程中,如 :numref:`fig_seq2seq`所示,\n", + "特定的序列开始词元(“<bos>”)和\n", + "原始的输出序列(不包括序列结束词元“<eos>”)\n", + "拼接在一起作为解码器的输入。\n", + "这被称为*强制教学*(teacher forcing),\n", + "因为原始的输出序列(词元的标签)被送入解码器。\n", + "或者,将来自上一个时间步的*预测*得到的词元作为解码器的当前输入。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "origin_pos": 42, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import os\n", + "os.environ[\"KMP_DUPLICATE_LIB_OK\"] = \"TRUE\"\n", + "\n", + "#@save\n", + "def train_seq2seq(net, data_iter, lr, num_epochs, tgt_vocab, device):\n", + " \"\"\"训练序列到序列模型\"\"\"\n", + " def xavier_init_weights(m):\n", 
+ " if type(m) == nn.Linear:\n", + " nn.init.xavier_uniform_(m.weight)\n", + " if type(m) == nn.GRU:\n", + " for param in m._flat_weights_names:\n", + " if \"weight\" in param:\n", + " nn.init.xavier_uniform_(m._parameters[param])\n", + "\n", + " net.apply(xavier_init_weights)\n", + " net.to(device)\n", + " optimizer = torch.optim.Adam(net.parameters(), lr=lr)\n", + " loss = MaskedSoftmaxCELoss()\n", + " net.train()\n", + " animator = d2l.Animator(xlabel='epoch', ylabel='loss',\n", + " xlim=[10, num_epochs])\n", + " for epoch in range(num_epochs):\n", + " timer = d2l.Timer()\n", + " metric = d2l.Accumulator(2) # 训练损失总和,词元数量\n", + " for batch in data_iter:\n", + " optimizer.zero_grad()\n", + " X, X_valid_len, Y, Y_valid_len = [x.to(device) for x in batch]\n", + " bos = torch.tensor([tgt_vocab['']] * Y.shape[0],\n", + " device=device).reshape(-1, 1)\n", + " dec_input = torch.cat([bos, Y[:, :-1]], 1) # 强制教学\n", + " Y_hat, _ = net(X, dec_input, X_valid_len)\n", + " l = loss(Y_hat, Y, Y_valid_len)\n", + " l.sum().backward()\t# 损失函数的标量进行“反向传播”\n", + " d2l.grad_clipping(net, 1)\n", + " num_tokens = Y_valid_len.sum()\n", + " optimizer.step()\n", + " with torch.no_grad():\n", + " metric.add(l.sum(), num_tokens)\n", + " if (epoch + 1) % 10 == 0:\n", + " animator.add(epoch + 1, (metric[0] / metric[1],))\n", + " print(f'loss {metric[0] / metric[1]:.3f}, {metric[1] / timer.stop():.1f} '\n", + " f'tokens/sec on {str(device)}')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 44 + }, + "source": [ + "现在,在机器翻译数据集上,我们可以\n", + "[**创建和训练一个循环神经网络“编码器-解码器”模型**]用于序列到序列的学习。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "origin_pos": 45, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "loss 0.019, 14965.4 tokens/sec on cuda:0\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 
2022-02-16T17:14:13.851671\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib v3.4.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 
\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "embed_size, num_hiddens, num_layers, dropout = 32, 32, 2, 0.1\n", + "batch_size, num_steps = 64, 10\n", + "lr, num_epochs, device = 0.005, 300, d2l.try_gpu()\n", + "\n", + "train_iter, src_vocab, tgt_vocab = d2l.load_data_nmt(batch_size, num_steps)\n", + "encoder = Seq2SeqEncoder(len(src_vocab), embed_size, num_hiddens, num_layers,\n", + " dropout)\n", + "decoder = Seq2SeqDecoder(len(tgt_vocab), embed_size, num_hiddens, num_layers,\n", + " dropout)\n", + "net = d2l.EncoderDecoder(encoder, decoder)\n", + "train_seq2seq(net, train_iter, lr, num_epochs, tgt_vocab, device)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 46 + }, + 
"source": [ + "## [**预测**]\n", + "\n", + "为了采用一个接着一个词元的方式预测输出序列,\n", + "每个解码器当前时间步的输入都将来自于前一时间步的预测词元。\n", + "与训练类似,序列开始词元(“<bos>”)\n", + "在初始时间步被输入到解码器中。\n", + "该预测过程如 :numref:`fig_seq2seq_predict`所示,\n", + "当输出序列的预测遇到序列结束词元(“<eos>”)时,预测就结束了。\n", + "我们将在 :numref:`sec_beam-search`中介绍不同的序列生成策略。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "origin_pos": 48, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "def predict_seq2seq(net, src_sentence, src_vocab, tgt_vocab, num_steps,\n", + "                    device, save_attention_weights=False):\n", + "    \"\"\"序列到序列模型的预测\"\"\"\n", + "    # 在预测时将net设置为评估模式\n", + "    net.eval()\n", + "    src_tokens = src_vocab[src_sentence.lower().split(' ')] + [\n", + "        src_vocab['<eos>']]\n", + "    enc_valid_len = torch.tensor([len(src_tokens)], device=device)\n", + "    src_tokens = d2l.truncate_pad(src_tokens, num_steps, src_vocab['<pad>'])\n", + "    # 添加批量轴\n", + "    enc_X = torch.unsqueeze(\n", + "        torch.tensor(src_tokens, dtype=torch.long, device=device), dim=0)\n", + "    enc_outputs = net.encoder(enc_X, enc_valid_len)\n", + "    dec_state = net.decoder.init_state(enc_outputs, enc_valid_len)\n", + "    # 添加批量轴\n", + "    dec_X = torch.unsqueeze(torch.tensor(\n", + "        [tgt_vocab['<bos>']], dtype=torch.long, device=device), dim=0)\n", + "    output_seq, attention_weight_seq = [], []\n", + "    for _ in range(num_steps):\n", + "        Y, dec_state = net.decoder(dec_X, dec_state) ### 【注意!!!】\n", + "        # 我们使用具有预测最高可能性的词元,作为解码器在下一时间步的输入\n", + "        dec_X = Y.argmax(dim=2)\n", + "        pred = dec_X.squeeze(dim=0).type(torch.int32).item()\n", + "        # 保存注意力权重(稍后讨论)\n", + "        if save_attention_weights:\n", + "            attention_weight_seq.append(net.decoder.attention_weights)\n", + "        # 一旦序列结束词元被预测,输出序列的生成就完成了\n", + "        if pred == tgt_vocab['<eos>']:\n", + "            break\n", + "        output_seq.append(pred)\n", + "    return ' '.join(tgt_vocab.to_tokens(output_seq)), attention_weight_seq" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## [性能改进]\n", + 
"在predict_seq2seq函数的实现中,标有【注意!!!】的一行,原来[Seq2SeqDecoder](#[解码器])传入第二个参数的本意是希望拿到enc_state,然而这里却是传入一个变化的decoder的隐状态,不再是encoder中的上下文信息,我们猜测修复这个bug可以提升性能,做出以下尝试:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "#@save\n", + "def my_predict_seq2seq(net, src_sentence, src_vocab, tgt_vocab, num_steps,\n", + " device, save_attention_weights=False):\n", + " \"\"\"序列到序列模型的预测\"\"\"\n", + " # 在预测时将net设置为评估模式\n", + " net.eval()\n", + " src_tokens = src_vocab[src_sentence.lower().split(' ')] + [\n", + " src_vocab['']]\n", + " enc_valid_len = torch.tensor([len(src_tokens)], device=device)\n", + " src_tokens = d2l.truncate_pad(src_tokens, num_steps, src_vocab[''])\n", + " # 添加批量轴\n", + " enc_X = torch.unsqueeze(\n", + " torch.tensor(src_tokens, dtype=torch.long, device=device), dim=0)\n", + " enc_outputs = net.encoder(enc_X, enc_valid_len)\n", + " enc_state = hidden_state = net.decoder.init_state(enc_outputs, enc_valid_len)\n", + " # 添加批量轴\n", + " dec_X = torch.unsqueeze(torch.tensor(\n", + " [tgt_vocab['']], dtype=torch.long, device=device), dim=0)\n", + " output_seq, attention_weight_seq = [], []\n", + " \n", + " # 重新定义decoder的forward函数,传入参数增加不变的enc_state,以及变化的解码器的hidden_state\n", + " def my_forward(X, enc_state, hidden_state):\n", + " X = net.decoder.embedding(X).permute(1, 0, 2)\n", + " context = enc_state[-1].repeat(X.shape[0], 1, 1)\n", + " X_and_context = torch.cat((X, context), 2)\n", + " output, state = net.decoder.rnn(X_and_context, hidden_state)\n", + " output = net.decoder.dense(output).permute(1,0,2)\n", + " return output, state\n", + " \n", + " for _ in range(num_steps):\n", + " Y, hidden_state = my_forward(dec_X, enc_state, hidden_state)\n", + " # 我们使用具有预测最高可能性的词元,作为解码器在下一时间步的输入\n", + " dec_X = Y.argmax(dim=2)\n", + " pred = dec_X.squeeze(dim=0).type(torch.int32).item()\n", + " # 保存注意力权重(稍后讨论)\n", + " if save_attention_weights:\n", + " attention_weight_seq.append(net.decoder.attention_weights)\n", + " # 
一旦序列结束词元被预测,输出序列的生成就完成了\n", + " if pred == tgt_vocab['']:\n", + " break\n", + " output_seq.append(pred)\n", + " return ' '.join(tgt_vocab.to_tokens(output_seq)), attention_weight_seq" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 50 + }, + "source": [ + "## 预测序列的评估\n", + "\n", + "我们可以通过与真实的标签序列进行比较来评估预测序列。\n", + "虽然 :cite:`Papineni.Roukos.Ward.ea.2002`\n", + "提出的BLEU(bilingual evaluation understudy)\n", + "最先是用于评估机器翻译的结果,\n", + "但现在它已经被广泛用于测量许多应用的输出序列的质量。\n", + "原则上说,对于预测序列中的任意$n$元语法(n-grams),\n", + "BLEU的评估都是这个$n$元语法是否出现在标签序列中。\n", + "\n", + "我们将BLEU定义为:\n", + "\n", + "$$ \\exp\\left(\\min\\left(0, 1 - \\frac{\\mathrm{len}_{\\text{label}}}{\\mathrm{len}_{\\text{pred}}}\\right)\\right) \\prod_{n=1}^k p_n^{1/2^n},$$\n", + ":eqlabel:`eq_bleu`\n", + "\n", + "其中$\\mathrm{len}_{\\text{label}}$表示标签序列中的词元数和\n", + "$\\mathrm{len}_{\\text{pred}}$表示预测序列中的词元数,\n", + "$k$是用于匹配的最长的$n$元语法。\n", + "另外,用$p_n$表示$n$元语法的精确度,它是两个数量的比值:\n", + "第一个是预测序列与标签序列中匹配的$n$元语法的数量,\n", + "第二个是预测序列中$n$元语法的数量的比率。\n", + "具体地说,给定标签序列$A$、$B$、$C$、$D$、$E$、$F$\n", + "和预测序列$A$、$B$、$B$、$C$、$D$,\n", + "我们有$p_1 = 4/5$、$p_2 = 3/4$、$p_3 = 1/3$和$p_4 = 0$。\n", + "\n", + "根据 :eqref:`eq_bleu`中BLEU的定义,\n", + "当预测序列与标签序列完全相同时,BLEU为$1$。\n", + "此外,由于$n$元语法越长则匹配难度越大,\n", + "所以BLEU为更长的$n$元语法的精确度分配更大的权重。\n", + "具体来说,当$p_n$固定时,$p_n^{1/2^n}$\n", + "会随着$n$的增长而增加(原始论文使用$p_n^{1/n}$)。\n", + "而且,由于预测的序列越短获得的$p_n$值越高,\n", + "所以 :eqref:`eq_bleu`中乘法项之前的系数用于惩罚较短的预测序列。\n", + "例如,当$k=2$时,给定标签序列$A$、$B$、$C$、$D$、$E$、$F$\n", + "和预测序列$A$、$B$,尽管$p_1 = p_2 = 1$,\n", + "惩罚因子$\\exp(1-6/2) \\approx 0.14$会降低BLEU。\n", + "\n", + "[**BLEU的代码实现**]如下。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "origin_pos": 51, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "def bleu(pred_seq, label_seq, k): #@save\n", + " \"\"\"计算BLEU\"\"\"\n", + " pred_tokens, label_tokens = pred_seq.split(' '), label_seq.split(' ')\n", + " len_pred, len_label = len(pred_tokens), 
len(label_tokens)\n", + " score = math.exp(min(0, 1 - len_label / len_pred))\n", + " for n in range(1, k + 1):\n", + " num_matches, label_subs = 0, collections.defaultdict(int)\n", + " for i in range(len_label - n + 1):\n", + " label_subs[' '.join(label_tokens[i: i + n])] += 1\n", + " for i in range(len_pred - n + 1):\n", + " if label_subs[' '.join(pred_tokens[i: i + n])] > 0:\n", + " num_matches += 1\n", + " label_subs[' '.join(pred_tokens[i: i + n])] -= 1\n", + " score *= math.pow(num_matches / (len_pred - n + 1), math.pow(0.5, n))\n", + " return score" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 52 + }, + "source": [ + "最后,利用训练好的循环神经网络“编码器-解码器”模型,\n", + "[**将几个英语句子翻译成法语**],并计算BLEU的最终结果。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "origin_pos": 53, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "go . => va tomber !, bleu 0.000\n", + "i lost . => j'ai ., bleu 0.000\n", + "he's calm . => assieds-toi ici ., bleu 0.000\n", + "i'm home . => je suis chez paresseux ., bleu 0.752\n" + ] + } + ], + "source": [ + "engs = ['go .', \"i lost .\", 'he\\'s calm .', 'i\\'m home .']\n", + "fras = ['va !', 'j\\'ai perdu .', 'il est calme .', 'je suis chez moi .']\n", + "for eng, fra in zip(engs, fras):\n", + " translation, attention_weight_seq = predict_seq2seq(\n", + " net, eng, src_vocab, tgt_vocab, num_steps, device)\n", + " print(f'{eng} => {translation}, bleu {bleu(translation, fra, k=2):.3f}')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 改进版的翻译效果" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "go . => va !, bleu 1.000\n", + "i lost . => j'ai perdu ., bleu 1.000\n", + "he's calm . => assieds-toi ici ., bleu 0.000\n", + "i'm home . 
=> je suis chez moi ., bleu 1.000\n" + ] + } + ], + "source": [ + "engs = ['go .', \"i lost .\", 'he\\'s calm .', 'i\\'m home .']\n", + "fras = ['va !', 'j\\'ai perdu .', 'il est calme .', 'je suis chez moi .']\n", + "for eng, fra in zip(engs, fras):\n", + " translation, attention_weight_seq = my_predict_seq2seq(\n", + " net, eng, src_vocab, tgt_vocab, num_steps, device)\n", + " print(f'{eng} => {translation}, bleu {bleu(translation, fra, k=2):.3f}')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### [结论]\n", + "1. 可以看出,当改进了预测代码中的传入参数,性能整体有所提升,和所给的真值也基本接近。\n", + "2. 这个bug也提示了我们由于训练和预测时模型调用方式不同。训练时:一次性调用;预测时:逐时间步调用;需要尤为关注传入参数的变化性和不变性。" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 55 + }, + "source": [ + "## 小结\n", + "\n", + "* 根据“编码器-解码器”架构的设计,\n", + " 我们可以使用两个循环神经网络来设计一个序列到序列学习的模型。\n", + "* 在实现编码器和解码器时,我们可以使用多层循环神经网络。\n", + "* 我们可以使用遮蔽来过滤不相关的计算,例如在计算损失时。\n", + "* 在“编码器-解码器”训练中,强制教学方法将原始输出序列(而非预测结果)输入解码器。\n", + "* BLEU是一种常用的评估方法,它通过测量预测序列和标签序列之间的$n$元语法的匹配度来评估预测。\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/64-\346\263\250\346\204\217\345\212\233\346\234\272\345\210\266.ipynb" "b/code/64-\346\263\250\346\204\217\345\212\233\346\234\272\345\210\266.ipynb" new file mode 100644 index 0000000..f06b89b --- /dev/null +++ "b/code/64-\346\263\250\346\204\217\345\212\233\346\234\272\345\210\266.ipynb" @@ -0,0 +1,796 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "YH8iJl3qtk1a" + }, + "source": [ + "#64-注意力机制" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NUoif5LqtOMh" 
+ }, + "source": [ + "##启用GPU\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "slNovdubtU8A", + "outputId": "b898f90e-3a89-4db0-a09b-e021ec110697" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found GPU at: /device:GPU:0\n" + ] + } + ], + "source": [ + "%tensorflow_version 2.x\n", + "import tensorflow as tf\n", + "device_name = tf.test.gpu_device_name()\n", + "if device_name != '/device:GPU:0':\n", + " raise SystemError('GPU device not found')\n", + "print('Found GPU at: {}'.format(device_name))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ZdGON_oxtY34" + }, + "source": [ + "##安装pytorch和d2l" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "shMeiKRmtZe4", + "outputId": "4b905b3c-59be-4c78-ab80-63e1c062bebd" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: torch in /usr/local/lib/python3.7/dist-packages (1.10.0+cu111)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (0.11.1+cu111)\n", + "Requirement already satisfied: torchaudio in /usr/local/lib/python3.7/dist-packages (0.10.0+cu111)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch) (3.10.0.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from torchvision) (1.19.5)\n", + "Requirement already satisfied: pillow!=8.3.0,>=5.3.0 in /usr/local/lib/python3.7/dist-packages (from torchvision) (7.1.2)\n" + ] + } + ], + "source": [ + "!pip3 install torch torchvision torchaudio" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "HakkP1iKtaBH", + "outputId": 
"22f43a5e-b756-4b65-b11c-3a495d7a6dcf" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting d2l==0.14\n", + " Downloading d2l-0.14.0-py3-none-any.whl (48 kB)\n", + "\u001b[K |████████████████████████████████| 48 kB 2.7 MB/s \n", + "\u001b[?25hRequirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (3.2.2)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.3.5)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.19.5)\n", + "Requirement already satisfied: jupyter in /usr/local/lib/python3.7/dist-packages (from d2l==0.14) (1.0.0)\n", + "Requirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.2)\n", + "Requirement already satisfied: jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.2.0)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (7.6.5)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.6.1)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (4.10.1)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter->d2l==0.14) (5.3.1)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.5.0)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter->d2l==0.14) (5.1.1)\n", + "Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages 
(from ipykernel->jupyter->d2l==0.14) (5.3.5)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.0.18)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (57.4.0)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.8.1)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.8.0)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.7.5)\n", + "Requirement already satisfied: pygments in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (2.6.1)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (4.4.2)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (0.2.5)\n", + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter->d2l==0.14) (1.15.0)\n", + "Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (0.2.0)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (3.5.2)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (1.0.2)\n", + "Requirement already satisfied: 
nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter->d2l==0.14) (5.1.3)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.3.3)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.9.1)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.10.0.2)\n", + "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (0.18.1)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (4.10.1)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (21.4.0)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (5.4.0)\n", + "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter->d2l==0.14) (3.7.0)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (1.8.0)\n", + "Requirement already satisfied: terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter->d2l==0.14) (0.13.1)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from 
notebook->jupyter->d2l==0.14) (2.11.3)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (2.8.2)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter->d2l==0.14) (22.3.0)\n", + "Requirement already satisfied: ptyprocess in /usr/local/lib/python3.7/dist-packages (from terminado>=0.8.1->notebook->jupyter->d2l==0.14) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter->d2l==0.14) (2.0.1)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (1.3.2)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (0.11.0)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->d2l==0.14) (3.0.7)\n", + "Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.8.4)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.4)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.7.1)\n", + "Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (0.5.0)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (1.5.0)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter->d2l==0.14) (4.1.0)\n", + "Requirement already satisfied: webencodings in 
/usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (0.5.1)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter->d2l==0.14) (21.3)\n", + "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.7/dist-packages (from pandas->d2l==0.14) (2018.9)\n", + "Requirement already satisfied: qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter->d2l==0.14) (2.0.1)\n", + "Installing collected packages: d2l\n", + "Successfully installed d2l-0.14.0\n" + ] + } + ], + "source": [ + "!pip3 install d2l==0.14" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "853xPsHhptNV" + }, + "source": [ + "###注意力的可视化\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "K7eqrHFes6-W" + }, + "outputs": [], + "source": [ + "import torch\n", + "from d2l import torch as d2l" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "UeJD_VVb3azN" + }, + "source": [ + "\n", + "\n", + "* 可视化注意力的权重[0,1]\n", + "* 输入matrices的形状:要显示的行数,要显示的列数,查询的数目,键的数目\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "OoBxfYpFuC1u" + }, + "outputs": [], + "source": [ + "def show_heatmaps(matrices, xlabel, ylabel, titles=None, figsize=(2.5, 2.5),\n", + " cmap='Reds'):\n", + " \"\"\"显示矩阵热图\"\"\"\n", + " d2l.use_svg_display()\n", + " num_rows, num_cols = matrices.shape[0], matrices.shape[1]\n", + " fig, axes = d2l.plt.subplots(num_rows, num_cols, figsize=figsize,\n", + " sharex=True, sharey=True, squeeze=False)\n", + " for i, (row_axes, row_matrices) in enumerate(zip(axes, matrices)):\n", + " for j, (ax, matrix) in enumerate(zip(row_axes, row_matrices)):\n", + " pcm = ax.imshow(matrix.detach().numpy(), cmap=cmap)\n", + " if i == num_rows - 1:\n", + " ax.set_xlabel(xlabel)\n", + " if j == 0:\n", + " ax.set_ylabel(ylabel)\n", + " if titles:\n", + " 
ax.set_title(titles[j])\n", + " fig.colorbar(pcm, ax=axes, shrink=0.6)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "1XXPEZ8iwOQL" + }, + "source": [ + "概率矩阵演示。仅当查询和键相同时,注意力权重为1,否则为0\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 229 + }, + "id": "MIdsrzecwLAO", + "outputId": "1357ccb1-b285-4e8f-e711-aba6b6864113" + }, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "attention_weights = torch.eye(10).reshape((1, 1, 10, 10))\n", + "show_heatmaps(attention_weights, xlabel='Keys', ylabel='Queries')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "XtPmjBOus5Fp" + }, + "source": [ + "##注意力汇聚:Nadaraya-Watson核回归" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "aGc2DVS6wJtI" + }, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "from d2l 
import torch as d2l" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BYx95qIS5dLs" + }, + "source": [ + "###生成数据集\n", + "\n", + "根据非线性函数:\\begin{equation} y_i = 2sin(x_i) + x_{i}^{0.8} + \\xi \\end{equation}\n", + "生成数据集" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "01_GUkxdz0Tg" + }, + "outputs": [], + "source": [ + "n_train = 50 # 训练样本数\n", + "x_train, _ = torch.sort(torch.rand(n_train)*5) # 排序后的训练样本" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "ObpH0AgS0SnX", + "outputId": "76f11d43-a9c5-403b-825f-5fd4fed24ebe" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "50" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def f(x):\n", + " return 2 * torch.sin(x) + x**0.8\n", + "\n", + "y_train = f(x_train) + torch.normal(0.0,0.5,(n_train,)) # 训练样本的输出\n", + "x_test = torch.arange(0,5,0.1) # 测试样本\n", + "y_truth = f(x_test) # 测试样本的真实输出\n", + "n_test = len(x_test) # 测试样本数\n", + "n_test" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "PATRJgPR7nhh" + }, + "source": [ + "\n", + "\n", + "* 训练样本(由圆圈表示)\n", + "* 不带噪声项的真实数据生成的函数f(标记为\"Truth\")\n", + "* 学习得到的预测函数(标记为\"Pred\")\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "dadBcSmY1Clh" + }, + "outputs": [], + "source": [ + "def plot_kernel_reg(y_hat):\n", + " d2l.plot(x_test,[y_truth,y_hat],'x','y',legend=['Truth','Pred'],xlim=[0,5],ylim=[-1,5])\n", + " d2l.plt.plot(x_train,y_train,'o',alpha=0.5)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "eui1-By52VL_" + }, + "source": [ + "###平均汇聚\n", + "平均汇聚的公式\\begin{equation} f(x) = \\frac{1}{n}Σ_{i=1}^{n}y_i\\end{equation}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 
267 + }, + "id": "eM5YxumL1ujt", + "outputId": "221d8c51-4608-4061-ecd4-6ceaee780027" + }, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "y_hat = torch.repeat_interleave(y_train.mean(),n_test)\n", + "plot_kernel_reg(y_hat)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "wUlibgnD2a0k" + }, + "source": [ + "###非参数注意力汇聚\n", + "由于平均汇聚忽略了输入,因此根据输入的位置对输出进行加权:\n", + "\\begin{equation} f(x)=Σ_{i=1}^{n}\\frac{K(x-x_i)}{Σ_{i=1}^{n}K(x-x_i)}y_i \\end{equation}\n", + "经过改良,获得更通用的注意力汇聚公式:\n", + "\\begin{equation} f(x)=Σ_{i=1}^{n}α(x,x_i)y_i \\end{equation}\n", + "令高斯核为:\n", + "\\begin{equation} K(u)=\\frac{1}{\\sqrt{2π}}exp(-\\frac{u^2}{2}) 
\\end{equation} \n", + "将高斯核带入注意力汇聚公式:\n", + "\\begin{equation} f(x)=Σ_{i=1}^{n}softmax(-\\frac{1}{2}(x-x_i)^2)y_i \\end{equation}\n", + "如果一个键越是接近给定的查询,那么分配给这个键对应值的注意力权重就会越大,也就是“获得了更多的注意力”" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 267 + }, + "id": "9-e7Bhce17vq", + "outputId": "a076cd95-acba-485b-84ff-98e383e7216d" + }, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# X_repeat的形状:(n_test,n_train),\n", + "# 每一行都包含着相同的测试输入(例如:同样的查询)\n", + "X_repeat = x_test.repeat_interleave(n_train).reshape((-1,n_train))\n", + "# 
x_train包含着键。attention_weights的形状:(n_test,n_train),\n", + "# 每一行都包含着要在给定的每个查询的值(y_train)之间分配的注意力权重\n", + "attention_weights = nn.functional.softmax(-(X_repeat-x_train)**2 / 2,dim=1)\n", + "# y_hat的每个元素都是值的加权平均值,其中的权重是注意力权重\n", + "y_hat = torch.matmul(attention_weights,y_train)\n", + "plot_kernel_reg(y_hat)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7C77gUJmBn6u" + }, + "source": [ + "\n", + "* 测试数据的输入相当于查询\n", + "* 训练数据的输入相当于键\n", + "\n", + "“查询——键”对越接近,注意力汇聚的注意力权重越高\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 232 + }, + "id": "mU8NV5Yp2_r7", + "outputId": "dafd509a-789a-44b6-96f4-a79b68738ee7" + }, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "show_heatmaps(attention_weights.unsqueeze(0).unsqueeze(0),xlabel='Sorted training inputs',ylabel='Sorted testing 
inputs')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "TOJQBe9p3uPw" + }, + "source": [ + "###带参数注意力汇聚\n", + "\n", + "\\begin{equation} f(x)=Σ_{i=1}^{n}softmax(-\\frac{1}{2}((x-x_i)\\omega)^2)y_i \\end{equation}\n", + "\n", + "* 批量矩阵乘法\n", + "\n", + "\\begin{equation} (n,a,b),(n,b,c) \\to (n,a,c) \\end{equation}\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "0SHYywlG3XRb", + "outputId": "7fd3eba5-338a-44f9-e2ad-53352771b42c" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 1, 6])" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X = torch.ones((2,1,4))\n", + "Y = torch.ones((2,4,6))\n", + "torch.bmm(X,Y).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "AqYuAVbGC2OJ" + }, + "source": [ + "计算加权平均值" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "fCYBwGT54IOq", + "outputId": "09293569-4798-4595-9f6c-9d2881dd0aa2" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[ 4.5000]],\n", + "\n", + " [[14.5000]]])" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "weights = torch.ones((2,10)) * 0.1\n", + "values = torch.arange(20.0).reshape((2,10))\n", + "torch.bmm(weights.unsqueeze(1),values.unsqueeze(-1))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "JyZSOiee4PSu" + }, + "source": [ + "* 定义模型" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Kr9S9BPm5K2U" + }, + "outputs": [], + "source": [ + "class NWKernelRegression(nn.Module):\n", + " def __init__(self,**kwargs):\n", + " super().__init__(**kwargs)\n", + " self.w = nn.Parameter(torch.rand((1,),requires_grad=True))\n", + "\n", + " def 
forward(self,queries,keys,values):\n", + " # queries和attention_weights的形状为(查询个数,\"键——值\"对个数)\n", + " queries = queries.repeat_interleave(keys.shape[1]).reshape((-1,keys.shape[1]))\n", + " self.attention_weights = nn.functional.softmax(-((queries - keys) * self.w)**2 /2,dim=1)\n", + " # values的形状为(查询个数,\"键——值\"对个数)\n", + " return torch.bmm(self.attention_weights.unsqueeze(1),values.unsqueeze(-1)).reshape(-1)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "xgS-g5fk63_t" + }, + "source": [ + "* 训练" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "XQ0l0wPO6_BM" + }, + "outputs": [], + "source": [ + "# X_title的形状:(n_train,n_train),每一行都包含着相同的训练输入\n", + "X_tile = x_train.repeat((n_train,1))\n", + "# Y_title的形状:(n_train,n_train),每一行都包含着相同的训练输入\n", + "Y_tile = y_train.repeat((n_train,1))\n", + "# keys的形状:('n_train','n_train'-1)\n", + "keys = X_tile[(1 - torch.eye(n_train)).type(torch.bool)].reshape((n_train,-1))\n", + "# values的形状:('n_train','n_train'-1)\n", + "values = Y_tile[(1 - torch.eye(n_train)).type(torch.bool)].reshape((n_train,-1))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "PyHIJdEqERRI" + }, + "source": [ + "使用平方损失函数和随机梯度下降" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 262 + }, + "id": "SrL-oq1D79Ao", + "outputId": "c5668575-5c96-480b-9056-7020e7aa298a" + }, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "net = NWKernelRegression()\n", + "loss = nn.MSELoss(reduction='none')\n", + "trainer = torch.optim.SGD(net.parameters(),lr=0.5)\n", + "animator = d2l.Animator(xlabel='epoch',ylabel='loss',xlim=[1,5])\n", + "\n", + "for epoch in range(5):\n", + " trainer.zero_grad()\n", + " l = loss(net(x_train,keys,values),y_train)\n", + " l.sum().backward()\n", + " trainer.step()\n", + " print(f'epoch{epoch + 1}, loss{float(l.sum()):.6f}')\n", + " animator.add(epoch + 1,float(l.sum()))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "dwPrUM1TEaew" + }, + "source": [ + "带噪声的数据训练结果不如非参数模型的平滑" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 267 + }, + "id": "Ka6ok93A9ty-", + "outputId": "c4ef5c8b-67e4-493e-a869-59c1328810a0" + }, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# keys的形状:(n_test,n_train),每一行包含着相同的训练输入(eg.相同的键)\n", + "keys = x_train.repeat((n_test,1))\n", + "# value的形状:(n_test,n_train)\n", + "values = y_train.repeat((n_test,1))\n", + "y_hat = net(x_test,keys,values).unsqueeze(1).detach()\n", + "plot_kernel_reg(y_hat)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 232 + }, + "id": "aI43gX-tAT1h", + "outputId": "5af9eda8-b035-4493-8fb9-689978a8b5da" + }, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "show_heatmaps(net.attention_weights.unsqueeze(0).unsqueeze(0),xlabel='Sorted training inputs',ylabel='Sorted testing input')" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "Attention.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git "a/code/67-\350\207\252\346\263\250\346\204\217\345\212\233\345\222\214\344\275\215\347\275\256\347\274\226\347\240\201.ipynb" "b/code/67-\350\207\252\346\263\250\346\204\217\345\212\233\345\222\214\344\275\215\347\275\256\347\274\226\347\240\201.ipynb" new file mode 100644 index 0000000..71a98d8 --- /dev/null +++ "b/code/67-\350\207\252\346\263\250\346\204\217\345\212\233\345\222\214\344\275\215\347\275\256\347\274\226\347\240\201.ipynb" @@ -0,0 +1,2376 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "-" + } + }, + "source": [ + "# 自注意力和位置编码\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import math\n", + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "自注意力" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "MultiHeadAttention(\n", + " (attention): 
DotProductAttention(\n", + " (dropout): Dropout(p=0.5, inplace=False)\n", + " )\n", + " (W_q): Linear(in_features=100, out_features=100, bias=False)\n", + " (W_k): Linear(in_features=100, out_features=100, bias=False)\n", + " (W_v): Linear(in_features=100, out_features=100, bias=False)\n", + " (W_o): Linear(in_features=100, out_features=100, bias=False)\n", + ")" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "num_hiddens, num_heads = 100, 5\n", + "attention = d2l.MultiHeadAttention(num_hiddens, num_hiddens, num_hiddens,\n", + " num_hiddens, num_heads, 0.5)\n", + "attention.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 8, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 4, 100])" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "batch_size, num_queries, valid_lens = 2, 4, torch.tensor([3, 2])\n", + "X = torch.ones((batch_size, num_queries, num_hiddens))\n", + "attention(X, X, X, valid_lens).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "位置编码" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 12, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "class PositionalEncoding(nn.Module):\n", + " \"\"\"位置编码\"\"\"\n", + " def __init__(self, num_hiddens, dropout, max_len=1000):\n", + " super(PositionalEncoding, self).__init__()\n", + " self.dropout = nn.Dropout(dropout)\n", + " self.P = torch.zeros((1, max_len, num_hiddens))#num_hiddens特征维度\n", + " X = torch.arange(max_len, dtype=torch.float32).reshape(\n", + " -1, 1) / torch.pow(10000, torch.arange(\n", + " 0, num_hiddens, 2, dtype=torch.float32) / num_hiddens)\n", + " self.P[:, :, 0::2] = torch.sin(X)\n", + " self.P[:, :, 1::2] = torch.cos(X)\n", + " 
#对应位置编码公式\n", + " def forward(self, X):\n", + " X = X + self.P[:, :X.shape[1], :].to(X.device)#取出对应长度的位置编码\n", + " return self.dropout(X)#dropout避免对P太过敏感" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "行代表词元在序列中的位置,列代表位置编码的不同维度" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 16, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-12T21:41:14.308637\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib v3.3.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 
\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + 
" \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "encoding_dim, num_steps = 32, 60\n", + "pos_encoding = PositionalEncoding(encoding_dim, 0)\n", + "pos_encoding.eval()\n", + "X = pos_encoding(torch.zeros((1, num_steps, encoding_dim)))\n", + "P = pos_encoding.P[:, :X.shape[1], :]\n", + "d2l.plot(torch.arange(num_steps), P[0, :, 6:10].T, xlabel='Row (position)',\n", + " figsize=(6, 2.5), legend=[\"Col %d\" % d for d in torch.arange(6, 10)])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "二进制表示" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 19, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0的二进制是:000\n", + 
"1的二进制是:001\n", + "2的二进制是:010\n", + "3的二进制是:011\n", + "4的二进制是:100\n", + "5的二进制是:101\n", + "6的二进制是:110\n", + "7的二进制是:111\n" + ] + } + ], + "source": [ + "for i in range(8):\n", + " print(f'{i}的二进制是:{i:>03b}')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "slideshow": { + "slide_type": "slide" + } + }, + "source": [ + "在编码维度上降低频率" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 22, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\r\n", + "\r\n", + "\r\n", + "\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " 2022-02-12T21:41:18.229147\r\n", + " image/svg+xml\r\n", + " \r\n", + " \r\n", + " Matplotlib v3.3.3, https://matplotlib.org/\r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + 
" \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", 
+ " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + " \r\n", + "\r\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "P = P[0, :, :].unsqueeze(0).unsqueeze(0)\n", + "d2l.show_heatmaps(P, xlabel='Column (encoding dimension)',\n", + " ylabel='Row (position)', figsize=(3.5, 4), cmap='Blues')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "celltoolbar": "Slideshow", + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + }, + "rise": { + "autolaunch": true, + "enable_chalkboard": true, + "overlay": "", + "scroll": true + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/code/68-Transforemer/multihead-attention.ipynb b/code/68-Transforemer/multihead-attention.ipynb new file mode 100644 index 0000000..ff50480 --- /dev/null +++ b/code/68-Transforemer/multihead-attention.ipynb @@ -0,0 +1,336 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# 多头注意力\n", + ":label:`sec_multihead-attention`\n", + "\n", + 
"在实践中,当给定相同的查询、键和值的集合时,\n", + "我们希望模型可以基于相同的注意力机制学习到不同的行为,\n", + "然后将不同的行为作为知识组合起来,\n", + "捕获序列内各种范围的依赖关系\n", + "(例如,短距离依赖和长距离依赖关系)。\n", + "因此,允许注意力机制组合使用查询、键和值的不同\n", + "*子空间表示*(representation subspaces)可能是有益的。\n", + "\n", + "为此,与其只使用单独一个注意力汇聚,\n", + "我们可以用独立学习得到的$h$组不同的\n", + "*线性投影*(linear projections)来变换查询、键和值。\n", + "然后,这$h$组变换后的查询、键和值将并行地送到注意力汇聚中。\n", + "最后,将这$h$个注意力汇聚的输出拼接在一起,\n", + "并且通过另一个可以学习的线性投影进行变换,\n", + "以产生最终输出。\n", + "这种设计被称为*多头注意力*(multihead attention)\n", + " :cite:`Vaswani.Shazeer.Parmar.ea.2017`。\n", + "对于$h$个注意力汇聚输出,每一个注意力汇聚都被称作一个*头*(head)。\n", + " :numref:`fig_multi-head-attention`\n", + "展示了使用全连接层来实现可学习的线性变换的多头注意力。\n", + "\n", + "![多头注意力:多个头连结然后线性变换](../img/multi-head-attention.svg)\n", + ":label:`fig_multi-head-attention`\n", + "\n", + "## 模型\n", + "\n", + "在实现多头注意力之前,让我们用数学语言将这个模型形式化地描述出来。\n", + "给定查询$\\mathbf{q} \\in \\mathbb{R}^{d_q}$、\n", + "键$\\mathbf{k} \\in \\mathbb{R}^{d_k}$和\n", + "值$\\mathbf{v} \\in \\mathbb{R}^{d_v}$,\n", + "每个注意力头$\\mathbf{h}_i$($i = 1, \\ldots, h$)的计算方法为:\n", + "\n", + "$$\\mathbf{h}_i = f(\\mathbf W_i^{(q)}\\mathbf q, \\mathbf W_i^{(k)}\\mathbf k,\\mathbf W_i^{(v)}\\mathbf v) \\in \\mathbb R^{p_v},$$\n", + "\n", + "其中,可学习的参数包括\n", + "$\\mathbf W_i^{(q)}\\in\\mathbb R^{p_q\\times d_q}$、\n", + "$\\mathbf W_i^{(k)}\\in\\mathbb R^{p_k\\times d_k}$和\n", + "$\\mathbf W_i^{(v)}\\in\\mathbb R^{p_v\\times d_v}$,\n", + "以及代表注意力汇聚的函数$f$。\n", + "$f$可以是 :numref:`sec_attention-scoring-functions`中的\n", + "加性注意力和缩放点积注意力。\n", + "多头注意力的输出需要经过另一个线性转换,\n", + "它对应着$h$个头连结后的结果,因此其可学习参数是\n", + "$\\mathbf W_o\\in\\mathbb R^{p_o\\times h p_v}$:\n", + "\n", + "$$\\mathbf W_o \\begin{bmatrix}\\mathbf h_1\\\\\\vdots\\\\\\mathbf h_h\\end{bmatrix} \\in \\mathbb{R}^{p_o}.$$\n", + "\n", + "基于这种设计,每个头都可能会关注输入的不同部分,\n", + "可以表示比简单加权平均值更复杂的函数。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import math\n", + "import torch\n", + 
"from torch import nn\n", + "from d2l import torch as d2l" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "## 实现\n", + "\n", + "在实现过程中,我们[**选择缩放点积注意力作为每一个注意力头**]。\n", + "为了避免计算代价和参数代价的大幅增长,\n", + "我们设定$p_q = p_k = p_v = p_o / h$。\n", + "值得注意的是,如果我们将查询、键和值的线性变换的输出数量设置为\n", + "$p_q h = p_k h = p_v h = p_o$,\n", + "则可以并行计算$h$个头。\n", + "在下面的实现中,$p_o$是通过参数`num_hiddens`指定的。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "class MultiHeadAttention(nn.Module):\n", + " \"\"\"多头注意力\"\"\"\n", + " def __init__(self, \n", + " key_size, \n", + " query_size, \n", + " value_size, \n", + " num_hiddens,\n", + " num_heads, \n", + " dropout, bias=False, **kwargs):\n", + " super(MultiHeadAttention, self).__init__(**kwargs)\n", + " self.num_heads = num_heads # 多少个头\n", + " self.attention = d2l.DotProductAttention(dropout)\n", + " self.W_q = nn.Linear(query_size, num_hiddens, bias=bias)\n", + " self.W_k = nn.Linear(key_size, num_hiddens, bias=bias)\n", + " self.W_v = nn.Linear(value_size, num_hiddens, bias=bias)\n", + " self.W_o = nn.Linear(num_hiddens, num_hiddens, bias=bias)\n", + "\n", + " def forward(self, \n", + " queries, \n", + " keys, \n", + " values, \n", + " valid_lens):\n", + " # queries,keys,values的形状:\n", + " # (batch_size,查询或者“键-值”对的个数,num_hiddens)\n", + " \n", + " # valid_lens 的形状:\n", + " # (batch_size,)或(batch_size,查询的个数)\n", + " \n", + " # 经过变换后,输出的queries,keys,values 的形状:\n", + " # (batch_size*num_heads,查询或者“键-值”对的个数,\n", + " # num_hiddens/num_heads)\n", + " queries = transpose_qkv(self.W_q(queries), self.num_heads)\n", + " keys = transpose_qkv(self.W_k(keys), self.num_heads)\n", + " values = transpose_qkv(self.W_v(values), self.num_heads)\n", + "\n", + " if valid_lens is not None:\n", + " # 在轴0,将第一项(标量或者矢量)复制num_heads次,\n", + " # 然后如此复制第二项,然后诸如此类。\n", + " valid_lens = 
torch.repeat_interleave(\n", + " valid_lens, repeats=self.num_heads, dim=0)\n", + "\n", + " # output的形状:(batch_size*num_heads,查询的个数,\n", + " # num_hiddens/num_heads)\n", + " output = self.attention(queries, keys, values, valid_lens)\n", + "\n", + " # output_concat的形状:(batch_size,查询的个数,num_hiddens)\n", + " output_concat = transpose_output(output, self.num_heads)\n", + " return self.W_o(output_concat)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "为了能够[**使多个头并行计算**],\n", + "上面的`MultiHeadAttention`类将使用下面定义的两个转置函数。\n", + "具体来说,`transpose_output`函数反转了`transpose_qkv`函数的操作。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 10, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "def transpose_qkv(X, num_heads):\n", + " \"\"\"为了多注意力头的并行计算而变换形状\"\"\"\n", + " # 输入X的形状:(batch_size,查询或者“键-值”对的个数,num_hiddens)\n", + " # 输出X的形状:(batch_size,查询或者“键-值”对的个数,num_heads,num_hiddens/num_heads)\n", + " X = X.reshape(X.shape[0], X.shape[1], num_heads, -1)\n", + "\n", + " # 输出X的形状:(batch_size,num_heads,查询或者“键-值”对的个数,\n", + " # num_hiddens/num_heads)\n", + " X = X.permute(0, 2, 1, 3)\n", + "\n", + " # 最终输出的形状:(batch_size*num_heads,查询或者“键-值”对的个数,\n", + " # num_hiddens/num_heads)\n", + " return X.reshape(-1, X.shape[2], X.shape[3])\n", + "\n", + "\n", + "#@save\n", + "def transpose_output(X, num_heads):\n", + " \"\"\"逆转transpose_qkv函数的操作\"\"\"\n", + " # 输入形状:(batch_size*num_heads,查询的个数,num_hiddens/num_heads)\n", + " # 输出形状:(batch_size,查询的个数,num_hiddens)\n", + " # (batch_size,num_heads,查询的个数,num_hiddens/num_heads)\n", + " X = X.reshape(-1, num_heads, X.shape[1], X.shape[2])\n", + " # (batch_size,查询的个数,num_heads,num_hiddens/num_heads)\n", + " X = X.permute(0, 2, 1, 3)\n", + " # (batch_size,查询的个数,num_hiddens)\n", + " return X.reshape(X.shape[0], X.shape[1], -1)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 12 + }, + "source": [ + 
"下面我们使用键和值相同的小例子来[**测试**]我们编写的`MultiHeadAttention`类。\n", + "多头注意力输出的形状是(`batch_size`,`num_queries`,`num_hiddens`)。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 14, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "MultiHeadAttention(\n", + " (attention): DotProductAttention(\n", + " (dropout): Dropout(p=0.5, inplace=False)\n", + " )\n", + " (W_q): Linear(in_features=100, out_features=100, bias=False)\n", + " (W_k): Linear(in_features=100, out_features=100, bias=False)\n", + " (W_v): Linear(in_features=100, out_features=100, bias=False)\n", + " (W_o): Linear(in_features=100, out_features=100, bias=False)\n", + ")" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "num_hiddens, num_heads = 100, 5\n", + "attention = MultiHeadAttention(num_hiddens, num_hiddens, num_hiddens,\n", + " num_hiddens, num_heads, 0.5)\n", + "attention.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 16, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 4, 100])" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "batch_size, num_queries = 2, 4\n", + "num_kvpairs, valid_lens = 6, torch.tensor([3, 2])\n", + "X = torch.ones((batch_size, num_queries, num_hiddens))\n", + "Y = torch.ones((batch_size, num_kvpairs, num_hiddens))\n", + "attention(X, Y, Y, valid_lens).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 18 + }, + "source": [ + "## 小结\n", + "\n", + "* 多头注意力融合了来自于多个注意力汇聚的不同知识,这些知识的不同来源于相同的查询、键和值的不同的子空间表示。\n", + "* 基于适当的张量操作,可以实现多头注意力的并行计算。\n", + "\n", + "## 练习\n", + "\n", + "1. 分别可视化这个实验中的多个头的注意力权重。\n", + "1. 
假设我们有一个完成训练的基于多头注意力的模型,现在希望修剪最不重要的注意力头以提高预测速度。如何设计实验来衡量注意力头的重要性呢?\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 20, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/5758)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.2" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/code/68-Transforemer/transformer.ipynb b/code/68-Transforemer/transformer.ipynb new file mode 100644 index 0000000..21d14dc --- /dev/null +++ b/code/68-Transforemer/transformer.ipynb @@ -0,0 +1,6155 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 0 + }, + "source": [ + "# Transformer\n", + ":label:`sec_transformer`\n", + "\n", + "我们在 :numref:`subsec_cnn-rnn-self-attention`中比较了卷积神经网络(CNN)、循环神经网络(RNN)和自注意力(self-attention)。值得注意的是,自注意力同时具有并行计算和最短的最大路径长度这两个优势。因此,使用自注意力来设计深度架构是很有吸引力的。对比之前仍然依赖循环神经网络实现输入表示的自注意力模型 :cite:`Cheng.Dong.Lapata.2016,Lin.Feng.Santos.ea.2017,Paulus.Xiong.Socher.2017`,transformer模型完全基于注意力机制,没有任何卷积层或循环神经网络层 :cite:`Vaswani.Shazeer.Parmar.ea.2017`。尽管transformer最初是应用于在文本数据上的序列到序列学习,但现在已经推广到各种现代的深度学习中,例如语言、视觉、语音和强化学习领域。\n", + "\n", + "## 模型\n", + "\n", + "Transformer作为编码器-解码器架构的一个实例,其整体架构图在 :numref:`fig_transformer`中展示。正如所见到的,transformer是由编码器和解码器组成的。与 :numref:`fig_s2s_attention_details`中基于Bahdanau注意力实现的序列到序列的学习相比,transformer的编码器和解码器是基于自注意力的模块叠加而成的,源(输入)序列和目标(输出)序列的*嵌入*(embedding)表示将加上*位置编码*(positional encoding),再分别输入到编码器和解码器中。\n", + "\n", + "![transformer架构](../img/transformer.svg)\n", + ":width:`500px`\n", + ":label:`fig_transformer`\n", + "\n", + "图 
:numref:`fig_transformer`中概述了transformer的架构。从宏观角度来看,transformer的编码器是由多个相同的层叠加而成的,每个层都有两个子层(子层表示为$\\mathrm{sublayer}$)。第一个子层是*多头自注意力*(multi-head self-attention)汇聚;第二个子层是*基于位置的前馈网络*(positionwise feed-forward network)。具体来说,在计算编码器的自注意力时,查询、键和值都来自前一个编码器层的输出。受 :numref:`sec_resnet`中残差网络的启发,每个子层都采用了*残差连接*(residual connection)。在transformer中,对于序列中任何位置的任何输入$\\mathbf{x} \\in \\mathbb{R}^d$,都要求满足$\\mathrm{sublayer}(\\mathbf{x}) \\in \\mathbb{R}^d$,以便残差连接满足$\\mathbf{x} + \\mathrm{sublayer}(\\mathbf{x}) \\in \\mathbb{R}^d$。在残差连接的加法计算之后,紧接着应用*层规范化*(layer normalization) :cite:`Ba.Kiros.Hinton.2016`。因此,输入序列对应的每个位置,transformer编码器都将输出一个$d$维表示向量。\n", + "\n", + "Transformer解码器也是由多个相同的层叠加而成的,并且层中使用了残差连接和层规范化。除了编码器中描述的两个子层之外,解码器还在这两个子层之间插入了第三个子层,称为*编码器-解码器注意力*(encoder-decoder attention)层。在编码器-解码器注意力中,查询来自前一个解码器层的输出,而键和值来自整个编码器的输出。在解码器自注意力中,查询、键和值都来自上一个解码器层的输出。但是,解码器中的每个位置只能考虑该位置之前的所有位置。这种*掩蔽*(masked)注意力保留了*自回归*(auto-regressive)属性,确保预测仅依赖于已生成的输出词元。\n", + "\n", + "我们已经描述并实现了基于缩放点积多头注意力 :numref:`sec_multihead-attention`和位置编码 :numref:`subsec_positional-encoding`。接下来,我们将实现transformer模型的剩余部分。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "origin_pos": 2, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "import math\n", + "import pandas as pd\n", + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 4 + }, + "source": [ + "## [**基于位置的前馈网络**]\n", + "\n", + "基于位置的前馈网络对序列中的所有位置的表示进行变换时使用的是同一个多层感知机(MLP),这就是称前馈网络是*基于位置的*(positionwise)的原因。在下面的实现中,输入`X`的形状(批量大小,时间步数或序列长度,隐单元数或特征维度)将被一个两层的感知机转换成形状为(批量大小,时间步数,`ffn_num_outputs`)的输出张量。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "origin_pos": 6, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "class PositionWiseFFN(nn.Module):\n", + " \"\"\"基于位置的前馈网络\"\"\"\n", + " # 说白了就是两个全连接层,是一个mlp\n", + " # 
对于pytorch来说,如果是多个维度,前面的都算是样本维,只有最后的时feature维度\n", + " def __init__(self, ffn_num_input, ffn_num_hiddens, ffn_num_outputs,\n", + " **kwargs):\n", + " super(PositionWiseFFN, self).__init__(**kwargs)\n", + " self.dense1 = nn.Linear(ffn_num_input, ffn_num_hiddens)\n", + " self.relu = nn.ReLU()\n", + " self.dense2 = nn.Linear(ffn_num_hiddens, ffn_num_outputs)\n", + "\n", + " def forward(self, X):\n", + " return self.dense2(self.relu(self.dense1(X)))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 8 + }, + "source": [ + "下面的例子显示,[**改变张量的最里层维度的尺寸**],会改变成基于位置的前馈网络的输出尺寸。因为用同一个多层感知机对所有位置上的输入进行变换,所以当所有这些位置的输入相同时,它们的输出也是相同的。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "origin_pos": 10, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[-0.7652, -0.1718, 0.1267, -0.4058, -0.0321, -0.1480, -0.6325, 0.4314],\n", + " [-0.7652, -0.1718, 0.1267, -0.4058, -0.0321, -0.1480, -0.6325, 0.4314],\n", + " [-0.7652, -0.1718, 0.1267, -0.4058, -0.0321, -0.1480, -0.6325, 0.4314]],\n", + " grad_fn=)" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ffn = PositionWiseFFN(4, 4, 8)\n", + "ffn.eval()\n", + "ffn(torch.ones((2, 3, 4)))[0]\n", + "# 最后的4会变成8,后面的维度会发生变化" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 12 + }, + "source": [ + "## 残差连接和层规范化\n", + "\n", + "现在让我们关注 :numref:`fig_transformer`中的“*加法和规范化*(add&norm)”组件。正如在本节开头所述,这是由残差连接和紧随其后的层规范化组成的。两者都是构建有效的深度架构的关键。\n", + "\n", + "在 :numref:`sec_batch_norm`中,我们解释了在一个小批量的样本内基于批量规范化对数据进行重新中心化和重新缩放的调整。层规范化和批量规范化的目标相同,但层规范化是基于特征维度进规范化。尽管批量规范化在计算机视觉中被广泛应用,但在自然语言处理任务中(输入通常是变长序列)批量规范化通常不如层规范化的效果好。\n", + "\n", + "以下代码[**对比不同维度的层规范化和批量规范化的效果**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "origin_pos": 14, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "layer norm: 
tensor([[-1.0000, 1.0000],\n", + " [-1.0000, 1.0000]], grad_fn=) \n", + "batch norm: tensor([[-1.0000, -1.0000],\n", + " [ 1.0000, 1.0000]], grad_fn=)\n" + ] + } + ], + "source": [ + "ln = nn.LayerNorm(2)\n", + "bn = nn.BatchNorm1d(2)\n", + "X = torch.tensor([[1, 2], [2, 3]], dtype=torch.float32)\n", + "# 在训练模式下计算X的均值和方差\n", + "print('layer norm:', ln(X), '\\nbatch norm:', bn(X))\n", + "'''\n", + "layernorm 每个样本变成均值为0,方差为1\n", + "batchnorm 每个特征变成均值为0,方差为1\n", + "'''" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 16 + }, + "source": [ + "现在我们可以[**使用残差连接和层规范化**]来实现`AddNorm`类。暂退法也被作为正则化方法使用。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "origin_pos": 18, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "class AddNorm(nn.Module):\n", + " \"\"\"残差连接后进行层规范化\"\"\"\n", + " def __init__(self, normalized_shape, dropout, **kwargs):\n", + " super(AddNorm, self).__init__(**kwargs)\n", + " self.dropout = nn.Dropout(dropout)\n", + " self.ln = nn.LayerNorm(normalized_shape)\n", + "\n", + " def forward(self, X, Y):\n", + " '''\n", + " @para X:原来的输入\n", + " @para y:输出\n", + " '''\n", + " return self.ln(self.dropout(Y) + X)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 20 + }, + "source": [ + "残差连接要求两个输入的形状相同,以便[**加法操作后输出张量的形状相同**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "origin_pos": 22, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 3, 4])" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# 残差连接要求两个输入的形状相同\n", + "add_norm = AddNorm([3, 4], 0.5)\n", + "add_norm.eval()\n", + "add_norm(torch.ones((2, 3, 4)), torch.ones((2, 3, 4))).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 24 + }, + "source": [ + "## 编码器\n", + "\n", + 
"有了组成transformer编码器的基础组件,现在可以先[**实现编码器中的一个层**]。下面的`EncoderBlock`类包含两个子层:多头自注意力和基于位置的前馈网络,这两个子层都使用了残差连接和紧随的层规范化。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "origin_pos": 26, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "class EncoderBlock(nn.Module):\n", + " \"\"\"transformer编码器块\"\"\"\n", + " def __init__(self, key_size, query_size, value_size, num_hiddens,\n", + " norm_shape, ffn_num_input, ffn_num_hiddens, num_heads,\n", + " dropout, use_bias=False, **kwargs):\n", + " super(EncoderBlock, self).__init__(**kwargs)\n", + " # 多头注意力\n", + " self.attention = d2l.MultiHeadAttention(\n", + " key_size, query_size, value_size, num_hiddens, num_heads, dropout,\n", + " use_bias)\n", + " # addnorm\n", + " self.addnorm1 = AddNorm(norm_shape, dropout)\n", + " # ffn\n", + " self.ffn = PositionWiseFFN(\n", + " ffn_num_input, ffn_num_hiddens, num_hiddens)\n", + " # addnorm\n", + " self.addnorm2 = AddNorm(norm_shape, dropout)\n", + "\n", + " def forward(self, X, valid_lens):\n", + " '''\n", + " encoder一层,包括一个多头注意力,一个ffn,两个add&norm\n", + " '''\n", + " Y = self.addnorm1(\n", + " X, \n", + " self.attention(X, X, X, valid_lens))\n", + " return self.addnorm2(Y, self.ffn(Y))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 28 + }, + "source": [ + "正如我们所看到的,[**transformer编码器中的任何层都不会改变其输入的形状**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "origin_pos": 30, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 100, 24])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# batchsize为2,sequence长度为100,维度为24.输入是他,输出之后,维度没有变化\n", + "X = torch.ones((2, 100, 24))\n", + "valid_lens = torch.tensor([3, 2])\n", + "encoder_blk = EncoderBlock(24, 24, 24, 24, [100, 24], 24, 48, 8, 0.5)\n", + "encoder_blk.eval()\n", + "encoder_blk(X, valid_lens).shape" + ] + }, + { + 
"cell_type": "markdown", + "metadata": { + "origin_pos": 32 + }, + "source": [ + "在实现下面的[**transformer编码器**]的代码中,我们堆叠了`num_layers`个`EncoderBlock`类的实例。由于我们使用的是值范围在$-1$和$1$之间的固定位置编码,因此通过学习得到的输入的嵌入表示的值需要先乘以嵌入维度的平方根进行重新缩放,然后再与位置编码相加。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "origin_pos": 34, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "#@save\n", + "class TransformerEncoder(d2l.Encoder):\n", + " \"\"\"transformer编码器\"\"\"\n", + " def __init__(self, vocab_size, key_size, query_size, value_size,\n", + " num_hiddens, norm_shape, ffn_num_input, ffn_num_hiddens,\n", + " num_heads, num_layers, dropout, use_bias=False, **kwargs):\n", + " super(TransformerEncoder, self).__init__(**kwargs)\n", + " self.num_hiddens = num_hiddens\n", + " self.embedding = nn.Embedding(vocab_size, num_hiddens)\n", + " self.pos_encoding = d2l.PositionalEncoding(num_hiddens, dropout)\n", + " self.blks = nn.Sequential() #多层网络\n", + " for i in range(num_layers):\n", + " self.blks.add_module(\"block\"+str(i),\n", + " EncoderBlock(key_size, query_size, value_size, num_hiddens,\n", + " norm_shape, ffn_num_input, ffn_num_hiddens,\n", + " num_heads, dropout, use_bias))\n", + "\n", + " def forward(self, X, valid_lens, *args):\n", + " # 因为位置编码值在-1和1之间,\n", + " # 因此嵌入值乘以嵌入维度的平方根进行缩放,\n", + " # 然后再与位置编码相加。\n", + " X = self.pos_encoding(\n", + " self.embedding(X) * math.sqrt(self.num_hiddens)) # 控制大小\n", + " self.attention_weights = [None] * len(self.blks)\n", + " # 输入到n层encoder\n", + " for i, blk in enumerate(self.blks):\n", + " X = blk(X, valid_lens)\n", + " self.attention_weights[\n", + " i] = blk.attention.attention.attention_weights\n", + " return X" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 36 + }, + "source": [ + "下面我们指定了超参数来[**创建一个两层的transformer编码器**]。\n", + "Transformer编码器输出的形状是(批量大小,时间步数目,`num_hiddens`)。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "origin_pos": 38, + "tab": [ 
+ "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 100, 24])" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "encoder = TransformerEncoder(\n", + " 200, 24, 24, 24, 24, [100, 24], 24, 48, 8, 2, 0.5)\n", + "encoder.eval()\n", + "encoder(torch.ones((2, 100), dtype=torch.long), valid_lens).shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 40 + }, + "source": [ + "## 解码器\n", + "\n", + "如 :numref:`fig_transformer`所示,[**transformer解码器也是由多个相同的层组成**]。在`DecoderBlock`类中实现的每个层包含了三个子层:解码器自注意力、“编码器-解码器”注意力和基于位置的前馈网络。这些子层也都被残差连接和紧随的层规范化围绕。\n", + "\n", + "正如在本节前面所述,在掩蔽多头解码器自注意力层(第一个子层)中,查询、键和值都来自上一个解码器层的输出。关于*序列到序列模型*(sequence-to-sequence model),在训练阶段,其输出序列的所有位置(时间步)的词元都是已知的;然而,在预测阶段,其输出序列的词元是逐个生成的。因此,在任何解码器时间步中,只有生成的词元才能用于解码器的自注意力计算中。为了在解码器中保留自回归的属性,其掩蔽自注意力设定了参数`dec_valid_lens`,以便任何查询都只会与解码器中所有已经生成词元的位置(即直到该查询位置为止)进行注意力计算。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "origin_pos": 42, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "class DecoderBlock(nn.Module):\n", + " \"\"\"解码器中第i个块\"\"\"\n", + " def __init__(self, key_size, query_size, value_size, num_hiddens,\n", + " norm_shape, ffn_num_input, ffn_num_hiddens, num_heads,\n", + " dropout, i, **kwargs):\n", + " super(DecoderBlock, self).__init__(**kwargs)\n", + " self.i = i #记下i\n", + " \n", + " self.attention1 = d2l.MultiHeadAttention(\n", + " key_size, query_size, value_size, num_hiddens, num_heads, dropout)\n", + " self.addnorm1 = AddNorm(norm_shape, dropout)\n", + " \n", + " self.attention2 = d2l.MultiHeadAttention(\n", + " key_size, query_size, value_size, num_hiddens, num_heads, dropout)\n", + " self.addnorm2 = AddNorm(norm_shape, dropout)\n", + " \n", + " self.ffn = PositionWiseFFN(ffn_num_input, ffn_num_hiddens,\n", + " num_hiddens)\n", + " self.addnorm3 = AddNorm(norm_shape, dropout)\n", + "\n", + " def forward(self, X, state):\n", + " 
enc_outputs, enc_valid_lens = state[0], state[1]\n", + " # 训练阶段,输出序列的所有词元都在同一时间处理,\n", + " # 因此state[2][self.i]初始化为None。\n", + " # 预测阶段,输出序列是通过词元一个接着一个解码的,\n", + " # 因此state[2][self.i]包含着直到当前时间步第i个块解码的输出表示\n", + " if state[2][self.i] is None:\n", + " key_values = X #train\n", + " else:\n", + " key_values = torch.cat((state[2][self.i], X), axis=1) #prediction,存前面的输出\n", + " \n", + " state[2][self.i] = key_values\n", + " \n", + " if self.training: # train的时候把后面的输出遮掉\n", + " batch_size, num_steps, _ = X.shape\n", + " # dec_valid_lens的开头:(batch_size,num_steps),\n", + " # 其中每一行是[1,2,...,num_steps]\n", + " dec_valid_lens = torch.arange(\n", + " 1, num_steps + 1, device=X.device).repeat(batch_size, 1)\n", + " else:\n", + " dec_valid_lens = None\n", + "\n", + " # 自注意力\n", + " X2 = self.attention1(X, key_values, key_values, dec_valid_lens)\n", + " Y = self.addnorm1(X, X2)\n", + " # 编码器-解码器注意力。\n", + " # enc_outputs的开头:(batch_size,num_steps,num_hiddens)\n", + " Y2 = self.attention2(Y, enc_outputs, enc_outputs, enc_valid_lens)\n", + " Z = self.addnorm2(Y, Y2)\n", + " \n", + " return self.addnorm3(Z, self.ffn(Z)), state" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 44 + }, + "source": [ + "为了便于在“编码器-解码器”注意力中进行缩放点积计算和残差连接中进行加法计算,[**编码器和解码器的特征维度都是`num_hiddens`。**]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "origin_pos": 46, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 100, 24])" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "decoder_blk = DecoderBlock(24, 24, 24, 24, [100, 24], 24, 48, 8, 0.5, 0)\n", + "decoder_blk.eval()\n", + "X = torch.ones((2, 100, 24))\n", + "state = [encoder_blk(X, valid_lens), valid_lens, [None]]\n", + "decoder_blk(X, state)[0].shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 48 + }, + "source": [ + 
"现在我们构建了由`num_layers`个`DecoderBlock`实例组成的完整的[**transformer解码器**]。最后,通过一个全连接层计算所有`vocab_size`个可能的输出词元的预测值。解码器的自注意力权重和编码器解码器注意力权重都被存储下来,方便日后可视化的需要。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "origin_pos": 50, + "tab": [ + "pytorch" + ] + }, + "outputs": [], + "source": [ + "class TransformerDecoder(d2l.AttentionDecoder):\n", + " '''\n", + " 与encoder一样\n", + " '''\n", + " def __init__(self, vocab_size, key_size, query_size, value_size,\n", + " num_hiddens, norm_shape, ffn_num_input, ffn_num_hiddens,\n", + " num_heads, num_layers, dropout, **kwargs):\n", + " super(TransformerDecoder, self).__init__(**kwargs)\n", + " self.num_hiddens = num_hiddens\n", + " self.num_layers = num_layers\n", + " self.embedding = nn.Embedding(vocab_size, num_hiddens)\n", + " self.pos_encoding = d2l.PositionalEncoding(num_hiddens, dropout)\n", + " self.blks = nn.Sequential()\n", + " for i in range(num_layers):\n", + " self.blks.add_module(\"block\"+str(i),\n", + " DecoderBlock(key_size, query_size, value_size, num_hiddens,\n", + " norm_shape, ffn_num_input, ffn_num_hiddens,\n", + " num_heads, dropout, i))\n", + " # 输出,做英语法语翻译\n", + " self.dense = nn.Linear(num_hiddens, vocab_size)\n", + "\n", + " def init_state(self, enc_outputs, enc_valid_lens, *args):\n", + " '''\n", + " 存状态,最后一维是预测的时候用来存输出用的\n", + " '''\n", + " return [enc_outputs, enc_valid_lens, [None] * self.num_layers]\n", + "\n", + " def forward(self, X, state):\n", + " \n", + " X = self.pos_encoding(self.embedding(X) * math.sqrt(self.num_hiddens))\n", + " \n", + " self._attention_weights = [[None] * len(self.blks) for _ in range (2)]\n", + " \n", + " for i, blk in enumerate(self.blks):\n", + " X, state = blk(X, state)\n", + " # 解码器自注意力权重\n", + " self._attention_weights[0][\n", + " i] = blk.attention1.attention.attention_weights\n", + " # “编码器-解码器”自注意力权重\n", + " self._attention_weights[1][\n", + " i] = blk.attention2.attention.attention_weights\n", + " return self.dense(X), state\n", + "\n", + " 
@property\n", + " def attention_weights(self):\n", + " return self._attention_weights" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 52 + }, + "source": [ + "## [**训练**]\n", + "\n", + "依照transformer架构来实例化编码器-解码器模型。在这里,指定transformer的编码器和解码器都是2层,都使用4头注意力。与 :numref:`sec_seq2seq_training`类似,为了进行序列到序列的学习,我们在“英语-法语”机器翻译数据集上训练transformer模型。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "origin_pos": 54, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "loss 0.033, 5089.5 tokens/sec on cuda:0\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T21:38:44.026110\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "num_hiddens, num_layers, dropout, batch_size, num_steps = 32, 2, 0.1, 64, 10 #2层\n", + "\n", + "lr, num_epochs, device = 0.005, 200, d2l.try_gpu()\n", + "\n", + "ffn_num_input, ffn_num_hiddens, num_heads = 32, 64, 4 #4头\n", + "\n", + "key_size, query_size, value_size = 32, 32, 32 #kqv是32,bert是1024\n", + "norm_shape = [32]\n", + "\n", + "train_iter, src_vocab, tgt_vocab = d2l.load_data_nmt(batch_size, num_steps)\n", + "\n", + "encoder = TransformerEncoder(\n", + " len(src_vocab), key_size, query_size, value_size, num_hiddens,\n", + " norm_shape, ffn_num_input, ffn_num_hiddens, num_heads,\n", + " num_layers, dropout)\n", + "decoder = TransformerDecoder(\n", + " len(tgt_vocab), key_size, query_size, value_size, num_hiddens,\n", + " norm_shape, ffn_num_input, ffn_num_hiddens, num_heads,\n", + " num_layers, dropout)\n", + "net = d2l.EncoderDecoder(encoder, decoder)\n", + "d2l.train_seq2seq(net, train_iter, lr, num_epochs, tgt_vocab, device)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + 
"origin_pos": 56 + }, + "source": [ + "训练结束后,使用transformer模型[**将一些英语句子翻译成法语**],并且计算它们的BLEU分数。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "origin_pos": 57, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "go . => va !, bleu 1.000\n", + "i lost . => j'ai perdu ., bleu 1.000\n", + "he's calm . => il est calme ., bleu 1.000\n", + "i'm home . => je suis chez moi ., bleu 1.000\n" + ] + } + ], + "source": [ + "engs = ['go .', \"i lost .\", 'he\\'s calm .', 'i\\'m home .']\n", + "fras = ['va !', 'j\\'ai perdu .', 'il est calme .', 'je suis chez moi .']\n", + "for eng, fra in zip(engs, fras):\n", + " translation, dec_attention_weight_seq = d2l.predict_seq2seq(\n", + " net, eng, src_vocab, tgt_vocab, num_steps, device, True)\n", + " print(f'{eng} => {translation}, ',\n", + " f'bleu {d2l.bleu(translation, fra, k=2):.3f}')\n", + "# 现在bleu分数好像不错,都是1了" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 59 + }, + "source": [ + "当进行最后一个英语到法语的句子翻译工作时,让我们[**可视化transformer的注意力权重**]。编码器自注意力权重的形状为(编码器层数,注意力头数,`num_steps`或查询的数目,`num_steps`或“键-值”对的数目)。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "origin_pos": 60, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 4, 10, 10])" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "enc_attention_weights = torch.cat(net.encoder.attention_weights, 0).reshape((num_layers, num_heads,\n", + " -1, num_steps))\n", + "enc_attention_weights.shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 61 + }, + "source": [ + "在编码器的自注意力中,查询和键都来自相同的输入序列。因为填充词元是不携带信息的,因此通过指定输入序列的有效长度可以避免查询与使用填充词元的位置计算注意力。接下来,将逐行呈现两层多头注意力的权重。每个注意力头都根据查询、键和值的不同的表示子空间来表示不同的注意力。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "origin_pos": 63, + "tab": [ + 
"pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T21:38:44.772527\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "d2l.show_heatmaps(\n", + " enc_attention_weights.cpu(), xlabel='Key positions',\n", + " ylabel='Query positions', titles=['Head %d' % i for i in range(1, 5)],\n", + " figsize=(7, 3.5))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 64 + }, + "source": [ + "[**为了可视化解码器的自注意力权重和“编码器-解码器”的注意力权重,我们需要完成更多的数据操作工作。**]例如,我们用零填充被掩蔽住的注意力权重。值得注意的是,解码器的自注意力权重和“编码器-解码器”的注意力权重都有相同的查询:即以*序列开始词元*(beginning-of-sequence,BOS)打头,再与后续输出的词元共同组成序列。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "origin_pos": 66, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(torch.Size([2, 4, 6, 10]), torch.Size([2, 4, 6, 10]))" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dec_attention_weights_2d = [head[0].tolist()\n", + " for step in dec_attention_weight_seq\n", + " for attn in step for blk in attn for head in blk]\n", + "dec_attention_weights_filled = torch.tensor(\n", + " pd.DataFrame(dec_attention_weights_2d).fillna(0.0).values)\n", + "dec_attention_weights = dec_attention_weights_filled.reshape((-1, 2, num_layers, num_heads, num_steps))\n", + "dec_self_attention_weights, dec_inter_attention_weights = \\\n", + " dec_attention_weights.permute(1, 2, 3, 0, 4)\n", + "dec_self_attention_weights.shape, dec_inter_attention_weights.shape" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 68 + }, + "source": [ + "由于解码器自注意力的自回归属性,查询不会对当前位置之后的“键-值”对进行注意力计算。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "origin_pos": 69, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + 
"\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T21:38:45.555224\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# Plusonetoincludethebeginning-of-sequencetoken\n", + "d2l.show_heatmaps(\n", + " dec_self_attention_weights[:, :, :, :len(translation.split()) + 1],\n", + " xlabel='Key positions', ylabel='Query positions',\n", + " titles=['Head %d' % i for i in range(1, 5)], figsize=(7, 3.5))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 70 + }, + "source": [ + "与编码器的自注意力的情况类似,通过指定输入序列的有效长度,[**输出序列的查询不会与输入序列中填充位置的词元进行注意力计算**]。\n" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "origin_pos": 71, + "tab": [ + "pytorch" + ] + }, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2021-12-08T21:38:46.348556\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.3.3, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " 
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "d2l.show_heatmaps(\n", + " dec_inter_attention_weights, xlabel='Key positions',\n", + " ylabel='Query positions', titles=['Head %d' % i for i in range(1, 5)],\n", + " figsize=(7, 3.5))" + ] + }, + { + 
"cell_type": "markdown", + "metadata": { + "origin_pos": 72 + }, + "source": [ + "尽管transformer架构是为了“序列到序列”的学习而提出的,但正如我们将在本书后面提及的那样,transformer编码器或transformer解码器通常被单独用于不同的深度学习任务中。\n", + "\n", + "## 小结\n", + "\n", + "* transformer是编码器-解码器架构的一个实践,尽管在实际情况中编码器或解码器可以单独使用。\n", + "* 在transformer中,多头自注意力用于表示输入序列和输出序列,不过解码器必须通过掩蔽机制来保留自回归属性。\n", + "* transformer中的残差连接和层规范化是训练非常深度模型的重要工具。\n", + "* transformer模型中基于位置的前馈网络使用同一个多层感知机,作用是对所有序列位置的表示进行转换。\n", + "\n", + "## 练习\n", + "\n", + "1. 在实验中训练更深的transformer将如何影响训练速度和翻译效果?\n", + "1. 在transformer中使用加性注意力取代缩放点积注意力是不是个好办法?为什么?\n", + "1. 对于语言模型,我们应该使用transformer的编码器还是解码器,或者两者都用?如何设计?\n", + "1. 如果输入序列很长,transformer会面临什么挑战?为什么?\n", + "1. 如何提高transformer的计算速度和内存使用效率?提示:可以参考论文 :cite:`Tay.Dehghani.Bahri.ea.2020`。\n", + "1. 如果不使用卷积神经网络,如何设计基于transformer模型的图像分类任务?提示:可以参考Vision Transformer :cite:`Dosovitskiy.Beyer.Kolesnikov.ea.2021`。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "origin_pos": 74, + "tab": [ + "pytorch" + ] + }, + "source": [ + "[Discussions](https://discuss.d2l.ai/t/5756)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.2" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git "a/code/70-BERT\345\276\256\350\260\203/BERT\345\276\256\350\260\203\344\273\243\347\240\201.ipynb" "b/code/70-BERT\345\276\256\350\260\203/BERT\345\276\256\350\260\203\344\273\243\347\240\201.ipynb" new file mode 100644 index 0000000..2f58555 --- /dev/null +++ "b/code/70-BERT\345\276\256\350\260\203/BERT\345\276\256\350\260\203\344\273\243\347\240\201.ipynb" @@ -0,0 +1,428 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": 
"https://localhost:8080/", + "height": 1000 + }, + "id": "2tyC5_SKKr5A", + "outputId": "08ca6bdd-5941-4854-f201-da233cef6714" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting d2l\n", + " Downloading d2l-0.17.3-py3-none-any.whl (82 kB)\n", + "\u001b[?25l\r\u001b[K |████ | 10 kB 25.1 MB/s eta 0:00:01\r\u001b[K |████████ | 20 kB 8.1 MB/s eta 0:00:01\r\u001b[K |████████████ | 30 kB 5.4 MB/s eta 0:00:01\r\u001b[K |████████████████ | 40 kB 5.1 MB/s eta 0:00:01\r\u001b[K |███████████████████▉ | 51 kB 2.3 MB/s eta 0:00:01\r\u001b[K |███████████████████████▉ | 61 kB 2.7 MB/s eta 0:00:01\r\u001b[K |███████████████████████████▉ | 71 kB 2.9 MB/s eta 0:00:01\r\u001b[K |███████████████████████████████▉| 81 kB 3.2 MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 82 kB 610 kB/s \n", + "\u001b[?25hRequirement already satisfied: jupyter==1.0.0 in /usr/local/lib/python3.7/dist-packages (from d2l) (1.0.0)\n", + "Collecting matplotlib==3.3.3\n", + " Downloading matplotlib-3.3.3-cp37-cp37m-manylinux1_x86_64.whl (11.6 MB)\n", + "\u001b[K |████████████████████████████████| 11.6 MB 10.5 MB/s \n", + "\u001b[?25hCollecting requests==2.25.1\n", + " Downloading requests-2.25.1-py2.py3-none-any.whl (61 kB)\n", + "\u001b[K |████████████████████████████████| 61 kB 9.4 MB/s \n", + "\u001b[?25hCollecting pandas==1.2.2\n", + " Downloading pandas-1.2.2-cp37-cp37m-manylinux1_x86_64.whl (9.9 MB)\n", + "\u001b[K |████████████████████████████████| 9.9 MB 22.1 MB/s \n", + "\u001b[?25hCollecting numpy==1.18.5\n", + " Downloading numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)\n", + "\u001b[K |████████████████████████████████| 20.1 MB 14.6 MB/s \n", + "\u001b[?25hRequirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.2.2)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (4.10.1)\n", + "Requirement already satisfied: 
jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.2.0)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.6.1)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.3.1)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (7.6.5)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (1.3.2)\n", + "Requirement already satisfied: pillow>=6.2.0 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (7.1.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (3.0.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (0.11.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (2.8.2)\n", + "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.7/dist-packages (from pandas==1.2.2->d2l) (2018.9)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (2021.10.8)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (2.10)\n", + "Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (3.0.4)\n", + "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (1.24.3)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib==3.3.3->d2l) (1.15.0)\n", + 
"Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.3.5)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.5.0)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.1.1)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.1.1)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (57.4.0)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.8.1)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (4.4.2)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (4.8.0)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.7.5)\n", + "Requirement already satisfied: pygments in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (2.6.1)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (1.0.18)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.2.5)\n", + "Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (0.2.0)\n", + 
"Requirement already satisfied: nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (5.1.3)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (3.5.2)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (1.0.2)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.9.1)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.3.3)\n", + "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (0.18.1)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (5.4.0)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (3.10.0.2)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.11.0)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (21.4.0)\n", + "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (3.7.0)\n", + "Requirement already satisfied: 
terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (0.13.1)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (1.8.0)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (2.11.3)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter==1.0.0->d2l) (22.3.0)\n", + "Requirement already satisfied: ptyprocess in /usr/local/lib/python3.7/dist-packages (from terminado>=0.8.1->notebook->jupyter==1.0.0->d2l) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter==1.0.0->d2l) (2.0.1)\n", + "Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.5.0)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (4.1.0)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.4)\n", + "Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.8.4)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (1.5.0)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.7.1)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter==1.0.0->d2l) (21.3)\n", + "Requirement already satisfied: webencodings in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter==1.0.0->d2l) (0.5.1)\n", + "Requirement already satisfied: 
qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter==1.0.0->d2l) (2.0.1)\n", + "Installing collected packages: numpy, requests, pandas, matplotlib, d2l\n", + " Attempting uninstall: numpy\n", + " Found existing installation: numpy 1.21.5\n", + " Uninstalling numpy-1.21.5:\n", + " Successfully uninstalled numpy-1.21.5\n", + " Attempting uninstall: requests\n", + " Found existing installation: requests 2.23.0\n", + " Uninstalling requests-2.23.0:\n", + " Successfully uninstalled requests-2.23.0\n", + " Attempting uninstall: pandas\n", + " Found existing installation: pandas 1.3.5\n", + " Uninstalling pandas-1.3.5:\n", + " Successfully uninstalled pandas-1.3.5\n", + " Attempting uninstall: matplotlib\n", + " Found existing installation: matplotlib 3.2.2\n", + " Uninstalling matplotlib-3.2.2:\n", + " Successfully uninstalled matplotlib-3.2.2\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "tensorflow 2.8.0 requires tf-estimator-nightly==2.8.0.dev2021122109, which is not installed.\n", + "tensorflow 2.8.0 requires numpy>=1.20, but you have numpy 1.18.5 which is incompatible.\n", + "tables 3.7.0 requires numpy>=1.19.0, but you have numpy 1.18.5 which is incompatible.\n", + "google-colab 1.0.0 requires requests~=2.23.0, but you have requests 2.25.1 which is incompatible.\n", + "datascience 0.10.6 requires folium==0.2.1, but you have folium 0.8.3 which is incompatible.\n", + "albumentations 0.1.12 requires imgaug<0.2.7,>=0.2.5, but you have imgaug 0.2.9 which is incompatible.\u001b[0m\n", + "Successfully installed d2l-0.17.3 matplotlib-3.3.3 numpy-1.18.5 pandas-1.2.2 requests-2.25.1\n" + ] + }, + { + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "matplotlib", + "mpl_toolkits", + "numpy", + "pandas" + ] + } + } + }, + "metadata": {}, + "output_type": 
"display_data" + } + ], + "source": [ + "!pip install d2l\n", + "import json\n", + "import multiprocessing\n", + "import os\n", + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "OS1VsMq-LgtH" + }, + "outputs": [], + "source": [ + "d2l.DATA_HUB['bert.base'] = (d2l.DATA_URL + 'bert.base.torch.zip',\n", + " '225d66f04cae318b841a13d32af3acc165f253ac')\n", + "d2l.DATA_HUB['bert.small'] = (d2l.DATA_URL + 'bert.small.torch.zip',\n", + " 'c72329e68a732bef0452e4b96a1c341c8910f81f')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "hq9CVgnnLjnc" + }, + "outputs": [], + "source": [ + "def load_pretrained_model(pretrained_model, num_hiddens, ffn_num_hiddens,\n", + " num_heads, num_layers, dropout, max_len, devices):\n", + " data_dir = d2l.download_extract(pretrained_model)\n", + " # 定义空词表以加载预定义词表\n", + " vocab = d2l.Vocab()\n", + " vocab.idx_to_token = json.load(open(os.path.join(data_dir,\n", + " 'vocab.json')))\n", + " vocab.token_to_idx = {token: idx for idx, token in enumerate(\n", + " vocab.idx_to_token)}\n", + " bert = d2l.BERTModel(len(vocab), num_hiddens, norm_shape=[256],\n", + " ffn_num_input=256, ffn_num_hiddens=ffn_num_hiddens,\n", + " num_heads=4, num_layers=2, dropout=0.2,\n", + " max_len=max_len, key_size=256, query_size=256,\n", + " value_size=256, hid_in_features=256,\n", + " mlm_in_features=256, nsp_in_features=256)\n", + " # 加载预训练BERT参数\n", + " bert.load_state_dict(torch.load(os.path.join(data_dir,\n", + " 'pretrained.params')))\n", + " return bert, vocab" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "ZUO4tFyPLpQM", + "outputId": "63ae6f8c-c670-40ba-a8bb-474318d0e5e3" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloading ../data/bert.small.torch.zip from 
http://d2l-data.s3-accelerate.amazonaws.com/bert.small.torch.zip...\n" + ] + } + ], + "source": [ + "devices = d2l.try_all_gpus()\n", + "bert, vocab = load_pretrained_model(\n", + " 'bert.small', num_hiddens=256, ffn_num_hiddens=512, num_heads=4,\n", + " num_layers=2, dropout=0.1, max_len=512, devices=devices)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "LNATOVSXL16a" + }, + "outputs": [], + "source": [ + "class SNLIBERTDataset(torch.utils.data.Dataset):\n", + " def __init__(self, dataset, max_len, vocab=None):\n", + " all_premise_hypothesis_tokens = [[\n", + " p_tokens, h_tokens] for p_tokens, h_tokens in zip(\n", + " *[d2l.tokenize([s.lower() for s in sentences])\n", + " for sentences in dataset[:2]])]\n", + "\n", + " self.labels = torch.tensor(dataset[2])\n", + " self.vocab = vocab\n", + " self.max_len = max_len\n", + " (self.all_token_ids, self.all_segments,\n", + " self.valid_lens) = self._preprocess(all_premise_hypothesis_tokens)\n", + " print('read ' + str(len(self.all_token_ids)) + ' examples')\n", + "\n", + " def _preprocess(self, all_premise_hypothesis_tokens):\n", + " pool = multiprocessing.Pool(4) # 使用4个进程\n", + " out = pool.map(self._mp_worker, all_premise_hypothesis_tokens)\n", + " all_token_ids = [\n", + " token_ids for token_ids, segments, valid_len in out]\n", + " all_segments = [segments for token_ids, segments, valid_len in out]\n", + " valid_lens = [valid_len for token_ids, segments, valid_len in out]\n", + " return (torch.tensor(all_token_ids, dtype=torch.long),\n", + " torch.tensor(all_segments, dtype=torch.long),\n", + " torch.tensor(valid_lens))\n", + "\n", + " def _mp_worker(self, premise_hypothesis_tokens):\n", + " p_tokens, h_tokens = premise_hypothesis_tokens\n", + " self._truncate_pair_of_tokens(p_tokens, h_tokens)\n", + " tokens, segments = d2l.get_tokens_and_segments(p_tokens, h_tokens)\n", + " token_ids = self.vocab[tokens] + [self.vocab['']] \\\n", + " * (self.max_len - len(tokens))\n", + 
" segments = segments + [0] * (self.max_len - len(segments))\n", + " valid_len = len(tokens)\n", + " return token_ids, segments, valid_len\n", + "\n", + " def _truncate_pair_of_tokens(self, p_tokens, h_tokens):\n", + " # 为BERT输入中的''、''和''词元保留位置\n", + " while len(p_tokens) + len(h_tokens) > self.max_len - 3:\n", + " if len(p_tokens) > len(h_tokens):\n", + " p_tokens.pop()\n", + " else:\n", + " h_tokens.pop()\n", + "\n", + " def __getitem__(self, idx):\n", + " return (self.all_token_ids[idx], self.all_segments[idx],\n", + " self.valid_lens[idx]), self.labels[idx]\n", + "\n", + " def __len__(self):\n", + " return len(self.all_token_ids)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "CAIBVMGTL9Wu", + "outputId": "815b8e0d-221e-4b0e-c78e-83b98c09ad04" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloading ../data/snli_1.0.zip from https://nlp.stanford.edu/projects/snli/snli_1.0.zip...\n", + "read 549367 examples\n", + "read 9824 examples\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:481: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. 
Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n" + ] + } + ], + "source": [ + "# 如果出现显存不足错误,请减少“batch_size”。在原始的BERT模型中,max_len=512\n", + "batch_size, max_len, num_workers = 512, 128, d2l.get_dataloader_workers()\n", + "data_dir = d2l.download_extract('SNLI')\n", + "train_set = SNLIBERTDataset(d2l.read_snli(data_dir, True), max_len, vocab)\n", + "test_set = SNLIBERTDataset(d2l.read_snli(data_dir, False), max_len, vocab)\n", + "train_iter = torch.utils.data.DataLoader(train_set, batch_size, shuffle=True,\n", + " num_workers=num_workers)\n", + "test_iter = torch.utils.data.DataLoader(test_set, batch_size,\n", + " num_workers=num_workers)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "4BjQdXANMbNq" + }, + "outputs": [], + "source": [ + "class BERTClassifier(nn.Module):\n", + " def __init__(self, bert):\n", + " super(BERTClassifier, self).__init__()\n", + " self.encoder = bert.encoder\n", + " self.hidden = bert.hidden#encoder decoder权重是复制来的\n", + " self.output = nn.Linear(256, 3)\n", + "\n", + " def forward(self, inputs):\n", + " tokens_X, segments_X, valid_lens_x = inputs\n", + " encoded_X = self.encoder(tokens_X, segments_X, valid_lens_x)\n", + " return self.output(self.hidden(encoded_X[:, 0, :]))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "1QDprIloMeYN" + }, + "outputs": [], + "source": [ + "net = BERTClassifier(bert)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 463 + }, + "id": "YFzExvxRMiBH", + "outputId": "2f16247c-9c37-4d4f-b1cd-a3d981551bc1" + }, + "outputs": [ + { + "metadata": { + "tags": null + }, + "name": "stderr", + "output_type": "stream", + "text": [ + 
"/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:481: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n" + ] + }, + { + "output_type": "error", + "ename": "ImportError", + "evalue": "ignored", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/formatters.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, obj)\u001b[0m\n\u001b[1;32m 332\u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 333\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 334\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mprinter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 335\u001b[0m \u001b[0;31m# Finally look for special method names\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 336\u001b[0m \u001b[0mmethod\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_real_method\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_method\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/pylabtools.py\u001b[0m in \u001b[0;36m\u001b[0;34m(fig)\u001b[0m\n\u001b[1;32m 245\u001b[0m 
\u001b[0mjpg_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'jpg'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 246\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m'svg'\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 247\u001b[0;31m \u001b[0msvg_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'svg'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 248\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m'pdf'\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mformats\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 249\u001b[0m \u001b[0mpdf_formatter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFigure\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mlambda\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'pdf'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 
+ "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/IPython/core/pylabtools.py\u001b[0m in \u001b[0;36mprint_figure\u001b[0;34m(fig, fmt, bbox_inches, **kwargs)\u001b[0m\n\u001b[1;32m 123\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[0mbytes_io\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mBytesIO\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 125\u001b[0;31m \u001b[0mfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcanvas\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint_figure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbytes_io\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 126\u001b[0m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbytes_io\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgetvalue\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 127\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfmt\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'svg'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in \u001b[0;36mprint_figure\u001b[0;34m(self, filename, dpi, facecolor, edgecolor, orientation, format, bbox_inches, **kwargs)\u001b[0m\n\u001b[1;32m 2057\u001b[0m \u001b[0mIf\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0mbackend\u001b[0m\u001b[0;34m*\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mthen\u001b[0m \u001b[0mdetermine\u001b[0m \u001b[0ma\u001b[0m \u001b[0msuitable\u001b[0m \u001b[0mcanvas\u001b[0m \u001b[0;32mclass\u001b[0m \u001b[0;32mfor\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2058\u001b[0m \u001b[0msaving\u001b[0m \u001b[0mto\u001b[0m \u001b[0mformat\u001b[0m 
\u001b[0;34m*\u001b[0m\u001b[0mfmt\u001b[0m\u001b[0;34m*\u001b[0m \u001b[0;34m-\u001b[0m\u001b[0;34m-\u001b[0m \u001b[0meither\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mcurrent\u001b[0m \u001b[0mcanvas\u001b[0m \u001b[0;32mclass\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mit\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2059\u001b[0;31m \u001b[0msupports\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0mfmt\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mwhatever\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m`\u001b[0m\u001b[0mget_registered_canvas_class\u001b[0m\u001b[0;31m`\u001b[0m \u001b[0mreturns\u001b[0m\u001b[0;34m;\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2060\u001b[0m \u001b[0mswitch\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mfigure\u001b[0m \u001b[0mcanvas\u001b[0m \u001b[0mto\u001b[0m \u001b[0mthat\u001b[0m \u001b[0mcanvas\u001b[0m \u001b[0;32mclass\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2061\u001b[0m \"\"\"\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in \u001b[0;36m_get_output_canvas\u001b[0;34m(self, fmt)\u001b[0m\n\u001b[1;32m 1991\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmouse_grabber\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0max\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1992\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmouse_grabber\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1993\u001b[0;31m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1994\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mdraw\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1995\u001b[0m \u001b[0;34m\"\"\"Render the `.Figure`.\"\"\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py\u001b[0m in \u001b[0;36mget_registered_canvas_class\u001b[0;34m(format)\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 125\u001b[0m \u001b[0mThe\u001b[0m \u001b[0mfollowing\u001b[0m \u001b[0mmethods\u001b[0m \u001b[0mmust\u001b[0m \u001b[0mbe\u001b[0m \u001b[0mimplemented\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mthe\u001b[0m \u001b[0mbackend\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfull\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 126\u001b[0;31m functionality (though just implementing :meth:`draw_path` alone would\n\u001b[0m\u001b[1;32m 127\u001b[0m give a highly capable backend):\n\u001b[1;32m 128\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/__init__.py\u001b[0m in \u001b[0;36mimport_module\u001b[0;34m(name, package)\u001b[0m\n\u001b[1;32m 125\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 126\u001b[0m \u001b[0mlevel\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 127\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0m_bootstrap\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_gcd_import\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mlevel\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpackage\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlevel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 128\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 
129\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_gcd_import\u001b[0;34m(name, package, level)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_find_and_load\u001b[0;34m(name, import_)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_find_and_load_unlocked\u001b[0;34m(name, import_)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_load_unlocked\u001b[0;34m(spec)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap_external.py\u001b[0m in \u001b[0;36mexec_module\u001b[0;34m(self, module)\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.7/importlib/_bootstrap.py\u001b[0m in \u001b[0;36m_call_with_frames_removed\u001b[0;34m(f, *args, **kwds)\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/matplotlib/backends/backend_svg.py\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 16\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mmatplotlib\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mmpl\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mmatplotlib\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mcbook\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 18\u001b[0;31m from matplotlib.backend_bases import (\n\u001b[0m\u001b[1;32m 19\u001b[0m \u001b[0m_Backend\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_check_savefig_extra_args\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mFigureCanvasBase\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mFigureManagerBase\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 20\u001b[0m RendererBase)\n", + "\u001b[0;31mImportError\u001b[0m: cannot import name '_check_savefig_extra_args' from 'matplotlib.backend_bases' 
(/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py)" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ] + }, + "metadata": {} + } + ], + "source": [ + "lr, num_epochs = 1e-4, 5\n", + "trainer = torch.optim.Adam(net.parameters(), lr=lr)\n", + "loss = nn.CrossEntropyLoss(reduction='none')\n", + "d2l.train_ch13(net, train_iter, test_iter, loss, trainer, num_epochs,\n", + " devices)" + ] + } + ], + "metadata": { + "colab": { + "name": "BERT微调代码.ipynb", + "provenance": [], + "collapsed_sections": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git "a/code/70-BERT\345\276\256\350\260\203/\350\207\252\347\204\266\350\257\255\350\250\200\346\216\250\347\220\206\346\225\260\346\215\256\351\233\206.ipynb" "b/code/70-BERT\345\276\256\350\260\203/\350\207\252\347\204\266\350\257\255\350\250\200\346\216\250\347\220\206\346\225\260\346\215\256\351\233\206.ipynb" new file mode 100644 index 0000000..dfa0fc4 --- /dev/null +++ "b/code/70-BERT\345\276\256\350\260\203/\350\207\252\347\204\266\350\257\255\350\250\200\346\216\250\347\220\206\346\225\260\346\215\256\351\233\206.ipynb" @@ -0,0 +1,404 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "自然语言推理数据集.ipynb", + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "g4UN2a_q1g9s", + "outputId": "26625c53-2ada-4332-e4c6-47847197ba29" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting d2l\n", + " Downloading d2l-0.17.3-py3-none-any.whl (82 kB)\n", + "\u001b[K |████████████████████████████████| 82 
kB 323 kB/s \n", + "\u001b[?25hRequirement already satisfied: jupyter==1.0.0 in /usr/local/lib/python3.7/dist-packages (from d2l) (1.0.0)\n", + "Collecting numpy==1.18.5\n", + " Downloading numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)\n", + "\u001b[K |████████████████████████████████| 20.1 MB 79.6 MB/s \n", + "\u001b[?25hCollecting requests==2.25.1\n", + " Downloading requests-2.25.1-py2.py3-none-any.whl (61 kB)\n", + "\u001b[K |████████████████████████████████| 61 kB 8.8 MB/s \n", + "\u001b[?25hCollecting pandas==1.2.2\n", + " Downloading pandas-1.2.2-cp37-cp37m-manylinux1_x86_64.whl (9.9 MB)\n", + "\u001b[K |████████████████████████████████| 9.9 MB 20.8 MB/s \n", + "\u001b[?25hCollecting matplotlib==3.3.3\n", + " Downloading matplotlib-3.3.3-cp37-cp37m-manylinux1_x86_64.whl (11.6 MB)\n", + "\u001b[K |████████████████████████████████| 11.6 MB 41.9 MB/s \n", + "\u001b[?25hRequirement already satisfied: qtconsole in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.2.2)\n", + "Requirement already satisfied: notebook in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.3.1)\n", + "Requirement already satisfied: nbconvert in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.6.1)\n", + "Requirement already satisfied: ipywidgets in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (7.6.5)\n", + "Requirement already satisfied: jupyter-console in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (5.2.0)\n", + "Requirement already satisfied: ipykernel in /usr/local/lib/python3.7/dist-packages (from jupyter==1.0.0->d2l) (4.10.1)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (1.3.2)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (2.8.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3 
in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (3.0.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (0.11.0)\n", + "Requirement already satisfied: pillow>=6.2.0 in /usr/local/lib/python3.7/dist-packages (from matplotlib==3.3.3->d2l) (7.1.2)\n", + "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.7/dist-packages (from pandas==1.2.2->d2l) (2018.9)\n", + "Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (3.0.4)\n", + "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (1.24.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (2021.10.8)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests==2.25.1->d2l) (2.10)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib==3.3.3->d2l) (1.15.0)\n", + "Requirement already satisfied: tornado>=4.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.1.1)\n", + "Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.1.1)\n", + "Requirement already satisfied: jupyter-client in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.3.5)\n", + "Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.7/dist-packages (from ipykernel->jupyter==1.0.0->d2l) (5.5.0)\n", + "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (57.4.0)\n", + "Requirement already satisfied: prompt-toolkit<2.0.0,>=1.0.4 in /usr/local/lib/python3.7/dist-packages 
(from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (1.0.18)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (4.4.2)\n", + "Requirement already satisfied: pexpect in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (4.8.0)\n", + "Requirement already satisfied: pygments in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (2.6.1)\n", + "Requirement already satisfied: simplegeneric>0.8 in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.8.1)\n", + "Requirement already satisfied: pickleshare in /usr/local/lib/python3.7/dist-packages (from ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.7.5)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from prompt-toolkit<2.0.0,>=1.0.4->ipython>=4.0.0->ipykernel->jupyter==1.0.0->d2l) (0.2.5)\n", + "Requirement already satisfied: widgetsnbextension~=3.5.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (3.5.2)\n", + "Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (0.2.0)\n", + "Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (1.0.2)\n", + "Requirement already satisfied: nbformat>=4.2.0 in /usr/local/lib/python3.7/dist-packages (from ipywidgets->jupyter==1.0.0->d2l) (5.1.3)\n", + "Requirement already satisfied: jupyter-core in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.9.1)\n", + "Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /usr/local/lib/python3.7/dist-packages (from nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.3.3)\n", + "Requirement already satisfied: importlib-resources>=1.4.0 
in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (5.4.0)\n", + "Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (0.18.1)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (21.4.0)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (3.10.0.2)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (4.11.0)\n", + "Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.7/dist-packages (from importlib-resources>=1.4.0->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets->jupyter==1.0.0->d2l) (3.7.0)\n", + "Requirement already satisfied: Send2Trash in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (1.8.0)\n", + "Requirement already satisfied: terminado>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (0.13.1)\n", + "Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from notebook->jupyter==1.0.0->d2l) (2.11.3)\n", + "Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.7/dist-packages (from jupyter-client->ipykernel->jupyter==1.0.0->d2l) (22.3.0)\n", + "Requirement already satisfied: ptyprocess in /usr/local/lib/python3.7/dist-packages (from terminado>=0.8.1->notebook->jupyter==1.0.0->d2l) (0.7.0)\n", + "Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->notebook->jupyter==1.0.0->d2l) (2.0.1)\n", + 
"Requirement already satisfied: testpath in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.5.0)\n", + "Requirement already satisfied: defusedxml in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.7.1)\n", + "Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (1.5.0)\n", + "Requirement already satisfied: bleach in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (4.1.0)\n", + "Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.4)\n", + "Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from nbconvert->jupyter==1.0.0->d2l) (0.8.4)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter==1.0.0->d2l) (21.3)\n", + "Requirement already satisfied: webencodings in /usr/local/lib/python3.7/dist-packages (from bleach->nbconvert->jupyter==1.0.0->d2l) (0.5.1)\n", + "Requirement already satisfied: qtpy in /usr/local/lib/python3.7/dist-packages (from qtconsole->jupyter==1.0.0->d2l) (2.0.1)\n", + "Installing collected packages: numpy, requests, pandas, matplotlib, d2l\n", + " Attempting uninstall: numpy\n", + " Found existing installation: numpy 1.21.5\n", + " Uninstalling numpy-1.21.5:\n", + " Successfully uninstalled numpy-1.21.5\n", + " Attempting uninstall: requests\n", + " Found existing installation: requests 2.23.0\n", + " Uninstalling requests-2.23.0:\n", + " Successfully uninstalled requests-2.23.0\n", + " Attempting uninstall: pandas\n", + " Found existing installation: pandas 1.3.5\n", + " Uninstalling pandas-1.3.5:\n", + " Successfully uninstalled pandas-1.3.5\n", + " Attempting uninstall: matplotlib\n", + " Found existing installation: matplotlib 3.2.2\n", + " Uninstalling matplotlib-3.2.2:\n", + " 
Successfully uninstalled matplotlib-3.2.2\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "tensorflow 2.8.0 requires tf-estimator-nightly==2.8.0.dev2021122109, which is not installed.\n", + "tensorflow 2.8.0 requires numpy>=1.20, but you have numpy 1.18.5 which is incompatible.\n", + "tables 3.7.0 requires numpy>=1.19.0, but you have numpy 1.18.5 which is incompatible.\n", + "google-colab 1.0.0 requires requests~=2.23.0, but you have requests 2.25.1 which is incompatible.\n", + "datascience 0.10.6 requires folium==0.2.1, but you have folium 0.8.3 which is incompatible.\n", + "albumentations 0.1.12 requires imgaug<0.2.7,>=0.2.5, but you have imgaug 0.2.9 which is incompatible.\u001b[0m\n", + "Successfully installed d2l-0.17.3 matplotlib-3.3.3 numpy-1.18.5 pandas-1.2.2 requests-2.25.1\n" + ] + }, + { + "output_type": "display_data", + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "matplotlib", + "mpl_toolkits", + "numpy", + "pandas" + ] + } + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Downloading ../data/snli_1.0.zip from https://nlp.stanford.edu/projects/snli/snli_1.0.zip...\n" + ] + } + ], + "source": [ + "!pip install d2l\n", + "import os\n", + "import re\n", + "import torch\n", + "from torch import nn\n", + "from d2l import torch as d2l \n", + "\n", + "d2l.DATA_HUB['SNLI'] = (\n", + " 'https://nlp.stanford.edu/projects/snli/snli_1.0.zip',\n", + " '9fcde07509c7e87ec61c640c1b2753d9041758e4')\n", + "\n", + "data_dir = d2l.download_extract('SNLI')" + ] + }, + { + "cell_type": "code", + "source": [ + "def read_snli(data_dir, is_train):\n", + " \"\"\"将SNLI数据集解析为前提、假设和标签\"\"\"\n", + " def extract_text(s):\n", + " # 删除我们不会使用的信息\n", + " s = re.sub('\\\\(', '', s)\n", + " s = re.sub('\\\\)', '', s)\n", + " # 
用一个空格替换两个或多个连续的空格\n", + " s = re.sub('\\\\s{2,}', ' ', s)\n", + " return s.strip()#移除字符串头尾空格\n", + " label_set = {'entailment': 0, 'contradiction': 1, 'neutral': 2}\n", + " file_name = os.path.join(data_dir, 'snli_1.0_train.txt'\n", + " if is_train else 'snli_1.0_test.txt')\n", + " with open(file_name, 'r') as f:\n", + " rows = [row.split('\\t') for row in f.readlines()[1:]]\n", + " premises = [extract_text(row[1]) for row in rows if row[0] in label_set]\n", + " hypotheses = [extract_text(row[2]) for row in rows if row[0] \\\n", + " in label_set]\n", + " labels = [label_set[row[0]] for row in rows if row[0] in label_set]\n", + " return premises, hypotheses, labels" + ], + "metadata": { + "id": "Z6F1FiVa3BNM" + }, + "execution_count": 2, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "train_data = read_snli(data_dir, is_train=True)\n", + "for x0, x1, y in zip(train_data[0][:3], train_data[1][:3], train_data[2][:3]):\n", + " print('premise:', x0)\n", + " print('hypothesis:', x1)\n", + " print('label:', y)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "ipxvCjjv_4G4", + "outputId": "c103e66d-fe73-4681-f95f-21f07f42d772" + }, + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "premise: A person on a horse jumps over a broken down airplane .\n", + "hypothesis: A person is training his horse for a competition .\n", + "label: 2\n", + "premise: A person on a horse jumps over a broken down airplane .\n", + "hypothesis: A person is at a diner , ordering an omelette .\n", + "label: 1\n", + "premise: A person on a horse jumps over a broken down airplane .\n", + "hypothesis: A person is outdoors , on a horse .\n", + "label: 0\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "test_data = read_snli(data_dir, is_train=False)\n", + "for data in [train_data, test_data]:\n", + " print([[row for row in data[2]].count(i) for i in range(3)])" + ], + "metadata": { 
+ "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "hWHzhVCIA9EU", + "outputId": "b0073b1e-deb4-4c4a-c08a-37f590b678ff" + }, + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "[183416, 183187, 182764]\n", + "[3368, 3237, 3219]\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "class SNLIDataset(torch.utils.data.Dataset):\n", + " \"\"\"用于加载SNLI数据集的自定义数据集\"\"\"\n", + " def __init__(self, dataset, num_steps, vocab=None):\n", + " self.num_steps = num_steps\n", + " all_premise_tokens = d2l.tokenize(dataset[0])\n", + " all_hypothesis_tokens = d2l.tokenize(dataset[1])\n", + " if vocab is None:#用bert时一定要使用bert预训练时的vocab,没有的时候自己构建\n", + " self.vocab = d2l.Vocab(all_premise_tokens + \\\n", + " all_hypothesis_tokens, min_freq=5, reserved_tokens=[''])\n", + " else:\n", + " self.vocab = vocab\n", + " self.premises = self._pad(all_premise_tokens)\n", + " self.hypotheses = self._pad(all_hypothesis_tokens)\n", + " self.labels = torch.tensor(dataset[2])\n", + " print('read ' + str(len(self.premises)) + ' examples')\n", + "\n", + " def _pad(self, lines):\n", + " return torch.tensor([d2l.truncate_pad(\n", + " self.vocab[line], self.num_steps, self.vocab[''])\n", + " for line in lines])\n", + "\n", + " def __getitem__(self, idx):\n", + " return (self.premises[idx], self.hypotheses[idx]), self.labels[idx]\n", + "\n", + " def __len__(self):\n", + " return len(self.premises)" + ], + "metadata": { + "id": "L3NRAar_BAi5" + }, + "execution_count": 6, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "def load_data_snli(batch_size, num_steps=50):\n", + " \"\"\"下载SNLI数据集并返回数据迭代器和词表\"\"\"\n", + " num_workers = d2l.get_dataloader_workers()\n", + " data_dir = d2l.download_extract('SNLI')\n", + " train_data = read_snli(data_dir, True)\n", + " test_data = read_snli(data_dir, False)\n", + " train_set = SNLIDataset(train_data, num_steps)\n", + " test_set = SNLIDataset(test_data, num_steps, 
train_set.vocab)\n", + " train_iter = torch.utils.data.DataLoader(train_set, batch_size,\n", + " shuffle=True,\n", + " num_workers=num_workers)\n", + " test_iter = torch.utils.data.DataLoader(test_set, batch_size,\n", + " shuffle=False,\n", + " num_workers=num_workers)\n", + " return train_iter, test_iter, train_set.vocab" + ], + "metadata": { + "id": "TnNpL6veG6aB" + }, + "execution_count": 7, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "train_iter, test_iter, vocab = load_data_snli(128, 50)\n", + "len(vocab)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "9hAZUEk1HCT6", + "outputId": "e5d38db0-9d5e-439e-fbcd-b845904e23de" + }, + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "read 549367 examples\n", + "read 9824 examples\n" + ] + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:481: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. 
Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n" + ] + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "18678" + ] + }, + "metadata": {}, + "execution_count": 8 + } + ] + }, + { + "cell_type": "code", + "source": [ + "for X, Y in train_iter:\n", + " print(X[0].shape)\n", + " print(X[1].shape)\n", + " print(Y.shape)\n", + " break" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "5OqGACJTHEcM", + "outputId": "29221964-b25f-4596-900c-e64a9920db50" + }, + "execution_count": 9, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:481: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n" + ] + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "torch.Size([128, 50])\n", + "torch.Size([128, 50])\n", + "torch.Size([128])\n" + ] + } + ] + } + ] +} \ No newline at end of file