diff --git a/helper_functions.py b/helper_functions.py new file mode 100644 index 0000000..a06b780 --- /dev/null +++ b/helper_functions.py @@ -0,0 +1,170 @@ +import os +import urllib.request +from tqdm import tqdm +import json +import numpy as np +import tensorflow as tf +import torch + +def download_file(url, destination): + # Send a GET request to download the file + + try: + with urllib.request.urlopen(url) as response: + # Get the total file size from headers, defaulting to 0 if not present + file_size = int(response.headers.get("Content-Length", 0)) + + # Check if file exists and has the same size + if os.path.exists(destination): + file_size_local = os.path.getsize(destination) + if file_size == file_size_local: + print(f"File already exists and is up-to-date: {destination}") + return + + # Define the block size for reading the file + block_size = 1024 # 1 Kilobyte + + # Initialize the progress bar with total file size + progress_bar_description = os.path.basename(url) # Extract filename from URL + with tqdm(total=file_size, unit="iB", unit_scale=True, desc=progress_bar_description) as progress_bar: + # Open the destination file in binary write mode + with open(destination, "wb") as file: + # Read the file in chunks and write to destination + while True: + chunk = response.read(block_size) + if not chunk: + break + file.write(chunk) + progress_bar.update(len(chunk)) # Update progress bar + except urllib.error.HTTPError: + s = ( + f"The specified URL ({url}) is incorrect, the internet connection cannot be established," + "\nor the requested file is temporarily unavailable.\nPlease visit the following website" + " for help: https://github.com/rasbt/LLMs-from-scratch/discussions/273") + print(s) + +def load_gpt2_params_from_tf_ckpt(ckpt_path, settings): + # Initialize parameters dictionary with empty blocks for each layer + params = {"blocks": [{} for _ in range(settings["n_layer"])]} + + # Iterate over each variable in the checkpoint + for name, _ in tf.train.list_variables(ckpt_path): + # Load the variable and remove singleton dimensions + variable_array = np.squeeze(tf.train.load_variable(ckpt_path, name)) + + # Process the variable name to extract relevant parts + variable_name_parts = name.split("/")[1:] # Skip the 'model/' prefix + + # Identify the target dictionary for the variable + target_dict = params + if variable_name_parts[0].startswith("h"): + layer_number = int(variable_name_parts[0][1:]) + target_dict = params["blocks"][layer_number] + + # Recursively access or create nested dictionaries + for key in variable_name_parts[1:-1]: + target_dict = target_dict.setdefault(key, {}) + + # Assign the variable array to the last key + last_key = variable_name_parts[-1] + target_dict[last_key] = variable_array + + return params + + +def download_and_load_gpt2(model_size, models_dir): + # Validate model size + allowed_sizes = ("124M", "355M", "774M", "1558M") + if model_size not in allowed_sizes: + raise ValueError(f"Model size not in {allowed_sizes}") + + # Define paths + model_dir = os.path.join(models_dir, model_size) + base_url = "https://openaipublic.blob.core.windows.net/gpt-2/models" + filenames = [ + "checkpoint", "encoder.json", "hparams.json", + "model.ckpt.data-00000-of-00001", "model.ckpt.index", + "model.ckpt.meta", "vocab.bpe" + ] + + # Download files + os.makedirs(model_dir, exist_ok=True) + for filename in filenames: + file_url = os.path.join(base_url, model_size, filename) + file_path = os.path.join(model_dir, filename) + download_file(file_url, file_path) + + # Load 
settings and params + tf_ckpt_path = tf.train.latest_checkpoint(model_dir) + settings = json.load(open(os.path.join(model_dir, "hparams.json"))) + params = load_gpt2_params_from_tf_ckpt(tf_ckpt_path, settings) + + return settings, params + + +def assign(left, right): + if left.shape != right.shape: + raise ValueError(f"Shape mismatch. Left: {left.shape}, Right: {right.shape}") + return torch.nn.Parameter(torch.tensor(right)) + + +def load_weights_into_gpt(gpt, params): + gpt.pos_emb.weight = assign(gpt.pos_emb.weight, params['wpe']) + gpt.tok_emb.weight = assign(gpt.tok_emb.weight, params['wte']) + + for b in range(len(params["blocks"])): + q_w, k_w, v_w = np.split( + (params["blocks"][b]["attn"]["c_attn"])["w"], 3, axis=-1) + gpt.trf_blocks[b].att.W_query.weight = assign( + gpt.trf_blocks[b].att.W_query.weight, q_w.T) + gpt.trf_blocks[b].att.W_key.weight = assign( + gpt.trf_blocks[b].att.W_key.weight, k_w.T) + gpt.trf_blocks[b].att.W_value.weight = assign( + gpt.trf_blocks[b].att.W_value.weight, v_w.T) + + q_b, k_b, v_b = np.split( + (params["blocks"][b]["attn"]["c_attn"])["b"], 3, axis=-1) + gpt.trf_blocks[b].att.W_query.bias = assign( + gpt.trf_blocks[b].att.W_query.bias, q_b) + gpt.trf_blocks[b].att.W_key.bias = assign( + gpt.trf_blocks[b].att.W_key.bias, k_b) + gpt.trf_blocks[b].att.W_value.bias = assign( + gpt.trf_blocks[b].att.W_value.bias, v_b) + + gpt.trf_blocks[b].att.out_proj.weight = assign( + gpt.trf_blocks[b].att.out_proj.weight, + params["blocks"][b]["attn"]["c_proj"]["w"].T) + gpt.trf_blocks[b].att.out_proj.bias = assign( + gpt.trf_blocks[b].att.out_proj.bias, + params["blocks"][b]["attn"]["c_proj"]["b"]) + + gpt.trf_blocks[b].ff.layers[0].weight = assign( + gpt.trf_blocks[b].ff.layers[0].weight, + params["blocks"][b]["mlp"]["c_fc"]["w"].T) + gpt.trf_blocks[b].ff.layers[0].bias = assign( + gpt.trf_blocks[b].ff.layers[0].bias, + params["blocks"][b]["mlp"]["c_fc"]["b"]) + gpt.trf_blocks[b].ff.layers[2].weight = assign( + gpt.trf_blocks[b].ff.layers[2].weight, + params["blocks"][b]["mlp"]["c_proj"]["w"].T) + gpt.trf_blocks[b].ff.layers[2].bias = assign( + gpt.trf_blocks[b].ff.layers[2].bias, + params["blocks"][b]["mlp"]["c_proj"]["b"]) + + gpt.trf_blocks[b].norm1.scale = assign( + gpt.trf_blocks[b].norm1.scale, + params["blocks"][b]["ln_1"]["g"]) + gpt.trf_blocks[b].norm1.shift = assign( + gpt.trf_blocks[b].norm1.shift, + params["blocks"][b]["ln_1"]["b"]) + gpt.trf_blocks[b].norm2.scale = assign( + gpt.trf_blocks[b].norm2.scale, + params["blocks"][b]["ln_2"]["g"]) + gpt.trf_blocks[b].norm2.shift = assign( + gpt.trf_blocks[b].norm2.shift, + params["blocks"][b]["ln_2"]["b"]) + + gpt.final_norm.scale = assign(gpt.final_norm.scale, params["g"]) + gpt.final_norm.shift = assign(gpt.final_norm.shift, params["b"]) + gpt.out_head.weight = assign(gpt.out_head.weight, params["wte"]) + diff --git a/lab6.ipynb b/lab6.ipynb index 869f12c..436ca54 100644 --- a/lab6.ipynb +++ b/lab6.ipynb @@ -2,34 +2,26 @@ "cells": [ { "cell_type": "markdown", - "id": "12e91914-5f51-43fa-b65b-625e73b4d17b", + "id": "c2520ec3-722f-4f44-bdd1-885b13e7afbf", "metadata": { - "id": "12e91914-5f51-43fa-b65b-625e73b4d17b" + "id": "c2520ec3-722f-4f44-bdd1-885b13e7afbf" }, "source": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "\n", - "Supplementary code for the Build a Large Language Model From Scratch book by Sebastian Raschka
\n", - "
Code repository: https://github.com/rasbt/LLMs-from-scratch\n", - "
\n", - "
\n", - "\n", - "
" + "# Lab 6: Finetuning To Follow Instructions" ] }, { "cell_type": "markdown", - "id": "c2520ec3-722f-4f44-bdd1-885b13e7afbf", - "metadata": { - "id": "c2520ec3-722f-4f44-bdd1-885b13e7afbf" - }, + "id": "2626e3f5", + "metadata": {}, "source": [ - "# Chapter 7: Finetuning To Follow Instructions" + "This lab covers:\n", + "- Preparing a dataset with instructions (instruction + response pairs)\n", + "- Finetuning the LLM to follow instructions\n", + "- Evaluation of the LLM\n", + "\n", + "\n", + "" ] }, { @@ -48,11 +40,11 @@ "name": "stdout", "output_type": "stream", "text": [ - "matplotlib version: 3.7.1\n", - "tiktoken version: 0.7.0\n", - "torch version: 2.4.0\n", - "tqdm version: 4.66.4\n", - "tensorflow version: 2.15.0\n" + "matplotlib version: 3.8.2\n", + "tiktoken version: 0.8.0\n", + "torch version: 2.2.0\n", + "tqdm version: 4.66.2\n", + "tensorflow version: 2.18.0\n" ] } ], @@ -77,7 +69,7 @@ "id": "264fca98-2f9a-4193-b435-2abfa3b4142f" }, "source": [ - "" + "" ] }, { @@ -87,7 +79,7 @@ "id": "8bbc68e9-75b3-41f1-ac2c-e071c3cd0813" }, "source": [ - "## 7.1 Introduction to instruction finetuning" + "## 1 Introduction to instruction finetuning" ] }, { @@ -97,9 +89,9 @@ "id": "53dba24a-6805-496c-9a7f-c75e2d3527ab" }, "source": [ - "- In chapter 5, we saw that pretraining an LLM involves a training procedure where it learns to generate one word at a time\n", + "- In lab 4, we saw that pretraining an LLM involves a training procedure where it learns to generate one word at a time\n", "- Hence, a pretrained LLM is good at text completion, but it is not good at following instructions\n", - "- In this chapter, we teach the LLM to follow instructions better" + "- In this lab, we teach the LLM to follow instructions better" ] }, { @@ -109,19 +101,7 @@ "id": "18dc0535-0904-44ed-beaf-9b678292ef35" }, "source": [ - "" - ] - }, - { - "cell_type": "markdown", - "id": "b4698b23-12e0-4bd7-a140-ccb3dd71d4e8", - "metadata": { - "id": "b4698b23-12e0-4bd7-a140-ccb3dd71d4e8" - }, - "source": [ - "- The topics covered in this chapter are summarized in the figure below\n", - "\n", - "" + "" ] }, { @@ -131,7 +111,7 @@ "id": "5384f0cf-ef3c-4436-a5fa-59bd25649f86" }, "source": [ - "## 7.2 Preparing a dataset for supervised instruction finetuning" + "## 2 Preparing a dataset for supervised instruction finetuning" ] }, { @@ -141,7 +121,7 @@ "id": "f8b34ff8-619f-4e89-bd03-ce513269760d" }, "source": [ - "- We will work with an instruction dataset I prepared for this chapter" + "- We will work with an instruction dataset that was already prepared in advance" ] }, { @@ -285,7 +265,7 @@ "id": "dffa4f70-44d4-4be4-89a9-2159f4885b10" }, "source": [ - "" + "" ] }, { @@ -295,7 +275,7 @@ "id": "dd79a74e-befb-491c-be49-f777a6a5b6a6" }, "source": [ - "- In this chapter, we use Alpaca-style prompt formatting, which was the original prompt template for instruction finetuning\n", + "- In this lab, we use Alpaca-style prompt formatting, which was the original prompt template for instruction finetuning\n", "- Below, we format the input that we will pass as input to the LLM" ] }, @@ -472,7 +452,7 @@ "id": "fcaaf606-f913-4445-8301-632ae10d387d" }, "source": [ - "## 7.3 Organizing data into training batches" + "## 3 Organizing data into training batches" ] }, { @@ -482,7 +462,7 @@ "id": "233f63bd-9755-4d07-8884-5e2e5345cf27" }, "source": [ - "" + "" ] }, { @@ -494,7 +474,7 @@ "source": [ "- We tackle this dataset batching in several steps, as summarized in the figure below\n", "\n", - "" + "" ] }, { @@ -504,9 +484,9 @@ "id": 
"b9af423f-aad9-4b3c-bea5-153021c04862" }, "source": [ - "- First, we implement an `InstructionDataset` class that pre-tokenizes all inputs in the dataset, similar to the `SpamDataset` in chapter 6\n", + "- First, we implement an `InstructionDataset` class that pre-tokenizes all inputs in the dataset, similar to the `SpamDataset` in lab 5\n", "\n", - "" + "" ] }, { @@ -550,8 +530,8 @@ "id": "384f0e69-4b22-41c0-a25d-f077527eddd1" }, "source": [ - "- Similar to chapter 6, we want to collect multiple training examples in a batch to accelerate training; this requires padding all inputs to a similar length\n", - "- Also similar to the previous chapter, we use the `<|endoftext|>` token as a padding token" + "- Similar to lab 5, we want to collect multiple training examples in a batch to accelerate training; this requires padding all inputs to a similar length\n", + "- Also similar to the previous lab, we use the `<|endoftext|>` token as a padding token" ] }, { @@ -588,7 +568,7 @@ "id": "9e5bd7bc-f347-4cf8-a0c2-94cb8799e427" }, "source": [ - "- In chapter 6, we padded all examples in a dataset to the same length\n", + "- In lab 5, we padded all examples in a dataset to the same length\n", " - Here, we take a more sophisticated approach and develop a custom \"collate\" function that we can pass to the data loader\n", " - This custom collate function pads the training examples in each batch to have the same length (but different batches can have different lengths)" ] @@ -600,7 +580,7 @@ "id": "65c4d943-4aa8-4a44-874e-05bc6831fbd3" }, "source": [ - "" + "" ] }, { @@ -688,7 +668,7 @@ "id": "c46832ab-39b7-45f8-b330-ac9adfa10d1b" }, "source": [ - "" + "" ] }, { @@ -709,7 +689,7 @@ "id": "0386b6fe-3455-4e70-becd-a5a4681ba2ef" }, "source": [ - "" + "" ] }, { @@ -792,7 +772,7 @@ "source": [ "- Next, we introduce an `ignore_index` value to replace all padding token IDs with a new value; the purpose of this `ignore_index` is that we can ignore padding values in the loss function (more on that later)\n", "\n", - "\n", + "\n", "\n", "- Concretely, this means that we replace the token IDs corresponding to `50256` with `-100` as illustrated below" ] @@ -804,7 +784,7 @@ "id": "bd4bed33-956e-4b3f-a09c-586d8203109a" }, "source": [ - "" + "" ] }, { @@ -911,7 +891,7 @@ }, "source": [ "- Let's see what this replacement by -100 accomplishes\n", - "- For illustration purposes, let's assume we have a small classification task with 2 class labels, 0 and 1, similar to chapter 6\n", + "- For illustration purposes, let's assume we have a small classification task with 2 class labels, 0 and 1, similar to lab 5\n", "- If we have the following logits values (outputs of the last layer of the model), we calculate the following loss" ] }, @@ -1058,7 +1038,7 @@ "id": "fab8f0ed-80e8-4fd9-bf84-e5d0e0bc0a39" }, "source": [ - "" + "" ] }, { @@ -1068,7 +1048,7 @@ "id": "bccaf048-ec95-498c-9155-d5b3ccba6c96" }, "source": [ - "## 7.4 Creating data loaders for an instruction dataset" + "## 4 Creating data loaders for an instruction dataset" ] }, { @@ -1088,7 +1068,7 @@ "id": "9fffe390-b226-4d5c-983f-9f4da773cb82" }, "source": [ - "" + "" ] }, { @@ -1104,7 +1084,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 36, "id": "etpqqWh8phKc", "metadata": { "colab": { @@ -1118,7 +1098,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Device: cuda\n" + "Device: cpu\n" ] } ], @@ -1130,11 +1110,11 @@ "# which is much faster than on an Apple CPU (as measured on an M3 MacBook Air).\n", "# However, the resulting 
loss values may be slightly different.\n", "\n", - "#if torch.cuda.is_available():\n", + "# if torch.cuda.is_available():\n", "# device = torch.device(\"cuda\")\n", - "#elif torch.backends.mps.is_available():\n", + "# elif torch.backends.mps.is_available():\n", "# device = torch.device(\"mps\")\n", - "#else:\n", + "# else:\n", "# device = torch.device(\"cpu\")\n", "\n", "print(\"Device:\", device)" @@ -1142,7 +1122,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 37, "id": "4e47fb30-c2c6-4e6d-a64c-76cc65be4a2c", "metadata": { "id": "4e47fb30-c2c6-4e6d-a64c-76cc65be4a2c" @@ -1165,12 +1145,12 @@ "id": "8ff42c29-8b81-45e5-ae8d-b97cd1cf447a" }, "source": [ - "- Next, we instantiate the data loaders similar to previous chapters, except that we now provide our own collate function for the batching process" + "- Next, we instantiate the data loaders similar to labs chapters, except that we now provide our own collate function for the batching process" ] }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 38, "id": "BtWkgir6Hlpe", "metadata": { "id": "BtWkgir6Hlpe" @@ -1198,7 +1178,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 39, "id": "1d097dc8-ad34-4f05-b435-e4147965f532", "metadata": { "id": "1d097dc8-ad34-4f05-b435-e4147965f532" @@ -1238,7 +1218,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 40, "id": "GGs1AI3vHpnX", "metadata": { "colab": { @@ -1391,7 +1371,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 41, "id": "21b8fd02-014f-4481-9b71-5bfee8f9dfcd", "metadata": { "colab": { @@ -1411,8 +1391,7 @@ " 985, 576, 13, 198, 198, 21017, 23412, 25, 198, 464,\n", " 5156, 318, 845, 13779, 13, 198, 198, 21017, 18261, 25,\n", " 198, 464, 5156, 318, 355, 13779, 355, 257, 4936, 13,\n", - " 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256],\n", - " device='cuda:0')\n" + " 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256])\n" ] } ], @@ -1432,7 +1411,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 42, "id": "51649ab4-1a7e-4a9e-92c5-950a24fde211", "metadata": { "colab": { @@ -1452,8 +1431,7 @@ " 576, 13, 198, 198, 21017, 23412, 25, 198, 464, 5156,\n", " 318, 845, 13779, 13, 198, 198, 21017, 18261, 25, 198,\n", " 464, 5156, 318, 355, 13779, 355, 257, 4936, 13, 50256,\n", - " -100, -100, -100, -100, -100, -100, -100, -100, -100],\n", - " device='cuda:0')\n" + " -100, -100, -100, -100, -100, -100, -100, -100, -100])\n" ] } ], @@ -1468,7 +1446,7 @@ "id": "d6aad445-8f19-4238-b9bf-db80767fb91a" }, "source": [ - "## 7.5 Loading a pretrained LLM" + "## 5 Loading a pretrained LLM" ] }, { @@ -1478,7 +1456,7 @@ "id": "5a5c07d1-4fc9-4846-94cf-b11a085a667b" }, "source": [ - "- In this section, we load a pretrained GPT model using the same code that we used in section 5.5 of chapter 5 and section 6.4 in chapter 6" + "- In this section, we load a pretrained GPT model" ] }, { @@ -1488,7 +1466,7 @@ "id": "8d1b438f-88af-413f-96a9-f059c6c55fc4" }, "source": [ - "" + "" ] }, { @@ -1498,12 +1476,12 @@ "id": "8c68eda7-e02e-4caa-846b-ca6dbd396ca2" }, "source": [ - "- However, instead of loading the smallest 124 million parameter model, we load the medium version with 355 million parameters since the 124 million model is too small for achieving qualitatively reasonable results via instruction finetuning" + "- We load the medium version with 355 million parameters for achieving qualitatively reasonable results via instruction finetuning" ] }, 
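As a quick aside to the `ignore_index` discussion in the markdown cells above: the reason the collate function can safely replace padding token IDs with -100 is that PyTorch's cross-entropy loss skips any target equal to its default `ignore_index` of -100. The short standalone sketch below is not part of this diff; it only assumes a working PyTorch install and uses the same kind of 2-class toy logits as the notebook.

import torch
import torch.nn.functional as F

# Toy logits for three positions and two classes.
logits = torch.tensor([[-1.0, 1.0],
                       [-0.5, 1.5],
                       [ 2.0, -2.0]])

targets_all = torch.tensor([1, 1, 1])      # every position contributes to the loss
targets_msk = torch.tensor([1, 1, -100])   # last position is treated as padding and ignored

print(F.cross_entropy(logits, targets_all))
print(F.cross_entropy(logits, targets_msk))  # equals the loss over the first two positions only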
{ "cell_type": "code", - "execution_count": 28, + "execution_count": null, "id": "0d249d67-5eba-414e-9bd2-972ebf01329d", "metadata": { "colab": { @@ -1512,31 +1490,10 @@ "id": "0d249d67-5eba-414e-9bd2-972ebf01329d", "outputId": "3f08f5e1-ca7c-406d-e2ae-1b5fcafad3f2" }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2024-07-25 02:22:49.969483: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", - "2024-07-25 02:22:50.023103: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", - "2024-07-25 02:22:50.023136: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", - "2024-07-25 02:22:50.024611: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", - "2024-07-25 02:22:50.033304: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", - "To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", - "2024-07-25 02:22:51.282247: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", - "checkpoint: 100%|██████████| 77.0/77.0 [00:00<00:00, 169kiB/s]\n", - "encoder.json: 100%|██████████| 1.04M/1.04M [00:00<00:00, 2.43MiB/s]\n", - "hparams.json: 100%|██████████| 91.0/91.0 [00:00<00:00, 168kiB/s]\n", - "model.ckpt.data-00000-of-00001: 100%|██████████| 1.42G/1.42G [00:56<00:00, 25.0MiB/s]\n", - "model.ckpt.index: 100%|██████████| 10.4k/10.4k [00:00<00:00, 16.5MiB/s]\n", - "model.ckpt.meta: 100%|██████████| 927k/927k [00:00<00:00, 1.96MiB/s]\n", - "vocab.bpe: 100%|██████████| 456k/456k [00:00<00:00, 1.53MiB/s]\n" - ] - } - ], + "outputs": [], "source": [ - "from gpt_download import download_and_load_gpt2\n", - "from previous_chapters import GPTModel, load_weights_into_gpt\n", + "from helper_functions import download_and_load_gpt2, load_weights_into_gpt\n", + "from previous_labs import GPTModel\n", "\n", "\n", "BASE_CONFIG = {\n", @@ -1580,7 +1537,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 44, "id": "7bd32b7c-5b44-4d25-a09f-46836802ca74", "metadata": { "colab": { @@ -1610,14 +1567,14 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 45, "id": "2e3e68e0-2627-4c65-b4e7-1e0667e4f6fa", "metadata": { "id": "2e3e68e0-2627-4c65-b4e7-1e0667e4f6fa" }, "outputs": [], "source": [ - "from previous_chapters import (\n", + "from previous_labs import (\n", " generate,\n", " text_to_token_ids,\n", " token_ids_to_text\n", @@ -1646,7 +1603,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 46, "id": "ba4a55bf-a245-48d8-beda-2838a58fb5ba", "metadata": { "colab": { @@ -1694,7 +1651,7 @@ "id": "70d27b9d-a942-4cf5-b797-848c5f01e723" }, "source": [ - "## 7.6 Finetuning the LLM on instruction data" + "## 6 Finetuning the LLM on instruction data" ] }, { @@ -1706,21 
+1663,21 @@ "source": [ "- In this section, we finetune the model\n", "\n", - "\n", + "\n", "\n", - "- Note that we can reuse all the loss calculation and training functions that we used in previous chapters" + "- Note that we can reuse all the loss calculation and training functions that we used in previous labs" ] }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 47, "id": "65444865-df87-4d98-9faf-875e1c4be860", "metadata": { "id": "65444865-df87-4d98-9faf-875e1c4be860" }, "outputs": [], "source": [ - "from previous_chapters import (\n", + "from previous_labs import (\n", " calc_loss_loader,\n", " train_model_simple\n", ")" @@ -1733,12 +1690,12 @@ "id": "00083059-aa41-4d37-8a17-1c72d1b1ca00" }, "source": [ - "- Let's calculate the initial training and validation set loss before we start training (as in previous chapters, the goal is to minimize the loss)" + "- Let's calculate the initial training and validation set loss before we start training (as in previous labs, the goal is to minimize the loss)" ] }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 48, "id": "d99fc6f8-63b2-43da-adbb-a7b6b92c8dd5", "metadata": { "colab": { @@ -1752,8 +1709,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Training loss: 3.82590970993042\n", - "Validation loss: 3.761933755874634\n" + "Training loss: 3.8258948802947996\n", + "Validation loss: 3.7619192123413088\n" ] } ], @@ -1777,7 +1734,7 @@ "id": "12a6da8f-15b3-42b0-a136-619b7a35c3e9" }, "source": [ - "- Note that the training is a bit more expensive than in previous chapters since we are using a larger model (355 million instead of 124 million parameters)\n", + "- Note that the training is a bit more expensive than in previous labs since we are using a larger model (355 million parameters)\n", "- The runtimes for various devices are shown for reference below (running this notebook on a compatible GPU device requires no changes to the code)" ] }, @@ -1801,14 +1758,12 @@ "| gpt2-small (124M) | GPU (L4) | 0.69 minutes |\n", "| gpt2-small (124M) | GPU (A100) | 0.39 minutes |\n", "\n", - "\n", - "\n", - "- I ran this notebook using the `\"gpt2-medium (355M)\"` model" + "\n" ] }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 49, "id": "78bcf83a-1fff-4540-97c1-765c4016d5e3", "metadata": { "colab": { @@ -1823,55 +1778,55 @@ "output_type": "stream", "text": [ "Ep 1 (Step 000000): Train loss 2.637, Val loss 2.626\n", - "Ep 1 (Step 000005): Train loss 1.174, Val loss 1.102\n", - "Ep 1 (Step 000010): Train loss 0.872, Val loss 0.944\n", + "Ep 1 (Step 000005): Train loss 1.174, Val loss 1.103\n", + "Ep 1 (Step 000010): Train loss 0.872, Val loss 0.945\n", "Ep 1 (Step 000015): Train loss 0.857, Val loss 0.906\n", "Ep 1 (Step 000020): Train loss 0.776, Val loss 0.881\n", "Ep 1 (Step 000025): Train loss 0.754, Val loss 0.859\n", "Ep 1 (Step 000030): Train loss 0.799, Val loss 0.836\n", "Ep 1 (Step 000035): Train loss 0.714, Val loss 0.808\n", "Ep 1 (Step 000040): Train loss 0.672, Val loss 0.806\n", - "Ep 1 (Step 000045): Train loss 0.633, Val loss 0.789\n", - "Ep 1 (Step 000050): Train loss 0.663, Val loss 0.783\n", - "Ep 1 (Step 000055): Train loss 0.760, Val loss 0.763\n", + "Ep 1 (Step 000045): Train loss 0.633, Val loss 0.790\n", + "Ep 1 (Step 000050): Train loss 0.662, Val loss 0.783\n", + "Ep 1 (Step 000055): Train loss 0.760, Val loss 0.764\n", "Ep 1 (Step 000060): Train loss 0.719, Val loss 0.743\n", - "Ep 1 (Step 000065): Train loss 0.653, Val loss 0.735\n", + "Ep 1 (Step 000065): 
Train loss 0.652, Val loss 0.735\n", "Ep 1 (Step 000070): Train loss 0.532, Val loss 0.729\n", - "Ep 1 (Step 000075): Train loss 0.569, Val loss 0.728\n", + "Ep 1 (Step 000075): Train loss 0.569, Val loss 0.729\n", "Ep 1 (Step 000080): Train loss 0.605, Val loss 0.725\n", "Ep 1 (Step 000085): Train loss 0.509, Val loss 0.709\n", "Ep 1 (Step 000090): Train loss 0.562, Val loss 0.691\n", "Ep 1 (Step 000095): Train loss 0.500, Val loss 0.681\n", - "Ep 1 (Step 000100): Train loss 0.503, Val loss 0.677\n", + "Ep 1 (Step 000100): Train loss 0.502, Val loss 0.677\n", "Ep 1 (Step 000105): Train loss 0.564, Val loss 0.670\n", - "Ep 1 (Step 000110): Train loss 0.555, Val loss 0.666\n", + "Ep 1 (Step 000110): Train loss 0.555, Val loss 0.667\n", "Ep 1 (Step 000115): Train loss 0.508, Val loss 0.664\n", "Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: Convert the active sentence to passive: 'The chef cooks the meal every day.' ### Response: The meal is prepared every day by the chef.<|endoftext|>The following is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: Convert the active sentence to passive:\n", "Ep 2 (Step 000120): Train loss 0.435, Val loss 0.672\n", - "Ep 2 (Step 000125): Train loss 0.451, Val loss 0.687\n", - "Ep 2 (Step 000130): Train loss 0.447, Val loss 0.683\n", - "Ep 2 (Step 000135): Train loss 0.405, Val loss 0.682\n", - "Ep 2 (Step 000140): Train loss 0.409, Val loss 0.681\n", - "Ep 2 (Step 000145): Train loss 0.369, Val loss 0.680\n", - "Ep 2 (Step 000150): Train loss 0.382, Val loss 0.675\n", - "Ep 2 (Step 000155): Train loss 0.413, Val loss 0.675\n", - "Ep 2 (Step 000160): Train loss 0.415, Val loss 0.683\n", + "Ep 2 (Step 000125): Train loss 0.451, Val loss 0.686\n", + "Ep 2 (Step 000130): Train loss 0.447, Val loss 0.682\n", + "Ep 2 (Step 000135): Train loss 0.404, Val loss 0.682\n", + "Ep 2 (Step 000140): Train loss 0.410, Val loss 0.681\n", + "Ep 2 (Step 000145): Train loss 0.369, Val loss 0.681\n", + "Ep 2 (Step 000150): Train loss 0.381, Val loss 0.676\n", + "Ep 2 (Step 000155): Train loss 0.412, Val loss 0.676\n", + "Ep 2 (Step 000160): Train loss 0.415, Val loss 0.684\n", "Ep 2 (Step 000165): Train loss 0.379, Val loss 0.686\n", - "Ep 2 (Step 000170): Train loss 0.323, Val loss 0.681\n", - "Ep 2 (Step 000175): Train loss 0.337, Val loss 0.669\n", - "Ep 2 (Step 000180): Train loss 0.392, Val loss 0.656\n", - "Ep 2 (Step 000185): Train loss 0.415, Val loss 0.657\n", - "Ep 2 (Step 000190): Train loss 0.340, Val loss 0.648\n", - "Ep 2 (Step 000195): Train loss 0.330, Val loss 0.634\n", - "Ep 2 (Step 000200): Train loss 0.310, Val loss 0.634\n", - "Ep 2 (Step 000205): Train loss 0.352, Val loss 0.630\n", - "Ep 2 (Step 000210): Train loss 0.367, Val loss 0.630\n", - "Ep 2 (Step 000215): Train loss 0.394, Val loss 0.635\n", - "Ep 2 (Step 000220): Train loss 0.299, Val loss 0.648\n", - "Ep 2 (Step 000225): Train loss 0.346, Val loss 0.661\n", - "Ep 2 (Step 000230): Train loss 0.292, Val loss 0.659\n", + "Ep 2 (Step 000170): Train loss 0.323, Val loss 0.682\n", + "Ep 2 (Step 000175): Train loss 0.337, Val loss 0.670\n", + "Ep 2 (Step 000180): Train loss 0.393, Val loss 0.658\n", + "Ep 2 (Step 000185): Train loss 0.416, Val loss 0.659\n", + "Ep 2 (Step 000190): Train loss 0.340, Val loss 0.650\n", + "Ep 2 (Step 000195): Train loss 0.330, Val loss 0.637\n", + "Ep 2 (Step 000200): Train loss 0.310, Val loss 0.637\n", + "Ep 2 (Step 000205): Train 
loss 0.352, Val loss 0.632\n", + "Ep 2 (Step 000210): Train loss 0.367, Val loss 0.631\n", + "Ep 2 (Step 000215): Train loss 0.396, Val loss 0.635\n", + "Ep 2 (Step 000220): Train loss 0.301, Val loss 0.649\n", + "Ep 2 (Step 000225): Train loss 0.349, Val loss 0.662\n", + "Ep 2 (Step 000230): Train loss 0.294, Val loss 0.658\n", "Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: Convert the active sentence to passive: 'The chef cooks the meal every day.' ### Response: The meal is cooked every day by the chef.<|endoftext|>The following is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: What is the capital of the United Kingdom\n", - "Training completed in 1.84 minutes.\n" + "Training completed in 80.16 minutes.\n" ] } ], @@ -1911,7 +1866,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 50, "id": "4acd368b-1403-4807-a218-9102e35bfdbb", "metadata": { "colab": { @@ -1924,7 +1879,7 @@ "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAeoAAAEiCAYAAAA21pHjAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABY5UlEQVR4nO3dd3gU1frA8e9u+qYnpDcCRAIhQKhSrCBFRIMFRRSwF4qIgvJTEfEqKqiocLFdyb0qgqggIgKhS5EeOqEnAVKA9J7snt8fCwtLCSkbNgnv53nmye7MmZn3LCHvnpkz52iUUgohhBBC1ElaawcghBBCiKuTRC2EEELUYZKohRBCiDpMErUQQghRh0miFkIIIeowSdRCCCFEHSaJWgghhKjDJFELIYQQdZgkaiGEEKIOk0QtRANy/PhxNBoNCQkJ1g5FCGEhkqiFqGM0Gk2Fy8SJE60dohDiOrK1dgBCCHOpqamm13PnzmXChAkkJiaa1rm4uFgjLCGElUiLWog6xt/f37S4u7uj0WhM7319ffnkk08IDg7GwcGBtm3bsmTJkqseS6/X8+STTxIZGUlycjIAv//+O+3atcPR0ZEmTZrwzjvvUF5ebtpHo9Hw7bffMmDAAHQ6HRERESxcuNC0PSsri8GDB+Pj44OTkxMRERHMmjXrqjH88ssvREdH4+TkhLe3Nz179qSgoMC0/dtvv6VFixY4OjoSGRnJv//9b7P9U1JSGDhwIB4eHnh5eXHfffdx/Phx0/Zhw4YRGxvL1KlTCQgIwNvbm+HDh1NWVlbpz1yIOk0JIeqsWbNmKXd3d9P7Tz75RLm5uamffvpJHThwQI0bN07Z2dmpgwcPKqWUOnbsmALUjh07VHFxsRowYICKiYlRGRkZSiml1q5dq9zc3FRcXJw6cuSIWrZsmWrcuLGaOHGi6RyACg4OVrNnz1aHDh1So0aNUi4uLurs2bNKKaWGDx+u2rZtq7Zs2aKOHTum4uPj1cKFC68Y/6lTp5Stra365JNP1LFjx9SuXbvUjBkzVF5enlJKqR9++EEFBASoX3/9VR09elT9+uuvysvLS8XFxSmllCotLVUtWrRQTz75pNq1a5fat2+fevTRR1Xz5s1VSUmJUkqpoUOHKjc3N/X888+r/fv3qz/++EPpdDr19ddfW/YfQwgrkUQtRB12aaIODAxU7733nlmZjh07qhdffFEpdSFR//3336pHjx6qe/fuKjs721S2R48e6v333zfb//vvv1cBAQGm94B68803Te/z8/MVoP766y+llFL9+/dXTzzxRKXi37ZtmwLU8ePHr7i9adOmavbs2Wbr3n33XdWlSxdTbM2bN1cGg8G0vaSkRDk5OamlS5cqpYyJOiwsTJWXl5vKPPTQQ+rhhx+uVIxC1HVyj1qIeiI3N5dTp07RrVs3s/XdunVj586dZusGDRpEcHAwK1euxMnJybR+586drF+/nvfee8+0Tq/XU1xcTGFhITqdDoDWrVubtjs7O+Pm5kZGRgYAL7zwAg888ADbt2+nV69exMbG0rVr1yvG3KZNG3r06EF0dDS9e/emV69ePPjgg3h6elJQUMCRI0d46qmneOaZZ0z7lJeX4+7ubor38OHDuLq6mh23uLiYI0eOmN5HRUVhY2Njeh8QEMDu3bsr+DSFqD8kUQvRAN1999388MMPbNy4kTvvvNO0Pj8/n3feeYf777//sn0cHR1Nr+3s7My2aTQaDAYDAH379iUpKYnFixcTHx9Pjx49GD58OFOnTr3smDY2NsTHx7NhwwaWLVvGF198wRtvvMGmTZtMXwq++eYbOnfufNl+5+Nt3749P/7442XH9vHxqVS8QtR3kqiFqCfc3NwIDAxk/fr13Hbbbab169evp1OnTmZlX3jhBVq1asW9997Ln3/+aSrfrl07EhMTadasWY1i8fHxYejQoQwdOpRbbrmFsWPHXjFRgzFpduvWjW7dujFhwgTCwsKYP38+Y8aMITAwkKNHjzJ48OAr7tuuXTvmzp2Lr68vbm5uNYpZiPpKErUQ9cjYsWN5++23adq0KW3btmXWrFkkJCRcscU5cuRI9Ho999xzD3/99Rfdu3dnwoQJ3HPPPYSGhvLggw+i1WrZuXMne/bs4V//+lelYpgwYQLt27cnKiqKkpISFi1aRIsWLa5YdtOmTaxYsYJevXrh6+vLpk2bOH36tKn8O++8w6hRo3B3d6dPnz6UlJSwdetWsrKyGDNmDIMHD2bKlCncd999TJo0ieDgYJKSkvjtt98YN24cwcHB1f8whagnJFELUY+MGjWKnJwcXnnlFTIyMmjZsiULFy4kIiLiiuVHjx6NwWDg7rvvZsmSJfTu3ZtFixYxadIkPvzwQ+zs7IiMjOTpp5+udAz29vaMHz+e48eP4+TkxC233MKcOXOuWNbNzY21a9cybdo0cnNzCQsL4+OPP6
...[remainder of the base64-encoded PNG for the previous plot output (matplotlib 3.7.1) omitted]...",
+       "image/png": "...[base64-encoded PNG for the regenerated plot output (matplotlib 3.8.2) omitted]..."
hwQG3cuFF16dJFdezY0eIY9aWutUESdRV16dJFjRo1yvzeaDSq4OBgNWXKFBtGVbHTp08rQP3+++9KKe0/hoODg5o/f765zP79+xWgNm7cqJTS/mPp9XqVlpZmLjNr1izl4eFhngf4xRdfVG3atLE414MPPqj69Oljfn+9Pq/c3FwVGRmpEhIS1G233WZO1A2tri+99JLq3r37VbebTCYVGBiopk6dal6XlZWlnJyc1HfffaeUUmrfvn0KUFu2bDGX+fXXX5VOp1MpKSlKKaX+85//KG9vb3P9L5y7RYsW5vdDhgxRd911l8X5u3btqv75z3/WrJLn3XXXXeqRRx6xWDd48GA1bNiwBlfXS5NXXapbZWKpSV2vZPPmzQpQx48fr9d1tRZp+q6CkpIStm3bRq9evczr9Ho9vXr1YuPGjTaMrGLZ2dkA+Pj4ALBt2zZKS0st6tGyZUvCw8PN9di4cSNRUVEEBASYy/Tp04ecnBz27t1rLnPxMS6UuXCM6/l5jRo1irvuuuuyeBpaXRcvXkynTp144IEH8Pf3JyYmhs8//9y8PSkpibS0NIs4PD096dq1q0V9vby86NSpk7lMr1690Ov1bNq0yVzm1ltvxdHR0aK+Bw8eJDMz01ymos+kprp168bKlSv566+/AG3Ky3Xr1pmHH21Idb1UXapbZWKxtuzsbHQ6HV5eXg2+rpUhiboKzp49i9FotPiDDhAQEEBaWpqNoqqYyWRi7NixxMXF0bZtWwDS0tJwdHQ0/ye44OJ6pKWlXbGeF7ZVVCYnJ4fCwsLr9nnNnTuX7du3M2XKlMu2NbS6Hj16lFmzZhEZGcny5ct56qmnGDNmDP/73/8s4q0ojrS0NPz9/S2229vb4+PjY5XPxFr1ffnll/nb3/5Gy5YtcXBwICYmhrFjx5pn2mpIdb1UXapbZWKxpqKiIl566SWGDh1qHs+9oda1smRSjgZu1KhR7Nmzh3Xr1tk6lFpx4sQJnn32WRISEizmU26oTCYTnTp14q233gIgJiaGPXv28MknnzBixAgbR2dd33//Pd9++y1z5syhTZs2JCYmMnbsWIKDgxtcXYWmtLSUIUOGoJRi1qxZtg6nzpAr6ipo1KgRdnZ2l/UYTk9PJzAw0EZRXd3o0aNZsmQJq1evtpgOMDAwkJKSErKysizKX1yPwMDAK9bzwraKynh4eODi4nJdPq9t27Zx+vRpOnTogL29Pfb29vz+++/MmDEDe3t7AgICGkxdAYKCgmjdurXFulatWpGcnGwRb0VxBAYGcvr0aYvtZWVlZGRkWOUzsVZ9X3jhBfNVdVRUFA899BD/+te/zC0nDamul6pLdatMLNZwIUkfP36chIQEi9nRGlpdq0oSdRU4OjrSsWNHVq5caV5nMplYuXIlsbGxNozMklKK0aNHs3DhQlatWkVERITF9o4dO+Lg4GBRj4MHD5KcnGyuR2xsLLt377b4z3HhP8+FRBEbG2txjAtlLhzjenxePXv2ZPfu3SQmJpqXTp06MWzYMPPrhlJXgLi4uMsetfvrr79o3LgxABEREQQGBlrEkZOTw6ZNmyzqm5WVxbZt28xlVq1ahclkomvXruYya9eupbS01KK+LVq0wNvb21ymos+kpgoKCtDrLf9E2dnZYTKZGlxdL1WX6laZWGrqQpI+dOgQK1aswNfX12J7Q6prtdisG1s9NXfuXOXk5KTi4+PVvn371BNPPKG8vLwsegzb2lNPPaU8PT3VmjVrVGpqqnkpKCgwl3nyySdVeHi4WrVqldq6dauKjY1VsbGx5u0XHlnq3bu3SkxMVMuWLVN+fn5XfGTphRdeUPv371cff/zxFR9Zut6f18W9vhtaXTdv3qzs7e3Vm2++qQ4dOqS+/fZbZTAY1DfffGMu8/bbbysvLy/1008/qV27dql77733io/1xMTEqE2bNql169apyMhIi0ddsrKyVEBAgHrooYfUnj171Ny5c5XBYLjsURd7e3s1bdo0tX//fvXaa69Z9fGsESNGqJCQEPPjWQsWLFCNGjVSL774YoOoa25urtqxY4fasWOHAtT777+vduzYYe7pXJfqVplYqlvXkpISdc8996jQ0FCVmJho8Tfr4h7c9aWutUESdTV89NFHKjw8XDk6OqouXbqoP//809YhWQCuuMyePdtcprCwUD399NPK29tbGQwGNWjQIJWammpxnGPHjql+/fopFxcX1ahRI/Xcc8+p0tJSizKrV69W7du3V46Ojqpp06YW57jgen9elybqhlbXn3/+WbVt21Y5OTmpli1bqs8++8xiu8lkUq+++qoKCAhQTk5OqmfPnurgwYMWZc6dO6eGDh2q3NzclIeHh3r44YdVbm6uRZmdO3eq7t27KycnJxUSEqLefvvty2L5/vvv1U033aQcHR1VmzZt1C+//GK1eubk5Khnn31WhYeHK2dnZ9W0aVM1YcIEiz/e9bmuq1evvuL/0xEjRtS5ulUmlurWNSkp6ap/s1avXl3v6lobdEpdNMyPEEIIIeoUuUcthBBC1GGSqIUQQog6TBK1EEIIUYdJohZCCCHqMEnUQgghRB0miVoIIYSowyRRV1NxcTGTJk2iuLjY1qHUuhuprnBj1Vfq2nDdSPVt6HWV56irKScnB09PT7Kzsy3GpG2IbqS6wo1VX6lrw3Uj1beh11WuqIUQQog6TBK1EEIIUYfdcPNRl5WVsWPHDgICAi6bmacqcnNzAUhJSSEnJ8da4dVJN1Jd4caqr9S14bqR6lsf62oymUhPTycmJgZ7+4pT8Q13j3rLli106dLF1mEIIYQQbN68mc6dO1dY5oa7og4ICAC0DycoKMjG0QghhLgRpaam0qVLF3NOqsgNl6gvNHcHBQURGhpq42iEEELcyCpzC1Y6kwkhhBB1mCRqIYQQog6TRC2EEELUYTfcPWohhKiI0WiktLTU1mGIes7BwQE7OzurHEsSdQ3sScnmVFYh0WFeBHg42zocIUQNKKVIS0sjKyvL1qGIBsLLy4vAwEB0Ol2NjiOJugZeX7KPzUkZzPx7DHe3C7Z1OEKIGriQpP39/TEYDDX+4ypuXEopCgoKOH36NECNHwWWRF0Dt6mtdLHbiS5VD5Kohai3jEajOUn7+vraOhzRALi4uABw+vRp/P39a9QMLp3JauCWwpU87zAf1/Sttg5FCFEDF+5JGwwGG0ciGpILv0817fMgiboGTM7e2ouCDNsGIoSwCmnuFtZkrd8nSdQ1oFx8ANAVZdo4EiGEEA2VJOoa0Ltq97IcSyRRCyEajiZNmjB9+vRKl1+zZg06na7We8zHx8fj5eVVq+eoi2yaqKdMmULnzp1xd3fH39+fgQMHcvDgwQr3iY+PR6fTWSzOzrZ5NMrBvREATiXZNjm/EOLGdunfwkuXSZMmVeu4W7Zs4Yknnqh0+W7dupGamoqnp2e1zicqZtNe37///jujRo2ic+fOlJWV8e9//5vevXuzb98+XF1dr7qfh4eHRUK31X0lZw8tURuMkqiFENdfamqq+fW8efOYOHGixd9GNzc382ulF
Eaj8ZpzHwP4+flVKQ5HR0cCAwOrtI+oPJteUS9btoyRI0fSpk0boqOjiY+PJzk5mW3btlW4n06nIzAw0LxUZpqw2uDq5Q+Au6l+TFQuhGhYLv476OnpafG38cCBA7i7u/Prr7/SsWNHnJycWLduHUeOHOHee+8lICAANzc3OnfuzIoVKyyOe2nTt06n44svvmDQoEEYDAYiIyNZvHixefulTd8XmqiXL19Oq1atcHNzo2/fvhZfLMrKyhgzZgxeXl74+vry0ksvMWLECAYOHFilz2DWrFk0a9YMR0dHWrRowddff23eppRi0qRJhIeH4+TkRHBwMGPGjDFv/89//kNkZCTOzs4EBARw//33V+nc10udukedna1dmfr4+FRYLi8vj8aNGxMWFsa9997L3r17r0d4l3Hz1hK1F7kUlhhtEoMQonYopSgoKbPJopSyWj1efvll3n77bfbv30+7du3Iy8ujf//+rFy5kh07dtC3b18GDBhAcnJyhceZPHkyQ4YMYdeuXfTv359hw4aRkXH1J14KCgqYNm0aX3/9NWvXriU5OZnnn3/evP2dd97h22+/Zfbs2axfv56cnBwWLVpUpbotXLiQZ599lueee449e/bwz3/+k4cffpjVq1cD8OOPP/LBBx/w6aefcujQIRYtWkRUVBQAW7duZcyYMbz++uscPHiQZcuWceutt1bp/NdLnRnwxGQyMXbsWOLi4mjbtu1Vy7Vo0YIvv/ySdu3akZ2dzbRp0+jWrRt79+694vzSxcXFFBcXm9/n5uZaLWaDl9Y85KorJiUnl5BGXlY7thDCtgpLjbSeuNwm5973eh8Mjtb58/z6669z5513mt/7+PgQHR1tfv/GG2+wcOFCFi9ezOjRo696nJEjRzJ06FAA3nrrLWbMmMHmzZvp27fvFcuXlpbyySef0KxZMwBGjx7N66+/bt7+0UcfMX78eAYNGgTAzJkzWbp0aZXqNm3aNEaOHMnTTz8NwLhx4/jzzz+ZNm0at99+O8nJyQQGBtKrVy8cHBwIDw+nS5cuACQnJ+Pq6srdd9+Nu7s7jRs3JiYmpkrnv17qzBX1qFGj2LNnD3Pnzq2wXGxsLMOHD6d9+/bcdtttLFiwAD8/Pz799NMrlp8yZQqenp7mpXXr1laLWefsRdn5jzA3I91qxxVCCGvp1KmTxfu8vDyef/55WrVqhZeXF25ubuzfv/+aV9Tt2rUzv3Z1dcXDw8M8ROaVGAwGc5IGbRjNC+Wzs7NJT083J00AOzs7OnbsWKW67d+/n7i4OIt1cXFx7N+/H4AHHniAwsJCmjZtyuOPP87ChQspKysD4M4776Rx48Y0bdqUhx56iG+//ZaCgoIqnf96qRNX1KNHj2bJkiWsXbv2ilfFFXFwcCAmJobDhw9fcfv48eMZN26c+X1KSor1krVOR57OHS+VTV7WGaCFdY4rhLA5Fwc79r3ex2bntpZLO+Y+//zzJCQkMG3aNJo3b46Liwv3338/JSUlFR7HwcHB4r1Op8NkMlWpvDWb9CsjLCyMgwcPsmLFChISEnj66aeZOnUqv//+O+7u7mzfvp01a9bw22+/MXHiRCZNmsSWLVvq3CNgNr2iVkoxevRoFi5cyKpVq4iIiKjyMYxGI7t3777qoOdOTk54eHiYF3d395qGbSHfzgOA4uyrf7MUQtQ/Op0Og6O9TZbafJJl/fr1jBw5kkGDBhEVFUVgYCDHjh2rtfNdiaenJwEBAWzZssW8zmg0sn379iodp1WrVqxfv95i3fr16y0uxlxcXBgwYAAzZsxgzZo1bNy4kd27dwNgb29Pr169ePfdd9m1axfHjh1j1apVNahZ7bDpFfWoUaOYM2cOP/30E+7u7qSlpQHaP+KFAc2HDx9OSEgIU6ZMAbT7LTfffDPNmzcnKyuLqVOncvz4cR577DGb1OG0cxNycnRkF0lnMiFE3RcZGcmCBQsYMGAAOp2OV199tcIr49ryzDPPMGXKFJo3b07Lli356KOPyMzMrNKXlBdeeIEhQ4YQExNDr169+Pnnn1mwYIG5F3t8fDxGo5GuXbtiMBj45ptvcHFxoXHjxixZsoSjR49y66234u3tzdKlSzGZTLRoUfdaRm2aqGfNmgVAjx49LNbPnj2bkSNHAtoNf72+/MI/MzOTxx9/nLS0NLy9venYsSMbNmyw6r3nqljQ/G2+/vM4Y5ya098mEQghROW9//77PPLII3Tr1o1GjRrx0ksvkZNz/R8xfemll0hLS2P48OHY2dnxxBNP0KdPnyrNMjVw4EA+/PBDpk2bxrPPPktERASzZ8825xQvLy/efvttxo0bh9FoJCoqip9//hlfX1+8vLxYsGABkyZNoqioiMjISL777jvatGlTSzWuPp263jcNbOzkyZOEhYVx4sSJKt8Pv5L3E/5ixspD/OPmcP5vYJQVIhRCXG9FRUUkJSURERFhs5EOb3Qmk4lWrVoxZMgQ3njjDVuHYxUV/V5VJRfVic5k9ZmPQeswkZlfs2nMhBDiRnL8+HF+++03brvtNoqLi5k5cyZJSUn8/e9/t3VodY4k6hqKyljGSscPOXyqM/D1NcsLIYQAvV5PfHw8zz//PEop2rZty4oVK2jVqpWtQ6tzJFHXkLu9iWb6VM4Wn7J1KEIIUW+EhYVd1mNbXJkk6hoyNevFg2sLKbEPZKGtgxFCCNHgSKKuIQ//cDapVjgUag/z22omLyGEEA1TnRlCtL7yNjgCUGpU5BWX2TgaIYQQDY1cUdeQi97II44rcDXmkJl7C+7ODtfeSQghhKgkSdQ1pdMzUf8l6GF35r/Bz8PWEQkhhGhApOm7puzsydNpg94XZMl430IIIaxLErUV5Ou1q+jC7DM2jkQIIaquR48ejB071vy+SZMmTJ8+vcJ9dDodixYtqvG5rXWcikyaNIn27dvX6jlqkyRqKyhy8ASgJPesjSMRQtxIBgwYQN++fa+47Y8//kCn07Fr164qH3fLli088cQTNQ3PwtWSZWpqKv369bPquRoaSdRWUOLoBUBZ3jnbBiKEuKE8+uijJCQkcPLkycu2zZ49m06dOtGuXbsqH9fPzw+DwWCNEK8pMDAQJyen63Ku+koStRUYnb21F4UZtg1ECHFDufvuu/Hz8yM+Pt5ifV5eHvPnz+fRRx/l3LlzDB06lJCQEAwGA1FRUXz33XcVHvfSpu9Dhw5x66234uzsTOvWrUlISLhsn5deeombbroJg8FA06ZNefXVVykt1eZAiI+PZ/LkyezcuROdTodOpzPHfGnT9+7du7njjjtwcXHB19eXJ554gry8PPP2kSNHMnDgQKZNm0ZQUBC+vr6MGjXKfK7KMJlMvP7664SGhuLk5ET79u1ZtmyZeXtJSQmjR48mKCgIZ2dnGjdubJ5qWSnFpEmTCA8Px8nJieDgYMaMGVPpc1eH9Pq2AuXiA4BeErUQDU9JftX3sXMCu/N/Xo1lYCwGnR4cXK59XEfXSp/G3t6e4cOHEx8fz4QJE8wDLs2fPx+j0cjQoUPJy8ujY8eOvPTSS3h4ePDLL7/w
0EMP0axZM7p06XLNc5hMJgYPHkxAQACbNm0iOzvb4n72Be7u7sTHxxMcHMzu3bt5/PHHcXd358UXX+TBBx9kz549LFu2zDxXtKen52XHyM/Pp0+fPsTGxrJlyxZOnz7NY489xujRoy2+jKxevZqgoCBWr17N4cOHefDBB2nfvj2PP/54pT63Dz/8kPfee49PP/2UmJgYvvzyS+655x727t1LZGQkM2bMYPHixXz//feEh4dz4sQJTpw4AcCPP/7IBx98wNy5c2nTpg1paWns3LmzUuetLknUVqA3+ALgUJxl20CEENb3VnDV93kgHtoM0l4f+Bnmj4TG3eHhX8rLTI+CgivcLpuUXaVTPfLII0ydOpXff//dPA/z7Nmzue+++/D09MTT05Pnn3/eXP6ZZ55h+fLlfP/995VK1CtWrODAgQMsX76c4GDts3jrrbcuu6/8yiuvmF83adKE559/nrlz5/Liiy/i4uKCm5sb9vb2BAYGXvVcc+bMoaioiK+++gpXV+0Ly8yZMxkwYADvvPMOAQEBAHh7ezNz5kzs7Oxo2bIld911FytXrqx0op42bRovvfQSf/vb3wB45513WL16NdOnT+fjjz8mOTmZyMhIunfvjk6no3HjxuZ9k5OTCQwMpFevXjg4OBAeHl6pz7EmpOnbChzctUTtWJpl20CEEDecli1b0q1bN7788ksADh8+zB9//MGjjz4KgNFo5I033iAqKgofHx/c3NxYvnw5ycnJlTr+/v37CQsLMydpgNjY2MvKzZs3j7i4OAIDA3Fzc+OVV16p9DkuPld0dLQ5SQPExcVhMpk4ePCgeV2bNm2ws7Mzvw8KCuL06co9HpuTk8OpU6eIi4uzWB8XF8f+/fsBrXk9MTGRFi1aMGbMGH777TdzuQceeIDCwkKaNm3K448/zsKFCykrq91RKeWK2gqcPBoBYCjLsXEkQgir+3c1Zsazu6hzVMsB2jF0l1wXjd1ds7gu8uijj/LMM8/w8ccfM3v2bJo1a8Ztt90GwNSpU/nwww+ZPn06UVFRuLq6MnbsWEpKSqx2/o0bNzJs2DAmT55Mnz598PT0ZO7cubz33ntWO8fFHBwsR4DU6XSYTCarHb9Dhw4kJSXx66+/smLFCoYMGUKvXr344YcfCAsL4+DBg6xYsYKEhASefvppc4vGpXFZi1xRW4HB0w8AN1MOJpOycTRCCKtydK36YnfRNZCdvbbu4vvTFR23GoYMGYJer2fOnDl89dVXPPLII+b71evXr+fee+/lH//4B9HR0TRt2pS//vqr0sdu1aoVJ06cIDU11bzuzz//tCizYcMGGjduzIQJE+jUqRORkZEcP37csrqOjhiNxmuea+fOneTnl9+/X79+PXq9nhYtWlQ65op4eHgQHBx82RSb69evp3Xr1hblHnzwQT7//HPmzZvHjz/+SEaG1g/JxcWFAQMGMGPGDNasWcPGjRvZvdt6X7wuJVfUVuDmrd1z8dblkVNUitf5iTqEEOJ6cHNz48EHH2T8+PHk5OQwcuRI87bIyEh++OEHNmzYgLe3N++//z7p6ekWSakivXr14qabbmLEiBFMnTqVnJwcJkyYYFEmMjKS5ORk5s6dS+fOnfnll19YuNBy4t8mTZqQlJREYmIioaGhuLu7X/ZY1rBhw3jttdcYMWIEkyZN4syZMzzzzDM89NBD5vvT1vDCCy/w2muv0axZM9q3b8/s2bNJTEzk22+/BeD9998nKCiImJgY9Ho98+fPJzAwEC8vL+Lj4zEajXTt2hWDwcA333yDi4uLxX1sa5Mraitw8PAjTflySvmQkW+95iQhhKisRx99lMzMTPr06WNxP/mVV16hQ4cO9OnThx49ehAYGMjAgQMrfVy9Xs/ChQspLCykS5cuPPbYY7z55psWZe655x7+9a9/MXr0aNq3b8+GDRt49dVXLcrcd9999O3bl9tvvx0/P78rPiJmMBhYvnw5GRkZdO7cmfvvv5+ePXsyc+bMqn0Y1zBmzBjGjRvHc889R1RUFMuWLWPx4sVERkYCWg/2d999l06dOtG5c2eOHTvG0qVL0ev1eHl58fnnnxMXF0e7du1YsWIFP//8M76+vlaN8WI6pdQN1VZ78uRJwsLCOHHiBKGhoVY77q3vriY5o4Afn4qlY2Mfqx1XCFH7ioqKSEpKIiIiAmdnZ1uHIxqIin6vqpKL5IraSrxdtebujPzKP3QvhBBCXIskaivxMWi9/TKl6VsIIYQVSaK2kqey3mOl43M4p6y/dmEhhBCikiRRW4mf6QzN9KmQk3rtwkIIIUQl2TRRT5kyhc6dO+Pu7o6/vz8DBw60GH3maubPn0/Lli1xdnYmKiqKpUuXXodoK7a1+RiGFL/KNocOtg5FCCFEA2LTRP37778zatQo/vzzTxISEigtLaV3794WD7tfasOGDQwdOpRHH32UHTt2MHDgQAYOHMiePXuuY+SXKwvqwGbVipSS6zM1nBDC+qw5upUQ1vp9sumAJxdPKwbaVGj+/v5s27aNW2+99Yr7fPjhh/Tt25cXXngBgDfeeIOEhARmzpzJJ598UusxX433+UFOzklnMiHqHUdHR/R6PadOncLPzw9HR0fzyF5CVJVSipKSEs6cOYNer8fRsWaDYNWpkcmys7VZY3x8rv4c8saNGxk3bpzFuj59+ljMZ2oLwWUnecjuN3TZ/kDcNcsLIeoOvV5PREQEqampnDpVjbG9hbgCg8FAeHg4en3NGq/rTKI2mUyMHTuWuLg42rZte9VyaWlplw0lFxAQQFpa2hXLFxcXU1xcbH6fm5trnYAvEZC7mzcc4tlYHAVMuGZ5IUTd4ujoSHh4OGVlZdcck1qIa7Gzs8Pe3t4qLTN1JlGPGjWKPXv2sG7dOqsed8qUKUyePNmqx7wSF09/ANxNuZQaTTjYSYd6IeobnU6Hg4NDrc2CJER11IlsMnr0aJYsWcLq1auvOZRaYGAg6enpFuvS09OvOhn5+PHjyc7ONi/79u2zWtwXM3hpidpLl0dWgYxOJoQQwjpsmqiVUowePZqFCxeyatUqIiIirrlPbGwsK1eutFiXkJBwxYnMAZycnPDw8DAv7u7uVon9UvZu2oDsPuSSWSAdyoQQQliHTZu+R40axZw5c/jpp59wd3c332f29PTExUWbu3X48OGEhIQwZcoUAJ599lluu+023nvvPe666y7mzp3L1q1b+eyzz2xWDwBctA5wBl0xmTm5EFA7XwiEEELcWGx6RT1r1iyys7Pp0aMHQUFB5mXevHnmMsnJyRYTlnfr1o05c+bw2WefER0dzQ8//MCiRYsq7IB2XTh7Yjz/ceZnnrZtLEIIIRoMm15RV2aGzTVr1ly27oEHHuCBBx6ohYhqQKcjX++BhymLwuwzto5GCCFEA1EnOpM1FIUOngCU5J61cSRCCCEaCknUVlTi6AVAWZ4kaiGEENYhidqKypy8tRcFGbYNRAghRIMhidqKlIuWqHWFkqiFEEJYhyRqK9IbtGep7YuzbBuIEEKIBkMStRXZeQZzUjUis0yGHxRCCGE
ddWas74agrMuT3L62Ja7KjodtHYwQQogGQa6orcjn/JzU+SVGikpl9h0hhBA1J4naityd7bHTa1OaycQcQgghrEGavq1In5vCIseJKFMZGfm3EOjpbOuQhBBC1HOSqK3JzokoDmHS6diYVwB42DoiIYQQ9Zwkamsy+DDVeyKb02C4NH0LIYSwArlHbU16O4769mCLaklmoXQmE0IIUXOSqK3M21Xr+Z2RX2LjSIQQQjQE0vRtZTElO7C324rdOYCbbB2OEEKIek6uqK3s5tNzed3hf/hkJto6FCGEEA2AJGorUy4+AOhlYg4hhBBWIInaynSu2sQcdkVZtg1ECCFEgyCJ2soc3LRE7VSaZdtAhBBCNAiSqK3M0d0PAJeybJRSNo5GCCFEfSeJ2soMXlqi9iSXQpmYQwghRA1VK1GfOHGCkydPmt9v3ryZsWPH8tlnn1ktsPrKyaMRAN7kyrPUQgghaqxaifrvf/87q1evBiAtLY0777yTzZs3M2HCBF5//XWrBljf6AzaPWpvXR6Z+TKMqBBCiJqpVqLes2cPXbp0AeD777+nbdu2bNiwgW+//Zb4+Hhrxlf/nH88y4s8MvKLbRyMEEKI+q5aibq0tBQnJycAVqxYwT333ANAy5YtSU1NtV509ZFBS9QOOiO52Zk2DkYIIUR9V61E3aZNGz755BP++OMPEhIS6Nu3LwCnTp3C19fXqgHWOw4uFOu0eagLss/YOBghhBD1XbUS9TvvvMOnn35Kjx49GDp0KNHR0QAsXrzY3CReGWvXrmXAgAEEBwej0+lYtGhRheXXrFmDTqe7bElLS6tONWpNob0nACU5kqiFEELUTLUm5ejRowdnz54lJycHb29v8/onnngCg8FQ6ePk5+cTHR3NI488wuDBgyu938GDB/Hw8DC/9/f3r/S+10O+cyB5JUbyCwttHYoQQoh6rlqJurCwEKWUOUkfP36chQsX0qpVK/r06VPp4/Tr149+/fpV+fz+/v54eXlVeb/rZUXsV7y2eC/9dYG2DkUIIUQ9V62m73vvvZevvvoKgKysLLp27cp7773HwIEDmTVrllUDvJL27dsTFBTEnXfeyfr16yssW1xcTE5OjnnJzc2t9fhkTmohhBDWUq1EvX37dm655RYAfvjhBwICAjh+/DhfffUVM2bMsGqAFwsKCuKTTz7hxx9/5McffyQsLIwePXqwffv2q+4zZcoUPD09zUvr1q1rLb4LfAxaopbnqIUQQtRUtZq+CwoKcHd3B+C3335j8ODB6PV6br75Zo4fP27VAC/WokULWrRoYX7frVs3jhw5wgcffMDXX399xX3Gjx/PuHHjzO9TUlJqPVk3SVnMIseZbMrtBNxaq+cSQgjRsFXrirp58+YsWrSIEydOsHz5cnr37g3A6dOnLTp5XQ9dunTh8OHDV93u5OSEh4eHebnwBaM2uatc2uuPEFKaLBNzCCGEqJFqJeqJEyfy/PPP06RJE7p06UJsbCygXV3HxMRYNcBrSUxMJCgo6Lqe81qcW/fn8ZJxzCgbSG5xma3DEUIIUY9Vq+n7/vvvp3v37qSmppqfoQbo2bMngwYNqvRx8vLyLK6Gk5KSSExMxMfHh/DwcMaPH09KSoq549r06dOJiIigTZs2FBUV8cUXX7Bq1Sp+++236lSj1jgFRLLevisFJUYy80vwcHawdUhCCCHqqWolaoDAwEACAwPNs2iFhoZWabATgK1bt3L77beb31+4lzxixAji4+NJTU0lOTnZvL2kpITnnnuOlJQUDAYD7dq1Y8WKFRbHqCu8DY4UlBSSkV9CY19XW4cjhBCinqpWojaZTPzf//0f7733Hnl5eQC4u7vz3HPPMWHCBPT6yrWo9+jRo8J7uJdO8PHiiy/y4osvVifk66u0kEH2G8iyO0tmQSdbRyOEEKIeq1ainjBhAv/97395++23iYuLA2DdunVMmjSJoqIi3nzzTasGWe8YS3g+byo4wIKc0UCArSMSQghRT1UrUf/vf//jiy++MM+aBdCuXTtCQkJ4+umnJVE7eWDEDjuMFGWdAZrbOiIhhBD1VLV6fWdkZNCyZcvL1rds2ZKMjIwaB1Xv6XQU2muPqRXlysQcQgghqq9aiTo6OpqZM2detn7mzJm0a9euxkE1BCUOXgAY887ZNhAhhBD1WrWavt99913uuusuVqxYYX6GeuPGjZw4cYKlS5daNcD6qtTZGwqTMOVLC4MQQojqq9YV9W233cZff/3FoEGDyMrKIisri8GDB7N3796rDuV5o1EuPgDoi+SKWgghRPVV+znq4ODgyzqN7dy5k//+97989tlnNQ6svtMZtERtV5Rl20CEEELUa9W6ohbXZu/mC4BTaZZtAxFCCFGvSaKuJU4efgC4lGVjNMnEHEIIIapHEnUtcfbUErU3ueQUyrzUQgghqqdK96gHDx5c4fasrKyaxNKg2LtqTd/eujwyCkrwdnW0cURCCCHqoyolak9Pz2tuHz58eI0CajDO9/r2Io+z+SXgZ+N4hBBC1EtVStSzZ8+urTgaHoMvBToXCnAmI7/E1tEIIYSop+QedW3xu4nRjX+mf8kUMgskUQshhKgeSdS1yNug3ZfOyJfOZEIIIapHEnUt8nF1AJAraiGEENUmiboWDT75DoscX8GYssPWoQghhKinJFHXoiZlx2ivP8qp40fIlA5lQgghqkESdS1y6fsak91eZUtZM35KTLF1OEIIIeohSdS1qdkdNI69j7N4Mn/bSVtHI4QQoh6SRF3L7mkfgoOdjr2ncth3KsfW4QghhKhnJFHXpoyj+BxZxKTgzYBi/rYTto5ICCFEPSOJujaVFsKipxh25gP+ZreanxJPUVJmsnVUQggh6hFJ1LUpoA30nAjAJIev8C5IYtWBdBsHJYQQoj6RRF3bYp+BprfjTAkfOcxk4eajto5ICCFEPWLTRL127VoGDBhAcHAwOp2ORYsWXXOfNWvW0KFDB5ycnGjevDnx8fG1HmeN6PUw6BPKXHxprT9ObNIMTucW2ToqIYQQ9YRNE3V+fj7R0dF8/PHHlSqflJTEXXfdxe23305iYiJjx47lscceY/ny5bUcaQ25B2I/6BMARtotY9tv39k4ICGEEPVFlaa5tLZ+/frRr1+/Spf/5JNPiIiI4L333gOgVatWrFu3jg8++IA+ffrUVpjWcVNvDjT+By2Pf0PsnomoO/ui8wiydVRCCCHquHp1j3rjxo306tXLYl2fPn3YuHHjVfcpLi4mJyfHvOTm5tZ2mFcV/MA77FNN8FI55M19DEzSA1wIIUTF6lWiTktLIyAgwGJdQEAAOTk5FBYWXnGfKVOm4OnpaV5at259PUK9Ig83NxY1nUyBcsL91DrYMMNmsQghhKgf6lWiro7x48eTnZ1tXvbt22fTeG6L687ksuEAqFVvwMltNo1HCCFE3VavEnVgYCDp6ZbPIaenp+Ph4YGLi8sV93FycsLDw8O8uLu7X49Qryq2qS/r3PqxxNgVnakMfnwUSqUXuBBCiCurV4k6NjaWlStXWqxLSEggNjbWRh
FVnV6v475OYfy79DGSHZpCz1fBwVnbqJRtgxNCCFHn2DRR5+XlkZiYSGJiIqA9fpWYmEhycjKgNVsPHz7cXP7JJ5/k6NGjvPjiixw4cID//Oc/fP/99/zrX/+yRfjV9kDHUHJwpUfe66SE9i/f8MMjMHcYpO2xXXBCCCHqFJsm6q1btxITE0NMTAwA48aNIyYmhokTtWE3U1NTzUkbICIigl9++YWEhASio6N57733+OKLL+r+o1mXCPMxcHNTH0xKz4IL018W5cCBX+DAEtDpygvnnILiPNsEKoQQwuZ0St1Y7a0nT54kLCyMEydOEBoaarM4ftx2kufm76Sxr4E1z/dAB5C2G46uhm5jypP1D4/CvkUQGAWhXSCsC4R2Bq9wy4QuhBCi3qhKLrLpgCc3sn5Rgby2eC/HzxWwOSmDrk19IaidtlygFJz9C0xlcGqHtmz+VNvmFghhncuTd2AUOLrapjJCCCFqjSRqGzE42nNXVBDztp7gvd/+YlCHEBr7Gmjs60qQhzN6vU67Yv7nWsg+ASc2w8kt2s+0XZCXBvt/1hYAdNAoEgLbQefHoHH96WAnhBDi6iRR29CQzqHM23qCzccy2Hwsw7ze0U5PqI8LjX20xB0T7sWAdvehj7pfK1BaqF1dX0jeJ7dqifvsX9rSZmD5SZL+gD9nQWQv6PTI9a2gEEKIGpNEbUMdG/swY2gM249ncuxcPsnnCjiRWUCJ0cTRM/kcPZMPnCF+A3y/9QTTHogmyNMFHFygcTdtuSA3XbvSTt2p3cO+IPlPOPiL1ix+IVGbjFoP84A2EBwDQe3Bze96Vl0IIUQlSWeyOsZoUpzKKuT4uQKOZ+Rz5HQ+czYfp6jUhKeLA28NiuKudlWYzCN9HxxdA41u0q6qAU4fgP90tSznFQ7h55N/4zjwbSad1YQQopZUJRdJoq4HjpzJ41/zEtl1MhuAwR1CmHxPG9ydHap3wNx02PMDnEqE1EQ4ewi45NfA1U9L2heSd0Ab0NvVpBpCCCHOk0RdgfqYqAFKjSZmrDzEx6sPY1IQ6u3CBw+2p3MTnxof+8jJU3icTcQvYzsc36Dd8zYWWxZyC4Dn/yp///1wSNkBd70HN/XW1p0+AInfgE8z8GmqXZW7B4O+Xg2AJ4QQtU4ez2qAHOz0PNe7Bbfd5Me/vk/kREYhD366kad6NGNsr5twsKtaMswrLmNx4inmbklm18lsHO31TLz7IYaNnIDOWAIp2yF5g5a4kzcBlzSD56ZBdrJlQk/ZBhs+sixn7wzeEVrSdvMHJw9w9gBnT3DyBIM3NLeculQIIUQ5uaKuh3KLSpn88z5+OD+qWdsQD+6JDqZtiCdtgj3xdLlyk7hSip0ns5m7OZnFO09RUGK8rEz/qECmDG5neQxjGRScA/eLphg9e0gbTc0nAgznr+pPbNGa1DOOwrkjkHVcewa8Iq5+8MLh8veLntYeR+sxvryznFJyv1wI0aDIFXUD5+7swLQHormjpT//XribPSk57EnJMW9v7GugbbAnbUI8iArxpImvK6sPnmbOpmQOpOWayzVt5MrfuoQxKCaURTtSeGfZAZbuTmPXyWxm/r0D7cO8tIJ29pZJGrRnti8V1llbLjCWaVfd545qybswA4qytQRffP6n0yWzmR1bpyX4W18sX7fzO8oSJpPm1ATXsHZ4N2kPAa3Br6XWA14IIRowuaKu507nFPHD9pPsPpnN7pRsTmYWVlje0V5P/7aBDO0STpcIH3QXXakmnsjime+2cyKjEHu9jhf7tuCx7k21wVeul5TtcHo/tLwLXLwASJ77HOEHvrisqNLp0fk005K2fxvtp3uQdoVvaKQ1sQshRB0knckq0NAS9aWyCkq0K+xTWuLem5LNsXMFtAhwP3/1HIKXwfGq+2cXlvLvBbv5ZXcqAD1a+PHeA9H4ujldrypY+PrP40xbvIWm6iQ3u6URWHSUmzhBC30yProKJisJ7waP/Fr+/ru/a83n/aeBx/nH25L/hPQ92n1zRzdwcgN7l4qb2R0M2heCCwoyQG+v7S+d5oQQlSRN3zcwL4Mj3SMb0T2ykXldSZkJR/vKJRFPFwdm/j2Gbpt9mfzzPtYcPEP/GX8w/cEYYpv51lbYlzGaFG8t3c9/1yUBLjTtcDv/GhxFTlEpS3ae4u3tJzmVcpxW+mRa6E7Q1v4kHVxOE2ifh0NxZvl9c9DucR9art0v7/t2+fr9P8PGmVULLKQjPL6q/P2nt2r31B9fDSEdtHV7F8H2r7T7994R5T+9m4CjoZqfiBDiRiWJ+gZQ2SR9gU6nY1jXxnQI92b0nO0cOZPP0M//pH2YF/d1DGVAu6AKr8prqqCkjDHfJbJifzoAL/RpwdM9mqHT6Wjk5sTIuAhGxkVw5EwMi3aksHBHCp9nFkIx6HXwj5sbM65nM7wuHFCZ4L4vtA5xruVfYPBvBS3vhpI8KM7VphMtq/jWAZ5hlu/LirSf9s7l607tgCMr4cgV9ncL1HrA+7XQ7rH7tYBGLcA9UDrMCSGuSJq+RYUKSsp4/ed9zN92EqNJ+1VxtNPTq7U/g2NCua2FX5UfDatIek4Rj/5vC3tScnC01/PeA9EMiA6ucB+TSbEtOZP//pHEsr1pAHgZHHjuzpsY2iUceyvGdxmlwFgCeofypu/0fZCyFTKSIDOp/GdR9tWPEx4Ljywrf7/7B61Jvkn3hnkVXpgFRVnaFxx7Z61ToJ1jw/2yYjKBqRSMpeU/L7w2Gcs/AwcX7XVD/RyEmdyjroAk6uo5nVvE4sRT/Lg9hf2p5T3MG7k5ck90CIM7hNAm2MOic1pV7TuVw6P/20JqdhE+ro58PrwjHRtXbUCXDUfOMnnxPg6ma73bWwa689qANte12f6qCjK0pH3uEJw5eH45oCXxtvdpV/2g/VF/w1drCRh3oPye+srXtQTu2ki7J+5gOP/H3VD+R97Rtfynkyd4hUH4zeUxFGWfv59+HUaZyzwO6XshK1nryX/xzyt9adHpIexmy74Fs/tD/hl48But9QFg2/9g82fnk9n537cLr82/fxe9Vgo8Q7RjXPDDo3DusDZgT2gnbd3xjbBrrvb5OLqe/ywNFX9Wju7Q7oHy9wmvaePt3z6h/AmIXfNhwWOV/NDOs3eGf6eWf/lb94E2kmCH4dC8p7auJF8bz8A9qGF+mWvg5B61sDp/d2ceu6Upj93SlH2ncliw/SSLEk9xNq+YL9cn8eX6JJr5uXJ3u2AGRAfR3N/92gc9L7uglBX705n40x7yS4w083Nl9sguhPtW/Y9Pt2aN+GVMd+ZsTua93/7iQFouQz//k/5Rgfy7fytCvW34B83goy2hHS3XlxZpf3QvKCuEprdrCcpw0ReMrBPnE93xyp+z6e0wfFH5+w+itEfjxiRq984B1k2HxG/B3klLEHZO2ms7R7B31H5aLA7abYOYf5QfN/5uyD4Jw38C78baui2fXz4AzsXsnaGsGPPwtcp0eZlzhyEv/Xy58/JOa50Aq+LSLwZnDkL6bigu/9LJ6X2wLb5qxzX4WibqlG1w7A/ts
7mQqK+W6PX251sR9NotlIvHHFAmy86Jx9bB4RWWgwOd2ARfD9JeO3uCR4iWtD2CtcU9SFvnEQQuPuc/X6WN639B5nHtM/AIsezXcaPJPKY9QuobqX25Ba3V59R2rYOpg3P5T6/G173FQ66oRbWVGU38cegsP2w/ScK+dErKyv/Qtgx05+52QdzdLpgmjVwt9ssrLmNLUgYbjpxl49Fz7D2Vw4Xfwm7NfJn1j45XHbSlKjLzS3g/4S++3XQckwInez3P9b6JJ25tVuNj20R2ipYMC85BaYG2lJz/WVpoua4kT/sDHNwB7pys7a8UTPYGlOWV+q8vw6ZZVYsltDM8tqL8/fttIOekZae6Hd/A5s+1xO0Vrv2B8wovXxxdy28dlBaev9+vs3xm/8QWbfS7oPZar3zQWiUyjmL+pTEnenXJ6wvbdNoVZ8St5cc9vlH7jII7gOv5L0OndsChFdr6kjzty1NJ3kXHugK9HQz5qvz9X8uhMFO7lXHhC0tpkXYcOwftFomdg5akL/1jbyzTvqSVFmpfTLwu6g9xZJX25aJ5r/IxDPb9BAuf1P7NK8vOEV49U/5+zoPw1zK45yPtah20kQh/fPT8F0vf8sU9UOuj4RmqLW6B2hgLdZnJpE0BnHVC6/SZlQw5KdqXvyFfl/8bfPd3bZbBu96DzudbP45vhNl9LY+nt4eJ56wSmjR9V0ASde3ILSolYV86S3al8sehM5Qay3+t2oZ40D8qiLyiMjYePceuk9nm+90XNPd3o1/bQJ65I7LKnd+uZX9qDpN/3sufR7U5v1/o04JRtze36jnqjbISreOci1f5lV5GkvbHq6xI215WpCUKY4nlUnbhdbF2hXbLuPLjJv2hJaCAtuUJVdQ+pbTWgtw07d8wNxVyUi96naK9L8oCnZ3WUjL+RPn+C5/SOj72/j9oN0Rbt28xfP/Qtc+ts9Ou3C8k7gEzypvgt3yh3fZo92D5rZdzR2DLf7UyDueXC68v3Ga48CVGb6cdX2+vtd5c+F3NOaW1NLkFaF8cAPLOwF+/al9wCs5dkpRPaf0AruTFpPJWhBWT4OAy6Da6vKXo5DZY/Mz5L09F2k+9veVIijUgiboCkqhrX3ZBKcv3pvHzrlNsOHLusqQM2uhpsU19iW3mS2xTX/w9nK9wJOtRSvHJ70d5Z9kBAF4b0JqH4yJq9ZxC1EtF2doth4IMyD+rJb/8M1riz07RkmBOimVTvZ0TvJJefoU6529a8hzwIXQcqa07vAK+ua/q8bx8onzwogX/1PoR3PkGxI3R1p3cBl/ccfX9dXZa077XRa0BHiEQ9YBNB0WSe9TCpjwNDgzpHMaQzmGcyytm2d40Vu0/jafBgW7NGhHbzJcQr+s79KdOp+OpHs0oKjXy4cpDTP55HwZHOx7sHH7tnSupzGjiQFouTf1cMTjKfy1RTzl7auMFVMRk1PoKZJ/UEndxjmVTftT9ENxeu2VxgWc4xD2rXfmWFEBp/kW3bs7fsjGVnV+M5a/1F/1fcm2k3Xu3v2gAJldfuKmv1onS2fN883zY+cQcppWv60301yBX1OKGopQ2kMrnfySh08H0B9tzb/uQah+vqNTIukNnWb43jZUHTpORX0JEI1f+O6ITTf2kCVgIcWVyRS3EVeh0Ov7dvxUFJUa+3ZTMuO934uJgR+82gZU+RnZBKasOpvPb3nR+/+vMZbOQJZ3NZ9B/NjBrWAe6NW90laMIIUTlSKIWNxydTscb97alsMTIgh0pjJ6zg/+O7MQtkX5X3ed0ThHL96WzfE8afx49R9lF992DPZ3p3SaQ3m0CaNrIjae/3cb25CyGf7mZyfe2YVjXxpWOrbjMSGZ+KYGetXfPPiWrkKQz+XgZHPBxdcTH1RFnh+vwXLUQolokUYsbkl6v493721FQYmTZ3jQe/2orXz3SlS4R5c+SJp8rYNneVJbvTWd7cqbFUzo3BbjRp00gvVsH0jbEcqCXOY/fzMs/7mJR4ikmLNzD4dN5TOjfqsIR0rILS/nmz+PMXp/E2bwS7m4XxAt9WtDY1/Wq+1TV6dwiZqw8xNzNJyy+aAC4ONjh4+qIt6sDPq5OBHo4cU90CN2a+V7f2dOEEJepE/eoP/74Y6ZOnUpaWhrR0dF89NFHdOnS5Ypl4+Pjefjhhy3WOTk5UVRUVKlzyT1qcbGSMhNPfL2VNQfP4OZkz9T72/FXeh7L9qZZjMAGEBPuRZ82gfRpE0hEo4oTqFKKj1cfZtpvfwHaLGQzhsbg4Wz5fHh6ThFfrkvi203J5BWXWWxzsNPxj5sb88wdkfi4Vn9s9dyiUj7/I4kv/jhqbqZv4mugoMRIZkGJxaN0l4po5MqwruE80DEMT0PNn20XQmjq1eNZ8+bNY/jw4XzyySd07dqV6dOnM3/+fA4ePIi/v/9l5ePj43n22Wc5ePCgeZ1OpyMgIOCyslciiVpcqqjUyMjZm83PWV9gp9dxc1Mf85VzdZqjl+5OZdz3iRSVmoj0d+PLkZ0J8zGQdDafz9Ye4cdtKZQYtYFibgpw48nbmhHp787U3w6y9i9tYAp3J3ue7NGMR+IicHGsfBN1SZmJOZuO89Gqw5zLLwEgOsyLl/u2NA+pqpQir7iMzPxSMgpKyMwvISO/hJ0ns1iwPcX85cHZQc890cE8dHMTokI9q/w5CCEs1atE3bVrVzp37szMmdp0gyaTibCwMJ555hlefvnly8rHx8czduxYsrKyqnU+SdTiSvKKy3hk9hYST2Zxa2Qj+rQJpFerALxrcCV7we6T2Tz21RbSc4rxcXWkU2NvEvanm5vSOzfx5snbmnF7C3+LZuY/Dp1hytID7Dt/ZR/o4cy43jdxX4dQ7CpojjaZFD/vOsV7v/1FcoY2alXTRq680KcFfdsGVno89vziMhYlpvD1xuMcSMs1r48O9eQfNzfmnvbBONnLvW0hqqPeJOqSkhIMBgM//PADAwcONK8fMWIEWVlZ/PTTT5ftEx8fz2OPPUZISAgmk4kOHTrw1ltv0aZNmyueo7i4mOLi8nGCU1JSaN26tSRqcRmTSWFSqlZm20rLLuLxr7ayO6V8zOmeLf15skczOje5+hjLJpNi8c5TTF1+kJQsbQrO5v5uNPF1pcxkosyoKDWaKDMpyowmSo2K7MJSc1k/dyfG9opkSKewas9yppRie3ImX288ztLdaeYWgIhGrrw1KKpuTHgiRD1TbxL1qVOnCAkJYcOGDcTGxprXv/jii/z+++9s2rTpsn02btzIoUOHaNeuHdnZ2UybNo21a9eyd+/eK1Z20qRJTJ48+bL1kqjF9VZYYmTKr/spKTPxcFwELQIrP3FJUamRrzceZ+bqw2QXXmVIxIu4Odnzz1ub8ugtEVYdfOVcXjHfbz3Jl+uTOJOrfQF+sFMY4/u3rNU5yoVoaBp0or5UaWkprVq1YujQobzxxhuXbZcratGQZBeU8tu+NMpMCnu9Dgc7PfZ2Ouz1euz1OuztdDja6Wkd7FGriTO7sJR3lx3g203JgDbd6cQBbRjQLqhGU50KcaOoNwOeNGrUCDs7
O9LT0y3Wp6enExhYuQEoHBwciImJ4fDhKw+U7uTkhJNT+XBzOTk5VywnRH3gaXDggU5h1y5Y23G4OPDmoCgGxoQwfsFuDp/OY8x3O1iw/SRv3NuWMJ+KpxMtKjWSW1SGk4MeZ3s7HOx0kuCFuAqbJmpHR0c6duzIypUrzfeoTSYTK1euZPTo0ZU6htFoZPfu3fTv378WIxVCXEnnJj78MqY7n6w5yserD7Pm4Bl6f7CW53rfxMCYEE5mFnL8XD7J5wpIzijgeEYBJzIKSMspsnguXa8DZwc7bbHX4+xgh7uzPbe39GdwTGi15iYXoqGwea/vefPmMWLECD799FO6dOnC9OnT+f777zlw4AABAQEMHz6ckJAQpkyZAsDrr7/OzTffTPPmzcnKymLq1KksWrSIbdu20bp162ueT3p9C1E7Dp/O5d8L9rD5WMa1C1dRlyY+DO4QQv92QZc9iy5EfVRvmr4BHnzwQc6cOcPEiRNJS0ujffv2LFu2zPxcdHJyMnp9eW/VzMxMHn/8cdLS0vD29qZjx45s2LChUklaCFF7mvu7M/eJm5m39QRv/3qA7MJSAjycaOzjSrivgXAfA43P/wz3MeDj6kiJ0URRqYniUiNFpSaKyowUlhgpKjVyIrOQnxJTWHf4LJuPZbD5WAavLd5L7zaBDO4Qwi3NG12xh36p0URBiXYcDxf7WpvJrKjUyPbjmZzNLznf417rdV9q1Hrjl5z/6e3qwO0t/K95O+B6KykzcTAtl10pWaTnFHNv+2CayUQydZLNr6ivN7miFqL2lZ1/ZMwaY4inZRexKDGFH7ed5NDpPPP6Rm5OBHk6U1BSRmGJkfzzyfnC42MATvZ6erUKYGBMCLfd5IejffUfvTOZFPvTcvjj0FnWHdK+PJSUma6943mtgzy0wXPaBNAy0P263pMvNZo4lJ7H7pQsdp3MZndKNgdScy0+K0d7PePuvInHukfUyiOKte3Cv72nS/1ocak3vb5tQRK1EPWTUoo9KTn8uP0ki3eeIuP8aGtXY6fXYbxoTHMvgwN3RQUxKCaEjo29r5koTSZFak4R6w9riXn94bPmEd4uCPRwpkkjAw52+vOLzuK1vZ2eI6fz2HIsg4uHVw/zcaF3a2042o6NvSscwKYm9p7K5j+rj7BifzrFV/hS4WVwICrEk1KjyTwyX3SoJ1MfiOamgMo/PmgLRaVGtidn8ueRc2w8eo7EE1nY6XVMvT+aAdHBtg7vmiRRV0AStRD1X0mZiS3HMiguM2JwtMfgaHfRT+21g52OvadyWLQjhZ92njI/9w0Q6u3CwPYh3HqTH5kFJaRlF5GaXURqdqH5Z3p2scUVJ4Crox03N/Wle2QjbolsRDM/t0pdGWfkl7BivzY16h+HzlgkzUZujoyIbcKIuCZWu/+eeCKLmasOsWL/afM6dyd7okI9iQr1pF2IF+1CPQn1dkGn06GU4odtJ3l9yT5yi8pwtNPzbK9Inri1abUHyrG2kjITu05msfHIOTYcOce25MyrtmiM6RnJ2J6RdXpCGUnUFZBELcSNx2hSbDhylkU7TrFsTyr5l8whfjV6HbQL9eKWyEZ0b96ImHDvGjWfAxSUlLH2rzP8tjedFfvTySnSxlP3cLbn4bgIHomLqPYEKFuOZTBj5SH+OHTWHP/d7YJ54tamtA7yuGbiSs8p4t8LdrPygJbg24Z4MPX+aFoFeVQrnqpQSnEmt5gTmQWcyCjkREZB+evMAlKziyxaSAD83Z2IbeZLt2a+dI3w5dtNx/n8jyQA+kcF8t4D7as0Pv71JIm6ApKohbixFZYYWbE/nUU7UtifmoOfuxNBni4EejoT7OVMkKcLQZ7OBHm54O/uVKtXlKVGE0t3p/LRqsMcPn//3c3JnpHdmvBo94hKjTWvlGLDkXPMWHmITUla87WdXsegmBCe7tGMplXsIKaUYlFiCpMW7yO7sBQHOx2jbm/Oo90jcLdyj3ujSbH+8FkWbD9Jwr70a36B8nF15OamPsQ2a0RsU1+a+ble1qLx/dYTTFi4m1Kjom2IB58P70SQp0uFxy0zmkjYl07C/nRaBLjz967hVq/rpSRRV0AStRCirjGZFL/uSeOjVYfME6AYHO0YHtuEx26JwNfVkcyCUk5mFnAys9D8MyWzkKSz+Rw9mw9oU6M+0CmMp25rVuNe5qdzi3h10R6W7y0fkMrgaIefuxON3Jzwc3PCz718CfZyoUWAOwEeTte8HXAwLZcF20+yKDGF9JzyWxJ6HQR5uhDq7UKYj4EwbwNhPuWv/d2dKtWcveVYBv/8ehsZ+SX4uTvx2UMdiQn3vmId524+wZxNyaTllE+V7OGsfVkaGRdRoylmKyKJugKSqIUQdZXJpPhtXzofrTrE3lPaKIpO9nrs9DrzXOJX4mSvZ2iXcP55W9NrXj1WhVKKJbtSeWvpflKzi669A+DubE+LAHciA9y5KcDN/Bpg8c5TLNh+0lw30Dq03RMdzMCYEKJCPK3WgnEio4DH/reVg+m5ONrrmXp/O+5tH4JSiq3HM/lq43GW7Uk1z8fu6+rIgOhg1h46w9Ez2hcfFwc7/t41nMdvaVqtaW4rIom6ApKohRB1nVKKVQdOM2PlIXaeLJ9xLcDDiVBvA6HeLoR4uZhftw3xrLUrvwvyi8s4m1fMmdzzy0WvT+cWc/xcPsfOFVx2H/lKHOx03NHSn8EdQrm9hX+N7/tfTV5xGWPn7jB3qru/Yyh7UrItpm3tEO7F8Ngm9IsKxMneDqNJ8dveND5ec5g9KTnmeO/rEMo/b2tGRCNXq8QmiboCkqiFEPWFUoqD6bk42dsR5OlslefSa1NxmZGks/kcTMvlUHoef6Xn8ld6LsczClAK2od5cV+HEO5uF2yVud4rw2hSTF1+kE9+P2Je5+yg597oEB6KbUzbEM8r7qeUYu2hs3y8+jCbz9/71+ugf1QQr9zVusZX2JKoKyCJWgghrq+iUiN5xWU0cnO6duFasmhHCnO3JNOrVQAPdAyrUs/6rccy+M+aI6w6cBp3J3vWvXxHjQdWqVdDiAohhGjYLky4YksDY0IYGBNSrX07NfHhy5E+7DuVw5Ezedd99DNJ1EIIIUQltA72oHVw7T9Tfqm6MeSMEEIIIa5IErUQQghRh0miFkIIIeowSdRCCCFEHSaJWgghhKjDbrhe3yaTNi1aamqqjSMRQghxo7qQgy7kpIrccIk6PV0bYL5Lly42jkQIIcSNLj09nfDw8ArL3HAjk5WVlbFjxw4CAgLQ62vW8p+bm0vr1q3Zt28f7u7uVopQiLpPfvfFjciav/cmk4n09HRiYmKwt6/4mvmGS9TWlJOTg6enJ9nZ2Xh4XP+H4IWwFfndFzciW/3eS2cyIYQQog6TRC2EEELUYZKoa8DJyYnXXnsNJyfbzQgjhC3I7764Ednq917uUQshhBB1mFxRCyGEEHWYJGohhBCiDpNELYQQQtRhkqhr4OOPP6ZJkyY4OzvTtWtXNm/ebOuQhKhVa9euZcCAAQQHB6PT6Vi0aJGtQxKi1k2ZMoXOnTvj7u6
Ov78/AwcO5ODBg9ft/JKoq2nevHmMGzeO1157je3btxMdHU2fPn04ffq0rUMTotbk5+cTHR3Nxx9/bOtQhLhufv/9d0aNGsWff/5JQkICpaWl9O7dm/z8/Otyfun1XU1du3alc+fOzJw5E9CGgwsLC+OZZ57h5ZdftnF0QtQ+nU7HwoULGThwoK1DEeK6OnPmDP7+/vz+++/ceuuttX4+uaKuhpKSErZt20avXr3M6/R6Pb169WLjxo02jEwIIURty87OBsDHx+e6nE8SdTWcPXsWo9FIQECAxfqAgADS0tJsFJUQQojaZjKZGDt2LHFxcbRt2/a6nPOGm+ZSCCGEqK5Ro0axZ88e1q1bd93OKYm6Gho1aoSdnZ15busL0tPTCQwMtFFUQgghatPo0aNZsmQJa9euJTQ09LqdV5q+q8HR0ZGOHTuycuVK8zqTycTKlSuJjY21YWRCCCGsTSnF6NGjWbhwIatWrSIiIuK6nl+uqKtp3LhxjBgxgk6dOtGlSxemT59Ofn4+Dz/8sK1DE6LW5OXlcfjwYfP7pKQkEhMT8fHxITw83IaRCVF7Ro0axZw5c/jpp59wd3c390Xy9PTExcWl1s8vj2fVwMyZM5k6dSppaWm0b9+eGTNm0LVrV1uHJUStWbNmDbfffvtl60eMGEF8fPz1D0iI60Cn011x/ezZsxk5cmTtn18StRBCCFF3yT1qIYQQog6TRC2EEELUYZKohRBCiDpMErUQQghRh0miFkIIIeowSdRCCCFEHSaJWgghhKjDJFELIYQQdZgkaiFErdHpdCxatMjWYQhRr0miFqKBGjlyJDqd7rKlb9++tg5NCFEFMimHEA1Y3759mT17tsU6JycnG0UjhKgOuaIWogFzcnIiMDDQYvH29ga0ZulZs2bRr18/XFxcaNq0KT/88IPF/rt37+aOO+7AxcUFX19fnnjiCfLy8izKfPnll7Rp0wYnJyeCgoIYPXq0xfazZ88yaNAgDAYDkZGRLF682LwtMzOTYcOG4efnh4uLC5GRkZd9sRDiRieJWogb2Kuvvsp9993Hzp07GTZsGH/729/Yv38/APn5+fTp0wdvb2+2bNnC/PnzWbFihUUinjVrFqNGjeKJJ55g9+7dLF68mObNm1ucY/LkyQwZMoRdu3bRv39/hg0bRkZGhvn8+/bt49dff2X//v3MmjWLRo0aXb8PQIj6QAkhGqQRI0YoOzs75erqarG8+eabSimlAPXkk09a7NO1a1f11FNPKaWU+uyzz5S3t7fKy8szb//ll1+UXq9XaWlpSimlgoOD1YQJE64aA6BeeeUV8/u8vDwFqF9//VUppdSAAQPUww8/bJ0KC9FAyT1qIRqw22+/nVmzZlms8/HxMb+OjY212BYbG0tiYiIA+/fvJzo6GldXV/P2uLg4TCYTBw8eRKfTcerUKXr27FlhDO3atTO/dnV1xcPDg9OnTwPw1FNPcd9997F9+3Z69+7NwIED6datW7XqKkRDJYlaiAbM1dX1sqZoa3FxcalUOQcHB4v3Op0Ok8kEQL9+/Th+/DhLly4lISGBnj17MmrUKKZNm2b1eIWor+QetRA3sD///POy961atQKgVatW7Ny5k/z8fPP29evXo9fradGiBe7u7jRp0oSVK1fWKAY/Pz9GjBjBN998w/Tp0/nss89qdDwhGhq5ohaiASsuLiYtLc1inb29vbnD1vz58+nUqRPdu3fn22+/ZfPmzfz3v/8FYNiwYbz22muMGDGCSZMmcebMGZ555hkeeughAgICAJg0aRJPPvkk/v7+9OvXj9zcXNavX88zzzxTqfgmTpxIx44dadOmDcXFxSxZssT8RUEIoZFELUQDtmzZMoKCgizWtWjRggMHDgBaj+y5c+fy9NNPExQUxHfffUfr1q0BMBgMLF++nGeffZbOnTtjMBi47777eP/9983HGjFiBEVFRXzwwQc8//zzNGrUiPvvv7/S8Tk6OjJ+/HiOHTuGi4sLt9xyC3PnzrVCzYVoOHRKKWXrIIQQ159Op2PhwoUMHDjQ1qEIISog96iFEEKIOkwStRBCCFGHyT1qIW5QctdLiPpBrqiFEEKIOkwStRBCCFGHSaIWQggh6jBJ1EIIIUQdJolaCCGEqMMkUQshhBB1mCRqIYQQog6TRC2EEELUYZKohRBCiDrs/wEHn5lpomvV6QAAAABJRU5ErkJggg==", "text/plain": [ "
" ] @@ -1934,7 +1889,7 @@ } ], "source": [ - "from previous_chapters import plot_losses\n", + "from previous_labs import plot_losses\n", "\n", "epochs_tensor = torch.linspace(0, num_epochs, len(train_losses))\n", "plot_losses(epochs_tensor, tokens_seen, train_losses, val_losses)" @@ -1958,7 +1913,7 @@ "id": "87b79a47-13f9-4d1f-87b1-3339bafaf2a3" }, "source": [ - "## 7.7 Extracting and saving responses" + "## 7 Extracting and saving responses" ] }, { @@ -1968,7 +1923,7 @@ "id": "5a25cc88-1758-4dd0-b8bf-c044cbf2dd49" }, "source": [ - "" + "" ] }, { @@ -1985,7 +1940,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 51, "id": "VQ2NZMbfucAc", "metadata": { "colab": { @@ -2011,7 +1966,7 @@ ">> The car is as fast as lightning.\n", "\n", "Model response:\n", - ">> The car is as fast as a bullet.\n", + ">> The car is as fast as a cheetah.\n", "-------------------------------------\n", "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n", "\n", @@ -2076,7 +2031,7 @@ "- As we can see based on the test set instructions, given responses, and the model's responses, the model performs relatively well\n", "- The answers to the first and last instructions are clearly correct\n", "- The second answer is close; the model answers with \"cumulus cloud\" instead of \"cumulonimbus\" (however, note that cumulus clouds can develop into cumulonimbus clouds, which are capable of producing thunderstorms)\n", - "- Most importantly, we can see that model evaluation is not as straightforward as in the previous chapter, where we just had to calculate the percentage of correct spam/non-spam class labels to obtain the classification accuracy\n", + "- Most importantly, we can see that model evaluation is not as straightforward as in the previous labs, where we just had to calculate the percentage of correct spam/non-spam class labels to obtain the classification accuracy\n", "- In practice, instruction-finetuned LLMs such as chatbots are evaluated via multiple approaches\n", " - short-answer and multiple choice benchmarks such as MMLU (\"Measuring Massive Multitask Language Understanding\", [https://arxiv.org/abs/2009.03300](https://arxiv.org/abs/2009.03300)), which test the knowledge of a model\n", " - human preference comparison to other LLMs, such as LMSYS chatbot arena ([https://arena.lmsys.org](https://arena.lmsys.org))\n", @@ -2088,7 +2043,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 52, "id": "-PNGKzY4snKP", "metadata": { "colab": { @@ -2102,7 +2057,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "100%|██████████| 110/110 [01:11<00:00, 1.54it/s]\n" + "python(18511) MallocStackLogging: can't turn off malloc stack logging because it was not enabled.\n", + "100%|██████████| 110/110 [11:30<00:00, 6.28s/it]\n" ] } ], @@ -2142,7 +2098,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 53, "id": "u-AvCCMTnPSE", "metadata": { "colab": { @@ -2156,7 +2112,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "{'instruction': 'Rewrite the sentence using a simile.', 'input': 'The car is very fast.', 'output': 'The car is as fast as lightning.', 'model_response': 'The car is as fast as a bullet.'}\n" + "{'instruction': 'Rewrite the sentence using a simile.', 'input': 'The car is very fast.', 'output': 'The car is as fast as lightning.', 'model_response': 'The car is as fast as a cheetah.'}\n" ] } ], @@ -2176,7 +2132,7 @@ }, { "cell_type": "code", - "execution_count": 39, + 
"execution_count": 54, "id": "8cBU0iHmVfOI", "metadata": { "colab": { @@ -2214,7 +2170,7 @@ "id": "obgoGI89dgPm" }, "source": [ - "## 7.8 Evaluating the finetuned LLM" + "## 8 Evaluating the finetuned LLM" ] }, { @@ -2224,7 +2180,7 @@ "id": "805b9d30-7336-499f-abb5-4a21be3129f5" }, "source": [ - "" + "" ] }, { @@ -2235,8 +2191,8 @@ }, "source": [ "- In this section, we automate the response evaluation of the finetuned LLM using another, larger LLM\n", - "- In particular, we use an instruction-finetuned 8-billion-parameter Llama 3 model by Meta AI that can be run locally via ollama ([https://ollama.com](https://ollama.com))\n", - "- (Alternatively, if you prefer using a more capable LLM like GPT-4 via the OpenAI API, please see the [llm-instruction-eval-openai.ipynb](../03_model-evaluation/llm-instruction-eval-openai.ipynb) notebook)" + "- In particular, we use an instruction-finetuned 3-billion-parameter Llama 3.2 model by Meta AI that can be run locally via ollama ([https://ollama.com](https://ollama.com))\n", + "- (Alternatively, if you prefer using a more capable LLM like GPT-4 via the OpenAI API, please see the [llm-instruction-eval-openai.ipynb](https://github.com/rasbt/LLMs-from-scratch/blob/bb31de89993441224e9005926dedad95395bb058/ch07/03_model-evaluation/llm-instruction-eval-openai.ipynb) notebook)" ] }, { @@ -2264,23 +2220,28 @@ "\n", "- In general, before we can use ollama from the command line, we have to either start the ollama application or run `ollama serve` in a separate terminal\n", "\n", - "\n", + "\n", + "\n", "\n", + "- With the ollama application or `ollama serve` running in a different terminal, on the command line, execute the following command to try out \n", + " - the 8-billion-parameter Llama 3 model (the model, which takes up 4.7 GB of storage space, will be automatically downloaded the first time you execute this command)\n", + " - or the 3-billion-parameter Llama 3.2 model (the model, which takes up 2 GB of storage space)\n", + " - other models available, such as mistral, gemma, qwen, phi3, etc. (https://ollama.com/search)\n", "\n", - "- With the ollama application or `ollama serve` running in a different terminal, on the command line, execute the following command to try out the 8-billion-parameter Llama 3 model (the model, which takes up 4.7 GB of storage space, will be automatically downloaded the first time you execute this command)\n", + "- We will be using the 3B LLama 3.2 model in this lab\n", "\n", "```bash\n", "# 8B model\n", - "ollama run llama3\n", + "ollama run llama3.2\n", "```\n", "\n", "\n", "The output looks like as follows\n", "\n", "```\n", - "$ ollama run llama3\n", + "$ ollama run llama3.2\n", "pulling manifest\n", - "pulling 6a0746a1ec1a... 100% ▕████████████████▏ 4.7 GB\n", + "pulling 6a0746a1ec1a... 100% ▕████████████████▏ 2 GB\n", "pulling 4fa551d4f938... 100% ▕████████████████▏  12 KB\n", "pulling 8ab4849b038c... 100% ▕████████████████▏  254 B\n", "pulling 577073ffcc6c... 
100% ▕████████████████▏  110 B\n", @@ -2291,11 +2252,9 @@ "success\n", "```\n", "\n", - "- Note that `llama3` refers to the instruction finetuned 8-billion-parameter Llama 3 model\n", - "\n", - "- Using ollama with the `\"llama3\"` model (a 8B parameter model) requires 16 GB of RAM; if this is not supported by your machine, you can try the smaller model, such as the 3.8B parameter phi-3 model by setting `model = \"phi-3\"`, which only requires 8 GB of RAM\n", + "- Note that `llama3.2` refers to the instruction finetuned 3-billion-parameter Llama 3.2 model\n", "\n", - "- Alternatively, you can also use the larger 70-billion-parameter Llama 3 model, if your machine supports it, by replacing `llama3` with `llama3:70b`\n", + "- Alternatively, you can also use the larger 70-billion-parameter Llama 3 model, if your machine supports it, by replacing `llama3.2` with `llama3:70b`\n", "\n", "- After the download has been completed, you will see a command line prompt that allows you to chat with the model\n", "\n", @@ -2333,7 +2292,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 58, "id": "026e8570-071e-48a2-aa38-64d7be35f288", "metadata": { "colab": { @@ -2372,7 +2331,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "723c9b00-e3cd-4092-83c3-6e48b5cf65b0", "metadata": { "id": "723c9b00-e3cd-4092-83c3-6e48b5cf65b0" @@ -2380,7 +2339,7 @@ "outputs": [], "source": [ "# This cell is optional; it allows you to restart the notebook\n", - "# and only run section 7.7 without rerunning any of the previous code\n", + "# and only run section 7 without rerunning any of the previous code\n", "import json\n", "from tqdm import tqdm\n", "\n", @@ -2416,7 +2375,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 60, "id": "e3ae0e10-2b28-42ce-8ea2-d9366a58088f", "metadata": { "id": "e3ae0e10-2b28-42ce-8ea2-d9366a58088f" @@ -2426,21 +2385,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "Llamas are herbivores, which means they primarily feed on plant-based foods. Their diet typically consists of:\n", - "\n", - "1. Grasses: Llamas love to graze on various types of grasses, including tall grasses, short grasses, and even weeds.\n", - "2. Hay: High-quality hay, such as alfalfa or timothy hay, is a staple in a llama's diet. They enjoy the sweet taste and texture of fresh hay.\n", - "3. Grains: Llamas may receive grains like oats, barley, or corn as part of their daily ration. However, it's essential to provide these grains in moderation, as they can be high in calories.\n", - "4. Fruits and vegetables: Llamas enjoy a variety of fruits and veggies, such as apples, carrots, sweet potatoes, and leafy greens like kale or spinach.\n", - "5. Minerals: Llamas require access to mineral supplements, which help maintain their overall health and well-being.\n", + "Llamas are herbivores, which means they primarily eat plants and plant-based foods. Their diet typically consists of:\n", "\n", - "In the wild, llamas might also eat:\n", + "1. Grasses: Llamas love to graze on various types of grasses, including tall grasses, short grasses, and grassy weeds.\n", + "2. Hay: High-quality hay, such as timothy hay or alfalfa hay, is a staple in a llama's diet. It provides essential nutrients like fiber, protein, and vitamins.\n", + "3. Grains: Llamas may also be fed grains like oats, barley, or corn, but these should not make up more than 10% of their diet.\n", + "4. 
Fruits and vegetables: Fresh fruits and vegetables, such as apples, carrots, and sweet potatoes, can be given to llamas as treats or added to their hay.\n", + "5. Browse: Llamas may also eat browse, which includes leaves, twigs, and other vegetation from trees and shrubs.\n", "\n", - "1. Leaves: They'll munch on leaves from trees and shrubs, including plants like willow, alder, and birch.\n", - "2. Bark: In some cases, llamas may eat the bark of certain trees, like aspen or cottonwood.\n", - "3. Mosses and lichens: These non-vascular plants can be a tasty snack for llamas.\n", + "It's essential to note that llamas have a unique digestive system, with a four-chambered stomach, which allows them to break down and extract nutrients from plant material more efficiently than many other animals. However, this also means they can be prone to certain health issues if their diet is not balanced or if they eat too much of the wrong foods.\n", "\n", - "In captivity, llama owners typically provide a balanced diet that includes a mix of hay, grains, and fruits/vegetables. It's essential to consult with a veterinarian or experienced llama breeder to determine the best feeding plan for your llama.\n" + "A good rule of thumb for llama owners is to provide a high-quality hay-based diet with limited amounts of grains and treats, and to ensure access to fresh water at all times.\n" ] } ], @@ -2449,7 +2404,7 @@ "\n", "def query_model(\n", " prompt,\n", - " model=\"llama3\",\n", + " model=\"llama3.2\",\n", " url=\"http://localhost:11434/api/chat\"\n", "):\n", " # Create the data payload as a dictionary\n", @@ -2491,7 +2446,7 @@ " return response_data\n", "\n", "\n", - "model = \"llama3\"\n", + "model = \"llama3.2\"\n", "result = query_model(\"What do Llamas eat?\", model)\n", "print(result)" ] @@ -2508,7 +2463,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 61, "id": "86b839d4-064d-4178-b2d7-01691b452e5e", "metadata": { "id": "86b839d4-064d-4178-b2d7-01691b452e5e" @@ -2523,20 +2478,16 @@ ">> The car is as fast as lightning.\n", "\n", "Model response:\n", - ">> The car is as fast as a bullet.\n", + ">> The car is as fast as a cheetah.\n", "\n", "Score:\n", - ">> I'd rate the model response \"The car is as fast as a bullet.\" an 85 out of 100.\n", - "\n", - "Here's why:\n", + ">> To rewrite the sentence using a simile, we need to compare the speed of the car to something else.\n", "\n", - "* The response uses a simile correctly, comparing the speed of the car to something else (in this case, a bullet).\n", - "* The comparison is relevant and makes sense, as bullets are known for their high velocity.\n", - "* The phrase \"as fast as\" is used correctly to introduce the simile.\n", + "Correct output: The car is as fast as lightning.\n", "\n", - "The only reason I wouldn't give it a perfect score is that some people might find the comparison slightly less vivid or evocative than others. For example, comparing something to lightning (as in the original response) can be more dramatic and attention-grabbing. 
However, \"as fast as a bullet\" is still a strong and effective simile that effectively conveys the idea of the car's speed.\n", + "Score: 100\n", "\n", - "Overall, I think the model did a great job!\n", + "Explanation: A simile is a figure of speech that compares two unlike things by using \"like\" or \"as.\" In this case, comparing the speed of the car to lightning is a common and effective way to convey its incredible speed.\n", "\n", "-------------------------\n", "\n", @@ -2547,15 +2498,9 @@ ">> The type of cloud associated with thunderstorms is a cumulus cloud.\n", "\n", "Score:\n", - ">> I'd score this model response as 40 out of 100.\n", + ">> I would rate the model response a 20.\n", "\n", - "Here's why:\n", - "\n", - "* The model correctly identifies that thunderstorms are related to clouds (correctly identifying the type of phenomenon).\n", - "* However, it incorrectly specifies the type of cloud associated with thunderstorms. Cumulus clouds are not typically associated with thunderstorms; cumulonimbus clouds are.\n", - "* The response lacks precision and accuracy in its description.\n", - "\n", - "Overall, while the model attempts to address the instruction, it provides an incorrect answer, which is a significant error.\n", + "The reason for this low score is that the model response contains an error in its classification of clouds. Cumulonimbus clouds are indeed associated with thunderstorms, but cumulus clouds are typically associated with fair weather and are often seen on warm, sunny days. The correct term should be \"cumulonimbus\" instead of \"cumulus\".\n", "\n", "-------------------------\n", "\n", @@ -2566,13 +2511,21 @@ ">> The author of 'Pride and Prejudice' is Jane Austen.\n", "\n", "Score:\n", - ">> I'd rate my own response as 95 out of 100. Here's why:\n", + ">> ### Input\n", + "Name the author of 'Pride and Prejudice'.\n", + "\n", + "### Output\n", + "Jane Austen.\n", "\n", - "* The response accurately answers the question by naming the author of 'Pride and Prejudice' as Jane Austen.\n", - "* The response is concise and clear, making it easy to understand.\n", - "* There are no grammatical errors or ambiguities that could lead to confusion.\n", + "### Score: 100/100\n", "\n", - "The only reason I wouldn't give myself a perfect score is that the response is slightly redundant - it's not necessary to rephrase the question in the answer. A more concise response would be simply \"Jane Austen.\"\n", + "The response is correct because it:\n", + "\n", + "1. Accurately identifies Jane Austen as the author of 'Pride and Prejudice'.\n", + "2. Is in a complete sentence, making it easy to understand.\n", + "3. 
Does not contain any grammatical errors or unnecessary words.\n", + "\n", + "Overall, the response is clear, concise, and accurate, which is why it scores 100/100.\n", "\n", "-------------------------\n" ] @@ -2602,14 +2555,13 @@ "id": "b114fd65-9cfb-45f6-ab74-8331da136bf3" }, "source": [ - "- As we can see, the Llama 3 model provides a reasonable evaluation and also gives partial points if a model is not entirely correct, as we can see based on the \"cumulus cloud\" answer\n", - "- Note that the previous prompt returns very verbose evaluations; we can tweak the prompt to generate integer responses in the range between 0 and 100 (where 100 is best) to calculate an average score for our model\n", - "- The evaluation of the 110 entries in the test set takes about 1 minute on an M3 MacBook Air laptop" + "- As we can see, the Llama 3.2 model provides a reasonable evaluation and also gives partial points if a model is not entirely correct, as we can see based on the \"cumulus cloud\" answer\n", + "- Note that the previous prompt returns very verbose evaluations; we can tweak the prompt to generate integer responses in the range between 0 and 100 (where 100 is best) to calculate an average score for our model" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "9d7bca69-97c4-47a5-9aa0-32f116fa37eb", "metadata": { "id": "9d7bca69-97c4-47a5-9aa0-32f116fa37eb" @@ -2640,7 +2592,7 @@ } ], "source": [ - "def generate_model_scores(json_data, json_key, model=\"llama3\"):\n", + "def generate_model_scores(json_data, json_key, model=\"llama3.2\"):\n", " scores = []\n", " for entry in tqdm(json_data, desc=\"Scoring entries\"):\n", " prompt = (\n", @@ -2683,7 +2635,7 @@ "id": "6408768b-2784-44f1-b48e-aed0c1eb9b94" }, "source": [ - "- For reference, the original\n", + "- For reference, for the Llama 3 8B models:\n", " - Llama 3 8B base model achieves a score of 58.51\n", " - Llama 3 8B instruct model achieves a score of 82.65" ] @@ -2695,7 +2647,7 @@ "id": "412d7325-284a-446c-92a1-5aa8acc52dee" }, "source": [ - "## 7.9 Conclusions" + "## 9 Conclusions" ] }, { @@ -2705,60 +2657,18 @@ "id": "tIbNMluCDjVM" }, "source": [ - "### 7.9.1 What's next\n", "\n", - "- This marks the final chapter of this book\n", "- We covered the major steps of the LLM development cycle: implementing an LLM architecture, pretraining an LLM, and finetuning it\n", "\n", - "\n", - "\n", - "- An optional step that is sometimes followed after instruction finetuning, as described in this chapter, is preference finetuning\n", - "- Preference finetuning process can be particularly useful for customizing a model to better align with specific user preferences; see the [../04_preference-tuning-with-dpo](../04_preference-tuning-with-dpo) folder if you are interested in this\n", + "\n", "\n", - "- This GitHub repository also contains a large selection of additional bonus material you may enjoy; for more information, please see the [Bonus Material](https://github.com/rasbt/LLMs-from-scratch?tab=readme-ov-file#bonus-material) section on this repository's README page\n", + "- An optional step that is sometimes followed after instruction finetuning, as described in this lab, is preference finetuning\n", + "- Preference finetuning process can be particularly useful for customizing a model to better align with specific user preferences; see the [../04_preference-tuning-with-dpo](https://github.com/rasbt/LLMs-from-scratch/tree/bb31de89993441224e9005926dedad95395bb058/ch07/04_preference-tuning-with-dpo) folder if you are 
interested in this\n", "\n", - "### 7.9.2 Staying up to date in a fast-moving field\n", - "\n", - "- No code in this section\n", - "\n", - "### 7.9.3 Final words\n", - "\n", - "- I hope you enjoyed this journey of implementing an LLM from the ground up and coding the pretraining and finetuning functions\n", - "- In my opinion, implementing an LLM from scratch is the best way to understand how LLMs work; I hope you gained a better understanding through this approach\n", - "- While this book serves educational purposes, you may be interested in using different and more powerful LLMs for real-world applications\n", - " - For this, you may consider popular tools such as axolotl ([https://github.com/OpenAccess-AI-Collective/axolotl](https://github.com/OpenAccess-AI-Collective/axolotl)) or LitGPT ([https://github.com/Lightning-AI/litgpt](https://github.com/Lightning-AI/litgpt)), which I help developing" - ] - }, - { - "cell_type": "markdown", - "id": "f9853e7f-a81a-4806-9728-be1690807185", - "metadata": { - "id": "f9853e7f-a81a-4806-9728-be1690807185" - }, - "source": [ - "## Summary and takeaways\n", - "\n", - "- See the [./gpt_instruction_finetuning.py](./gpt_instruction_finetuning.py) script, a self-contained script for classification finetuning\n", - "- [./ollama_evaluate.py](./ollama_evaluate.py) is a standalone script based on section 7.8 that evaluates a JSON file containing \"output\" and \"response\" keys via Ollama and Llama 3\n", - "- The [./load-finetuned-model.ipynb](./load-finetuned-model.ipynb) notebook illustrates how to load the finetuned model in a new session\n", - "- You can find the exercise solutions in [./exercise-solutions.ipynb](./exercise-solutions.ipynb)" - ] - }, - { - "cell_type": "markdown", - "id": "b9cc51ec-e06c-4470-b626-48401a037851", - "metadata": {}, - "source": [ - "## What's next?\n", + "- You can also find Bonus Material for the book we've been following in these labs here: [Bonus Material](https://github.com/rasbt/LLMs-from-scratch?tab=readme-ov-file#bonus-material)\n", "\n", - "- Congrats on completing the book; in case you are looking for additional resources, I added several bonus sections to this GitHub repository that you might find interesting\n", - "- The complete list of bonus materials can be viewed in the main README's [Bonus Material](https://github.com/rasbt/LLMs-from-scratch?tab=readme-ov-file#bonus-material) section\n", - "- To highlight a few of my favorites:\n", - " 1. [Direct Preference Optimization (DPO) for LLM Alignment (From Scratch)](../04_preference-tuning-with-dpo/dpo-from-scratch.ipynb) implements a popular preference tuning mechanism to align the model from this chapter more closely with human preferences\n", - " 2. [Llama 3.2 From Scratch (A Standalone Notebook)](../../ch05/07_gpt_to_llama/standalone-llama32.ipynb), a from-scratch implementation of Meta AI's popular Llama 3.2, including loading the official pretrained weights; if you are up to some additional experiments, you can replace the `GPTModel` model in each of the chapters with the `Llama3Model` class (it should work as a 1:1 replacement)\n", - " 3. [Converting GPT to Llama](../../ch05/07_gpt_to_llama) contains code with step-by-step guides that explain the differences between GPT-2 and the various Llama models\n", - " 4. 
[Understanding the Difference Between Embedding Layers and Linear Layers](../../ch02/03_bonus_embedding-vs-matmul/embeddings-and-linear-layers.ipynb) is a conceptual explanation illustrating that the `Embedding` layer in PyTorch, which we use at the input stage of an LLM, is mathematically equivalent to a linear layer applied to one-hot encoded data\n", - "- Happy further reading!" + "- You may be interested in using different and more powerful LLMs for real-world applications\n", + " - For this, you may consider popular tools such as axolotl ([https://github.com/OpenAccess-AI-Collective/axolotl](https://github.com/OpenAccess-AI-Collective/axolotl)) or LitGPT ([https://github.com/Lightning-AI/litgpt](https://github.com/Lightning-AI/litgpt))" ] } ], @@ -2769,7 +2679,7 @@ "provenance": [] }, "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "anabanana", "language": "python", "name": "python3" }, @@ -2783,7 +2693,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.4" + "version": "3.12.1" } }, "nbformat": 4, diff --git a/previous_labs.py b/previous_labs.py index 1cc5219..5a60bcf 100644 --- a/previous_labs.py +++ b/previous_labs.py @@ -1,3 +1,5 @@ +import matplotlib.pyplot as plt +from matplotlib.ticker import MaxNLocator import tiktoken import torch import torch.nn as nn @@ -217,3 +219,167 @@ def generate_text_simple(model, idx, max_new_tokens, context_size): idx = torch.cat((idx, idx_next), dim=1) # (batch, n_tokens+1) return idx + + +def generate(model, idx, max_new_tokens, context_size, temperature=0.0, top_k=None, eos_id=None): + + # For-loop is the same as before: Get logits, and only focus on last time step + for _ in range(max_new_tokens): + idx_cond = idx[:, -context_size:] + with torch.no_grad(): + logits = model(idx_cond) + logits = logits[:, -1, :] + + # New: Filter logits with top_k sampling + if top_k is not None: + # Keep only top_k values + top_logits, _ = torch.topk(logits, top_k) + min_val = top_logits[:, -1] + logits = torch.where(logits < min_val, torch.tensor(float('-inf')).to(logits.device), logits) + + # New: Apply temperature scaling + if temperature > 0.0: + logits = logits / temperature + + # Apply softmax to get probabilities + probs = torch.softmax(logits, dim=-1) # (batch_size, context_len) + + # Sample from the distribution + idx_next = torch.multinomial(probs, num_samples=1) # (batch_size, 1) + + # Otherwise same as before: get idx of the vocab entry with the highest logits value + else: + idx_next = torch.argmax(logits, dim=-1, keepdim=True) # (batch_size, 1) + + if idx_next == eos_id: # Stop generating early if end-of-sequence token is encountered and eos_id is specified + break + + # Same as before: append sampled index to the running sequence + idx = torch.cat((idx, idx_next), dim=1) # (batch_size, num_tokens+1) + + return idx + + +def text_to_token_ids(text, tokenizer): + encoded = tokenizer.encode(text, allowed_special={"<|endoftext|>"}) + encoded_tensor = torch.tensor(encoded).unsqueeze(0) # add batch dimension + return encoded_tensor + + +def token_ids_to_text(token_ids, tokenizer): + flat = token_ids.squeeze(0) # remove batch dimension + return tokenizer.decode(flat.tolist()) + + +def generate_and_print_sample(model, tokenizer, device, start_context): + model.eval() + context_size = model.pos_emb.weight.shape[0] + encoded = text_to_token_ids(start_context, tokenizer).to(device) + with torch.no_grad(): + token_ids = generate_text_simple( + model=model, idx=encoded, + 
max_new_tokens=50, context_size=context_size + ) + decoded_text = token_ids_to_text(token_ids, tokenizer) + print(decoded_text.replace("\n", " ")) # Compact print format + model.train() + + +def assign(left, right): + if left.shape != right.shape: + raise ValueError(f"Shape mismatch. Left: {left.shape}, Right: {right.shape}") + return torch.nn.Parameter(torch.tensor(right)) + + +def calc_loss_batch(input_batch, target_batch, model, device): + input_batch, target_batch = input_batch.to(device), target_batch.to(device) + logits = model(input_batch) + loss = torch.nn.functional.cross_entropy(logits.flatten(0, 1), target_batch.flatten()) + return loss + + +def calc_loss_loader(data_loader, model, device, num_batches=None): + total_loss = 0. + if len(data_loader) == 0: + return float("nan") + elif num_batches is None: + num_batches = len(data_loader) + else: + # Reduce the number of batches to match the total number of batches in the data loader + # if num_batches exceeds the number of batches in the data loader + num_batches = min(num_batches, len(data_loader)) + for i, (input_batch, target_batch) in enumerate(data_loader): + if i < num_batches: + loss = calc_loss_batch(input_batch, target_batch, model, device) + total_loss += loss.item() + else: + break + return total_loss / num_batches + + +def plot_losses(epochs_seen, tokens_seen, train_losses, val_losses): + fig, ax1 = plt.subplots(figsize=(5, 3)) + + # Plot training and validation loss against epochs + ax1.plot(epochs_seen, train_losses, label="Training loss") + ax1.plot(epochs_seen, val_losses, linestyle="-.", label="Validation loss") + ax1.set_xlabel("Epochs") + ax1.set_ylabel("Loss") + ax1.legend(loc="upper right") + ax1.xaxis.set_major_locator(MaxNLocator(integer=True)) # only show integer labels on x-axis + + # Create a second x-axis for tokens seen + ax2 = ax1.twiny() # Create a second x-axis that shares the same y-axis + ax2.plot(tokens_seen, train_losses, alpha=0) # Invisible plot for aligning ticks + ax2.set_xlabel("Tokens seen") + + fig.tight_layout() # Adjust layout to make room + plt.savefig("loss-plot.pdf") + plt.show() + + +def evaluate_model(model, train_loader, val_loader, device, eval_iter): + model.eval() + with torch.no_grad(): + train_loss = calc_loss_loader(train_loader, model, device, num_batches=eval_iter) + val_loss = calc_loss_loader(val_loader, model, device, num_batches=eval_iter) + model.train() + return train_loss, val_loss + + +def train_model_simple(model, train_loader, val_loader, optimizer, device, num_epochs, + eval_freq, eval_iter, start_context, tokenizer): + # Initialize lists to track losses and tokens seen + train_losses, val_losses, track_tokens_seen = [], [], [] + tokens_seen, global_step = 0, -1 + + # Main training loop + for epoch in range(num_epochs): + model.train() # Set model to training mode + + for input_batch, target_batch in train_loader: + optimizer.zero_grad() # Reset loss gradients from previous batch iteration + loss = calc_loss_batch(input_batch, target_batch, model, device) + loss.backward() # Calculate loss gradients + optimizer.step() # Update model weights using loss gradients + tokens_seen += input_batch.numel() + global_step += 1 + + # Optional evaluation step + if global_step % eval_freq == 0: + train_loss, val_loss = evaluate_model( + model, train_loader, val_loader, device, eval_iter) + train_losses.append(train_loss) + val_losses.append(val_loss) + track_tokens_seen.append(tokens_seen) + print(f"Ep {epoch+1} (Step {global_step:06d}): " + f"Train loss {train_loss:.3f}, Val 
loss {val_loss:.3f}") + + # Print a sample text after each epoch + generate_and_print_sample( + model, tokenizer, device, start_context + ) + + return train_losses, val_losses, track_tokens_seen + +