From ec0958f4f846236ac2703dd644f6dac4365f64b4 Mon Sep 17 00:00:00 2001
From: Javier <55246586+Psancs05@users.noreply.github.com>
Date: Wed, 20 Sep 2023 14:40:49 +0200
Subject: [PATCH 01/77] Update requirements.txt (#610)

---
 requirements.txt | 1 -
 1 file changed, 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 4ef9f5fd2d..1e95b716ec 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -30,4 +30,3 @@ scipy
 scikit-learn==1.2.2
 pynvml
 art
-wandb

From 5d931cc042a8fc0e6b848dd9f78032ae1958b85f Mon Sep 17 00:00:00 2001
From: Maxime <672982+maximegmd@users.noreply.github.com>
Date: Thu, 21 Sep 2023 04:02:04 +0200
Subject: [PATCH 02/77] Only run tests when a change to python files is made
 (#614)

* Update tests.yml

* Update .github/workflows/tests.yml

---------

Co-authored-by: Wing Lian
---
 .github/workflows/tests.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 18ff575c10..4da10a6c35 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -4,7 +4,11 @@ on:
   push:
     branches:
       - "main"
+    paths:
+      - '**.py'
   pull_request:
+    paths:
+      - '**.py'
   workflow_dispatch:

 jobs:

From c1382e79b6eb599f7fd210fd1a621b97bfed0d51 Mon Sep 17 00:00:00 2001
From: Maxime <672982+maximegmd@users.noreply.github.com>
Date: Thu, 21 Sep 2023 04:02:16 +0200
Subject: [PATCH 03/77] Create multi-node.md (#613)

* Create multi-node.md

* Update multi-node.md

* Update multi-node.md

---
 docs/multi-node.md | 45 +++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 45 insertions(+)
 create mode 100644 docs/multi-node.md

diff --git a/docs/multi-node.md b/docs/multi-node.md
new file mode 100644
index 0000000000..6806159690
--- /dev/null
+++ b/docs/multi-node.md
@@ -0,0 +1,45 @@
+# Multi Node
+
+You will need to create a configuration for accelerate, either by running `accelerate config` and following the instructions, or by using the preset below:
+
+~/.cache/huggingface/accelerate/default_config.yaml
+```yaml
+compute_environment: LOCAL_MACHINE
+debug: false
+distributed_type: FSDP
+downcast_bf16: 'no'
+machine_rank: 0 # Set to 0 on the main machine, and increment by one on each additional machine
+main_process_ip: 10.0.0.4 # Set to the main machine's IP
+main_process_port: 5000
+main_training_function: main
+mixed_precision: bf16
+num_machines: 2 # Change to the number of machines
+num_processes: 4 # The total number of GPUs across all machines (for example: if you have 2 machines with 4 GPUs each, put 8)
+rdzv_backend: static
+same_network: true
+tpu_env: []
+tpu_use_cluster: false
+tpu_use_sudo: false
+use_cpu: false
+```
+
+Configure your model to use FSDP, for example:
+```yaml
+fsdp:
+  - full_shard
+  - auto_wrap
+fsdp_config:
+  fsdp_offload_params: true
+  fsdp_state_dict_type: FULL_STATE_DICT
+  fsdp_transformer_layer_cls_to_wrap: LlamaDecoderLayer
+```
+
+## Machine configuration
+
+On each machine you need a copy of Axolotl; we suggest checking out the same commit on every machine to ensure compatibility.
+
+You will also need to have the same configuration file for your model on each machine.
+
+On the main machine only, make sure the port you set as `main_process_port` is open for TCP and reachable by the other machines.
+
+Now launch with accelerate on each machine, as you usually would; the processes will start once accelerate has been launched on every machine (see the example command below).
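For reference, a minimal sketch of that launch step. This is not part of the committed doc: it assumes the accelerate config above is already in place on every machine, and the config path is only an illustrative example taken from this repository's `examples/` directory (any axolotl YAML config works the same way).

```bash
# Run this same command on every machine; coordination details
# (machine_rank, main_process_ip, main_process_port) come from each
# machine's accelerate config rather than from command-line flags.
accelerate launch -m axolotl.cli.train examples/llama-2/lora.yml
```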
From 2fe95cdcc102888dc559a8be13e7ccee7d56874b Mon Sep 17 00:00:00 2001 From: Maxime <672982+maximegmd@users.noreply.github.com> Date: Thu, 21 Sep 2023 15:11:34 +0200 Subject: [PATCH 04/77] fix distributed devices (#612) * fix distributed devices * Update distributed.py * Update distributed.py --- src/axolotl/utils/distributed.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/axolotl/utils/distributed.py b/src/axolotl/utils/distributed.py index 10f06538ab..9a1c689fb7 100644 --- a/src/axolotl/utils/distributed.py +++ b/src/axolotl/utils/distributed.py @@ -77,7 +77,9 @@ def gather_scalar_from_all_ranks(fn, world_size=1): # pylint: disable=invalid-n value_scalar = fn() if not is_distributed(): return [value_scalar] - value_tensor = torch.tensor(value_scalar, device=dist.get_rank()).float() + value_tensor = torch.tensor( + value_scalar, device=torch.cuda.current_device() + ).float() if not is_main_process(): dist.gather(value_tensor, dst=0) @@ -137,9 +139,13 @@ def compute_and_broadcast(fn): # pylint: disable=invalid-name """ if is_main_process(): value_scalar = fn() - value_tensor = torch.tensor(value_scalar, device=dist.get_rank()).float() + value_tensor = torch.tensor( + value_scalar, device=torch.cuda.current_device() + ).float() else: - value_tensor = torch.tensor(0.0, device=dist.get_rank()) # Placeholder tensor + value_tensor = torch.tensor( + 0.0, device=torch.cuda.current_device() + ) # Placeholder tensor # Broadcast the tensor to all processes. barrier() @@ -164,7 +170,9 @@ def gather_from_all_ranks(fn, world_size=1): # pylint: disable=invalid-name - A list of computed values from all ranks if on the gathering rank, otherwise None. """ value_scalar = fn() - value_tensor = torch.tensor(value_scalar, device=dist.get_rank()).float() + value_tensor = torch.tensor( + value_scalar, device=torch.cuda.current_device() + ).float() # Placeholder tensor for gathering results if is_main_process(): From 92512c390b86cca5c2a33d51538afb016b3416e8 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 21 Sep 2023 11:50:09 -0400 Subject: [PATCH 05/77] ignore wandb to resolve isort headaches (#619) --- .gitignore | 4 ++++ .isort.cfg | 1 + 2 files changed, 5 insertions(+) diff --git a/.gitignore b/.gitignore index 614a6676b1..f46b6808f5 100644 --- a/.gitignore +++ b/.gitignore @@ -161,3 +161,7 @@ cython_debug/ # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
.idea/ + +# WandB +# wandb creates a folder to store logs for training runs +wandb diff --git a/.isort.cfg b/.isort.cfg index b9fb3f3e8c..79067a7c91 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -1,2 +1,3 @@ [settings] profile=black +known_third_party=wandb From 196ff1181ebd7dcb6c14d592627ed81c8b434a69 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 21 Sep 2023 15:20:31 -0400 Subject: [PATCH 06/77] skip the gpu memory checks if the device is set to 'auto' (#609) * skip the gpu memory checks if the device is set to 'auto' * skip gpu mem logging if cpu too * don't worry about log_gpu_memory_usage since it calls another annotated fn * rename decorator internal --- src/axolotl/utils/bench.py | 30 +++++++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/src/axolotl/utils/bench.py b/src/axolotl/utils/bench.py index b460b2ba7c..685be526f0 100644 --- a/src/axolotl/utils/bench.py +++ b/src/axolotl/utils/bench.py @@ -1,14 +1,40 @@ """Benchmarking and measurement utilities""" +import functools import pynvml import torch from pynvml.nvml import NVMLError +def check_cuda_device(default_value): + """ + wraps a function and returns the default value instead of running the + wrapped function if cuda isn't available or the device is auto + :param default_value: + :return: + """ + + def deco(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + device = kwargs.get("device", args[0] if args else None) + + if not torch.cuda.is_available() or device == "auto" or device == "cpu": + return default_value + + return func(*args, **kwargs) + + return wrapper + + return deco + + +@check_cuda_device(0.0) def gpu_memory_usage(device=0): return torch.cuda.memory_allocated(device) / 1024.0**3 +@check_cuda_device((0.0, 0.0, 0.0)) def gpu_memory_usage_all(device=0): usage = torch.cuda.memory_allocated(device) / 1024.0**3 reserved = torch.cuda.memory_reserved(device) / 1024.0**3 @@ -16,6 +42,7 @@ def gpu_memory_usage_all(device=0): return usage, reserved - usage, max(0, smi - reserved) +@check_cuda_device(0.0) def gpu_memory_usage_smi(device=0): if isinstance(device, torch.device): device = device.index @@ -31,9 +58,6 @@ def gpu_memory_usage_smi(device=0): def log_gpu_memory_usage(log, msg, device): - if not torch.cuda.is_available() or device == "auto": - return (0, 0, 0) - usage, cache, misc = gpu_memory_usage_all(device) extras = [] if cache > 0: From e85d2eb06b7a5ad613f2f989296ec0ebd3f22335 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 21 Sep 2023 20:36:30 -0400 Subject: [PATCH 07/77] let MAX_JOBS use the default since we're not resource constrained on our self-hosted runners (#427) --- docker/Dockerfile-base | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docker/Dockerfile-base b/docker/Dockerfile-base index 643a3bfb62..d1c02e69d8 100644 --- a/docker/Dockerfile-base +++ b/docker/Dockerfile-base @@ -46,6 +46,8 @@ FROM base-builder AS bnb-builder WORKDIR /workspace ARG CUDA="118" ENV CUDA=$CUDA +ARG MAX_JOBS="-1" +ENV MAX_JOBS=$MAX_JOBS RUN git clone https://github.com/TimDettmers/bitsandbytes.git && \ cd bitsandbytes && \ @@ -60,8 +62,7 @@ ENV TORCH_CUDA_ARCH_LIST=$TORCH_CUDA_ARCH_LIST # recompile apex RUN python3 -m pip uninstall -y apex RUN git clone https://github.com/NVIDIA/apex -# `MAX_JOBS=1` disables parallel building to avoid cpu memory OOM when building image on GitHub Action (standard) runners -RUN cd apex && MAX_JOBS=1 python3 -m pip install -v --disable-pip-version-check --no-cache-dir --no-build-isolation --config-settings 
"--build-option=--cpp_ext" --config-settings "--build-option=--cuda_ext" ./ +RUN cd apex && python3 -m pip install -v --disable-pip-version-check --no-cache-dir --no-build-isolation --config-settings "--build-option=--cpp_ext" --config-settings "--build-option=--cuda_ext" ./ RUN mkdir -p /workspace/builds COPY --from=bnb-builder /workspace/bitsandbytes /workspace/builds/bitsandbytes From 2844eb22b63fecfec8ba98ee3f6fc5ac2940bb5a Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 21 Sep 2023 21:51:09 -0400 Subject: [PATCH 08/77] run eval on the first step to get a baseline (#617) * run eval on the first step to get a baseline * wandb kleeps getting moved around by pre-commit ... --- src/axolotl/utils/callbacks.py | 23 +++++++++++++++++++++++ src/axolotl/utils/trainer.py | 2 ++ 2 files changed, 25 insertions(+) diff --git a/src/axolotl/utils/callbacks.py b/src/axolotl/utils/callbacks.py index bf8c4145bd..236caeea46 100644 --- a/src/axolotl/utils/callbacks.py +++ b/src/axolotl/utils/callbacks.py @@ -66,6 +66,29 @@ def on_save( return control +class EvalFirstStepCallback( + TrainerCallback +): # pylint: disable=too-few-public-methods disable=unused-argument + """ + Callback to trigger evals on the first step + """ + + def on_step_end( + self, + args: TrainingArguments, + state: TrainerState, + control: TrainerControl, + **kwargs, + ): + if ( + args.evaluation_strategy == IntervalStrategy.STEPS + and args.eval_steps < 1.0 + and state.global_step == 1 + ): + control.should_evaluate = True + return control + + class SaveBetterTransformerModelCallback( TrainerCallback ): # pylint: disable=too-few-public-methods diff --git a/src/axolotl/utils/trainer.py b/src/axolotl/utils/trainer.py index 2067a90069..944ac5f511 100644 --- a/src/axolotl/utils/trainer.py +++ b/src/axolotl/utils/trainer.py @@ -28,6 +28,7 @@ from axolotl.monkeypatch.relora import ReLoRACallback, ReLoRAScheduler from axolotl.utils.callbacks import ( + EvalFirstStepCallback, GPUStatsCallback, SaveBetterTransformerModelCallback, SavePeftModelCallback, @@ -704,6 +705,7 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer, total_num_ callbacks = [] callbacks.append(GPUStatsCallback(cfg)) + callbacks.append(EvalFirstStepCallback) if cfg.relora_steps: callbacks.append(ReLoRACallback(cfg)) From 97d3776ce630e70e9125828a77a4f42517d48d61 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 21 Sep 2023 21:51:25 -0400 Subject: [PATCH 09/77] split completion text to sequence_len (#616) --- src/axolotl/datasets.py | 5 ++ src/axolotl/prompt_strategies/completion.py | 78 ++++++++++++++++++++- src/axolotl/prompt_tokenizers.py | 49 ++----------- src/axolotl/prompters.py | 14 ---- 4 files changed, 87 insertions(+), 59 deletions(-) diff --git a/src/axolotl/datasets.py b/src/axolotl/datasets.py index 75d8432da8..1dccb6d526 100644 --- a/src/axolotl/datasets.py +++ b/src/axolotl/datasets.py @@ -38,10 +38,15 @@ def __init__( # pylint: disable=super-init-not-called def process(self, dataset): features = dataset.features.keys() num_proc = min(64, os.cpu_count()) + map_kwargs = {} + if self.prompt_tokenizer.supports_batched: + map_kwargs["batched"] = True + map_kwargs["batch_size"] = 100 return dataset.map( self.prompt_tokenizer.tokenize_prompt, num_proc=num_proc, remove_columns=features, + **map_kwargs, ) diff --git a/src/axolotl/prompt_strategies/completion.py b/src/axolotl/prompt_strategies/completion.py index ee5b4cb3e8..3285e667cb 100644 --- a/src/axolotl/prompt_strategies/completion.py +++ b/src/axolotl/prompt_strategies/completion.py @@ 
-1,10 +1,81 @@ """ Basic completion text """ -from typing import Any, Dict, Optional +from collections import defaultdict +from typing import Any, Dict, Generator, Optional, Tuple -from axolotl.prompt_tokenizers import CompletionPromptTokenizingStrategy -from axolotl.prompters import CompletionPrompter +from axolotl.prompt_tokenizers import InstructionPromptTokenizingStrategy + + +class CompletionPromptTokenizingStrategy(InstructionPromptTokenizingStrategy): + """ + Tokenizing strategy for Completion prompts. + """ + + _field: str = "text" + + def __init__(self, *args, max_length=None, **kwargs): + super().__init__(*args, **kwargs) + if max_length is not None: + self.max_length = max_length + + @property + def supports_batched(self): + return True + + @property + def field(self) -> str: + return self._field + + @field.setter + def field(self, new_field: str): + self._field = new_field + + def parse_instruction_fields(self, prompt) -> Tuple[str, str, str]: + return ( + prompt[self.field], + "", + "", + ) + + def tokenize_prompt(self, prompt): + res = defaultdict(lambda: []) + feature_names = list(prompt.keys()) + for row in zip(*prompt.values()): + prompt_row = dict(zip(feature_names, row)) + ( + instruction, + _, + _, + ) = self.parse_instruction_fields(prompt_row) + + full_prompt = self._build_full_prompt(instruction, None, None) + tokenized_full_prompt = self._tokenize(full_prompt) + + for key, val in tokenized_full_prompt.items(): + for i in range(0, len(val), self.sequence_len): + res[key].append(val[i : i + self.sequence_len]) + + return dict(res) + + def _build_full_prompt( + self, instruction, input, response + ): # pylint: disable=redefined-builtin + return next(iter(self.prompter.build_prompt(instruction, input, response))) + + +class CompletionPrompter: + """ + Prompter for completion + """ + + def build_prompt( + self, + instruction: str, + input=None, # pylint: disable=redefined-builtin, unused-argument + output=None, # pylint: disable=unused-argument + ) -> Generator[str, None, None]: + yield instruction def load(tokenizer, cfg, ds_cfg: Optional[Dict[str, Any]] = None): @@ -13,6 +84,7 @@ def load(tokenizer, cfg, ds_cfg: Optional[Dict[str, Any]] = None): tokenizer, cfg.train_on_inputs, cfg.sequence_len, + max_length=cfg.sequence_len * 64, ) if ds_cfg and "field" in ds_cfg: strat.field = ds_cfg["field"] diff --git a/src/axolotl/prompt_tokenizers.py b/src/axolotl/prompt_tokenizers.py index f30d0e3832..9553e3e8fe 100644 --- a/src/axolotl/prompt_tokenizers.py +++ b/src/axolotl/prompt_tokenizers.py @@ -41,11 +41,16 @@ def __init__( self.tokenizer: PreTrainedTokenizer = tokenizer self.train_on_inputs = train_on_inputs self.sequence_len = sequence_len + self.max_length = sequence_len @abc.abstractmethod def tokenize_prompt(self, prompt): pass + @property + def supports_batched(self): + return False + @functools.lru_cache(maxsize=128) def _get_user_token(self): try: @@ -77,7 +82,7 @@ def _tokenize( result = self.tokenizer( prompt, truncation=True, - max_length=self.sequence_len, + max_length=self.max_length, padding=False, return_tensors=None, ) @@ -86,7 +91,7 @@ def _tokenize( if ( len(result["input_ids"]) > 0 and result["input_ids"][-1] != self.tokenizer.eos_token_id - and len(result["input_ids"]) < self.sequence_len + and len(result["input_ids"]) < self.max_length and add_eos_token ): result["input_ids"].append(self.tokenizer.eos_token_id) @@ -247,46 +252,6 @@ def parse_instruction_fields(self, prompt) -> Tuple[str, str, str]: ) -class 
CompletionPromptTokenizingStrategy(InstructionPromptTokenizingStrategy): - """ - Tokenizing strategy for Completion prompts. - """ - - _field: str = "text" - - @property - def field(self) -> str: - return self._field - - @field.setter - def field(self, new_field: str): - self._field = new_field - - def parse_instruction_fields(self, prompt) -> Tuple[str, str, str]: - return ( - prompt[self.field], - "", - "", - ) - - def tokenize_prompt(self, prompt): - ( - instruction, - _, - _, - ) = self.parse_instruction_fields(prompt) - - full_prompt = self._build_full_prompt(instruction, None, None) - tokenized_full_prompt = self._tokenize(full_prompt) - - return tokenized_full_prompt - - def _build_full_prompt( - self, instruction, input, response - ): # pylint: disable=redefined-builtin - return next(iter(self.prompter.build_prompt(instruction, input, response))) - - class ReflectionPromptTokenizingStrategy(PromptTokenizingStrategy): """ Tokenizing strategy for Reflection prompts. diff --git a/src/axolotl/prompters.py b/src/axolotl/prompters.py index 5322a10182..d6d14c3694 100644 --- a/src/axolotl/prompters.py +++ b/src/axolotl/prompters.py @@ -135,20 +135,6 @@ def match_prompt_style(self): self.turn_no_input_format = "USER: Summarize the following article as a TL;DR.\n{instruction}\nASSISTANT:" -class CompletionPrompter: - """ - Prompter for completion - """ - - def build_prompt( - self, - instruction: str, - input=None, # pylint: disable=redefined-builtin, unused-argument - output=None, # pylint: disable=unused-argument - ) -> Generator[str, None, None]: - yield instruction - - class GPTeacherPrompter(AlpacaPrompter): """ Prompter for GPTeacher From 03e59077a008715099ea35ebf3fb0a9358f951e8 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 21 Sep 2023 21:52:12 -0400 Subject: [PATCH 10/77] misc fixes to add gptq tests (#621) * misc fixes to add gptq tests * set bf16 needed for fa2 --- src/axolotl/utils/bench.py | 6 +++- src/axolotl/utils/models.py | 35 ++++++++++++++-------- src/axolotl/utils/trainer.py | 1 + tests/e2e/test_lora_llama.py | 58 ++++++++++++++++++++++++++++++++++-- tests/e2e/test_phi.py | 14 +++++---- 5 files changed, 93 insertions(+), 21 deletions(-) diff --git a/src/axolotl/utils/bench.py b/src/axolotl/utils/bench.py index 685be526f0..40be0d9ac8 100644 --- a/src/axolotl/utils/bench.py +++ b/src/axolotl/utils/bench.py @@ -19,7 +19,11 @@ def deco(func): def wrapper(*args, **kwargs): device = kwargs.get("device", args[0] if args else None) - if not torch.cuda.is_available() or device == "auto" or device == "cpu": + if ( + not torch.cuda.is_available() + or device == "auto" + or torch.device(device).type == "cpu" + ): return default_value return func(*args, **kwargs) diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index a349776d77..543a0e1a13 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -10,6 +10,7 @@ import transformers from optimum.bettertransformer import BetterTransformer from peft import PeftConfig, prepare_model_for_kbit_training +from peft.tuners.lora import QuantLinear from transformers import ( # noqa: F401 AutoConfig, AutoModelForCausalLM, @@ -309,16 +310,26 @@ def load_model( ): config.max_sequence_length = cfg.sequence_len LOG.warning(f"increasing context length to {cfg.sequence_len}") - model = AutoModelForCausalLM.from_pretrained( - base_model, - config=config, - device_map=cfg.device_map, - load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None, - load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None, - 
torch_dtype=cfg.torch_dtype, - trust_remote_code=cfg.trust_remote_code or False, - **model_kwargs, - ) + if cfg.gptq: + model = AutoModelForCausalLM.from_pretrained( + base_model, + config=config, + device_map=cfg.device_map, + torch_dtype=cfg.torch_dtype, + trust_remote_code=cfg.trust_remote_code or False, + **model_kwargs, + ) + else: + model = AutoModelForCausalLM.from_pretrained( + base_model, + config=config, + device_map=cfg.device_map, + load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None, + load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None, + torch_dtype=cfg.torch_dtype, + trust_remote_code=cfg.trust_remote_code or False, + **model_kwargs, + ) except Exception as err: # pylint: disable=broad-exception-caught LOG.error( "Exception raised attempting to load model, retrying with AutoModelForCausalLM" @@ -466,10 +477,10 @@ def load_llama_adapter(model, cfg): def find_all_linear_names(model): - cls = (bnb.nn.Linear4bit, bnb.nn.Linear8bitLt, torch.nn.Linear) + cls = (bnb.nn.Linear4bit, bnb.nn.Linear8bitLt, torch.nn.Linear, QuantLinear) lora_module_names = set() for name, module in model.named_modules(): - if isinstance(module, cls): + if isinstance(module, cls) or "Linear" in module.__class__.__name__: names = name.split(".") lora_module_names.add(names[0] if len(names) == 1 else names[-1]) diff --git a/src/axolotl/utils/trainer.py b/src/axolotl/utils/trainer.py index 944ac5f511..a4ec1553ef 100644 --- a/src/axolotl/utils/trainer.py +++ b/src/axolotl/utils/trainer.py @@ -676,6 +676,7 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer, total_num_ (cfg.load_best_model_at_end is not False or cfg.early_stopping_patience) and cfg.val_set_size > 0 and cfg.save_steps + and cfg.eval_steps and cfg.save_steps % cfg.eval_steps == 0 ) or False, diff --git a/tests/e2e/test_lora_llama.py b/tests/e2e/test_lora_llama.py index fbca33633e..7d4b75cceb 100644 --- a/tests/e2e/test_lora_llama.py +++ b/tests/e2e/test_lora_llama.py @@ -6,6 +6,7 @@ import os import tempfile import unittest +from pathlib import Path from axolotl.cli import load_datasets from axolotl.common.cli import TrainerCliArgs @@ -24,6 +25,7 @@ class TestLoraLlama(unittest.TestCase): def test_lora(self): # pylint: disable=duplicate-code + output_dir = tempfile.mkdtemp() cfg = DictDefault( { "base_model": "JackFram/llama-68m", @@ -51,7 +53,7 @@ def test_lora(self): "num_epochs": 2, "micro_batch_size": 8, "gradient_accumulation_steps": 1, - "output_dir": tempfile.mkdtemp(), + "output_dir": output_dir, "learning_rate": 0.00001, "optimizer": "adamw_torch", "lr_scheduler": "cosine", @@ -62,9 +64,11 @@ def test_lora(self): dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args) train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta) + assert (Path(output_dir) / "adapter_model.bin").exists() def test_lora_packing(self): # pylint: disable=duplicate-code + output_dir = tempfile.mkdtemp() cfg = DictDefault( { "base_model": "JackFram/llama-68m", @@ -94,7 +98,7 @@ def test_lora_packing(self): "num_epochs": 2, "micro_batch_size": 8, "gradient_accumulation_steps": 1, - "output_dir": tempfile.mkdtemp(), + "output_dir": output_dir, "learning_rate": 0.00001, "optimizer": "adamw_torch", "lr_scheduler": "cosine", @@ -105,3 +109,53 @@ def test_lora_packing(self): dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args) train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta) + assert (Path(output_dir) / "adapter_model.bin").exists() + + def test_lora_gptq(self): + # pylint: disable=duplicate-code + output_dir = 
tempfile.mkdtemp() + cfg = DictDefault( + { + "base_model": "TheBlokeAI/jackfram_llama-68m-GPTQ", + "base_model_config": "TheBlokeAI/jackfram_llama-68m-GPTQ", + "model_type": "AutoModelForCausalLM", + "tokenizer_type": "LlamaTokenizer", + "sequence_len": 1024, + "sample_packing": True, + "flash_attention": True, + "load_in_8bit": True, + "adapter": "lora", + "gptq": True, + "gptq_disable_exllama": True, + "lora_r": 32, + "lora_alpha": 64, + "lora_dropout": 0.05, + "lora_target_linear": True, + "val_set_size": 0.1, + "special_tokens": { + "unk_token": "", + "bos_token": "", + "eos_token": "", + }, + "datasets": [ + { + "path": "mhenrichsen/alpaca_2k_test", + "type": "alpaca", + }, + ], + "num_epochs": 2, + "save_steps": 0.5, + "micro_batch_size": 8, + "gradient_accumulation_steps": 1, + "output_dir": output_dir, + "learning_rate": 0.00001, + "optimizer": "adamw_torch", + "lr_scheduler": "cosine", + } + ) + normalize_config(cfg) + cli_args = TrainerCliArgs() + dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args) + + train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta) + assert (Path(output_dir) / "adapter_model.bin").exists() diff --git a/tests/e2e/test_phi.py b/tests/e2e/test_phi.py index fb8aa5d875..a84ef0778c 100644 --- a/tests/e2e/test_phi.py +++ b/tests/e2e/test_phi.py @@ -31,9 +31,9 @@ def test_ft(self): "trust_remote_code": True, "model_type": "MixFormerSequentialForCausalLM", "tokenizer_type": "AutoTokenizer", - "sequence_len": 2048, + "sequence_len": 512, "sample_packing": False, - "load_in_8bit": True, + "load_in_8bit": False, "adapter": None, "val_set_size": 0.1, "special_tokens": { @@ -55,8 +55,9 @@ def test_ft(self): "gradient_accumulation_steps": 1, "output_dir": tempfile.mkdtemp(), "learning_rate": 0.00001, - "optimizer": "adamw_torch", + "optimizer": "adamw_bnb_8bit", "lr_scheduler": "cosine", + "bf16": True, } ) normalize_config(cfg) @@ -74,9 +75,9 @@ def test_ft_packed(self): "trust_remote_code": True, "model_type": "MixFormerSequentialForCausalLM", "tokenizer_type": "AutoTokenizer", - "sequence_len": 2048, + "sequence_len": 512, "sample_packing": True, - "load_in_8bit": True, + "load_in_8bit": False, "adapter": None, "val_set_size": 0.1, "special_tokens": { @@ -98,8 +99,9 @@ def test_ft_packed(self): "gradient_accumulation_steps": 1, "output_dir": tempfile.mkdtemp(), "learning_rate": 0.00001, - "optimizer": "adamw_torch", + "optimizer": "adamw_bnb_8bit", "lr_scheduler": "cosine", + "bf16": True, } ) normalize_config(cfg) From d5f8589021ad1bc854b82f18b582b91f593504db Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Fri, 22 Sep 2023 12:31:33 +0900 Subject: [PATCH 11/77] chore(callback): Remove old peft saving code (#510) --- src/axolotl/utils/callbacks.py | 23 ----------------------- src/axolotl/utils/trainer.py | 7 ------- 2 files changed, 30 deletions(-) diff --git a/src/axolotl/utils/callbacks.py b/src/axolotl/utils/callbacks.py index 236caeea46..9a7ebe9517 100644 --- a/src/axolotl/utils/callbacks.py +++ b/src/axolotl/utils/callbacks.py @@ -43,29 +43,6 @@ IGNORE_INDEX = -100 -class SavePeftModelCallback(TrainerCallback): # pylint: disable=too-few-public-methods - """Callback to save the PEFT adapter""" - - def on_save( - self, - args: TrainingArguments, - state: TrainerState, - control: TrainerControl, - **kwargs, - ): - checkpoint_folder = os.path.join( - args.output_dir, - f"{PREFIX_CHECKPOINT_DIR}-{state.global_step}", - ) - - peft_model_path = os.path.join(checkpoint_folder, "adapter_model") - kwargs["model"].save_pretrained( - peft_model_path, 
save_safetensors=args.save_safetensors - ) - - return control - - class EvalFirstStepCallback( TrainerCallback ): # pylint: disable=too-few-public-methods disable=unused-argument diff --git a/src/axolotl/utils/trainer.py b/src/axolotl/utils/trainer.py index a4ec1553ef..a2657e05ab 100644 --- a/src/axolotl/utils/trainer.py +++ b/src/axolotl/utils/trainer.py @@ -31,7 +31,6 @@ EvalFirstStepCallback, GPUStatsCallback, SaveBetterTransformerModelCallback, - SavePeftModelCallback, bench_eval_callback_factory, log_prediction_callback_factory, ) @@ -711,12 +710,6 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer, total_num_ if cfg.relora_steps: callbacks.append(ReLoRACallback(cfg)) - if cfg.local_rank == 0 and cfg.adapter in [ - "lora", - "qlora", - ]: # only save in rank 0 - callbacks.append(SavePeftModelCallback) - if hasattr(model, "use_bettertransformer") and model.use_bettertransformer is True: callbacks.append(SaveBetterTransformerModelCallback) From c25ba7939b35dbd9589bc694ea06c3490e8f9b54 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 22 Sep 2023 00:15:52 -0400 Subject: [PATCH 12/77] update README w deepspeed info (#605) --- README.md | 30 +++++++++++----- deepspeed/zero1.json | 76 ++++++++++++++++++++-------------------- deepspeed/zero2.json | 82 +++++++++++++++++++++++--------------------- deepspeed/zero3.json | 4 ++- requirements.txt | 1 + setup.py | 9 +++-- 6 files changed, 114 insertions(+), 88 deletions(-) diff --git a/README.md b/README.md index c9b935f53d..6e91158934 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,7 @@ Features: - [How to Use Custom Pretokenized Dataset](#how-to-use-your-custom-pretokenized-dataset) - [Config](#config) - [Train](#train) + - [Training w/ Deepspeed](#training-with-deepspeed) - [Inference](#inference) - [Merge LORA to Base](#merge-lora-to-base) - [Common Errors](#common-errors-) @@ -86,7 +87,7 @@ git clone https://github.com/OpenAccess-AI-Collective/axolotl cd axolotl pip3 install packaging -pip3 install -e .[flash-attn] +pip3 install -e .[flash-attn,deepspeed] pip3 install -U git+https://github.com/huggingface/peft.git # finetune lora @@ -121,7 +122,7 @@ accelerate launch -m axolotl.cli.inference examples/openllama-3b/lora.yml \ 3. Install axolotl along with python dependencies ```bash pip3 install packaging - pip3 install -e .[flash-attn] + pip3 install -e .[flash-attn,deepspeed] ``` - LambdaLabs @@ -157,7 +158,7 @@ accelerate launch -m axolotl.cli.inference examples/openllama-3b/lora.yml \ cd axolotl pip3 install packaging - pip3 install -e .[flash-attn] + pip3 install -e .[flash-attn,deepspeed] pip3 install protobuf==3.20.3 pip3 install -U --ignore-installed requests Pillow psutil scipy ``` @@ -715,11 +716,6 @@ fsdp_config: fsdp_transformer_layer_cls_to_wrap: LlamaDecoderLayer ``` -- llama Deepspeed -```yaml -deepspeed: deepspeed/zero3.json -``` - ##### Weights & Biases Logging - wandb options @@ -732,6 +728,24 @@ wandb_run_id: wandb_log_model: ``` +### Training with Deepspeed + +Deepspeed is an optimization suite for multi-gpu systems allowing you to train much larger models than you +might typically be able to fit into your GPU's VRAM. More information about the various optimization types +for deepspeed is available at https://huggingface.co/docs/accelerate/main/en/usage_guides/deepspeed#what-is-integrated + +We provide several default deepspeed JSON configurations for ZeRO stage 1, 2, and 3. 
+ +```shell +accelerate launch -m axolotl.cli.train examples/llama-2/config.py --deepspeed deepspeed/zero1.json +``` + +or + +```yaml +deepspeed: deepspeed/zero1.json +``` + ### Inference Pass the appropriate flag to the train command: diff --git a/deepspeed/zero1.json b/deepspeed/zero1.json index 39d648b2b5..85bc586091 100644 --- a/deepspeed/zero1.json +++ b/deepspeed/zero1.json @@ -1,39 +1,41 @@ { - "zero_optimization": { - "stage": 1, - "overlap_comm": true - }, - "bf16": { - "enabled": "auto" - }, - "fp16": { - "enabled": "auto", - "auto_cast": false, - "loss_scale": 0, - "initial_scale_power": 32, - "loss_scale_window": 1000, - "hysteresis": 2, - "min_loss_scale": 1 - }, - "optimizer": { - "type": "AdamW", - "params": { - "lr": "auto", - "betas": "auto", - "eps": "auto", - "weight_decay": "auto" - } - }, - "scheduler": { - "type": "WarmupDecayLR", - "params": { - "warmup_min_lr": "auto", - "warmup_max_lr": "auto", - "warmup_num_steps": "auto", - "total_num_steps": "auto" - } - }, - "train_batch_size": "auto", - "train_micro_batch_size_per_gpu": "auto", - "wall_clock_breakdown": false + "zero_optimization": { + "stage": 1, + "overlap_comm": true + }, + "bf16": { + "enabled": "auto" + }, + "fp16": { + "enabled": "auto", + "auto_cast": false, + "loss_scale": 0, + "initial_scale_power": 32, + "loss_scale_window": 1000, + "hysteresis": 2, + "min_loss_scale": 1 + }, + "optimizer": { + "type": "AdamW", + "params": { + "lr": "auto", + "betas": "auto", + "eps": "auto", + "weight_decay": "auto" + } + }, + "scheduler": { + "type": "WarmupDecayLR", + "params": { + "warmup_min_lr": "auto", + "warmup_max_lr": "auto", + "warmup_num_steps": "auto", + "warmup_type": "linear", + "total_num_steps": "auto" + } + }, + "gradient_accumulation_steps": "auto", + "train_batch_size": "auto", + "train_micro_batch_size_per_gpu": "auto", + "wall_clock_breakdown": false } diff --git a/deepspeed/zero2.json b/deepspeed/zero2.json index 16f2710f69..18ec4bd0e3 100644 --- a/deepspeed/zero2.json +++ b/deepspeed/zero2.json @@ -1,43 +1,45 @@ { - "zero_optimization": { - "stage": 2, - "offload_optimizer": { - "device": "cpu" - }, - "contiguous_gradients": true, - "overlap_comm": true + "zero_optimization": { + "stage": 2, + "offload_optimizer": { + "device": "cpu" }, - "bf16": { - "enabled": "auto" - }, - "fp16": { - "enabled": "auto", - "auto_cast": false, - "loss_scale": 0, - "initial_scale_power": 32, - "loss_scale_window": 1000, - "hysteresis": 2, - "min_loss_scale": 1 - }, - "optimizer": { - "type": "AdamW", - "params": { - "lr": "auto", - "betas": "auto", - "eps": "auto", - "weight_decay": "auto" - } - }, - "scheduler": { - "type": "WarmupDecayLR", - "params": { - "warmup_min_lr": "auto", - "warmup_max_lr": "auto", - "warmup_num_steps": "auto", - "total_num_steps": "auto" - } - }, - "train_batch_size": "auto", - "train_micro_batch_size_per_gpu": "auto", - "wall_clock_breakdown": false + "contiguous_gradients": true, + "overlap_comm": true + }, + "bf16": { + "enabled": "auto" + }, + "fp16": { + "enabled": "auto", + "auto_cast": false, + "loss_scale": 0, + "initial_scale_power": 32, + "loss_scale_window": 1000, + "hysteresis": 2, + "min_loss_scale": 1 + }, + "optimizer": { + "type": "AdamW", + "params": { + "lr": "auto", + "betas": "auto", + "eps": "auto", + "weight_decay": "auto" + } + }, + "scheduler": { + "type": "WarmupDecayLR", + "params": { + "warmup_min_lr": "auto", + "warmup_max_lr": "auto", + "warmup_num_steps": "auto", + "warmup_type": "linear", + "total_num_steps": "auto" + } + }, + 
"gradient_accumulation_steps": "auto", + "train_batch_size": "auto", + "train_micro_batch_size_per_gpu": "auto", + "wall_clock_breakdown": false } diff --git a/deepspeed/zero3.json b/deepspeed/zero3.json index 3f5dff3302..5da9c07e28 100644 --- a/deepspeed/zero3.json +++ b/deepspeed/zero3.json @@ -45,9 +45,11 @@ "params": { "warmup_min_lr": "auto", "warmup_max_lr": "auto", - "warmup_num_steps": "auto" + "warmup_num_steps": "auto", + "warmup_type": "linear" } }, + "gradient_accumulation_steps": "auto", "train_batch_size": "auto", "train_micro_batch_size_per_gpu": "auto", "wall_clock_breakdown": false diff --git a/requirements.txt b/requirements.txt index 1e95b716ec..5aba20b161 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,6 +7,7 @@ peft @ git+https://github.com/huggingface/peft.git transformers @ git+https://github.com/huggingface/transformers.git bitsandbytes>=0.41.1 accelerate @ git+https://github.com/huggingface/accelerate +deepspeed addict evaluate fire diff --git a/setup.py b/setup.py index fca5088da1..3109d4af1a 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,12 @@ def parse_requirements(): # Handle custom index URLs _, url = line.split() _dependency_links.append(url) - elif "flash-attn" not in line and line and line[0] != "#": + elif ( + "flash-attn" not in line + and "deepspeed" not in line + and line + and line[0] != "#" + ): # Handle standard packages _install_requires.append(line) return _install_requires, _dependency_links @@ -35,7 +40,7 @@ def parse_requirements(): "flash-attn": [ "flash-attn>=2.2.1", ], - "extras": [ + "deepspeed": [ "deepspeed", ], }, From 501958bb6fa8bbf0991080f8d6b537fd69665142 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 22 Sep 2023 16:13:26 -0400 Subject: [PATCH 13/77] create a model card with axolotl badge (#624) --- src/axolotl/train.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/axolotl/train.py b/src/axolotl/train.py index 5ed5837f21..da98600a45 100644 --- a/src/axolotl/train.py +++ b/src/axolotl/train.py @@ -9,8 +9,7 @@ from typing import Optional import torch - -# add src to the pythonpath so we don't need to pip install this +import transformers.modelcard from datasets import Dataset from optimum.bettertransformer import BetterTransformer @@ -103,6 +102,9 @@ def terminate_handler(_, __, model): signal.SIGINT, lambda signum, frame: terminate_handler(signum, frame, model) ) + badge_markdown = """[Built with Axolotl](https://github.com/OpenAccess-AI-Collective/axolotl)""" + transformers.modelcard.AUTOGENERATED_TRAINER_COMMENT += f"\n{badge_markdown}" + LOG.info("Starting trainer...") if cfg.group_by_length: LOG.info("hang tight... 
sorting dataset for group_by_length") @@ -138,4 +140,7 @@ def terminate_handler(_, __, model): model.save_pretrained(cfg.output_dir, safe_serialization=safe_serialization) + if not cfg.hub_model_id: + trainer.create_model_card(model_name=cfg.output_dir.lstrip("./")) + return model, tokenizer From a363604dcff72b965e7224f758f1a6421c6bda7a Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 22 Sep 2023 16:13:42 -0400 Subject: [PATCH 14/77] better handling and logging of empty sharegpt turns (#603) --- src/axolotl/prompt_tokenizers.py | 41 ++++++++++----- tests/fixtures/conversation.missingturns.json | 52 +++++++++++++++++++ tests/test_prompt_tokenizers.py | 26 ++++++++++ 3 files changed, 105 insertions(+), 14 deletions(-) create mode 100644 tests/fixtures/conversation.missingturns.json diff --git a/src/axolotl/prompt_tokenizers.py b/src/axolotl/prompt_tokenizers.py index 9553e3e8fe..a74da71da5 100644 --- a/src/axolotl/prompt_tokenizers.py +++ b/src/axolotl/prompt_tokenizers.py @@ -358,10 +358,12 @@ def tokenize_prompt(self, prompt): ): if isinstance(part, tuple): if part[0] == "USER:": - part = part[0] + part[1] if not user_token else part[1] + turn = part[0] + part[1] if not user_token else part[1] # this is still the user query, we should + if not part[1].strip(): + LOG.warning(f"user turn has empty text: {prompt}") res = self._tokenize( - part.strip(), + turn.strip(), add_eos_token=False, strip_bos_token=True, ) @@ -371,10 +373,12 @@ def tokenize_prompt(self, prompt): labels = [IGNORE_TOKEN_ID] * len(res["input_ids"]) elif part[0] == "ASSISTANT:": # TODO label assistant token/tokens w/ IGNORE_TOKEN_ID - part = part[0] + part[1] if not assistant_token else part[1] - # this should be the assistent response, should end with an eos token + turn = part[0] + part[1] if not assistant_token else part[1] + # this should be the assistant response, should end with an eos token + if not part[1].strip(): + LOG.warning(f"assistant turn has empty text: {prompt}") res = self._tokenize( - part.strip(), + turn.strip(), add_eos_token=True, strip_bos_token=True, ) @@ -409,22 +413,31 @@ def tokenize_prompt(self, prompt): raise InvalidDataException(str(err)) from err def _tokenize(self, prompt, add_eos_token=True, strip_bos_token=False): - result = self.tokenizer( - prompt, - truncation=True, - max_length=self.sequence_len, - padding=False, - return_tensors=None, - ) + if not prompt.strip(): + LOG.warning("Empty text requested for tokenization.") + result = BatchEncoding(data={"input_ids": [], "attention_mask": []}) + else: + result = self.tokenizer( + prompt, + truncation=True, + max_length=self.sequence_len, + padding=False, + return_tensors=None, + ) if ( - result["input_ids"][-1] != self.tokenizer.eos_token_id + len(result["input_ids"]) > 0 + and result["input_ids"][-1] != self.tokenizer.eos_token_id and len(result["input_ids"]) < self.sequence_len and add_eos_token ): result["input_ids"].append(self.tokenizer.eos_token_id) result["attention_mask"].append(1) - if result["input_ids"][0] == self.tokenizer.bos_token_id and strip_bos_token: + if ( + len(result["input_ids"]) > 0 + and result["input_ids"][0] == self.tokenizer.bos_token_id + and strip_bos_token + ): result["input_ids"] = result["input_ids"][1:] result["attention_mask"] = result["attention_mask"][1:] diff --git a/tests/fixtures/conversation.missingturns.json b/tests/fixtures/conversation.missingturns.json new file mode 100644 index 0000000000..b5b962d859 --- /dev/null +++ b/tests/fixtures/conversation.missingturns.json @@ -0,0 +1,52 @@ +{ + 
"conversations": [ + { + "from": "human", + "value": "how could i implement a minesweeper algorithm that utilises algebraic topology to solve boards?" + }, + { + "from": "gpt", + "value": "" + }, + { + "from": "human", + "value": "could you give me the gist of how it could though?" + }, + { + "from": "gpt", + "value": "Sure. The basic idea would be to use algebraic topology to analyze the structure of the Minesweeper game board and determine the locations of mines based on certain topological invariants. This could involve representing the game board as a topological space and using topological invariants such as homotopy groups and Betti numbers to infer information about the location of mines on the board. However, as I mentioned before, implementing this would require a deep understanding of both algebraic topology and the game of Minesweeper, so it's not something that can be explained in a simple answer." + }, + { + "from": "human", + "value": "give me a specific example" + }, + { + "from": "gpt", + "value": "As a specific example, suppose we have a Minesweeper game board with the following configuration:\n```\n 1 1 1 1 1\n 1 ? ? ? 1\n 1 ? ? ? 1\n 1 ? ? ? 1\n 1 1 1 1 1\n```\nIn this case, the game board can be represented as a topological space, where each square on the board corresponds to a point in the space. The mines on the board can be thought of as \"holes\" in the space, which are points that are not part of the space. Using algebraic topology, we can analyze the structure of this space and determine the locations of the mines based on certain topological invariants.\n\nFor instance, one way to do this is to compute the homotopy groups of the space. Homotopy groups are a way of describing the holes in a space, and they can be used to infer information about the locations of the mines on the Minesweeper board. In the case of the board above, we can compute the first homotopy group of the space, which describes the number and \"shape\" of the holes in the space. In this case, the first homotopy group would tell us that there are four holes in the space, arranged in a square shape.\n\nBased on this information, we can infer that the mines in the game are located in the four corners of the board, as shown below:\n```\n 1 1 1 1 1\n 1 M ? ? 1\n 1 ? ? ? 1\n 1 ? ? M 1\n 1 1 1 1 1\n```\nOf course, this is just one example of how algebraic topology could be used to solve Minesweeper. There are many other possible approaches, and the specific details of how to implement them would depend on the specific game board and the topological invariants being used. As I mentioned before, implementing a solution like this would require a deep understanding of both algebraic topology and the game of Minesweeper." + }, + { + "from": "human", + "value": "assuming i was someone with a deep understanding of both, could you explain the same concept to me with that assumption?" + }, + { + "from": "gpt", + "value": "Sure. Assuming you have a deep understanding of both algebraic topology and the game of Minesweeper, here is how you might use algebraic topology to solve a Minesweeper game board:\n\n1. Represent the Minesweeper game board as a topological space. Each square on the board corresponds to a point in the space, and the mines on the board can be thought of as \"holes\" in the space, which are points that are not part of the space.\n2. Use topological invariants to infer information about the locations of the mines on the board. 
There are many possible topological invariants that could be used for this, such as homotopy groups, Betti numbers, or persistent homology.\n3. Use the information inferred from the topological invariants to determine the locations of the mines on the board. For example, if the first homotopy group of the space indicates that there are four holes in the space, arranged in a square shape, then we can infer that the mines are located in the four corners of the board.\n4. Use this information to solve the Minesweeper game. Once you know the locations of the mines, you can use this information to avoid them and clear the board.\n\nOf course, this is just one possible approach to using algebraic topology to solve Minesweeper. There may be other ways to do it, depending on the specific game board and the topological invariants being used. However, this should give you a general idea of how algebraic topology can be used to solve Minesweeper." + }, + { + "from": "human", + "value": "how would i do it with persistent homology or any of the other topological invariants?" + }, + { + "from": "gpt", + "value": "To use persistent homology or any of the other topological invariants to solve a Minesweeper game board, you would follow the same basic steps as described above, but with some differences in the specific details.\n\nFor instance, here is how you might use persistent homology to solve a Minesweeper game board:\n\n1. Represent the Minesweeper game board as a topological space. Each square on the board corresponds to a point in the space, and the mines on the board can be thought of as \"holes\" in the space, which are points that are not part of the space.\n2. Compute the persistent homology of the space. Persistent homology is a way of describing the holes in a space over time, and it can be used to infer information about the locations of the mines on the Minesweeper board.\n3. Use the information from the persistent homology to determine the locations of the mines on the board. For example, if the persistent homology indicates that there are four holes in the space, arranged in a square shape, then we can infer that the mines are located in the four corners of the board.\n4. Use this information to solve the Minesweeper game. Once you know the locations of the mines, you can use this information to avoid them and clear the board.\n\nOf course, this is just one possible approach to using persistent homology to solve Minesweeper. There may be other ways to do it, depending on the specific game board and the specific details of the persistent homology computation. However, this should give you a general idea of how persistent homology can be used to solve Minesweeper." + }, + { + "from": "human", + "value": "how could i implement this in python?" + }, + { + "from": "gpt", + "value": "To implement a Minesweeper solver that uses algebraic topology in Python, you would need to do the following:\n\n1. Install a Python package for algebraic topology. There are several options available, such as Gudhi, Dionysus, or scikit-tda. These packages provide algorithms and data structures for computing topological invariants, such as homotopy groups, Betti numbers, or persistent homology.\n2. Represent the Minesweeper game board as a topological space. This could be done using a data structure provided by the algebraic topology package, such as a simplicial complex or a Vietoris-Rips complex. 
Each square on the board would correspond to a point in the space, and the mines on the board would be represented as \"holes\" in the space.\n3. Compute the topological invariants of the space. This could be done using algorithms provided by the algebraic topology package. For example, you could compute the persistent homology of the space using the persistent homology algorithm.\n4. Use the information from the topological invariants to determine the locations of the mines on the board. This could be done using a combination of mathematical reasoning and programming. For example, if the persistent homology indicates that there are four holes in the space, arranged in a square shape, then you could use a piece of code to infer that the mines are located in the four corners of the board.\n5. Use this information to solve the Minesweeper game. Once you know the locations of the mines, you could use a piece of code to automatically click on the safe squares and clear the board.\n\nOf course, this is just one possible approach to implementing a Minesweeper solver that uses algebraic topology in Python. There may be other ways to do it, depending on the specific details of the implementation. However, this should give you a general idea of how to get started with such a project." + } + ] +} diff --git a/tests/test_prompt_tokenizers.py b/tests/test_prompt_tokenizers.py index 1dd511f6b8..a455d8b513 100644 --- a/tests/test_prompt_tokenizers.py +++ b/tests/test_prompt_tokenizers.py @@ -3,7 +3,9 @@ import logging import unittest from pathlib import Path +from typing import Optional +import pytest from transformers import AutoTokenizer, LlamaTokenizer from axolotl.prompt_strategies.alpaca_chat import NoSystemPrompter @@ -29,6 +31,12 @@ class TestPromptTokenizationStrategies(unittest.TestCase): Test class for prompt tokenization strategies. 
""" + _caplog: Optional[pytest.LogCaptureFixture] = None + + @pytest.fixture(autouse=True) + def inject_fixtures(self, caplog): + self._caplog = caplog + def setUp(self) -> None: # pylint: disable=duplicate-code self.tokenizer = AutoTokenizer.from_pretrained("huggyllama/llama-7b") @@ -64,6 +72,24 @@ def test_sharegpt_integration(self): self.assertEqual(len(example[fields]), len(tokenized_conversation[fields])) self.assertEqual(example[fields], tokenized_conversation[fields]) + def test_sharegpt_warnings_integration(self): + with open( + Path(__file__).parent / "fixtures/conversation.missingturns.json", + encoding="utf-8", + ) as fin: + data = fin.read() + conversation = json.loads(data) + prompter = ShareGPTPrompter("chat") + strat = ShareGPTPromptTokenizingStrategy( + prompter, + self.tokenizer, + False, + 2048, + ) + with self._caplog.at_level(logging.WARNING): + strat.tokenize_prompt(conversation) + assert "assistant turn has empty text" in self._caplog.records[1].message + def test_no_sys_prompt(self): """ tests the interface between the user and assistant parts From 923eb913047824897d7f58722c8940ddbd82c847 Mon Sep 17 00:00:00 2001 From: Maxime <672982+maximegmd@users.noreply.github.com> Date: Fri, 22 Sep 2023 22:17:50 +0200 Subject: [PATCH 15/77] tweak: improve base builder for smaller layers (#500) --- docker/Dockerfile-base | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/docker/Dockerfile-base b/docker/Dockerfile-base index d1c02e69d8..5d1e2c2b89 100644 --- a/docker/Dockerfile-base +++ b/docker/Dockerfile-base @@ -13,16 +13,14 @@ ARG CUDA="118" ENV PYTHON_VERSION=$PYTHON_VERSION -RUN apt-get update -RUN apt-get install -y wget git build-essential ninja-build git-lfs libaio-dev && rm -rf /var/lib/apt/lists/* - -RUN wget \ +RUN apt-get update \ + && apt-get install -y wget git build-essential ninja-build git-lfs libaio-dev && rm -rf /var/lib/apt/lists/* + && wget \ https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh \ && mkdir /root/.conda \ && bash Miniconda3-latest-Linux-x86_64.sh -b \ - && rm -f Miniconda3-latest-Linux-x86_64.sh - -RUN conda create -n "py${PYTHON_VERSION}" python="${PYTHON_VERSION}" + && rm -f Miniconda3-latest-Linux-x86_64.sh \ + && conda create -n "py${PYTHON_VERSION}" python="${PYTHON_VERSION}" ENV PATH="/root/miniconda3/envs/py${PYTHON_VERSION}/bin:${PATH}" From 67b98886306aba93232cb8766f95126566c23a9d Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Sat, 23 Sep 2023 13:11:27 +0900 Subject: [PATCH 16/77] Feat(doc): Add eval_sample_packing to doc (#625) --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 6e91158934..6ec81eed3c 100644 --- a/README.md +++ b/README.md @@ -493,6 +493,8 @@ pad_to_sequence_len: max_packed_sequence_len: 1024 # use efficient multi-packing with block diagonal attention and per sequence position_ids. Recommend set to 'true' sample_packing: +# set to 'false' if getting errors during eval with sample_packing on. +eval_sample_packing: # you can set these packing optimizations AFTER starting a training at least once. # The trainer will provide recommended values for these values. 
sample_packing_eff_est: From 4fecbfe5e1cb8649d1c8cf741aec897a8677389c Mon Sep 17 00:00:00 2001 From: mhenrichsen Date: Sun, 24 Sep 2023 18:52:53 +0200 Subject: [PATCH 17/77] default model changed --- examples/llama-2/lora.yml | 4 ++-- examples/llama-2/qlora.yml | 4 ++-- examples/llama-2/relora.yml | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/llama-2/lora.yml b/examples/llama-2/lora.yml index 4fdcb04092..3134648fdb 100644 --- a/examples/llama-2/lora.yml +++ b/examples/llama-2/lora.yml @@ -1,5 +1,5 @@ -base_model: meta-llama/Llama-2-7b-hf -base_model_config: meta-llama/Llama-2-7b-hf +base_model: NousResearch/Llama-2-7b-hf +base_model_config: NousResearch/Llama-2-7b-hf model_type: LlamaForCausalLM tokenizer_type: LlamaTokenizer is_llama_derived_model: true diff --git a/examples/llama-2/qlora.yml b/examples/llama-2/qlora.yml index ef20d9fbe3..54255320cc 100644 --- a/examples/llama-2/qlora.yml +++ b/examples/llama-2/qlora.yml @@ -1,5 +1,5 @@ -base_model: meta-llama/Llama-2-7b-hf -base_model_config: meta-llama/Llama-2-7b-hf +base_model: NousResearch/Llama-2-7b-hf +base_model_config: NousResearch/Llama-2-7b-hf model_type: LlamaForCausalLM tokenizer_type: LlamaTokenizer is_llama_derived_model: true diff --git a/examples/llama-2/relora.yml b/examples/llama-2/relora.yml index b59a7da04c..2b1fad55d8 100644 --- a/examples/llama-2/relora.yml +++ b/examples/llama-2/relora.yml @@ -1,5 +1,5 @@ -base_model: meta-llama/Llama-2-7b-hf -base_model_config: meta-llama/Llama-2-7b-hf +base_model: NousResearch/Llama-2-7b-hf +base_model_config: NousResearch/Llama-2-7b-hf model_type: LlamaForCausalLM tokenizer_type: LlamaTokenizer is_llama_derived_model: true From cfbce020e97986a785a7371e18c5e7f7c58bae5a Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Mon, 25 Sep 2023 13:48:18 +0900 Subject: [PATCH 18/77] Fix: Fail bf16 check when running on cpu during merge (#631) --- src/axolotl/utils/config.py | 2 +- tests/test_validation.py | 23 +++++++++++++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/src/axolotl/utils/config.py b/src/axolotl/utils/config.py index 1c0487ff8e..cb18380cb7 100644 --- a/src/axolotl/utils/config.py +++ b/src/axolotl/utils/config.py @@ -94,7 +94,7 @@ def validate_config(cfg): if not cfg.bf16 and not cfg.bfloat16: LOG.info("bf16 support detected, but not enabled for this configuration.") else: - if cfg.bf16 or cfg.bfloat16: + if not cfg.merge_lora and (cfg.bf16 or cfg.bfloat16): raise ValueError( "bf16 requested, but AMP is not supported on this GPU. Requires Ampere series or above." ) diff --git a/tests/test_validation.py b/tests/test_validation.py index f250e5cb47..d7935c1a54 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -351,3 +351,26 @@ def test_packing(self): regex_exp = r".*set only one of max_packed_sequence_len \(deprecated soon\) or sample_packing.*" with pytest.raises(ValueError, match=regex_exp): validate_config(cfg) + + def test_merge_lora_no_bf16_fail(self): + """ + This is assumed to be run on a CPU machine, so bf16 is not supported. 
+ """ + + cfg = DictDefault( + { + "bf16": True, + } + ) + + with pytest.raises(ValueError, match=r".*AMP is not supported on this GPU*"): + validate_config(cfg) + + cfg = DictDefault( + { + "bf16": True, + "merge_lora": True, + } + ) + + validate_config(cfg) From 5e5296a77cf185418d9d8f8f4e4b01fdf394d0ce Mon Sep 17 00:00:00 2001 From: Fernando Tarin Morales Date: Tue, 26 Sep 2023 00:50:14 +0900 Subject: [PATCH 19/77] Added quotes to the pip install -e command to fix an incompatibility with shells that do glob expansion like zsh (#632) --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 6ec81eed3c..25044a2361 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ git clone https://github.com/OpenAccess-AI-Collective/axolotl cd axolotl pip3 install packaging -pip3 install -e .[flash-attn,deepspeed] +pip3 install -e '.[flash-attn,deepspeed]' pip3 install -U git+https://github.com/huggingface/peft.git # finetune lora @@ -122,7 +122,7 @@ accelerate launch -m axolotl.cli.inference examples/openllama-3b/lora.yml \ 3. Install axolotl along with python dependencies ```bash pip3 install packaging - pip3 install -e .[flash-attn,deepspeed] + pip3 install -e '.[flash-attn,deepspeed]' ``` - LambdaLabs @@ -158,7 +158,7 @@ accelerate launch -m axolotl.cli.inference examples/openllama-3b/lora.yml \ cd axolotl pip3 install packaging - pip3 install -e .[flash-attn,deepspeed] + pip3 install -e '.[flash-attn,deepspeed]' pip3 install protobuf==3.20.3 pip3 install -U --ignore-installed requests Pillow psutil scipy ``` From 19a600a8b859c40cf4c3749b0b8a3db17b82a0c0 Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Tue, 26 Sep 2023 22:53:28 +0900 Subject: [PATCH 20/77] Feat: Add support for upstream FA2 (#626) * Feat: Add support for upstream FA2 * chore: add is_falcon_derived_model: true to examples * chore: add config to readme for documentation * feat: add extra model types * fix: remove old falcon flash patch * chore: pin transformers and accelerate --- README.md | 4 + examples/falcon/config-7b-lora.yml | 1 + examples/falcon/config-7b-qlora.yml | 1 + examples/falcon/config-7b.yml | 1 + requirements.txt | 4 +- .../monkeypatch/falcon_attn_hijack_flash.py | 101 ------------------ src/axolotl/utils/config.py | 16 +++ src/axolotl/utils/models.py | 20 ++-- 8 files changed, 31 insertions(+), 117 deletions(-) delete mode 100644 src/axolotl/monkeypatch/falcon_attn_hijack_flash.py diff --git a/README.md b/README.md index 25044a2361..c2d4d8ef8c 100644 --- a/README.md +++ b/README.md @@ -408,6 +408,10 @@ tokenizer_legacy: # this is reported to improve training speed on some models resize_token_embeddings_to_32x: +# used to identify if the model is falcon/llama based +is_falcon_derived_model: +is_llama_derived_model: + # whether you are training a 4-bit GPTQ quantized model gptq: true gptq_groupsize: 128 # group size diff --git a/examples/falcon/config-7b-lora.yml b/examples/falcon/config-7b-lora.yml index a5cbdc00df..738068a474 100644 --- a/examples/falcon/config-7b-lora.yml +++ b/examples/falcon/config-7b-lora.yml @@ -3,6 +3,7 @@ base_model_config: tiiuae/falcon-7b trust_remote_code: true model_type: AutoModelForCausalLM tokenizer_type: AutoTokenizer +is_falcon_derived_model: true load_in_8bit: true load_in_4bit: false gptq: false diff --git a/examples/falcon/config-7b-qlora.yml b/examples/falcon/config-7b-qlora.yml index 72b09b87d9..554081fcba 100644 --- a/examples/falcon/config-7b-qlora.yml +++ b/examples/falcon/config-7b-qlora.yml @@ -6,6 +6,7 @@ 
base_model_config: tiiuae/falcon-7b trust_remote_code: true model_type: AutoModelForCausalLM tokenizer_type: AutoTokenizer +is_falcon_derived_model: true load_in_8bit: false # enable 4bit for QLoRA load_in_4bit: true diff --git a/examples/falcon/config-7b.yml b/examples/falcon/config-7b.yml index 46f4caff15..25e67a53b1 100644 --- a/examples/falcon/config-7b.yml +++ b/examples/falcon/config-7b.yml @@ -3,6 +3,7 @@ base_model_config: tiiuae/falcon-7b trust_remote_code: true model_type: AutoModelForCausalLM tokenizer_type: AutoTokenizer +is_falcon_derived_model: true load_in_8bit: false load_in_4bit: false gptq: false diff --git a/requirements.txt b/requirements.txt index 5aba20b161..33a2157d96 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,9 +4,9 @@ torch==2.0.1 auto-gptq packaging peft @ git+https://github.com/huggingface/peft.git -transformers @ git+https://github.com/huggingface/transformers.git +transformers @ git+https://github.com/huggingface/transformers.git@0ac3875011d32dc85e0e83970507e3afe8f0febb bitsandbytes>=0.41.1 -accelerate @ git+https://github.com/huggingface/accelerate +accelerate @ git+https://github.com/huggingface/accelerate@80da9cfb09bb3cc9f1b385cb55d6b90d025a5fd9 deepspeed addict evaluate diff --git a/src/axolotl/monkeypatch/falcon_attn_hijack_flash.py b/src/axolotl/monkeypatch/falcon_attn_hijack_flash.py deleted file mode 100644 index ed11c55234..0000000000 --- a/src/axolotl/monkeypatch/falcon_attn_hijack_flash.py +++ /dev/null @@ -1,101 +0,0 @@ -""" -Flash Attention monkey patch for Falcon - -copied from https://github.com/pacman100/DHS-LLM-Workshop/blob/main/chat_assistant/training/falcon_flash_attn_monkey_patch.py -""" - -from typing import Optional, Tuple - -import torch -import transformers -from flash_attn import flash_attn_func - - -def forward( - self, - hidden_states: torch.Tensor, - alibi: Optional[torch.Tensor], - attention_mask: torch.Tensor, # pylint: disable=unused-argument - layer_past: Optional[Tuple[torch.Tensor, torch.Tensor]] = None, - head_mask: Optional[torch.Tensor] = None, # pylint: disable=unused-argument - use_cache: bool = False, - output_attentions: bool = False, # pylint: disable=unused-argument -): - fused_qkv = self.query_key_value( - hidden_states - ) # [batch_size, seq_length, 3 x hidden_size] - num_kv_heads = ( - self.num_heads if self.new_decoder_architecture else self.num_kv_heads - ) - # 3 x [batch_size, seq_length, num_heads, head_dim] - ( - query_layer, - key_layer, - value_layer, - ) = self._split_heads( # pylint: disable=protected-access - fused_qkv - ) - - batch_size, query_length, _, _ = query_layer.shape - - query_layer = query_layer.transpose(1, 2).reshape( - batch_size * self.num_heads, query_length, self.head_dim - ) - key_layer = key_layer.transpose(1, 2).reshape( - batch_size * num_kv_heads, - query_length, - self.head_dim, - ) - value_layer = value_layer.transpose(1, 2).reshape( - batch_size * num_kv_heads, query_length, self.head_dim - ) - - past_kv_length = 0 if layer_past is None else layer_past[0].shape[1] - query_layer, key_layer = self.maybe_rotary(query_layer, key_layer, past_kv_length) - - if layer_past is not None: - past_key, past_value = layer_past - # concatenate along seq_length dimension: - # - key: [batch_size * self.num_heads, kv_length, head_dim] - # - value: [batch_size * self.num_heads, kv_length, head_dim] - key_layer = torch.cat((past_key, key_layer), dim=1) - value_layer = torch.cat((past_value, value_layer), dim=1) - - # unused - # _, kv_length, _ = key_layer.shape - if use_cache: - 
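-        # cache the freshly computed key/value tensors so the next decoding step can reuse them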
present = (key_layer, value_layer) - else: - present = None - # unused - # attention_mask_float = (attention_mask * 1.0).masked_fill(attention_mask, float("-1e9")).to(query_layer.dtype) - query_layer_ = ( - query_layer.reshape(batch_size, self.num_heads, -1, self.head_dim) - .transpose(1, 2) - .to(torch.bfloat16) - ) - key_layer_ = ( - key_layer.reshape(batch_size, num_kv_heads, -1, self.head_dim) - .transpose(1, 2) - .to(torch.bfloat16) - ) - value_layer_ = ( - value_layer.reshape(batch_size, num_kv_heads, -1, self.head_dim) - .transpose(1, 2) - .to(torch.bfloat16) - ) - - if alibi is not None: - raise ValueError("`alibi` is not supported when `use_flash_attn` is True") - - # below output will have shape (batch_size, seqlen, nheads, headdim) - attn_output = flash_attn_func(query_layer_, key_layer_, value_layer_, causal=True) - attn_output = attn_output.reshape( - batch_size, query_length, self.num_heads * self.head_dim - ) - output_tensor = self.dense(attn_output) - return output_tensor, present - - -def replace_falcon_attn_with_flash_attn(): - transformers.models.falcon.modeling_falcon.FalconAttention.forward = forward diff --git a/src/axolotl/utils/config.py b/src/axolotl/utils/config.py index cb18380cb7..3a574cefcc 100644 --- a/src/axolotl/utils/config.py +++ b/src/axolotl/utils/config.py @@ -86,6 +86,22 @@ def normalize_config(cfg): or (cfg.model_type and "llama" in cfg.model_type.lower()) ) + # figure out if the model is falcon + cfg.is_falcon_derived_model = ( + ( + hasattr(model_config, "model_type") + and model_config.model_type + in [ + "falcon", + "RefinedWebModel", + "RefinedWeb", + ] + ) + or cfg.is_falcon_derived_model + or "falcon" in cfg.base_model + or (cfg.model_type and "rwforcausallm" in cfg.model_type.lower()) + ) + log_gpu_memory_usage(LOG, "baseline", cfg.device) diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 543a0e1a13..361440931f 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -114,25 +114,13 @@ def load_model( replace_btlm_attn_with_flash_attn(cfg.base_model) - if hasattr(model_config, "model_type") and model_config.model_type in [ - "falcon", - "RefinedWebModel", - "RefinedWeb", - ]: - if cfg.flash_attention: - from axolotl.monkeypatch.falcon_attn_hijack_flash import ( - replace_falcon_attn_with_flash_attn, - ) - - replace_falcon_attn_with_flash_attn() - - if cfg.is_llama_derived_model and cfg.flash_attention: + if cfg.is_llama_derived_model and cfg.flash_attention and cfg.sample_packing: if cfg.device not in ["mps", "cpu"] and not inference: from axolotl.monkeypatch.llama_attn_hijack_flash import ( replace_llama_attn_with_flash_attn, ) - LOG.info("patching with flash attention") + LOG.info("patching with flash attention for sample packing") replace_llama_attn_with_flash_attn(packed=cfg.sample_packing) elif cfg.is_llama_derived_model and cfg.xformers_attention: from axolotl.monkeypatch.llama_attn_hijack_xformers import ( @@ -213,6 +201,10 @@ def load_model( bnb_4bit_use_double_quant=True, bnb_4bit_quant_type="nf4", ) + # sample packing uses custom FA2 patch + if cfg.flash_attention and not cfg.sample_packing: + if cfg.is_llama_derived_model or cfg.is_falcon_derived_model: + model_kwargs["use_flash_attention_2"] = True try: if cfg.is_llama_derived_model and not cfg.trust_remote_code and not cfg.gptq: from transformers import LlamaForCausalLM From d887ad86c3bd04c2e2b87af16a29a3427634a0db Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Tue, 26 Sep 2023 10:13:20 -0400 Subject: [PATCH 21/77] eval_table isn't 
quite stable enough to be in default llama configs (#637) --- examples/llama-2/lora.yml | 2 +- examples/llama-2/qlora.yml | 2 +- examples/llama-2/tiny-llama.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/llama-2/lora.yml b/examples/llama-2/lora.yml index 3134648fdb..106de6e320 100644 --- a/examples/llama-2/lora.yml +++ b/examples/llama-2/lora.yml @@ -56,7 +56,7 @@ flash_attention: true warmup_steps: 10 eval_steps: 20 -eval_table_size: 5 +eval_table_size: eval_table_max_new_tokens: 128 save_steps: debug: diff --git a/examples/llama-2/qlora.yml b/examples/llama-2/qlora.yml index 54255320cc..870c3fa848 100644 --- a/examples/llama-2/qlora.yml +++ b/examples/llama-2/qlora.yml @@ -58,7 +58,7 @@ flash_attention: true warmup_steps: 10 eval_steps: 20 -eval_table_size: 5 +eval_table_size: save_steps: debug: deepspeed: diff --git a/examples/llama-2/tiny-llama.yml b/examples/llama-2/tiny-llama.yml index a53c9c831b..d556a6d719 100644 --- a/examples/llama-2/tiny-llama.yml +++ b/examples/llama-2/tiny-llama.yml @@ -56,7 +56,7 @@ flash_attention: true warmup_steps: 10 eval_steps: 20 -eval_table_size: 5 +eval_table_size: save_steps: debug: deepspeed: From e8cbf50be698413f205bd7a603c01e2aa1d83231 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Wed, 27 Sep 2023 11:12:08 -0400 Subject: [PATCH 22/77] attention_mask not needed for training (#642) * attention_mask not needed for training * specifically don't use attention mask for phi * use a different check for phi * small fixes since phi removed some values from their config --- src/axolotl/models/phi/modeling_mixformer_sequential.py | 8 ++------ src/axolotl/utils/data.py | 2 +- src/axolotl/utils/trainer.py | 9 ++++++++- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/axolotl/models/phi/modeling_mixformer_sequential.py b/src/axolotl/models/phi/modeling_mixformer_sequential.py index 27bede5f01..fd2ec054c5 100644 --- a/src/axolotl/models/phi/modeling_mixformer_sequential.py +++ b/src/axolotl/models/phi/modeling_mixformer_sequential.py @@ -711,12 +711,8 @@ def __init__( self.resid_dropout = nn.Dropout(config.resid_pdrop) self.block_idx = block_idx - self.mixer = MHA(config=config, **mixer, layer_idx=block_idx) - mlp_cls = mlp.pop("mlp_cls") - if mlp_cls == "fused_mlp": - self.mlp = FusedMLP(config=config, **mlp) - else: - self.mlp = MLP(config=config, **mlp) + self.mixer = MHA(config, layer_idx=block_idx) + self.mlp = MLP(config) def forward( self, diff --git a/src/axolotl/utils/data.py b/src/axolotl/utils/data.py index 2dc2d82b28..9792371c7c 100644 --- a/src/axolotl/utils/data.py +++ b/src/axolotl/utils/data.py @@ -76,7 +76,7 @@ def prepare_dataset(cfg, tokenizer): with zero_first(is_main_process()): train_dataset, eval_dataset = process_datasets_for_packing( - cfg, train_dataset, eval_dataset + cfg, train_dataset, eval_dataset, tokenizer ) if cfg.max_steps: total_num_steps = min( diff --git a/src/axolotl/utils/trainer.py b/src/axolotl/utils/trainer.py index a2657e05ab..aee2a1b99e 100644 --- a/src/axolotl/utils/trainer.py +++ b/src/axolotl/utils/trainer.py @@ -397,7 +397,7 @@ def disable_datasets_caching(): set_caching_enabled(True) -def process_datasets_for_packing(cfg, train_dataset, eval_dataset): +def process_datasets_for_packing(cfg, train_dataset, eval_dataset, tokenizer): drop_long = partial(drop_long_seq, sequence_len=cfg.sequence_len) with zero_first(is_main_process()): train_dataset = train_dataset.filter(drop_long, num_proc=os.cpu_count()) @@ -414,6 +414,13 @@ def process_datasets_for_packing(cfg, 
train_dataset, eval_dataset): eval_dataset = eval_dataset.map( add_position_ids, num_proc=os.cpu_count() ) + + # Phi doesn't want the attention_mask feature when training + if "CodeGenTokenizer" in tokenizer.__class__.__name__: + train_dataset = train_dataset.remove_columns("attention_mask") + if eval_dataset: + eval_dataset = eval_dataset.remove_columns("attention_mask") + return train_dataset, eval_dataset From 60c7c48c97a380aefa558bbb39c7108bd5130581 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Wed, 27 Sep 2023 12:10:32 -0400 Subject: [PATCH 23/77] update for recent transformers updates (#636) * update for recent transformers updates * fix checkpoint forward kwargs * just pass args into torch checkpoint --- .../monkeypatch/llama_attn_hijack_flash.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/src/axolotl/monkeypatch/llama_attn_hijack_flash.py b/src/axolotl/monkeypatch/llama_attn_hijack_flash.py index d172d302d9..97f0477649 100644 --- a/src/axolotl/monkeypatch/llama_attn_hijack_flash.py +++ b/src/axolotl/monkeypatch/llama_attn_hijack_flash.py @@ -99,6 +99,7 @@ def flashattn_forward( past_key_value: Optional[Tuple[torch.Tensor]] = None, output_attentions: bool = False, use_cache: bool = False, + padding_mask: Optional[torch.LongTensor] = None, # pylint: disable=unused-argument cu_seqlens: Optional[torch.Tensor] = None, max_seqlen: Optional[torch.Tensor] = None, ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: @@ -476,6 +477,13 @@ def llama_model_forward( dtype=torch.bool, device=inputs_embeds.device, ) + padding_mask = None + else: + if 0 in attention_mask: + padding_mask = attention_mask + else: + padding_mask = None + attention_mask = ( self._prepare_decoder_attention_mask( # pylint: disable=protected-access attention_mask, @@ -510,7 +518,9 @@ def llama_model_forward( def create_custom_forward(module): def custom_forward(*inputs): # None for past_key_value - return module(*inputs) + return module( + *inputs, + ) return custom_forward @@ -519,9 +529,10 @@ def custom_forward(*inputs): hidden_states, attention_mask, position_ids, - None, + past_key_value, output_attentions, None, + padding_mask, cu_seqlens, max_seqlen, ) @@ -533,6 +544,7 @@ def custom_forward(*inputs): past_key_value=past_key_value, output_attentions=output_attentions, use_cache=use_cache, + padding_mask=padding_mask, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen, ) @@ -579,6 +591,7 @@ def forward( past_key_value: Optional[Tuple[torch.Tensor]] = None, output_attentions: Optional[bool] = False, use_cache: Optional[bool] = False, + padding_mask: Optional[torch.LongTensor] = None, cu_seqlens: Optional[torch.Tensor] = None, max_seqlen: Optional[torch.Tensor] = None, ) -> Tuple[ @@ -611,6 +624,7 @@ def forward( past_key_value=past_key_value, output_attentions=output_attentions, use_cache=use_cache, + padding_mask=padding_mask, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen, ) From e7d3e2dbb6784d3bb7ebdf2e6c48ed458ed5a184 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Wed, 27 Sep 2023 12:10:45 -0400 Subject: [PATCH 24/77] use fastchat conversations template (#578) * use fastchat conversations template * require fastchat (fschat) pip install * handle roles dynamically from conversation * tweak fastchat conversation with a monkeypatch to get individual turns * fix up so it works with multiple conversation styles, and don't strip the turns * fix sharegpt fixture now that we're using a more correct tokenization * use a new prompter and support fastchat 
conversation type * use sharegpt from prompt strategies now * update docs, add chatml template * add a newline after im_end token * ensure we correctly set system message * update per PR feedback to handle deprecated sharegpt types * don't add duplicate wandb req * make sharegpt fields configurable from yml * llama2 fixes * don't fail fatally when turns are improper --- README.md | 7 +- requirements.txt | 1 + .../fastchat_conversation_turns.py | 174 ++++++++++++++++++ .../{sharegpt_simple.py => sharegpt.py} | 33 +++- .../prompt_strategies/sharegpt_jokes.py | 4 +- src/axolotl/prompt_tokenizers.py | 24 ++- src/axolotl/prompters.py | 131 +++++-------- src/axolotl/utils/config.py | 19 ++ src/axolotl/utils/data.py | 11 -- src/axolotl/utils/tokenization.py | 1 + tests/fixtures/conversation.tokenized.json | 2 +- tests/test_prompt_tokenizers.py | 6 +- tests/test_validation.py | 23 +++ 13 files changed, 324 insertions(+), 112 deletions(-) create mode 100644 src/axolotl/monkeypatch/fastchat_conversation_turns.py rename src/axolotl/prompt_strategies/{sharegpt_simple.py => sharegpt.py} (63%) diff --git a/README.md b/README.md index c2d4d8ef8c..3b3bc9985a 100644 --- a/README.md +++ b/README.md @@ -180,7 +180,7 @@ Have dataset(s) in one of the following format (JSONL recommended): ```json {"instruction": "...", "input": "...", "output": "..."} ``` -- `sharegpt:chat`: conversations where `from` is `human`/`gpt` +- `sharegpt`: conversations where `from` is `human`/`gpt` ```json {"conversations": [{"from": "...", "value": "..."}]} ``` @@ -269,11 +269,11 @@ Have dataset(s) in one of the following format (JSONL recommended): ```json {"prompt": "...", "generation": "..."} ``` -- `sharegpt_simple.load_role`: conversations where `role` is used instead of `from` +- `sharegpt.load_role`: conversations where `role` is used instead of `from` ```json {"conversations": [{"role": "...", "value": "..."}]} ``` -- `sharegpt_simple.load_guanaco`: conversations where `from` is `prompter`/`assistant` instead of default sharegpt +- `sharegpt.load_guanaco`: conversations where `from` is `prompter`/`assistant` instead of default sharegpt ```json {"conversations": [{"from": "...", "value": "..."}]} ``` @@ -443,6 +443,7 @@ datasets: data_files: # Optional[str] path to source data files shards: # Optional[int] number of shards to split data into name: # Optional[str] name of dataset configuration to load + conversation: # Optional[str] fastchat conversation type, only used with type: sharegpt # custom user prompt - path: repo diff --git a/requirements.txt b/requirements.txt index 33a2157d96..7616d1fb0a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -31,3 +31,4 @@ scipy scikit-learn==1.2.2 pynvml art +fschat==0.2.29 diff --git a/src/axolotl/monkeypatch/fastchat_conversation_turns.py b/src/axolotl/monkeypatch/fastchat_conversation_turns.py new file mode 100644 index 0000000000..19313fb7e2 --- /dev/null +++ b/src/axolotl/monkeypatch/fastchat_conversation_turns.py @@ -0,0 +1,174 @@ +""" +monkeypatch to add a get_turns method +""" + +import logging +from typing import Generator, Tuple + +from fastchat.conversation import SeparatorStyle + +LOG = logging.getLogger("axolotl.monkeypatch.fastchat_conversation_turns") + + +def get_prompt(self) -> str: + ret = "" + for role, msg in self.get_turns(): + ret += role + msg + return ret + + +def get_turns( # pylint: disable=too-many-return-statements + self, +) -> Generator[Tuple[str, str], None, None]: + """Get the prompt for generation.""" + system_prompt = 
self.system_template.format(system_message=self.system_message)
+    if self.sep_style == SeparatorStyle.ADD_COLON_SINGLE:
+        yield "", system_prompt + self.sep
+        for role, message in self.messages:
+            if message:
+                yield role + ": ", message + self.sep
+            else:
+                yield role + ":", ""
+        return
+    if self.sep_style == SeparatorStyle.ADD_COLON_TWO:
+        seps = [self.sep, self.sep2]
+        yield "", system_prompt + seps[0]
+        for i, (role, message) in enumerate(self.messages):
+            if message:
+                yield role + ": ", message + seps[i % 2]
+            else:
+                yield role + ":", ""
+        return
+    if self.sep_style == SeparatorStyle.ADD_COLON_SPACE_SINGLE:
+        yield "", system_prompt + self.sep
+        for role, message in self.messages:
+            if message:
+                yield role + ": ", message + self.sep
+            else:
+                yield role + ": ", ""  # must end with a space
+        return
+    if self.sep_style == SeparatorStyle.ADD_NEW_LINE_SINGLE:
+        yield "", "" if system_prompt == "" else system_prompt + self.sep
+        for role, message in self.messages:
+            if message:
+                yield role + "\n", message + self.sep
+            else:
+                yield role + "\n", ""
+        return
+    if self.sep_style == SeparatorStyle.NO_COLON_SINGLE:
+        yield "", system_prompt
+        for role, message in self.messages:
+            if message:
+                yield role, message + self.sep
+            else:
+                yield role, ""
+        return
+    if self.sep_style == SeparatorStyle.NO_COLON_TWO:
+        seps = [self.sep, self.sep2]
+        yield "", system_prompt
+        for i, (role, message) in enumerate(self.messages):
+            if message:
+                yield role, message + seps[i % 2]
+            else:
+                yield role, ""
+        return
+    if self.sep_style == SeparatorStyle.RWKV:
+        yield "", system_prompt
+        for i, (role, message) in enumerate(self.messages):
+            if message:
+                yield role + ": ", message.replace("\r\n", "\n").replace(
+                    "\n\n", "\n"
+                ) + "\n\n"
+            else:
+                yield role + ":", ""
+        return
+    if self.sep_style == SeparatorStyle.LLAMA2:
+        seps = [self.sep, self.sep2]
+        if self.system_message:
+            yield "", system_prompt
+        else:
+            yield "", "[INST] "
+        for i, (role, message) in enumerate(self.messages[1:]):
+            if message:
+                yield role + " ", message + seps[i % 2]
+            else:
+                yield role, ""
+        return
+    if self.sep_style == SeparatorStyle.CHATGLM:
+        # source: https://huggingface.co/THUDM/chatglm-6b/blob/1d240ba371910e9282298d4592532d7f0f3e9f3e/modeling_chatglm.py#L1302-L1308
+        # source2: https://huggingface.co/THUDM/chatglm2-6b/blob/e186c891cf64310ac66ef10a87e6635fa6c2a579/modeling_chatglm.py#L926
+        round_add_n = 1 if self.name == "chatglm2" else 0
+        if system_prompt:
+            yield "", system_prompt + self.sep
+
+        for i, (role, message) in enumerate(self.messages):
+            if i % 2 == 0:
+                yield "", f"[Round {i//2 + round_add_n}]{self.sep}"
+
+            if message:
+                yield f"{role}:", f"{message}{self.sep}"
+            else:
+                yield f"{role}:", ""
+        return
+    if self.sep_style == SeparatorStyle.CHATML:
+        yield "", "" if system_prompt == "" else system_prompt + self.sep + "\n"
+        for role, message in self.messages:
+            if message:
+                yield role + "\n", message + self.sep + "\n"
+            else:
+                yield role + "\n", ""
+        return
+    if self.sep_style == SeparatorStyle.CHATINTERN:
+        # source: https://huggingface.co/internlm/internlm-chat-7b-8k/blob/bd546fa984b4b0b86958f56bf37f94aa75ab8831/modeling_internlm.py#L771
+        seps = [self.sep, self.sep2]
+        yield "", system_prompt
+        for i, (role, message) in enumerate(self.messages):
+            prefix = "<s>" if i % 2 == 0 else ""
+            if message:
+                yield prefix + role + ":", message + seps[i % 2] + "\n"
+            else:
+                yield role + ":", ""
+        return
+    if self.sep_style == SeparatorStyle.DOLLY:
+        seps = [self.sep, self.sep2]
+        yield "", system_prompt
+        for i, (role, message) in enumerate(self.messages):
+            if message:
+                suffix = "\n\n" if i % 2 == 1 else ""
+                yield role + ":\n", message + seps[i % 2] + suffix
+            else:
+                yield role + ":\n", ""
+        return
+    if self.sep_style == SeparatorStyle.PHOENIX:
+        yield "", system_prompt
+        for role, message in self.messages:
+            if message:
+                yield role + ": ", "<s>" + message + "</s>"
+            else:
+                yield role + ": " + "<s>", ""
+        return
+    if self.sep_style == SeparatorStyle.ROBIN:
+        yield "", system_prompt + self.sep
+        for role, message in self.messages:
+            if message:
+                yield role + ":\n", message + self.sep
+            else:
+                yield role + ":\n", ""
+        return
+    if self.sep_style == SeparatorStyle.FALCON_CHAT:
+        if self.system_message:
+            yield "", system_prompt + self.sep
+        for role, message in self.messages:
+            if message:
+                yield role + ": ", message + self.sep
+            else:
+                yield role + ":", ""
+    else:
+        raise ValueError(f"Invalid style: {self.sep_style}")
+
+
+def add_get_turns_to_conversation():
+    import fastchat.conversation
+
+    fastchat.conversation.Conversation.get_turns = get_turns
+    fastchat.conversation.Conversation.get_prompt = get_prompt
diff --git a/src/axolotl/prompt_strategies/sharegpt_simple.py b/src/axolotl/prompt_strategies/sharegpt.py
similarity index 63%
rename from src/axolotl/prompt_strategies/sharegpt_simple.py
rename to src/axolotl/prompt_strategies/sharegpt.py
index bfe0d164bc..da36e778e0 100644
--- a/src/axolotl/prompt_strategies/sharegpt_simple.py
+++ b/src/axolotl/prompt_strategies/sharegpt.py
@@ -1,12 +1,35 @@
 """Module containing the SimpleShareGPTPromptTokenizingStrategy class"""
+from typing import Any, Dict, Optional
+
+from fastchat.conversation import Conversation, SeparatorStyle, register_conv_template

 from axolotl.prompt_tokenizers import ShareGPTPromptTokenizingStrategy
-from axolotl.prompters import PromptStyle, ShareGPTPrompter
+from axolotl.prompters import ShareGPTPrompterV2
+
+register_conv_template(
+    Conversation(
+        name="chatml",
+        system_template="<|im_start|>system\n{system_message}",
+        system_message="You are a helpful assistant.",
+        roles=["<|im_start|>user", "<|im_start|>assistant"],
+        sep_style=SeparatorStyle.CHATML,
+        sep="<|im_end|>\n",
+    )
+)


-def load(tokenizer, cfg):
+def load(tokenizer, cfg, ds_cfg: Optional[Dict[str, Any]] = None):
+    conversation = (
+        ds_cfg["conversation"] if ds_cfg and "conversation" in ds_cfg else None
+    )
+    field_human = ds_cfg["field_human"] if ds_cfg and "field_human" in ds_cfg else None
+    field_model = ds_cfg["field_model"] if ds_cfg and "field_model" in ds_cfg else None
     return SimpleShareGPTPromptTokenizingStrategy(
-        ShareGPTPrompter(PromptStyle.CHAT.value),
+        ShareGPTPrompterV2(
+            conversation=conversation,
+            role_key_model=field_model,
+            role_key_human=field_human,
+        ),
         tokenizer,
         cfg.train_on_inputs,
         cfg.sequence_len,

 def load_role(tokenizer, cfg):
     return SimpleRoleShareGPTPromptTokenizingStrategy(
-        ShareGPTPrompter(PromptStyle.CHAT.value),
+        ShareGPTPrompterV2(),
         tokenizer,
         cfg.train_on_inputs,
         cfg.sequence_len,

 def load_guanaco(tokenizer, cfg):
     return GuanacoShareGPTPromptTokenizingStrategy(
-        ShareGPTPrompter(PromptStyle.CHAT.value),
+        ShareGPTPrompterV2(),
         tokenizer,
         cfg.train_on_inputs,
         cfg.sequence_len,
diff --git a/src/axolotl/prompt_strategies/sharegpt_jokes.py b/src/axolotl/prompt_strategies/sharegpt_jokes.py
index ac424bf7c7..404302c81e 100644
--- a/src/axolotl/prompt_strategies/sharegpt_jokes.py
+++ b/src/axolotl/prompt_strategies/sharegpt_jokes.py
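For reference, a minimal sketch of the dataset entry that drives these loaders from the training YAML (the dataset path is a placeholder, and the `field_human`/`field_model` overrides are only needed when the conversation JSON does not use the default `human`/`gpt` role keys):
```yaml
datasets:
  - path: your-org/sharegpt-style-data   # hypothetical dataset repo
    type: sharegpt
    conversation: chatml        # any registered fastchat template; omit to fall back to vicuna_v1.1
    field_human: user           # optional: role key to treat as the human turn
    field_model: assistant      # optional: role key to treat as the model turn
```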
@@ -1,11 +1,11 @@
 """Module for Jokes prompts using sharegpt style """
 from axolotl.prompt_tokenizers import ShareGPTPromptTokenizingStrategy
-from axolotl.prompters import PromptStyle, ShareGPTPrompter
+from axolotl.prompters import ShareGPTPrompterV2


 def load(tokenizer, cfg):
     return SimpleJokesShareGPTPromptTokenizingStrategy(
-        ShareGPTPrompter(PromptStyle.CHAT.value),
+        ShareGPTPrompterV2(),
         tokenizer,
         cfg.train_on_inputs,
         cfg.sequence_len,
diff --git a/src/axolotl/prompt_tokenizers.py b/src/axolotl/prompt_tokenizers.py
index a74da71da5..31bb73c2f7 100644
--- a/src/axolotl/prompt_tokenizers.py
+++ b/src/axolotl/prompt_tokenizers.py
@@ -6,8 +6,12 @@
 import logging
 from typing import Dict, List, Tuple, Union

+from fastchat.conversation import Conversation
 from transformers import BatchEncoding, PreTrainedTokenizer

+from axolotl.monkeypatch.fastchat_conversation_turns import (
+    add_get_turns_to_conversation,
+)
 from axolotl.prompters import IGNORE_TOKEN_ID

 LOG = logging.getLogger("axolotl")
@@ -18,6 +22,8 @@
 LLAMA_DEFAULT_BOS_TOKEN = "<s>"  # nosec
 LLAMA_DEFAULT_UNK_TOKEN = "<unk>"  # nosec

+add_get_turns_to_conversation()
+

 class InvalidDataException(Exception):
     """
@@ -352,18 +358,21 @@ def tokenize_prompt(self, prompt):
         result, current_len = tokenize_prompt_default()
         user_token = self._get_user_token()
         assistant_token = self._get_assistant_token()
+        conversation: Conversation = (
+            self.prompter._conversation  # pylint: disable=protected-access
+        )
         try:
             for _, part in enumerate(
                 self.prompter.build_prompt(self.get_conversation_thread(prompt))
             ):
                 if isinstance(part, tuple):
-                    if part[0] == "USER:":
+                    if conversation.roles[0] in part[0]:
                         turn = part[0] + part[1] if not user_token else part[1]
                         # this is still the user query, we should
                         if not part[1].strip():
                             LOG.warning(f"user turn has empty text: {prompt}")
                         res = self._tokenize(
-                            turn.strip(),
+                            turn,
                             add_eos_token=False,
                             strip_bos_token=True,
                         )
@@ -371,14 +380,14 @@ def tokenize_prompt(self, prompt):
                             res["input_ids"] = [user_token, *res["input_ids"]]
                         # everything from this is masked out from the labels
                         labels = [IGNORE_TOKEN_ID] * len(res["input_ids"])
-                    elif part[0] == "ASSISTANT:":
+                    elif conversation.roles[1] in part[0]:
                         # TODO label assistant token/tokens w/ IGNORE_TOKEN_ID
                         turn = part[0] + part[1] if not assistant_token else part[1]
                         # this should be the assistant response, should end with an eos token
                         if not part[1].strip():
                             LOG.warning(f"assistant turn has empty text: {prompt}")
                         res = self._tokenize(
-                            turn.strip(),
+                            turn,
                             add_eos_token=True,
                             strip_bos_token=True,
                         )
@@ -389,16 +398,17 @@ def tokenize_prompt(self, prompt):
                             ]
                         # not masked out from labels
                         labels = copy.deepcopy(res["input_ids"])
-                    elif part[0] == "SYSTEM:":
-                        part = part[1]  # Ignore the system role from preamble
+                    elif part[0] == "":
+                        turn = part[1]
                         # this is only ever the first part, should include the bos token and the user query
                         res = self._tokenize(
-                            part.strip(), add_eos_token=False, strip_bos_token=False
+                            turn, add_eos_token=False, strip_bos_token=False
                         )
                         # everything from this is masked out from the labels
                         labels = [IGNORE_TOKEN_ID] * len(res["input_ids"])
                     else:
                         LOG.warning(f"unhandled role: {part[0]}")
+                        continue

                     # pylint: disable=duplicate-code
                     result, current_len = parse_tokenized_to_result(
diff --git a/src/axolotl/prompters.py b/src/axolotl/prompters.py
index d6d14c3694..370e15091c 100644
--- a/src/axolotl/prompters.py
+++ b/src/axolotl/prompters.py
@@ -1,9 +1,10 @@
 """Module containing prompters"""
-import dataclasses
 import logging
-from enum import Enum,
auto -from typing import Generator, List, Optional, Tuple, Union +from enum import Enum +from typing import Generator, Optional, Union + +from fastchat.conversation import Conversation, get_conv_template LOG = logging.getLogger("axolotl") IGNORE_TOKEN_ID = -100 @@ -214,53 +215,6 @@ def build_prompt( yield res -class SeparatorStyle(Enum): - """Different separator style.""" - - SINGLE = auto() - TWO = auto() - DOLLY = auto() - - -# TODO clean this 💩 up -@dataclasses.dataclass -class Conversation: - """A class that keeps all conversation history.""" - - system: str - roles: List[str] - messages: List[List[str]] - offset: int - sep_style: SeparatorStyle = SeparatorStyle.SINGLE - sep: str = "###" - sep2: Optional[str] = None - - def get_prompt(self) -> Generator[Tuple[str, str], None, None]: - # seps = [self.sep, self.sep2] - preamble = self.system + self.sep - yield ("SYSTEM:", preamble) - for _, (role, message) in enumerate(self.messages): - if message: - yield (role + ":", " " + message) - else: - LOG.warning(f"role with empty message: {role}") - yield (role + ":", "") - - def copy(self): - return Conversation( - system=self.system, - roles=self.roles, - messages=[[x, y] for x, y in self.messages], - offset=self.offset, - sep_style=self.sep_style, - sep=self.sep, - sep2=self.sep2, - ) - - def append_message(self, role, message): - self.messages.append([role, message]) - - SHAREGPT_ASSERTION_FAILED_ROLE = ( "Role did not alternate between turns (gpt and human). Please check your data." ) @@ -271,28 +225,27 @@ class ShareGPTPrompter: # pylint: disable=too-few-public-methods A prompter that generates prompts for the ShareGPT """ - def __init__(self, prompt_style=None, system_prompt: Optional[str] = None): - if prompt_style != PromptStyle.CHAT.value: - raise ValueError( - f"unsupported prompt_style for ShareGPTPrompter({prompt_style})" - ) - system: str = ( - system_prompt - if system_prompt - else ( - "A chat between a curious user and an artificial intelligence assistant. " - "The assistant gives helpful, detailed, and polite answers to the user's questions." 
- ) - ) - self._conversation = Conversation( - system=system, - roles=["USER", "ASSISTANT"], - messages=[], - offset=0, - sep_style=SeparatorStyle.TWO, - sep=" ", - sep2=" ", - ) + role_key_human = "human" + role_key_model = "gpt" + + def __init__( + self, + prompt_style=None, # pylint: disable=unused-argument + conversation: Optional[Union[str, Conversation]] = None, + role_key_human: Optional[str] = None, + role_key_model: Optional[str] = None, + ): + if conversation: + if isinstance(conversation, Conversation): + self._conversation = conversation + else: + self._conversation = get_conv_template(conversation) + else: + self._conversation = get_conv_template("vicuna_v1.1") + if role_key_human: + self.role_key_human = role_key_human + if role_key_model: + self.role_key_model = role_key_model def build_prompt(self, source) -> Generator[str, None, None]: if len(source) < 2: @@ -306,17 +259,14 @@ def build_prompt(self, source) -> Generator[str, None, None]: # Add the conversation system prompt if provided, otherwise use the default one if source[0]["from"] == "system": - conv.system = source[0]["value"] + conv.set_system_message(source[0]["value"]) source.pop(0) - roles = {"human": conv.roles[0], "gpt": conv.roles[1]} + roles = {self.role_key_human: conv.roles[0], self.role_key_model: conv.roles[1]} try: # Apply prompt templates - if ( - source[0]["from"] not in roles - or roles[source[0]["from"]] != conv.roles[0] - ): + if source[0]["from"] not in roles: # Skip the first one if it is not from human source = source[1:] except IndexError as err: @@ -326,8 +276,29 @@ def build_prompt(self, source) -> Generator[str, None, None]: conv.messages = [] for j, sentence in enumerate(source): role = roles[sentence["from"]] - assert role == conv.roles[j % 2], SHAREGPT_ASSERTION_FAILED_ROLE + if role != conv.roles[j % 2]: + LOG.warning(f"{SHAREGPT_ASSERTION_FAILED_ROLE}: {sentence}") conv.append_message(role, sentence["value"]) - for part in conv.get_prompt(): + for part in conv.get_turns(): + if part[0] and not part[1]: + LOG.warning(f"role with empty message: {part[0]}") yield part + + +class ShareGPTPrompterV2(ShareGPTPrompter): + """ + A V2 prompter that generates prompts for the ShareGPT + """ + + def __init__( + self, + conversation: Optional[Union[str, Conversation]] = None, + role_key_human: Optional[str] = None, + role_key_model: Optional[str] = None, + ): + super().__init__( + conversation=conversation, + role_key_human=role_key_human, + role_key_model=role_key_model, + ) diff --git a/src/axolotl/utils/config.py b/src/axolotl/utils/config.py index 3a574cefcc..1dfdab2605 100644 --- a/src/axolotl/utils/config.py +++ b/src/axolotl/utils/config.py @@ -278,6 +278,25 @@ def validate_config(cfg): "`model_type: MixFormerSequentialForCausalLM` required for sample_packing" ) + if cfg.datasets: + for idx, ds_cfg in enumerate(cfg.datasets): + if ds_cfg.type == "sharegpt:chat": + LOG.warning( + PendingDeprecationWarning( + "`type: sharegpt:chat` will soon be deprecated. simply use `type: sharegpt` instead." + ) + ) + cfg.datasets[idx].type = "sharegpt" + if "sharegpt_simple" in ds_cfg.type: + LOG.warning( + PendingDeprecationWarning( + "`type: sharegpt_simple` will soon be deprecated. simply use `type: sharegpt` instead." 
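+                    # the deprecated type is rewritten in place below so dataset loading only ever sees "sharegpt"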
+ ) + ) + cfg.datasets[idx].type = cfg.datasets[idx].type.replace( + "sharegpt_simple", "sharegpt" + ) + # TODO # MPT 7b # https://github.com/facebookresearch/bitsandbytes/issues/25 diff --git a/src/axolotl/utils/data.py b/src/axolotl/utils/data.py index 9792371c7c..eeac15d302 100644 --- a/src/axolotl/utils/data.py +++ b/src/axolotl/utils/data.py @@ -25,7 +25,6 @@ GPTeacherPromptTokenizingStrategy, JeopardyPromptTokenizingStrategy, OpenAssistantPromptTokenizingStrategy, - ShareGPTPromptTokenizingStrategy, SummarizeTLDRPromptTokenizingStrategy, ) from axolotl.prompters import ( @@ -35,7 +34,6 @@ MultipleChoiceConcisePrompter, MultipleChoiceExplainPrompter, ReflectAlpacaPrompter, - ShareGPTPrompter, SummarizeTLDRPrompter, ) from axolotl.utils.dict import DictDefault @@ -320,15 +318,6 @@ def for_d_in_datasets(dataset_configs): ) ds_wrapper = TokenizedPromptDataset(ds_strategy, ds) datasets.append(ds_wrapper) - elif d_base_type == "sharegpt": - ds_strategy = ShareGPTPromptTokenizingStrategy( - ShareGPTPrompter(d_prompt_style), - tokenizer, - cfg.train_on_inputs, - cfg.sequence_len, - ) - ds_wrapper = TokenizedPromptDataset(ds_strategy, ds) - datasets.append(ds_wrapper) else: suffix = "" if ":load_" in d.type: diff --git a/src/axolotl/utils/tokenization.py b/src/axolotl/utils/tokenization.py index 4746ceddef..2b7608b636 100644 --- a/src/axolotl/utils/tokenization.py +++ b/src/axolotl/utils/tokenization.py @@ -33,5 +33,6 @@ def check_example_labels(example, tokenizer, text_only=False): LOG.info(" ".join(colored_tokens)) LOG.info("\n\n\n") + print(" ".join(colored_tokens)) return " ".join(colored_tokens) diff --git a/tests/fixtures/conversation.tokenized.json b/tests/fixtures/conversation.tokenized.json index 0ac93713bc..ea4fca0990 100644 --- a/tests/fixtures/conversation.tokenized.json +++ b/tests/fixtures/conversation.tokenized.json @@ -1 +1 @@ -{"input_ids": [1, 319, 13563, 1546, 263, 12758, 1404, 322, 385, 23116, 21082, 20255, 29889, 450, 20255, 4076, 8444, 29892, 13173, 29892, 322, 1248, 568, 6089, 304, 278, 1404, 29915, 29879, 5155, 29889, 3148, 1001, 29901, 920, 1033, 474, 2334, 263, 29086, 705, 11356, 5687, 393, 3667, 4637, 21531, 20159, 304, 4505, 1045, 3163, 29973, 319, 1799, 9047, 13566, 29901, 739, 29915, 29879, 1950, 304, 671, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29892, 541, 372, 723, 5517, 367, 3755, 4280, 29889, 838, 5869, 293, 20159, 338, 263, 5443, 310, 23964, 393, 11898, 278, 4426, 310, 8162, 393, 526, 21634, 1090, 9126, 316, 689, 800, 29892, 1316, 408, 16116, 292, 322, 289, 2548, 29889, 512, 278, 3030, 310, 341, 1475, 705, 11356, 29892, 445, 1033, 367, 1304, 304, 27599, 278, 3829, 310, 278, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 2398, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 739, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 29889, 2, 3148, 1001, 29901, 1033, 366, 2367, 592, 278, 330, 391, 310, 920, 372, 1033, 2466, 29973, 319, 1799, 9047, 13566, 29901, 18585, 29889, 450, 6996, 2969, 723, 367, 304, 671, 21531, 20159, 304, 27599, 278, 3829, 310, 278, 341, 1475, 705, 11356, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 910, 1033, 25135, 15783, 278, 3748, 7613, 408, 263, 25002, 2913, 322, 773, 25002, 15939, 1934, 1316, 408, 3632, 327, 2270, 6471, 322, 350, 9890, 3694, 304, 10115, 2472, 1048, 278, 4423, 310, 29086, 373, 278, 7613, 29889, 2398, 29892, 408, 306, 5276, 1434, 
29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 577, 372, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 29889, 2, 3148, 1001, 29901, 2367, 592, 263, 2702, 1342, 319, 1799, 9047, 13566, 29901, 1094, 263, 2702, 1342, 29892, 7755, 591, 505, 263, 341, 1475, 705, 11356, 3748, 7613, 411, 278, 1494, 5285, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 797, 445, 1206, 29892, 278, 3748, 7613, 508, 367, 9875, 408, 263, 25002, 2913, 29892, 988, 1269, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29889, 450, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 5293, 21531, 20159, 29892, 591, 508, 27599, 278, 3829, 310, 445, 2913, 322, 8161, 278, 14354, 310, 278, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 13, 13, 2831, 2777, 29892, 697, 982, 304, 437, 445, 338, 304, 10272, 278, 3632, 327, 2270, 6471, 310, 278, 2913, 29889, 15089, 327, 2270, 6471, 526, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 29892, 322, 896, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 512, 278, 1206, 310, 278, 7613, 2038, 29892, 591, 508, 10272, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 29892, 607, 16612, 278, 1353, 322, 376, 12181, 29908, 310, 278, 26532, 297, 278, 2913, 29889, 512, 445, 1206, 29892, 278, 937, 3632, 327, 2270, 2318, 723, 2649, 502, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29889, 13, 13, 29933, 1463, 373, 445, 2472, 29892, 591, 508, 10115, 393, 278, 29086, 297, 278, 3748, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29892, 408, 4318, 2400, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 341, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 341, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1342, 310, 920, 21531, 20159, 1033, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 526, 1784, 916, 1950, 13501, 29892, 322, 278, 2702, 4902, 310, 920, 304, 2334, 963, 723, 8839, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 1094, 306, 5276, 1434, 29892, 16049, 263, 1650, 763, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 2, 3148, 1001, 29901, 10241, 474, 471, 4856, 411, 263, 6483, 8004, 310, 1716, 29892, 1033, 366, 5649, 278, 1021, 6964, 304, 592, 411, 393, 11833, 29973, 319, 1799, 9047, 13566, 29901, 18585, 29889, 17090, 366, 505, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 1244, 338, 920, 366, 1795, 671, 21531, 20159, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 
373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 4803, 25002, 15939, 1934, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1670, 526, 1784, 1950, 25002, 15939, 1934, 393, 1033, 367, 1304, 363, 445, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29941, 29889, 4803, 278, 2472, 10115, 1127, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 21531, 20159, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 2, 3148, 1001, 29901, 920, 723, 474, 437, 372, 411, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 29973, 319, 1799, 9047, 13566, 29901, 1763, 671, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29892, 366, 723, 1101, 278, 1021, 6996, 6576, 408, 5439, 2038, 29892, 541, 411, 777, 12651, 297, 278, 2702, 4902, 29889, 13, 13, 2831, 2777, 29892, 1244, 338, 920, 366, 1795, 671, 28152, 3632, 3002, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 11796, 29872, 278, 28152, 3632, 3002, 310, 278, 2913, 29889, 9034, 9696, 3632, 3002, 338, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 975, 931, 29892, 322, 372, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 13, 29941, 29889, 4803, 278, 2472, 515, 278, 28152, 3632, 3002, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 28152, 3632, 3002, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 2702, 4902, 310, 278, 28152, 3632, 3002, 16287, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 28152, 3632, 
3002, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 2, 3148, 1001, 29901, 920, 1033, 474, 2334, 445, 297, 3017, 29973, 319, 1799, 9047, 13566, 29901, 1763, 2334, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29892, 366, 723, 817, 304, 437, 278, 1494, 29901, 13, 13, 29896, 29889, 16052, 263, 5132, 3577, 363, 21531, 20159, 29889, 1670, 526, 3196, 3987, 3625, 29892, 1316, 408, 402, 566, 2918, 29892, 360, 291, 952, 375, 29892, 470, 4560, 7354, 29899, 29873, 1388, 29889, 4525, 9741, 3867, 14009, 322, 848, 12286, 363, 20602, 25002, 15939, 1934, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29906, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 910, 1033, 367, 2309, 773, 263, 848, 3829, 4944, 491, 278, 21531, 20159, 3577, 29892, 1316, 408, 263, 3053, 506, 616, 4280, 470, 263, 478, 2035, 29367, 29899, 29934, 4512, 4280, 29889, 7806, 6862, 373, 278, 7613, 723, 3928, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 723, 367, 9875, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29889, 13, 29941, 29889, 11796, 29872, 278, 25002, 15939, 1934, 310, 278, 2913, 29889, 910, 1033, 367, 2309, 773, 14009, 4944, 491, 278, 21531, 20159, 3577, 29889, 1152, 1342, 29892, 366, 1033, 10272, 278, 28152, 3632, 3002, 310, 278, 2913, 773, 278, 28152, 3632, 3002, 5687, 29889, 13, 29946, 29889, 4803, 278, 2472, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 910, 1033, 367, 2309, 773, 263, 10296, 310, 19475, 24481, 322, 8720, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 366, 1033, 671, 263, 8424, 310, 775, 304, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29945, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 1033, 671, 263, 8424, 310, 775, 304, 6336, 2828, 373, 278, 9109, 25256, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 16049, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 4902, 310, 278, 5314, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 304, 679, 4687, 411, 1316, 263, 2060, 29889, 2], "attention_mask": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], "labels": [-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 739, 29915, 29879, 1950, 304, 671, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29892, 541, 372, 723, 5517, 367, 3755, 4280, 29889, 838, 5869, 293, 20159, 338, 263, 5443, 310, 23964, 393, 11898, 278, 4426, 310, 8162, 393, 526, 21634, 1090, 9126, 316, 689, 800, 29892, 1316, 408, 16116, 292, 322, 289, 2548, 29889, 512, 278, 3030, 310, 341, 1475, 705, 11356, 29892, 445, 1033, 367, 1304, 304, 27599, 278, 3829, 310, 278, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 2398, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 739, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 29889, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 18585, 29889, 450, 6996, 2969, 723, 367, 304, 671, 21531, 20159, 304, 27599, 278, 3829, 310, 278, 341, 1475, 705, 11356, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 910, 1033, 25135, 15783, 278, 3748, 7613, 408, 263, 25002, 2913, 322, 773, 25002, 15939, 1934, 1316, 408, 3632, 327, 2270, 6471, 322, 350, 9890, 3694, 304, 10115, 2472, 1048, 278, 4423, 310, 29086, 373, 278, 7613, 29889, 2398, 29892, 408, 306, 5276, 1434, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 577, 372, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 29889, 2, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 1094, 263, 2702, 1342, 29892, 7755, 591, 505, 263, 341, 1475, 705, 11356, 3748, 7613, 411, 278, 1494, 5285, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 
1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 797, 445, 1206, 29892, 278, 3748, 7613, 508, 367, 9875, 408, 263, 25002, 2913, 29892, 988, 1269, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29889, 450, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 5293, 21531, 20159, 29892, 591, 508, 27599, 278, 3829, 310, 445, 2913, 322, 8161, 278, 14354, 310, 278, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 13, 13, 2831, 2777, 29892, 697, 982, 304, 437, 445, 338, 304, 10272, 278, 3632, 327, 2270, 6471, 310, 278, 2913, 29889, 15089, 327, 2270, 6471, 526, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 29892, 322, 896, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 512, 278, 1206, 310, 278, 7613, 2038, 29892, 591, 508, 10272, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 29892, 607, 16612, 278, 1353, 322, 376, 12181, 29908, 310, 278, 26532, 297, 278, 2913, 29889, 512, 445, 1206, 29892, 278, 937, 3632, 327, 2270, 2318, 723, 2649, 502, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29889, 13, 13, 29933, 1463, 373, 445, 2472, 29892, 591, 508, 10115, 393, 278, 29086, 297, 278, 3748, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29892, 408, 4318, 2400, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 341, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 341, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1342, 310, 920, 21531, 20159, 1033, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 526, 1784, 916, 1950, 13501, 29892, 322, 278, 2702, 4902, 310, 920, 304, 2334, 963, 723, 8839, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 1094, 306, 5276, 1434, 29892, 16049, 263, 1650, 763, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 18585, 29889, 17090, 366, 505, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 1244, 338, 920, 366, 1795, 671, 21531, 20159, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 4803, 25002, 15939, 1934, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1670, 526, 1784, 1950, 25002, 15939, 1934, 393, 1033, 367, 1304, 363, 445, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29941, 29889, 4803, 278, 2472, 10115, 1127, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 
29892, 565, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 21531, 20159, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 1763, 671, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29892, 366, 723, 1101, 278, 1021, 6996, 6576, 408, 5439, 2038, 29892, 541, 411, 777, 12651, 297, 278, 2702, 4902, 29889, 13, 13, 2831, 2777, 29892, 1244, 338, 920, 366, 1795, 671, 28152, 3632, 3002, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 11796, 29872, 278, 28152, 3632, 3002, 310, 278, 2913, 29889, 9034, 9696, 3632, 3002, 338, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 975, 931, 29892, 322, 372, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 13, 29941, 29889, 4803, 278, 2472, 515, 278, 28152, 3632, 3002, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 28152, 3632, 3002, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 2702, 4902, 310, 278, 28152, 3632, 3002, 16287, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 28152, 3632, 3002, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 1763, 2334, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29892, 366, 723, 817, 304, 437, 278, 1494, 29901, 13, 13, 29896, 29889, 16052, 263, 5132, 3577, 363, 21531, 20159, 29889, 1670, 526, 3196, 3987, 3625, 29892, 1316, 408, 402, 566, 2918, 29892, 360, 291, 952, 375, 29892, 470, 4560, 7354, 29899, 29873, 1388, 29889, 4525, 9741, 3867, 14009, 322, 848, 12286, 363, 20602, 25002, 15939, 1934, 29892, 
1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29906, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 910, 1033, 367, 2309, 773, 263, 848, 3829, 4944, 491, 278, 21531, 20159, 3577, 29892, 1316, 408, 263, 3053, 506, 616, 4280, 470, 263, 478, 2035, 29367, 29899, 29934, 4512, 4280, 29889, 7806, 6862, 373, 278, 7613, 723, 3928, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 723, 367, 9875, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29889, 13, 29941, 29889, 11796, 29872, 278, 25002, 15939, 1934, 310, 278, 2913, 29889, 910, 1033, 367, 2309, 773, 14009, 4944, 491, 278, 21531, 20159, 3577, 29889, 1152, 1342, 29892, 366, 1033, 10272, 278, 28152, 3632, 3002, 310, 278, 2913, 773, 278, 28152, 3632, 3002, 5687, 29889, 13, 29946, 29889, 4803, 278, 2472, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 910, 1033, 367, 2309, 773, 263, 10296, 310, 19475, 24481, 322, 8720, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 366, 1033, 671, 263, 8424, 310, 775, 304, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29945, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 1033, 671, 263, 8424, 310, 775, 304, 6336, 2828, 373, 278, 9109, 25256, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 16049, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 4902, 310, 278, 5314, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 304, 679, 4687, 411, 1316, 263, 2060, 29889, 2]} +{"input_ids": [1, 319, 13563, 1546, 263, 12758, 1404, 322, 385, 23116, 21082, 20255, 29889, 450, 20255, 4076, 8444, 29892, 13173, 29892, 322, 1248, 568, 6089, 304, 278, 1404, 29915, 29879, 5155, 29889, 29871, 3148, 1001, 29901, 920, 1033, 474, 2334, 263, 29086, 705, 11356, 5687, 393, 3667, 4637, 21531, 20159, 304, 4505, 1045, 3163, 29973, 29871, 319, 1799, 9047, 13566, 29901, 739, 29915, 29879, 1950, 304, 671, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29892, 541, 372, 723, 5517, 367, 3755, 4280, 29889, 838, 5869, 293, 20159, 338, 263, 5443, 310, 23964, 393, 11898, 278, 4426, 310, 8162, 393, 526, 21634, 1090, 9126, 316, 689, 800, 29892, 1316, 408, 16116, 292, 322, 289, 2548, 29889, 512, 278, 3030, 310, 341, 1475, 705, 11356, 29892, 445, 1033, 367, 1304, 304, 27599, 278, 3829, 310, 278, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 2398, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 739, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, 3148, 1001, 29901, 1033, 366, 2367, 592, 278, 330, 391, 310, 920, 372, 1033, 2466, 29973, 29871, 319, 1799, 9047, 13566, 29901, 18585, 29889, 450, 6996, 2969, 723, 367, 304, 671, 21531, 20159, 304, 27599, 278, 3829, 310, 278, 341, 1475, 705, 11356, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 910, 1033, 25135, 15783, 278, 3748, 7613, 408, 263, 25002, 2913, 322, 773, 25002, 15939, 1934, 1316, 408, 3632, 327, 2270, 6471, 322, 350, 9890, 3694, 304, 10115, 2472, 1048, 278, 
4423, 310, 29086, 373, 278, 7613, 29889, 2398, 29892, 408, 306, 5276, 1434, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 577, 372, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, 3148, 1001, 29901, 2367, 592, 263, 2702, 1342, 29871, 319, 1799, 9047, 13566, 29901, 1094, 263, 2702, 1342, 29892, 7755, 591, 505, 263, 341, 1475, 705, 11356, 3748, 7613, 411, 278, 1494, 5285, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 797, 445, 1206, 29892, 278, 3748, 7613, 508, 367, 9875, 408, 263, 25002, 2913, 29892, 988, 1269, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29889, 450, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 5293, 21531, 20159, 29892, 591, 508, 27599, 278, 3829, 310, 445, 2913, 322, 8161, 278, 14354, 310, 278, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 13, 13, 2831, 2777, 29892, 697, 982, 304, 437, 445, 338, 304, 10272, 278, 3632, 327, 2270, 6471, 310, 278, 2913, 29889, 15089, 327, 2270, 6471, 526, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 29892, 322, 896, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 512, 278, 1206, 310, 278, 7613, 2038, 29892, 591, 508, 10272, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 29892, 607, 16612, 278, 1353, 322, 376, 12181, 29908, 310, 278, 26532, 297, 278, 2913, 29889, 512, 445, 1206, 29892, 278, 937, 3632, 327, 2270, 2318, 723, 2649, 502, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29889, 13, 13, 29933, 1463, 373, 445, 2472, 29892, 591, 508, 10115, 393, 278, 29086, 297, 278, 3748, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29892, 408, 4318, 2400, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 341, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 341, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1342, 310, 920, 21531, 20159, 1033, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 526, 1784, 916, 1950, 13501, 29892, 322, 278, 2702, 4902, 310, 920, 304, 2334, 963, 723, 8839, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 1094, 306, 5276, 1434, 29892, 16049, 263, 1650, 763, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, 3148, 1001, 29901, 10241, 474, 471, 4856, 411, 263, 6483, 8004, 310, 1716, 29892, 1033, 366, 5649, 278, 1021, 6964, 304, 592, 411, 393, 11833, 29973, 29871, 319, 1799, 9047, 13566, 29901, 18585, 29889, 17090, 366, 505, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 1244, 338, 920, 366, 1795, 671, 21531, 20159, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 
263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 4803, 25002, 15939, 1934, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1670, 526, 1784, 1950, 25002, 15939, 1934, 393, 1033, 367, 1304, 363, 445, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29941, 29889, 4803, 278, 2472, 10115, 1127, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 21531, 20159, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, 3148, 1001, 29901, 920, 723, 474, 437, 372, 411, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 29973, 29871, 319, 1799, 9047, 13566, 29901, 1763, 671, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29892, 366, 723, 1101, 278, 1021, 6996, 6576, 408, 5439, 2038, 29892, 541, 411, 777, 12651, 297, 278, 2702, 4902, 29889, 13, 13, 2831, 2777, 29892, 1244, 338, 920, 366, 1795, 671, 28152, 3632, 3002, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 11796, 29872, 278, 28152, 3632, 3002, 310, 278, 2913, 29889, 9034, 9696, 3632, 3002, 338, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 975, 931, 29892, 322, 372, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 13, 29941, 29889, 4803, 278, 2472, 515, 278, 28152, 3632, 3002, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 28152, 3632, 3002, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 
278, 2702, 4902, 310, 278, 28152, 3632, 3002, 16287, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 28152, 3632, 3002, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, 3148, 1001, 29901, 920, 1033, 474, 2334, 445, 297, 3017, 29973, 29871, 319, 1799, 9047, 13566, 29901, 1763, 2334, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29892, 366, 723, 817, 304, 437, 278, 1494, 29901, 13, 13, 29896, 29889, 16052, 263, 5132, 3577, 363, 21531, 20159, 29889, 1670, 526, 3196, 3987, 3625, 29892, 1316, 408, 402, 566, 2918, 29892, 360, 291, 952, 375, 29892, 470, 4560, 7354, 29899, 29873, 1388, 29889, 4525, 9741, 3867, 14009, 322, 848, 12286, 363, 20602, 25002, 15939, 1934, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29906, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 910, 1033, 367, 2309, 773, 263, 848, 3829, 4944, 491, 278, 21531, 20159, 3577, 29892, 1316, 408, 263, 3053, 506, 616, 4280, 470, 263, 478, 2035, 29367, 29899, 29934, 4512, 4280, 29889, 7806, 6862, 373, 278, 7613, 723, 3928, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 723, 367, 9875, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29889, 13, 29941, 29889, 11796, 29872, 278, 25002, 15939, 1934, 310, 278, 2913, 29889, 910, 1033, 367, 2309, 773, 14009, 4944, 491, 278, 21531, 20159, 3577, 29889, 1152, 1342, 29892, 366, 1033, 10272, 278, 28152, 3632, 3002, 310, 278, 2913, 773, 278, 28152, 3632, 3002, 5687, 29889, 13, 29946, 29889, 4803, 278, 2472, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 910, 1033, 367, 2309, 773, 263, 10296, 310, 19475, 24481, 322, 8720, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 366, 1033, 671, 263, 8424, 310, 775, 304, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29945, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 1033, 671, 263, 8424, 310, 775, 304, 6336, 2828, 373, 278, 9109, 25256, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 16049, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 4902, 310, 278, 5314, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 304, 679, 4687, 411, 1316, 263, 2060, 21106, 29879, 29958, 2], "attention_mask": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], "labels": [-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 739, 29915, 29879, 1950, 304, 671, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29892, 541, 372, 723, 5517, 367, 3755, 4280, 29889, 838, 5869, 293, 20159, 338, 263, 5443, 310, 23964, 393, 11898, 278, 4426, 310, 8162, 393, 526, 21634, 1090, 9126, 316, 689, 800, 29892, 1316, 408, 16116, 292, 322, 289, 2548, 29889, 512, 278, 3030, 310, 341, 1475, 705, 11356, 29892, 445, 1033, 367, 1304, 304, 27599, 278, 3829, 310, 278, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 2398, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 739, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 18585, 29889, 450, 6996, 2969, 723, 367, 304, 671, 21531, 20159, 304, 27599, 278, 3829, 310, 278, 341, 1475, 705, 11356, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 910, 1033, 25135, 15783, 278, 3748, 7613, 408, 263, 25002, 2913, 322, 773, 25002, 15939, 1934, 1316, 408, 3632, 327, 2270, 6471, 322, 350, 9890, 3694, 304, 10115, 2472, 1048, 278, 4423, 310, 29086, 373, 278, 7613, 29889, 2398, 29892, 408, 306, 5276, 1434, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 577, 372, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 1094, 263, 2702, 1342, 29892, 
7755, 591, 505, 263, 341, 1475, 705, 11356, 3748, 7613, 411, 278, 1494, 5285, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 797, 445, 1206, 29892, 278, 3748, 7613, 508, 367, 9875, 408, 263, 25002, 2913, 29892, 988, 1269, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29889, 450, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 5293, 21531, 20159, 29892, 591, 508, 27599, 278, 3829, 310, 445, 2913, 322, 8161, 278, 14354, 310, 278, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 13, 13, 2831, 2777, 29892, 697, 982, 304, 437, 445, 338, 304, 10272, 278, 3632, 327, 2270, 6471, 310, 278, 2913, 29889, 15089, 327, 2270, 6471, 526, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 29892, 322, 896, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 512, 278, 1206, 310, 278, 7613, 2038, 29892, 591, 508, 10272, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 29892, 607, 16612, 278, 1353, 322, 376, 12181, 29908, 310, 278, 26532, 297, 278, 2913, 29889, 512, 445, 1206, 29892, 278, 937, 3632, 327, 2270, 2318, 723, 2649, 502, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29889, 13, 13, 29933, 1463, 373, 445, 2472, 29892, 591, 508, 10115, 393, 278, 29086, 297, 278, 3748, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29892, 408, 4318, 2400, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 341, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 341, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1342, 310, 920, 21531, 20159, 1033, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 526, 1784, 916, 1950, 13501, 29892, 322, 278, 2702, 4902, 310, 920, 304, 2334, 963, 723, 8839, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 1094, 306, 5276, 1434, 29892, 16049, 263, 1650, 763, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 18585, 29889, 17090, 366, 505, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 1244, 338, 920, 366, 1795, 671, 21531, 20159, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 4803, 25002, 15939, 1934, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1670, 526, 1784, 1950, 25002, 
15939, 1934, 393, 1033, 367, 1304, 363, 445, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29941, 29889, 4803, 278, 2472, 10115, 1127, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 21531, 20159, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 1763, 671, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29892, 366, 723, 1101, 278, 1021, 6996, 6576, 408, 5439, 2038, 29892, 541, 411, 777, 12651, 297, 278, 2702, 4902, 29889, 13, 13, 2831, 2777, 29892, 1244, 338, 920, 366, 1795, 671, 28152, 3632, 3002, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 11796, 29872, 278, 28152, 3632, 3002, 310, 278, 2913, 29889, 9034, 9696, 3632, 3002, 338, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 975, 931, 29892, 322, 372, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 13, 29941, 29889, 4803, 278, 2472, 515, 278, 28152, 3632, 3002, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 28152, 3632, 3002, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 2702, 4902, 310, 278, 28152, 3632, 3002, 16287, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 28152, 3632, 3002, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 1763, 2334, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 
29892, 366, 723, 817, 304, 437, 278, 1494, 29901, 13, 13, 29896, 29889, 16052, 263, 5132, 3577, 363, 21531, 20159, 29889, 1670, 526, 3196, 3987, 3625, 29892, 1316, 408, 402, 566, 2918, 29892, 360, 291, 952, 375, 29892, 470, 4560, 7354, 29899, 29873, 1388, 29889, 4525, 9741, 3867, 14009, 322, 848, 12286, 363, 20602, 25002, 15939, 1934, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29906, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 910, 1033, 367, 2309, 773, 263, 848, 3829, 4944, 491, 278, 21531, 20159, 3577, 29892, 1316, 408, 263, 3053, 506, 616, 4280, 470, 263, 478, 2035, 29367, 29899, 29934, 4512, 4280, 29889, 7806, 6862, 373, 278, 7613, 723, 3928, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 723, 367, 9875, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29889, 13, 29941, 29889, 11796, 29872, 278, 25002, 15939, 1934, 310, 278, 2913, 29889, 910, 1033, 367, 2309, 773, 14009, 4944, 491, 278, 21531, 20159, 3577, 29889, 1152, 1342, 29892, 366, 1033, 10272, 278, 28152, 3632, 3002, 310, 278, 2913, 773, 278, 28152, 3632, 3002, 5687, 29889, 13, 29946, 29889, 4803, 278, 2472, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 910, 1033, 367, 2309, 773, 263, 10296, 310, 19475, 24481, 322, 8720, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 366, 1033, 671, 263, 8424, 310, 775, 304, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29945, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 1033, 671, 263, 8424, 310, 775, 304, 6336, 2828, 373, 278, 9109, 25256, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 16049, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 4902, 310, 278, 5314, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 304, 679, 4687, 411, 1316, 263, 2060, 21106, 29879, 29958, 2]} diff --git a/tests/test_prompt_tokenizers.py b/tests/test_prompt_tokenizers.py index a455d8b513..9ee5ea588d 100644 --- a/tests/test_prompt_tokenizers.py +++ b/tests/test_prompt_tokenizers.py @@ -21,7 +21,7 @@ AlpacaPromptTokenizingStrategy, ShareGPTPromptTokenizingStrategy, ) -from axolotl.prompters import AlpacaPrompter, PromptStyle, ShareGPTPrompter +from axolotl.prompters import AlpacaPrompter, PromptStyle, ShareGPTPrompterV2 LOG = logging.getLogger("axolotl") @@ -60,7 +60,7 @@ def test_sharegpt_integration(self): ) as fin: data = fin.read() tokenized_conversation = json.loads(data) - prompter = ShareGPTPrompter("chat") + prompter = ShareGPTPrompterV2() strat = ShareGPTPromptTokenizingStrategy( prompter, self.tokenizer, @@ -79,7 +79,7 @@ def test_sharegpt_warnings_integration(self): ) as fin: data = fin.read() conversation = json.loads(data) - prompter = ShareGPTPrompter("chat") + prompter = ShareGPTPrompterV2() strat = ShareGPTPromptTokenizingStrategy( prompter, self.tokenizer, diff --git a/tests/test_validation.py b/tests/test_validation.py index d7935c1a54..b9a57c2e9a 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -374,3 +374,26 @@ def test_merge_lora_no_bf16_fail(self): ) validate_config(cfg) + + def 
test_sharegpt_deprecation(self):
+        cfg = DictDefault(
+            {"datasets": [{"path": "lorem/ipsum", "type": "sharegpt:chat"}]}
+        )
+        with self._caplog.at_level(logging.WARNING):
+            validate_config(cfg)
+            assert any(
+                "`type: sharegpt:chat` will soon be deprecated." in record.message
+                for record in self._caplog.records
+            )
+            assert cfg.datasets[0].type == "sharegpt"
+
+        cfg = DictDefault(
+            {"datasets": [{"path": "lorem/ipsum", "type": "sharegpt_simple:load_role"}]}
+        )
+        with self._caplog.at_level(logging.WARNING):
+            validate_config(cfg)
+            assert any(
+                "`type: sharegpt_simple` will soon be deprecated." in record.message
+                for record in self._caplog.records
+            )
+            assert cfg.datasets[0].type == "sharegpt:load_role"

From 895f0a0723e20359b865a7f414faa06103bef7f9 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Wed, 27 Sep 2023 12:11:07 -0400
Subject: [PATCH 25/77] skip some flash attn patches unless explicitly enabled
 (#643)

* skip some flash attn patches if explicitly disabled

* make the other patches optional

---
 README.md                                    |  2 +
 .../monkeypatch/llama_attn_hijack_flash.py   | 54 +++++++++++--------
 src/axolotl/utils/models.py                  |  6 ++-
 3 files changed, 38 insertions(+), 24 deletions(-)

diff --git a/README.md b/README.md
index 3b3bc9985a..7f3230423c 100644
--- a/README.md
+++ b/README.md
@@ -636,6 +636,8 @@ flash_optimum:
 xformers_attention:
 # whether to use flash attention patch https://github.com/Dao-AILab/flash-attention:
 flash_attention:
+flash_attn_cross_entropy: # Whether to use flash-attention cross entropy implementation - advanced use only
+flash_attn_rms_norm: # Whether to use flash-attention rms norm implementation - advanced use only
 # whether to use scaled-dot-product attention
 # https://pytorch.org/docs/stable/generated/torch.nn.functional.scaled_dot_product_attention.html
 sdp_attention:

diff --git a/src/axolotl/monkeypatch/llama_attn_hijack_flash.py b/src/axolotl/monkeypatch/llama_attn_hijack_flash.py
index 97f0477649..4f6b715756 100644
--- a/src/axolotl/monkeypatch/llama_attn_hijack_flash.py
+++ b/src/axolotl/monkeypatch/llama_attn_hijack_flash.py
@@ -38,7 +38,11 @@ LOG = logging.getLogger("axolotl")

-def replace_llama_attn_with_flash_attn(packed: Optional[bool] = False):
+def replace_llama_attn_with_flash_attn(
+    packed: Optional[bool] = False,
+    cross_entropy: Optional[bool] = False,
+    rms_norm: Optional[bool] = False,
+):
     transformers.models.llama.modeling_llama.LlamaModel._prepare_decoder_attention_mask = (  # pylint: disable=protected-access
         _prepare_decoder_attention_mask
     )
@@ -49,33 +53,37 @@ def replace_llama_attn_with_flash_attn(packed: Optional[bool] = False):
         llama_model_forward
     )

-    try:
-        from flash_attn.losses.cross_entropy import CrossEntropyLoss
+    # patch only when explicitly enabled
+    if cross_entropy:
+        try:
+            from flash_attn.losses.cross_entropy import CrossEntropyLoss

-        LOG.info("patching with flash_attn.losses.cross_entropy")
-        transformers.models.llama.modeling_llama.CrossEntropyLoss = partial(
-            CrossEntropyLoss, inplace_backward=True
-        )
-    except ImportError:
-        LOG.info(
-            "optimized flash-attention CrossEntropyLoss not found (run `pip install 'git+https://github.com/Dao-AILab/flash-attention.git#egg=xentropy_cuda_lib&subdirectory=csrc/xentropy'`)"
-        )
+            LOG.info("patching with flash_attn.losses.cross_entropy")
+            transformers.models.llama.modeling_llama.CrossEntropyLoss = partial(
+                CrossEntropyLoss, inplace_backward=True
+            )
+        except ImportError:
+            LOG.info(
+                "optimized flash-attention CrossEntropyLoss not found (run `pip install 'git+https://github.com/Dao-AILab/flash-attention.git#egg=xentropy_cuda_lib&subdirectory=csrc/xentropy'`)"
+            )

-    try:
-        from flash_attn.ops.rms_norm import RMSNorm
+    # patch only when explicitly enabled
+    if rms_norm:
+        try:
+            from flash_attn.ops.rms_norm import RMSNorm

-        class LlamaRMSNorm(RMSNorm):
-            """Patched LLamaRMSNorm"""
+            class LlamaRMSNorm(RMSNorm):
+                """Patched LLamaRMSNorm"""

-            def __init__(self, hidden_size, eps=1e-6):
-                super().__init__(hidden_size, eps=eps)
+                def __init__(self, hidden_size, eps=1e-6):
+                    super().__init__(hidden_size, eps=eps)

-        LOG.info("patching with flash_attn.ops.rms_norm")
-        transformers.models.llama.modeling_llama.LlamaRMSNorm = LlamaRMSNorm
-    except ImportError:
-        LOG.info(
-            "optimized flash-attention RMSNorm not found (run `pip install 'git+https://github.com/Dao-AILab/flash-attention.git#egg=dropout_layer_norm&subdirectory=csrc/layer_norm'`)"
-        )
+            LOG.info("patching with flash_attn.ops.rms_norm")
+            transformers.models.llama.modeling_llama.LlamaRMSNorm = LlamaRMSNorm
+        except ImportError:
+            LOG.info(
+                "optimized flash-attention RMSNorm not found (run `pip install 'git+https://github.com/Dao-AILab/flash-attention.git#egg=dropout_layer_norm&subdirectory=csrc/layer_norm'`)"
+            )

 # Disable the transformation of the attention mask in LlamaModel as the flash attention

diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py
index 361440931f..07cdc4d6ed 100644
--- a/src/axolotl/utils/models.py
+++ b/src/axolotl/utils/models.py
@@ -121,7 +121,11 @@ def load_model(
             )
             LOG.info("patching with flash attention for sample packing")
-            replace_llama_attn_with_flash_attn(packed=cfg.sample_packing)
+            replace_llama_attn_with_flash_attn(
+                packed=cfg.sample_packing,
+                cross_entropy=cfg.flash_attn_cross_entropy,
+                rms_norm=cfg.flash_attn_rms_norm,
+            )
         elif cfg.is_llama_derived_model and cfg.xformers_attention:
             from axolotl.monkeypatch.llama_attn_hijack_xformers import (
                 hijack_llama_attention,
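The two flags added to the README above are opt-in: when left unset, the extra flash-attention patches are skipped entirely. As a rough sketch (not taken from the patch itself), a config exercising them might look like the following; the `base_model` value is a placeholder, and both flags assume the optional flash-attn CUDA extensions named in the log messages are installed:

```yaml
base_model: huggyllama/llama-7b  # placeholder; any llama-derived model
flash_attention: true
flash_attn_cross_entropy: true   # opt-in: use flash-attn's CrossEntropyLoss
flash_attn_rms_norm: true        # opt-in: use flash-attn's RMSNorm
```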
From d1236f2c418f4efa2e36be82fe5cd640968f8908 Mon Sep 17 00:00:00 2001
From: Felix Yan
Date: Wed, 27 Sep 2023 19:12:10 +0300
Subject: [PATCH 26/77] Correct typos in datasets.py (#639)

---
 src/axolotl/datasets.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/axolotl/datasets.py b/src/axolotl/datasets.py
index 1dccb6d526..32b2e0cc2d 100644
--- a/src/axolotl/datasets.py
+++ b/src/axolotl/datasets.py
@@ -22,7 +22,7 @@ class TokenizedPromptDataset(Dataset):
     """
     Dataset that returns tokenized prompts from a stream of text files.
         Args:
-            prompt_tokenizer (PromptTokenizingStrategy): The prompt tokenizing method for proccessing the data.
+            prompt_tokenizer (PromptTokenizingStrategy): The prompt tokenizing method for processing the data.
             dataset (dataset.Dataset): Dataset with text files.
     """

@@ -55,7 +55,7 @@ class ConstantLengthDataset(IterableDataset):
     """
     Iterable dataset that returns constant length chunks of tokens from stream of text files.
         Args:
-            tokenizer (Tokenizer): The processor used for proccessing the data.
+            tokenizer (Tokenizer): The processor used for processing the data.
             dataset (dataset.Dataset): Dataset with text files.
             seq_length (int): Length of token sequences to return.
     """

From 8fe0e633d2715d84333ce68fdd45d2d975638a54 Mon Sep 17 00:00:00 2001
From: Ethan Smith
Date: Wed, 27 Sep 2023 10:41:31 -0700
Subject: [PATCH 27/77] Fix bug in dataset loading (#284)

* Fix bug in dataset loading

This fixes a bug when loading datasets: `d.data_files` may be a list,
which cannot be passed directly to `hf_hub_download`

* Check type of data_files, and load accordingly

---
 src/axolotl/utils/data.py | 25 ++++++++++++++++++++-----
 1 file changed, 20 insertions(+), 5 deletions(-)

diff --git a/src/axolotl/utils/data.py b/src/axolotl/utils/data.py
index eeac15d302..2713796774 100644
--- a/src/axolotl/utils/data.py
+++ b/src/axolotl/utils/data.py
@@ -205,11 +205,26 @@ def for_d_in_datasets(dataset_configs):
                     use_auth_token=use_auth_token,
                 )
             else:
-                fp = hf_hub_download(
-                    repo_id=d.path,
-                    repo_type="dataset",
-                    filename=d.data_files,
-                )
+                if isinstance(d.data_files, str):
+                    fp = hf_hub_download(
+                        repo_id=d.path,
+                        repo_type="dataset",
+                        filename=d.data_files,
+                    )
+                elif isinstance(d.data_files, list):
+                    fp = []
+                    for file in d.data_files:
+                        fp.append(
+                            hf_hub_download(
+                                repo_id=d.path,
+                                repo_type="dataset",
+                                filename=file,
+                            )
+                        )
+                else:
+                    raise ValueError(
+                        "data_files must be either a string or list of strings"
+                    )
                 ds = load_dataset(
                     "json", name=d.name, data_files=fp, streaming=False, split=None
                 )
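With the change above, `data_files` in a dataset entry may be either a single filename or a list of filenames. A hedged sketch of both shapes in a config (the repo and file names are hypothetical; `lorem/ipsum` echoes the placeholder used in the tests):

```yaml
datasets:
  - path: lorem/ipsum        # hypothetical HF dataset repo
    type: sharegpt
    data_files: train.jsonl  # a single file, passed through as a string
  - path: lorem/ipsum
    type: sharegpt
    data_files:              # or several files, downloaded one by one
      - part-00.jsonl
      - part-01.jsonl
```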
From 85b0be2ba7251e7e4cdc4be8e31dae4f88be6a8f Mon Sep 17 00:00:00 2001
From: Napuh <55241721+Napuh@users.noreply.github.com>
Date: Wed, 27 Sep 2023 23:43:35 +0200
Subject: [PATCH 28/77] Warn users to log in to HuggingFace (#645)

* added warning if user is not logged in to HF

* updated doc to suggest logging in to HF

---
 README.md                   |  5 +++++
 scripts/finetune.py         |  2 ++
 src/axolotl/cli/__init__.py | 15 +++++++++++++++
 src/axolotl/cli/train.py    |  2 ++
 4 files changed, 24 insertions(+)

diff --git a/README.md b/README.md
index 7f3230423c..d502eec0b5 100644
--- a/README.md
+++ b/README.md
@@ -124,6 +124,11 @@ accelerate launch -m axolotl.cli.inference examples/openllama-3b/lora.yml \
      pip3 install packaging
      pip3 install -e '.[flash-attn,deepspeed]'
      ```
+  4. (Optional) Log in to HuggingFace to use gated models/datasets.
+     ```bash
+     huggingface-cli login
+     ```
+     Get the token at huggingface.co/settings/tokens

 - LambdaLabs

diff --git a/scripts/finetune.py b/scripts/finetune.py
index 7b6751e31c..118a97b844 100644
--- a/scripts/finetune.py
+++ b/scripts/finetune.py
@@ -7,6 +7,7 @@
 from axolotl.cli import (
     check_accelerate_default_config,
+    check_user_token,
     do_inference,
     do_merge_lora,
     load_cfg,
@@ -31,6 +32,7 @@ def do_cli(config: Path = Path("examples/"), **kwargs):
     )
     parsed_cfg = load_cfg(config, **kwargs)
     check_accelerate_default_config()
+    check_user_token()
     parser = transformers.HfArgumentParser((TrainerCliArgs))
     parsed_cli_args, _ = parser.parse_args_into_dataclasses(
         return_remaining_strings=True

diff --git a/src/axolotl/cli/__init__.py b/src/axolotl/cli/__init__.py
index 90e1d508b0..c3b580391a 100644
--- a/src/axolotl/cli/__init__.py
+++ b/src/axolotl/cli/__init__.py
@@ -14,6 +14,8 @@
 # add src to the pythonpath so we don't need to pip install this
 from accelerate.commands.config import config_args
 from art import text2art
+from huggingface_hub import HfApi
+from huggingface_hub.utils import LocalTokenNotFoundError
 from transformers import GenerationConfig, TextStreamer

 from axolotl.common.cli import TrainerCliArgs, load_model_and_tokenizer
@@ -247,3 +249,16 @@ def check_accelerate_default_config():
         LOG.warning(
             f"accelerate config file found at {config_args.default_yaml_config_file}. This can lead to unexpected errors"
         )
+
+
+def check_user_token():
+    # Verify if token is valid
+    api = HfApi()
+    try:
+        user_info = api.whoami()
+        return bool(user_info)
+    except LocalTokenNotFoundError:
+        LOG.warning(
+            "Error verifying HuggingFace token. Remember to log in using `huggingface-cli login` and get your access token from https://huggingface.co/settings/tokens if you want to use gated models or datasets."
+        )
+        return False

diff --git a/src/axolotl/cli/train.py b/src/axolotl/cli/train.py
index 72a9250c8d..c64755872b 100644
--- a/src/axolotl/cli/train.py
+++ b/src/axolotl/cli/train.py
@@ -8,6 +8,7 @@
 from axolotl.cli import (
     check_accelerate_default_config,
+    check_user_token,
     load_cfg,
     load_datasets,
     print_axolotl_text_art,
@@ -21,6 +22,7 @@ def do_cli(config: Path = Path("examples/"), **kwargs):
     print_axolotl_text_art()
     parsed_cfg = load_cfg(config, **kwargs)
     check_accelerate_default_config()
+    check_user_token()
     parser = transformers.HfArgumentParser((TrainerCliArgs))
     parsed_cli_args, _ = parser.parse_args_into_dataclasses(
         return_remaining_strings=True
- pip3 install flash-attn + pip3 install -U -e .[flash-attn] pip3 install -r requirements-tests.txt - name: Run e2e tests diff --git a/requirements.txt b/requirements.txt index 7616d1fb0a..18659daec5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ torch==2.0.1 auto-gptq packaging peft @ git+https://github.com/huggingface/peft.git -transformers @ git+https://github.com/huggingface/transformers.git@0ac3875011d32dc85e0e83970507e3afe8f0febb +transformers @ git+https://github.com/huggingface/transformers.git@78dd120 bitsandbytes>=0.41.1 accelerate @ git+https://github.com/huggingface/accelerate@80da9cfb09bb3cc9f1b385cb55d6b90d025a5fd9 deepspeed diff --git a/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py b/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py new file mode 100644 index 0000000000..f53d5d0071 --- /dev/null +++ b/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py @@ -0,0 +1,401 @@ +"""Flash attention monkey patch for mistral model""" +# pylint: disable=duplicate-code + +import logging +import math +from typing import List, Optional, Tuple, Union + +import torch +import transformers +from einops import rearrange +from torch import nn +from transformers.modeling_outputs import BaseModelOutputWithPast +from transformers.models.mistral.modeling_mistral import ( + MistralDecoderLayer as OriginalMistralDecoderLayer, +) +from transformers.models.mistral.modeling_mistral import apply_rotary_pos_emb, repeat_kv + +from axolotl.monkeypatch.utils import get_cu_seqlens_from_pos_ids + +try: + from flash_attn.flash_attn_interface import ( # pylint: disable=ungrouped-imports + flash_attn_varlen_qkvpacked_func, + ) +except ImportError: + from flash_attn.flash_attn_interface import ( + flash_attn_unpadded_qkvpacked_func as flash_attn_varlen_qkvpacked_func, + ) + + +LOG = logging.getLogger("axolotl.monkeypatch.mistral") + + +def replace_mistral_attn_with_flash_attn( + packed: Optional[bool] = False, +): + transformers.models.mistral.modeling_mistral.MistralModel._prepare_decoder_attention_mask = ( # pylint: disable=protected-access + _prepare_decoder_attention_mask + ) + transformers.models.mistral.modeling_mistral.MistralAttention.forward = ( + flashattn_forward + ) + if packed: + transformers.models.mistral.modeling_mistral.MistralDecoderLayer = ( + MistralDecoderLayer + ) + transformers.models.mistral.modeling_mistral.MistralModel.forward = ( + mistral_model_forward + ) + + +# Disable the transformation of the attention mask in LlamaModel as the flash attention +# requires the attention mask to be the same as the key_padding_mask +def _prepare_decoder_attention_mask( + self, + attention_mask, + input_shape, + inputs_embeds, + past_key_values_length, + sliding_window, +): # pylint: disable=unused-argument + # [bsz, seq_len] + return attention_mask + + +def flashattn_forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: bool = False, + use_cache: bool = False, + cu_seqlens: Optional[torch.Tensor] = None, + max_seqlen: Optional[torch.Tensor] = None, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view( + bsz, q_len, self.num_heads, self.head_dim + ).transpose(1, 2) + 
key_states = key_states.view( + bsz, q_len, self.num_key_value_heads, self.head_dim + ).transpose(1, 2) + value_states = value_states.view( + bsz, q_len, self.num_key_value_heads, self.head_dim + ).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value[0].shape[-2] + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb( + query_states, key_states, cos, sin, position_ids + ) + + if past_key_value is not None: + # reuse k, v, self_attention + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + + past_key_value = (key_states, value_states) if use_cache else None + + # repeat k/v heads if n_kv_heads < n_heads + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + if cu_seqlens is not None and max_seqlen is not None and cu_seqlens.dim() == 1: + # special handling using sample packing + qkv = torch.stack( + [query_states, key_states, value_states], dim=2 + ) # [bsz, nh, 3, q_len, hd] + qkv = qkv.transpose(1, 3) # [bsz, q_len, 3, nh, hd] + qkv = rearrange(qkv, "b s ... -> (b s) ...") + + output = flash_attn_varlen_qkvpacked_func( + qkv, cu_seqlens, max_seqlen, 0.0, softmax_scale=None, causal=True + ) + output = rearrange(output, "(b s) ... -> b s ...", b=bsz) + attn_output = output + if attn_output.size() != (bsz, q_len, self.num_heads, self.head_dim): + raise ValueError( + f"`attn_output` should be of size {(bsz, q_len, self.num_heads, self.head_dim)}, but is" + f" {attn_output.size()}" + ) + attn_output = rearrange(attn_output, "b s h d -> b s (h d)") + attn_weights = None + else: + attn_weights = torch.matmul( + query_states, key_states.transpose(2, 3) + ) / math.sqrt(self.head_dim) + if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): + raise ValueError( + f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is" + f" {attn_weights.size()}" + ) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}" + ) + + attn_weights = attn_weights + attention_mask + + # upcast attention to fp32 + attn_weights = nn.functional.softmax( + attn_weights, dim=-1, dtype=torch.float32 + ).to(query_states.dtype) + attn_output = torch.matmul(attn_weights, value_states) + + if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): + raise ValueError( + f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is" + f" {attn_output.size()}" + ) + + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +def mistral_model_forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, +) -> Union[Tuple, 
BaseModelOutputWithPast]: + output_attentions = ( + output_attentions + if output_attentions is not None + else self.config.output_attentions + ) + output_hidden_states = ( + output_hidden_states + if output_hidden_states is not None + else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + + return_dict = ( + return_dict if return_dict is not None else self.config.use_return_dict + ) + + # retrieve input_ids and inputs_embeds + if input_ids is not None and inputs_embeds is not None: + raise ValueError( + "You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time" + ) + if input_ids is not None: + batch_size, seq_length = input_ids.shape + elif inputs_embeds is not None: + batch_size, seq_length, _ = inputs_embeds.shape + else: + raise ValueError( + "You have to specify either decoder_input_ids or decoder_inputs_embeds" + ) + + seq_length_with_past = seq_length + past_key_values_length = 0 + + if past_key_values is not None: + past_key_values_length = past_key_values[0][0].shape[2] + seq_length_with_past = seq_length_with_past + past_key_values_length + + cu_seqlens = None + max_seqlen = None + if position_ids is None: + device = input_ids.device if input_ids is not None else inputs_embeds.device + position_ids = torch.arange( + past_key_values_length, + seq_length + past_key_values_length, + dtype=torch.long, + device=device, + ) + position_ids = position_ids.unsqueeze(0).view(-1, seq_length) + else: + position_ids = position_ids.view(-1, seq_length).long() + cu_seqlens, max_seqlen = get_cu_seqlens_from_pos_ids(position_ids) + cu_seqlens = cu_seqlens.squeeze() + + if inputs_embeds is None: + inputs_embeds = self.embed_tokens(input_ids) + # embed positions + if attention_mask is None: + attention_mask = torch.ones( + (batch_size, seq_length_with_past), + dtype=torch.bool, + device=inputs_embeds.device, + ) + attention_mask = ( + self._prepare_decoder_attention_mask( # pylint: disable=protected-access + attention_mask, + (batch_size, seq_length), + inputs_embeds, + past_key_values_length, + sliding_window=self.config.sliding_window, + ) + ) + + hidden_states = inputs_embeds + + if self.gradient_checkpointing and self.training: + if use_cache: + transformers.logger.warning_once( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
+ ) + use_cache = False + + # decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + next_decoder_cache = () if use_cache else None + + for idx, decoder_layer in enumerate(self.layers): + if output_hidden_states: + all_hidden_states += (hidden_states,) + + past_key_value = past_key_values[idx] if past_key_values is not None else None + + if self.gradient_checkpointing and self.training: + + def create_custom_forward(module): + def custom_forward(*inputs): + # None for past_key_value + return module(*inputs) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(decoder_layer), + hidden_states, + attention_mask, + position_ids, + past_key_value, + output_attentions, + None, + cu_seqlens, + max_seqlen, + ) + else: + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + + hidden_states = layer_outputs[0] + + if use_cache: + next_decoder_cache += (layer_outputs[2 if output_attentions else 1],) + + if output_attentions: + all_self_attns += (layer_outputs[1],) + + hidden_states = self.norm(hidden_states) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states,) + + next_cache = next_decoder_cache if use_cache else None + if not return_dict: + return tuple( + v + for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] + if v is not None + ) + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + ) + + +class MistralDecoderLayer(OriginalMistralDecoderLayer): + """ + patched version of MistralDecoderLayer to pass through the precalculated cu_seqlens + """ + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: Optional[bool] = False, + use_cache: Optional[bool] = False, + cu_seqlens: Optional[torch.Tensor] = None, + max_seqlen: Optional[torch.Tensor] = None, + ) -> Tuple[ + torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]] + ]: + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): attention mask of size + `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). 
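A note on the `cu_seqlens`/`max_seqlen` pair threaded through this layer (the remaining argument descriptions continue just below): the model forward computes them once per batch via `get_cu_seqlens_from_pos_ids`, exploiting the fact that each packed sample restarts its `position_ids` at zero. The following is a minimal sketch of that derivation, for intuition only; the helper name is invented here, and the edge-case handling of axolotl's real implementation may differ.

```python
import torch

def cu_seqlens_from_position_ids(position_ids: torch.Tensor):
    # Illustrative only: in a packed batch every sample restarts its
    # positions at 0, so each zero marks a new sequence boundary.
    pos = position_ids.flatten()
    starts = torch.nonzero(pos == 0).flatten()
    lengths = torch.diff(starts, append=torch.tensor([pos.numel()]))
    # flash_attn_varlen_* kernels expect int32 cumulative lengths with a leading 0.
    cu_seqlens = torch.nn.functional.pad(lengths.cumsum(0), (1, 0)).to(torch.int32)
    return cu_seqlens, int(lengths.max())

# Three packed samples of lengths 3, 2 and 4:
pos = torch.tensor([[0, 1, 2, 0, 1, 0, 1, 2, 3]])
print(cu_seqlens_from_position_ids(pos))  # (tensor([0, 3, 5, 9], dtype=torch.int32), 4)
```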
+ past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states + cu_seqlens (`torch.Tensor`, *optional*) cumulative sequence len when packing + """ + + residual = hidden_states + + hidden_states = self.input_layernorm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.self_attn( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + hidden_states = residual + hidden_states + + # Fully Connected + residual = hidden_states + hidden_states = self.post_attention_layernorm(hidden_states) + hidden_states = self.mlp(hidden_states) + hidden_states = residual + hidden_states + + outputs = (hidden_states,) + + if output_attentions: + outputs += (self_attn_weights,) + + if use_cache: + outputs += (present_key_value,) + + return outputs diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 07cdc4d6ed..63e34293ee 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -150,6 +150,14 @@ def load_model( # Note: This might overwrite previous additional_special_tokens tokenizer.add_special_tokens({"additional_special_tokens": [MEM_TOKEN]}) + if cfg.is_mistral_derived_model and cfg.flash_attention: + from axolotl.monkeypatch.mistral_attn_hijack_flash import ( + replace_mistral_attn_with_flash_attn, + ) + + LOG.info("patching with flash attention") + replace_mistral_attn_with_flash_attn(packed=cfg.sample_packing) + if cfg.is_llama_derived_model and cfg.xpos_rope: from axolotl.monkeypatch.xpos_rope_llama_monkey_patch import ( replace_llama_rope_with_xpos_rope, From 383f88d7a71286e5c74f50e3caf893bd9d071fcf Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Thu, 28 Sep 2023 10:14:41 +0900 Subject: [PATCH 30/77] Fix(cfg): Add validation for save_strategy and eval_strategy (#633) * Fix(cfg): Check save_strategy cfg conflict with save_steps * Fix(cfg): Check evaluation_strategy cfg conflict with eval_steps * chore: add extra check for steps only --- src/axolotl/utils/config.py | 18 ++++ src/axolotl/utils/trainer.py | 15 +--- tests/test_validation.py | 168 +++++++++++++++++++++++++++++++++++ 3 files changed, 190 insertions(+), 11 deletions(-) diff --git a/src/axolotl/utils/config.py b/src/axolotl/utils/config.py index 1dfdab2605..ac067b5055 100644 --- a/src/axolotl/utils/config.py +++ b/src/axolotl/utils/config.py @@ -296,6 +296,24 @@ def validate_config(cfg): cfg.datasets[idx].type = cfg.datasets[idx].type.replace( "sharegpt_simple", "sharegpt" ) + if cfg.save_strategy and cfg.save_steps and cfg.save_strategy != "steps": + raise ValueError( + "save_strategy and save_steps mismatch. Please set save_strategy to 'steps' or remove save_steps." + ) + + if ( + cfg.evaluation_strategy + and cfg.eval_steps + and cfg.evaluation_strategy != "steps" + ): + raise ValueError( + "evaluation_strategy and eval_steps mismatch. Please set evaluation_strategy to 'steps' or remove eval_steps." 
+ ) + + if cfg.val_set_size == 0 and (cfg.eval_steps or cfg.evaluation_strategy): + raise ValueError( + "eval_steps and evaluation_strategy are not supported with val_set_size == 0" + ) # TODO # MPT 7b diff --git a/src/axolotl/utils/trainer.py b/src/axolotl/utils/trainer.py index aee2a1b99e..3c75e4ec53 100644 --- a/src/axolotl/utils/trainer.py +++ b/src/axolotl/utils/trainer.py @@ -604,26 +604,19 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer, total_num_ "sample_packing_efficiency" ] = cfg.sample_packing_eff_est - if cfg.eval_steps and cfg.evaluation_strategy: - # assume if the user set both, they know what they're doing - training_arguments_kwargs["evaluation_strategy"] = cfg.evaluation_strategy + if cfg.eval_steps: + training_arguments_kwargs["evaluation_strategy"] = "steps" training_arguments_kwargs["eval_steps"] = cfg.eval_steps + elif cfg.evaluation_strategy: + training_arguments_kwargs["evaluation_strategy"] = cfg.evaluation_strategy elif cfg.val_set_size == 0: # no eval set, so don't eval training_arguments_kwargs["evaluation_strategy"] = "no" - elif cfg.evaluation_strategy and cfg.evaluation_strategy in ["epoch", "no"]: - # if explicitly set for epoch, just set, and eval steps don't matter - training_arguments_kwargs["evaluation_strategy"] = cfg.evaluation_strategy - elif cfg.eval_steps: - # steps isn't used w/ epochs - training_arguments_kwargs["evaluation_strategy"] = "steps" - training_arguments_kwargs["eval_steps"] = cfg.eval_steps else: # we have an eval set, but no steps defined, default to use epoch training_arguments_kwargs["evaluation_strategy"] = "epoch" if cfg.save_steps: - # save_steps implies save_strategy of steps training_arguments_kwargs["save_strategy"] = "steps" training_arguments_kwargs["save_steps"] = cfg.save_steps elif cfg.save_strategy: diff --git a/tests/test_validation.py b/tests/test_validation.py index b9a57c2e9a..35d90a2cb4 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -397,3 +397,171 @@ def test_sharegpt_deprecation(self): for record in self._caplog.records ) assert cfg.datasets[0].type == "sharegpt:load_role" + + def test_no_conflict_save_strategy(self): + cfg = DictDefault( + { + "save_strategy": "epoch", + "save_steps": 10, + } + ) + + with pytest.raises( + ValueError, match=r".*save_strategy and save_steps mismatch.*" + ): + validate_config(cfg) + + cfg = DictDefault( + { + "save_strategy": "no", + "save_steps": 10, + } + ) + + with pytest.raises( + ValueError, match=r".*save_strategy and save_steps mismatch.*" + ): + validate_config(cfg) + + cfg = DictDefault( + { + "save_strategy": "steps", + } + ) + + validate_config(cfg) + + cfg = DictDefault( + { + "save_strategy": "steps", + "save_steps": 10, + } + ) + + validate_config(cfg) + + cfg = DictDefault( + { + "save_steps": 10, + } + ) + + validate_config(cfg) + + cfg = DictDefault( + { + "save_strategy": "no", + } + ) + + validate_config(cfg) + + def test_no_conflict_eval_strategy(self): + cfg = DictDefault( + { + "evaluation_strategy": "epoch", + "eval_steps": 10, + } + ) + + with pytest.raises( + ValueError, match=r".*evaluation_strategy and eval_steps mismatch.*" + ): + validate_config(cfg) + + cfg = DictDefault( + { + "evaluation_strategy": "no", + "eval_steps": 10, + } + ) + + with pytest.raises( + ValueError, match=r".*evaluation_strategy and eval_steps mismatch.*" + ): + validate_config(cfg) + + cfg = DictDefault( + { + "evaluation_strategy": "steps", + } + ) + + validate_config(cfg) + + cfg = DictDefault( + { + "evaluation_strategy": 
"steps", + "eval_steps": 10, + } + ) + + validate_config(cfg) + + cfg = DictDefault( + { + "eval_steps": 10, + } + ) + + validate_config(cfg) + + cfg = DictDefault( + { + "evaluation_strategy": "no", + } + ) + + validate_config(cfg) + + cfg = DictDefault( + { + "evaluation_strategy": "epoch", + "val_set_size": 0, + } + ) + + with pytest.raises( + ValueError, + match=r".*eval_steps and evaluation_strategy are not supported with val_set_size == 0.*", + ): + validate_config(cfg) + + cfg = DictDefault( + { + "eval_steps": 10, + "val_set_size": 0, + } + ) + + with pytest.raises( + ValueError, + match=r".*eval_steps and evaluation_strategy are not supported with val_set_size == 0.*", + ): + validate_config(cfg) + + cfg = DictDefault( + { + "val_set_size": 0, + } + ) + + validate_config(cfg) + + cfg = DictDefault( + { + "eval_steps": 10, + "val_set_size": 0.01, + } + ) + + validate_config(cfg) + + cfg = DictDefault( + { + "evaluation_strategy": "epoch", + "val_set_size": 0.01, + } + ) + + validate_config(cfg) From eb41f76f9286127c7e77dfe09bd59fbae0fb6fcb Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Thu, 28 Sep 2023 20:15:00 +0900 Subject: [PATCH 31/77] Feat: Add example for Mistral (#644) * Feat: Add example for Mistral * chore: turn off flash * chore: add is_mistral_derived_model * chore: update following PR --- README.md | 3 +- examples/mistral/config.yml | 62 +++++++++++++++++++++++++++++++++++++ src/axolotl/utils/config.py | 17 ++++++++-- 3 files changed, 79 insertions(+), 3 deletions(-) create mode 100644 examples/mistral/config.yml diff --git a/README.md b/README.md index d502eec0b5..3a1eb0cd7a 100644 --- a/README.md +++ b/README.md @@ -413,9 +413,10 @@ tokenizer_legacy: # this is reported to improve training speed on some models resize_token_embeddings_to_32x: -# used to identify if the model is falcon/llama based +# used to identify which the model is based on is_falcon_derived_model: is_llama_derived_model: +is_mistral_derived_model: # whether you are training a 4-bit GPTQ quantized model gptq: true diff --git a/examples/mistral/config.yml b/examples/mistral/config.yml new file mode 100644 index 0000000000..d199f947be --- /dev/null +++ b/examples/mistral/config.yml @@ -0,0 +1,62 @@ +base_model: mistralai/Mistral-7B-v0.1 +base_model_config: mistralai/Mistral-7B-v0.1 +model_type: MistralForCausalLM +tokenizer_type: LlamaTokenizer +is_mistral_derived_model: true + +load_in_8bit: false +load_in_4bit: false +strict: false + +datasets: + - path: mhenrichsen/alpaca_2k_test + type: alpaca +dataset_prepared_path: last_run_prepared +val_set_size: 0.01 +output_dir: ./out + +sequence_len: 8192 +sample_packing: +pad_to_sequence_len: + +wandb_project: +wandb_entity: +wandb_watch: +wandb_run_id: +wandb_log_model: + +gradient_accumulation_steps: 4 +micro_batch_size: 2 +num_epochs: 3 +optimizer: adamw_bnb_8bit +lr_scheduler: cosine +learning_rate: 0.0002 + +train_on_inputs: false +group_by_length: false +bf16: true +fp16: false +tf32: false + +gradient_checkpointing: true +early_stopping_patience: +resume_from_checkpoint: +local_rank: +logging_steps: 1 +xformers_attention: +flash_attention: true + +warmup_steps: 10 +eval_steps: 20 +eval_table_size: 5 +eval_table_max_new_tokens: 128 +save_steps: +debug: +deepspeed: +weight_decay: 0.0 +fsdp: +fsdp_config: +special_tokens: + bos_token: "" + eos_token: "" + unk_token: "" diff --git a/src/axolotl/utils/config.py b/src/axolotl/utils/config.py index ac067b5055..67f9490c47 100644 --- a/src/axolotl/utils/config.py +++ b/src/axolotl/utils/config.py @@ -82,7 
+82,7 @@ def normalize_config(cfg): cfg.is_llama_derived_model = ( (hasattr(model_config, "model_type") and model_config.model_type == "llama") or cfg.is_llama_derived_model - or "llama" in cfg.base_model + or "llama" in cfg.base_model.lower() or (cfg.model_type and "llama" in cfg.model_type.lower()) ) @@ -98,10 +98,23 @@ def normalize_config(cfg): ] ) or cfg.is_falcon_derived_model - or "falcon" in cfg.base_model + or "falcon" in cfg.base_model.lower() or (cfg.model_type and "rwforcausallm" in cfg.model_type.lower()) ) + cfg.is_mistral_derived_model = ( + ( + hasattr(model_config, "model_type") + and model_config.model_type + in [ + "mistral", + ] + ) + or cfg.is_mistral_derived_model + or "mistral" in cfg.base_model.lower() + or (cfg.model_type and "mistral" in cfg.model_type.lower()) + ) + log_gpu_memory_usage(LOG, "baseline", cfg.device) From b88f51512aa1524b60893ba84266088d922d84df Mon Sep 17 00:00:00 2001 From: Adarsh Shirawalmath <114558126+adarshxs@users.noreply.github.com> Date: Thu, 28 Sep 2023 19:54:56 +0530 Subject: [PATCH 32/77] Update mistral/README.md (#647) --- examples/mistral/README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 examples/mistral/README.md diff --git a/examples/mistral/README.md b/examples/mistral/README.md new file mode 100644 index 0000000000..d1efb2cab3 --- /dev/null +++ b/examples/mistral/README.md @@ -0,0 +1,12 @@ +**Mistral 7B** is a language model with a total of 7.3 billion parameters, showcasing a notable performance across a variety of benchmarks. + +Fine Tune: +```shell +accelerate launch -m axolotl.cli.train examples/mistral/config.yml + +``` + +If you run into CUDA OOM, use deepspeed with config zero2.json: +```shell +accelerate launch -m axolotl.cli.train examples/mistral/config.yml --deepspeed deepspeed/zero2.json +``` From b2edaaeff6db708164c3e764bdd97e8f227c1b0f Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 28 Sep 2023 10:57:37 -0400 Subject: [PATCH 33/77] fix for flash attn w mistral w/o sammple packing (#648) --- .../monkeypatch/mistral_attn_hijack_flash.py | 236 ++++++++++++++---- 1 file changed, 188 insertions(+), 48 deletions(-) diff --git a/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py b/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py index f53d5d0071..21a6ee0842 100644 --- a/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py +++ b/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py @@ -2,13 +2,17 @@ # pylint: disable=duplicate-code import logging -import math from typing import List, Optional, Tuple, Union import torch import transformers from einops import rearrange -from torch import nn +from flash_attn.bert_padding import pad_input, unpad_input +from flash_attn.flash_attn_interface import ( # pylint: disable=ungrouped-imports + flash_attn_kvpacked_func, + flash_attn_varlen_kvpacked_func, + flash_attn_varlen_qkvpacked_func, +) from transformers.modeling_outputs import BaseModelOutputWithPast from transformers.models.mistral.modeling_mistral import ( MistralDecoderLayer as OriginalMistralDecoderLayer, @@ -17,16 +21,6 @@ from axolotl.monkeypatch.utils import get_cu_seqlens_from_pos_ids -try: - from flash_attn.flash_attn_interface import ( # pylint: disable=ungrouped-imports - flash_attn_varlen_qkvpacked_func, - ) -except ImportError: - from flash_attn.flash_attn_interface import ( - flash_attn_unpadded_qkvpacked_func as flash_attn_varlen_qkvpacked_func, - ) - - LOG = logging.getLogger("axolotl.monkeypatch.mistral") @@ -108,6 +102,15 @@ def flashattn_forward( key_states = 
repeat_kv(key_states, self.num_key_value_groups) value_states = repeat_kv(value_states, self.num_key_value_groups) + if self.training: + # during training q,k,v always have same seqlen + assert key_states.shape == query_states.shape + is_causal = True + else: + # turn off FA causal mask after first inference autoregressive iteration + # only on first autoregressive step q,k,v have same seqlen + is_causal = key_states.shape == query_states.shape + if cu_seqlens is not None and max_seqlen is not None and cu_seqlens.dim() == 1: # special handling using sample packing qkv = torch.stack( @@ -120,46 +123,84 @@ def flashattn_forward( qkv, cu_seqlens, max_seqlen, 0.0, softmax_scale=None, causal=True ) output = rearrange(output, "(b s) ... -> b s ...", b=bsz) - attn_output = output - if attn_output.size() != (bsz, q_len, self.num_heads, self.head_dim): - raise ValueError( - f"`attn_output` should be of size {(bsz, q_len, self.num_heads, self.head_dim)}, but is" - f" {attn_output.size()}" - ) - attn_output = rearrange(attn_output, "b s h d -> b s (h d)") - attn_weights = None + elif query_states.shape == key_states.shape: + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + qkv_unpad, cu_seqlens_q, max_seqlen_q, _, output_pad_fn = generate_qkv( + query_states, + key_states, + value_states, + qkvpacked=True, + # We have disabled _prepare_decoder_attention_mask in LlamaModel + # the attention_mask should be the same as the key_padding_mask + key_padding_mask=attention_mask, + query_padding_mask=attention_mask[:, -query_states.size(1) :] + if attention_mask is not None + else None, + ) + output_unpad = flash_attn_varlen_qkvpacked_func( + qkv_unpad, + cu_seqlens_q, + max_seqlen_q, + 0.0, + softmax_scale=None, + causal=is_causal, + ) + output = output_pad_fn(output_unpad) else: - attn_weights = torch.matmul( - query_states, key_states.transpose(2, 3) - ) / math.sqrt(self.head_dim) - if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): - raise ValueError( - f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is" - f" {attn_weights.size()}" + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + if attention_mask is None or attention_mask.all().item(): + output = flash_attn_kvpacked_func( + query_states, + torch.stack([key_states, value_states], 2), + causal=is_causal, ) - - if attention_mask is not None: - if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): - raise ValueError( - f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}" - ) - - attn_weights = attn_weights + attention_mask - - # upcast attention to fp32 - attn_weights = nn.functional.softmax( - attn_weights, dim=-1, dtype=torch.float32 - ).to(query_states.dtype) - attn_output = torch.matmul(attn_weights, value_states) - - if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): - raise ValueError( - f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is" - f" {attn_output.size()}" + else: + ( # pylint: disable=unbalanced-tuple-unpacking + q_unpad, + kv_unpad, + cu_seqlens_q, + cu_seqlens_k, + max_seqlen_q, + max_seqlen_k, + _, + _, + output_pad_fn, + ) = generate_qkv( + query_states, + key_states, + value_states, + kvpacked=True, + key_padding_mask=attention_mask, + query_padding_mask=attention_mask[:, -query_states.size(1) :] + if 
attention_mask is not None + else None, + ) + if q_unpad.dtype != kv_unpad.dtype: + kv_unpad = kv_unpad.to(q_unpad.dtype) + output_unpad = flash_attn_varlen_kvpacked_func( + q_unpad, + kv_unpad, + cu_seqlens_q, + cu_seqlens_k, + max_seqlen_q, + max_seqlen_k, + 0.0, + softmax_scale=None, + causal=is_causal, ) + output = output_pad_fn(output_unpad) - attn_output = attn_output.transpose(1, 2).contiguous() - attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + attn_output = output + if attn_output.size() != (bsz, q_len, self.num_heads, self.head_dim): + raise ValueError( + f"`attn_output` should be of size {(bsz, q_len, self.num_heads, self.head_dim)}, but is" + f" {attn_output.size()}" + ) + attn_output = rearrange(attn_output, "b s h d -> b s (h d)") attn_output = self.o_proj(attn_output) @@ -169,6 +210,105 @@ def flashattn_forward( return attn_output, attn_weights, past_key_value +# based on https://github.com/Dao-AILab/flash-attention/blob/364a5b/tests/test_flash_attn.py#L38 +def generate_qkv( + q, + k, + v, + query_padding_mask=None, + key_padding_mask=None, + kvpacked=False, + qkvpacked=False, +): # pylint: disable=invalid-name,unnecessary-lambda-assignment + """ + Arguments: + q: (batch_size, seqlen_q, nheads, d) + k: (batch_size, seqlen_k, nheads_k, d) + v: (batch_size, seqlen_k, nheads_k, d) + query_padding_mask: (batch_size, seqlen), bool + key_padding_mask: (batch_size, seqlen), bool + """ + assert not (kvpacked and qkvpacked) + batch_size, seqlen_q, nheads, d = q.shape + _, seqlen_k, nheads_k, _ = k.shape + assert k.shape == (batch_size, seqlen_k, nheads_k, d) + assert v.shape == (batch_size, seqlen_k, nheads_k, d) + + if query_padding_mask is not None: + q_unpad, indices_q, cu_seqlens_q, max_seqlen_q = unpad_input( + q, query_padding_mask + ) + + output_pad_fn = lambda output_unpad: pad_input( # noqa: E731 + output_unpad, indices_q, batch_size, seqlen_q + ) + + else: + q_unpad = rearrange(q, "b s h d -> (b s) h d") + cu_seqlens_q = torch.arange( + 0, + (batch_size + 1) * seqlen_q, + step=seqlen_q, + dtype=torch.int32, + device=q_unpad.device, + ) + max_seqlen_q = seqlen_q + + output_pad_fn = lambda output_unpad: rearrange( # noqa: E731 + output_unpad, "(b s) h d -> b s h d", b=batch_size + ) + + if key_padding_mask is not None: + k_unpad, _, cu_seqlens_k, max_seqlen_k = unpad_input(k, key_padding_mask) + v_unpad, _, _, _ = unpad_input(v, key_padding_mask) + else: + k_unpad = rearrange(k, "b s h d -> (b s) h d") + v_unpad = rearrange(v, "b s h d -> (b s) h d") + cu_seqlens_k = torch.arange( + 0, + (batch_size + 1) * seqlen_k, + step=seqlen_k, + dtype=torch.int32, + device=k_unpad.device, + ) + max_seqlen_k = seqlen_k + + if qkvpacked: + assert nheads == nheads_k + qkv_unpad = torch.stack([q_unpad, k_unpad, v_unpad], dim=1) + qkv = torch.stack([q, k, v], dim=2) + return (qkv_unpad, cu_seqlens_q, max_seqlen_q, qkv, output_pad_fn) + + if kvpacked: + kv_unpad = torch.stack([k_unpad, v_unpad], dim=1) + kv = torch.stack([k, v], dim=2) + return ( + q_unpad, + kv_unpad, + cu_seqlens_q, + cu_seqlens_k, + max_seqlen_q, + max_seqlen_k, + q, + kv, + output_pad_fn, + ) + + return ( + q_unpad, + k_unpad, + v_unpad, + cu_seqlens_q, + cu_seqlens_k, + max_seqlen_q, + max_seqlen_k, + q, + k, + v, + output_pad_fn, + ) + + def mistral_model_forward( self, input_ids: torch.LongTensor = None, From 8662e8ffe89d11569c412c65d8b4785ae87d326f Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 28 Sep 2023 12:21:51 -0400 Subject: [PATCH 34/77] don't strip the prompt for check since we don't strip 
to tokenize anymore (#650) --- src/axolotl/prompt_tokenizers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/axolotl/prompt_tokenizers.py b/src/axolotl/prompt_tokenizers.py index 31bb73c2f7..1b39336642 100644 --- a/src/axolotl/prompt_tokenizers.py +++ b/src/axolotl/prompt_tokenizers.py @@ -81,7 +81,7 @@ def _tokenize( self, prompt: str, add_eos_token: bool = True, strip_bos_token: bool = False ) -> BatchEncoding: result: BatchEncoding - if not prompt.strip(): + if not prompt: LOG.warning("Empty text requested for tokenization.") result = BatchEncoding(data={"input_ids": [], "attention_mask": []}) else: From 409ca0f21cc5811a0956ffe0bffa04fe9e8d45e7 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 28 Sep 2023 20:14:14 -0400 Subject: [PATCH 35/77] add support for defined train split (#654) --- README.md | 10 +++++ src/axolotl/prompt_strategies/context_qa.py | 41 +++++++++++++++++++++ src/axolotl/utils/data.py | 10 +++++ 3 files changed, 61 insertions(+) diff --git a/README.md b/README.md index 3a1eb0cd7a..3f1767ea48 100644 --- a/README.md +++ b/README.md @@ -250,6 +250,10 @@ Have dataset(s) in one of the following format (JSONL recommended): ```json {"article": "...", "question": "...", "answer": "..."} ``` +- `context_qa.load_v2`: in context question answering (alternate) + ```json + {"context": "...", "question": "...", "answer": "..."} + ``` - `context_qa.load_404`: in context question answering from an article, with default response for no answer from context ```json {"article": "...", "unanswerable_question": "..."} @@ -356,6 +360,12 @@ See [examples](examples) for quick start. It is recommended to duplicate and mod - path: data.jsonl # or json ds_type: json # see other options below type: alpaca + + # dataset with splits, but no train split + dataset: + - path: knowrohit07/know_sql + type: context_qa.load_v2 + train_on_split: validation ``` - loading diff --git a/src/axolotl/prompt_strategies/context_qa.py b/src/axolotl/prompt_strategies/context_qa.py index f7027c7e24..f87dd8b5cd 100644 --- a/src/axolotl/prompt_strategies/context_qa.py +++ b/src/axolotl/prompt_strategies/context_qa.py @@ -24,6 +24,15 @@ def load(tokenizer, cfg): ) +def load_v2(tokenizer, cfg): + return ContextQaV2PromptTokenizingStrategy( + ContextV2Prompter(), + tokenizer, + cfg.train_on_inputs, + cfg.sequence_len, + ) + + class AlpacaContextPrompter(AlpacaPrompter): """ Customized system prompted for concise QA @@ -50,6 +59,38 @@ def parse_instruction_fields(self, prompt) -> Tuple[str, str, str]: ) +class ContextQaV2PromptTokenizingStrategy(InstructionPromptTokenizingStrategy): + """ + Tokenization Strategy to combine in-context article with a question and answer + """ + + def parse_instruction_fields(self, prompt) -> Tuple[str, str, str]: + return ( + "Context: " + + prompt["context"] + + "\nQuestion: " + + prompt["question"] + + "\n", + "", + "Answer: " + prompt["answer"], + ) + + +class ContextV2Prompter(AlpacaPrompter): + """ + Customized system prompted for concise QA + """ + + system_prompt = "" + system_no_input_prompt = "" + + def match_prompt_style(self): + # pylint: disable=duplicate-code + self.turn_format = "{instruction}\n{input}" + self.turn_no_input_format = "{instruction}" + self.system_format = "{system}" + + class AlpacaMissingInfoContextPromptTokenizingStrategy( InstructionPromptTokenizingStrategy ): diff --git a/src/axolotl/utils/data.py b/src/axolotl/utils/data.py index 2713796774..34a5baaffd 100644 --- a/src/axolotl/utils/data.py +++ b/src/axolotl/utils/data.py @@ 
-247,6 +247,16 @@ def for_d_in_datasets(dataset_configs): d_prompt_style = d_type_split[1] if len(d_type_split) > 1 else None if "train" in ds: ds = ds["train"] + elif ( + isinstance(ds, DatasetDict) + and d.train_on_split + and d.train_on_split in ds + ): + ds = ds[d.train_on_split] + elif isinstance(ds, DatasetDict): + raise ValueError( + f"no train split found for dataset {d.path}, you may specify a split with 'train_on_split: `" + ) if ( "input_ids" in ds.features and "attention_mask" in ds.features From 590d6032fdcbe915a1f7c7d8456c28db9f2d3861 Mon Sep 17 00:00:00 2001 From: ich <25612731+ein-ich@users.noreply.github.com> Date: Fri, 29 Sep 2023 04:54:10 +0200 Subject: [PATCH 36/77] Fix bug when using pretokenized datasets (#652) * fix pretokenized datasets readme * check if dataset type is not set to handle pretokenized datasets --- README.md | 2 +- src/axolotl/utils/config.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 3f1767ea48..593eff6c31 100644 --- a/README.md +++ b/README.md @@ -317,7 +317,7 @@ Using file: #### How to use your custom pretokenized dataset - Do not pass a `type:` -- Dataset must contain `input_ids`, `attention_mask`, `labels` in columns +- Columns in Dataset must be exactly `input_ids`, `attention_mask`, `labels` ### Config diff --git a/src/axolotl/utils/config.py b/src/axolotl/utils/config.py index 67f9490c47..5a034ea0f2 100644 --- a/src/axolotl/utils/config.py +++ b/src/axolotl/utils/config.py @@ -293,6 +293,8 @@ def validate_config(cfg): if cfg.datasets: for idx, ds_cfg in enumerate(cfg.datasets): + if not ds_cfg.type: + continue if ds_cfg.type == "sharegpt:chat": LOG.warning( PendingDeprecationWarning( From 9ec20777ba3a6320875b36d0ce4de04a65c57797 Mon Sep 17 00:00:00 2001 From: Kyle Corbitt Date: Thu, 28 Sep 2023 21:22:22 -0700 Subject: [PATCH 37/77] Make dataset_processes configurable (#651) I'm using the Axolotl script to train models on https://modal.com serverless GPUs. Unfortunately, their environment seems to have some kind of bug where if I try to run `datasets.filter` with too high a `num_proc`, it throws an error and dies. This PR adds a new configuration option `dataset_processes`, which lets you explicitly set the number of processes used to map/filter the dataset. If not included, this defaults to the current behavior of setting that to `os.cpu_count()`. --- README.md | 3 +++ src/axolotl/utils/config.py | 2 ++ src/axolotl/utils/trainer.py | 16 +++++++++++----- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 593eff6c31..60013df938 100644 --- a/README.md +++ b/README.md @@ -487,6 +487,9 @@ datasets: dataset_prepared_path: data/last_run_prepared # push prepared dataset to hub push_dataset_to_hub: # repo path +# The maximum number of processes to use while preprocessing your input dataset. This defaults to `os.cpu_count()` +# if not set. 
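Concretely, the option documented here reduces to one defaulting rule, applied in the `normalize_config` hunk later in this patch. A toy restatement of that rule, with a made-up helper name for illustration:

```python
import os

def resolve_dataset_processes(configured=None):
    # An explicit dataset_processes value from the YAML wins; otherwise fall
    # back to every available CPU, which was the previous hard-coded behavior.
    return configured or os.cpu_count()

print(resolve_dataset_processes())   # e.g. 16 on a 16-core host
print(resolve_dataset_processes(4))  # 4, for environments where full fan-out fails
```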
+dataset_processes: # defaults to os.cpu_count() if not set # push checkpoints to hub hub_model_id: # repo path to push finetuned model # how to push checkpoints to hub diff --git a/src/axolotl/utils/config.py b/src/axolotl/utils/config.py index 5a034ea0f2..a8c41d95b0 100644 --- a/src/axolotl/utils/config.py +++ b/src/axolotl/utils/config.py @@ -75,6 +75,8 @@ def normalize_config(cfg): else: cfg.torch_dtype = torch.float32 + cfg.dataset_processes = cfg.dataset_processes or os.cpu_count() + model_config = load_model_config(cfg) cfg.model_config_type = model_config.model_type diff --git a/src/axolotl/utils/trainer.py b/src/axolotl/utils/trainer.py index 3c75e4ec53..80ee5c8c61 100644 --- a/src/axolotl/utils/trainer.py +++ b/src/axolotl/utils/trainer.py @@ -400,19 +400,25 @@ def disable_datasets_caching(): def process_datasets_for_packing(cfg, train_dataset, eval_dataset, tokenizer): drop_long = partial(drop_long_seq, sequence_len=cfg.sequence_len) with zero_first(is_main_process()): - train_dataset = train_dataset.filter(drop_long, num_proc=os.cpu_count()) + train_dataset = train_dataset.filter(drop_long, num_proc=cfg.dataset_processes) if eval_dataset: - eval_dataset = eval_dataset.filter(drop_long, num_proc=os.cpu_count()) + eval_dataset = eval_dataset.filter( + drop_long, num_proc=cfg.dataset_processes + ) if cfg.group_by_length: - train_dataset = train_dataset.map(add_length, num_proc=os.cpu_count()) + train_dataset = train_dataset.map( + add_length, num_proc=cfg.dataset_processes + ) if cfg.sample_packing: - train_dataset = train_dataset.map(add_position_ids, num_proc=os.cpu_count()) + train_dataset = train_dataset.map( + add_position_ids, num_proc=cfg.dataset_processes + ) if cfg.eval_sample_packing is not False: if eval_dataset: eval_dataset = eval_dataset.map( - add_position_ids, num_proc=os.cpu_count() + add_position_ids, num_proc=cfg.dataset_processes ) # Phi doesn't want the attention_mask feature when training From 5b0bc48fbc52f3053806bd47159f110ee99c5df5 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 29 Sep 2023 00:22:40 -0400 Subject: [PATCH 38/77] add mistral e2e tests (#649) * mistral e2e tests * make sure to enable flash attention for the e2e tests * use latest transformers full sha * uninstall first --- .github/workflows/tests.yml | 1 + requirements.txt | 2 +- tests/e2e/test_mistral.py | 208 ++++++++++++++++++++++++++++++++++++ 3 files changed, 210 insertions(+), 1 deletion(-) create mode 100644 tests/e2e/test_mistral.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b4637fd67f..419382df74 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -69,6 +69,7 @@ jobs: - name: Install dependencies run: | + pip3 uninstall -y transformers accelerate pip3 install -U -e .[flash-attn] pip3 install -r requirements-tests.txt diff --git a/requirements.txt b/requirements.txt index 18659daec5..b02e656eb2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ torch==2.0.1 auto-gptq packaging peft @ git+https://github.com/huggingface/peft.git -transformers @ git+https://github.com/huggingface/transformers.git@78dd120 +transformers @ git+https://github.com/huggingface/transformers.git@5e11d72d4d0939138fbabfebe9a69d2061519547 bitsandbytes>=0.41.1 accelerate @ git+https://github.com/huggingface/accelerate@80da9cfb09bb3cc9f1b385cb55d6b90d025a5fd9 deepspeed diff --git a/tests/e2e/test_mistral.py b/tests/e2e/test_mistral.py new file mode 100644 index 0000000000..4212d36408 --- /dev/null +++ b/tests/e2e/test_mistral.py @@ -0,0 
+1,208 @@
+"""
+E2E tests for Mistral models
+"""
+
+import logging
+import os
+import tempfile
+import unittest
+from pathlib import Path
+
+from transformers.utils import is_torch_bf16_gpu_available
+
+from axolotl.cli import load_datasets
+from axolotl.common.cli import TrainerCliArgs
+from axolotl.train import train
+from axolotl.utils.config import normalize_config
+from axolotl.utils.dict import DictDefault
+
+LOG = logging.getLogger("axolotl.tests.e2e")
+os.environ["WANDB_DISABLED"] = "true"
+
+
+class TestMistral(unittest.TestCase):
+    """
+    Test case for Mistral models using LoRA and full fine-tuning
+    """
+
+    def test_lora(self):
+        # pylint: disable=duplicate-code
+        output_dir = tempfile.mkdtemp()
+        cfg = DictDefault(
+            {
+                "base_model": "openaccess-ai-collective/tiny-mistral",
+                "base_model_config": "openaccess-ai-collective/tiny-mistral",
+                "flash_attention": True,
+                "sequence_len": 1024,
+                "load_in_8bit": True,
+                "adapter": "lora",
+                "lora_r": 32,
+                "lora_alpha": 64,
+                "lora_dropout": 0.05,
+                "lora_target_linear": True,
+                "val_set_size": 0.1,
+                "special_tokens": {
+                    "unk_token": "<unk>",
+                    "bos_token": "<s>",
+                    "eos_token": "</s>",
+                },
+                "datasets": [
+                    {
+                        "path": "mhenrichsen/alpaca_2k_test",
+                        "type": "alpaca",
+                    },
+                ],
+                "num_epochs": 2,
+                "micro_batch_size": 2,
+                "gradient_accumulation_steps": 1,
+                "output_dir": output_dir,
+                "learning_rate": 0.00001,
+                "optimizer": "adamw_torch",
+                "lr_scheduler": "cosine",
+                "max_steps": 20,
+                "save_steps": 10,
+                "eval_steps": 10,
+            }
+        )
+        normalize_config(cfg)
+        cli_args = TrainerCliArgs()
+        dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)
+
+        train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
+        assert (Path(output_dir) / "adapter_model.bin").exists()
+
+    def test_lora_packing(self):
+        # pylint: disable=duplicate-code
+        output_dir = tempfile.mkdtemp()
+        cfg = DictDefault(
+            {
+                "base_model": "openaccess-ai-collective/tiny-mistral",
+                "base_model_config": "openaccess-ai-collective/tiny-mistral",
+                "flash_attention": True,
+                "sample_packing": True,
+                "sequence_len": 1024,
+                "load_in_8bit": True,
+                "adapter": "lora",
+                "lora_r": 32,
+                "lora_alpha": 64,
+                "lora_dropout": 0.05,
+                "lora_target_linear": True,
+                "val_set_size": 0.1,
+                "special_tokens": {
+                    "unk_token": "<unk>",
+                    "bos_token": "<s>",
+                    "eos_token": "</s>",
+                },
+                "datasets": [
+                    {
+                        "path": "mhenrichsen/alpaca_2k_test",
+                        "type": "alpaca",
+                    },
+                ],
+                "num_epochs": 2,
+                "micro_batch_size": 2,
+                "gradient_accumulation_steps": 1,
+                "output_dir": output_dir,
+                "learning_rate": 0.00001,
+                "optimizer": "adamw_torch",
+                "lr_scheduler": "cosine",
+                "max_steps": 20,
+                "save_steps": 10,
+                "eval_steps": 10,
+            }
+        )
+        normalize_config(cfg)
+        cli_args = TrainerCliArgs()
+        dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)
+
+        train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
+        assert (Path(output_dir) / "adapter_model.bin").exists()
+
+    def test_ft(self):
+        # pylint: disable=duplicate-code
+        output_dir = tempfile.mkdtemp()
+        cfg = DictDefault(
+            {
+                "base_model": "openaccess-ai-collective/tiny-mistral",
+                "base_model_config": "openaccess-ai-collective/tiny-mistral",
+                "flash_attention": True,
+                "sequence_len": 1024,
+                "val_set_size": 0.1,
+                "special_tokens": {
+                    "unk_token": "<unk>",
+                    "bos_token": "<s>",
+                    "eos_token": "</s>",
+                },
+                "datasets": [
+                    {
+                        "path": "mhenrichsen/alpaca_2k_test",
+                        "type": "alpaca",
+                    },
+                ],
+                "num_epochs": 2,
+                "micro_batch_size": 2,
+                "gradient_accumulation_steps": 1,
+                "output_dir": output_dir,
+                "learning_rate": 0.00001,
+                "optimizer": "adamw_torch",
+                "lr_scheduler": "cosine",
+                "max_steps": 20,
+                "save_steps": 10,
+                "eval_steps": 10,
+            }
+        )
+        if is_torch_bf16_gpu_available():
+            cfg.bf16 = True
+        else:
+            cfg.fp16 = True
+        normalize_config(cfg)
+        cli_args = TrainerCliArgs()
+        dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)
+
+        train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
+        assert (Path(output_dir) / "pytorch_model.bin").exists()
+
+    def test_ft_packing(self):
+        # pylint: disable=duplicate-code
+        output_dir = tempfile.mkdtemp()
+        cfg = DictDefault(
+            {
+                "base_model": "openaccess-ai-collective/tiny-mistral",
+                "base_model_config": "openaccess-ai-collective/tiny-mistral",
+                "flash_attention": True,
+                "sample_packing": True,
+                "sequence_len": 1024,
+                "val_set_size": 0.1,
+                "special_tokens": {
+                    "unk_token": "<unk>",
+                    "bos_token": "<s>",
+                    "eos_token": "</s>",
+                },
+                "datasets": [
+                    {
+                        "path": "mhenrichsen/alpaca_2k_test",
+                        "type": "alpaca",
+                    },
+                ],
+                "num_epochs": 2,
+                "micro_batch_size": 2,
+                "gradient_accumulation_steps": 1,
+                "output_dir": output_dir,
+                "learning_rate": 0.00001,
+                "optimizer": "adamw_torch",
+                "lr_scheduler": "cosine",
+                "max_steps": 20,
+                "save_steps": 10,
+                "eval_steps": 10,
+            }
+        )
+        if is_torch_bf16_gpu_available():
+            cfg.bf16 = True
+        else:
+            cfg.fp16 = True
+        normalize_config(cfg)
+        cli_args = TrainerCliArgs()
+        dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)
+
+        train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
+        assert (Path(output_dir) / "pytorch_model.bin").exists()

From a7e56d83c2c6cf35dee515ad92da46d887a60ffd Mon Sep 17 00:00:00 2001
From: Napuh <55241721+Napuh@users.noreply.github.com>
Date: Mon, 2 Oct 2023 14:40:05 +0200
Subject: [PATCH 39/77] removed duplicate on requirements.txt (#661)

---
 requirements.txt | 1 -
 1 file changed, 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index b02e656eb2..cf4ce1d832 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,7 +9,6 @@ bitsandbytes>=0.41.1
 accelerate @ git+https://github.com/huggingface/accelerate@80da9cfb09bb3cc9f1b385cb55d6b90d025a5fd9
 deepspeed
 addict
-evaluate
 fire
 PyYAML>=6.0
 datasets

From f4868d733cf69d854f682d0e97e7c044841338e6 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Mon, 2 Oct 2023 08:43:40 -0400
Subject: [PATCH 40/77] make sure we also run CI tests when requirements.txt changes (#663)

---
 .github/workflows/tests.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 419382df74..04fe53c00a 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -6,9 +6,11 @@ on:
       - "main"
     paths:
       - '**.py'
+      - 'requirements.txt'
  pull_request:
    paths:
      - '**.py'
+      - 'requirements.txt'
  workflow_dispatch:

jobs:

From e50a64e85ecd69b7a596eccbf9be90d72d0b02b6 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Mon, 2 Oct 2023 21:07:24 -0400
Subject: [PATCH 41/77] prepared dataset caching, other misc fixes (#665)

* prepared dataset caching, other misc fixes

* also don't load from disk cache unless explicit

---
 examples/cerebras/qlora.yml           | 2 +-
 examples/code-llama/13b/lora.yml      | 2 +-
 examples/code-llama/13b/qlora.yml     | 2 +-
 examples/code-llama/34b/lora.yml      | 2 +-
 examples/code-llama/34b/qlora.yml     | 2 +-
 examples/code-llama/7b/lora.yml       | 2 +-
 examples/code-llama/7b/qlora.yml      | 2 +-
 examples/falcon/config-7b-lora.yml    | 2 +-
 examples/falcon/config-7b-qlora.yml   | 2 +-
 examples/falcon/config-7b.yml         | 2 +-
 examples/gptj/qlora.yml               | 2 +-
 examples/jeopardy-bot/config.yml      | 2 +-
 examples/llama-2/gptq-lora.yml        | 2 +-
 examples/llama-2/lora.yml             | 2 +-
examples/llama-2/qlora.yml | 2 +- examples/llama-2/relora.yml | 2 +- examples/llama-2/tiny-llama.yml | 2 +- examples/mistral/config.yml | 2 +- examples/mpt-7b/config.yml | 2 +- examples/openllama-3b/config.yml | 2 +- examples/openllama-3b/lora.yml | 2 +- examples/openllama-3b/qlora.yml | 2 +- examples/phi/phi-ft.yml | 2 +- examples/phi/phi-qlora.yml | 2 +- examples/pythia-12b/config.yml | 2 +- examples/pythia/lora.yml | 2 +- examples/redpajama/config-3b.yml | 2 +- examples/replit-3b/config-lora.yml | 2 +- examples/xgen-7b/xgen-7b-8k-qlora.yml | 2 +- src/axolotl/cli/__init__.py | 2 +- src/axolotl/utils/data.py | 6 +++--- src/axolotl/utils/tokenization.py | 3 ++- 32 files changed, 35 insertions(+), 34 deletions(-) diff --git a/examples/cerebras/qlora.yml b/examples/cerebras/qlora.yml index eec16193d6..a13517f3ef 100644 --- a/examples/cerebras/qlora.yml +++ b/examples/cerebras/qlora.yml @@ -7,7 +7,7 @@ push_dataset_to_hub: datasets: - path: teknium/GPT4-LLM-Cleaned type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 adapter: qlora lora_model_dir: diff --git a/examples/code-llama/13b/lora.yml b/examples/code-llama/13b/lora.yml index 071b2bc051..91807846b7 100644 --- a/examples/code-llama/13b/lora.yml +++ b/examples/code-llama/13b/lora.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./lora-out diff --git a/examples/code-llama/13b/qlora.yml b/examples/code-llama/13b/qlora.yml index 3de4b3b4b0..9fa05ffab6 100644 --- a/examples/code-llama/13b/qlora.yml +++ b/examples/code-llama/13b/qlora.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./qlora-out diff --git a/examples/code-llama/34b/lora.yml b/examples/code-llama/34b/lora.yml index e52c11181f..a342b6ebc5 100644 --- a/examples/code-llama/34b/lora.yml +++ b/examples/code-llama/34b/lora.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./lora-out diff --git a/examples/code-llama/34b/qlora.yml b/examples/code-llama/34b/qlora.yml index 7e6da2e968..1501dd9a38 100644 --- a/examples/code-llama/34b/qlora.yml +++ b/examples/code-llama/34b/qlora.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./qlora-out diff --git a/examples/code-llama/7b/lora.yml b/examples/code-llama/7b/lora.yml index 5e9d12e7c1..638dddc435 100644 --- a/examples/code-llama/7b/lora.yml +++ b/examples/code-llama/7b/lora.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./lora-out diff --git a/examples/code-llama/7b/qlora.yml b/examples/code-llama/7b/qlora.yml index b24b5c1626..5b3b338222 100644 --- a/examples/code-llama/7b/qlora.yml +++ b/examples/code-llama/7b/qlora.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./qlora-out diff --git a/examples/falcon/config-7b-lora.yml 
b/examples/falcon/config-7b-lora.yml index 738068a474..f45deb6437 100644 --- a/examples/falcon/config-7b-lora.yml +++ b/examples/falcon/config-7b-lora.yml @@ -12,7 +12,7 @@ push_dataset_to_hub: datasets: - path: teknium/GPT4-LLM-Cleaned type: alpaca:chat -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 adapter: lora lora_model_dir: diff --git a/examples/falcon/config-7b-qlora.yml b/examples/falcon/config-7b-qlora.yml index 554081fcba..f593419651 100644 --- a/examples/falcon/config-7b-qlora.yml +++ b/examples/falcon/config-7b-qlora.yml @@ -18,7 +18,7 @@ datasets: data_files: - Chain-of-Thought/formatted_cot_data/gsm8k_train.json type: "alpaca:chat" -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 # enable QLoRA adapter: qlora diff --git a/examples/falcon/config-7b.yml b/examples/falcon/config-7b.yml index 25e67a53b1..777a97b31f 100644 --- a/examples/falcon/config-7b.yml +++ b/examples/falcon/config-7b.yml @@ -12,7 +12,7 @@ push_dataset_to_hub: datasets: - path: teknium/GPT4-LLM-Cleaned type: alpaca:chat -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 adapter: lora_model_dir: diff --git a/examples/gptj/qlora.yml b/examples/gptj/qlora.yml index b41c946fc4..696747dfec 100644 --- a/examples/gptj/qlora.yml +++ b/examples/gptj/qlora.yml @@ -7,7 +7,7 @@ push_dataset_to_hub: datasets: - path: teknium/GPT4-LLM-Cleaned type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 adapter: qlora lora_model_dir: diff --git a/examples/jeopardy-bot/config.yml b/examples/jeopardy-bot/config.yml index 1aaa1b5572..32e7a34ee5 100644 --- a/examples/jeopardy-bot/config.yml +++ b/examples/jeopardy-bot/config.yml @@ -6,7 +6,7 @@ load_in_8bit: false datasets: - path: openaccess-ai-collective/jeopardy type: jeopardy -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.02 adapter: lora_model_dir: diff --git a/examples/llama-2/gptq-lora.yml b/examples/llama-2/gptq-lora.yml index 0d96e4610b..257433f26f 100644 --- a/examples/llama-2/gptq-lora.yml +++ b/examples/llama-2/gptq-lora.yml @@ -15,7 +15,7 @@ hf_use_auth_token: true datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 adapter: lora lora_model_dir: diff --git a/examples/llama-2/lora.yml b/examples/llama-2/lora.yml index 106de6e320..8c0e3e910c 100644 --- a/examples/llama-2/lora.yml +++ b/examples/llama-2/lora.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./lora-out diff --git a/examples/llama-2/qlora.yml b/examples/llama-2/qlora.yml index 870c3fa848..b8209934cc 100644 --- a/examples/llama-2/qlora.yml +++ b/examples/llama-2/qlora.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./qlora-out diff --git a/examples/llama-2/relora.yml b/examples/llama-2/relora.yml index 2b1fad55d8..9f27cafeaa 100644 --- a/examples/llama-2/relora.yml +++ b/examples/llama-2/relora.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: teknium/GPT4-LLM-Cleaned type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./relora-out diff --git a/examples/llama-2/tiny-llama.yml 
b/examples/llama-2/tiny-llama.yml index d556a6d719..0b56ea7d3e 100644 --- a/examples/llama-2/tiny-llama.yml +++ b/examples/llama-2/tiny-llama.yml @@ -12,7 +12,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./lora-out diff --git a/examples/mistral/config.yml b/examples/mistral/config.yml index d199f947be..e644bec507 100644 --- a/examples/mistral/config.yml +++ b/examples/mistral/config.yml @@ -11,7 +11,7 @@ strict: false datasets: - path: mhenrichsen/alpaca_2k_test type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 output_dir: ./out diff --git a/examples/mpt-7b/config.yml b/examples/mpt-7b/config.yml index 0efd28ceea..8d9b429b1d 100644 --- a/examples/mpt-7b/config.yml +++ b/examples/mpt-7b/config.yml @@ -6,7 +6,7 @@ load_in_8bit: false datasets: - path: vicgalle/alpaca-gpt4 type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.02 adapter: lora_model_dir: diff --git a/examples/openllama-3b/config.yml b/examples/openllama-3b/config.yml index 961aeabdaa..dd11d53b00 100644 --- a/examples/openllama-3b/config.yml +++ b/examples/openllama-3b/config.yml @@ -9,7 +9,7 @@ push_dataset_to_hub: datasets: - path: teknium/GPT4-LLM-Cleaned type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.02 adapter: lora_model_dir: diff --git a/examples/openllama-3b/lora.yml b/examples/openllama-3b/lora.yml index 17fa7fa8bd..fad3fb5511 100644 --- a/examples/openllama-3b/lora.yml +++ b/examples/openllama-3b/lora.yml @@ -9,7 +9,7 @@ push_dataset_to_hub: datasets: - path: teknium/GPT4-LLM-Cleaned type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.02 adapter: lora lora_model_dir: diff --git a/examples/openllama-3b/qlora.yml b/examples/openllama-3b/qlora.yml index deba03fd54..80d4d727bd 100644 --- a/examples/openllama-3b/qlora.yml +++ b/examples/openllama-3b/qlora.yml @@ -9,7 +9,7 @@ push_dataset_to_hub: datasets: - path: teknium/GPT4-LLM-Cleaned type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 adapter: qlora lora_model_dir: diff --git a/examples/phi/phi-ft.yml b/examples/phi/phi-ft.yml index 9eb1080494..668eea317d 100644 --- a/examples/phi/phi-ft.yml +++ b/examples/phi/phi-ft.yml @@ -13,7 +13,7 @@ datasets: - path: garage-bAInd/Open-Platypus type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.05 output_dir: ./phi-sft-out diff --git a/examples/phi/phi-qlora.yml b/examples/phi/phi-qlora.yml index f3a06c39d5..a548b3f052 100644 --- a/examples/phi/phi-qlora.yml +++ b/examples/phi/phi-qlora.yml @@ -13,7 +13,7 @@ datasets: - path: garage-bAInd/Open-Platypus type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.05 output_dir: ./phi-sft-out diff --git a/examples/pythia-12b/config.yml b/examples/pythia-12b/config.yml index dc06eb6b6d..4e0e1523a4 100644 --- a/examples/pythia-12b/config.yml +++ b/examples/pythia-12b/config.yml @@ -10,7 +10,7 @@ device_map: auto datasets: - path: vicgalle/alpaca-gpt4 type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.05 adapter: lora_model_dir: diff --git a/examples/pythia/lora.yml b/examples/pythia/lora.yml index dfe7ef846f..f9a83c059a 100644 --- a/examples/pythia/lora.yml +++ b/examples/pythia/lora.yml @@ -4,7 +4,7 @@ load_in_8bit: 
true datasets: - path: teknium/GPT4-LLM-Cleaned type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.05 adapter: lora lora_model_dir: diff --git a/examples/redpajama/config-3b.yml b/examples/redpajama/config-3b.yml index 10297d860b..97f31c87ac 100644 --- a/examples/redpajama/config-3b.yml +++ b/examples/redpajama/config-3b.yml @@ -7,7 +7,7 @@ load_in_8bit: false datasets: - path: vicgalle/alpaca-gpt4 type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.02 adapter: lora_model_dir: diff --git a/examples/replit-3b/config-lora.yml b/examples/replit-3b/config-lora.yml index 71a80d17df..d345e25a0b 100644 --- a/examples/replit-3b/config-lora.yml +++ b/examples/replit-3b/config-lora.yml @@ -5,7 +5,7 @@ load_in_8bit: false datasets: - path: vicgalle/alpaca-gpt4 type: alpaca -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.05 adapter: lora lora_model_dir: diff --git a/examples/xgen-7b/xgen-7b-8k-qlora.yml b/examples/xgen-7b/xgen-7b-8k-qlora.yml index 0f215afb94..352dcb610a 100644 --- a/examples/xgen-7b/xgen-7b-8k-qlora.yml +++ b/examples/xgen-7b/xgen-7b-8k-qlora.yml @@ -16,7 +16,7 @@ datasets: data_files: - openassistant_best_replies_train.jsonl type: "completion" -dataset_prepared_path: last_run_prepared +dataset_prepared_path: val_set_size: 0.01 # enable QLoRA adapter: qlora diff --git a/src/axolotl/cli/__init__.py b/src/axolotl/cli/__init__.py index c3b580391a..c21d931705 100644 --- a/src/axolotl/cli/__init__.py +++ b/src/axolotl/cli/__init__.py @@ -51,7 +51,7 @@ def print_axolotl_text_art(suffix=None): def get_multi_line_input() -> Optional[str]: - print("Give me an instruction (Ctrl + D to finish): ") + print("Give me an instruction (Ctrl + D to submit): ") instruction = "" for line in sys.stdin: instruction += line # pylint: disable=consider-using-join diff --git a/src/axolotl/utils/data.py b/src/axolotl/utils/data.py index 34a5baaffd..bdad21fb11 100644 --- a/src/axolotl/utils/data.py +++ b/src/axolotl/utils/data.py @@ -122,7 +122,7 @@ def load_tokenized_prepared_datasets( if dataset: ... - elif any(prepared_ds_path.glob("*")): + elif cfg.dataset_prepared_path and any(prepared_ds_path.glob("*")): LOG.info(f"Loading prepared dataset from disk at {prepared_ds_path}...") dataset = load_from_disk(str(prepared_ds_path)) LOG.info("Prepared dataset loaded from disk...") @@ -357,7 +357,7 @@ def for_d_in_datasets(dataset_configs): if len(datasets) > 1: LOG.info("shuffle merged datasets") dataset = dataset.shuffle(seed=seed) - if cfg.local_rank == 0: + if cfg.local_rank == 0 and cfg.dataset_prepared_path: LOG.info(f"Saving merged prepared dataset to disk... {prepared_ds_path}") dataset.save_to_disk(prepared_ds_path) if cfg.push_dataset_to_hub: @@ -425,7 +425,7 @@ def load_prepare_datasets( if dataset: ... - elif any(prepared_ds_path.glob("*")): + elif cfg.dataset_prepared_path and any(prepared_ds_path.glob("*")): LOG.info( f"Loading prepared packed dataset from disk at {prepared_ds_path}..." 
) diff --git a/src/axolotl/utils/tokenization.py b/src/axolotl/utils/tokenization.py index 2b7608b636..deb9bb00ec 100644 --- a/src/axolotl/utils/tokenization.py +++ b/src/axolotl/utils/tokenization.py @@ -31,7 +31,8 @@ def check_example_labels(example, tokenizer, text_only=False): ) colored_tokens.append(colored_token) - LOG.info(" ".join(colored_tokens)) + delimiter = "" if text_only else " " + LOG.info(delimiter.join(colored_tokens)) LOG.info("\n\n\n") print(" ".join(colored_tokens)) From f34648c8b9dc254034926a42edefe4ae28a43855 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Mon, 2 Oct 2023 21:07:41 -0400 Subject: [PATCH 42/77] remove patch fix for phi (#664) --- src/axolotl/utils/models.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 63e34293ee..3287c0ee93 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -1,5 +1,4 @@ """Module for models and model loading""" -import importlib import logging import math import os @@ -176,20 +175,6 @@ def load_model( LOG.info("patching _expand_mask") hijack_expand_mask() - # special handling b/c remote MixFormers code doesn't have _no_split_modules set - if ( - "MixFormerSequentialConfig" in model_config.__class__.__name__ - and cfg.model_type == "AutoModelForCausalLM" - ): - module_name = model_config.__class__.__module__.replace( - ".configuration_mixformer_sequential", ".modeling_mixformer_sequential" - ) - modeling_phi = importlib.import_module(module_name) - # pylint:disable=protected-access - modeling_phi.MixFormerSequentialForCausalLM._no_split_modules = [ - "ParallelBlock" - ] - model_kwargs = {} if cfg.model_revision: model_kwargs["revision"] = cfg.model_revision From 2642caedf2b61c32a1ba5a2b9307a743131e3fa6 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Mon, 2 Oct 2023 21:08:07 -0400 Subject: [PATCH 43/77] refactor to set eval_batch_size earlier if unset, so we can warn if mismatched (#662) --- README.md | 2 +- src/axolotl/utils/config.py | 7 +++++++ src/axolotl/utils/trainer.py | 4 +--- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 60013df938..c2d8e7d8c6 100644 --- a/README.md +++ b/README.md @@ -571,7 +571,7 @@ torch_compile_backend: # Optional[str] # training hyperparameters gradient_accumulation_steps: 1 micro_batch_size: 2 -eval_batch_size: 2 +eval_batch_size: num_epochs: 3 warmup_steps: 100 learning_rate: 0.00003 diff --git a/src/axolotl/utils/config.py b/src/axolotl/utils/config.py index a8c41d95b0..9503d838cb 100644 --- a/src/axolotl/utils/config.py +++ b/src/axolotl/utils/config.py @@ -49,6 +49,8 @@ def normalize_config(cfg): cfg.batch_size = ( cfg.batch_size or cfg.micro_batch_size * cfg.gradient_accumulation_steps ) + if cfg.eval_batch_size is None: + cfg.eval_batch_size = cfg.micro_batch_size cfg.world_size = int(os.environ.get("WORLD_SIZE", 1)) cfg.local_rank = int(os.environ.get("LOCAL_RANK", 0)) cfg.eval_table_size = cfg.eval_table_size or 0 @@ -157,6 +159,11 @@ def validate_config(cfg): "batch_size is not recommended. Please use gradient_accumulation_steps instead.", "To calculate the equivalent gradient_accumulation_steps, divide batch_size / micro_batch_size / number of gpus.", ) + if cfg.eval_batch_size != cfg.micro_batch_size: + LOG.warning( + "eval_batch_size != micro_batch_size. This can lead to VRAM instability." 
+ ) + if cfg.load_4bit: raise ValueError("cfg.load_4bit parameter has been deprecated") diff --git a/src/axolotl/utils/trainer.py b/src/axolotl/utils/trainer.py index 80ee5c8c61..a10a2b0e77 100644 --- a/src/axolotl/utils/trainer.py +++ b/src/axolotl/utils/trainer.py @@ -668,9 +668,7 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer, total_num_ max_steps=total_num_steps if cfg.max_steps else -1, max_seq_length=cfg.sequence_len, per_device_train_batch_size=cfg.micro_batch_size, - per_device_eval_batch_size=cfg.eval_batch_size - if cfg.eval_batch_size is not None - else cfg.micro_batch_size, + per_device_eval_batch_size=cfg.eval_batch_size, gradient_accumulation_steps=cfg.gradient_accumulation_steps, eval_accumulation_steps=cfg.gradient_accumulation_steps, num_train_epochs=cfg.num_epochs, From 90e0d673f76f84478856434deb6024c5c869a5ad Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Tue, 3 Oct 2023 23:38:42 +0900 Subject: [PATCH 44/77] Feat: Add config yaml to section for reprod in bug-report.yaml (#667) * Update bug-report.yaml * Update bug-report.yaml * Update bug-report.yaml --- .github/ISSUE_TEMPLATE/bug-report.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/bug-report.yaml b/.github/ISSUE_TEMPLATE/bug-report.yaml index cca243fdf2..5cf332587a 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yaml +++ b/.github/ISSUE_TEMPLATE/bug-report.yaml @@ -53,6 +53,13 @@ body: validations: required: true + - type: textarea + id: config + attributes: + label: Config yaml + description: | + Please attach the config yaml! + - type: textarea id: possible-solution attributes: From 697c50d40872436c5f01216b264bea558eddcd76 Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Wed, 4 Oct 2023 20:40:47 +0900 Subject: [PATCH 45/77] Feat: Allow usage of native Mistral FA when no sample_packing (#669) * Allow usage of native Mistral FA when no sample_packing * fix: do not apply custom patch when sample_pack off * chore: lint * chore: pin transformer to v4.35.0.dev0 * fix: split sample_packing to separate test --- requirements.txt | 2 +- src/axolotl/utils/models.py | 8 +- tests/e2e/test_mistral.py | 92 --------------------- tests/e2e/test_mistral_samplepack.py | 118 +++++++++++++++++++++++++++ 4 files changed, 125 insertions(+), 95 deletions(-) create mode 100644 tests/e2e/test_mistral_samplepack.py diff --git a/requirements.txt b/requirements.txt index cf4ce1d832..554f2ec69a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ torch==2.0.1 auto-gptq packaging peft @ git+https://github.com/huggingface/peft.git -transformers @ git+https://github.com/huggingface/transformers.git@5e11d72d4d0939138fbabfebe9a69d2061519547 +transformers @ git+https://github.com/huggingface/transformers.git@bd6205919aad4d3a2300a39a98a642f1cc3a5348 bitsandbytes>=0.41.1 accelerate @ git+https://github.com/huggingface/accelerate@80da9cfb09bb3cc9f1b385cb55d6b90d025a5fd9 deepspeed diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 3287c0ee93..6e520bd50e 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -149,7 +149,7 @@ def load_model( # Note: This might overwrite previous additional_special_tokens tokenizer.add_special_tokens({"additional_special_tokens": [MEM_TOKEN]}) - if cfg.is_mistral_derived_model and cfg.flash_attention: + if cfg.is_mistral_derived_model and cfg.flash_attention and cfg.sample_packing: from axolotl.monkeypatch.mistral_attn_hijack_flash import ( replace_mistral_attn_with_flash_attn, ) @@ -200,7 +200,11 
@@ def load_model( ) # sample packing uses custom FA2 patch if cfg.flash_attention and not cfg.sample_packing: - if cfg.is_llama_derived_model or cfg.is_falcon_derived_model: + if ( + cfg.is_llama_derived_model + or cfg.is_falcon_derived_model + or cfg.is_mistral_derived_model + ): model_kwargs["use_flash_attention_2"] = True try: if cfg.is_llama_derived_model and not cfg.trust_remote_code and not cfg.gptq: diff --git a/tests/e2e/test_mistral.py b/tests/e2e/test_mistral.py index 4212d36408..f3098f0583 100644 --- a/tests/e2e/test_mistral.py +++ b/tests/e2e/test_mistral.py @@ -71,53 +71,6 @@ def test_lora(self): train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta) assert (Path(output_dir) / "adapter_model.bin").exists() - def test_lora_packing(self): - # pylint: disable=duplicate-code - output_dir = tempfile.mkdtemp() - cfg = DictDefault( - { - "base_model": "openaccess-ai-collective/tiny-mistral", - "base_model_config": "openaccess-ai-collective/tiny-mistral", - "flash_attention": True, - "sample_packing": True, - "sequence_len": 1024, - "load_in_8bit": True, - "adapter": "lora", - "lora_r": 32, - "lora_alpha": 64, - "lora_dropout": 0.05, - "lora_target_linear": True, - "val_set_size": 0.1, - "special_tokens": { - "unk_token": "", - "bos_token": "", - "eos_token": "", - }, - "datasets": [ - { - "path": "mhenrichsen/alpaca_2k_test", - "type": "alpaca", - }, - ], - "num_epochs": 2, - "micro_batch_size": 2, - "gradient_accumulation_steps": 1, - "output_dir": output_dir, - "learning_rate": 0.00001, - "optimizer": "adamw_torch", - "lr_scheduler": "cosine", - "max_steps": 20, - "save_steps": 10, - "eval_steps": 10, - } - ) - normalize_config(cfg) - cli_args = TrainerCliArgs() - dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args) - - train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta) - assert (Path(output_dir) / "adapter_model.bin").exists() - def test_ft(self): # pylint: disable=duplicate-code output_dir = tempfile.mkdtemp() @@ -161,48 +114,3 @@ def test_ft(self): train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta) assert (Path(output_dir) / "pytorch_model.bin").exists() - - def test_ft_packing(self): - # pylint: disable=duplicate-code - output_dir = tempfile.mkdtemp() - cfg = DictDefault( - { - "base_model": "openaccess-ai-collective/tiny-mistral", - "base_model_config": "openaccess-ai-collective/tiny-mistral", - "flash_attention": True, - "sample_packing": True, - "sequence_len": 1024, - "val_set_size": 0.1, - "special_tokens": { - "unk_token": "", - "bos_token": "", - "eos_token": "", - }, - "datasets": [ - { - "path": "mhenrichsen/alpaca_2k_test", - "type": "alpaca", - }, - ], - "num_epochs": 2, - "micro_batch_size": 2, - "gradient_accumulation_steps": 1, - "output_dir": output_dir, - "learning_rate": 0.00001, - "optimizer": "adamw_torch", - "lr_scheduler": "cosine", - "max_steps": 20, - "save_steps": 10, - "eval_steps": 10, - } - ) - if is_torch_bf16_gpu_available(): - cfg.bf16 = True - else: - cfg.fp16 = True - normalize_config(cfg) - cli_args = TrainerCliArgs() - dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args) - - train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta) - assert (Path(output_dir) / "pytorch_model.bin").exists() diff --git a/tests/e2e/test_mistral_samplepack.py b/tests/e2e/test_mistral_samplepack.py new file mode 100644 index 0000000000..623d20b0c2 --- /dev/null +++ b/tests/e2e/test_mistral_samplepack.py @@ -0,0 +1,118 @@ +""" +E2E tests for lora llama +""" + +import logging +import os +import tempfile +import unittest +from 
pathlib import Path + +from transformers.utils import is_torch_bf16_gpu_available + +from axolotl.cli import load_datasets +from axolotl.common.cli import TrainerCliArgs +from axolotl.train import train +from axolotl.utils.config import normalize_config +from axolotl.utils.dict import DictDefault + +LOG = logging.getLogger("axolotl.tests.e2e") +os.environ["WANDB_DISABLED"] = "true" + + +class TestMistral(unittest.TestCase): + """ + Test case for Llama models using LoRA + """ + + def test_lora_packing(self): + # pylint: disable=duplicate-code + output_dir = tempfile.mkdtemp() + cfg = DictDefault( + { + "base_model": "openaccess-ai-collective/tiny-mistral", + "base_model_config": "openaccess-ai-collective/tiny-mistral", + "flash_attention": True, + "sample_packing": True, + "sequence_len": 1024, + "load_in_8bit": True, + "adapter": "lora", + "lora_r": 32, + "lora_alpha": 64, + "lora_dropout": 0.05, + "lora_target_linear": True, + "val_set_size": 0.1, + "special_tokens": { + "unk_token": "", + "bos_token": "", + "eos_token": "", + }, + "datasets": [ + { + "path": "mhenrichsen/alpaca_2k_test", + "type": "alpaca", + }, + ], + "num_epochs": 2, + "micro_batch_size": 2, + "gradient_accumulation_steps": 1, + "output_dir": output_dir, + "learning_rate": 0.00001, + "optimizer": "adamw_torch", + "lr_scheduler": "cosine", + "max_steps": 20, + "save_steps": 10, + "eval_steps": 10, + } + ) + normalize_config(cfg) + cli_args = TrainerCliArgs() + dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args) + + train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta) + assert (Path(output_dir) / "adapter_model.bin").exists() + + def test_ft_packing(self): + # pylint: disable=duplicate-code + output_dir = tempfile.mkdtemp() + cfg = DictDefault( + { + "base_model": "openaccess-ai-collective/tiny-mistral", + "base_model_config": "openaccess-ai-collective/tiny-mistral", + "flash_attention": True, + "sample_packing": True, + "sequence_len": 1024, + "val_set_size": 0.1, + "special_tokens": { + "unk_token": "", + "bos_token": "", + "eos_token": "", + }, + "datasets": [ + { + "path": "mhenrichsen/alpaca_2k_test", + "type": "alpaca", + }, + ], + "num_epochs": 2, + "micro_batch_size": 2, + "gradient_accumulation_steps": 1, + "output_dir": output_dir, + "learning_rate": 0.00001, + "optimizer": "adamw_torch", + "lr_scheduler": "cosine", + "max_steps": 20, + "save_steps": 10, + "eval_steps": 10, + } + ) + if is_torch_bf16_gpu_available(): + cfg.bf16 = True + else: + cfg.fp16 = True + normalize_config(cfg) + cli_args = TrainerCliArgs() + dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args) + + train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta) + assert (Path(output_dir) / "pytorch_model.bin").exists() From e62d5901b55653e6a24ce28fc323669179e28e45 Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Wed, 4 Oct 2023 20:41:26 +0900 Subject: [PATCH 46/77] chore: Clean up repetitive model kwargs (#670) --- src/axolotl/utils/models.py | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 6e520bd50e..6c8e7b8f0c 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -176,6 +176,10 @@ def load_model( hijack_expand_mask() model_kwargs = {} + + model_kwargs["device_map"] = cfg.device_map + model_kwargs["torch_dtype"] = cfg.torch_dtype + if cfg.model_revision: model_kwargs["revision"] = cfg.model_revision if cfg.gptq: @@ -206,6 +210,7 @@ def load_model( or cfg.is_mistral_derived_model ): 
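            # llama, falcon, and mistral all ship native FlashAttention-2 support in the
            # pinned transformers version, so no monkeypatch is needed on this code path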
model_kwargs["use_flash_attention_2"] = True + try: if cfg.is_llama_derived_model and not cfg.trust_remote_code and not cfg.gptq: from transformers import LlamaForCausalLM @@ -220,10 +225,8 @@ def load_model( model = LlamaForCausalLM.from_pretrained( base_model, config=config, - device_map=cfg.device_map, load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None, load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None, - torch_dtype=cfg.torch_dtype, **model_kwargs, ) # elif model_type == "GPTNeoXForCausalLM" and cfg.flash_attention: @@ -257,28 +260,22 @@ def load_model( model = MixFormerSequentialForCausalLM.from_pretrained( base_model, - device_map=cfg.device_map, load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None, load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None, - torch_dtype=cfg.torch_dtype, **model_kwargs, ) elif model_type and not cfg.trust_remote_code: if cfg.gptq: model = AutoModelForCausalLM.from_pretrained( base_model, - device_map=cfg.device_map, - torch_dtype=cfg.torch_dtype, trust_remote_code=cfg.trust_remote_code or False, **model_kwargs, ) else: model = getattr(transformers, model_type).from_pretrained( base_model, - device_map=cfg.device_map, load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None, load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None, - torch_dtype=cfg.torch_dtype, trust_remote_code=cfg.trust_remote_code or False, **model_kwargs, ) @@ -307,8 +304,6 @@ def load_model( model = AutoModelForCausalLM.from_pretrained( base_model, config=config, - device_map=cfg.device_map, - torch_dtype=cfg.torch_dtype, trust_remote_code=cfg.trust_remote_code or False, **model_kwargs, ) @@ -316,10 +311,8 @@ def load_model( model = AutoModelForCausalLM.from_pretrained( base_model, config=config, - device_map=cfg.device_map, load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None, load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None, - torch_dtype=cfg.torch_dtype, trust_remote_code=cfg.trust_remote_code or False, **model_kwargs, ) @@ -330,10 +323,8 @@ def load_model( LOG.exception(err) model = AutoModelForCausalLM.from_pretrained( base_model, - device_map=cfg.device_map, load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None, load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None, - torch_dtype=cfg.torch_dtype, trust_remote_code=cfg.trust_remote_code or False, **model_kwargs, ) From 43856c0a393fb7c4c44c56dc1a35ab7bc4bd52fd Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Wed, 4 Oct 2023 21:32:19 +0900 Subject: [PATCH 47/77] Fix(version): Update FA to work with Mistral SWA (#673) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 554f2ec69a..cced386735 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,7 +12,7 @@ addict fire PyYAML>=6.0 datasets -flash-attn>=2.2.1 +flash-attn>=2.3.0 sentencepiece wandb einops From e0b7eeabfd0f997d41964d1301900a6b277b4146 Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Fri, 6 Oct 2023 03:50:49 +0900 Subject: [PATCH 48/77] Fix(tokenizer): Set rstrip,lstrip,norm to False (#678) --- src/axolotl/utils/models.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 6c8e7b8f0c..7ef22cf5f0 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -11,6 +11,7 @@ from peft import PeftConfig, prepare_model_for_kbit_training from peft.tuners.lora import QuantLinear from transformers import ( # noqa: F401 + AddedToken, AutoConfig, 
AutoModelForCausalLM, AutoTokenizer, @@ -82,9 +83,16 @@ def load_tokenizer(cfg): if cfg.special_tokens: for k, val in cfg.special_tokens.items(): - tokenizer.add_special_tokens({k: val}) + tokenizer.add_special_tokens( + {k: AddedToken(val, rstrip=False, lstrip=False, normalized=False)} + ) if cfg.tokens: - tokenizer.add_tokens(list(cfg.tokens)) + tokenizer.add_tokens( + [ + AddedToken(token, rstrip=False, lstrip=False, normalized=False) + for token in cfg.tokens + ] + ) return tokenizer From 69fac9a0205c3b4da5ef528585d6be9492673812 Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Fri, 6 Oct 2023 03:56:18 +0900 Subject: [PATCH 49/77] Fix: Future deprecation warning with use_auth_token (#680) --- src/axolotl/utils/data.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/axolotl/utils/data.py b/src/axolotl/utils/data.py index bdad21fb11..bac7d96c9e 100644 --- a/src/axolotl/utils/data.py +++ b/src/axolotl/utils/data.py @@ -114,7 +114,7 @@ def load_tokenized_prepared_datasets( if cfg.push_dataset_to_hub: dataset = load_dataset( f"{cfg.push_dataset_to_hub}/{ds_hash}", - use_auth_token=use_auth_token, + token=use_auth_token, ) dataset = dataset["train"] except Exception: # pylint: disable=broad-except # nosec @@ -155,7 +155,7 @@ def for_d_in_datasets(dataset_configs): d.path, name=d.name, streaming=True, - use_auth_token=use_auth_token, + token=use_auth_token, ) ds_from_hub = True except FileNotFoundError: @@ -202,7 +202,7 @@ def for_d_in_datasets(dataset_configs): name=d.name, streaming=False, data_files=d.data_files, - use_auth_token=use_auth_token, + token=use_auth_token, ) else: if isinstance(d.data_files, str): @@ -417,7 +417,7 @@ def load_prepare_datasets( ) dataset = load_dataset( f"{cfg.push_dataset_to_hub}/{ds_hash}", - use_auth_token=use_auth_token, + token=use_auth_token, ) dataset = dataset["train"] except Exception: # pylint: disable=broad-except # nosec From 133e676bcc7ad961cbf78cb4823948fbc1287ef1 Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Fri, 6 Oct 2023 04:09:14 +0900 Subject: [PATCH 50/77] Feat: Set WORKDIR to /workspace/axolotl (#679) --- docker/Dockerfile | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 8608e2348b..7b121aaa7b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -12,17 +12,18 @@ RUN apt-get update && \ WORKDIR /workspace RUN git clone --depth=1 https://github.com/OpenAccess-AI-Collective/axolotl.git + +WORKDIR /workspace/axolotl + # If AXOLOTL_EXTRAS is set, append it in brackets -RUN cd axolotl && \ - if [ "$AXOLOTL_EXTRAS" != "" ] ; then \ +RUN if [ "$AXOLOTL_EXTRAS" != "" ] ; then \ pip install -e .[flash-attn,$AXOLOTL_EXTRAS]; \ else \ pip install -e .[flash-attn]; \ fi # fix so that git fetch/pull from remote works -RUN cd axolotl && \ - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" && \ +RUN git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" && \ git config --get remote.origin.fetch # helper for huggingface-login cli From eb480dfd68afced2c9a10632e4ff4c3cab762d78 Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Fri, 6 Oct 2023 04:12:54 +0900 Subject: [PATCH 51/77] Fix: ValueError when FA + Mistral when padding_side=right (#681) * Fix: ValueError when FA + Mistral when padding_side=right * fix: remove tokenizer class check --- src/axolotl/utils/models.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 7ef22cf5f0..c05bccbf08 100644 --- 
a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -81,6 +81,9 @@ def load_tokenizer(cfg): tokenizer.add_special_tokens({"pad_token": "[PAD]"}) os.environ["TOKENIZERS_PARALLELISM"] = "false" + if cfg.is_mistral_derived_model: + tokenizer.padding_side = "left" + if cfg.special_tokens: for k, val in cfg.special_tokens.items(): tokenizer.add_special_tokens( From 2d60ba3a6ea4def14e6ab974299322a0bf90d5bb Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Thu, 5 Oct 2023 16:03:43 -0400 Subject: [PATCH 52/77] flash_attention + sample packing for stablelm 3b (#671) * stablelm epoch fa patch * is causal for fa * working stablelm fa w packing * chore: pre-commit linting --- .../monkeypatch/btlm_attn_hijack_flash.py | 4 +- .../monkeypatch/stablelm_attn_hijack_flash.py | 415 ++++++++++++++++++ src/axolotl/utils/models.py | 11 + 3 files changed, 429 insertions(+), 1 deletion(-) create mode 100644 src/axolotl/monkeypatch/stablelm_attn_hijack_flash.py diff --git a/src/axolotl/monkeypatch/btlm_attn_hijack_flash.py b/src/axolotl/monkeypatch/btlm_attn_hijack_flash.py index be5a705595..1275906804 100644 --- a/src/axolotl/monkeypatch/btlm_attn_hijack_flash.py +++ b/src/axolotl/monkeypatch/btlm_attn_hijack_flash.py @@ -7,6 +7,7 @@ from typing import Optional, Tuple import torch +from accelerate import init_empty_weights from flash_attn.flash_attn_interface import flash_attn_func from transformers import AutoConfig, AutoModelForCausalLM @@ -17,7 +18,8 @@ def replace_btlm_attn_with_flash_attn(model_name="cerebras/btlm-3b-8k-base"): # this is a wonky hack to get the remotely loaded module model_config = AutoConfig.from_pretrained(model_name, trust_remote_code=True) # we need to load the model here in order for modeling_btlm to be available - AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True) + with init_empty_weights(): + AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True) module_name = model_config.__class__.__module__.replace( ".configuration_btlm", ".modeling_btlm" ) diff --git a/src/axolotl/monkeypatch/stablelm_attn_hijack_flash.py b/src/axolotl/monkeypatch/stablelm_attn_hijack_flash.py new file mode 100644 index 0000000000..0269f90157 --- /dev/null +++ b/src/axolotl/monkeypatch/stablelm_attn_hijack_flash.py @@ -0,0 +1,415 @@ +# coding=utf-8 +# Copyright 2023 Stability AI, EleutherAI, and The HuggingFace Inc. team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# This code is based off the following work: +# https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/modeling_llama.py +# https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt_neox/modeling_gpt_neox.py +""" PyTorch StableLM Epoch model. 
""" +import importlib +import math +from typing import Optional, Tuple, Union + +import torch +import torch.utils.checkpoint +from accelerate import init_empty_weights +from einops import rearrange +from flash_attn.flash_attn_interface import ( # pylint: disable=ungrouped-imports + flash_attn_varlen_qkvpacked_func, +) +from torch import nn +from transformers import AutoConfig, AutoModelForCausalLM +from transformers.modeling_outputs import BaseModelOutputWithPast +from transformers.utils import logging + +from axolotl.monkeypatch.utils import get_cu_seqlens_from_pos_ids + +logger = logging.get_logger(__name__) + + +def replace_stablelm_attn_with_flash_attn(model_name="stabilityai/stablelm-3b-4e1t"): + # this is a wonky hack to get the remotely loaded module + model_config = AutoConfig.from_pretrained(model_name, trust_remote_code=True) + # we need to load the model here in order for modeling_stablelm_epoch to be available + with init_empty_weights(): + AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True) + module_name = model_config.__class__.__module__.replace( + ".configuration_stablelm_epoch", ".modeling_stablelm_epoch" + ) + modeling_stablelm = importlib.import_module(module_name) + modeling_stablelm.Attention.forward = ( # pylint: disable=protected-access + flashattn_attn + ) + modeling_stablelm.StableLMEpochModel.forward = ( # pylint: disable=protected-access + stablelm_model_forward + ) + modeling_stablelm.DecoderLayer.forward = ( # pylint: disable=protected-access + decoder_layer_forward + ) + + +def rotate_half(x: torch.Tensor): + """Rotates half the hidden dims of the input.""" + # pylint: disable=invalid-name + x1, x2 = torch.chunk(x, 2, dim=-1) + return torch.cat((-x2, x1), dim=-1) + + +def apply_rotary_pos_emb(q, k, cos, sin, position_ids): + # The first two dimensions of cos and sin are always 1, so we can `squeeze` them. + # pylint: disable=invalid-name + cos = cos.squeeze(1).squeeze(0) # [seq_len, dim] + sin = sin.squeeze(1).squeeze(0) # [seq_len, dim] + cos = cos[position_ids].unsqueeze(1) # [batch_size, 1, seq_len, dim] + sin = sin[position_ids].unsqueeze(1) # [batch_size, 1, seq_len, dim] + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """ + This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). 
The hidden states go from (batch, + num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim) + """ + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, None, :, :].expand( + batch, num_key_value_heads, n_rep, slen, head_dim + ) + return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim) + + +def flashattn_attn( + self, + hidden_states: torch.FloatTensor, + attention_mask: torch.FloatTensor, + position_ids: torch.LongTensor, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: Optional[bool] = False, # pylint: disable=unused-argument + use_cache: Optional[bool] = False, + cu_seqlens: Optional[torch.Tensor] = None, + max_seqlen: Optional[torch.Tensor] = None, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view( + bsz, q_len, self.num_heads, self.head_dim + ).transpose(1, 2) + key_states = key_states.view( + bsz, q_len, self.num_key_value_heads, self.head_dim + ).transpose(1, 2) + value_states = value_states.view( + bsz, q_len, self.num_key_value_heads, self.head_dim + ).transpose(1, 2) + + query_rot = query_states[..., : self.rotary_ndims] + query_pass = query_states[..., self.rotary_ndims :] + key_rot = key_states[..., : self.rotary_ndims] + key_pass = key_states[..., self.rotary_ndims :] + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value[0].shape[-2] + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb( + query_rot, key_rot, cos, sin, position_ids + ) + + # [batch_size, num_heads, seq_len, head_dim] + query_states = torch.cat((query_states, query_pass), dim=-1) + key_states = torch.cat((key_states, key_pass), dim=-1) + + if past_key_value is not None: + # Reuse k, v, self_attention + key_states = torch.cat((past_key_value[0], key_states), dim=2) + value_states = torch.cat((past_key_value[1], value_states), dim=2) + + past_key_value = (key_states, value_states) if use_cache else None + + # Repeat k/v heads if n_kv_heads < n_heads + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + if cu_seqlens is not None and max_seqlen is not None and cu_seqlens.dim() == 1: + # special handling using sample packing + qkv = torch.stack( + [query_states, key_states, value_states], dim=2 + ) # [bsz, nh, 3, q_len, hd] + qkv = qkv.transpose(1, 3) # [bsz, q_len, 3, nh, hd] + qkv = rearrange(qkv, "b s ... -> (b s) ...") + softmax_scale = None + + output = flash_attn_varlen_qkvpacked_func( + qkv, cu_seqlens, max_seqlen, 0.0, softmax_scale=softmax_scale, causal=True + ) + + attn_output = rearrange(output, "(b s) ... 
-> b s ...", b=bsz) + attn_output = rearrange(attn_output, "b s h d -> b s (h d)") + else: + attn_weights = torch.matmul( + query_states, key_states.transpose(2, 3) + ) / math.sqrt(self.head_dim) + + if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): + raise ValueError( + f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is" + f" {attn_weights.size()}" + ) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}" + ) + attn_weights = attn_weights + attention_mask + + # Upcast attention to fp32 + attn_weights = nn.functional.softmax( + attn_weights, dim=-1, dtype=torch.float32 + ).to(query_states.dtype) + attn_output = torch.matmul(attn_weights, value_states) + + if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): + raise ValueError( + f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is" + f" {attn_output.size()}" + ) + + # Merge heads + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + # Final linear projection + attn_output = self.o_proj(attn_output) + + return attn_output, None, past_key_value + + +def decoder_layer_forward( + self, + hidden_states: Optional[torch.FloatTensor], + attention_mask: Optional[torch.FloatTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: Optional[bool] = False, + use_cache: Optional[bool] = False, + cu_seqlens: Optional[torch.Tensor] = None, + max_seqlen: Optional[torch.Tensor] = None, +) -> Union[ + Tuple[torch.Tensor], Optional[Tuple[torch.Tensor, Tuple[torch.FloatTensor, ...]]] +]: + # pylint: disable=duplicate-code + residual = hidden_states + + hidden_states = self.input_layernorm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.self_attn( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + hidden_states = residual + hidden_states + + # Fully Connected + residual = hidden_states + hidden_states = self.post_attention_layernorm(hidden_states) + hidden_states = self.mlp(hidden_states) + hidden_states = residual + hidden_states + + outputs = (hidden_states,) + + if output_attentions: + outputs += (self_attn_weights,) + + if use_cache: + outputs += (present_key_value,) + + return outputs + + +def stablelm_model_forward( + self, + input_ids: Optional[torch.LongTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, +) -> Union[Tuple, BaseModelOutputWithPast]: + # pylint: disable=duplicate-code + output_attentions = ( + output_attentions + if output_attentions is not None + else self.config.output_attentions + ) + output_hidden_states = ( + output_hidden_states + if output_hidden_states is not None + else self.config.output_hidden_states + ) + use_cache = 
use_cache if use_cache is not None else self.config.use_cache + + return_dict = ( + return_dict if return_dict is not None else self.config.use_return_dict + ) + + # Retrieve input_ids and inputs_embeds + if input_ids is not None and inputs_embeds is not None: + raise ValueError( + "You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time" + ) + if input_ids is not None: + batch_size, seq_length = input_ids.shape + elif inputs_embeds is not None: + batch_size, seq_length, _ = inputs_embeds.shape + else: + raise ValueError( + "You have to specify either decoder_input_ids or decoder_inputs_embeds" + ) + + seq_length_with_past = seq_length + past_key_values_length = 0 + + if past_key_values is not None: + past_key_values_length = past_key_values[0][0].shape[2] + seq_length_with_past = seq_length_with_past + past_key_values_length + + cu_seqlens = None + max_seqlen = None + if position_ids is None: + device = input_ids.device if input_ids is not None else inputs_embeds.device + position_ids = torch.arange( + past_key_values_length, + seq_length + past_key_values_length, + dtype=torch.long, + device=device, + ) + position_ids = position_ids.unsqueeze(0).view(-1, seq_length) + else: + position_ids = position_ids.view(-1, seq_length).long() + cu_seqlens, max_seqlen = get_cu_seqlens_from_pos_ids(position_ids) + cu_seqlens = cu_seqlens.squeeze() + + if inputs_embeds is None: + inputs_embeds = self.embed_tokens(input_ids) + # Embed positions + if attention_mask is None: + attention_mask = torch.ones( + (batch_size, seq_length_with_past), + dtype=torch.bool, + device=inputs_embeds.device, + ) + attention_mask = ( + self._prepare_decoder_attention_mask( # pylint: disable=protected-access + attention_mask, + (batch_size, seq_length), + inputs_embeds, + past_key_values_length, + ) + ) + + hidden_states = inputs_embeds + + if self.gradient_checkpointing and self.training: + if use_cache: + logger.warning( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
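                # the KV cache only benefits generation and conflicts with checkpointed
                # recomputation, so it is disabled during training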
+ ) + use_cache = False + + # Decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + next_decoder_cache = () if use_cache else None + + for idx, decoder_layer in enumerate(self.layers): + if output_hidden_states: + all_hidden_states += (hidden_states,) + + past_key_value = past_key_values[idx] if past_key_values is not None else None + + if self.gradient_checkpointing and self.training: + + def create_custom_forward(module): + def custom_forward(*inputs): + # None for past_key_value + return module(*inputs) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(decoder_layer), + hidden_states, + attention_mask, + position_ids, + past_key_value, + output_attentions, + None, + cu_seqlens, + max_seqlen, + ) + else: + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + + hidden_states = layer_outputs[0] + + if use_cache: + next_decoder_cache += (layer_outputs[2 if output_attentions else 1],) + + if output_attentions: + all_self_attns += (layer_outputs[1],) + + hidden_states = self.norm(hidden_states) + + # Add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states,) + + next_cache = next_decoder_cache if use_cache else None + if not return_dict: + return tuple( + v + for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] + if v is not None + ) + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + ) diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index c05bccbf08..aa6049bd3e 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -124,6 +124,17 @@ def load_model( replace_btlm_attn_with_flash_attn(cfg.base_model) + if ( + hasattr(model_config, "model_type") + and model_config.model_type == "stablelm_epoch" + ): + if cfg.flash_attention and cfg.sample_packing: + from axolotl.monkeypatch.stablelm_attn_hijack_flash import ( + replace_stablelm_attn_with_flash_attn, + ) + + replace_stablelm_attn_with_flash_attn(cfg.base_model) + if cfg.is_llama_derived_model and cfg.flash_attention and cfg.sample_packing: if cfg.device not in ["mps", "cpu"] and not inference: from axolotl.monkeypatch.llama_attn_hijack_flash import ( From d4a88e4eca44240b531b64ec9373bb7ac06a1bbb Mon Sep 17 00:00:00 2001 From: Abhishek Mishra Date: Fri, 6 Oct 2023 17:35:56 +0530 Subject: [PATCH 53/77] Adding qlora config for Mistral (#675) * Adding qlora config for Mistral Contains fix for Mistral FA issue - ValueError: You are attempting to perform batched generation with padding_side='right' this may lead to unexpected behaviour for Flash Attention version of Mistral. Make sure to call tokenizer.padding_side = 'left' before tokenizing the input. 
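As a reference point, the left-padding change the error asks for is a one-line tokenizer tweak, and patch 51 above applies exactly this inside `load_tokenizer`. A minimal standalone sketch, assuming the tokenizer is loaded directly with `transformers` rather than through Axolotl:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-v0.1")
# Mistral's Flash Attention path expects left padding for batched generation
tokenizer.padding_side = "left"
# the base tokenizer ships without a pad token, so reuse EOS for padding
tokenizer.pad_token = tokenizer.eos_token
batch = tokenizer(["first prompt", "a second, longer prompt"], padding=True, return_tensors="pt")
```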
Fix for now is to set sample_packing: true and pad_to_sequence_len: true * Renamed to qlora.yml --- examples/mistral/qlora.yml | 79 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) create mode 100644 examples/mistral/qlora.yml diff --git a/examples/mistral/qlora.yml b/examples/mistral/qlora.yml new file mode 100644 index 0000000000..5a131c5f36 --- /dev/null +++ b/examples/mistral/qlora.yml @@ -0,0 +1,79 @@ +base_model: mistralai/Mistral-7B-v0.1 +base_model_config: mistralai/Mistral-7B-v0.1 +model_type: MistralForCausalLM +tokenizer_type: LlamaTokenizer +is_mistral_derived_model: true + +load_in_8bit: false +load_in_4bit: true +strict: false + +datasets: + - path: mhenrichsen/alpaca_2k_test + type: alpaca +dataset_prepared_path: last_run_prepared +val_set_size: 0.01 +output_dir: ./qlora-out + +adapter: qlora +lora_model_dir: + +sequence_len: 8192 +sample_packing: True +pad_to_sequence_len: True + +lora_r: 32 +lora_alpha: 16 +lora_dropout: 0.05 +lora_target_linear: true +lora_fan_in_fan_out: +lora_target_modules: + - gate_proj + - down_proj + - up_proj + - q_proj + - v_proj + - k_proj + - o_proj + +wandb_project: +wandb_entity: +wandb_watch: +wandb_run_id: +wandb_log_model: + +gradient_accumulation_steps: 4 +micro_batch_size: 4 +num_epochs: 1 +optimizer: adamw_bnb_8bit +lr_scheduler: cosine +learning_rate: 0.0002 + +train_on_inputs: false +group_by_length: false +bf16: true +fp16: false +tf32: false + +gradient_checkpointing: true +early_stopping_patience: +resume_from_checkpoint: +local_rank: +logging_steps: 1 +xformers_attention: +flash_attention: true + +warmup_steps: 10 +eval_steps: 20 +eval_table_size: 5 +eval_table_max_new_tokens: 128 +save_steps: +debug: +deepspeed: +weight_decay: 0.0 +fsdp: +fsdp_config: +special_tokens: + bos_token: "" + eos_token: "" + unk_token: "" \ No newline at end of file From 669f1d052c996a6b6c12bd146e15be30edb9be9d Mon Sep 17 00:00:00 2001 From: NanoCode012 Date: Sat, 7 Oct 2023 01:33:43 +0900 Subject: [PATCH 54/77] Fix: Higher vram usage for mistral and sample_packing (#691) * Fix: Higher vram usage for mistral and sample_packing * chore: update comment * chore: lint --- examples/mistral/qlora.yml | 8 ++++---- src/axolotl/utils/models.py | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/examples/mistral/qlora.yml b/examples/mistral/qlora.yml index 5a131c5f36..9c64a8c2dd 100644 --- a/examples/mistral/qlora.yml +++ b/examples/mistral/qlora.yml @@ -36,10 +36,10 @@ lora_target_modules: - k_proj - o_proj -wandb_project: -wandb_entity: +wandb_project: +wandb_entity: wandb_watch: -wandb_run_id: +wandb_run_id: wandb_log_model: gradient_accumulation_steps: 4 @@ -76,4 +76,4 @@ fsdp_config: special_tokens: bos_token: "" eos_token: "" - unk_token: "" \ No newline at end of file + unk_token: "" diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index aa6049bd3e..2c60f00c2b 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -81,7 +81,8 @@ def load_tokenizer(cfg): tokenizer.add_special_tokens({"pad_token": "[PAD]"}) os.environ["TOKENIZERS_PARALLELISM"] = "false" - if cfg.is_mistral_derived_model: + # Mistral's official FA implementation requires left padding + if cfg.is_mistral_derived_model and cfg.flash_attention and not cfg.sample_packing: tokenizer.padding_side = "left" if cfg.special_tokens: From 4c8ddf2c6f5c2509cbc87d9804f9bd6e42dc0c1a Mon Sep 17 00:00:00 2001 From: mhenrichsen Date: Fri, 6 Oct 2023 22:58:13 +0200 Subject: [PATCH 55/77] new lr, sample pack --- 
 examples/mistral/config.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/mistral/config.yml b/examples/mistral/config.yml
index e644bec507..7924fc682c 100644
--- a/examples/mistral/config.yml
+++ b/examples/mistral/config.yml
@@ -16,8 +16,8 @@ val_set_size: 0.01
 output_dir: ./out

 sequence_len: 8192
-sample_packing:
-pad_to_sequence_len:
+sample_packing: true
+pad_to_sequence_len: true

 wandb_project:
 wandb_entity:
@@ -30,7 +30,7 @@ micro_batch_size: 2
 num_epochs: 3
 optimizer: adamw_bnb_8bit
 lr_scheduler: cosine
-learning_rate: 0.0002
+learning_rate: 0.000005

 train_on_inputs: false
 group_by_length: false
@@ -59,4 +59,4 @@ fsdp_config:
 special_tokens:
   bos_token: "<s>"
   eos_token: "</s>"
-  unk_token: "<unk>"
+  unk_token: "<unk>"
\ No newline at end of file

From de87ea68f6230d4998bfca3cf3fd86b29374b298 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Fri, 6 Oct 2023 22:38:15 -0400
Subject: [PATCH 56/77] fix multiline for docker (#694)

---
 docker/Dockerfile-base | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/Dockerfile-base b/docker/Dockerfile-base
index 5d1e2c2b89..ab1a667b5b 100644
--- a/docker/Dockerfile-base
+++ b/docker/Dockerfile-base
@@ -14,7 +14,7 @@ ARG CUDA="118"
 ENV PYTHON_VERSION=$PYTHON_VERSION

 RUN apt-get update \
-    && apt-get install -y wget git build-essential ninja-build git-lfs libaio-dev && rm -rf /var/lib/apt/lists/*
+    && apt-get install -y wget git build-essential ninja-build git-lfs libaio-dev && rm -rf /var/lib/apt/lists/* \
     && wget \
         https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh \
     && mkdir /root/.conda \

From 83a950bb87c4a47670668ad9abb1d6b09c6a3b69 Mon Sep 17 00:00:00 2001
From: mhenrichsen
Date: Sat, 7 Oct 2023 11:04:35 +0200
Subject: [PATCH 57/77] lint

---
 examples/mistral/config.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/examples/mistral/config.yml b/examples/mistral/config.yml
index 7924fc682c..08e3a02b19 100644
--- a/examples/mistral/config.yml
+++ b/examples/mistral/config.yml
@@ -59,4 +59,5 @@ fsdp_config:
 special_tokens:
   bos_token: "<s>"
   eos_token: "</s>"
-  unk_token: "<unk>"
\ No newline at end of file
+
+  unk_token: "<unk>"

From aca03983157612918520c4cbb1a08a9c535daf01 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Sat, 7 Oct 2023 12:20:45 -0400
Subject: [PATCH 58/77] apex not needed as amp is part of pytorch (#696)

---
 docker/Dockerfile-base | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/docker/Dockerfile-base b/docker/Dockerfile-base
index ab1a667b5b..8f33ff2295 100644
--- a/docker/Dockerfile-base
+++ b/docker/Dockerfile-base
@@ -57,11 +57,6 @@ FROM base-builder
 ARG TORCH_CUDA_ARCH_LIST="7.0 7.5 8.0 8.6 9.0+PTX"
 ENV TORCH_CUDA_ARCH_LIST=$TORCH_CUDA_ARCH_LIST

-# recompile apex
-RUN python3 -m pip uninstall -y apex
-RUN git clone https://github.com/NVIDIA/apex
-RUN cd apex && python3 -m pip install -v --disable-pip-version-check --no-cache-dir --no-build-isolation --config-settings "--build-option=--cpp_ext" --config-settings "--build-option=--cuda_ext" ./
-
 RUN mkdir -p /workspace/builds
 COPY --from=bnb-builder /workspace/bitsandbytes /workspace/builds/bitsandbytes

From 7f2618b5f4eb4c8b2969182e80ce373c2017806a Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Sat, 7 Oct 2023 12:23:31 -0400
Subject: [PATCH 59/77] add docker images for pytorch 2.1.0 (#697)

---
 .github/workflows/base.yml |  5 +++++
 .github/workflows/main.yml | 10 ++++++++++
 2 files changed, 15 insertions(+)

diff --git a/.github/workflows/base.yml b/.github/workflows/base.yml
index d755b63263..5f08854842 100644
---
a/.github/workflows/base.yml +++ b/.github/workflows/base.yml @@ -25,6 +25,11 @@ jobs: python_version: "3.10" pytorch: 2.0.1 torch_cuda_arch_list: "7.0 7.5 8.0 8.6 9.0+PTX" + - cuda: "118" + cuda_version: 11.8.0 + python_version: "3.10" + pytorch: 2.1.0 + torch_cuda_arch_list: "7.0 7.5 8.0 8.6 9.0+PTX" steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a5b4d30379..5fdd2d705d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -23,6 +23,11 @@ jobs: python_version: "3.10" pytorch: 2.0.1 axolotl_extras: + - cuda: 118 + cuda_version: 11.8.0 + python_version: "3.10" + pytorch: 2.1.0 + axolotl_extras: runs-on: [self-hosted, gpu, docker] steps: - name: Checkout @@ -68,6 +73,11 @@ jobs: pytorch: 2.0.1 axolotl_extras: is_latest: true + - cuda: 118 + cuda_version: 11.8.0 + python_version: "3.10" + pytorch: 2.1.0 + axolotl_extras: runs-on: [self-hosted, gpu, docker] steps: - name: Checkout From f91db198f36f329ad6d5f5f95217b4f9b7309633 Mon Sep 17 00:00:00 2001 From: mhenrichsen Date: Sat, 7 Oct 2023 20:19:25 +0200 Subject: [PATCH 60/77] fix unneeded space (#699) --- examples/mistral/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/mistral/config.yml b/examples/mistral/config.yml index 08e3a02b19..2a4498a119 100644 --- a/examples/mistral/config.yml +++ b/examples/mistral/config.yml @@ -59,5 +59,4 @@ fsdp_config: special_tokens: bos_token: "" eos_token: "" - unk_token: "" From 77c84e02fd1a7eef25cccc5b8104178d980851c7 Mon Sep 17 00:00:00 2001 From: "seungduk.kim.2304" Date: Mon, 9 Oct 2023 02:37:54 +0900 Subject: [PATCH 61/77] Update README with some explanations (#700) * Update README with some explanations * revert commit-hook change * add more explanation about batch size and gradient accum * not use latex foromat * decorate * git hook again * Attach a link that explains about LoRA hyperparameters * update table of content * Explanation about lora_modules_to_save --- README.md | 250 ++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 167 insertions(+), 83 deletions(-) diff --git a/README.md b/README.md index c2d8e7d8c6..e277120e62 100644 --- a/README.md +++ b/README.md @@ -23,9 +23,10 @@ Features: - [Supported Features](#axolotl-supports) - [Quickstart](#quickstart-) - [Installation](#installation) - - [Docker Installation](#environment) - - [Conda/Pip venv Installation](#condapip-venv) - - [LambdaLabs Installation](#lambdalabs) + - [Docker](#docker) + - [Conda/Pip venv](#condapip-venv) + - [LambdaLabs](#lambdalabs) + - [Windows](#windows) - [Dataset](#dataset) - [How to Add Custom Prompts](#how-to-add-custom-prompts) - [How to Use Custom Pretokenized Dataset](#how-to-use-your-custom-pretokenized-dataset) @@ -50,7 +51,7 @@ Features: Axolotl provides a unified repository for fine-tuning
a variety of AI models with ease

- Go ahead and axolotl questions!! + Go ahead and Axolotl questions!!

pre-commit PyTest Status @@ -102,7 +103,7 @@ accelerate launch -m axolotl.cli.inference examples/openllama-3b/lora.yml \ ### Environment -- Docker +#### Docker ```bash docker run --gpus '"all"' --rm -it winglian/axolotl:main-py3.10-cu118-2.0.1 ``` @@ -114,12 +115,12 @@ accelerate launch -m axolotl.cli.inference examples/openllama-3b/lora.yml \ docker compose up -d ``` -- Conda/Pip venv +#### Conda/Pip venv 1. Install python >=**3.9** 2. Install pytorch stable https://pytorch.org/get-started/locally/ - 3. Install axolotl along with python dependencies + 3. Install Axolotl along with python dependencies ```bash pip3 install packaging pip3 install -e '.[flash-attn,deepspeed]' @@ -130,7 +131,7 @@ accelerate launch -m axolotl.cli.inference examples/openllama-3b/lora.yml \ ``` Get the token at huggingface.co/settings/tokens -- LambdaLabs +#### LambdaLabs
Click to Expand @@ -174,7 +175,8 @@ accelerate launch -m axolotl.cli.inference examples/openllama-3b/lora.yml \ ```
-- Windows: Please use WSL or Docker! +#### Windows +Please use WSL or Docker! ### Dataset @@ -396,15 +398,15 @@ See [examples](examples) for quick start. It is recommended to duplicate and mod All yaml options ```yaml -# this is the huggingface model that contains *.pt, *.safetensors, or *.bin files -# this can also be a relative path to a model on disk +# This is the huggingface model that contains *.pt, *.safetensors, or *.bin files +# This can also be a relative path to a model on disk base_model: ./llama-7b-hf -# you can specify an ignore pattern if the model repo contains more than 1 model type (*.pt, etc) +# You can specify an ignore pattern if the model repo contains more than 1 model type (*.pt, etc) base_model_ignore_patterns: -# if the base_model repo on hf hub doesn't include configuration .json files, -# you can set that here, or leave this empty to default to base_model +# If the base_model repo on hf hub doesn't include configuration .json files, +# You can set that here, or leave this empty to default to base_model base_model_config: ./llama-7b-hf -# you can specify to choose a specific model revision from huggingface hub +# You can specify to choose a specific model revision from huggingface hub model_revision: # Optional tokenizer configuration override in case you want to use a different tokenizer # than the one defined in the base model @@ -419,23 +421,24 @@ trust_remote_code: tokenizer_use_fast: # Whether to use the legacy tokenizer setting, defaults to True tokenizer_legacy: -# resize the model embeddings when new tokens are added to multiples of 32 -# this is reported to improve training speed on some models +# Resize the model embeddings when new tokens are added to multiples of 32 +# This is reported to improve training speed on some models resize_token_embeddings_to_32x: -# used to identify which the model is based on +# Used to identify which the model is based on is_falcon_derived_model: is_llama_derived_model: +# Please note that if you set this to true, `padding_side` will be set to "left" by default is_mistral_derived_model: -# whether you are training a 4-bit GPTQ quantized model +# Whether you are training a 4-bit GPTQ quantized model gptq: true gptq_groupsize: 128 # group size gptq_model_v1: false # v1 or v2 -# this will attempt to quantize the model down to 8 bits and use adam 8 bit optimizer +# This will attempt to quantize the model down to 8 bits and use adam 8 bit optimizer load_in_8bit: true -# use bitsandbytes 4 bit +# Use bitsandbytes 4 bit load_in_4bit: # Use CUDA bf16 @@ -449,9 +452,9 @@ tf32: true # require >=ampere bfloat16: true # require >=ampere float16: true -# a list of one or more datasets to finetune the model with +# A list of one or more datasets to finetune the model with datasets: - # hf dataset repo | "json" for local dataset, make sure to fill data_files + # HuggingFace dataset repo | "json" for local dataset, make sure to fill data_files - path: vicgalle/alpaca-gpt4 # The type of prompt to use for training. [alpaca, sharegpt, gpteacher, oasst, reflection] type: alpaca # format | format: (chat/instruct) | .load_ @@ -461,16 +464,16 @@ datasets: name: # Optional[str] name of dataset configuration to load conversation: # Optional[str] fastchat conversation type, only used with type: sharegpt - # custom user prompt + # Custom user prompt - path: repo type: - # the below are defaults. only set what's needed. + # The below are defaults. only set what's needed. 
      system_prompt: ""
      field_system: system
      field_instruction: instruction
      field_output: input

-      # customizable to be single line or multi-line
+      # Customizable to be single line or multi-line
      system_format: "{system}"
      # 'format' can include {input}
      format: |-
@@ -479,13 +482,13 @@ datasets:
   # 'no_input_format' cannot include {input}
   no_input_format: "{instruction} "

-  # for completions datsets, uses the provided field if not `text`
+  # For completions datasets, uses the provided field if not `text`
   field:

-# axolotl attempts to save the dataset as an arrow after packing the data together so
+# Axolotl attempts to save the dataset as an arrow after packing the data together so
 # subsequent training attempts load faster, relative path
 dataset_prepared_path: data/last_run_prepared
-# push prepared dataset to hub
+# Push prepared dataset to hub
 push_dataset_to_hub: # repo path
 # The maximum number of processes to use while preprocessing your input dataset. This defaults to `os.cpu_count()`
 # if not set.
@@ -495,8 +498,8 @@ hub_model_id: # repo path to push finetuned model
 # how to push checkpoints to hub
 # https://huggingface.co/docs/transformers/v4.31.0/en/main_classes/trainer#transformers.TrainingArguments.hub_strategy
 hub_strategy:
-# whether to use hf `use_auth_token` for loading datasets. Useful for fetching private datasets
-# required to be true when used in combination with `push_dataset_to_hub`
+# Whether to use hf `use_auth_token` for loading datasets. Useful for fetching private datasets
+# Required to be true when used in combination with `push_dataset_to_hub`
 hf_use_auth_token: # boolean
 # How much of the dataset to set aside as evaluation. 1 = 100%, 0.50 = 50%, etc. 0 for no eval.
 val_set_size: 0.04
@@ -505,30 +508,34 @@
 dataset_shard_num:
 # Index of shard to use for whole dataset
 dataset_shard_idx:

-# the maximum length of an input to train with, this should typically be less than 2048
+# The maximum length of an input to train with, this should typically be less than 2048
 # as most models have a token/context limit of 2048
 sequence_len: 2048
-# pad inputs so each step uses constant sized buffers
-# this will reduce memory fragmentation and may prevent OOMs, by re-using memory more efficiently
+# Pad inputs so each step uses constant sized buffers
+# This will reduce memory fragmentation and may prevent OOMs, by re-using memory more efficiently
 pad_to_sequence_len:
-# max sequence length to concatenate training samples together up to
-# inspired by StackLLaMA. see https://huggingface.co/blog/stackllama#supervised-fine-tuning
+# Max sequence length to concatenate training samples together up to
+# Inspired by StackLLaMA. see https://huggingface.co/blog/stackllama#supervised-fine-tuning
 # FutureWarning: This will soon be DEPRECATED
 max_packed_sequence_len: 1024
-# use efficient multi-packing with block diagonal attention and per sequence position_ids. Recommend set to 'true'
+# Use efficient multi-packing with block diagonal attention and per sequence position_ids. Recommend set to 'true'
 sample_packing:
-# set to 'false' if getting errors during eval with sample_packing on.
+# Set to 'false' if getting errors during eval with sample_packing on.
 eval_sample_packing:
-# you can set these packing optimizations AFTER starting a training at least once.
+# You can set these packing optimizations AFTER starting a training at least once.
 # The trainer will provide recommended values for these values.
sample_packing_eff_est: total_num_tokens: -# if you want to use 'lora' or 'qlora' or leave blank to train all parameters in original model +# If you want to use 'lora' or 'qlora' or leave blank to train all parameters in original model adapter: lora -# if you already have a lora model trained that you want to load, put that here -# lora hyperparameters +# If you already have a lora model trained that you want to load, put that here. +# This means after training, if you want to test the model, you should set this to the value of `lora_out_dir`. lora_model_dir: + +# LoRA hyperparameters +# For more details about the following options, see: +# https://www.anyscale.com/blog/fine-tuning-llms-lora-or-full-parameter-an-in-depth-analysis-with-llama-2 lora_r: 8 lora_alpha: 16 lora_dropout: 0.05 @@ -540,36 +547,48 @@ lora_target_modules: # - gate_proj # - down_proj # - up_proj -lora_target_linear: # if true, will target all linear layers +lora_target_linear: # If true, will target all linear layers + +# If you added new tokens to the tokenizer, you may need to save some LoRA modules because they need to know the new tokens. +# For LLaMA and Mistral, you need to save `embed_tokens` and `lm_head`. It may vary for other models. +# `embed_tokens` converts tokens to embeddings, and `lm_head` converts embeddings to token probabilities. +# https://github.com/huggingface/peft/issues/334#issuecomment-1561727994 lora_modules_to_save: # - embed_tokens # - lm_head + +# Once you complete training, the model will be saved to the following directory. +# If you merge the adapter to the base model, a subdirectory `merged` will be created under this directory. +# Make sure `lora_model_dir` points to this directory if you want to use the trained model. lora_out_dir: lora_fan_in_fan_out: false # ReLoRA configuration -# must use either 'lora' or 'qlora' adapter, and does not support fsdp or deepspeed -relora_steps: # number of steps per ReLoRA restart -relora_warmup_steps: # number of per-restart warmup steps -relora_cpu_offload: # true to perform lora weight merges on cpu during restarts, for modest gpu memory savings +# Must use either 'lora' or 'qlora' adapter, and does not support fsdp or deepspeed +relora_steps: # Number of steps per ReLoRA restart +relora_warmup_steps: # Number of per-restart warmup steps +relora_cpu_offload: # True to perform lora weight merges on cpu during restarts, for modest gpu memory savings # wandb configuration if you're using it wandb_mode: # "offline" to save run metadata locally and not sync to the server, "disabled" to turn off wandb -wandb_project: # your wandb project name -wandb_entity: # a wandb Team name if using a Team +wandb_project: # Your wandb project name +wandb_entity: # A wandb Team name if using a Team wandb_watch: -wandb_run_id: # set the name of your wandb run +wandb_run_id: # Set the name of your wandb run wandb_log_model: # "checkpoint" to log model to wandb Artifacts every `save_steps` or "end" to log only at the end of training -# where to save the finished model to +# Where to save the full-finetuned model to output_dir: ./completed-model -# whether to use torch.compile and which backend to use +# Whether to use torch.compile and which backend to use torch_compile: # bool torch_compile_backend: # Optional[str] -# training hyperparameters +# Training hyperparameters + +# If greater than 1, backpropagation will be skipped and the gradients will be accumulated for the given number of steps. 
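# For example (illustrative numbers): micro_batch_size: 2 with
# gradient_accumulation_steps: 8 on 4 GPUs yields an effective batch size of
# 2 * 8 * 4 = 64 samples per optimizer update.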
gradient_accumulation_steps: 1 +# The number of samples to include in each batch. This is the number of samples sent to each GPU. micro_batch_size: 2 eval_batch_size: num_epochs: 3 @@ -577,44 +596,47 @@ warmup_steps: 100 learning_rate: 0.00003 lr_quadratic_warmup: logging_steps: -save_strategy: # set to `no` to skip checkpoint saves -save_steps: # leave empty to save at each epoch -eval_steps: # leave empty to eval at each epoch -save_total_limit: # checkpoints saved at a time +save_strategy: # Set to `no` to skip checkpoint saves +save_steps: # Leave empty to save at each epoch +eval_steps: # Leave empty to eval at each epoch +save_total_limit: # Checkpoints saved at a time +# Maximum number of iterations to train for. It precedes num_epochs which means that +# if both are set, num_epochs will not be guaranteed. +# e.g., when 1 epoch is 1000 steps => `num_epochs: 2` and `max_steps: 100` will train for 100 steps max_steps: -eval_table_size: # approximate number of predictions sent to wandb depending on batch size. Enabled above 0. Default is 0 -eval_table_max_new_tokens: # total number of tokens generated for predictions sent to wandb. Default is 128 +eval_table_size: # Approximate number of predictions sent to wandb depending on batch size. Enabled above 0. Default is 0 +eval_table_max_new_tokens: # Total number of tokens generated for predictions sent to wandb. Default is 128 -# save model as safetensors (require safetensors package) +# Save model as safetensors (require safetensors package) save_safetensors: -# whether to mask out or include the human's prompt from the training labels +# Whether to mask out or include the human's prompt from the training labels train_on_inputs: false -# group similarly sized data to minimize padding -# may be slower to start, as it must download and sort the entire dataset -# note that training loss may have an oscillating pattern with this enabled +# Group similarly sized data to minimize padding. +# May be slower to start, as it must download and sort the entire dataset. +# Note that training loss may have an oscillating pattern with this enabled. 
group_by_length: false

 # Whether to use gradient checkpointing https://huggingface.co/docs/transformers/v4.18.0/en/performance#gradient-checkpointing
 gradient_checkpointing: false

-# stop training after this many evaluation losses have increased in a row
+# Stop training after this many evaluation losses have increased in a row
 # https://huggingface.co/transformers/v4.2.2/_modules/transformers/trainer_callback.html#EarlyStoppingCallback
 early_stopping_patience: 3

-# specify a scheduler and kwargs to use with the optimizer
+# Specify a scheduler and kwargs to use with the optimizer
 lr_scheduler: # 'one_cycle' | 'log_sweep' | empty for cosine
 lr_scheduler_kwargs:

-# for one_cycle optim
-lr_div_factor: # learning rate div factor
+# For one_cycle optim
+lr_div_factor: # Learning rate div factor

-# for log_sweep optim
+# For log_sweep optim
 log_sweep_min_lr:
 log_sweep_max_lr:

-# specify optimizer
+# Specify optimizer
 # Valid values are driven by the Transformers OptimizerNames class, see:
 # https://github.com/huggingface/transformers/blob/95b374952dc27d8511541d6f5a4e22c9ec11fb24/src/transformers/training_args.py#L134
 #
@@ -640,7 +662,7 @@ log_sweep_max_lr:
 # - paged_lion_32bit
 # - paged_lion_8bit
 optimizer:
-# specify weight decay
+# Specify weight decay
 weight_decay:
 # adamw hyperparams
 adam_beta1:
@@ -649,49 +671,51 @@ adam_epsilon:
 # Gradient clipping max norm
 max_grad_norm:

-# whether to bettertransformers
+# Whether to use BetterTransformers
 flash_optimum:
-# whether to use xformers attention patch https://github.com/facebookresearch/xformers:
+# Whether to use xformers attention patch https://github.com/facebookresearch/xformers:
 xformers_attention:
-# whether to use flash attention patch https://github.com/Dao-AILab/flash-attention:
+# Whether to use flash attention patch https://github.com/Dao-AILab/flash-attention:
 flash_attention:
 flash_attn_cross_entropy: # Whether to use flash-attention cross entropy implementation - advanced use only
 flash_attn_rms_norm: # Whether to use flash-attention rms norm implementation - advanced use only
-# whether to use scaled-dot-product attention
+# Whether to use scaled-dot-product attention
 # https://pytorch.org/docs/stable/generated/torch.nn.functional.scaled_dot_product_attention.html
 sdp_attention:
 # Landmark attention (only llama)
 landmark_attention:
 # xpos RoPE see https://github.com/kaiokendev/cutoff-len-is-context-len/blob/main/util/xpos_rope_llama_monkey_patch.py
-# llama only
+# LLaMA only
 xpos_rope:
 # RoPE Scaling https://github.com/huggingface/transformers/pull/24653
 rope_scaling:
   type: # linear | dynamic
   factor: # float

-# resume from a specific checkpoint dir
+# Resume from a specific checkpoint dir
 resume_from_checkpoint:
-# if resume_from_checkpoint isn't set and you simply want it to start where it left off
-# be careful with this being turned on between different models
+# If resume_from_checkpoint isn't set and you simply want training to start where it left off.
+# Be careful with this being turned on between different models.
 auto_resume_from_checkpoints: false

-# don't mess with this, it's here for accelerate and torchrun
+# Don't mess with this, it's here for accelerate and torchrun
 local_rank:

-# add or change special tokens
+# Add or change special tokens.
+# If you add tokens here, you don't need to add them to the `tokens` list.
 special_tokens:
 # bos_token: "<s>"
 # eos_token: "</s>"
 # unk_token: "<unk>"
-# add extra tokens
+
+# Add extra tokens.
 tokens:

 # FSDP
 fsdp:
 fsdp_config:

-# Deepspeed config path
+# Deepspeed config path.
e.g., deepspeed/zero3.json deepspeed: # Advanced DDP Arguments @@ -717,6 +741,66 @@ strict:
+<details>
+<summary>Understanding of batch size and gradient accumulation steps</summary>
+
Gradient accumulation means accumulating gradients over several mini-batches and updating the model weights afterward. When the samples in each batch are diverse, this technique doesn't significantly impact learning.

This method makes it possible to train with larger effective batch sizes without needing proportionally more memory. Here's why:

1. **Memory Consumption with Batch Size**: The primary reason increasing the batch size impacts memory is due to the storage requirements for intermediate activations. When you forward propagate a batch through a network, you have to store the activations at each layer for each sample in the batch, because these activations are used during backpropagation to compute gradients. Therefore, larger batches mean more activations, leading to greater GPU memory consumption.

2. **Gradient Accumulation**: With gradient accumulation, you're effectively simulating a larger batch size by accumulating gradients over several smaller batches (or micro-batches). However, at any given time, you're only forward and backward propagating a micro-batch. This means you only store activations for the micro-batch, not the full accumulated batch. As a result, you can simulate the effect of a larger batch size without the memory cost of storing activations for a large batch.

**Example 1:**
Micro batch size: 3
Gradient accumulation steps: 2
Number of GPUs: 3
Total batch size = 3 * 2 * 3 = 18

```
| GPU 1          | GPU 2          | GPU 3          |
|----------------|----------------|----------------|
| S1, S2, S3     | S4, S5, S6     | S7, S8, S9     |
| e1, e2, e3     | e4, e5, e6     | e7, e8, e9     |
|----------------|----------------|----------------|
| → (accumulate) | → (accumulate) | → (accumulate) |
|----------------|----------------|----------------|
| S10, S11, S12  | S13, S14, S15  | S16, S17, S18  |
| e10, e11, e12  | e13, e14, e15  | e16, e17, e18  |
|----------------|----------------|----------------|
| → (apply)      | → (apply)      | → (apply)      |

Accumulated gradient for the weight w1 after the second iteration (considering all GPUs):
Total gradient for w1 = e1 + e2 + e3 + e4 + e5 + e6 + e7 + e8 + e9 + e10 + e11 + e12 + e13 + e14 + e15 + e16 + e17 + e18

Weight update for w1:
w1_new = w1_old - learning rate × (Total gradient for w1 / 18)
```

**Example 2:**
Micro batch size: 2
Gradient accumulation steps: 1
Number of GPUs: 3
Total batch size = 2 * 1 * 3 = 6

```
| GPU 1     | GPU 2     | GPU 3     |
|-----------|-----------|-----------|
| S1, S2    | S3, S4    | S5, S6    |
| e1, e2    | e3, e4    | e5, e6    |
|-----------|-----------|-----------|
| → (apply) | → (apply) | → (apply) |

Accumulated gradient for the weight w1 (considering all GPUs):
Total gradient for w1 = e1 + e2 + e3 + e4 + e5 + e6

Weight update for w1:
w1_new = w1_old - learning rate × (Total gradient for w1 / 6)
```
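To make the update timing concrete, below is a minimal sketch of the accumulation loop in plain PyTorch. The `model`, `loss_fn`, and `data_loader` names are placeholders for illustration, not Axolotl APIs; when you set `gradient_accumulation_steps` in your config, the underlying Hugging Face Trainer runs an equivalent loop for you.

```python
import torch

# Minimal sketch of gradient accumulation, assuming placeholder
# `model`, `loss_fn`, and `data_loader` objects.
def train_with_accumulation(model, loss_fn, data_loader, accumulation_steps=2, lr=3e-5):
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    optimizer.zero_grad()
    for step, (inputs, targets) in enumerate(data_loader):
        outputs = model(inputs)
        # Scale the loss so the summed gradients match the mean gradient of
        # one large batch (the division by 18 or 6 in the examples above).
        loss = loss_fn(outputs, targets) / accumulation_steps
        loss.backward()  # gradients are *added* into the .grad buffers
        if (step + 1) % accumulation_steps == 0:
            optimizer.step()       # one weight update per accumulated batch
            optimizer.zero_grad()  # reset gradients for the next cycle
```

Note that only the current micro-batch's activations are alive during each `backward()` call, which is why accumulating does not increase activation memory.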
+</details>

### Train

Run

From 295b2662e1a9176e18492463627890c4de0a4cb2 Mon Sep 17 00:00:00 2001
From: lukemarsden
Date: Tue, 10 Oct 2023 07:14:23 +0100
Subject: [PATCH 62/77] Get qlora mistral-7b fine tuning working on a single 4090 (#708)

---
 examples/mistral/qlora.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/mistral/qlora.yml b/examples/mistral/qlora.yml
index 9c64a8c2dd..3d22c70346 100644
--- a/examples/mistral/qlora.yml
+++ b/examples/mistral/qlora.yml
@@ -43,7 +43,7 @@ wandb_run_id:
 wandb_log_model:

 gradient_accumulation_steps: 4
-micro_batch_size: 4
+micro_batch_size: 2
 num_epochs: 1
 optimizer: adamw_bnb_8bit
 lr_scheduler: cosine

From 11c48c5e039b1b08941b02d356d814bc62e8ac1f Mon Sep 17 00:00:00 2001
From: NanoCode012
Date: Tue, 10 Oct 2023 21:08:17 +0900
Subject: [PATCH 63/77] fix(doc): Add note on inference w sample packing (#712)

---
 README.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/README.md b/README.md
index e277120e62..1610f7bda8 100644
--- a/README.md
+++ b/README.md
@@ -876,6 +876,10 @@ Pass the appropriate flag to the train command:
   --base_model="./completed-model" --prompter=None --load_in_8bit=True
   ```

+Please use `--sample_packing False` if you have it on and receive an error similar to the one below:
+
+> RuntimeError: stack expects each tensor to be equal size, but got [1, 32, 1, 128] at entry 0 and [1, 32, 8, 128] at entry 1
+
 ### Merge LORA to base

 Add below flag to train command above

From ace70b33c6f04f5fe9f8fbb47ae386218067ce22 Mon Sep 17 00:00:00 2001
From: atgctg <105969161+atgctg@users.noreply.github.com>
Date: Tue, 10 Oct 2023 14:32:20 +0200
Subject: [PATCH 64/77] Fix: lowercase `True` values in config (#713)

* Fix: lowercase `True` values in config

* Fix: lowercase `True` values in config

---
 examples/mistral/qlora.yml | 4 ++--
 examples/pythia/lora.yml | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/mistral/qlora.yml b/examples/mistral/qlora.yml
index 3d22c70346..09639d006a 100644
--- a/examples/mistral/qlora.yml
+++ b/examples/mistral/qlora.yml
@@ -19,8 +19,8 @@ adapter: qlora
 lora_model_dir:

 sequence_len: 8192
-sample_packing: True
-pad_to_sequence_len: True
+sample_packing: true
+pad_to_sequence_len: true

 lora_r: 32
 lora_alpha: 16
diff --git a/examples/pythia/lora.yml b/examples/pythia/lora.yml
index f9a83c059a..6ff036621f 100644
--- a/examples/pythia/lora.yml
+++ b/examples/pythia/lora.yml
@@ -28,8 +28,8 @@ num_epochs: 3
 learning_rate: 0.00001
 train_on_inputs: false
 group_by_length: false
-bf16: True
-tf32: True
+bf16: true
+tf32: true
 early_stopping_patience:
 resume_from_checkpoint:
 local_rank:

From 5855dded3db7a205bc0d8476caf2fa46a0404c13 Mon Sep 17 00:00:00 2001
From: NanoCode012
Date: Tue, 10 Oct 2023 21:51:56 +0900
Subject: [PATCH 65/77] fix(doc): update default doc according to arg (#714)

---
 README.md | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 1610f7bda8..f22ccb5939 100644
--- a/README.md
+++ b/README.md
@@ -469,12 +469,13 @@ datasets:
   type: # The below are defaults. only set what's needed.
system_prompt: "" + system_format: "{system}" field_system: system field_instruction: instruction - field_output: input + field_input: input + field_output: output # Customizable to be single line or multi-line - system_format: "{system}" # 'format' can include {input} format: |- User: {instruction} {input} @@ -482,7 +483,7 @@ datasets: # 'no_input_format' cannot include {input} no_input_format: "{instruction} " - # For completions datsets, uses the provided field if not `text` + # For `completion` datsets only, uses the provided field instead of `text` column field: # Axolotl attempts to save the dataset as an arrow after packing the data together so From 490923fb78e0646c2c0ba427628a0daa72b86996 Mon Sep 17 00:00:00 2001 From: Jan Philipp Harries <2862336+jphme@users.noreply.github.com> Date: Wed, 11 Oct 2023 13:28:12 +0200 Subject: [PATCH 66/77] Save Axolotl config as WandB artifact (#716) --- src/axolotl/cli/__init__.py | 1 + src/axolotl/utils/callbacks.py | 24 ++++++++++++++++++++++++ src/axolotl/utils/trainer.py | 4 ++++ 3 files changed, 29 insertions(+) diff --git a/src/axolotl/cli/__init__.py b/src/axolotl/cli/__init__.py index c21d931705..07a6209e46 100644 --- a/src/axolotl/cli/__init__.py +++ b/src/axolotl/cli/__init__.py @@ -194,6 +194,7 @@ def load_cfg(config: Path = Path("examples/"), **kwargs): # load the config from the yaml file with open(config, encoding="utf-8") as file: cfg: DictDefault = DictDefault(yaml.safe_load(file)) + cfg.axolotl_config_path = config # if there are any options passed in the cli, if it is something that seems valid from the yaml, # then overwrite the value cfg_keys = cfg.keys() diff --git a/src/axolotl/utils/callbacks.py b/src/axolotl/utils/callbacks.py index 9a7ebe9517..458e537c63 100644 --- a/src/axolotl/utils/callbacks.py +++ b/src/axolotl/utils/callbacks.py @@ -514,3 +514,27 @@ def log_table_from_dataloader(name: str, table_dataloader): return control return LogPredictionCallback + + +class SaveAxolotlConfigtoWandBCallback(TrainerCallback): + """Callback to save axolotl config to wandb""" + + def __init__(self, axolotl_config_path): + self.axolotl_config_path = axolotl_config_path + + def on_train_begin( + self, + args: AxolotlTrainingArguments, # pylint: disable=unused-argument + state: TrainerState, # pylint: disable=unused-argument + control: TrainerControl, + **kwargs, # pylint: disable=unused-argument + ): + if is_main_process(): + try: + artifact = wandb.Artifact(name="axolotl-config", type="config") + artifact.add_file(local_path=self.axolotl_config_path) + wandb.run.log_artifact(artifact) + LOG.info("Axolotl config has been saved to WandB as an artifact.") + except (FileNotFoundError, ConnectionError) as err: + LOG.warning(f"Error while saving Axolotl config to WandB: {err}") + return control diff --git a/src/axolotl/utils/trainer.py b/src/axolotl/utils/trainer.py index a10a2b0e77..ee8c634966 100644 --- a/src/axolotl/utils/trainer.py +++ b/src/axolotl/utils/trainer.py @@ -30,6 +30,7 @@ from axolotl.utils.callbacks import ( EvalFirstStepCallback, GPUStatsCallback, + SaveAxolotlConfigtoWandBCallback, SaveBetterTransformerModelCallback, bench_eval_callback_factory, log_prediction_callback_factory, @@ -775,6 +776,9 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer, total_num_ LogPredictionCallback = log_prediction_callback_factory(trainer, tokenizer) trainer.add_callback(LogPredictionCallback(cfg)) + if cfg.use_wandb: + trainer.add_callback(SaveAxolotlConfigtoWandBCallback(cfg.axolotl_config_path)) + if cfg.do_bench_eval: 
trainer.add_callback(bench_eval_callback_factory(trainer, tokenizer)) From 1c412c7e9dd228209ad63afdcd5dc430a1ef82ab Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 13 Oct 2023 07:46:07 -0400 Subject: [PATCH 67/77] improve handling of the prepared ds path and other cfg defaults (#701) --- src/axolotl/cli/inference.py | 1 + src/axolotl/cli/train.py | 13 +++++++++++++ src/axolotl/common/const.py | 5 +++++ src/axolotl/utils/data.py | 4 ++-- 4 files changed, 21 insertions(+), 2 deletions(-) create mode 100644 src/axolotl/common/const.py diff --git a/src/axolotl/cli/inference.py b/src/axolotl/cli/inference.py index f3daac83dd..91405d8c66 100644 --- a/src/axolotl/cli/inference.py +++ b/src/axolotl/cli/inference.py @@ -14,6 +14,7 @@ def do_cli(config: Path = Path("examples/"), **kwargs): # pylint: disable=duplicate-code print_axolotl_text_art() parsed_cfg = load_cfg(config, **kwargs) + parsed_cfg.sample_packing = False parser = transformers.HfArgumentParser((TrainerCliArgs)) parsed_cli_args, _ = parser.parse_args_into_dataclasses( return_remaining_strings=True diff --git a/src/axolotl/cli/train.py b/src/axolotl/cli/train.py index c64755872b..b49cbc6b60 100644 --- a/src/axolotl/cli/train.py +++ b/src/axolotl/cli/train.py @@ -1,10 +1,12 @@ """ CLI to run training on a model """ +import logging from pathlib import Path import fire import transformers +from colorama import Fore from axolotl.cli import ( check_accelerate_default_config, @@ -14,8 +16,11 @@ print_axolotl_text_art, ) from axolotl.common.cli import TrainerCliArgs +from axolotl.common.const import DEFAULT_DATASET_PREPARED_PATH from axolotl.train import train +LOG = logging.getLogger("axolotl.cli.train") + def do_cli(config: Path = Path("examples/"), **kwargs): # pylint: disable=duplicate-code @@ -27,6 +32,14 @@ def do_cli(config: Path = Path("examples/"), **kwargs): parsed_cli_args, _ = parser.parse_args_into_dataclasses( return_remaining_strings=True ) + if parsed_cli_args.prepare_ds_only and not parsed_cfg.dataset_prepared_path: + msg = ( + Fore.RED + + "--prepare_ds_only called without dataset_prepared_path set." + + Fore.RESET + ) + LOG.warning(msg) + parsed_cfg.dataset_prepared_path = DEFAULT_DATASET_PREPARED_PATH dataset_meta = load_datasets(cfg=parsed_cfg, cli_args=parsed_cli_args) if parsed_cli_args.prepare_ds_only: diff --git a/src/axolotl/common/const.py b/src/axolotl/common/const.py new file mode 100644 index 0000000000..fd34ad4694 --- /dev/null +++ b/src/axolotl/common/const.py @@ -0,0 +1,5 @@ +""" +Various shared constants +""" + +DEFAULT_DATASET_PREPARED_PATH = "last_run_prepared" diff --git a/src/axolotl/utils/data.py b/src/axolotl/utils/data.py index bac7d96c9e..c944dd27ba 100644 --- a/src/axolotl/utils/data.py +++ b/src/axolotl/utils/data.py @@ -16,6 +16,7 @@ from huggingface_hub import hf_hub_download from transformers import PreTrainedTokenizerBase +from axolotl.common.const import DEFAULT_DATASET_PREPARED_PATH from axolotl.datasets import ConstantLengthDataset, TokenizedPromptDataset from axolotl.prompt_strategies import load from axolotl.prompt_tokenizers import ( @@ -44,7 +45,6 @@ ) LOG = logging.getLogger("axolotl") -DEFAULT_DATASET_PREPARED_PATH = "last_run_prepared" def md5(to_hash: str, encoding: str = "utf-8") -> str: @@ -357,7 +357,7 @@ def for_d_in_datasets(dataset_configs): if len(datasets) > 1: LOG.info("shuffle merged datasets") dataset = dataset.shuffle(seed=seed) - if cfg.local_rank == 0 and cfg.dataset_prepared_path: + if cfg.local_rank == 0: LOG.info(f"Saving merged prepared dataset to disk... 
{prepared_ds_path}") dataset.save_to_disk(prepared_ds_path) if cfg.push_dataset_to_hub: From 2aa1f714641da62a19a1100299c07b6f41985620 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 13 Oct 2023 08:57:28 -0400 Subject: [PATCH 68/77] fix pytorch 2.1.0 build, add multipack docs (#722) --- .github/workflows/main.yml | 1 + docker/Dockerfile | 4 +++ docs/multipack.md | 51 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 56 insertions(+) create mode 100644 docs/multipack.md diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5fdd2d705d..f84f7f7a9a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -51,6 +51,7 @@ jobs: build-args: | BASE_TAG=${{ github.ref_name }}-base-py${{ matrix.python_version }}-cu${{ matrix.cuda }}-${{ matrix.pytorch }} CUDA=${{ matrix.cuda }} + PYTORCH_VERSION=${{ matrix.pytorch }} file: ./docker/Dockerfile push: ${{ github.event_name != 'pull_request' }} tags: ${{ steps.metadata.outputs.tags }}-py${{ matrix.python_version }}-cu${{ matrix.cuda }}-${{ matrix.pytorch }}${{ matrix.axolotl_extras != '' && '-' || '' }}${{ matrix.axolotl_extras }} diff --git a/docker/Dockerfile b/docker/Dockerfile index 7b121aaa7b..ff47548bc4 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -5,6 +5,9 @@ ARG TORCH_CUDA_ARCH_LIST="7.0 7.5 8.0 8.6+PTX" ARG AXOLOTL_EXTRAS="" ARG CUDA="118" ENV BNB_CUDA_VERSION=$CUDA +ARG PYTORCH_VERSION="2.0.1" + +ENV PYTORCH_VERSION=$PYTORCH_VERSION RUN apt-get update && \ apt-get install -y vim curl @@ -16,6 +19,7 @@ RUN git clone --depth=1 https://github.com/OpenAccess-AI-Collective/axolotl.git WORKDIR /workspace/axolotl # If AXOLOTL_EXTRAS is set, append it in brackets +RUN sed -i "s/torch==.*/torch==$PYTORCH_VERSION/" requirements.txt RUN if [ "$AXOLOTL_EXTRAS" != "" ] ; then \ pip install -e .[flash-attn,$AXOLOTL_EXTRAS]; \ else \ diff --git a/docs/multipack.md b/docs/multipack.md new file mode 100644 index 0000000000..2a55148b2a --- /dev/null +++ b/docs/multipack.md @@ -0,0 +1,51 @@ +# Multipack + +4k context, bsz =4, +each character represents 256 tokens +X represents a padding token + +``` + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 +[[ A A A A A A A A A A A ] + B B B B B B ] + C C C C C C C ] + D D D D ]] + +[[ E E E E E E E E ] + [ F F F F ] + [ G G G ] + [ H H H H ]] + +[[ I I I ] + [ J J J ] + [ K K K K K] + [ L L L ]] +``` + +after padding to longest input in each step +``` + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 +[[ A A A A A A A A A A A ] + B B B B B B X X X X X X ] + C C C C C C C X X X X ] + D D D D X X X X X X X ]] + +[[ E E E E E E E E ] + [ F F F F X X X X ] + [ G G G X X X X X ] + [ H H H H X X X X ]] + +[[ I I I X X ] + [ J J J X X ] + [ K K K K K ] + [ L L L X X ]] +``` + +w packing ( note it's the same effective number of tokens per step, but a true bsz of 1) +``` + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 +[[ A A A A A A A A A A A B B B B B + B C C C C C C C D D D D E E E E + E E E E F F F F F G G G H H H H + I I I J J J J K K K K K L L L X ]] +``` From 3bd95283905db3e751a242aa387b3f5659f883ab Mon Sep 17 00:00:00 2001 From: Maxime <672982+maximegmd@users.noreply.github.com> Date: Fri, 13 Oct 2023 16:00:42 +0200 Subject: [PATCH 69/77] add noisy embedding (#721) * add noisy embedding * fix format * Update README.md * Update README.md * linter issues * caseus fixes --------- Co-authored-by: Maxime --- README.md | 5 +++ .../monkeypatch/llama_embeddings_hijack.py | 40 +++++++++++++++++++ .../monkeypatch/mistral_embeddings_hijack.py | 40 +++++++++++++++++++ src/axolotl/utils/models.py | 20 ++++++++++ 4 
files changed, 105 insertions(+) create mode 100644 src/axolotl/monkeypatch/llama_embeddings_hijack.py create mode 100644 src/axolotl/monkeypatch/mistral_embeddings_hijack.py diff --git a/README.md b/README.md index f22ccb5939..57447b36de 100644 --- a/README.md +++ b/README.md @@ -672,6 +672,11 @@ adam_epsilon: # Gradient clipping max norm max_grad_norm: +# Augmentation techniques +# NEFT https://arxiv.org/abs/2310.05914, set this to a number (paper default is 5) to add noise to embeddings +# currently only supported on Llama and Mistral +noisy_embedding_alpha: + # Whether to bettertransformers flash_optimum: # Whether to use xformers attention patch https://github.com/facebookresearch/xformers: diff --git a/src/axolotl/monkeypatch/llama_embeddings_hijack.py b/src/axolotl/monkeypatch/llama_embeddings_hijack.py new file mode 100644 index 0000000000..654ca3ba82 --- /dev/null +++ b/src/axolotl/monkeypatch/llama_embeddings_hijack.py @@ -0,0 +1,40 @@ +""" +patch to add noisy embeddings per https://arxiv.org/abs/2310.05914 +""" + +import torch +import transformers.models.llama.modeling_llama +from transformers.utils import logging + +logger = logging.get_logger(__name__) + + +def replace_llama_embeddings_with_uniform_distribution(noise_alpha=5): + # pylint: disable=duplicate-code + def noised_embed(orig_embed, noise_alpha, model): + def new_func(input_ids): + # during training, we add noise to the embedding + # during generation, we don't add noise to the embedding + if model.training: + embed_init = orig_embed(input_ids) + dims = torch.tensor(embed_init.size(1) * embed_init.size(2)) + mag_norm = noise_alpha / torch.sqrt(dims) + return embed_init + torch.zeros_like(embed_init).uniform_( + -mag_norm, mag_norm + ) + return orig_embed(input_ids) + + return new_func + + def post_init(orig_post_init): + def new_func(self): + orig_post_init(self) + self.embed_tokens.forward = noised_embed( + self.embed_tokens.forward, noise_alpha, self + ) + + return new_func + + transformers.models.llama.modeling_llama.LlamaModel.post_init = post_init( + transformers.models.llama.modeling_llama.LlamaModel.post_init + ) diff --git a/src/axolotl/monkeypatch/mistral_embeddings_hijack.py b/src/axolotl/monkeypatch/mistral_embeddings_hijack.py new file mode 100644 index 0000000000..ed5f259650 --- /dev/null +++ b/src/axolotl/monkeypatch/mistral_embeddings_hijack.py @@ -0,0 +1,40 @@ +""" +patch to add noisy embeddings per https://arxiv.org/abs/2310.05914 +""" + +import torch +import transformers.models.mistral.modeling_mistral +from transformers.utils import logging + +logger = logging.get_logger(__name__) + + +def replace_mistral_embeddings_with_uniform_distribution(noise_alpha=5): + # pylint: disable=duplicate-code + def noised_embed(orig_embed, noise_alpha, model): + def new_func(input_ids): + # during training, we add noise to the embedding + # during generation, we don't add noise to the embedding + if model.training: + embed_init = orig_embed(input_ids) + dims = torch.tensor(embed_init.size(1) * embed_init.size(2)) + mag_norm = noise_alpha / torch.sqrt(dims) + return embed_init + torch.zeros_like(embed_init).uniform_( + -mag_norm, mag_norm + ) + return orig_embed(input_ids) + + return new_func + + def post_init(orig_post_init): + def new_func(self): + orig_post_init(self) + self.embed_tokens.forward = noised_embed( + self.embed_tokens.forward, noise_alpha, self + ) + + return new_func + + transformers.models.mistral.modeling_mistral.MistralModel.post_init = post_init( + 
transformers.models.mistral.modeling_mistral.MistralModel.post_init + ) diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py index 2c60f00c2b..c133e9eb61 100644 --- a/src/axolotl/utils/models.py +++ b/src/axolotl/utils/models.py @@ -180,6 +180,26 @@ def load_model( LOG.info("patching with flash attention") replace_mistral_attn_with_flash_attn(packed=cfg.sample_packing) + if cfg.is_llama_derived_model and cfg.noisy_embedding_alpha: + from axolotl.monkeypatch.llama_embeddings_hijack import ( + replace_llama_embeddings_with_uniform_distribution, + ) + + LOG.info("patching with noisy embeddings") + replace_llama_embeddings_with_uniform_distribution( + noise_alpha=cfg.noisy_embedding_alpha + ) + + if cfg.is_mistral_derived_model and cfg.noisy_embedding_alpha: + from axolotl.monkeypatch.mistral_embeddings_hijack import ( + replace_mistral_embeddings_with_uniform_distribution, + ) + + LOG.info("patching with noisy embeddings") + replace_mistral_embeddings_with_uniform_distribution( + noise_alpha=cfg.noisy_embedding_alpha + ) + if cfg.is_llama_derived_model and cfg.xpos_rope: from axolotl.monkeypatch.xpos_rope_llama_monkey_patch import ( replace_llama_rope_with_xpos_rope, From bfbdba8614f55d430affc880fc4e3d9b5c33b13a Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 13 Oct 2023 10:27:56 -0400 Subject: [PATCH 70/77] pin xformers >= 0.0.22 (#724) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index cced386735..1092ebddfb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -16,7 +16,7 @@ flash-attn>=2.3.0 sentencepiece wandb einops -xformers +xformers>=0.0.22 optimum hf_transfer colorama From f30afe4544982140f9ca60c6428a47419f0f9346 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 13 Oct 2023 11:04:39 -0400 Subject: [PATCH 71/77] misc sharegpt fixes (#723) * support for sharegpt with assistant talking first, better masking of assistant token, allow remap of roles from dataset * invalid role is actually not possible * update tokenized fixture for corrected labels --- src/axolotl/prompt_tokenizers.py | 68 +++++++++++----------- src/axolotl/prompters.py | 6 +- tests/fixtures/conversation.tokenized.json | 2 +- tests/test_prompt_tokenizers.py | 67 +++++++++++++++++++++ 4 files changed, 107 insertions(+), 36 deletions(-) diff --git a/src/axolotl/prompt_tokenizers.py b/src/axolotl/prompt_tokenizers.py index 1b39336642..4e30b81a71 100644 --- a/src/axolotl/prompt_tokenizers.py +++ b/src/axolotl/prompt_tokenizers.py @@ -2,7 +2,6 @@ import abc import copy -import functools import logging from typing import Dict, List, Tuple, Union @@ -57,26 +56,6 @@ def tokenize_prompt(self, prompt): def supports_batched(self): return False - @functools.lru_cache(maxsize=128) - def _get_user_token(self): - try: - id_or_ids = self.tokenizer.convert_tokens_to_ids("<|USER|>") - if isinstance(id_or_ids, (int,)): - return id_or_ids - except KeyError: - pass - return False - - @functools.lru_cache(maxsize=128) - def _get_assistant_token(self): - try: - id_or_ids = self.tokenizer.convert_tokens_to_ids("<|ASSISTANT|>") - if isinstance(id_or_ids, (int,)): - return id_or_ids - except KeyError: - pass - return False - def _tokenize( self, prompt: str, add_eos_token: bool = True, strip_bos_token: bool = False ) -> BatchEncoding: @@ -356,18 +335,34 @@ def get_conversation_thread(self, prompt): def tokenize_prompt(self, prompt): result, current_len = tokenize_prompt_default() - user_token = self._get_user_token() - assistant_token = 
self._get_assistant_token() conversation: Conversation = ( - self.prompter._conversation # pylint: disable=protected-access + self.prompter._conversation.copy() # pylint: disable=protected-access ) + + # support for custom roles from the dataset, only useful for vicuna style prompts/roles + role_remap = [] + if ( + conversation.name == "vicuna_v1.1" + and "roles" in prompt + and len(prompt["roles"]) >= 2 + ): + role_remap = [ + {"from": conversation.roles[0], "to": prompt["roles"][0]}, + {"from": conversation.roles[1], "to": prompt["roles"][1]}, + ] + try: for _, part in enumerate( self.prompter.build_prompt(self.get_conversation_thread(prompt)) ): if isinstance(part, tuple): if conversation.roles[0] in part[0]: - turn = part[0] + part[1] if not user_token else part[1] + role = ( + part[0].replace(role_remap[0]["from"], role_remap[0]["to"]) + if role_remap + else part[0] + ) + turn = role + part[1] # this is still the user query, we should if not part[1].strip(): LOG.warning(f"user turn has empty text: {prompt}") @@ -376,13 +371,16 @@ def tokenize_prompt(self, prompt): add_eos_token=False, strip_bos_token=True, ) - if user_token: - res["input_ids"] = [user_token, *res["input_ids"]] # everything from this is masked out from the labels labels = [IGNORE_TOKEN_ID] * len(res["input_ids"]) elif conversation.roles[1] in part[0]: # TODO label assistant token/tokens w/ IGNORE_TOKEN_ID - turn = part[0] + part[1] if not assistant_token else part[1] + role = ( + part[0].replace(role_remap[1]["from"], role_remap[1]["to"]) + if role_remap + else part[0] + ) + turn = role + part[1] # this should be the assistant response, should end with an eos token if not part[1].strip(): LOG.warning(f"assistant turn has empty text: {prompt}") @@ -391,13 +389,17 @@ def tokenize_prompt(self, prompt): add_eos_token=True, strip_bos_token=True, ) - if assistant_token: - res["input_ids"] = [ - assistant_token, - *res["input_ids"], - ] + role_res = self._tokenize( + role.rstrip(), + add_eos_token=False, + strip_bos_token=True, + ) # not masked out from labels labels = copy.deepcopy(res["input_ids"]) + len_role = len(role_res["input_ids"]) + labels[:len_role] = [IGNORE_TOKEN_ID] * min( + len_role, len(labels) + ) elif part[0] == "": turn = part[1] # this is only ever the first part, should include the bos token and the user query diff --git a/src/axolotl/prompters.py b/src/axolotl/prompters.py index 370e15091c..7cd89886aa 100644 --- a/src/axolotl/prompters.py +++ b/src/axolotl/prompters.py @@ -274,9 +274,11 @@ def build_prompt(self, source) -> Generator[str, None, None]: raise err conv.messages = [] - for j, sentence in enumerate(source): + for _, sentence in enumerate(source): role = roles[sentence["from"]] - if role != conv.roles[j % 2]: + if len(conv.messages) > 0 and ( + (role == conv.messages[-1][0]) or (role not in conv.roles) + ): LOG.warning(f"{SHAREGPT_ASSERTION_FAILED_ROLE}: {sentence}") conv.append_message(role, sentence["value"]) diff --git a/tests/fixtures/conversation.tokenized.json b/tests/fixtures/conversation.tokenized.json index ea4fca0990..4b7334cc15 100644 --- a/tests/fixtures/conversation.tokenized.json +++ b/tests/fixtures/conversation.tokenized.json @@ -1 +1 @@ -{"input_ids": [1, 319, 13563, 1546, 263, 12758, 1404, 322, 385, 23116, 21082, 20255, 29889, 450, 20255, 4076, 8444, 29892, 13173, 29892, 322, 1248, 568, 6089, 304, 278, 1404, 29915, 29879, 5155, 29889, 29871, 3148, 1001, 29901, 920, 1033, 474, 2334, 263, 29086, 705, 11356, 5687, 393, 3667, 4637, 21531, 20159, 304, 4505, 1045, 3163, 29973, 29871, 
319, 1799, 9047, 13566, 29901, 739, 29915, 29879, 1950, 304, 671, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29892, 541, 372, 723, 5517, 367, 3755, 4280, 29889, 838, 5869, 293, 20159, 338, 263, 5443, 310, 23964, 393, 11898, 278, 4426, 310, 8162, 393, 526, 21634, 1090, 9126, 316, 689, 800, 29892, 1316, 408, 16116, 292, 322, 289, 2548, 29889, 512, 278, 3030, 310, 341, 1475, 705, 11356, 29892, 445, 1033, 367, 1304, 304, 27599, 278, 3829, 310, 278, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 2398, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 739, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, 3148, 1001, 29901, 1033, 366, 2367, 592, 278, 330, 391, 310, 920, 372, 1033, 2466, 29973, 29871, 319, 1799, 9047, 13566, 29901, 18585, 29889, 450, 6996, 2969, 723, 367, 304, 671, 21531, 20159, 304, 27599, 278, 3829, 310, 278, 341, 1475, 705, 11356, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 910, 1033, 25135, 15783, 278, 3748, 7613, 408, 263, 25002, 2913, 322, 773, 25002, 15939, 1934, 1316, 408, 3632, 327, 2270, 6471, 322, 350, 9890, 3694, 304, 10115, 2472, 1048, 278, 4423, 310, 29086, 373, 278, 7613, 29889, 2398, 29892, 408, 306, 5276, 1434, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 577, 372, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, 3148, 1001, 29901, 2367, 592, 263, 2702, 1342, 29871, 319, 1799, 9047, 13566, 29901, 1094, 263, 2702, 1342, 29892, 7755, 591, 505, 263, 341, 1475, 705, 11356, 3748, 7613, 411, 278, 1494, 5285, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 797, 445, 1206, 29892, 278, 3748, 7613, 508, 367, 9875, 408, 263, 25002, 2913, 29892, 988, 1269, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29889, 450, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 5293, 21531, 20159, 29892, 591, 508, 27599, 278, 3829, 310, 445, 2913, 322, 8161, 278, 14354, 310, 278, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 13, 13, 2831, 2777, 29892, 697, 982, 304, 437, 445, 338, 304, 10272, 278, 3632, 327, 2270, 6471, 310, 278, 2913, 29889, 15089, 327, 2270, 6471, 526, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 29892, 322, 896, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 512, 278, 1206, 310, 278, 7613, 2038, 29892, 591, 508, 10272, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 29892, 607, 16612, 278, 1353, 322, 376, 12181, 29908, 310, 278, 26532, 297, 278, 2913, 29889, 512, 445, 1206, 29892, 278, 937, 3632, 327, 2270, 2318, 723, 2649, 502, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29889, 13, 13, 29933, 1463, 373, 445, 2472, 29892, 591, 508, 10115, 393, 278, 29086, 297, 278, 3748, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29892, 408, 4318, 2400, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 
29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 341, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 341, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1342, 310, 920, 21531, 20159, 1033, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 526, 1784, 916, 1950, 13501, 29892, 322, 278, 2702, 4902, 310, 920, 304, 2334, 963, 723, 8839, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 1094, 306, 5276, 1434, 29892, 16049, 263, 1650, 763, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, 3148, 1001, 29901, 10241, 474, 471, 4856, 411, 263, 6483, 8004, 310, 1716, 29892, 1033, 366, 5649, 278, 1021, 6964, 304, 592, 411, 393, 11833, 29973, 29871, 319, 1799, 9047, 13566, 29901, 18585, 29889, 17090, 366, 505, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 1244, 338, 920, 366, 1795, 671, 21531, 20159, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 4803, 25002, 15939, 1934, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1670, 526, 1784, 1950, 25002, 15939, 1934, 393, 1033, 367, 1304, 363, 445, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29941, 29889, 4803, 278, 2472, 10115, 1127, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 21531, 20159, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, 3148, 1001, 29901, 920, 723, 474, 437, 372, 411, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 29973, 29871, 319, 1799, 9047, 13566, 29901, 1763, 671, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29892, 366, 723, 1101, 278, 1021, 6996, 6576, 408, 5439, 2038, 29892, 541, 411, 777, 12651, 297, 278, 2702, 4902, 29889, 13, 13, 2831, 2777, 29892, 1244, 338, 920, 366, 1795, 671, 28152, 3632, 3002, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 
263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 11796, 29872, 278, 28152, 3632, 3002, 310, 278, 2913, 29889, 9034, 9696, 3632, 3002, 338, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 975, 931, 29892, 322, 372, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 13, 29941, 29889, 4803, 278, 2472, 515, 278, 28152, 3632, 3002, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 28152, 3632, 3002, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 2702, 4902, 310, 278, 28152, 3632, 3002, 16287, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 28152, 3632, 3002, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, 3148, 1001, 29901, 920, 1033, 474, 2334, 445, 297, 3017, 29973, 29871, 319, 1799, 9047, 13566, 29901, 1763, 2334, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29892, 366, 723, 817, 304, 437, 278, 1494, 29901, 13, 13, 29896, 29889, 16052, 263, 5132, 3577, 363, 21531, 20159, 29889, 1670, 526, 3196, 3987, 3625, 29892, 1316, 408, 402, 566, 2918, 29892, 360, 291, 952, 375, 29892, 470, 4560, 7354, 29899, 29873, 1388, 29889, 4525, 9741, 3867, 14009, 322, 848, 12286, 363, 20602, 25002, 15939, 1934, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29906, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 910, 1033, 367, 2309, 773, 263, 848, 3829, 4944, 491, 278, 21531, 20159, 3577, 29892, 1316, 408, 263, 3053, 506, 616, 4280, 470, 263, 478, 2035, 29367, 29899, 29934, 4512, 4280, 29889, 7806, 6862, 373, 278, 7613, 723, 3928, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 723, 367, 9875, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29889, 13, 29941, 29889, 11796, 29872, 278, 25002, 15939, 1934, 310, 278, 2913, 29889, 910, 1033, 367, 2309, 773, 14009, 4944, 491, 278, 21531, 20159, 3577, 29889, 1152, 1342, 29892, 366, 1033, 10272, 278, 28152, 3632, 3002, 310, 278, 2913, 773, 278, 28152, 3632, 3002, 5687, 29889, 13, 29946, 29889, 4803, 278, 2472, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 910, 1033, 367, 2309, 773, 263, 10296, 310, 19475, 24481, 322, 8720, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 366, 1033, 671, 263, 8424, 310, 775, 304, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29945, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 1033, 671, 263, 8424, 310, 775, 304, 6336, 2828, 373, 278, 9109, 
25256, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 16049, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 4902, 310, 278, 5314, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 304, 679, 4687, 411, 1316, 263, 2060, 21106, 29879, 29958, 2], "attention_mask": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], "labels": [-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 739, 29915, 29879, 1950, 304, 671, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29892, 541, 372, 723, 5517, 367, 3755, 4280, 29889, 838, 5869, 293, 20159, 338, 263, 5443, 310, 23964, 393, 11898, 278, 4426, 310, 8162, 393, 526, 21634, 1090, 9126, 316, 689, 800, 29892, 1316, 408, 16116, 292, 322, 289, 2548, 29889, 512, 278, 3030, 310, 341, 1475, 705, 11356, 29892, 445, 1033, 367, 1304, 304, 
27599, 278, 3829, 310, 278, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 2398, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 739, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 18585, 29889, 450, 6996, 2969, 723, 367, 304, 671, 21531, 20159, 304, 27599, 278, 3829, 310, 278, 341, 1475, 705, 11356, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 910, 1033, 25135, 15783, 278, 3748, 7613, 408, 263, 25002, 2913, 322, 773, 25002, 15939, 1934, 1316, 408, 3632, 327, 2270, 6471, 322, 350, 9890, 3694, 304, 10115, 2472, 1048, 278, 4423, 310, 29086, 373, 278, 7613, 29889, 2398, 29892, 408, 306, 5276, 1434, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 577, 372, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 1094, 263, 2702, 1342, 29892, 7755, 591, 505, 263, 341, 1475, 705, 11356, 3748, 7613, 411, 278, 1494, 5285, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 797, 445, 1206, 29892, 278, 3748, 7613, 508, 367, 9875, 408, 263, 25002, 2913, 29892, 988, 1269, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29889, 450, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 5293, 21531, 20159, 29892, 591, 508, 27599, 278, 3829, 310, 445, 2913, 322, 8161, 278, 14354, 310, 278, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 13, 13, 2831, 2777, 29892, 697, 982, 304, 437, 445, 338, 304, 10272, 278, 3632, 327, 2270, 6471, 310, 278, 2913, 29889, 15089, 327, 2270, 6471, 526, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 29892, 322, 896, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 512, 278, 1206, 310, 278, 7613, 2038, 29892, 591, 508, 10272, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 29892, 607, 16612, 278, 1353, 322, 376, 12181, 29908, 310, 278, 26532, 297, 278, 2913, 29889, 512, 445, 1206, 29892, 278, 937, 3632, 327, 2270, 2318, 723, 2649, 502, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29889, 13, 13, 29933, 1463, 373, 445, 2472, 29892, 591, 508, 10115, 393, 278, 29086, 297, 278, 3748, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29892, 408, 4318, 2400, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 341, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 341, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1342, 310, 920, 21531, 20159, 1033, 367, 1304, 304, 4505, 341, 
1475, 705, 11356, 29889, 1670, 526, 1784, 916, 1950, 13501, 29892, 322, 278, 2702, 4902, 310, 920, 304, 2334, 963, 723, 8839, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 1094, 306, 5276, 1434, 29892, 16049, 263, 1650, 763, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 18585, 29889, 17090, 366, 505, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 1244, 338, 920, 366, 1795, 671, 21531, 20159, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 4803, 25002, 15939, 1934, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1670, 526, 1784, 1950, 25002, 15939, 1934, 393, 1033, 367, 1304, 363, 445, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29941, 29889, 4803, 278, 2472, 10115, 1127, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 21531, 20159, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 1763, 671, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29892, 366, 723, 1101, 278, 1021, 6996, 6576, 408, 5439, 2038, 29892, 541, 411, 777, 12651, 297, 278, 2702, 4902, 29889, 13, 13, 2831, 2777, 29892, 1244, 338, 920, 366, 1795, 671, 28152, 3632, 3002, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 11796, 29872, 278, 28152, 3632, 3002, 310, 278, 2913, 29889, 9034, 9696, 3632, 3002, 338, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 975, 931, 29892, 322, 372, 508, 367, 1304, 304, 10115, 
2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 13, 29941, 29889, 4803, 278, 2472, 515, 278, 28152, 3632, 3002, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 28152, 3632, 3002, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 2702, 4902, 310, 278, 28152, 3632, 3002, 16287, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 28152, 3632, 3002, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 319, 1799, 9047, 13566, 29901, 1763, 2334, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29892, 366, 723, 817, 304, 437, 278, 1494, 29901, 13, 13, 29896, 29889, 16052, 263, 5132, 3577, 363, 21531, 20159, 29889, 1670, 526, 3196, 3987, 3625, 29892, 1316, 408, 402, 566, 2918, 29892, 360, 291, 952, 375, 29892, 470, 4560, 7354, 29899, 29873, 1388, 29889, 4525, 9741, 3867, 14009, 322, 848, 12286, 363, 20602, 25002, 15939, 1934, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29906, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 910, 1033, 367, 2309, 773, 263, 848, 3829, 4944, 491, 278, 21531, 20159, 3577, 29892, 1316, 408, 263, 3053, 506, 616, 4280, 470, 263, 478, 2035, 29367, 29899, 29934, 4512, 4280, 29889, 7806, 6862, 373, 278, 7613, 723, 3928, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 723, 367, 9875, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29889, 13, 29941, 29889, 11796, 29872, 278, 25002, 15939, 1934, 310, 278, 2913, 29889, 910, 1033, 367, 2309, 773, 14009, 4944, 491, 278, 21531, 20159, 3577, 29889, 1152, 1342, 29892, 366, 1033, 10272, 278, 28152, 3632, 3002, 310, 278, 2913, 773, 278, 28152, 3632, 3002, 5687, 29889, 13, 29946, 29889, 4803, 278, 2472, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 910, 1033, 367, 2309, 773, 263, 10296, 310, 19475, 24481, 322, 8720, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 366, 1033, 671, 263, 8424, 310, 775, 304, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29945, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 1033, 671, 263, 8424, 310, 775, 304, 6336, 2828, 373, 278, 9109, 25256, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 16049, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 4902, 310, 278, 5314, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 304, 679, 4687, 411, 1316, 263, 2060, 21106, 29879, 29958, 2]} 
+{"input_ids": [1, 319, 13563, 1546, 263, 12758, 1404, 322, 385, 23116, 21082, 20255, 29889, 450, 20255, 4076, 8444, 29892, 13173, 29892, 322, 1248, 568, 6089, 304, 278, 1404, 29915, 29879, 5155, 29889, 29871, 3148, 1001, 29901, 920, 1033, 474, 2334, 263, 29086, 705, 11356, 5687, 393, 3667, 4637, 21531, 20159, 304, 4505, 1045, 3163, 29973, 29871, 319, 1799, 9047, 13566, 29901, 739, 29915, 29879, 1950, 304, 671, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29892, 541, 372, 723, 5517, 367, 3755, 4280, 29889, 838, 5869, 293, 20159, 338, 263, 5443, 310, 23964, 393, 11898, 278, 4426, 310, 8162, 393, 526, 21634, 1090, 9126, 316, 689, 800, 29892, 1316, 408, 16116, 292, 322, 289, 2548, 29889, 512, 278, 3030, 310, 341, 1475, 705, 11356, 29892, 445, 1033, 367, 1304, 304, 27599, 278, 3829, 310, 278, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 2398, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 739, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, 3148, 1001, 29901, 1033, 366, 2367, 592, 278, 330, 391, 310, 920, 372, 1033, 2466, 29973, 29871, 319, 1799, 9047, 13566, 29901, 18585, 29889, 450, 6996, 2969, 723, 367, 304, 671, 21531, 20159, 304, 27599, 278, 3829, 310, 278, 341, 1475, 705, 11356, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 910, 1033, 25135, 15783, 278, 3748, 7613, 408, 263, 25002, 2913, 322, 773, 25002, 15939, 1934, 1316, 408, 3632, 327, 2270, 6471, 322, 350, 9890, 3694, 304, 10115, 2472, 1048, 278, 4423, 310, 29086, 373, 278, 7613, 29889, 2398, 29892, 408, 306, 5276, 1434, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 577, 372, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, 3148, 1001, 29901, 2367, 592, 263, 2702, 1342, 29871, 319, 1799, 9047, 13566, 29901, 1094, 263, 2702, 1342, 29892, 7755, 591, 505, 263, 341, 1475, 705, 11356, 3748, 7613, 411, 278, 1494, 5285, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 797, 445, 1206, 29892, 278, 3748, 7613, 508, 367, 9875, 408, 263, 25002, 2913, 29892, 988, 1269, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29889, 450, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 5293, 21531, 20159, 29892, 591, 508, 27599, 278, 3829, 310, 445, 2913, 322, 8161, 278, 14354, 310, 278, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 13, 13, 2831, 2777, 29892, 697, 982, 304, 437, 445, 338, 304, 10272, 278, 3632, 327, 2270, 6471, 310, 278, 2913, 29889, 15089, 327, 2270, 6471, 526, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 29892, 322, 896, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 512, 278, 1206, 310, 278, 7613, 2038, 29892, 591, 508, 10272, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 29892, 607, 16612, 278, 1353, 322, 376, 12181, 29908, 310, 278, 26532, 297, 278, 2913, 29889, 512, 445, 1206, 29892, 278, 
937, 3632, 327, 2270, 2318, 723, 2649, 502, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29889, 13, 13, 29933, 1463, 373, 445, 2472, 29892, 591, 508, 10115, 393, 278, 29086, 297, 278, 3748, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29892, 408, 4318, 2400, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 341, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 341, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1342, 310, 920, 21531, 20159, 1033, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 526, 1784, 916, 1950, 13501, 29892, 322, 278, 2702, 4902, 310, 920, 304, 2334, 963, 723, 8839, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 1094, 306, 5276, 1434, 29892, 16049, 263, 1650, 763, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, 3148, 1001, 29901, 10241, 474, 471, 4856, 411, 263, 6483, 8004, 310, 1716, 29892, 1033, 366, 5649, 278, 1021, 6964, 304, 592, 411, 393, 11833, 29973, 29871, 319, 1799, 9047, 13566, 29901, 18585, 29889, 17090, 366, 505, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 1244, 338, 920, 366, 1795, 671, 21531, 20159, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 4803, 25002, 15939, 1934, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1670, 526, 1784, 1950, 25002, 15939, 1934, 393, 1033, 367, 1304, 363, 445, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29941, 29889, 4803, 278, 2472, 10115, 1127, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 21531, 20159, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, 3148, 1001, 29901, 920, 723, 474, 437, 372, 411, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 29973, 29871, 319, 1799, 9047, 13566, 29901, 1763, 671, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29892, 366, 723, 1101, 278, 1021, 6996, 6576, 
408, 5439, 2038, 29892, 541, 411, 777, 12651, 297, 278, 2702, 4902, 29889, 13, 13, 2831, 2777, 29892, 1244, 338, 920, 366, 1795, 671, 28152, 3632, 3002, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 11796, 29872, 278, 28152, 3632, 3002, 310, 278, 2913, 29889, 9034, 9696, 3632, 3002, 338, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 975, 931, 29892, 322, 372, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 13, 29941, 29889, 4803, 278, 2472, 515, 278, 28152, 3632, 3002, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 28152, 3632, 3002, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 2702, 4902, 310, 278, 28152, 3632, 3002, 16287, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 28152, 3632, 3002, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, 3148, 1001, 29901, 920, 1033, 474, 2334, 445, 297, 3017, 29973, 29871, 319, 1799, 9047, 13566, 29901, 1763, 2334, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29892, 366, 723, 817, 304, 437, 278, 1494, 29901, 13, 13, 29896, 29889, 16052, 263, 5132, 3577, 363, 21531, 20159, 29889, 1670, 526, 3196, 3987, 3625, 29892, 1316, 408, 402, 566, 2918, 29892, 360, 291, 952, 375, 29892, 470, 4560, 7354, 29899, 29873, 1388, 29889, 4525, 9741, 3867, 14009, 322, 848, 12286, 363, 20602, 25002, 15939, 1934, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29906, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 910, 1033, 367, 2309, 773, 263, 848, 3829, 4944, 491, 278, 21531, 20159, 3577, 29892, 1316, 408, 263, 3053, 506, 616, 4280, 470, 263, 478, 2035, 29367, 29899, 29934, 4512, 4280, 29889, 7806, 6862, 373, 278, 7613, 723, 3928, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 723, 367, 9875, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29889, 13, 29941, 29889, 11796, 29872, 278, 25002, 15939, 1934, 310, 278, 2913, 29889, 910, 1033, 367, 2309, 773, 14009, 4944, 491, 278, 21531, 20159, 3577, 29889, 1152, 1342, 29892, 366, 1033, 10272, 278, 28152, 3632, 3002, 310, 278, 2913, 773, 278, 28152, 3632, 3002, 5687, 29889, 13, 29946, 29889, 4803, 278, 2472, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 910, 1033, 367, 2309, 773, 263, 10296, 310, 19475, 24481, 322, 8720, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 
8267, 29892, 769, 366, 1033, 671, 263, 8424, 310, 775, 304, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29945, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 1033, 671, 263, 8424, 310, 775, 304, 6336, 2828, 373, 278, 9109, 25256, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 16049, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 4902, 310, 278, 5314, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 304, 679, 4687, 411, 1316, 263, 2060, 21106, 29879, 29958, 2], "attention_mask": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], "labels": [-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 739, 29915, 29879, 1950, 304, 671, 21531, 
20159, 304, 4505, 341, 1475, 705, 11356, 29892, 541, 372, 723, 5517, 367, 3755, 4280, 29889, 838, 5869, 293, 20159, 338, 263, 5443, 310, 23964, 393, 11898, 278, 4426, 310, 8162, 393, 526, 21634, 1090, 9126, 316, 689, 800, 29892, 1316, 408, 16116, 292, 322, 289, 2548, 29889, 512, 278, 3030, 310, 341, 1475, 705, 11356, 29892, 445, 1033, 367, 1304, 304, 27599, 278, 3829, 310, 278, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 2398, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29889, 739, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 18585, 29889, 450, 6996, 2969, 723, 367, 304, 671, 21531, 20159, 304, 27599, 278, 3829, 310, 278, 341, 1475, 705, 11356, 3748, 7613, 322, 8161, 278, 14354, 310, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 910, 1033, 25135, 15783, 278, 3748, 7613, 408, 263, 25002, 2913, 322, 773, 25002, 15939, 1934, 1316, 408, 3632, 327, 2270, 6471, 322, 350, 9890, 3694, 304, 10115, 2472, 1048, 278, 4423, 310, 29086, 373, 278, 7613, 29889, 2398, 29892, 408, 306, 5276, 1434, 29892, 16049, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 577, 372, 29915, 29879, 451, 1554, 393, 508, 367, 10824, 297, 263, 2560, 1234, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 1094, 263, 2702, 1342, 29892, 7755, 591, 505, 263, 341, 1475, 705, 11356, 3748, 7613, 411, 278, 1494, 5285, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 797, 445, 1206, 29892, 278, 3748, 7613, 508, 367, 9875, 408, 263, 25002, 2913, 29892, 988, 1269, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29889, 450, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 5293, 21531, 20159, 29892, 591, 508, 27599, 278, 3829, 310, 445, 2913, 322, 8161, 278, 14354, 310, 278, 29086, 2729, 373, 3058, 25002, 15939, 1934, 29889, 13, 13, 2831, 2777, 29892, 697, 982, 304, 437, 445, 338, 304, 10272, 278, 3632, 327, 2270, 6471, 310, 278, 2913, 29889, 15089, 327, 2270, 6471, 526, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 29892, 322, 896, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 512, 278, 1206, 310, 278, 7613, 2038, 29892, 591, 508, 10272, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 29892, 607, 16612, 278, 1353, 322, 376, 12181, 29908, 310, 278, 26532, 297, 278, 2913, 29889, 512, 445, 1206, 29892, 278, 937, 3632, 327, 2270, 2318, 723, 2649, 502, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29889, 13, 13, 29933, 1463, 373, 445, 2472, 29892, 591, 508, 10115, 393, 278, 29086, 297, 278, 3748, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29892, 408, 4318, 2400, 29901, 13, 28956, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 29871, 29896, 29871, 341, 29871, 
1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 1577, 259, 29896, 13, 29871, 29896, 29871, 1577, 29871, 1577, 29871, 341, 259, 29896, 13, 29871, 29896, 259, 29896, 259, 29896, 259, 29896, 259, 29896, 13, 28956, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1342, 310, 920, 21531, 20159, 1033, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 526, 1784, 916, 1950, 13501, 29892, 322, 278, 2702, 4902, 310, 920, 304, 2334, 963, 723, 8839, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 1094, 306, 5276, 1434, 29892, 16049, 263, 1650, 763, 445, 723, 1996, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 18585, 29889, 17090, 366, 505, 263, 6483, 8004, 310, 1716, 21531, 20159, 322, 278, 3748, 310, 341, 1475, 705, 11356, 29892, 1244, 338, 920, 366, 1795, 671, 21531, 20159, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 4803, 25002, 15939, 1934, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1670, 526, 1784, 1950, 25002, 15939, 1934, 393, 1033, 367, 1304, 363, 445, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29941, 29889, 4803, 278, 2472, 10115, 1127, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 937, 3632, 327, 2270, 2318, 310, 278, 2913, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 21531, 20159, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 25002, 15939, 1934, 1641, 1304, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 21531, 20159, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 1763, 671, 28152, 3632, 3002, 470, 738, 310, 278, 916, 25002, 15939, 1934, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29892, 366, 723, 1101, 278, 1021, 6996, 6576, 408, 5439, 2038, 29892, 541, 411, 777, 12651, 297, 278, 2702, 4902, 29889, 13, 13, 2831, 2777, 29892, 1244, 338, 920, 366, 1795, 671, 28152, 3632, 3002, 304, 4505, 263, 341, 1475, 705, 11356, 3748, 7613, 29901, 13, 13, 29896, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 7806, 6862, 373, 278, 7613, 16161, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 
7613, 508, 367, 2714, 310, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29892, 607, 526, 3291, 393, 526, 451, 760, 310, 278, 2913, 29889, 13, 29906, 29889, 11796, 29872, 278, 28152, 3632, 3002, 310, 278, 2913, 29889, 9034, 9696, 3632, 3002, 338, 263, 982, 310, 20766, 278, 26532, 297, 263, 2913, 975, 931, 29892, 322, 372, 508, 367, 1304, 304, 10115, 2472, 1048, 278, 14354, 310, 278, 29086, 373, 278, 341, 1475, 705, 11356, 7613, 29889, 13, 29941, 29889, 4803, 278, 2472, 515, 278, 28152, 3632, 3002, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 591, 508, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29946, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 508, 671, 445, 2472, 304, 4772, 963, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 445, 338, 925, 697, 1950, 2948, 304, 773, 28152, 3632, 3002, 304, 4505, 341, 1475, 705, 11356, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 3748, 7613, 322, 278, 2702, 4902, 310, 278, 28152, 3632, 3002, 16287, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 28152, 3632, 3002, 508, 367, 1304, 304, 4505, 341, 1475, 705, 11356, 21106, 29879, 29958, 2, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 1763, 2334, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29892, 366, 723, 817, 304, 437, 278, 1494, 29901, 13, 13, 29896, 29889, 16052, 263, 5132, 3577, 363, 21531, 20159, 29889, 1670, 526, 3196, 3987, 3625, 29892, 1316, 408, 402, 566, 2918, 29892, 360, 291, 952, 375, 29892, 470, 4560, 7354, 29899, 29873, 1388, 29889, 4525, 9741, 3867, 14009, 322, 848, 12286, 363, 20602, 25002, 15939, 1934, 29892, 1316, 408, 3632, 327, 2270, 6471, 29892, 350, 9890, 3694, 29892, 470, 28152, 3632, 3002, 29889, 13, 29906, 29889, 16314, 278, 341, 1475, 705, 11356, 3748, 7613, 408, 263, 25002, 2913, 29889, 910, 1033, 367, 2309, 773, 263, 848, 3829, 4944, 491, 278, 21531, 20159, 3577, 29892, 1316, 408, 263, 3053, 506, 616, 4280, 470, 263, 478, 2035, 29367, 29899, 29934, 4512, 4280, 29889, 7806, 6862, 373, 278, 7613, 723, 3928, 304, 263, 1298, 297, 278, 2913, 29892, 322, 278, 29086, 373, 278, 7613, 723, 367, 9875, 408, 376, 5391, 267, 29908, 297, 278, 2913, 29889, 13, 29941, 29889, 11796, 29872, 278, 25002, 15939, 1934, 310, 278, 2913, 29889, 910, 1033, 367, 2309, 773, 14009, 4944, 491, 278, 21531, 20159, 3577, 29889, 1152, 1342, 29892, 366, 1033, 10272, 278, 28152, 3632, 3002, 310, 278, 2913, 773, 278, 28152, 3632, 3002, 5687, 29889, 13, 29946, 29889, 4803, 278, 2472, 515, 278, 25002, 15939, 1934, 304, 8161, 278, 14354, 310, 278, 29086, 373, 278, 7613, 29889, 910, 1033, 367, 2309, 773, 263, 10296, 310, 19475, 24481, 322, 8720, 29889, 1152, 1342, 29892, 565, 278, 28152, 3632, 3002, 14088, 393, 727, 526, 3023, 26532, 297, 278, 2913, 29892, 21050, 297, 263, 6862, 8267, 29892, 769, 366, 1033, 671, 263, 8424, 310, 775, 304, 10115, 393, 278, 29086, 526, 5982, 297, 278, 3023, 26995, 310, 278, 7613, 29889, 13, 29945, 29889, 4803, 445, 2472, 304, 4505, 278, 341, 1475, 705, 11356, 3748, 29889, 9038, 366, 1073, 278, 14354, 310, 278, 29086, 29892, 366, 1033, 671, 263, 8424, 310, 775, 304, 6336, 2828, 373, 278, 9109, 25256, 322, 2821, 278, 7613, 29889, 13, 13, 2776, 3236, 29892, 
445, 338, 925, 697, 1950, 2948, 304, 16049, 263, 341, 1475, 705, 11356, 899, 369, 393, 3913, 21531, 20159, 297, 5132, 29889, 1670, 1122, 367, 916, 5837, 304, 437, 372, 29892, 8679, 373, 278, 2702, 4902, 310, 278, 5314, 29889, 2398, 29892, 445, 881, 2367, 366, 263, 2498, 2969, 310, 920, 304, 679, 4687, 411, 1316, 263, 2060, 21106, 29879, 29958, 2]} diff --git a/tests/test_prompt_tokenizers.py b/tests/test_prompt_tokenizers.py index 9ee5ea588d..0635bd718b 100644 --- a/tests/test_prompt_tokenizers.py +++ b/tests/test_prompt_tokenizers.py @@ -90,6 +90,73 @@ def test_sharegpt_warnings_integration(self): strat.tokenize_prompt(conversation) assert "assistant turn has empty text" in self._caplog.records[1].message + def test_sharegpt_warnings_turns(self): + conversation = { + "conversations": [ + {"from": "system", "value": "lorem"}, + {"from": "gpt", "value": "ipsum"}, + {"from": "human", "value": "dolor"}, + {"from": "human", "value": "dolor"}, + {"from": "gpt", "value": "sit"}, + ] + } + prompter = ShareGPTPrompterV2() + strat = ShareGPTPromptTokenizingStrategy( + prompter, + self.tokenizer, + False, + 2048, + ) + with self._caplog.at_level(logging.WARNING): + strat.tokenize_prompt(conversation) + assert ( + "Role did not alternate between turns (gpt and human)" + in self._caplog.records[0].message + ) + + def test_sharegpt_changes_roles(self): + conversation = { + "roles": ["USER", "CHARACTER"], + "conversations": [ + {"from": "system", "value": "lorem"}, + {"from": "gpt", "value": "ipsum"}, + {"from": "human", "value": "dolor"}, + {"from": "gpt", "value": "sit"}, + ], + } + prompter = ShareGPTPrompterV2() + strat = ShareGPTPromptTokenizingStrategy( + prompter, + self.tokenizer, + False, + 2048, + ) + with self._caplog.at_level(logging.WARNING): + res = strat.tokenize_prompt(conversation) + assert "CHARACTER" in self.tokenizer.decode(res["input_ids"]) + + def test_sharegpt_assistant_label_ignore(self): + conversation = { + "roles": ["user", "assistant"], + "conversations": [ + {"from": "system", "value": "lorem"}, + {"from": "gpt", "value": "ipsum"}, + {"from": "human", "value": "dolor"}, + {"from": "gpt", "value": "sit"}, + ], + } + prompter = ShareGPTPrompterV2() + strat = ShareGPTPromptTokenizingStrategy( + prompter, + self.tokenizer, + False, + 2048, + ) + with self._caplog.at_level(logging.WARNING): + res = strat.tokenize_prompt(conversation) + idx = res["input_ids"].index(20255) # assistant token + assert res["labels"][idx] == -100 + def test_no_sys_prompt(self): """ tests the interface between the user and assistant parts From 8d288a2ad48aefca02e2fc9773b1e63859a39411 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 13 Oct 2023 11:19:30 -0400 Subject: [PATCH 72/77] workaround for installing xformers w torch 2.1.0 (#725) --- setup.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/setup.py b/setup.py index 3109d4af1a..1ceda5c044 100644 --- a/setup.py +++ b/setup.py @@ -21,6 +21,14 @@ def parse_requirements(): ): # Handle standard packages _install_requires.append(line) + + # TODO(wing) remove once xformers release supports torch 2.1.0 + if "torch==2.1.0" in _install_requires: + _install_requires.pop(_install_requires.index("xformers>=0.0.22")) + _install_requires.append( + "git+https://github.com/facebookresearch/xformers.git@main#egg=xformers" + ) + return _install_requires, _dependency_links From 7f2027d93f571ceffd56db79b66cf2c50f4c68e2 Mon Sep 17 00:00:00 2001 From: Wing Lian Date: Fri, 13 Oct 2023 15:21:17 -0400 Subject: [PATCH 73/77] tweak for xformers install w pytorch 
 2.1.0 (#727)

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 1ceda5c044..ada5fcb289 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ def parse_requirements():
     if "torch==2.1.0" in _install_requires:
         _install_requires.pop(_install_requires.index("xformers>=0.0.22"))
         _install_requires.append(
-            "git+https://github.com/facebookresearch/xformers.git@main#egg=xformers"
+            "xformers @ git+https://github.com/facebookresearch/xformers.git@main"
         )
 
     return _install_requires, _dependency_links

From 3553172e3cd98beca1fc10406648a2c9af5ac63b Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Sat, 14 Oct 2023 09:27:07 -0400
Subject: [PATCH 74/77] fixes for alpaca w chatml, and don't include
 attention_mask w mistral for flash attention (#728)

---
 src/axolotl/prompt_strategies/alpaca_chat.py | 12 ++++++++----
 src/axolotl/utils/trainer.py                 |  4 +++-
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/src/axolotl/prompt_strategies/alpaca_chat.py b/src/axolotl/prompt_strategies/alpaca_chat.py
index 17fe69be7b..975fee889e 100644
--- a/src/axolotl/prompt_strategies/alpaca_chat.py
+++ b/src/axolotl/prompt_strategies/alpaca_chat.py
@@ -1,6 +1,6 @@
-"""Module containing the AlpacaQAPromptTokenizingStrategy class"""
+"""Module for Alpaca prompt strategy classes"""
 
-from typing import Tuple
+from typing import Any, Dict, Optional, Tuple
 
 from axolotl.prompt_tokenizers import (
     AlpacaPromptTokenizingStrategy,
@@ -9,9 +9,13 @@
 from axolotl.prompters import AlpacaPrompter, PromptStyle, UnpromptedPrompter
 
 
-def load(tokenizer, cfg):
+def load(tokenizer, cfg, ds_cfg: Optional[Dict[str, Any]] = None):
+    prompt_style = PromptStyle.CHAT.value
+    if ds_cfg and "conversation" in ds_cfg:
+        prompt_style = ds_cfg["conversation"]
+
     return AlpacaPromptTokenizingStrategy(
-        AlpacaPrompter(PromptStyle.CHAT.value),
+        AlpacaPrompter(prompt_style),
         tokenizer,
         cfg.train_on_inputs,
         cfg.sequence_len,
diff --git a/src/axolotl/utils/trainer.py b/src/axolotl/utils/trainer.py
index ee8c634966..820202b80b 100644
--- a/src/axolotl/utils/trainer.py
+++ b/src/axolotl/utils/trainer.py
@@ -423,7 +423,9 @@ def process_datasets_for_packing(cfg, train_dataset, eval_dataset, tokenizer):
         )
 
     # Phi doesn't want the attention_mask feature when training
-    if "CodeGenTokenizer" in tokenizer.__class__.__name__:
+    if "CodeGenTokenizer" in tokenizer.__class__.__name__ or (
+        cfg.is_mistral_derived_model and cfg.flash_attention
+    ):
         train_dataset = train_dataset.remove_columns("attention_mask")
         if eval_dataset:
             eval_dataset = eval_dataset.remove_columns("attention_mask")
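To illustrate the dataset change above outside of Axolotl's trainer, here is a minimal, self-contained sketch of the same column drop using Hugging Face `datasets`; the `Cfg` dataclass is a hypothetical stand-in for Axolotl's config object, not the real class:

```python
from dataclasses import dataclass

from datasets import Dataset


@dataclass
class Cfg:
    # Hypothetical stand-in for the cfg used in process_datasets_for_packing
    is_mistral_derived_model: bool = True
    flash_attention: bool = True


cfg = Cfg()
train_dataset = Dataset.from_dict(
    {"input_ids": [[1, 2, 3]], "attention_mask": [[1, 1, 1]], "labels": [[1, 2, 3]]}
)

# Mirror the patched condition: Mistral-derived models trained with flash
# attention drop the explicit attention_mask column.
if cfg.is_mistral_derived_model and cfg.flash_attention:
    train_dataset = train_dataset.remove_columns("attention_mask")

print(train_dataset.column_names)  # ['input_ids', 'labels']
```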
From e1b214c62b24594465d756452008e83f5bd82c68 Mon Sep 17 00:00:00 2001
From: Casper
Date: Sat, 14 Oct 2023 15:28:12 +0200
Subject: [PATCH 75/77] Clarify custom format example (#729)

* Clarify custom prompt format

* Simplify format

---
 README.md | 23 +++++++++++------------
 1 file changed, 11 insertions(+), 12 deletions(-)

diff --git a/README.md b/README.md
index 57447b36de..c70abf6480 100644
--- a/README.md
+++ b/README.md
@@ -297,25 +297,24 @@ Have dataset(s) in one of the following format (JSONL recommended):
 
 #### How to add custom prompts
 
-Using yaml. Example:
+For a dataset that is preprocessed for instruction purposes:
+
+```json
+{"instruction": "...", "output": "..."}
+```
+
+You can use this example in your YAML config:
+
 ```yaml
 datasets:
   - path: repo
     type:
       system_prompt: ""
-      no_input_format: |-
-        User: {instruction}<|end_of_turn|>
-        Assistant:
-      format: |-
-        User: {instruction}
-        {input}<|end_of_turn|>
-        Assistant:
+      field_system: system
+      format: "[INST] {instruction} [/INST]"
+      no_input_format: "[INST] {instruction} [/INST]"
 ```
 
-Using file:
-1. Add your method to a file in [prompt_strategies](src/axolotl/prompt_strategies). Please see other files as example.
-2. Use your custom file name as the dataset type `.load_`.
-
 #### How to use your custom pretokenized dataset
 
 - Do not pass a `type:`
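As a rough illustration of how `format` and `no_input_format` templates apply to a row, consider the sketch below; the helper is illustrative only and is not Axolotl's actual prompter implementation, and the `format` variant with `{input}` is a hypothetical example:

```python
ds_cfg = {
    "format": "[INST] {instruction}\n{input} [/INST]",  # hypothetical variant for rows with input
    "no_input_format": "[INST] {instruction} [/INST]",
}


def render_prompt(row: dict) -> str:
    # Rows carrying a non-empty `input` field use `format`; the rest fall back
    # to `no_input_format`. str.format ignores unused fields such as `output`.
    template = ds_cfg["format"] if row.get("input") else ds_cfg["no_input_format"]
    return template.format(**row)


print(render_prompt({"instruction": "Summarize the text.", "output": "..."}))
# [INST] Summarize the text. [/INST]
```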
From a045db02146751548fec57a5d3f31382ce4e5959 Mon Sep 17 00:00:00 2001
From: Casper
Date: Mon, 16 Oct 2023 21:13:46 +0200
Subject: [PATCH 76/77] Mistral: Sliding Window Attention with Flash Attention
 and Sample Packing (#732)

* Implement Mistral FA + SWA + Sample Packing

* Handle unbroadcastable tensor

* chore: lint

* Simplify _prepare_decoder_attention_mask

* Uncomment window size

* Upgrade flash-attn to minimum of 2.3.0 to support SWA

* Add original condition to avoid error during inference

* chore: lint

* use torchscript to prevent oom

* chore: pylint

---------

Co-authored-by: Wing Lian
---
 setup.py                                      |   2 +-
 .../monkeypatch/mistral_attn_hijack_flash.py  | 109 +++++++++++++++++-
 2 files changed, 105 insertions(+), 6 deletions(-)

diff --git a/setup.py b/setup.py
index ada5fcb289..e3ee54350b 100644
--- a/setup.py
+++ b/setup.py
@@ -46,7 +46,7 @@ def parse_requirements():
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn>=2.2.1",
+            "flash-attn>=2.3.0",
         ],
         "deepspeed": [
             "deepspeed",
diff --git a/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py b/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py
index 21a6ee0842..26b511d06e 100644
--- a/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py
+++ b/src/axolotl/monkeypatch/mistral_attn_hijack_flash.py
@@ -14,6 +14,9 @@
     flash_attn_varlen_qkvpacked_func,
 )
 from transformers.modeling_outputs import BaseModelOutputWithPast
+from transformers.models.mistral.modeling_mistral import (
+    MistralAttention as OriginalMistralAttention,
+)
 from transformers.models.mistral.modeling_mistral import (
     MistralDecoderLayer as OriginalMistralDecoderLayer,
 )
@@ -42,6 +45,44 @@ def replace_mistral_attn_with_flash_attn(
     )
 
 
+@torch.jit.script
+def _make_sliding_window_causal_mask(
+    bsz: int,
+    tgt_len: int,
+    dtype: torch.dtype,
+    device: torch.device,
+    past_key_values_length: int = 0,
+    sliding_window: int = 4096,
+):
+    """
+    Make causal mask used for sliding window attention
+    """
+    tensor = torch.full(
+        (tgt_len, tgt_len),
+        fill_value=1,
+        device=device,
+    )
+    mask = torch.tril(tensor, diagonal=0)
+    # make the mask banded to account for sliding window
+    # NOTE: HF implementation is wrong as of 14-10-2023 for torch.triu, needs +1
+    mask = torch.triu(mask, diagonal=-sliding_window + 1)
+    mask = torch.log(mask).to(dtype)
+
+    if past_key_values_length > 0:
+        mask = torch.cat(
+            [
+                torch.zeros(
+                    tgt_len, past_key_values_length, dtype=dtype, device=device
+                ),
+                mask,
+            ],
+            dim=-1,
+        )
+    return mask[None, None, :, :].expand(
+        bsz, 1, tgt_len, tgt_len + past_key_values_length
+    )
+
+
 # Disable the transformation of the attention mask in LlamaModel as the flash attention
 # requires the attention mask to be the same as the key_padding_mask
 def _prepare_decoder_attention_mask(
@@ -53,11 +94,29 @@ def _prepare_decoder_attention_mask(
     sliding_window,
 ):  # pylint: disable=unused-argument
     # [bsz, seq_len]
+    if attention_mask is None:
+        return attention_mask
+
+    # NOTE: attention mask and sliding masks are only broadcastable in certain scenarios.
+    # Without attention_mask.shape[0] == 1, error will trigger after eval loss but only when wandb is enabled.
+    if input_shape[-1] > 1 and attention_mask.shape[0] == 1:
+        sliding_window_mask = _make_sliding_window_causal_mask(
+            bsz=input_shape[0],
+            tgt_len=input_shape[1],
+            dtype=inputs_embeds.dtype,
+            device=inputs_embeds.device,
+            past_key_values_length=past_key_values_length,
+            sliding_window=sliding_window,
+        )
+        attention_mask = attention_mask + sliding_window_mask
+    else:
+        LOG.info("skipping sliding window mask, not broadcastable with attention mask")
+
     return attention_mask
 
 
 def flashattn_forward(
-    self,
+    self: OriginalMistralAttention,
     hidden_states: torch.Tensor,
     attention_mask: Optional[torch.Tensor] = None,
     position_ids: Optional[torch.LongTensor] = None,
@@ -91,10 +150,41 @@ def flashattn_forward(
         query_states, key_states, cos, sin, position_ids
     )
 
+    use_sliding_windows = (
+        hasattr(self.config, "sliding_window")
+        and kv_seq_len > self.config.sliding_window
+    )
+
+    if use_sliding_windows:
+        window_size = (self.config.sliding_window, self.config.sliding_window)
+    else:
+        window_size = (-1, -1)
+
     if past_key_value is not None:
-        # reuse k, v, self_attention
-        key_states = torch.cat([past_key_value[0], key_states], dim=2)
-        value_states = torch.cat([past_key_value[1], value_states], dim=2)
+        # Activate cache slicing only if the config has a `sliding_window` attribute
+        if (
+            hasattr(self.config, "sliding_window")
+            and kv_seq_len > self.config.sliding_window
+        ):
+            slicing_tokens = kv_seq_len - self.config.sliding_window
+
+            past_key = past_key_value[0]
+            past_value = past_key_value[1]
+
+            past_key = past_key[:, :, slicing_tokens:, :].contiguous()
+            past_value = past_value[:, :, slicing_tokens:, :].contiguous()
+
+            if past_key.shape[-2] != self.config.sliding_window - 1:
+                raise ValueError(
+                    f"past key must have a shape of (`batch_size, num_heads, self.config.sliding_window-1, head_dim`), got"
+                    f" {past_key.shape}"
+                )
+
+            past_key_value = (past_key, past_value) if use_cache else None
+
+        if past_key_value is not None:
+            key_states = torch.cat([past_key_value[0], key_states], dim=2)
+            value_states = torch.cat([past_key_value[1], value_states], dim=2)
 
     past_key_value = (key_states, value_states) if use_cache else None
 
@@ -120,7 +210,13 @@ def flashattn_forward(
         qkv = rearrange(qkv, "b s ... -> (b s) ...")
 
         output = flash_attn_varlen_qkvpacked_func(
-            qkv, cu_seqlens, max_seqlen, 0.0, softmax_scale=None, causal=True
+            qkv,
+            cu_seqlens,
+            max_seqlen,
+            0.0,
+            softmax_scale=None,
+            causal=True,
+            window_size=window_size,
         )
         output = rearrange(output, "(b s) ... -> b s ...", b=bsz)
     elif query_states.shape == key_states.shape:
@@ -146,6 +242,7 @@ def flashattn_forward(
             0.0,
             softmax_scale=None,
             causal=is_causal,
+            window_size=window_size,
         )
         output = output_pad_fn(output_unpad)
     else:
@@ -157,6 +254,7 @@ def flashattn_forward(
                 query_states,
                 torch.stack([key_states, value_states], 2),
                 causal=is_causal,
+                window_size=window_size,
             )
         else:
             (  # pylint: disable=unbalanced-tuple-unpacking
@@ -191,6 +289,7 @@ def flashattn_forward(
                 0.0,
                 softmax_scale=None,
                 causal=is_causal,
+                window_size=window_size,
             )
             output = output_pad_fn(output_unpad)
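For intuition, here is a minimal eager-mode sketch of the banded mask that `_make_sliding_window_causal_mask` builds above; dtype/device handling, batching, and the past-key-values branch are omitted:

```python
import torch


def sliding_window_causal_mask(tgt_len: int, sliding_window: int) -> torch.Tensor:
    # Start from a causal (lower-triangular) matrix of ones, then cut off keys
    # further back than the window. log maps 1 -> 0 (keep) and 0 -> -inf (mask),
    # matching the additive attention-mask convention.
    mask = torch.tril(torch.ones(tgt_len, tgt_len))
    mask = torch.triu(mask, diagonal=-sliding_window + 1)
    return torch.log(mask)


print(sliding_window_causal_mask(tgt_len=5, sliding_window=3))
# Row i attends to keys i-2..i; every other position is -inf.
```

With `sliding_window=3` each query sees at most three keys, the same banding that the flash-attn `window_size=(w, w)` argument applies without materializing a mask.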
From 91a016f410f7e9b65c05cb829ea2c39ac4917568 Mon Sep 17 00:00:00 2001
From: mhenrichsen
Date: Wed, 18 Oct 2023 16:21:34 +0200
Subject: [PATCH 77/77] badge (#739)

* badge

* fixed text

---
 image/sticker_fixed.png | Bin 0 -> 379330 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 image/sticker_fixed.png

diff --git a/image/sticker_fixed.png b/image/sticker_fixed.png
new file mode 100644
index 0000000000000000000000000000000000000000..0f5c67a1922120993bd350eb1213e2d331478b76
GIT binary patch
literal 379330
[base85-encoded binary payload for image/sticker_fixed.png omitted]
z&4~lIVtgkotXDbbX3n8Z1xNEu#LNt~uSV;gbe`W)XnbC9<3itNA5N5%&$>we%jJB> zh-QIIPztaTV|amQ(}l##XS&tmvmw9x3WoHwOVJ zoTcmMTfT+mbHCL)vg9GnLe}I_n4XiP`EYA^z>tJ^&FI=v-OG5N!?xz?)*|CIu&2Z8 zV4`dG>9k{fVpD3(=k6rQ(3jq%W1z8(jHi8jOlbXSW7UqrbEF{E_o$uH@JGElTFL0i z=DV|!-no=ms@ei?R#^R!7w-;1ZiQ(EPTS_!bTF1~4+QW{|8R1P?(y%J*5%6nP_&qm zKEMs!9mzP?+wRt*mC<Hs5t$WFaq4Y4vr`cKk2f>w4LMJ_!&J-+pRFyqsX zk5Pev%gRl&;fv!=(WV4;mNovA%XbtE!IPgT(}rF(G+W}G0D0s9Q7M!mx}c-3K)d#b z@qes1vymuq2ORqRAWM(UOCceKnZ6?W)1RQe{VR(`OP=fAv8cBq=YG-k3#H6sSfuaV zx`#Q~oSL{f+Ki43#9k(*KWJ)#*>oz6jHkDdC_Ee2b}iU+H-KQ>;rzJ&Dv(4gV?ot% zjDc_9jat^1!ttiKHD{G3&bZGoK@aBrUtj{kR}*KH!wB4>Kn?5QjYhDE{`BDhIe_LU zGzmZImrB zT!3*M+;QKdy1yf4fsNISoA#2y>V(X!!1>u(Z>STJZR@7&cZ{3gC}-{mV&6qyuY@W} zl=BV#Lht_?QhM4*@)unm8AAU)X{Xl>K%Z1~`JV=re!Q+mcL=@yLSt`yh(MuK_W3q7 z?CXAXoBS*w#(x}=`Fu1J*Wq+H?%UeQc4Y$*E$ag*{>K|)G65(hzWr)xDM@0#^Cvv4 zuoHFDG`h}ZVZ!*qtNmHFu;~!3pyQ@|G6wAJE$>S>f|Fp&RuRegLfHm>he3 z%JB)z=QVf^+lcScrHc}?%@ih?&9XEN1_8kSR zRLsTAN~`nC7kx9lEw3MsU-qWf+b=I!w_e7_+fLC8pN6g~+MO?52Qq!<@|BLhA!3le zx1DxqIju=5R;FxyHST!nIfL1u#VCE~_*?Gz>VEpL&-R<|E~@4OW!+e^urzh4Rg3el z|0>Feu}pzUhxeV8&tJjPWq%s&uOKq`oF%Ni9WPNkJw1Hx&I6c(7@2KtcH|oecT_*Q zW)J)J^6V*iI>-^Y&YKcfvrc~vao_tXc#be{7wG3Y){RZv#{|cg%c0J4cNd2(oix2| ztqpe$*>q?5J-(l^NZfMt6^*Lzie1lJ3f#>J6C0kKHXqxCQu+`hl_kAJR&GMA*_(EL?d5DR;mN*Sr;zp?1>;kqNy}uv90Jv5+wTCJFyK_# zqv1vx%jiU1Gr5SckcZ4PfQqw#5tkXRbfOax27T-YVu@6qwQn!wzuJ1$;D`B8UuDEo;uHl84{HI{gC_xYnCAWC+Zwo87{~G^JsSxz9V?aQ&1_1x&ipY$N>|=4f z6Z`z#$xGXv1U9i%G}i$fA_m2b7dvmIfeaL%L1~{Oej(1XC9N+(lociup3y-IR8u{i#|bZ^UGq&(KnU@nFBDMw3Py=?cn;fDw_wC0P=+p+S5~5Ypb>@F3X9$q&-gYyGR(i4o-G>3FYku#H zEu*M&e2S)|LNdKJn;6&Jv?iWN8ep~uqauDl@nOrNJaGz!iaaB8jaU=5zy!2AS*&~= zukHy7=^xIeAcoyXMnH1r8MI!k@M2RoJld_)8QfWwgpqV^k}L zK;%Q2O`QFaOymvX3pgW8IEKD}z3w1i@KCV{%0dM(UXgp$-W4<4s$koc439oiH@|ZT z{l=D)5sscpWl0f41D~^@8lj6i5fnnA^s!PMKR>`ZJEw`$15O>GQ`&4!oq@+-j@H5i zREK_WG<3M2$wCXsvldsQCdQXgwv-d%dB6)y9`qX-I-!McuGU=qoz%Lun=@sMr;|jh z794LdT!cNXp#`sQ5sMlY-q<7RsLjQKh>pw(uc+pXXU;&AJHjH6Suv#PHd{JuJm_bU z>3f*LNAWel*rHoF@0#=Wa7FW%f%JNH1Y||aRnK^Fb#eR*H8Kt_ zm#sAIwKpW1HH>B&Vf8+>>Nn}_@sX~-6lb0|vKPqIxEauCZv1zfLmo3fyPhmsqzO3h zgy}pNkXgSo*$I=U&4fv5Bau{7MVuV!UDmoEgsfre*5CQ2e9K?no+o_7YB{Klj1h$1 zi>*EGGYr0b5Ota5(ayYhIm03s`1^X=bhmYu>2#dX(Q(~xNbdG`o?Ybl*E$q>cjd@K zu@x3Cz9hsu3RCdT@mwy$a$Icj;CrO5l0d=uZaGv0^Vty}*WdJ7XOo7=)aLh}F9%B^ z>O#WsSs{OPA|EUK4Ooe@ACx)vR$_)>)lC0t06 z_f={~R1^QVC`WU(-K&obt1rD$7_nZdpb@jDu4H9wcp9JU1Fj@PmqQ@ZU$lU4d>=jl zsox(??K6(LU5pNeeH^QlEIy-MNB1mxCj!0SR!U?WmUWM5%*g>|r_{J@HpGbBc8>d; z8p1ARkGdA8?ROc4QIuj|r^xmVUV7}_c#bs0h(YhUKQzMthnT0w;%Eo2bze%imlM9@ zpD!@~V^NE>mCYWxkz(BuaEd0_?4u~3``iwos}iS2uA)Iz)x^9+27_cy6&BstS+KW7 z1HCSE6?{C%9rSyvXe8e@s_T{$+|l?#*{i8yhyjRsuqGA`k(; zPZNV8t;#o2czEW zui@h+b|DI($goB4?A3mI0fV>k@fRhBe)V;uk}66GtLfB2i?!)Til64)2ueg1 zVqVuNp}r0E#Xfu80xi~6)BB<|zBb-HQ~mwdv5=GpUBmlpAW`~Dt~O(<$53&h@MW)5 zVVcKLa7X*zFRP07+b8WGide#ja_gSvL%j@*I^@hcMg&oHup!L9GElXIJ%~G1k-m+v zU!Y;*;O^ztF72Vo@WC5mWhyYiMPZuP&_s41FpLA|<5sk@7AiD1S~PD5I$}uzKxX68H#|w zjEXPh#2&M&f(VCfA6aAPiaN|Vlrx}P`rtLKPSQiHn03?w)yy^GUDBFGnCf)16>Y4d zShwEjBFA3}@y*2|Z3&__M{$|^Ws?P#*yAAVHmN->`t%oNKmDNhtaZQ^7I6o<+8N*P zH$z(%>{`3}`Jguq9rw!|?83VN%yH|F$CuSC` zlXC9Kt<;zxW;&V*0~p6f;9v1K+m8tYYE&tSs9#KgonuljQ_OfRP@r`1#C}l`5TR0> zqRpTBcot_emRicjC`EfrPEA%w^P1Q_R#(tfLOw(SRt2Z(8h^!rs0wTo9@W97Hc3Ap zYZW04T;|d5$iBM6%!%EJ@NhaIIf{8Xeq6j7xIb?Rl;-u2xKZi`4;+?Oo))C;K6o%- z@$%+Gv=K~0S8Dc_(k#Z24X0EH-m|aB=cMRRnU>XOmc3P6_2-TB{XE4YE)PY z`I1TAsh!pi=>9d3VM!m~dk0Q%h1q(|V~V&HzH#}~>`cNnYS?;SE%tIhpJd>BpB*a0 z-hLgyy2m@Z{&MxC!QSw&fYCr&y*ToiZ;|PKh}Ge_m;5vL)oYjyoB*dPaw;VD1SQ~G 
zbsyGB^#To{!eoJg$I-&~Ey9=$BlH+;YLY%gpDk-%aHV=DG0utw0olT_2`@ zj_top>+X#%sa7HfR9OXNPVv9oymSn}_{ZVImE|H-e?Z=pKNlbiWa11Y+ljTatfkVz zmsbvH+XgbZ04%>2|D24$?xfs{CouwtZ#PvKHvKuN-Zr!>*k7=9ee@liniB&S`JNV-fK>l7b^EMGTU8}C&bZPyVlMt%*BEf|`T3PV%=-vk3Bt_*W-pFiL1S;VfbrRM-$78$nU zS6P^bp+v;6{Zh@BmIg-ykte3=m2F6>I&;l0Tbe=&Sn}j2F^>lhBdk*Pd}YpZ^2LsO zdaZa+4+*xb9~XeGT*@_<^jZjtmIVc{>2q$l11I zkRNbpomS0+ZpQd%8l?LpJlWbN0B|1ua5IYCPKgu2_xdrRPz}NJwCR&c7uoBo81*G<; zbZ`NmxhBU~(C-!jvz%+4c;G_tI0{rY_RIl5J_q%j|0Q+wC!P8c`RU_kLSQ?p1d$bp z;*FKB3u!RNiDUaEgsj}=7bj0oEkr5=pAuwplmPSj#OHG3Q6a$J%0AgA(0z{p{ULXj zx=G||j>O1%5|m{{_v1?6w&{UD`*Zt6;hardmCE8!3Wy?XgF(gjB->&;_$C4W7ig%> z$I8*d@b69Inhf~>nM35G?Ni!FQNgw!18wCJRc>wTD`MM>D8wt|00V@+|f>DVRGKz~k;QZbz z_%Kw7J^}4k^H^sFkmJuV<`cPZ%$q+I>j5?W#8xehFIk}8bHy-IyV**~lpwvj1SZES zZ3KdZ=%gG2Tt5_0a3HrVN;j(UrTX92yy>q!J8iC2jHbdpdL@uGPsCytk}!E}QX(|w zWaYn?#>9yJ(K0~(Mv5bY{`$XCa>g0}XL}=L2|os+Cq<|6=@_WRW!w!mBMmlnWrUp& zIw}&7ipdDw{etCpanT?jPc%u*Sbma0>p+GOQ6#iC-GJN1laWQw9bL@5YsP{hKdH&t zgDQx)H$pO+^OA{jSCwzj^4JUv;_lq>!Z)9CFg=NPU4xDQGn-&ldyC2Aq%53!f5dk0 z8I7Z;tSlY2ui@8k`q|vjx!HLNmwW1%wu=ScWxKm|Im$oqo0CtJi^=2dmveb`f6t?v zoc2Yp?(qUSjUvk$ybrRG*1n}ZKI>PU)ZbB@tqOP_v(-cYjQiGS8F!WIpFsOl<33Ql zt}2zcC%|vN}S$aM9Aj0NaL|Upc;N z;vU5!jQ%F0u&E)e@h03gLMO+T`fUPt^fP|_l?kp?&Dq~MEjEioy@=2z-I5IEx=I5h zWxZ3PAtfUY)rpQ^0cgQX#Fh^(aK@Tz20AMuf2Kdq@ggTN1rVE4$MVpynRr+R( zp_a5J*ctC%VwzHiB_10)MV{w#s9WpcPk5` zcdNrFc6U#Ouh1AiME^1fMdL27KBeP=G0|tQi;{|VL94maf4uSZt*ahjlsExs+Qn#! z(z?h|%UtsES!-KU?8Tbr*~`_k&&4i>uRCnNi|@l78~FnFK;}aN2j%t7a10mI{*~*> zO~VCajM!eX=^oz*oaHcEgK`)2xw#}pU`r=p7F-@@QbtQnjRI7ZmsbP#Aa?RD+X|VF z5MPvxu4+zh%T4{JA*KBdV-2Il}X(hA-t7!D$%3SIo2%b-Biak z7}2A7)+|9>fKr|&eq5{C7CP?<~Po{y=7$GM{py> z`qw2bw&fCOq4ERmP1r}msPf>}7!0!DC}Pnq{Qf7ZC4&JCzQl{yT(mKfKn3sSIch@JL`L+rJ1FW zM`du|PeXwrc$b5T>|(0YzG@^&-@Jk(Ssoo(O8Wy7C?7lr~Z zEjJB5=YL+2&uhpkydP66d_1~l35p@kg5+S4_wW#9VnEOb-2ZSBO_0qkz`FG!1k&6b zrHo+G*k20NRT`p!w;`dFOV*)c!($nsQ-$ZR)Xp5Op~;?42S@^_rsj~9@olh)>5xFbkAqkVDa4NZOIF|Zn?&=vY)7MD>`wTCF`84eI$wl%ZK4w`{^{;C2>>~*)pJLN z&waGO*&Y=a)#~IfGI`4<7UfL)ZWLIXmeRX4+MH(H8RnmV(c2+b6nO-h(J{Fu2}l@0 zN&tg<F#mnEcxPeTMd9hR2Ctl}2j6X|r=-0(45<2@0 z!vwdg;6!~l?okaMdUh3Bcuakw&+NXQq{uNLBJ2|1| z1Oi~4J)j>~D-fhBeG{Fh*e?zb4>k*CdBopOhwxAx^(peRpqrbPVrbSm)yZ`+If8W5 zVpa3s4f^y3^E?&FoxRq$652b^EmMesS4AT9{Tgw;yHcP2_p5er)!stnqC$y;K=;p+bAylr{BUx@E+&KW&Jltx_!4r-ABofZ z{jkR)e+3bL)z7+S9iKE(>|)uJ`V^&=yD6ZW6BO(dZC>-`eLrLGRoFfgq31bADrVilyYzOE!_b`fnHK*0 zY?}D*kK|$~2-ws={mMhaqI5o*5?~ShEdmEK4YylrW%d*X{3PH`A{k}g?vEi`_l=^m zZ+pDS;PTX3YZb*!(D>r8urN7##CLsDmKd-T2PIng&F!@pOX0n5r$dYtl9rEZBhn;x zZKKnH>0Q(KB`P|`gAh6c9}lOJHfw+0sQ{NumqiRkU}9n-0U#hCC?xV{&`L7|iF?Q^ z=iqK~6;wYrT?bR$3(>$y_(_+{vglE-s^#L)U6OGFP>BKFW|(3=w=;CQDw;Ipitk8N zw9vNBTf*!M?!Wvr3Rg6Ybl`efL{q3PSQ{lzaU_c}U;oWB1TN2;`o{jb$!qJHdBkay zrEV}qh-lCL<~NUS(L&dsD!1ET6eMj`iuTS<*~G^FjM87U%&oVOx~b!=9qw*n$o<^b zA|duyk7-Nv=NsPd-Zfc2vSK3rn_}>$M>JO)OxBS&^(3@_uiv9}mxj(b%CndhR7m+2 zt4u>18W{UK+Q*bYVWF}A$I~~k$Juq^PHZ-4Y^$-^*l29qwrv|t8Z}NDPHfw@ZRgDU zopYUkFxNbLuf1-pd(AD48#=nx7rd&R_Wzg~5i*rtL70l!#F|-caZC=4#vTii&1HZ1 z?0Bdcr*SA6TraR95y5|k;&UE1GEchCI7((@0A28 zPN$f?yQ?1%6qE#}sHoVT(V#(3D|~UjizpbD@wi&GQ;o1H5`l`P;jvIV#S=f!55F~sjKb$%E}x>`7@y{(3$qP#%2&LuiO&zNAM=( zBF(Q3pQb|jR9E*rejnVoS;#}*()fBUG6-g?TKPj7=+rDSwUwZq|NjLOB14$f>y!to zb=ykD6lSH+;>NHL6Cx^je#g1ty|P&2|w1rg+sRE zSxZNn8~5;+V|-a7AZs-JhIV4p?+vX8UeXIaCbzC2DQENsOtiGm`E?PuKycMO;K@s z?i@v0SXg)m1)~>zWph%mVy>5JmOjG#dsR};pOOFp`7v|t_pR~k!t07!%;Ysdlfx+y zV!?iMSw8cjs1kV6;V>2AH8(((LI|>3np-Hw{)lvzPydy7K;W9wJ#+DcREYbTbe__t zm|w8R&5#=~j`bS@3OO_)SU`8_{pkBRzkcWc!}K1gni#OC3{h 
z%ike}mzAzeOV>J#m!Rk@XWCT%WL)nbjFry(u4kAD?c!p3K127B7StPqkU*~Z%SN5U zU#dj`ARRFyQ*(wC8gL9Dt7kA4?|S%6ITzpgPu6s;krL1i@iZ|dAlr3=NEXuG&O)!* z*5Y(<8xRn9jUwxx)h1k?2Oj_zlMcRn+3upseaej_180<77>sIlcYDG_mq`7x;`%)K zojvK07XmB>KR1`dehru43q+yM1Ot)p?|^x{@l&UvbeVVsh8FBrxqpRzi}Vd4=W}@l zsZ{`--{J<0vVIu3NItLWC40wTu{(aG$;lisCZgUGu>-(Jh~dCF|MhJz@GjOSkw{d@ zn@4gWN{<8ww9KaXZ90Fv7Dw9%&a)`@TTKkBCgMz?IzJmXI zF+=Y&<}eD01ZIi#V!5S9x~hz!lZ4|Ui|gNKq(8HTris`~0??DR#3@LuiXnd(Ib?kM zMO%r6I)nzPR$KCE6*XIFBoP3QMf*%b^V_5~iDE1b&9v2SqYWw=qvh(Wo6Z{x>x@0x z3~b&~byi!rAHPJ1ajW%~gSK8S{ITQhzSZmF_4iL0qL;U^@3iVQYMn*{2uDogy8JLI z4tEY$GxhExNyW;DVHN3&5JJDk-^%Aopl!0A&%=(>S9c&NpC z{g5deCd5j38c+&_2@`?3V#3_l^$iM245FZ*K!_&vUqJ!(d?MvH1xQ&iM|~eJcz?lJ zCaP^^g=7eM`*`Z%!hXlUT~*M;jqP~F3u@>PMt?3OeO{!-6S`?5>!TDFemP*6NGHdN z7g6(YdniZ$f!>5S(!xbtv)`ujA>6@W+WP84foR)FPA^Oj2g!_3{6TAsy z%frJGD=eOK*PD6Y(i<$zblBptGpN+N#8+?V;OBA&#=oGto=m#JQ9}g@PVEDm+NfTnsnM?UGJ6hVpM>yQSLwW^!<)$k42S4VVU)1wp+G3l49Nqu~CH!X3E5r z=vC{O6|`CkjPDK~@Yr<({eyFJb)}_8`whQ&HcDUZdf42BHu#m6qU0$z zS}kPh;cTv|TW&l?G+#0139J<>c{>;Oj7vJS^*gcq+r zY`b@Y!vXtr>eaG2Y&5jAe?`la=oTw;CcX;*rBi8vlb4_ks7_CM{1XO$h~Z$*a`8ol zC}%{#U>$s45wBeyUoEc`ScW;a5O2N+c}0bVM*tvja`QQl;unZLOH2L5LLf_8ViFui znq$=}ObZDy>|g5_NAMe#7#3Uv_V|ho1`wbbc+%Y3P0SDc(jc?wrtc9%dv(>w-Gf6{ zvJ&G4k3sv50bm7mo3Oyqrjkd|%>NZ*3yxmPAS|j>Lj#PP86QZe-W`rGX;z}V6X6KU zPJrA{czPiW1j42XGW$#<-&%b_;iZ^~atH{{e$`l#r#;z|*7LMZ5el?k^d`ch)@4qv zW7`f+AlOLpHaBAxYah&w48YytFq~EV;3{{*#^!!P&!}(j%2Z2lL|Gu2Y}1 zp=aIOxy)jcK>q3T-mZ!OgM_QsZc-<9ccg&F{d&IAfag~ISF8H4FMW!23&?J8JPj8; z^kb?@s=QuZ6U$Jx|nIhqyZ_NPn6@pGlC4=Xy#9K%m5NR z0P(lXR@p*w9&YXvS01G_vzm;t-e#T-Mw!F$bLq?FM(2gUs*IO*nXKj<(TnDRf#}14 zfH~!W-|@nZ3iek;5^-H#IE!6V1${8cFp!WY4r2}`UxAz?hFHlfCB)t2wht|^NpSQt zGc)Ji)mtr}_a}48rei5pzeom*%s4$}f}oD3a&kG`!t7&Dt#TiC2N2dc>}?m?LuV3L zTp#EAB;r6axbN$|+r_FJ0c+|D2aBQ`mi1C4jn=;iDEbr2M3=p=Eu=1M8u7jhUiVw9 zVP4PSeaF2av+GBT^)}5|uZ6pV``<|eZY5nm*K|VAhHgA>pYP9>YRsx=Qvf6dS5k{}2vVH|b`OJ~1>L^d;KTB~K%&^Avxy%Ldp1XMjl%x;k zb0tt~ITWoItBq}az=MOEFPHsJ?@t~#crX~$Y*zO(L5nk28g0D|`tK^#Yj}8g%v`4u z$rLWhC089yBt&*$00NsC88t$m)f2pe9*&%w=vUj@%C)j^@O;pWBs4oaeQy{En17a2 z`b}yTQB4516G#8`=Z*jj42&OxLc;Zu5k|6LGhH^H*x>ix7o{NQd874`lsXhE6l{DP zvTwy9r1VMGC>(h`{V4iCgnIeBR%~$5-lea1^VRCf`7TTKmj#u?kjKy43keiNak2`D ztF5jt&`-VOYe{GKZn`xcMNgQE@*oQld;y?hy#IAd%~B(af{%B-yWM4@ty0sj(F-=O z(G#NrMZZZB``Z1G_Sb_Kg@eL#B-&-t?ZQgO=rUWf;zUs|vJvol48bHXnz?2qk13`p zOcw0VR*5M*kn_k$2f{Q)fIG;-)1%$?WNiOg6l{0&zI(JKO$9z&;-C-0CJMj)7D|Vp z&LKO>iA;9UdKlHM&`3>e4hPyW*G9NXdMsxz!-|L!0(_M|jGh+^ngq7ubXuM2+S0KY zv;a`j0B0NFRn(?V9cS=sc^|KnG=pMfttxmWQteM!;jT$NgotB;bACS7d!iYABgWzt zbfRy(ZZ{gy1G@(YCtuGuq1fp}kbLse%;P$&ck2UpAa~y)sc!%%B zoqnZle>w|A(hvR4Yis9c-x|{Rp!xOa_DJw3l|rYmG0axGm#fRwZo_m3J-o&Vn#Mh#<2+)my4_=6J3Hzids`Z=Glf=w0= z5!0nzo9Bn>5JYSMRxAPCO$^WGJ{?wW?<&?aFX%^6m*e2ig!^z2hBN}Y?U!%oP6L*G62x`GS)Gle>=Nw<1a40YG1pJSX45J;fI^FrQyVc z0&S=LDb7q$HIqQuu`c*XQ1r)K^1-I>xz$-2pKae^#IAXTXI-5AkR-R!4n0$&lu0e- zLxvNXXT~_Ia#;OJ2I#%Kz3T#1&c&Cl|KSRUa@*bRo~TNzRc{sa^hP37W)O0@R{q*C zBH(cm>h3`HdwG4ZMutQqUu)67jlC8OHiB1v=`Ch3x(!;O6Tipnh3{k-ITU$8c++hi z<>^gHOpcrEkO4z0xUM3B=HP}&EIYrr*l7JZWX?$hvPzbtQ~XV%*KYl#X=Ylz(N?Zf zI+>l$aJf-b+I=HlEYf30WbRL;>rC?kpUn^KD%(mM!9l+dW?vSo=BpSFqqetmBLarg*{Otw3Q7_LLWk%5AK_2*uM9BE+& zsjzsrFaK76)Ad#_$$dledx)$_$PeeE@%rOJr@51doUnA7D$N=-3(4U&0&URyv(;pK zMx#=~2kTYqHPU8OEMvRc*6Is<43cNHdR>krqpY=2zP>v;#8CNVG`^pDH;;EumsWkg zSb7#SlRPK^y;-2J=AJW;9>~ zUtcuX>DMs`e7ukcPljcwe-Y$+IaO9c?Baxnhll)_GWqYh5M7!5XF2!#XE^bD(Qqwd zPz>4tq%U0HSLTjGNP;-3vT^3IO3!P4dchw6ItdF@_t!kKBxJV*hjbw%Acr_aLc)m$ z>8xMS={N%+ow5G9sUz9=!poI;%F?9`>1?sVjYmaXzFemYH~J7A(&l|}q@ol&Pr%X1 
zapu$|ol(c<^R^Wd9qM+%?Wc_|E$8kz{ylCi{5Ni9lDuz_4fJMHsjT*&?>6~6iHVQ_ zfI@wgZ_l&^pDWI$uY2Tah0?Mn{Du+a2>4eQC#JF-?Vd!?fI$0T{PXjLAZdWh!_7f6 z+74+V1fgK3;|1;;1IW040~{dzS%O%CU1m42_pM&*q+|6x86aDwKsio>@ZE;;2(00Hw40gu*UpptNtA#Dea0M zFjHK=HT{`IhstnMnk&hwa8IyjQx%RDYtr0Q5Rs!%D5ZFKcsOAiHF`j~MH0=ei(Dxx zI=Vu;ORseolb?^Dvs&AI_qDEgEUn%g0LT^e|Ls<#b$2*n{_p`4Pw434k|-?CLT>If z*8hv>X3&F9k|qrU=}uGlp(lmS;a(BPgYX}yP)+V17DF^rYpGRgFb0o8D;BKa_nn8ordaynmJ8G(M&2^zbgJZB(2LYVImx_q3(Ji-f(`}lbR)i~) zu3?oTJ@mwh09ZXEX;2^lxVV;M3Hj@Vz=%luYQU8Ucg0>jE&hBVbcEdJ2$7!{Kasz-ml>Eo@u+s+gQg$1D{P!~sl2bWM5bT`@5&;(?uXm=-#u&R_+=hobL6v78idPWMkVrUY#oFK`=i+ zv+P__jY0pna(TuIEg(N{iprdds$ygu!KXwvb6IS7`;ysc7&Xl&gWvwLl`h@M^!7Kg z{|&kS$1gKv1dw1_>1r8`6^jNvUQ7q^c<`hI`MWNDYO`BP4M~`fSSdx|Z7QN3${!US z8b`Q(Wq5s<6e>Ebw$@)#vYAm=Jx+VlD+3oXj9q&%%1p+_=KKW#TPH^s{(wob+3mu3 zD6XMPx5;H?*|A8!6E>^P3wMsc*=|i7{N-{3CQj>(nN>WMTI-)xAh4dBNL>Qky`I3pveZyD0+Rn-(_5Qr?E9OkNap}nJH@-pYdUwNJ9a;T-qXE0i$wTO6qir#|w@oO#{~dQ>$R=W1)(T z`x7DpT^RpRH6)=+3Nf^o%0x;efCe>hubjTGxmHq5e$PYj+Vi>M`q(|uR{I~V%D-m? zUN=2I2g}dpR!x4^GcLIu@KCgHDutY1g&H??9YfN;%%lS1bb%VXN#Sxh8|AL)XnkQ7 z0fLU8rJgC^2Y_#{vF(xY<;oQS;l?=yZUBx;Z&12RDG;Q>Pc%Ywj05*UO2~ zOH=k*jW{YE*myfcXa-nvJl+J>^FLkRR~IU@+3g&6cEuKB>KlS}vzu(|G4Dq*6RvLC zc?HJ<;Q$xb?DV;;U&UkUnY<~Wfg2mF4SqfkA^W5)KM#3&T7;%@;^~^3BJQ|!Q#fOJ z$|SX-(+!|RgV_U^@64IXD?tO1uQJS9NY_kBxOsIlS2BOi5DOF_mKgPZf!sa?nP~fL znv1ln?Cc}7Gjj;?r-)Nwax`nHa%=g<8xqbxr-3LlhHR<&{v6N6s)xl7pi0Rk+t}=| zR8bbYO`XAJJJv)Nk$~I%{vN_6loK|MUdl$?IALC&Hj04n$vg14OwW}^^{=w+QFU%{ zgL($LF^5Vfk+!$N=DQ~l+Kf?ILzlP=G&f7@2pliv`L{nPgr!2fpkywTz zCKDilI`SGN^ruKAx7}S`l^gP7XXnCH4tJl|NeYv`;b83v5xPL_bW3{aQq}=ocnmI^ z@##N!(=ks^Z%ejS20YGqTxQb4pYQ!MDwm+YlOj_QmymFWXqzecz9>@-15idSAttd@ zuFgp*@ad`%5gsn#x`it%H}-h=b({!2&ocadmQ7iV)+i)c53>HGmhPx}oy$2nH`$o- zDuyf^Wh#xqI+MjT3X^W|&L^+ZGZ-AgeygJ|3moZ!BSm>M1rVLi=y~)0(xL8t;re>1 z3$$!pt>rZIcDpe9>?8Acdrv7j(huv#8>bBFUKwb09a9TOArf%?%xM5>-$C12%~y}y zlAvSmsFRMNgOPrdf>v(ST2LaB00Vz*-1*?oCP0%C$67m3C~-&0BX^{UYrm{O{^MT| zM)lAltA?NwT&dH*L&`I1!NJ?oR!cYa_gzftoaGN5V40ienIA|)YDMA{kKAFkQ02V0 zG7(WWtah<#;yfkgx!!869xDx6zaA;wsp@<GkeNV(ZeHyXvq@F zv5GA=vb3<0Ez4Ho zUq44r%W{r3W$8}J(w#o9deO77=+a}EJ^)JLY{|pLsy80y81vs>G-%M|pxEDngF#$&DK8FToXvB=%^jO)nr1^ov(C<7Z{ z2wV}KM{X6CW|EDP zYSJTT1OS#C$7old4_?t&7)uP<8B`$vi4h*V!!uOz5Yp<@OZLkJJh8a48GOua^s*@j z&~kX@6Uk$|D&jGY1N8^BvUktVKU9V4dc~ZMCJAtGm<`mr`a?oSG3n~c+t3p;{V4!n zeeQnX8Dh@PFZ)dm4NSkv6d$74S&xSE7~Ch@L*eg_w}$wWn_-hA`==HKKU?iE$>h6( zU`b&#N|%_6YqlEjdYFO;`C7LAIex>YD!pkKTn9Q!)GYD)e->)7Z+ZSan{+h9iN1K{Q~>01t$D?{oPMMrr{``QrP?bFDO;9L-t1 zXs1)7v7$^m4Z00gnjKnD;DgUglr1)O{@m_%`RvweZpX{WHaX zB9{USTAp#+Q?rIqekx2_QGPCn!%YVXi}Ft#?;GXGrA^fvn~lz;lN)UumW%g;6WPw` z^pSC~Gp7&kR~vZDJ|16S5LS!j8}2^&&n+#^*o^00ZbJ1MeTBX9IRZ+xlD}~~8`G|| z>P-5guqM{VwOPMB@14n1)H4ua9ok7O$9k$1`XEYWGY)r7OcNRSwsphZ1d!ky#2<^o zkQRbR4P<+}%_lP&{c>L+EsREC8gg!5Cia;mgoOhY8U(39y=JH&t%JO2h+^Bj6L`PT z!2OdHlmW3NB-&h##2DBv1#RZUadJ#j{R19&CM+r)FBl}X?Ox0`$hAbLG!Fd6yO(3IvSzGuvG2-3sGo=5*^z35(Y;IS=x9#i={i0Q=R#{@aJ7Qwf?gy#Txj?6# z%P{~TWVsN-e+=@-Gft8PW_e;Hc*zolK(Nhw#*Oj?$@|Hze|bjkl=FPLj4YeA(d*(g zd+J@|Kw;*cf|QI@zuohJl7mOT(+8vqNi%25>k4#Q%;$gvz5eMCC0~hL9@jS0=Er#x zsDx?tu0#L|5fS(8OB@2mE4yVjr|}}%-5@wI4t+&w4N?qTDe>dUyy?fJEwg>zkn~8- z#nE!%eT4+UM`bS|L~w~b-!^m2?k@t~Mkd{olx{MiFK(ZJn}^0?eLh$1&0a**?=P2! z&gPX%K?Y~;Q^wLiKurqR4^*hAmis@+ei=R6o^j>fD?}+nS{=DE&{Lpq%tD--`!hH! 
zRx4?ug0UeR^z6 zn@x+ofRNRO-kHU?cN3FAF(WqX5$NtL-36EJZWGNerrd4aY!4R+f3eXvw)ydfzK?;R zNGF%s?7|FF|LypjuwP%>NLc=OE``EyHY$J?3?kw}z{ns$kVm|{pdjGnYq^UNbo!)c zGr^w6qWn=seo^9%KV2>(`ISm_gTzhQGI+@2C(MozaIiQ)8jZx~APHPlz~}HR8bu_G zo#IiT-sa|;-|A0#WgVZK5vy{Ijx6f2WHSYi&wI^45h$2@TzaqrNW<5Za@p_G1)g(} zWTkb5o@zBjXap~mEbWRnq#(^=IEjG{{QgCL=L#__nSvJvrco`KUmZD>_!qT)-U)Am z=@cUg?x#Q&jPN$(`-d`V_rX|tjaFZ?Xe9d6)s{I&VMRDlHA$}vsY0hiOFn-+v%$1V zxi?+CB8^dVH0oA&eLc4DV0-7M?P?R9eqABfd0CY8LZ>eB)cT0{Ud8khPgNWRDx{?P z@#n`Ib&~>0tVARqMTaU*^VPcJ)8tlmY=l9KqE#wFVOL}C5pRZA1!9WB z@l7(IXkfiWn|I4ZiNZS0rt(Okip4c!JqT0I^>P_hurdkl zF0+^HO>%|gXlUqVSiKg9K?cwcpO3#FL0_}MB$LPGcJKjj^}$xV(j;<%w@JW0L!BQB7KzQiB2AkF(hMUy<_VJSsIqkCPSX_uaLEsO0c5F%J&mlnfML;;nD_i}mMC8B;^AIr`9w1ft06L~n^#@Mw zuEr1R(8kh-KCPNx_*=>cg?q*g`;)Cl*MWakeWHIc7R^||4sS8J3pZ6v^+RWK*jsH> zzJLJKMw@H8dimYq7^heNFm9)6G!}!?&N-`8)2*e&3_~H*nlj{4jow`%K7xn2extK4 zh-tBZ-KtoswJV0l>L*MEe@aCX>Wm{DtIF#@|!}QK;zqNXSG~gD=*N) z(?p}yv7wF*QauS zF42mM2O3J1rAGn<9WH*)2}JPe8Z%a5#S5UPG}5W{lKs=k(%aykcD(*e7fZ)nfH+`t zZL`C#G0Qvt((`)X5+0M;!Hx&Gs#Zd&4D}7 zB+@N0ALLh~SG+7(UwPtb7yZI6Do9_=+xW>eaPe;2<*vMlear6T?d}3;(=m6e73O=@ zh>-+RkP@GP9a7{cq3M&WI(bjTc6@s_yD>_7Ofj0~iTki?I&8hTp9tk; z`$m4P3p;I+rBh;34r0?;LT{oAx+!Dv7AiG!k32(ujEO}U90Yt>9Dequ@1fc^l+kxzao{{}}XpRLs4_@VuDX)=*Y)?;F!)g>0aSXEe5sEL=kOCd8()civ~@cj;p-ay4{ z=pMJvnNJ*1r$k4yVc?dQjZNmUQo!@BpQ3-O#dQuIT`JuD{PcWh90=9vHNscwOBJ8? zSGi|n!FcC&fQDELEluF%FS#$gL1Gp>)@YYwACDO$*?X{p-2HMuG>0M4V)BWB#}3rbxtq%yMhXOw*eT< ziUpVFAsbjh6A#66dQm~a|Cn-AkrZW8e+7Nmrfu?Mb$I_o?I3E&!hXZbT~9=!zAwbv zt^Uu0Uz|e4%D!wx)bATo);=_)y@mA53}!3|$4^(L1+X58Sx_Nex>7-CCGXdd!9PMj zO66-w@%s&{QXj1r;@BJUxm-0)dJlM9PdKcX>AgGn?@*U*wkLCW&l=s%C~LN%T;u&Z ztQRU-{nx?0J2u7-?zVN~XYPwVM#Otq+u$gB2CI4eElK|0(*2jqcOg5H7fi@S& zV=DW=-m5rC@)+hRQ068$6LBD~=cQ~ir5vqVwT+xSA&<+CvsRNj4dh&|W$6W4kXWe_ zJ~^Djj_{W6d^pwR_@>sAG@#{nQVW9V)=uzvAKFvR^0Geb!JJHPxf^HTlM5B9XEQY@ zc=W!vr!QQ=hJEksDy4E9S}`EUg9_Pij~A8>N<>LeKDZnq^G<2-+q=GCDqxbo#qXs@ zF8ebi9dHvMM=Vnx37`~WNQwZd^Z(bR^FG!5*$-Qk-=UeXB_J-8WGXU@a!5SQ6r1!J zSLd&m4}l-|b=_kj4Qke#G9yy3e?b;|I-`X*dgXRR-(^JZK?Db9(C9ach_O?V_YcGq z@zkkiFK&dna!}$0>2=U6*L#G?Zn{mzdF5W7m(PJI5USIvYgVZl!;7YQKb{YeH-Rjb zM{&QXe+A0ctXo*Yj=vCIN3bM~o@xu-_YXq);yL#0&VLePne?=m-al=PZYf6E^Yi^b;r zc!~xaJ_J00$hAq)*{ZSNa;fGir92r>T=O31w@L+GD>T}-y^V)5kA4X2vy1ZY6X(0} zp$zM{t)3Ojr_GM$%Ps5me^Mp%nVGQypb0k6g1dd5mj{Q-6K-}mW211XlGNF6fu%X_0t^!%g|JZJdNG2tux-=t>eD&+ogn*L5FqHy_zs{u#5U~3yX=w%Ox4JznFMJ~#lx0+^ z@2|S1>gO-M!daPYvy&mSg!L8?+J$4?DD_5 zPi=U>3(-XseaNq{ye=lyjKX z*mU`6x$=$)IzBp`UXhT6P~*s6z)OBAc@3_&{=4YltOv8rfpVbwWjGXSH|(F)R%uT! 
zugf++=|R3k3K7vBHzWbqL@~(ONRS8ff72{MuaM4=5WoFBiI@ptzr5Q`C7Uq11)F** zV)usRe^n|vu9jKC{(gJ9j3#!@Nm#rX1kiT#^=@jiELG~}F6>uo)RCU+E+^#u)U=j2 z8PAW}w*wdR;g<9wQTY*>y6fNP)x$x9^GY2%%$bXQvq zyd)ID2w7K|pB&Ty1!`}LWdg3V_bkO@_`II&7Uf3#UheS`QAu!?iGHW|@wvIY3VCADSnRf|7H2tC z6j9jvt#)#ISaMkL+U>SuSeR2!c3TY~i_#Q?zR7qF&GW%ny(#S@4vWFj&PHT?;2#BY zd#J=zJZ2mhcnu0doE}tDFDCFtDJDrCvrF_Q7EkljnFj6Zbxp8h{XFmtfFuCzaOkMo zTI{@_X%F_(q$F9etr3F?HlvTF7>~nV1Qv7xm;?mub_&JDv+{Y}?LtQ}VM$cX<;JpD zy*FMT(#JK^#`At-KvXsH_q&9K}lTM4XVz*Z;X z4t){;A4W3vSs5y7sa%;=HAa5h9~lHpKd&-VRTPP@ZLhb+?j?@1k3wyu144)yp^~cb zdn{A3$F8l3VN1Ideh3qjG=aqhU?K1sY&OAUN?+q8m_}yCU8CI=t))*gmffQ?HMiL7 z(J3YU3X0mRrH3#e|E5_ydrg}OWtoBQ1piigLO+e_3z@J|u9 zt53_#_g2Tqn@A#~*N4+WEH*uE)(tJJ@2U9kLIMX4*HiKM{)i+H?Fvyqzqha(-(Bag zfV&>f5|d1=f3lgJ*_<1l&U1hI%3Uez5%;w@!}dJn#S7_QO~%=)Rk|Ru-Ph}ZtcI`i zd@zss9S&+W%B6q4gNBmK%}Y^y@613?ax^w0JQ6}HzDHw`O!{+%M%u0c|E@wVKdayP za@N9MyNyoUu}y+y`DCDEU?JvMew$w!xIq+HqQ2;*U|uwCL5KY}iCDjd5nJ@Km<-lT zbUX~kGYj*ssjK#gfY08G9yV7Qfu}!(#30hpxOX2e^{-kzMe*!Ar)98#Bvde>jd0BU zem9OU3m8HV4`y*wu79Mr@iSw}=d&-O?*sq6^+MgiX3%?u4Fm-0MkYbHCPn0FPHOO&H^oFjoQs_8t{7v zeCk*i99%SF#1dI!^k!i52Dwsb{3gE)V5B|<(4dWvi!LH~fHUpcBuA-iZFpFC_TdE~?)N^Hqa|dgjVw zh43RMu|>>{wEMoH0@8SOxk9vD^T}H zKDw0gn`tEGOAfDlwPrLHoqE;dXwHb#)nzWfjG~6c4*GybMSf&5RZ9z(Hho(Ds!my{?lVPAZZ?yIHv$ z5`LZ(caFsU#NM&vkrVvvzOAi#Dfevq|jX^-j?CZ1qKoA|3-h3cbP-~H+FuTs@3 z2rw=eFIqNM{-vcD7+?Q!NL~x<2D3LeUn&%f)@rn>UaqayIgqY&)34Wv#$ZZg_=)Ux zgJ8~(mXMRuY!Ha{A@L2eSUROZsDvqdqgF3jxmCKBzjCD>EpYpTK^cCHuoZ>A+=nft1g#6*xAxbg~Aoj!v$cl0^x) zTr75LBSEG&F3^&^>F5I6Xm^mCm-O7$W5FtQV?$RVms_(%gCs6^o!fc-sVYuv_t?V! z^gT0iPn_L)iN$#Fe9XOuLABGR!b}dqWQw_IYny`|m&B?^@N|O)JVf#)Fboz2rr8ZC z%RzJ()qVKt{T4!-{Fxd25K!RtfNLxsKXc*&OBQ!TUHoPEF(!B?esk1ICP{o1r|<3h z5SYA32&hz3LKh>xpDK4Ph1H-_2%dS~?t#E53l7$e!(Y*Z=}-i6HQB8AoG;HX*Uik8 z%Dc|2)aW%ug}yYHj5YbqWGw~uG*|0ZC^uxuG{yK$5k2AYaFMcz#J+$8J(Zd#0dZ=*Nqro#P}#X!cS(W}^_mHsEh!2Q*xP6qphu zekO*X@e3*0aFurBAt1EVm#mEKOLe0Q)cgETK-3UI^>{v0EdRwfrIaa&#ZAc~z6}Ye zRI>bB=Yy;BtrOe!N6g!!DaIc(pUgE*y|m%E`r>RQ5{%3I1Sw})U2e9Q_}nYjI3|(9 z->;ixssN}4I2KoazGJC$bLKknH_n5mlbNm;gGdz8sV11XnXG1TUO0S)>s%t16SJQEs?i{PX32>Ck^G|k!1R^Y-&)4fP1}E{V zx4~p0Wri*o9`YHD1yI%~PRuNIbHt>#56~>PfUYHtE9tYviabtUD+TeRggEOe(Yc)` zX=H1?0F2}G>n@y4-7e5PZ9)1V+*nh&6$Yvy({%oxKg;|3`{`m0r}N}_+3;|^=@=HR z4lE#TJS&w#1}j&7&Dw#9@(NmHv)yOI`&UiA+qp|^Lzlzl`e9#oW|8O3=)r~HMFHQY zhs9hm0`_i^@ny(5QoQe0qrwz^95>!#_0i@2I|&61<4v#>hX4Zdv0yvV8x4HO#}Oz{ zsFGN#$LsN26OCG=9qAMPies8Y)QJflz-51H`JRIrb3cy37q ze6|?f*@kAZv?)iu`^M;kyS{)Hv-<(i-QL&NH=dCD*I)5sYfigiAqrIPxKhiEpXjJ) z#|PP*^*~enHMeJ=`_(1CSBu+HR$^`#V+Q|I=T}j-yWJLim+DkSop)M{jn-lm#TfXm zUP>>8Gv;u!+Yy$!LVT$LgpRt%LgqE zJZ{VW_sHVAbnCg6o42y#=%s20E!n`^9O98h`@bw)+oGi4+w^vtRB;5XzWo}()RTHY z{8uY8AtX>t!S>L8hCiOpQ5i4B*y1(n0u61u_avz7a@ZgoC(~|hMLhz2UeJ@#d&xGR zPb*lbepY~t*U4wEMQgrcNTbTr;efP2RLBg26gR>mmysltD?x*kq>vOYczkeFw3lky z5XFM;jv0NAJicojwwlYlRlsOL`w^-RWUy_3tWbWa#3s_b3@sR!Ec0&`3BX>B#Lst< z9s4L|u)>8i&5x-;0_~_YS~SeNP1?xpW?QpBPbG|+jjq8#k5qbH+Tdh$NlA;bM}pq1 z_K80?)v86%C5p{<8=Tst7Z(@6=8LkrB?_FR@O#3MOwmPUH-q%wFjF}`$*54^H3v`% z?(yc1HA4(pZ7#Fz+3Gb!?LH3O2|X%@Q^>5M;@X15LVVM}-(Y6Mw6q>UX#gp3MjU!l z#al}e5i2J3dpxfI+W$8MQ>=Vk0|r4H^8EW|73wO|-^6up$w!617i zs+l^7SasIO-}zL?C#R(HG@txu^I!!7LzO!j2M)@4Sb5F;-3~Z@?BhjKN;IWYwV18U z6omt`l9bkKvcbtNUijyEmydPs`Eq;EGEfwT_+0}u#v~`x)O0!he*l3&e!limz~f!# zcCqFyefsnnyKwC7Tetf3>5DKHWsxADKPtS=pE>V@QR9jfESx)c?pkGQQdwkeVVHxV zRtrpBkSYR6TQyE~YApb~kB!*AaZA2Dd9!BE&g2A{vS!JaGY9MH!wEdI>^Y2mYt%@v zCO!##zGUVyM{t@wZ?4W!&)U=s7}P(PPp+k#7jNFZi5azJDp%mu>UoP6I=DJpK>DH8 zhyZTh@Jlc!nn08VUP?!mIRV0PJf(zE+NMogRx6KqAQFW%z+WX%IAk;NO#o4e&oAFo 
zCle=?o<}^Uz!pV+M(tUO+34u(eDn6L#Y>m6*gv!2S>t~h=0HOV{Pj z<8$T8RW=HsL#K`hckgC`;Q4dsx;Qxx89G$ZX{OJaHEaG{FE{s_m#$8nJ8se1MF)-_ z1+}2p=w3$#9ldlUH1_qJrL)_$Z=cOOCjxHS(q*SFpOOK#?%L6&b!#=KZC3;r%v)5a za^QeNAlIl;y;7+PRprD{Q0165Uznrm!(EqaO&p`A`P}AEzgO^)| z|9T`D0`#9hNdF1l6$ppUUb;AC>GX)`$UOP;G9jkl-P=Fy-E-*Bp_k8|1-^J;wpsu< z4*2h!uvXCUfCu*-#KvYV-n_2$&@R;~R$4V-k(aA`Tzq^;bok5gH%x-*>grnCr+Vwg z%~+HEfy0I#K5@*&(RtO#MGY!dk9r$kGJDA!nX(qmR{+Zdfi3b6-}3=@fDdmY-0ht= z?A=+nSF@KvK~*bNZat#&l!bE!j~M3S>Z-?f-+2gm#7mOQ1MlkS)~RDhA0MCaxTvP> zn#F}j4(Z!p>tN4>#Sm0Si!2tS{@6MXd&pwR=6#pArc@}t5zL6 zco4^P0DuLQR(nwLbu3>35CIa!;B$V~$OJp2<_Ja{0Q|`tmrvcj27okJXj#9J>QmOlwtgK73JQt? zprD|D|9Uro;?F)nX3vZn-8?wJ@^7}>DWUMU1=BA=Oi&9tXUE|qN6ei#kIlncv}Ey| zd2>5;>NJ1h!XCYPdU$#=DJ`ppr?wl(VHKo-RkRWyVWFXH2*4x%$5}nHKMn{8kB+kU zaBb7IlS-rAzI~hD6Tdw9^0jH#HbaICJjcsKjC%i`P2tUx+b7`hrlD_lwiBuA0 zQ3O$xB@2*bt2r(y-pS6%(aDh#`sPg-;n2I7Xrxgk(-cN%Sm?T4>%&6B9{KwlO{Orz zTb))XNz$W7kFVdnP65VxQ~A&$IWpvYn}69%QJKESUg&ADKhAo%8T_0EuJ zHkPkasY0bn%!zVxaT(lake8RI40OV%$(_1%e)cq=N3UMXr>}e#7#JBHJ#OU0L4Ah0 zc(_Z5E}XsS^qDgppsEF{GU3Oa`**b}b(YMT+0Z&GSefYj_>rTz&F8J1 z7xg}(eakiis2bL4)}nnI5kLl7%eS_j);=mBdfBSw7^ncWTbqt%DjC3{2Hr}HM~)vB zK?H^0(#t?WLD2vxDDRb+Jg zdB2edpcK$wuL&XPcn0XFai*BE1q@E33X6`Mx@~sb>g{|hR#su&!^0z6jvUd62`Q}G zWnAov6-!mTFmcJ;It?2<-MyAjS*8*XX@EJI&_kz=teLW?VXgZ2?%v&VaDU+Y;A|c_ z^ZNMsR<6im^GPP-yrl~t$2={UvwW+j&8Cl^;_B`;Y4XIwSB{k{R<3gCYGW2ntW~^r zv?<95R;whZHlxHJhtcvjBQ^El3EL86n_)%cS`Ka7u441?BSy_!uwdh^ZN8PN-Me$Q zrC&?EgBt>b2mzn~=;I%V;IC#x6QC-jjyEK>9NEDcv>h9EY}B+#|Ni~Ej_6UjaD}!V z+76pFrfkvjyEg7>)4k>WM|Y=BnX_mAo+;~QaKH_2*SA^2CVl(%VO^5^Uj?{3c(FkN z{d@H3)U*3V|J%oIp8MK)L4ea5{c!o(B?7<$B9wg9=|8Qf0WmjYgZnzsks39^+WRmC?M|JX(hm)GY^>O zWmzJUsIk}Ie(Jw)`=aghx3O6Vt2eEC^!UNVk>h~SR{c8k?J}@vf&AS@c5hs%ak~!f zC(oFcBUjFfRVrTfyJn`=BZm%895e3hgOkOJ6mHw3iGR?GtvB}o;DsX>c4*Mt4ym5K z3c4P8&k`!#f9{_THyP%?eg&9L%$afW#I5XD&|O1)lf26IZ$E5QA#+T7bWY@(wjgD$XY?ioOucq z|IeJMf&!+0LkB=X!75ug;ABEv9bJ9$_;`7G1_cFqd3(Q)jwXb}CnO{#C2>}4>rg=@ znF65yVNowZ2>;ro9Kr|yv-Ow1gUF(+Q|mb3FqH}Vtr~#8pCB91=Q$&8 z=> zj0*qh^kLcNY~wVh<6^)FVTnr4P96(KO**~z(EellS$2XILA9;b@#TvktP(W5{p&Yj z8@FxsvdfqwNA`UO_P=-)JgoP~t5>hG6bG9}tJUi}cI%pGG^|*&dg`>POo5j~zc|rc$BO#p*^jEnc9=tB}y^SFW{a z*D}iV;hg`q8Wn3DyM8pzKdwvbu50(KpS5UC$vh=qhP;kRiB*Bx45r1amtf$KEgQh3 z=M^hg+OTJX06YPC*H1gFU<)v)1Y#BOKP3Q8fN+S%7|AlBKoLe-jUE7M)~pVnFs7tv zb=v5tX#ao!j1d5mi9~>4;Qt#>KtVyt04OL3m}e#h0I+5qLxv1-b8~(2IxF2S7o_6v_XM27vs+!1@)} zivC5n>FZ>Li~+0pX>(^4_9@V&UAyM>8z);$o40Ja_3Xyjt|LnpEB?^`@y(kz9|k{c zT&pqT`qPIT=0sUifl9&z2!U@QOCkkfK;M4nE?(Gw<9MTLO~T`2Oh(i4*$dd-Ca#&G z1-$_h4h$OD0CB+K^JmV6#J+NU>B`ifrOTB*ck%4ugZt}Lt<|elr?-i5!`96-g2^gN zUmDn?A)+-HlB1)e{DYo9c@Z#a*rd$f8D~87Pco%gi7Z2^&qLGW9Gc@2H59vO9S$8o z9B(iLhlZTGcIigIgMkAEaDX zAZ7B%9P<9n^T^k)_k{7l1Bmsj);oRqyb-K2aNN&>8RT!Ckw$Pz=Fm|>4;(y@n3yPojDUW655bU56ciK(KtW;j z=uvD^k7tGqF0Kq0gi!VBz6b$uzlSiH#D)s?lE>8$H~`}Xa_hYpvhS;aeJ#*h_I zUbl72j)OWKy+m7AZGQjmz5f^fR;pjHXklaQhX8;7`RnGD$yR1!t$wfadNpjUX%A$qmI4b=a`Fd>IYBU51LGdZJV~de);P0gNMxLba8cI4}*)d%gJ+Rw4l!DkzwPy z^+b}G@#^U2+LQK)Pf0$0^%wy3Y1n7r&>>ihCeN6P0pGfL^Zea2D&XFk!uK3LSfO&2 zGgr^9-L+l~ybAFCJ^SB9ypuuJVojB*mBRM!#K6l46CeV`5m8YDV~HTZ*kU#D{~*(X z<8c1`1qqRxHf`3cXHNj|&g2Oc{-bJAG>ciXWUX4Y8rU*M)qJb5LCY(auLyb_%+WOZ zJt!zB4uFCJ{TOZpgfO9E&Kx-h3>=U@fBx##s{#5}Gj<9ZosctkwvRtdfX%{u{ygaV z%^SOa+HDmr7;^yhv)GiTChPxJ5JE7)c6$9^SxZAxg~)&~5J^%Y!I+qYDIkE~lH!*n z5daYPWgqD|M@lHd@F(zp>|F(z6vw*#tE#)l)*W|u3GQye34{a*?(Xg`K?8vVNC+Xg zySuyFvdAv$I@aA)^*S>POEx$m$<3Q{>zmA-g=MC@r~0>Zj`MEq{KvTS6=1Teaza-9 zwz^);+M4{lLDEf~K0V%=bmHjo`t|F}W{arPz8wo*_>4;ZhF{jC0J<<3o8a(QM4 
zMb&)4%r;BuyoCXyIEXJ?tZ1Xp>rI7(GiR$>xpIerU36f~ktb*7)0urFQ;eKQ013~-QS>L?DneK!0;SXjG-KXugU64I zvd5IpU)pN3=;Gok)k5`Vru2D85SS)Bra3|ss*W6 zfe`@2K-{St@wcA(2nJA8fg?;Q1@sy&FNePZBv~R1F$9W$M1w9+=dobG9KF#%F;uBo zEpvu!e*Rt$gCC3=GMr;{ zTS3X1F6-T=cPStr!Sn9pTND%w$d2S?YktT8L0V>e{^@l)s0zS|TNen37zqr3Usy1y~YXeuJAYy`2%;8=JX+_~}yL|}|^V9rwW|J0t#Qo>N|-~jwXz<<<`Aq-@j!W6Cm z{g|!8shYwW{0~7^Rh>Z(05+>l5QGaCF5C&euW4nD7(U{gHf^SipP&iSYq3}PiWR?| zKG$ruju|s1bLPzH)2DkF{3u3=#|RTbFedOCSgDl&Zr7rN=HA+4ST_PFXWl%Wd-ZHw zt7)4?-z=QJpmvKIn&Z`+`SXSh8`{5r|IHgW{Je9;rOTJPbnmiY>cacM5BiTBQo2%w z@}E`m4e&S17J(3W2Sm%!{rdGYTg;sYcAhqT>fqi3Mot^vb_@E@!Duc>^BLMU& zS$BkyswxVXHR*qix%<=}!GR1IK1Ac5#w8|bZ|UlltDeO3*|YF@ix&C^1&tm&Ucq4F z>cWLfTD53%Jw9_eHb-gv8uBa?YI*kpTfj5aOR+^Pf}7D0Pa=ibj3EG5sFgzUyi5Fc=IY zM~&LJZbO-pr9vM+89j59Hq`?%pZ?u6?fL_N6)S)K`InVZF)`DBoPGAG*e_;Y!cXy9X)#T*eOk%YVfe32M-(sfO6$4SS|L*$VfZbO(qXnq4y#~ETDSn zJRUuL41fXx16s9fErMPEViXN#Y9*l4TC!T-2?Mz4-2^N#Kpg$g#}m}i*-^Mf5JWFe zkA@8!PM$n@#K_?pGi4A35eq0K^hdA1mA!w*kRIXLK;r#jYy}aB3zr2^eRE0$B&xJQ45#S(6QcxOW*RGvQ zl`f$<{6Bp7m{Pmb$?AV@{uDbC00aaRj4{xE_WfW0h8U+-yFsVdCrMUN{&Z$Pq2DKR zm?kjJ|Ncx2A%@~a^G*d+`V(h*W0V#;6YrEv0joqypEqe zJ!RO~kjQ7}#`jL5b^w8)Zoy= zu<5)tOFpkwKQuP#{LO33C~Y~g=la!a24r;C8x%#Ey=cCTs)FA1-O8l^w11C*T5)>K z&b3bKt-nM_z|G*P$qm)5QwPu+!#4D%0~C^UW6w#w3VMMc#2GkB3ZEWC6cwrplHl&; zfeGsMb$2h5J5>~*pcSP3eSGaUv$wbJ=-y+EI=w2}#`PNQ9pJ0YppTz8Av7{f=VqKW zcGleo_wR*1jx;BfDpAr+&}Hxqm@;hK%?npnZdvyPBJ0IRc5l^T%f^j%P=DIB1OT;e z+Wx@aeH*XrQ2~Mig2we9Jz#J@8(5!)ht6HLSO%*b=sSJWZPChQF_sw3C-}z3dmMQE zYBjH3y>c()9&q3(+x!0>1S~Q}RiFfPMT-_5FraVc%9Xu+ya59+jzt8FJDQ=d;+H3H zO*r+%gaJ4dD3EW?+_`7ZoaxfF>w}Po1TcCPi5>p)aKV)TxZnbY)HwjB~8UQ$dfV{65ARV}edl6lydXD^{p5Yu2o>W5!>;a)nYAVFc`7 z*YN;=x6dLI;DWXQDEr6n5IUXCpyO4`SA4MTH@m8^zajj7Db{41|E&W50LT&0ys(!V zF>!d0w%xO4&1$#X$M>A*W-_fF)AsR0)6gI^%m-(9n|Ry=SZMd zC{XIo)f*g`pMTKVOBZg0-L?V-B}_{>T3M`P+xZ=s45l($6LXF zgUY)s%N|~y*B{e=(u$>Tr# zv~2vi374*2o4b19(mBf+LEQ#*n?7RdwJTTiNy4iAzqP5CeTBW<|R}c`QBO)}LWDK}sf%0dsp0_Ag4paf# zFF!6%R1*XcImiMa8R*t+o1BtR4F0!4nM#3WN|#w1_^DN^Mu`%|h(M@Hi9`T7FG2J= zbt#_eC;WLDz<{cZNW{z6y?liVYuB!wFn;3p9XlAM`2Ve6fD5h!zy%ka_fm>6WvZV~ zK;x!OTDNH}2$&?!h$3-L&)2O}H$(bNZQ8cE zdiAO(h*VKh&FubDcv%TY{BuOYQ|AP-$gN1xVM(MP5X+|K34B;d6$4`&IB->#E0!&{ zYR~3))% zdCnv&*eCQEzjN2FQ)f<#qE1%q2)*`wQsC)4(%gCy(xH1-lVBL!xnIR%!>%to zw7nDaSeB_%E}sCfKVDaia!OSM1T#Ei^n{6H$0R^x#oXnC?>`VI@7S`HLh0-k^V0gH zA2WPp(-sYDm(v6)EA^Q$bn%3FKP+2z^UjSkC(k6sCJt=dci+JSmqMX}>D zzMuAETtpN`sMF^i{R0CvuexTR{A5|>3{vAoRLr#nzF9K|X|W4}e*_$*$PrS;5W*s0%s2=3!8-^>L{%xm zz!-aT5SP*caNzGWOkn_ayIrT#E&FN3`3o0hMMcz{JF6+{xdzmWbOXsWbWrNli zE}S1Uu>YBxCpGHJ`~?frXUWvAP3JP7RZMy=!-Du*3g-eyTeohVFnT-#wsz}=dEYOz zgZwn+>9iR$wMi!i3}eC&us23gDpjvty`~oo?%-a(UOkh^xOVH>r9UqEdBw5@)fxam zvldMf;0Ulc(oV|)|s*WIn5QdcK{}TnQdc7VT&>8eG zF|jIRS+Zu$oiDH1W@({umnHGQ(Zd?SNy|kQ$_ES@l(%5sN6#KBlol>pWa8w>q9lFM zqDB9){U?1tCDEL;>(BvrZ?B(suQ+!2$j*Ix`b_ViFL$1OJ9nSFb?n#WKaUtVq|xW~ zHCu+m*N$63$(T8Fv&K!VV8I~X(wqfu-jqN@fQ*9MqF=v{vP6p@YCW#B8iN9m2#A3V z0o}ZDqt*~pfe{c068?ATXU^!qMT_Qx2MuoDu8kllVC-1n0ClWX5oM)H<$Lz% zDGK7h@UAm=?%W$UZXo=+y9`VIRQ?7{S@K_^VM_4tV3fVCpTLl|-u`TgTUTmkL7Lf^fh^%KYVJ|@EKld0&d0LE0?MGm@b1ZUcMBr zCJ0E6oox%0`phDPUnua%T%EUJN$%o>8a8OQR+~YJkJAE23)rk+%aS&|pPMHEl4LcX zxpjW~p`B(hZ#=N+YVa*HRT3>0GuVGv^Yey1+hWxC4VyL+K+JBdR;3C9nE(*3tq&xY zFiHWSLdA-8>elw~aK{3HHvpwLzJd%8rCi>+ha}kah#-Vs@!k9c&=O8Y8OB6QJAYlb z#?I8Ik>N6(@JyZQ9?=`*LF zJ$V`=`U9{yt2txA+^23{(Tsk5jqciXR_RP7*Kgcdrew*c1!^^~QI9KBu`2np=gb?J z(@XMNzI@q|MT?AJWPo=Z-}fXo6az~m_zmvS&)*{mh^Qid{??UwYko=q%7D3n(E>EY z9De4;d56Lkb|$y@e2_u<>EujnfvmY5X+f|@|j18=Yt^*zzjg4K#{z?d-sh?N)RNS0!&3%P5FNP*WrRI z0dT^W9PRV 
z3O#ThLX*G-KoRhFoCAi66)U!A(c)j&0l0hjZcssxszzrYwlWRb5d-4{Q2`|%{H?F3POl+ z^$i)00Oa2NyWh^5{qw3{zNp!JRL`-k8?^4(ugmNoXZ-Zb%0^!_A3l6^*@_hsA>r`3 z!;@xDId%D5+m0Ov42p-M{+cYa7U;h7F%SeUbpg*`h&jD|shb6FBFZ*5Q_I zTa&C70y>Lg)sl*MbD{{uS>1nK7Kbr*rm_@R!8w~vgrFHEv&Ct^XulVwTYl{!zWO{SdzY-Hz!NF>=*7_d>R|-A$l}w*u(}l zjInmmL=gE0<*3f(RCVXhCHdix9xp3*?x%20nw`{=$eDmJjsx_(~K72S5 zBD6HaKV2G4GpcK+E;g{*z&>c;&@yEzsK8n@_<{gfRP)SlXU8POsgzo|l_R7Q#DR#Q zYufw^AI|_Aa1NLtfdj)p{zs#VJb7|YpD}&d&|&@o-m=U90ezREJb&IPQUI(VUx^Ic zfBD|F<4^aVy}$L)rEN#9?>Te#@TFS^FNB=Bd2Qd>`$w-{U%y|u8v!za6msa2D9ntGtsY&m}9=&qed!YxrE67Ae#6bv@1 zz^foL*&R&93m1Fz=uu>Jj8?_>_3;aR_#~?_tt3gBnB&>&7cXDD=XlVBqd(Be)X0n zPTG0U>fcs(>)BJ`G=1ibR*O}kYRJ=24hS&1^WaWGLR>msVET+1!egQ`XUp86PNStO zmM5?zj761EcrIZ?0KYr6CiyfnXwR;r?6488Gf{ z%u%vf{88Jfb7u~OoCOf5%-z5c1?JJSCR?c;gR*V&77o-7+ye0u=siVusz?@YWsKMz z%CIXptyLKVzzooHPe4jIda(^-g#WeNRu0^GO(BFoD_*#7-@YwcH02yP15w1B0mkTc zMF|X!2bBXypg7KUj1qtCUgVueN_>(z$((?YNQlU=9$^NYBW)=Lj=@T4V%+WUd=9wHz>@Z4ONr;4FdCQLj$JhWZ@`t|GD zgodKZ_e?@ngBTy8TA>fIYtbu6|$Dg!R|4$hOi=tqa?E(>0M*H;dXO?Xm zCQqA^EmQW26)OxIHNtAO&X_vwiS@~l4ugC3?UNW6zi02>O~*FGK4sNURLDK2UqwXS>_B-k3PLj2Au&+{y}N90iHy#2lxiWCnm(l#wwt4 z#sSD1Vs58E*YL@s0n{8pIkbWPd6?nmXW{Chdog7?BVg+htUzU+1m4 ze&zC{B~yNxv*O0BTYYEuuT`Xun`BI~SZWlkQLsqio(8$V|3 z_unmU)2?mn;T@`!s6MRcXiZzWbIWdFVb3-m++qZM=ay~kOg$2E34k!d2lXE`c;*P1 zDk`Wtz@0jFzIo%uZ^ySH5EW3qY1;bf)5klH?9g)&0bu|ZyA`m&0r5YnEF<{0gAjo+ zg%JGp*I#$--mOTHLVy4v;OyTLd@_vas0ZQF!Fz`&CMzpvL#yCj13aeQb-|8xU-_De#_ z^_Y7E$Tw}iarg7S9rI9!bOehGz5SxKg317uE?%NW#mW?z0gPS0?3g=kwv$&+3z9Tq z$k4PN{$5}R0B-{bgM0MV0n!7h{aIB4Xx_5LXKtU*ZOUK-4q&fatyZ!8Mfh{YP2q#C zE4^MnZ1}L6HERNT$8Q^e9UvZ_p1KnI@R^&a(|&`f*Rj7hFpZ3ia(8nN^!FRrd3dF= z<&K;skY15&dS&GYC8WI*1i zqVw@P5=Q{&@6NsigHGq+MkG-)x6+)pm_i57926e*EN}jNL=sM2Jf+P~Yr6h^X@a67 zBAG%ZNwh1nHn-#D0045G%>VU zt7dD~tZCY;*{BKQN>;9LDg5fKXLn^z(_~Cvp=#xPg$gJ@zi9YX!)nb=UpzN*%;?o? 
zev45O+IH?xvP@Y!D0@%u3w{=i00|%*g)1YSx49am>>M)xrVg&Y7oc*B%%M14bOOs_WOT^Y`}?9cl~+_zNLmuC0Hy zs(nhqB!7YYv%j4^Y}gPjZ^k6KPDBLsuI>;j(EHDBY}ozqUEK>(zq*i8{4Zr;a{=CA8Em7etT=Ot5jBB5yZk6^#w?J{7mGkKe{;0R zIm?qj&+y^H+qZAeIVS`GU`8nb`zLba*9zd^m;Nh?7&w3kx*#7UVz3k0YR80NfxIZY z;XlQcpQ2cfoi}=nIc1`xx7y^~8M7xnxfRDOm>{5k)KTYepVLq&1q6WJ9^lrK}ES<`1w#+Lu^W8XmoLX@!b#Va-c<}2)O z+}N^)6=dXyqF$#Ik1#O_h5~+BO6Mu^DEQIRwLgzvJI@W=Mz$RE@WBI*v;jYESi!*F zu1@Pbe%Z46r{`(#wtYKgm8n#Y2K%C)e+dc*==Ejy058w|M-T73c9bE)UtySm|LB8( z1%M$#hD1II{r>0gf{cN!Ten&H+plrvM42fXiCMyuovANhnGD#U%DzYf(z@{%o7Zi) z85N8G>kN{cQ6Cu-&TY`QN3T`CuDTgb?ery^-rIsXwjk%mIU|dKVZqSA2X*(lQnIIt2eJM-}!6TPCaKW zp64xi#;LK}uJ7M^eZLzR{EhzC<3nzqcp`#CfD)5F!W=_Ddh{e@)uuIOPy_%33IQ=e zNeq1yryxhi4>$lOfAmnG&gY!{L$yc-&mjncvxdSr2uSCkRPj7{a*i4~szJlLD0K_< z#ifE$U3DqSun52c{6^@_!)I*IqCJ_Y!vaUZIYmOsmkn|JVxM6Mzc?BMFmrsHD5nC& z5|$v!QXy|qZ!oElWJkT$b}jGFpU&~1c4Ahz$t@&2ro-;0!EZl8&$jm z{cSeeg$EbSNlBdX<;#~RB_{g%`wEiCIrsMQ){ElfkPr&|vl69L#x!f*(W6Fd5;{%l zeA%*1tM3*p9N4Fy#st(v8|O_~peV}V0fU;i{KDJdx#cLNu5HZH@5^`a{Q>RW|_VaQAq9*sgZQHh(*eI)PK^Rfslrj8zGY9A5 zz!gyDESmE$DkMk7oZcS3SMFT9a{D?1hA^5kXIgkB%dJPow+|ZiFR|fqZIk#wewn@%Z@o5wk}=d-hB_(H%Q=95iUq$PpuA zV`IF1e1M^JLFv+@O>1Rp#++F?HtaBF%or>X1GuHhkX~>z2qr^|8m)5X&6B@i!I1di z8`rL%KX-oN=0$p=p=0aL!zT@kd={?xyZQV3j~F#Z8%vw<{VWf?TmL}=YE`Ri2So-Y zU)BQMdi1gbB!S$aeFra-m(6T(1A_o4$&&Oa?5P5jfs!+GuG%&0*g>TL6ge*-^csE5 ze+8VnTcQRcGOmcCP_JI?S+i#T9{meW4>tZDIf=tIFX7m&vs(@(1c!MGCLI z@hwQ%t`Xy$BB~%oMOLLjwORx~#Dow5{=;y=H3H;<3)ry;fV z+zG~YTo4nLCnqjKJVT6Q^d>KQN>xsom#^odMc;XtJSkN%!4fh4b2ld}ih>{tZ%tY_ z=R|MN{D7R*V@ZsWrF0-T)h*xmmd`j(+7R`1PkYYsRbCpwqa&Hmce!Rs<9=o)8y*`^L>XSFg9K-}vjgO%kjLi8d=^48LZ- zMI5-I5D)_WGjtl*d(QXYnXvKu(K8!YuD$%w%02gvK700L&Y08Z*i-=-bF7r-45K)fXon;n zV1wTC(vurDuwtMV*@_V2z$mbHQO>l0%77FuSV)UiZJ=7fo;e^xv4VxoU{^#Gm@W+k zRR@v|APwj{eg1_)ZEnPG2~$sRk)PRp6m9;ZMS!^5@>e7~X?hLa9nsRdaIf z-K!Vi7rxr5I~H79fSI;lbQM&9hcWk0UotR(G4#@T`SdP(0o(fW%`GJO z;qm>49z6)ol_Q&iX#deewCgaFPsU#DdmK1*WcSg%b!*fNy%(%n?Xzdj(FOW+n%Hal zifIk%)QgUdZPc{!tbx>y7 zvnV#}z)3@Y-M6lFtrnRK>CQd4Jbc<11{4ANZspP_bCeOVCrElA2si*Gh}c05e)RCn z`EvwB06_3RAX;yi?FfjfQiWOr0{mOG{!$zIuTr%k^ifE}8T4umB`L=$b~_dD%X-wD@%PdkQ&lQ!u-Qu%Es23K%0)qf|2$l9B>*nC7{COuQIM>$ zNnE8=rihdGib@5H1wd|07k&3>+>KyR5fR=b5lIBmsXDJYwtn+XTS=7cs+}_q{~QD& z2;}Vv0LB>H1&zaJRb|SkhLn2JKLi3KAtsIh5J1{t`~WB5)xZ84d@mR{|7UfLoR5M* z7nu+j79M`-?!|=Y7;abQ&YPQPHGey2LCymCG^zDwb(-D2e(k~CyI$BwFo;_=ZX7;z zxXx%8Gho7yk;6}%KK1RdvnEZN*u8hp$&;t$%$aNPci(;g(|0*CXU&-TOTWoya&WeR04q}d6N95Zy)uPXz6(l8)38q}^-y^0;= zsH6xBmCyop0)l`D1enR*0=gyq@dPr;S zIt&dBow9U_rcT?XOP6!O7tUY2(5zXrRlodFB>!h$Hfp_X&#r>S3RS6Im0-aRiXQYa zkui+1EE%)KgH;Ct0f@IJ0K!3rG+$J2k}E?_M-Ijb2Moy0J*vC0gRPN4Wjt@p*;E$94Akul>0 zVE`^FEb8h*uXO1lgANb?dJPE;B^PxBNtiTg(uE5bPM$ao{~QR$v2n4ZMvYpucwzeV zY2j_qe1DScR(GR>Q1X#s=pW(g;ACc-ot7_FX8rkn1Q;L&95Z+iXe#EKY0>bJ!{3nO zA;*Qn5MY5I1u@0}{be#Bj?sTCV8RGudeJvSS|3@N(0^PSAHSWucZEiTr_YvU=b-~u ztEE!WlC`Q<*M>B%-@Gwu)QFvXcJ4i~*I(zGkQj#%`eDU)SM6#y>m!zG zn4dctKo)qg<%so3j0Y14fS}WH=T!%v3Pcd7sxk-p#N=ii&|{FNr>Cl_X0tgeCWeS& zo?->Z&YgPm!R=*}zK@BFO~i_9;KRNcnmcE9YkX4s&)fP11zO`0Y89-KDKKsPvq<-h znFf9{!fkJfB!;ps<)OxjQZQHi_a@f^^rlLu9^Zn?D_oIXD=Vfh(!-rO9}$ zH_z$a8YHmy!+l6FP8;f$%Mhx)LDakCefA6dYe{Ler;_!6pYUsdEH4IimgSUt_}%dm z5P?uK|MCd6%0n+(fK#R7!1`Fgx)Kzmo-{Bk4U%?y?hVJ|aRT)-K1sxBq&x6@_@IqT z>yC_q^cF`d2Iom|ZS!Q?)KNwZr>#*5yqUzw!@;rt7T)uU$nW4^OOaZ!Qg*9B&ZJ3# z25pxm>(iHWssb6xZ1=6b__wVM9$xNM`mMa2bksu`+=feOKsy)KKEFx#R(L2DneM|T z?${w8yHW;PG7~_C07#)tw@0@2TN3XJ^_mZMnX#&O->cX7Za%OyM+~lAtCV7ZOlo4t zcs^Ex3jtl?psU7xEMbc1lh#Yc!=EY%A7^r|-A_h&z{oN*J2imW;IykuPuKS|&%N0T z7kksGL}D}RO2rcX)i#?bm}fW)x`kPfB>>1)F|l#eOJvGuiVWgy|Q 
zCBl}8hsd!fja-=+){wKQ(m%^Gw+7fpc_L^&ju%T7p-l1a^jCVnd8km20!lYw^^VuU zBdLg#aO`kTC(AmIuBRMRlKl*Dz2I86GpU3y$>wr6jiz>>r7~kF&h;B7q8{?gUqrF2 z4LG;%9K8;6bukmDhy3yDRCE(V#Kux@LIQ5BnD^5&g1AAH&1!Rj`u(q${VBaxpH{_Y zG=gDu`6L^hb5$(rTyL)Oz}(o=MC8~3B?_$q+v5?wDhrICDepUu1e)r2&#QxvGD_}N zMjnBx_Hv>!kF~QO*@CV&c2lo+Wr?QcNp{+mllNW#x2KicYeu)dOVU5NN~K(`u0N-D zN>2ag{Gu1eP>MQv-XO`0NN5wiYVUqn@JEIk8&N%)ctZ&1NS`fFIz5}?AnJv|n%=ed*X12kKZ!~{geVeY$K-Tf zuoocI9G^F}zqXvmmoC{JW6;eg;m14~Q&f=}Ku^c4N5=<75#O59xP*ZN!nrVD2tCMP z2b*>QWNZjNn|h&KLBH8g)3Yo?LCh1UHy4X;8{Y>vWh6YWGX8kUT08%%+dI>+niWFx z%7SgQ2`?KPQTu6M_-sDpVjT+BnAIQPnJXS&F4x2qc+vY+pKcGX`o7WN{Pzxx-Q=aPMV}Sw$h~S%Byt_H<853N9Ci>2 zqJf}0KYh+=yvQO0UeNF(=Kn}^59zRjnC zoa{FDhyd@$$uT{6uRiSp#5J$jBl30wBNiSnYKq?G_na$|&YytqHkb7Qnjt%WK5)&E;^$q!Sj+; z3?`&tvHAs`=cnh&a-rghAfQ|&yVG&MBwI9GvdQnTA#?JMn}yr_Wa)i}rD~N?uvA@V zF2B=G-PxHjUu<~5_u9qo0_woO#m94bb=6xxas2#V_RYW4#}f0@+J(Mg<%(c>;5zQ@ z;b5-m9$nvu>zUUKH@I7M&rNQ+9WT0Y+RU3O-+vR9g8gJQKds)gIWHo1pa>(t{w zS)T77g}q$gP^vTO%!5Kc=ya#a;Z-fSR|O<+St+9`a3*JOR93gi&Qat>QqJKE!g)X6 zM7s#i0A7*84RZq4)#iMtJdkJcdzY>=Nsfby9amE-YvY{@r^T=4Z?n?0|1fqqE{nd2rB&DFRGc^FKi2m?DzY`mmsL3gkmxsbls+ef7nxirZ3zSM!44$sBW3cgQ29HxpTarxm*rCY`~IiDc0(cVAEFqmRwWru8GI zMw>#ODF51+(r?i0a%uLHPXAN3sLx7y@HVrSrRptnV5nDnIR;RjH;mq76>95Aoo->2 zr*BYc>B3--q{|6G!hjqh+49y9Fsio6w|_;mcVp)%{~U^=EKUH{V)#7%?G%#|4jVX$ zTD&oiBWT^Z)Tp*?2P(yVG}{B$k0}c{b?C+b9_vhDwV%f1m1?R{Y>t3?P*Bcetx{2wNsfb$Q(&wOK?Ct2eOts zZG}1}Dl&3Q9FDFB=%utWQPMs)d)-#8jz1G(dsX5xny!_B%c$o z8y0#$^ZN353Iz2Ei&&8Y9ky!X*SXRBuMQ_Vk5}4t>gqFIm&us5g=uqdz6rAE)PGJW zTn`Kh>J6$VTO2uPnuKQ`pFiXzz~gP=$m-&XrX3=aG4>IH7ZzGN5$QO#JBzQuw~6@( z)iB^3M35vc6&JBsthZ@*pWI+j2tbcmLn*V;Ece}-tJR*ShR1`Uj)LqXKxV^P+(>b8 z-_2c<6!F$Tx~F@Sr~#AZOk@y)?zzmTk*ru;I?hBe_6(stDm(mvcTbEPGoDz>R$`>O z{c!#pY8-q^dgurC$@o&{1^FGlUs_|Xb?Op6XmxL7|1qtKvOd--rvH@%t=auKazk<> zp{W7uMhi9DHYOR&I(0D|=mD(sAB`a|X`OCZ^4sVN)bR-734nD}wb+C)d5V9P?O>tc z5!hBvmzue(d(Unp7O@`8>|=EKLoq4h-|M81XSW(fwO$>sGLo>-b?E?C+-6^y5vW_B^(raiwb^ylf52C&@PCi7*eCE{cE%Fl+9XRjO_9i zx&AAWt_Yy5d{suFiW<_H#?`A3o&KwR;SOH_*TWEwq~C`4i;GEo!Z=@KLpB zPY|cPe`@t-!-b=LPj-<=KhMYW_aKIE4Z0=d%hq`d{$F4*;zv_;epM_U-NxPKqZ!1e z;i_WSmuTFOX#9Wep*QpDnUa#Iks#=5%LtblfXWNN`H@zdCtj-Ea&dVc!q;`bJuTNF zwhHPMmvJ5sC>H;7dptioOCJaBhCX}yaG;<;Q(HeJPGtVsx{QE>gBwYHKLE0ZF|Ns* z{xp%zR0Zdq$})(ikf6EPeD!d{UKvl`E37^H|NW0L-lRhufD5lc7u!+iz@Y zEx+}lueECQ8e)XSoI!s4XA0rexfl?36|QJyXfOSbPo4Sz_AcNHrxyE~0ud{TSmR!! 
z)Mm%qL&hdRd~Yz@Y@tRJ8Hr~mLQ43~$M^jogV)Xx6T>;M= z5Ldu=I8_9P1HKg6HC^zwuhC2gzmp)xX@zfR$U90l3ao7l84ZT--;0{E>SgQBB4VNkD z*)(QbX>q+eDwBz&NM%yx{FKrrkhs2fkHL$mf+BNrTK~iA-uUHL9?w zvBh6K3ruOCFz9oe!*<5;;RaQMUJlw@W*>j?1$%?jyt2&-*s|CKQXsi=YPG%>75uOL;VdsqE|GYlS?$FA5~$;!ueEsfm_mXK}nZkk@8;&6P0jg=7f}k4OJJ%bO{dmI;3JQaJ zvXGqf$CJ@mLuWwty*VgatD%2-E+PlwXP13MO4%N-i~cC=U~3?R_qjXK9-avCNid6I z7XuSQVJB~ze7s<)-zGb`kxKwz@fClk2|xP~vg*YdD3Ng5rj1j6so2TluR9iY%sX8Y zPY`F)`0<+x=JaPn^*V$5#{Tl?#^rJUTff`sU!}*5(LJFb-y4={ykPHrAEkv%!`=0qS+|CXcxtmnJ1#?#n5+D z7LWTraNxH!+?1xYzD(F}pa^-$?18wu_BEPaFQ<_NpQ|j#%M?xF3XPiOHwR)XSMzeJ zt8Yn_zUwvI_TfW&ocboT7ZZAO>%#)>+#(Jm^BKZJ zN*~Z&SrIOp4iuSHA3Kw;#8)J&sND zcLpKmd;FaKdDC28dTY15FWPat87HHP6Hl_xXt!FaG-0OilMa*^00$|Xl^WIh5uj@k z-Q%K3DYO9~HACio6x$!JHSP4iIJAiTXy-Y)s zm*;5KwaxjjqGQE)(=QtM<>hCV>^L=Y)p#I+Q7!w;lV~`~RwKOV1hn$>bkjI+p2M^B zn24Pj(#Q<_B8%%!@+)#_J5*;fgTCS1dQ@6-&b$B?5DvEQ&qP)ksQ4d|*~xb@*ua2k z_DiGQs&>D1G(ojoKzl79zudLeXa3hVX2EwO^N9JBAP~3&GFYEE%TN>s4{zDa7XTRgHi;D_5@iJ~28>X{(L~{K+AAd|o(8YRAik$o z^2%gPhjkma_uWo(6`hz<$o(wuu$NUp{Ai*1jUjuDxnJNU2xK)=&GxzNED4+#m@D)z z15_(_{-&SJ^K9>2>siM~&_bYLGOT8>L+!yLIi|Pi4wyJ;GZOnJGS8OXB8QWhwYbeE z!xIO_bh(|TQlib-velq~#oYYnNQbX#U(D?q(Aqs{L5x}r2^zJAurNzB>{=iDeaMIg93f!d{rPCwh4;ay;$ z`6s6GVX$IeqdywVO1%n@;^w{9sn#vqFeOD3wE~By`D_={5SE7VF|&Y*a}uN=H41%t ztdx;|gm$$K1G(a2xc+qgQk(`)-_#t6LA~rG1a@ldAh_;$bVrg{N?Nf-9YLGkp#7xv-ifx|*5LJp zHiAH(tOf=uR4yu~WBJM`?|c*=^ICR-HQ0te~Zy*X*=4 zn#?0(^3j^k$85~zcyDYI-+o_U8V7=@_d&1P@i-;dkKcHwS0o#`dZE>IvMRTg=a86? zu;F;Fznq|4V<>5TAlL6BD8bSMml@qV@M6};nN95E|!&MO{#4p zQlP()Y(k|V)~Vf}njaeR^oo$hCb`Y0=8HkwlSxzuV0)QJXMO~16m0sPO+HhEi9>5|QViy)r@(W!Ks(W5{pM@`sEO1$ zI=1ND?!0eAS|>xRU4a@!t6ze0K1;YsTPQ)cVezqA^;y7o(spWR?ED4=0R{fPLb4Vp zw|qLg{JT~YiY|swrn*VzAQ!%PYpv7jlkL<7%0mAt)$F&+ifo3Q4lRW9k}*rW+!K&# zW{~&jI}d6f;SRy5O$~UbEEc7pXkRESY1||b>dn5k>~%}CP^sK-UQj|Mca92p@({s5 z9S+M3*)1z67>R|M6P(5OX=UQj=R`4QH5>#3;mZp7yc78_`qmkO5QMV>eLY~G@%Y0| zrM?Ygl&SIUNlB^HiOyk-w{=S65vH;RPkRosR40b>@)XDf5ZYAfY^{+uQkq9wda{W2rV;)n6JdM(E%MkuEZ# ztr$u@DPQt4F*b~0!_6DfN@#VVv_YauVR>R%S|~>7&yPE@{JUtt6+f=X7|8T4HcaNf zViCXFSb*C;nNskI?J0mzOa?J%mC8%*_etND&8PF0Tbk#>yJc*_{BT*Dq@jSR(7!G( zKxEPxO*2hcfMfsUT}`{tDb^82pcsVbHNUyIWF5VW-~J_hOe`?<)Nu2*!n+Z;8B@g;1~wAO_SwhXqNzaH5FWk8_^KTd^M@J#6$WuB1fv zgLNfV6L!t7G`%An3`d8jjW(}2u6V6MqHku=zf5Ea?(Xb!cr9Hqn@`a9_xj#XYiXS< ze-|x3TzH=e_ZRuO!B8m>DfhN)HhDg|Pe$_yRpjCWjmaIftN!p`JAg$4P8BgoxtM$d zY+vttl4}-i>0-&hwE)yfj1^4-GZY4MbShF6(o#`tZDP~d8)OL(r>fYZGI>tpg$x(N zioqFDJox$B?eiA~L~G<>Xr$N`rIDDXhKT2`H?9h%Hh3Dx2_7J+oRONR691i%i6=Ra ztis<^erQM@aa9?k99T@0WHF){gc)0#32HFFA%6o?Cu|&x@l&FA<2kA0GT_hY)M4(< zPAx8$Xq~QnaWwgOr^K~UrXN|eEaVxppx#g19R}(?D_djvO!@Ij8lcE#8yg$9+@6vV zzbNAWsZtOGW(K;4)#Z5LF)=o`%xl^gK^0{)RxG>5SEl@4#CckPY`MAYVcNJx+C(6b0#>0iPm?dg%@-E@F+~ z@aq{{i2QNi8$Vu~g)4t}hAG1K1Y|14En zxGj7iuV)dhc98Fp$wXZ%K27Mk98T+W8n&A?a=UE(c7403l+Lpa+*1Ab|F%53OJ#04 zmazS8kwYe(=&oFE3^=_$<%m=09+j#KI9(k$jg#Xv2s7}P zwWa%%tSZP*Up`ZzR)UGx5311Zr{uMsD>a$?%=Bkq{S&KRa}*+#%#P3~&bA=^3xV`? zf*>VM;2CwG)LG8mAQ_C`2~m*-S0)Wi3FD;`{Aeh?qmGp9xP!u>aol|HZ<{KO%HRKt zl|CD;D9_T8KbL@QJc)1I^ei=}6vP=s(asP$KuVj<5P|S#^^e@tK8=k5OIOy`;h~`4 zflwz%KVqKkjo(;~U`bz}R#9@q81eeuWAltBwV%e4xTCx(;pi*Bcmmls*Q z_L?dxe~zYCEoWE$txuuhzi;v#XfeK3wnrdf(rR%%&Ukxtl4G0w)#Yfp=yZC`3aw^% zx_plt3NehiyKbBY-9-|P)ghSFsr@7m>c2jWq#QKhBVKm#e20|4?}dqlrH^J{Hd8PV zTL!}p%PLHIUDrqM7pv5-2FmmL-<}12L(QTM zBoYRJD+H6>q4j87S{bn(UYYeN? zRSQ4yO_vDBE*By`lwy*$NfZ8FxW(prKU*{IIB&xZWsRLKsSL&4Km``AHZWDBAcL?) 
zw&m`RF99Zn1=l!ro}3Z3iK*&Ok9gn_1z^jin7$`VNN;hz9{!3UI*}v%bI2eJD|QC$ z=T~SEhQX-5u$xh*T&?QlK+Id4h2Leo&qBWUg|+Q;j=d?*XT}pJ%WQgWcicyEf&I}N zxSRc>B&1k!V~4-P)og(Bh%ti;Wls8K-KMUMaLHn;bV=vgh#) zcwylNGSF}a$7FDuQA>!x2cXm##_E&8rf0cLh(J9sH=ZVusb3SQge(9zhv|xz2P%SH z?hKONvj|ahpn-8~Y2-hbZREZcah*epkiANp)Uw?{S?4>tNB6O24sp8C6H-Rlqa{S~ z4P^d}7sZV711J~z9%Oyw{IUVs(>TTBzo&@#K7pRX+@-}wai$^#MulKIdaL(-DL)LQ zyqc*NlCWzr7U1Iuz$Qc)*jv26^t?W`a^`w}Z>Z`|jTo84bQVkbS#(O z_Xb#F1K*KzS56jsW`F&JXWRz&M5mPH{4#lu`b;-}>{`jeWBB)uNu&JBk!J8;M*^na zzTPZhKZl9Fz(7w-{W_23ZJAVsEtV`UuNe$MUa!4&V{2628vNk|HWqG{CGrvfJ<06+ zIhr-}f7~y`Inp8Y{(aCW*D8>Ta2S&c0XDn_DC8#ki!io5{x9E{OjxYIus;GC5`w7pM3zOy+RJQ;*h(` z2K1A-UBgjBO*Fu$v`|mMoBcvlCyEOQvpw9NsLoH9u5G|{@n-ykhHCQcw}7X2+|0F< z_T%js&t;@nZtPF06WEO7i5xm4fmldUp!Qj@PL`jKH*ejY;`TsVDB7m2#-u%`g3-|^ z4V+S^`+BABsuZG~O-)VnT;_ef{e=pd%9-KG=wu}&sPYFrO?Ng!8|%l=B6$l*`rbL#3Q7KQ)Fdpz%cI9bodH*rn;Ay0c~Lvy~t zVt}q++gW!jMJaRQTWi{TH{~&Z0uVDMDGQpq#`J8i-pN(}*P6Z7>W&9_WC1}2&Ei3z z(?Yj0TUrHdi9}0&L?#R1v+B$SBf!f|UJ{WnQrM^l!HjYZz3>B=Okz@n##v5RzOD^^ z=1$@8em>DR$EbzHZu5Zsj4z%mzTssL&DLLsp6x(g5ki@h5p|Us=oErTy+N;V%dZoF=cOaxo z7+K?|-ZZ)yn);>oU=rgSP}QS{PkNbW{^^B=*C%@~$o(MNjM$dN7DO<#UaSa(ImR{$ zJ?}l*t28zXH9(Q}?+L}qWp@Z5d=`9cEQ7xOGei>elv8ZjkOrAS8Tj2-hk_Pkm0ZQE z4g3v96SM2y9ww%~^6#R&8-{G^8utytK2T!($NW}>LrJ8Y2V-O}A;sSj`+!1*%3@mg zBFi4SkJkmUzB<3p1oXAFw^Aa1i!n%qJ%1~k+utPd+hgSB4EKMO=rJ(7FoPAt<7!-mrgO#}KRupDI>N6DqTN^! zv8~^y^Y?4D@8}8~c6u!|m^3$Fv03~(#cp1Ackuz@4rGJ{d2S@D?VmY9C|Ud|1M=Xs zadg^k3J}%N72*2*tuR$c`K;Ng_>kr>nGkK0kHZXzOPzeja9TJJWI>brZ6o|WC{#I| zd$0O%#?DW7s4FWuDDCYAaZ6+d*nTgT-90MZWYN8i>+g8XQ4OL{iP-5Oh9WjgzoEau zUpeQSof@zF_kn;+?v#`C3_7u!?drk0nY?V2)l^x}mPcLna=VSAo0VFl1~4IE(5Zze z5>GuM^D^j7OiY~0nKY#!saL7AE4Q~G$Gly3y0Zsv#FzMke9i~h6p3Yy>5iH;wEjN( z2e~uW-*%DcP_N!-vWocZ2B2HdzO*wLhjZ`4UIJZ2?eahduwzob34=4~JWdtVQoz|U zxSzU$5(Mi+XGD95;^fh)B3qc72tqaX_7N@{e$_Q1dV7n1C37h=oPJR8#a_l257)_V zVExR>O;V7S3{>OvJVwj@5n&i&kQA51Aec$|>d~|&4+6Fyuz8tmTuh&6#!7iPnsHS5 zzi-?N-9~d`2Eb1^eelZUvHE+G=?XgAmxKM|*zd)|OCSVLa_9})_VjeC3>>O9oj^76Q!TJVtz>j*(GH?9tM%q?iTz3;O5~hIhm{c$7 zWEK9s1S0buuO^82%#43)$uy-MGD?4a{)hyKq?wAi@icPYrUHfU(aIT6@YAxHCpM@V z|9~2y)fv7;$UXr)9Mn|BV)5N@R4DB5b1v4WfuUVU6`w@XpjoKNXzCvY(9n(ZRkk|? 
zh5$i$3?K)A;51Cn=&*rr7ZmPnkj#6(IMskA%KKn0hUyP_lypxSm$_%`;TwpB`xATN z>>fF_jFb5|IgrtQ5%Q?gZDu-f0j?cb@|hxbD{V@d-0wBZO&a}$wQOJKdkn}#eE0s* zsxjD)Rer>K)ORy^zCH94LpWSgq(9B|#lQKW)c-rfKM=iq=QVFWJZVU^#jk$&e4^yX6W1l(#lz0Xfpm;$N$TBv)|n+04)O3SpYk~`yKw55I;85 zWUx^(v&NId{ND-D>cdhB2WraQL@j(eU;R#a2Xjp*ZAp$Z?x5SdaqGFazSkXsi8KVp zA?u0nwyj=%p&<0n#e?kYtW}{uUR~LD0u+$`Ari@6VAHFLCG&5>H87)XlHnA_|HW^$ z<|M|$!@KtEDF8mCLLT`%zifD!Kc;cYvwz~jdoo7}7=#Q!#eW0@*phc9u_<8>n}7&b zCdbU4g3d>@SQ>SXA%@9n2|}c3uygwGe15euCT3oirgo~$IMXlPxI|&Y44ZH}uocdM z*p~~EE``wC_9Za8{SnmxZQM~F$o|IG2a6uV6G&(N`0*nrXCB*EW1Y7do)X&HB~8#i zS`(%}<{`G1+zZMUw@W-$j~_8%Oy}~tnIam0ZQ3qz=;;@2_ic9$PiTLJH(UEYT{?2A^3>u6oWjpjWi%EJtIroT;7Z^4!8XFsZ@KFOR$I$?%3qohyr%PDaCeasv zOxYQj^E@0^{OyE5TRRZz^8Vmge}>~#tALX?H_7O^iV4!dwb87bUZ{1Vx3jZTN^(+Z z1d|jwu6DFXl>fGz{_14<7s2o5WZ^B~mmlfUzkc{7Gx;$Xr5lSPI0@i*PdyO}+UXX_ zCm-;XR(q7K_>nB-Yeb5FLk6DVZy-5tSz*B&Vm^=0vf8rLJh5Z~^)Z=>$JQ#$RkvGU=ce70G<-<7>T=Xb0BTo$qE zM-L3O*geS}@w+>p?1{_rxJ(+>(YqWON#jtfoH$&0)7)t`;rZ(9uZw`F4rc^Z03VkN z(@<~SOtl*K$KZ)2(LA3dKGM*boGAfG7(>vIZS6~&H8J4T=BMN<7&2i03Z2iHcu)h; zrr$l<`3JcSsdJROdwIDW{4+fGJ@obJ6GAt?OEHt(xF&Xg-ur&>Z5W8H<&6sZvxj4DVHk$8Dp`mfKbUFuvlDB)(>FJj+t(~a-MfK-}<-Ou< zV;MGzR)^OUfV^`e?8Rxf5bzE8mPN0wNqO@@ztN8CCRDDclQg?gtfRw5AOpehD1q@d zKL>qpDi{2Cl4|PKeOiKe8ZjU7xrRBfYQC+D(Kcw+-zXBS03Gqx{5%zDg7QVoG(Z-o zhUg6xu4a~HcXvCc?@y;%IJKz{h@EPjSX4>HZ*oKGD@oqGg~dVpkJucGUR#Cy=$~w+ zvMAtf7}#hGYg~VJME8vsqs%F_d*A1oP{svFgCPM>qU%6FRjjBT2L^ zwi=V64e>yv`-eM&0AjV=#ux2!u6~@h-+CVm-i6*vbXW2So*W6B7)l`_q6d^dW4&9S z;&$=JrjDTQmaT(HpbSN1v8WF>Pko&;tD_1yj)>xjjfoa^Kd8Ytq9)`oIim>q->w6L zrWt$9Fd1gN)Jiq1zVSWVQ$Cy3_p9tPsa$Lj_IYyOOZ+2!O{&{uzkZV4dQHl!{cFoh z=}JOut8yN3*AJrE6?I7L>%JquQ$fIiBeo{?SsFq600rLU3vJrn6v8iD2V4x zN_b4%CYi_qhQYM4@&kW<<#pTVqypBnUY_$$6o*)9bCS!^jdtpm@#AR zdU#%t;bIvYEW;#Lz1p(Bx#=ac6`(jx9B3{rDWjw(FhuHYrkH)h{WPEQrnTIpnl&9g z)lRn$y1B72_RqQ~4|#uNM&IvsDmHu~$9AQ0S#9^EtDSYV$@{(^5k3s@0>9n&G6az& z)o%5SH;o1xPkjBCCEU?o3>5BZRa&$!&7k-|QtnZt!D6=9c|RfTgLH#$s1ut33YBcT zFD@AdP zyRXjsm}A~)rkDm!QtH^mtHu-mEr|_}PQ7|Sn6848fifB+pdb>wc-j9?GP=9341wbX zvr-m*VRt1d-do(~;zTI?3ACqY^<8H0S%4#2JV)3HT4UWI7iZnm%p_`;2iwg2PFsJ% z6IezfmRZAbdyaU}ma3aP#{x2{Bt=%s5n=G4q85!dd70dI{qC{k!oCmpM3J=Mcgup_ zY-h-a#qWZ692bF?Xa9qkUH*6gaYD?hd(CeCBv4Eg1tmA;k6z|8@;otxWG0upL2*^dvXW-KK`S+AT(8=j zndZz|D9R3F=IqFGP|^#oQGTYlQ+js2z23yj^OdJ?{oF5;QG61-G9}I5kH1W3j^H%o*DjTt`#&&UGMe zw(8&+O6slR><{$`V!OW0vk+F0^beJ-**6c@IpjJ4x40HR;J-L_x+w;>GJdQGnd(@c7tNNX64uI(d1~sDwD7h^OQ;r@_zR~@m^dF ze!|H}$#{_GcIOLg;ztxzHeQ5rVr-1Lz5Pim_0#Ii-%NUKP0A$zD&UCir)CT>XLpu~ z>!s`M)h#i%iFU64bEnRW@l>(Wc-9BKa_jW4rn~6H1|gpt7N?X7{yNhM{XD2;V;>#u zQ=6envZcnX_1DCAYM6mTh37t(1cROTcf9cDWW0{OZf^*sB}Eb?IjmxicI|X;t}h~^ z-lU52QZ{Nsii39~2r1s{?cSA+SC)JqLe7}^$(0w6wiI&=G*m&*Fagq{fWlegt5^vM z`u929E`!{?cRgOW0RKTs!*gS4=x;Jp#bR3-V9}Ls58|Jz{59T#4ZjoISvlvk>x|0) zLSZ*lyr00dYFfMa50TW|TcVzJBP{4c`w#>IQ7?4ophnA~n}FrPy89w9PlXex22D%==tE+d|wwrLcv6*LU%F3Q6y63L+@(tjtWX$c1VGczjY6_v=i4 ze|=wKoBR1lKAL2@qhi*myJ9G#>h)@}E`pHtm)mL8x6qq^rQb%T_I?Jm7|-6s_k@gK z-dv6?Hs%Hi*Tb7a8U8?_&`>OLY}hFS<4DegVI5XQciXosJg_psd;#AJ@fNK0Vwits zMo{1EsQUf@Tbw#!+6WcZ!~JP)Xn)H5KTfgXlx|FkAzUOM)rpY z*JI<;!{MstE3eo7XiL4N7K28WTIWA{xK=?yMq_`^CqLc@Gc0YcGy)} zGO_~wtuPb2(t&&FJnmGoI-T*zfy}yQ)`)_d{wwAUdJ>C#Bxo<9{ z|4e0DX=M!@203iymAulg2UsBt0*fPEGME9GT{hD9VNy)BT6viulX@{3SXiybi#wkl ztS)!Puud2K&hEO{NPK_PUmyM}w^5c|YW8WB9{49~4KWU8JR8UUz;Hej2S31BUxiKv z@!j8v)|(}P_w!rjFdq1AH>;PTF`#gSX8~5#@WCdlQ0nRAm|cT`*m1qvA>x>9dI(2s zRnLNF`Mtp$@cqQ>F%WjK@NlyshW%@#svcf=)u_MIamXdr-vAzMl>`5YROo`Z>6x!e zX+`#`!)8AKN3laHxI36j0uXn^zf5AnS_5&AUcS;B^n(oxXZJOvIpA`+iSsu$31hYG zZw=2Xl2Rg8N~ 
z;pl4#HD%SSgC`LHWJ{r@kghMG5S`L6Kuf9+Csx#4$tx53B4MOxkBzrH`m#59czEe0;(22uI&|i}OGgHggR8_zGram`^#z|*BG%A}9U6sJW(&R7* zO+?_TH~m#nc^^>T=t`(dm}v*I#Z`iy%j>* z2L)pHC{zGfFeJGOHxh^%#%5igES2Zy=YJ{r2oM2$j``UzHr$1jc$gEI3hy=EgWx$O zR@PlJ3G|RS{hs56vYJoQ4~m+_PuykQnOJpN&{KQ=F4pewEf@mUYH%eLyaf3*7P_@1 z@J3E65&{aWsLlBQ?eE6)dEf)(Iqj$l%ki*>fd<1rVW!QVv}|AX4}% z-yCH~G9;&Jo!;^|DtwgJ-28Jyw1Cw%uR2gfV1iRt>I^@kq?A<1VXFU;(f-lbtjI?! zuP<(M#KQs1yb9%@^ZPxZRUPx-gBHXNhKhZVC8R!b3jlf|fO39klJ+u^N`f4fQBL+_ z>GhuKx>BVKrPDxB14m-y|*lnJ-Ul;TQB(inXapS3Gbib3R9_~&W zGn6@LBjn|04I3Tj9)mE1GsoAO0}=hntvEkmoDes1pS54bMdJE*JlBo3dK?>9J9cQZ z8F>EYLAkgn_%!qcCXEA57;WKQ%@CR`RFPdHujYk|QX}j0%^r^S^Jly24lh8y)@-*i ze@=Aoeg06fVC;Je2*HE^SzD|7p_GhQ)_b<1f`(%Un|%OB?u9uF%s*}cpB->2fkEo8 zKP>bVx?S)BFv*rZdEzO&{`O25wf-3zab2Rv!Wm;N`aG}C;^aBI3Ir889?!Pqat@PKi0p_Px(uyE`mb4UU``<_r@P)kF&)n{8HAV)=NTOJ8r>_oEvFir-4GbL zLn^b&Hr!-dwAFjV>VmL0{8}5Fjf4ydmerytF}o+(|7U;VjW3mCuW7K)F}AH{8&cs_ zu>EEyGQ$hsr>3YYmqBuoEKKms4_PECdEvCU96RKBBwHVhl(`HpiYDOrw@`${mHzsd z$b)RDckEqHJ~!+57)Tmdf^xn27dG-!@H(CSfZ7~_fA-|T=sC_W3ZHx9k00Y8=@7_V z=lvyGbut|(Cd0sBv+bNBGE*oe^%?%zI^^)b+hAhz*4zQ17Gsp8%{}1;TtLGQ3!_)~{@2P0t)bsY!i4I9}M zSz+oNF~rkkz74RsE*<0n8GQs4(o3*B>LDYBEB?ZJt4qkSJ>n+xdPRy{Iaa!ib@8eoH*L%lqfuR%0?cee zd*{aCMN~z`eUCfV%DG~`M>NfAN>!M$)XQ!v5E&AGCQ}EOk*;hJPq{-|5?cAi!h#e8 zy+gRER}t9%P35NP3(GheVcD^dA_Yu*HkVhZe@kv)7N`!oqvsJ>Rq1q1{7@Bd@#!O0 z(X4i>&-KN;(+5c-{Ep0+qkrY~n@LvaX!}Z#n2rpB3G(-Vj~Dwvj~wUzE;vHDi{tx} zC4$7+EGPwzY_f_~6-FM7sa;K*&&~?q?R@iXKFIx2<*_bwyL$1R z$g~ELM_Mxd+9M=&oD9)bUv4C}+3+Cb&zN4lD9`ehgz~Izy;Y?+J2W$OBolZOeSN0{ zCLDV^fdDx|(Lzbn0I>Y|1`r*#)2Sp-&-UCW!XV-2^;s`BN`gYCYAw7gHw8zai9~N? zgyhp?J3wI6{!eLwPVye4<@J6v9u_uSG>n&*CvaxA-1p%(!J3zw3k`d{6`g{V2rGIo zRg8NI6sH>~XdGM4DO76V^2dq!5=Lx+`1_^<0)iiXpBAc@D3kL{I_;PGmuBN3x*K-q zv3C#Fv1Do5EG&(3+I{$J_$s-mqu?+zBo;;ON_J-Ud3E~?{zkd_;?%?Na?Bxi4Q!MQ zCO-_paXDnr2IAHsn#r}wMqJD25VhjLPUnam87I0^8? 
zlSS8q>)~XPsZwSpUp=gDARRIi^nVLz%zyyXZg_^i)^Ae~Yb;X3T2DJsq7^o`54~Jm z56w()Pw2Xx?rtwlC&qFww-Eaz z6m@&?DwHtL_XilGy%hQHoaLw#;9H|5rw-U2+{afa}|hz(s_hj zx`6vmUnQ6KVOJw8B`w9_jKRfF2Q^9WP4@knZmnu%PJq=sG4YDLg5u$73lJI0 z<2i{*l59s3H?$Cl2o`(MSyEI)fsrAMD)xD_tj_gKNWrA&?vBdlSohl)-F&yVj~2Sy z*ZRE_ohY&hEe$bR$*$ozWG3o-+tRO#B4f_lPrP`Psl@UAVWD-+D};c3Vx_L}U+o~f z&y$h%GNvqgUoy0fO)0FqTsO#$k z+06^2S3AHx6!ALumcpv{A0^wZojg%ecc6rXIe{(@S}bGR-+wwC2`Fm31xKopgk4Y9 zA)hKxOvk|?%uSug%%hw|c*LJfn+kV+K_V3vD)SIw@&Iie7)4)WBi6DR+m>M zU2mt^3N58IMPc&+>k!Q^%4gyOFjprQ{Zmtkp9u|7585E&@y`%t+wn2f!FhOUDk}RG z@`qyqe$6(_({}SP86KC5L^^htlKnNm9PY!+tp$g%C7iU}%hls*(T9nvIFP{%{k$o@ zRPB7o`<~1|ah`d##O^Kt0v0h;7gM#?>NO)_e5=f~;NlQ~E=?R(pvQG3S)Dzf5-BK~ zPxDAx`+0Z*v&;V-~YI%}mIZ*#Xu#gOg0^RSAbDhE1b8BfZmkw^PmkpQSr#OiJMJ!Pr?um0#F8JEd<^%FGO9XZ>e8eX9 zITB)?^Xe!^*ZsHvO?AhOW8u#a)Spv(U%R0&7~V@|Q`l@1ka>t4W`H3yY&-vLy8<$p z?S&(iFlXW$o^NYhjm>o=0D%`7qmzx*2j+0CZa06f^SYiozufg5SfvL8!(MZkpD8@FuuUcYROyYz3XT!KvGE?C+L_1&NZL7}6cx#RrOMQys-dLktanYR zBb!$aFa=ACjG*3+7lsIk`JAOOCAGVbz=Yq~EfmMbD~c(-Gx5E8@XqDii2)4o4Uil!IIJ`OB@Kc7uqcz(+n+LN z%6+0%LnsSIzBfh!8;0ZOCNAp_??ze0b-w}i->J;QO_s~H{9LT{w3{Q)nJaeL zn3X%ZR`b)bLwNXbE)^gtZHb&2`HgEG3pkCia6CU}aSX(fyRFIU*ePIj-FD-qi6!SI zZ~s=BQCC;S*@R*9IEY1Z%I#(F$c0`y(D@=YdV zY{EvhZzQR%V{hJ|gB-_<635k73{K{mBjy`Z8XYzs{>MyB?4~x1fAPIF$yNdAjFdvy zbUgwT-X}mNS+Zj}n4^Ob-un<(8|o}5jMpqT)-F^yxIUv~$@!ZQvf6oHPq&*J$(kty zO=9J3Bvq_sW-YicLUgtz-gZ0jgBNNY?x5Jmdn^~ONE@&Z&%3No?zg|*^N`^n} zdr?DBp>@~FSZQ2RRcyaSv5rBri}YRY zdwz#uzoWyF>dx(v8eC7Cd$PHEihqa{sN7*VTbw$-ccvXxswnaB+ZM0bnBb_Vb%11>87y&UK%fKo zRPX37H0TEjG-#9R-2Ws8AsO$Wsp;-cOY7-Z8`?kN(l#aF7nG`jmk$8JNI9cGHzn7A zAIkuu0V0dl){>60&a>*@yUIhd)c&q#LP9SU`m$~sMr;~ujW=srnf%50EV`So59Q2g zKXu!hm^|+wYug)swvsI_J^=_GM2qg?Dy;MA9ut! zV*M`f$5JBlp~88X9nJ1JQXrvGx-;=!7gDDt%G8%~bd#tAtxW`U_^Goi_itDX9_CHv zQL>7hU`6cjA??y1(K?I=RgsgWr_X+Zz1wn$dp}|RetVk$6@@@emfs;$clnL}TAgOj zs%3OI$FG;?)e}^Xe4o$I9OiV0Q9e1==8^9?J3dy zAw$ODp*Xo=ljzr$__S)pO2Ycxa^`|LagyhsdL}r=a-(FN!cgK#TE-GUpu>u z4h`jUny(H59=@C3hGcd|UpzKn8{17K@YBDG3_p4pY=^rDk4K8yyt5E!U}aMM&*~4a4)Dc*Je1SBOOThN#QpBo!cg zZ&G&MZ1x3Yzest1kdIQ1wB@yGu~0wgCEzehnVAhXDNsj<{%BJDaqXmib$~6iG0NB8 zx&$A7w3(;mcOk8QV6|?gx|I3q{l3XdgT3sx&ml=!<;vYClRI|vKVy+o6v+tFB;l1>%tE%Tm&P~F;J|7=mU~y}IM~Q&=#y7?rIh-dy?nOXDZAz@ zGTJtOez}-RNa%iBoSa=szdZc%*0U=5rNyk{akCJ_B1uf7o9}*=8jv1Tzc6Viy5Te+ z5YXKrg}^yGbbg+8n332m&S^h$$Wx$_sm677uG{nfoI5JHJ3H-i z|NMBb=PHuI7=bEsvs#Om9Fg)hTm4eQ4bfNr+hVe$22q?*=U2ThKMFrSn}(643t&EK zzf$OFY@DmWIhoDlJ=ya@;$cs+?4O@Zsi>gxlVr|Rw>bgxUlrync~ZDaWaks(q3rhe zGrz@FOAA|9#ZmrBYqqy7J%y9~VR4mgQLgc1B7MF(wW|TE^26S}tAGEh9=sevG``ha zsn9mh+HTE8*ZHs33w_Kpg9(E{40wJnc{3ohCut%Fk?{BP%eK;VC}4Yj$U@+> z-mX_-ytZ{Uh>wygGJR*qSDKWZ=(E3#;7xEqk;>6{uAO0s`9{&|rai6i$91nd7&ea7 zJvM29tKVRMYqlXqB2bwp>N!)jCPJ^0xqEtAq+UkGS4Esch8P2?O;4J~`+KIdQ6eGB zJVdCsBn=PF$tDx|YGYFGHO}v#?{>2I8qFb=CuAqL(o{)9-G|M547a}}e)kmWXH0v< zTZ-k1K)BhTYNT_Q140LB4vo4ytmSV6opqbTph+kWJiKGsYb-~+KHXpHSZmZ^K&T|2 zsjt*0TP)YABq^r}I?EP6Ti_hwv9)tYnhi_P0`%RfyJi>nORHJ`0sU*07CbDD1o z4OZ0PqBW$KP=3W#c=-Mj!vQ$h-e44t2|J_obk5NI`4)r@pSfyj$cIblhnw-x5kxbv^?w7E=|HaoT*n;;BqIzL6FJ7nT#BWaoB6&OR`-&`8q1S*D6WIN% z!ZPT``)xbMX?9!V`30_#P8h>RbWQZjOnr=L`tw=?OfaI?XnU*Mq5co|%Aa345C0Sa zDhWn0W4zzikuH3;njY8g96rQ3nHd+;-GMQNa)M#$vO&gKGGPJjOg8cxLfOc0M;l#T zCGji%UqfcXVlSio)xbh=a3F8*r_xeJ0s;ZZRF>v}6>kQuf@GXUwNY1YZmN}EXiBp> zB?uYD>#fiMMMr=@jW&4(DDWvErLiuMzxPH&d{o$)6mGOrvT!W2E=Z>Ae`baDnoEp{ z(-ij%<8_11r)sU8II|?I5vT`B=yhCOK?%9S-%5QC{%vlrmsv1$JDc@A67$*ek#lx` zeS*QWQ_T@xD$rm^49*i*P4({6ArzrSsO4&KGEGG2mO^lB`nTuhHAFpjDwEF+ik=03T=y`V+b^m#JQ1bf zEeSuGF-q`seVmn%!(}l)N zz!|PveRx&a!Xe>je9(F~oH1*SkgF;WguUdiFD{HbtYTx5sH6x}Q-(hSbC9-KoRCjH 
zrjbFeMccDm(%5}?`0tt==Q7QRAXeGU&CP}rJ>n>j#}4TE1lvvaU#C#bt(pwygL(Q9t_PN z;>qXizyO1TjgF<9TrGiPs^eQi*;lt~im9ii%h>iMDcN2Z)~!PkQS4{(wk{t+^QHHq zh$n?nY!lfv5H%EC7-Y#pL)f=*RBr@Rlc{X|=JHL2I+7p#$ZzfnT@j34Mjv0<^rd%W zm#`aHE2S7g(1o)PmIi-s{6I1rmmf#};azma=qrLfZ?IHT6hm*cIP(%*sIjpN(EDJE zwbw^#Nx|k@o}f)aIj~HNTrgA9-xzAxa?-;XVTP2xCEOC>CZ~9!^cEj&&6Kc?WshRQ zoHp}tzYN;+S! zZYyuu4A^yRC3?levdVByj&ysx{VHeaPa~L$0}pe z)p$z${*>9-q(s%mn=@;d3R@2-BN03iW7v{)d}~lo8TojNDSmR_8K!gtXn=0WAI-uM zPjxtqisx*rO^2LT3&S()CS%Iyy3#M6Jjx#DkO4$9Lfs2rUJ+SueoA^{@sRoBUKM zSL8bil6&{%!bjgqE?kVlih(Zt<_Hzif`v(zBG$b4#hO{WJV8enq^;lOf?KMq;Pz&A zQ!1wypSw$2tl%F+?GU*=#H0ouWxoC?51Ps&AJuhCP(tgK&+kWVN5*@Jdo;#3OK3J% zWE{9|j%MC4|3~xaU5)T}0R<*%Wc#A475C+6f8_(|Icz-G9K=OP6Q+nS)K}ZhZ5bDC zSVT*WElX};&r8&(UzuE(cuSQmDAMw8xw(t7+WCI7fI2u)V>^w^xat1t%=`KuiUhjV zY(JMq3aee6{%6#O;ph_mCL5i?g;U{1>)ZQ`<68$^ycXxZ}Pgg|0JQ{wSXgHebalQsijJXQ~$2g5IoiWIbrGP**|7t!hozN@~{=%PFvlVzJAVH(`e%mBe{Jkd$2ys zJzm-hE=hVY5x6jY>RtT1o9zn1D2uL0Z#L>d2uf>0Ce!u^+9q^laQbSQMHD&(o z^MxV2r70#13|Iy%d+v;YDTCfU98i)Ey%>h1aTwdaTu|myf7Q~#i~=NWaU5o-5dz!)wSXyN;}o3nERzxG^LnC;I~j=r4iQi1>idIWo^&o zPCi&QnKpi;5Qt4z&bvM-IWX-HNeK4FOc#cjt0`P8e3%zx#^bY{OSUrUFYeFz^|jM7 z+G;~Nvi+Xp;nHV&-X8*c#*3COTT$$L0485>T-&E+-W&_eY8aWfhY0Y>C`<&C*=P5M(D^>Zzzp)$ zx)#}bE6CkHLGND=bEWG(e>q=LR)V@k8g2Bx9+?McHWG=HbnjPE(Uen)b?TPzUCkFJ zl|Vtr1!ax4k~ZgSzTQkd=%ePSA)or6`Bb`Lzl&FzFa$)vyyfb0h7VF6;?)YX#qHhEgiU9wR2s~!H-G7jJT!^jTp$Z(z02`$}WqUGf7OXp!O zVcOTtfdb>xr(fS%@TD%|&{nMDGKJjl7X%#FCpege-5+Jd=5P=Q@Uh9?b|cHv4v;#; zmwk`^PHQlDHy(v~y564*!v>4RA0hPPgdhVKfr2`xi!Ka)`~R+UzbYUx1-f`L@F$@J zShSLRrHD7J0KxWXKV>wH<0G-qGE2AhW=KNLvIoy$4E^R0VmVK5Pp8GMm#yd!pXk?~ zq(QeIM@$}Di9z3;N>rjp06k2|%pX&#ig9tCnw>3%l z?UOMwEp-t=oC5&`hA4P}@K?p`_-qE#%^}Zkp2gyH0iRm-u}!s$Z|`C+ZxQJbVYaK* zV(h0r=~4a4vGC}I`rqUltO-|&e1$6Dy?fFshtWeoP~9<Ar!myl z*J$c7MWpKgM46n5ViN^o9mVOj2pQ$QDe^6R;u@FcvpEC{(voTNGR^rbaBfuW2 zSJ!vwYuIwf7(VVT`tA5Vn-}a7$~!>x=sZ=*7bZuBNN-L{xlDW-D|&E1o0^K&6D(R; z{&okckBzo$dCgF@p<=+&+3_*wMx@y?nVyZj*?GN!=(p_edhuLFH%!CUFZGtew^pfR zCe6z5ui-+`#7_&a--b$jsB_mEgSo$rzP(han#iDoK7(}7sPWO$_dXtfj*pJ6zk&2B<&Zk_@7lTqrq zXk2QhX7=S_%N6<{4$GwTs!90=Wo&Q^uPu@-rwb{|x%M}P`#D(c;9*;C7Q(Pko4ee` zc)DZeF`mYsIjLcLPRh?Qq_VSdE5A#?hnX=VNXeM$#t@}i%BZp0QR2%;ZX$$P3Z~*l zGP0#_A#+elaRtl5SKt0W06Ia%zC;KZ%>G>DgIESL3?U-Nu!2Cag5^aaCGL4(P>@`$ zG8X1*wOW-*MOmCvTzLA@nQGOlGD;c97T|`KT6yV>meajc)zciZ7 zXU?CVF!Y=3!raBnmJArsuV%F>lcrCL)+HR+bR;3+c@|_lfz!>0ccp3fYu2qHKiyizCFj!pArydfz|4DFEZ0PR%H~-yePUlyIp^H z_t^D|1^^xd{+IR<0t|$ftKt2-V8b>Cz=jP>f7a9{s7zghP+$@vKTe-ht8UHt-_Fqy ztDxYTRBaUE>(6nF3B<9B=Jxwy_($FQp4orwaop4I*3DeAam}F5K26BUI1+s>CMKq4 zm8w$Swt9Gtpy1#Qn>I>OoQ@qkY}>ZY$;DYLdm;qy0bzsyN$dZTQ`?3O^4jt`v~2lB zljV)&p9^{YBOt#-#dBX|U=buTGt%Q8JOzxZv}*_ofUKh3GZuo?QoT>-TQ_cUCLRj( z*Osi#xpcK<-w#@SJOI*CA8*_B?ACp#RynMG&5-sjGm7(pV^~1x4`c2uUQsEsf`F*m zYT;NG5h`Oj41W!XLOD{rTyHd3fVWsouWMz#6$B6*7{r?`27^&5u#A5Cgncy+OQe(n z2t>5Avr{US49jpF#|yk#tyZX1q9_s~DwQgXkhJmh2}Y#_7|f_OYK*bfYNZ5W#3~dD zN~uT)$8yp(%P(l(Mm*}#VB%&nGh2Zvb4pZRzFWlrhsD-le_`~nm2E0 zG4uDIK6UZ%oH%kkh-kv%Sx2v4R47z-cJ{dj?Uzfxc^)4>V%YGCkrj6D+xzXdxlfYg z?HwJ1`~!GFFj~w94;^~>@@0NWaduvon&Ujd#og7T)MUJ$^lapk(N!ZV)T>#0{K9Fc z?_c?1*oZ+L`hk+$dS-X$&Rsn{eJNsE*J*n`_QLbn*u1Rl+TJyt++5_28UqzBT)WC3 zG;PXc{{Vlj+3)dyXFosT}DU=%103 z#j?!I@21aRxS(#02L2(znZ{iI@Q}pRl(4W6cezJI^{NYhT1FIXpEe%`hK7vz;`22d z)_eN<-nf0sMe3{b_jB|146hg&5FA8+Py{w?*!UweuinLfkq_`9IyOBz76kD24+If_ zf^R^Gq&x-^1_D+9mFB}fAA(x(O59egPlmZsPK7Pp43u8<%Vq~17qvKy5YLO5zQ95bN)QW*B zWO4xVo0s+%Ny#dOS|*c8*15l*|J}Ry7A{{%DP>p|AuRpIN`>`unOrjHIhKWKh~^Za=vH-Ktp$DKG3C9WP$K{LO}`S-IKWdiKaF$XmU8 ztu$zYh$JpPZtdYUR*OY?kraUl0xyb4#;G0bDT{s7ekmy_7@7T_P5gASy`zJVuW#%2?Y><#XZ`N&W2a0y 
zarWeiV@L1Zzu$MtAdR=%Ps=w*5v%(bK6!kShI_>PaZpNqNWFr zqo*#M#WP66KoI~UV8e#31jxp}`FM5_fC2ovIbjt9f&fwWg^>Hv=R;)#`T6-aZrXJI zj|=jkw0h0jA9e5b+2=zi{y5iU(s!!eWc~70ACDiEq08!Cv%`$Z-<`W~HWjiB5AQ5p zxR@53RxVz0`OZxbU!Q8VYF|%`#sCNiPz>*ezCJh%!Oz&RVTfu*#Ld-#=^bUXD|| zu8jGIKoplhMC3(Q##sr^aEcVXMDiRBAR%o;_7> zG!FUn)1>5t&`J?eHJcqie^Q}Td_H32gs&%f`}wwN-`2&$zyHXgg~f#ks9vk9+Ociv z8S*qYRi;+-?%qx65}r79@=I;j*4103%KkBP#-Xd62xQFgk%ZEblfUEv{1kpAP?~Qs z0un_Y{*|vPTOnG0 z69{0S@Mp1M1OLSt02?;S)v3!6ftGDkX+iSm1t~E_Nwpf(adCG+00?MMP*7fezEZ9( zHR;V@lF2v*60_N_kavHXwwdOxPR=dwC>)2Fc;znBAh&Y;Q6x{jbuJ5 zmCw$d1tKKGKD&48R+T<|TK4=P;oP}r2lt&lbu2wLw)?mVyxs^FYi3qf>aP9I6XNc# zSn25GSfhChnM@Y*^y!nMM_8HcuL}HALPQBl4ZP*(sS^g#Bmw~qp;xlJ2v}oII$^4bB?w&&?4BxPEbB)?{zWH{l ze_){0o-3LAJ$m%;_4Pe^{Mg|W$C|WkAq{y`l9`_I z{nABJ20(3Zcksxen7HS1x!ljsPoUzGH7ib>IqmA~66E7|`^L?+C)XAh7AjRrMj@A} zmCuS|CEb0kPIvL@l^XTyE}pQgdd(UKk00e(bmjKV@e?P^9zM&%+hgqHaocuoYuGgE z`>_jhbF+qhF}zKuwo8^Rk?Ko3cJ3@G2dSikvZ!nny}F;<3zRcPC_0}EV6R;oeJ5)hUyUz%T%&jLPr{9s1b3pb6+ zS7W}i)7XhXW0MndjD-S#_!|X&|2f#Ol>phWc?DK#LBJu9FWI!}CVcbnFNOK&wrA@dD)X@XA%fgF>LxM02tY7PEVhU#+P63{Svl#YXGP zC(qpdy^)==LhoLF-abemK!J$FY&P*cj~Pr6lrMjj@`qpwGTE!RN0z%-OTp zXfi2O%AwOo9XWH-%gd`xtJYx^BDd_?G4}h3I+N*(A){*6ta<(JwS7nS3l<^Ly+WHd zZSso?E?mAed)XYl(I}00zj68Mg)?WpU0k9XG<+2IWW|Mb8EF~r_O3tv^kYzX__l+4 z7p?fw%v(YuLUN(t*r^j%L~kYE1B_U?Z06zx77ly+_?49E43wWbb9(PiT_+7e$kH@(sD0#ffk zdsv_?l+5nKhY$LAc}fS_A9L*5quZYLPQEVg51!svxu{2dHvEer!+m|dr~EMG#L*)o zKN~WzZ(k+HR&)y)@IhZ6C%>DIAN;s!?bS!OJG5yVVCU1OV~1S6q?9+B1Z!$hZgh5% zj+!sty#6pI1^@>;`S)$vqmUG)y`051~N3J-ugtJ9cEChcsFSNN zoVRqbi5Cbk_n$^fYf>ht4IuslY<(NH5+EBkM4%!jB0&g9;>+}5Ukr^;O4d>CukhP`z?>xxI!391|PgsC$!+HQGdYN5*Bu*@ODbxw9s{QHn5~ zeR>7`A~=l+*sx*4#{0_w0YsoBI;mQ{N{|OoM&a%NI?ItQySfhixZ{w4AOZjtMM34{ z3?c~W8Rt%(ZvXMe9X}hMb?us$lPklME31C05D`(Mc{A{E23P0jYqu5d-0A1;6`z`1 zzjs&g3%I%DCpD4-z~l4h%PMeKmO%);-eXa&#QNu^ttnJ0D+^LTRjJ8XEb0JIxf}v{ zP3#EMvQ3HrWPlZj*}Y?DhX&21;uA@x#>L5L^X5%)Ph%#Hoj7pdK($)EW7kerA+J=c z`iCw0+BrK)vHx*H$2+;Yq^GCH$0taW5Gq!yB2a=5Qrp@22L)W)ap%;XQ>_{`A3x!1 zJ3IBxJ-ceV$dbjr-> z0$`1UeQ{P6Pl-#Yf4zu?g2j3>;YO3J`jx9z{d)AIg1mfAAz!y~L+4)Ie)xKs^uJ!d zUN3Sp8#Zln@7~>G$Bu^Bh198EM`fql_|vM6Jv%;1dvN&3A&V%s?bH3*om-N!=dokQ z<6gvt1cjN+=E%q}PdE2{r}j`F-+VPC=6PIkY2nU2yA)X2t5>gWyLXlnD&Yf0EiAmaOv3{C#qTU+1!pT+NeN%^U<9)&01}mx~jH+#d$k^C=^WHdi4%<55+&v z@B{(DdmvOm7!eWvJF#KI2H`0H6vU=HIeQredsJ!P90&l5fCrPw)b;a0z=6cE6>Gp$ zTJPh&05aXBN36SN(^jp(LLsO4`K?FDD?n4Bs*9tGd$VT6S=k8(PsjhX9WiXBdUc$- zb&;kP-Z*~x;pVMUDZN6W3J3`$n4uUk7*j$;LHr-H)n30@&hOFxA%vK$7Nf;1S;in| zF~|_$vROYxI)6UcCr}#De*bawvV%X}d-S+QwVHi{5zS7 zefaF*nTr>jG;Z3WadW*vf8)`;$?K*w9LvgO6XwqN?)wD}D*FlnAp#XP?b)*R)=p{v zU@u>97ms7-&wR01nu>MP$;ly4SGZvL4<<%De&yV$_;XSNnAKp=6&0O4bK-&a$|Gr6rczeU z-c7lCC+R`IK?A=WH(sSwTfm}Fs3c`*Ly=YlGXsp?XfTwPCZ{L6G47f9Ialx8GKkjl z`l9!ow}esvL1r`Bj^G3` zL=dsJ*W?*Q9Ie z{VX~=JI52dYYPe!;^Oo2^4hj5g6H({kx(XH)XxNRY8IGw%zorSsP?9WbO>eEHoBt`-~n1Dm9eqO034J z&6^t2GBOSwD@cDSYu~|*QV@mo`wvP=N@~@qle+JKS#NT1a;jXb&b>p2h?VC!8T<_( z21}mD%iNq&K`#J75)4MdFWmx&{;JkUAXW>A5y7FEnVI$K)CuzP)$4RTCDODhJGI8& z%cn?NXs{ZopmK6|DM`y}+@x_{K|y4N3Y+)uQL9y*nztT5a+LJG)~sKvGnhsW9oC~m zS1jYMJih%jHfF>}pY)FEqciBWX8r!NN2R{rssW)FkDYuaZhIL zTF}6)=IAfJ2=Mlosg+xI?YR8>@|2G!ty;Wm%KX!6YQv!_&^z!P!Sw)qv@D~^r`ZqcOi z^-EX0{C#*1lG2ld0|S~zG&y|w*vT6gjlAW<-o3Jm^Y7+A4pjt5sel22AL-4eYnLv^ z8J=Icapm(d()FdumOYyZW1p6u`sncke}Df7jZfeH{T8oYT6)!D1>|MtHDluBDO09? 
zzu`v=&@{4MWW@?U9$Y6m3^Kq6P8?XcVTBAhPqowMUyhl)aE_k0tl6^>00y&E=w903pDVAp5ZFqN|t7=-<6*J__WZ8~TG8|44+)JbD< zQqv&=GOpb)q@;sdMonfrl?LoIm$q(6dvLF6=T6|_ED7QJHEPtZUAveI7oJ=^|M=Yb zg8Y0>PfxBw1r_Ej29wD1;N+-~$tC{M9%fu--!mL10!_=yrYhE2HpKvdQW1Vlx%{y} z1O^y;kl(vcN%^kX|`-t4JUC)+vNHErH> z=!nmZCgX`qCr_O{OXc`2tG0G_ zbdW|Sj~V-Ac%_Q{`}dcG@L7!GpFiKYabsRy&e!9uLCP{&M=*y$ANU>e8-9>pq!TFF*hM^Zkbo z4C*&{;g3IDczQYd6ppXjsQ(tCc zfJ;($+S#)Nhzb-U@WqDWb?eqqDu~p|v2u;l4uB$p3_3jkm4Qs4A`c>9Oc6qW-yFj6 zzprj*!v*5J1?uZF}FoACx^G zAo36Mb#2(NLEaI~q682I5CBBTi%oL(39zPR%e*|mSq?0K5g@lnuh(ikJ&RM*oWdf> zv*ZK2_sSH?u46_End!$4AL;(p7>G^bE!NnlPs98Jin4QkBP(Xb#N-wh1%-w<1_b~> z+@T;%Q*$6APV$d!f0=Y;$R z1%d%H3?fwGf2c%aOfdlV*SKw+w%Fdv%=mW3(9b^eardAC(U$76bFzc|{44^$`{<#M z2M)Y^@sg=TH+;yal1`|PpZ|kL(Km12?DavP=PwfW@7g_m++;a$GPyD_HTB1}OGl0Q z$~(Zfq@>tcV*cKy-I@&yd*cYZpWs>_m2(hJiKH3_9ngg z!IQ_|Y@Zpb3SYDM$Bd+uyqs*p&>347bZFT5!=Am7RV>NRoBhL_d?@MGq}#_I^;a-# zTzvfKMWZ4dD^C4x8qf1HXU~4Diy7T>RO4FpVq%}p-M*+poepkJ&b4dQltesd&z+lE zn%=r*%Z4@USOos|vqvZHAD{Zs_+~AdkDoW=Zt_DHuxnJS{-C~nbF#CiES~P;6~w8P zEt)jFa`d#5ljEb*L{~45s#U8VKE7u_lO7QjD@ILw{p(}3Hf#_?{!h*-8#W<6OzAJ090)KEWdHe-tF~|OP`WjbY9fMg z^WhDn*({}NRdR)i7-UF(;nvk%hxW>lGRQ4t*320P4jk}x@t1Kd%dnW0F_?XiL&721 zty{O?pNKX_QShdwP2ELN?dv_xA9c1kA?FJpWn zRw{Z={iZ|PcAz)1zW&|68B=uq{=J*GxyZ;WArUGk7vIPVH;y0o4haqM^>?gU`_}Q} z$#?G>jE0ocrvW2E2%?}-sVSi>!~PwT%D`YO<8D5_msylUfndOZ$j{dQ+aMqo>GLv^ zU*x2vO`I?;EF%2+y*uY`UL_2=c=6)xdv|m5^X{kL(;E%F`}F+uv%x9J$#dq+o;`1t zcYse+^F}HMmAj`$x;{I%s6e4nSp{Aa{HYLGnN`TYnlL`EAYZE8A2s9ieK+g1^j3&tnJ#TiY;c3nCR==e$fx^*|N+i>~fMM)mCU-JR2+qSxK_jXQUuF+z~ zEHm@V*)?j^IDX>zXWtCXkRtIzM-A!msdUfD;nc2 zXkD?DqqFmpRZCqw+(-BSl9kK$pFJ3#k`xmgTfcEs<&YX~zTU1L?oXdSxf>8hgr~ybW7_*_Y zdF$pySy>ziPMkVNo4aT4-ugZjYdM5ksQJRX$>6>Vyo zTB!tv&3TcYdhkeCV331@L#-Y?3W^HNq97I0RQRxuh=3xn5&`})2q}*ZV=7`2pO&H( zOauU7U@$`|c~=363^)}iKp>koZPF;!4$cmDVjgX{x-BjxVdm_at=qN%fGD3jGJu^2 z_DF$z4-c=8dVkdaqkcE;U7x*b;hEc4oHWi0#?80)aGx-5#*vGsyn=kYckWb}pPMZd zWtJ33nw!%{p6v9{Hy^V_MO~+e7!ogZ{NMOVap~-Ftk{wef%W)(CPh# zqN482TGgpo`Rbh;-)^3J=fOQ+$&a#g)F?Gk^&2>YlhjQi6&m?^ddp)R3_9(5mC-k5!Ba8!~X9zNF}3X0$Y`%c3{7YuPHqKVs3o<@*mGs#391i>A$< zJ$p7~&Wz~9=TcT-$bdm<*_kUg{dD&B#UY;zc6V?RMQah!W)M>GlnT~osmT@~0#I6ZdfKpIa{z4EAOIAw z2~jJc9Y^=wd-2Fw?$o!>2QtKz^2P01x0J@sAYch#87OAWnwF zhbZM3IeB+G03}qUf;5oQ!^0CKC*Z8uYG}s--5rNWXlA!5A?(J2(HyuH955RW%;?dx7R_9~aj8tkjh*;4aLksiTcvh_j|Y7+?yCvj4xX!4tdwRNRI6BR z&eVBwrQ-DU)2lYFl4j3K3P=J7MzgsDbQaz!Vh{m2soVm6!;D~_Hs#wJx2^|x2aNlA zA`f)KjB1Sln_{3c+8F(t$bPy!YOusVzB^y$;~j9f2>6IRW&055`70k~0vsH?Xw-%EJf z+S zBbSq(#)rt@9-51RQrl90xK!GCk#^kTx3qkme$5H`PwVMNIbsD8p zk*NSM1|V;X;*qldKmZJgX-UaOyF;DFWGt21+R?5Nl^ zs+**Hqa}tpb7%Ye2X^h;bzIjk+`Qe_?_J-fbz6Oo_PenYi*pK+(^D55UcRY*ji}mn zsbQE~kl*~(%j(vy?d|RD3C?bgPLgH3c-8z-y@yYmI<-(+lAfD2a`G50luR^#?(XfL zB4)ifzrSXk8Z{y->^Qh1G%S(;l7_uBYt>Bc=CtqdLCLtkk#$>j*r`H9xD>ldNKJ`- z8EaT8dV6@ZYuA3&s#WR5Id`8%*Qi#_U@*9X%d_~HjNGj3+#GKwCocyNz1gHtDWYSa zu9`BoY*px=@u z%kn|zz&Q5k*5}Nr(|2xN1rUm@hVkEj`%&XAox64EH(@A<^!-|C94jLtF&Ok|scD!i z)5V~G5&Ro~@H-uY{}T}Y&vuGz*eElzz#?2fb-GdOHqroB-~dDb79oVCs(oNNP$Rog zKU3_Bv!_mX{d5oz01h0S?EOLmAiu;^kgs%gi)`27)fc%U8}HH%*P$x#Q>AyE;#rHZ|3n_DP$M7c5xF3UuF| zy=R`B{kX%Y5s?u_v$;rDqBR?CB;2W;TUTkXVPsrdX6k2?hkVuT3v3o06l(uK{|{UB z+p~Mmy*qanFI*HG_x$AZGY_9Xo;PD&v--_^yu7tq9RspLo%Y1(QzcrhcWB_SPlrnt z{Ca9}z_feE52OhLX0uu9Ft&iuse4!VhrZI(m(^?6X1#pbsavONRjY>Ii*RvqzI96) zt#(op=Q)1-L~=@UadC-MGSr}1V|#y3RxX>fVs^M&c$wRQ9FD;;Yv$UAumT0BYZ^vovJj-otyPMJ5E0Us&K57(hUj z%N3YmO-2I;aQ)V00BWaj=-s8;uD!b~AQl<)1OcLy0A&CnKtD@j|D~Zc_z%X0%>l4s z0}%oeKqxVSCM zA_7C-7ta?3fnix0tEgJ78sX_HA3*#+bO0#%cQwTSe#ZmhKfezV|2LCV|Ed0l4H1B) z05Yal()+R- 
zc}))hNXlNjbopn4hNx;(?J!~pWE4PVHl;$BiQ^zXDLga`>>WG>T9TW`0xHQXO1$%+ zeve-F_wK_K7#T}3SVd7zDEixkjzk2d#B%EPMINXiitjm55deL)#z50k)4aXCG_0I3 zSQLa_Z8}Q6N@!5<&0Duvix}+V7gbQZNxgb6a;KkWWN?YedDMBiVud)eS_ zg9h~I*Ce5yOrct}bv=X7=+R@URjPRB?tN!hS4q{!&&lnRsD43}BGzo)Ao&jzz}v;` z$GJ%mbp`%B(9@(F7%&!()H7qbVMD5?IMav)e ztU7Y_d{{_mlvBNmk>NkA{o&}vQ(=K&y()Jv)t5etiOJAqnZUvTRvOe#o;{Ot9R;A{ zfGsc^H|*P4s@1u04m{64FUpWy0;$#b{OL0vf7pN1#tk|!su+8VXw5DyE-{u`K;(b{ z0G-+3>E|Phd*bfqo?BFi5zA4Y17JaJHJeislQ3Wcgr@?gzyd~q%2hrI{54RZ2>vP9 zusHxWZ1Q~!#vp-_&SEr+7B9wW^ypE0_UyWr`bdFQj6yCDLI9Zr!3?HAuYi)mEc{zx z6^3OgrB(~Ma^*_9c5U1}-2gFE6cB^q{{l*(96=P`u@8m+3oypNZ2t`=u)quN>eKkm z_l{rtdm)7WB10d& z*A5E`zJ2;Mwek&`G!d6;klN=jYw)dDk5=$~>vp76z&p%uf-g7xN$o6g|=$&gQPUA;PW?#vH*^cwcjs1jZAk&`E`#$D^z zslPNfxK-nhr3U?){p%VA*BLN)P}A%-51vHtxVW=br53}73^nt7QcAK2;@V9cdiLuZ z5?p~&TDM_?8FQv(YYQ`8W)2zp>8TSZU0vL6-@W7I=5FU;UkYY#J9SQe4sS6zINHfU zVE~iWYJK|jiB_+@9erPhlpl2cFg`I+npyVnX*6)GnsMsfx#N)|htrEP?Xc#{VZ)`V z&q;adTC@Jp;X?xOH*elBfyKk#+c!Mq<^8KyZ(Jonb#M(<1iivn+G50Oad^f^yJr)m2kU%h#{p zLnwLk(|A*Fo4o!&M1E5*fl^xjng8mueGOeviB_vsYc!M+sZyGg$uL3;!(t8*5z2a4 zFav)K8}wgm{4c%#LTl>d=)lHxfY28)@j($0P*(V7hBb5M^&9r7#>X9acz*p>g@90S zbAv1`5FSEYi<43=pE=)s&=4p#0LU({+YlHW6j-ww_;?rIx@X8Q@~=|E5E};=<-E-D z_wbI7k8^f)FU-r==H$N;{RByvk{~E9E@Baf=)Z>YQcMuMAA){|tAH4SfDpmrz-@YX zYTxZM06;Mos33v>Z-6L8fG}Vgh!xfTANT&~am+J+cOS0USffgfq@48YH*bs@Ivf!y zY52s%#N3a5Fmd9(i>svuCjnk`vtmJf$(ZDY?n*sdw(YLNaiQy}DJUUu}0=!Ri+V-ue(Te$pL z?9=ZzE#ZNBIC|BsU8ibj*pk&hvPfA>ix*9pcjM}HsrNU(G~WoiV8;+0v3@yj?4kt= z^Ng8Zth=2;`NgmiSvfiLcl<~IYFDV6o%ym-Xt*RMv0?AdjM8iYC<|QIHXSdVJuiY_ z0HdqS#cH**YuV=frHiG+OaVtkgiFH*OkmNl8jE0Yb9SSY6ciVh#RL#w0wRlKRK^;4 z6HoZ&wHqcTCQ2?!SrM895A?0+O!6m#{Rkmk76%Cr3#(nb&X#SPS%wou0RRx7KcwSg z!v;bBmKgvWHpYPSPNKLU_R+HXy=|CI;#-@qCiEUKet!C14>^> z0a0W!T7GVeL-2PaS^s6lBoaI?5<+AgCwT>_X{igpUwHQ1St1fiJ@BofrR8Sso5t>Y zLWZx~ofzSAV;AAK#<|OnOy0Cs%O3a#vlG7@ndkZU?En6FJf?4t_Y?GXZ%5*HZd3Zc zJOie`-oN}c;CJO5@c-N&N03N_>;jLB>0T2qR6bS;QB?hEY1A)PV2Xk&NAOjGgLY)RjR{hkqbGK7F z_DRFMB7HkL1qOHjbSPvO?D}ba-(e%04IHG8js^$2hsTflxqA86s(s+crFE*;^siGV zL}Pd5AY)jdhypEpN%XG}-=hEw$C%A#nSv{^ z>Mz~7DF7fKK!kU-m9jFK#cEbEGA}!iUY&cEYD>=EJ8cJw<>SAP2#uURe7*=YJ1G74TR@D_3&G~-r^ZX=V_rRvj znIHj0pWqhl+iu^ob?r}Uib@L{99+K`KKkIH11;OMTDEQFteG<{qP1W5 zj|04XR<2sIVDUV}a9(yk9}gR{V#Nx*xw!OE{?q6uc|<#U#8+`CNr%oHIC}PIh*xm; zE?w7cSZ}tPF`%;-&bYgKS+s>EmeSbxc+4^!RamGXfT&O^EEWp_Bmj>Ac{zGZH;YZW zj%_-eID3Lx>9zZ};a_t8K+DRm&|gyL^UF3R2;r(#s_fXgGb24iasUuQ@SlqfTL!>} zEdx-_8?9`J0Y(H~#>!ZRr55W4?Yi8(b@zq#rN2vnSY*H)n`SLEg8I@4qV;VC-}@W@ zLd4%;G@v)4qv*~5Ee9bA@8C~60Ix@+ey0gk{wVxQcALKbaPsRt-#9XT`)3H~Pv{?u zAij1S`gR9^;$Oc(`uelK35zg7m}M9%Z&hnlk^$l5==lBj3)^?s+-ODB=3* zZJ0vCUVX0Y+1<2ROK|gsjC62N9@@K`A-GG|9$+*kJbEhQX<%qLSgqjd_F&IJJB7+B zZ39>Zkzv@fFDXK7`3Xbe_hbNm+W`&x+H= zeUp-uOj%~dsukHZw{B!q!#2%I3JNQE1xNQ#eF=`0H=U2KL7mwgZtO6UW*6`jeh#Ldc%4p z`2~ETF0nAKd-Farwer&StG#;l_-Xx59LRNGs9UqH46*kf+zN2;$ki55nY?+s)+dgi zUAk`l#2kp|SxDu>#&YTdniFW-{S0sGk}BTinun2;RjjMM;7 zO09%r2q9+Cq((|BHM771h#jNaj5D};q1FKT1 z`}glZefo4aH`l9IuQhMgQqIZ+f&UX302?+4`Zsm}Y}gzC0)PP%M1T@Ig~n{wMLPKl zCcemQv;rTb_WH1YziDgcn}J}Umi;Ic{QNWl10E`f!^fx~Tgz($$$M2bY@J1#9)AG`nw+ERBNW4FS{(c95;&o#m?*sx(_WH|g;Yz_bs0b{sx=xk6>pnHXIpcHs=;n2~RA9M#1z(jxo$T7%Y?QaG8 zFgYbYF{FJ9P{|-$R}`PK-hTyS-Jc;raM z;4p>S-WZ<}7yV4~-xQoehFGh~B z_TW$orTw}NXk4f9`Lk!U^U|-xU2apYZM7=Zj~+RhUz*#mOK+uvI^SU2cyOZ+TYU`q^5xX`|#6R^Iwga{z4E8~__Ogfi0|00XcLM?|X{I9G+^h)+i> zUAeRb3^G<(BIqz+6{`>lu@auXI|D$ez_6^Fo6Cg@XT!on5rhACtbe^hFU4%60h}cz z#WGHA<;@t&e$zNc82#a{!9U+jO#ZEA2;x8gD*t?Q@OzYG2_}E^^M9WM02CtzUh69m zh}bZy;jW!K{QQ03cVmN!1R+$G%Cich3^UZkBbEaUWl6ysCr?FnY!5O3i?k5naK?@M 
zmo8r(`1N?;fKSPci;b<)p^b3o`P#MXM$VlM-u58C*+pyJUEQj;Y^i^lsi{@5Xy1iP zmoD}EcJlVci=0%Nih&`S32BY{4Scd?yV+`CG-@wz@4}2M1;d%Fmhx}(U*!PMUyeh} zBG0QBqpSSO@26&i*7~o3vhNy#P7z>AIRHCAiJU zShW_@m3!yi!QS28MS3G2eAMUOgGYp-Q%6tgptwOm?T?2Hp1tgceuD#QVp;&PL`p|ZP~uv;Zw)moZWRr`Bg(Isq8dY z9$!^rwUOwZ8HZt?k6gEFTSk6XxL2S_SDI(Y32_h2%+JwNbM4AC6Oxkh%tdP8T33sT zi;2w^id>w%@^!@)fmg{C7LgYjWVV_#z{s#%N6Z}N+BI!)^U5^?u?PU-&rT?L&yUrg z>;XUo0Prinf>7e@vhuQiGqFG^m9(5iQA7yA|4O+vX5jJT z$48GIm8^e+ut*3(OnFiMUx1CjPCbdfC!_Lq=wITHuq=D`&fQI$H;o@RUZqyR@52T! z3NluP5CV(HVhk9Nl7m7+{FB&-`gMRnROj}<03e_S6T)1{)1<5y=>vxh1CfGG$2mEK zgol^hf9mKLJo1Mh0Ql_dkGw*{RgU&joe4PFzkFWk^CaHR&aPh78X!RbVIzR!fES7#tY+Rhw!{gnhOX<3e7NZ%^TTT^g=gwx6X+GYK0vDG;P|VZL3yez8#aXN-M!zqa?RP_%}69!(l6Kt1Ns?DC2QPN3|ay-GwY=#PhUT(VdV;yV$+hc zUS?}Jm8+BM<-~J`?;Q3~`%atn?dh{;uHU+zT$0Q_yE~xA0GUjdl9BQ-{=PfwQm1|$ zPNg{e{7#WcD+Bq9w6y5wF($C|YSB$sqCIf&AiBu#Ab$DDSZR36nKLIkwd-{E)*ZP@ zMF5g=Q#fE{D%bu4KU}b4@!?a4NdHi$oAaKEOAyZ{LqPb?VrlL4&ej7^3hW zfeo7ss12LAC;rpn*z!C&0wfR$h!Ftk1)L<|J&jRzFXG91S zQFL{4wpc6%yQe>O}IYGw0TqLBd{Nr^S)rJj*VayhjlqT}? z^ZVqJkKy-W^Yl250|^U&h_D<<8&6JMs9LXqMx_EP0Re#(K#dY_KDu!5*zjr794m*{ zs}K$>TwK3BH{)gBNfT8;n!J1W9SZZ1Q;_NQvqL+#e>iMNqnpts9c{4z4@buC}2P$5QE87WHzWhTs9ot+BmZAqzMzBJ$p9qhwnK?=Hu=@ zcglCWx9{YMb=JbU3@GIGn#`i?I}ab&`+4ubd@SR6N~`9r@-FQ4@%Eha(;^F)hcxZ? zVT0Zg)gyN8- zcPe; zJVJmtvv#9~kKD9&eX1e--KeN=gj2 zY29YbSEJ>x^dtyR5gs& z^HbrSJCqVEB1J@`b9hBRA0Hx$r3SshY~dJI$;yZz5<=i_gir()mSh&^W*Tyft$OmW zDiMm_ISz`7A`3V7~_ zp;M-OBNg3l+qM}1V}k#*Ek)2T^X)OvSDOqAG~B~idX_7X?&cXwWbae~BIVl@#Ol?m zpE-NV$;laW=2f^P9`LBuvAcX#g?t?#n972UM zHQNFG2drGRT&+@L=D*SUr>{q|X_?3+;J5hI{Lis}BLIc`5Rgy@-~Iy!%wMyjz^W5~sDPV1Y>edYZaKf-0)zu* zP`^PuZ{E3gmldos;A(_dadB~x(gG9!1H!=gc>BD_N)7T4PD@X08dOus%I+mT>D05= z`UBf6AToe4P=BR+gQ$kbuU*tz^c)}tkOC=#!b6|L#~>hbAWD#%fOhQE_2RWFI-L#x zFw2;Eo&phoci`+1Ay$#+0XU9T%H%qOjsPVHFoploYk`1)SKxP)0FgibJ0JpO8J2)J zdekVXNh2gA2qX}}fB*Ed8=x?~{+pxhgG71D^ zCMYQ2<3arb@@yT zAR;EnUg6;5ZUvDARNg209x!4u($X)Vzbpr4(c;BxHf~TLO}l0t!+j!`tX%Qkk29~| zyxF*ZQ(p)FfZ(94-0XzZlt<5^Z{NS|>h4mrPR*3;B#e-!oi_zWmv#LGk!~-xYd1C5|vfzuf=uF1!!hA1( zUlG6p0uSW=<3~4d-J$?0)=s)dDkwU1N^gdKY__s-aa7ARW~`iGRw2 z)z`~+;Lb0YuQxrV>GK z&r^{%EPDE9FLe}%qm$$5)29|M{yr=$2s3~%#TWyo@E_4LX2aG4V8ezW3Jm5%Vl|me z01y!#zF@(EK_3s!&C35vgVl%twE{d&jwOH?#1cw=(|<*PD5at(U=G2*0XA%KEXNBL z07NOFlwt;?HP7=hnGF7G!2hFX2>tSfB|vc5ZKD;i?8eb!QC-@BjH^5OheIPPRIXA*%bNkC%3Zrw7HRy6L zS!DRZS&LdXX=WGIz_UrS=)(u4=Fv~4PCNV4>gq+sbt_d%jDCK9%XTj>Zw|9Jj~=sF zEN(8YTCG-rWt33#{TKDi43+*NL03?BOBm@%0`mK@~Sr3Hfr4qE>*>*U>QsU?{JvI$>KdVK%x@$1LG zUNM~mJiKx9pwO_d=1$pt`k)+;9N5a9J}1$?A)VK*Yj7;9=LN{01|E=*uG<@ zHG8+_TeJ)?a$x#)?s;JU{tRQT1w;g-SgX~Q>I5Jx$OzCr-FxoYwj;hMLjW*>2nL-T z=;t4Nym;ekJ+bEUdTmxJ0_>!*uNoPdkQA)|l?ki}*qnm=O?!40fe`_dT#(5Dhd{6g znVp?OT4^p%5drcnDL(DRi~q;od4O3_o&W!P&Y8Jodu3@$@6r*FUPU4TVhe~^Kn*6b zr5O`VGl|h()L2WRSU~?sQ;PK7d+#i5VSC@Zd+(ic&ikK--Gya$5mDJ-yq|radFGy5 z=Ikvq@B5y&Fz~Xs)`a2VVNk02XIjPjQpB&w&&mlk!oVHsCe>veq@W}TjS&%J_}&mA z6!01%1ZNCTnx>J0Qq;eH|7V_k=DSd6s`z6V(_I!IL6YpR8UAtN^oeE5RutqG;2I!A z2+Ou~%?+?-O`FBX#g~0ce?Z!Q1(ymC|jH3io=WA|eMp z_;8rpqcP4DomcXS;X8*B9Bi&EfjNuk`%o&tIauX4j8V;RrVj3fD=ae1ab_piX;LS; zUV*=8;sX!(N{WZ~zBw_z{_dj(e*Mf-$IqN@*18pki$UAHcTav{VYG;e_QVpz!j;S4 ze)sKCSM#-_OIEx zI@}ZH@kR)^*KXaARhR?lL6{nJp=-{32#UZsL`HGe!i5X`vJ@bKl2FT5tjO`_Gw=v4 z!iKeR=ca?l4gnmD+fWOaF3F`rg5biXzzb|2hgiI9X^G)i3>tG;kS5fL^wVD~m>=g& z95G@fAOJEgh79fF0Pb3E&zGNFn6ChW@(2%yhT28fb+_AH z-t}CO(3rGkwRlCsHA{$44nT+yiYWk!?12LYu2`{*3piwEWPkAfhktnXj|7rX8U!xD zzj5puF)#wBuZ*~?+U1H*xOe>h_utoVU>{s%9)5W8`|rQ63C))F`8a`44nQaepn6~! 
z^yH=5*VOk_2=_7F8YqV8L8R9qGC^j zhU=Cub-GHq;uBDyaOmikz1w<@89n>$f8TQR2zUFoFy+?iGnG;|+;Yp_Wy?FZ?^KYP z?Y41n|ACC`tdWmCj`)amFTL6Cfr0`Y;E6cH2a! z3*pSw^9jlgdg{@qW`8+v*NJ@!M7!~IPP!+0T2_=DMO|X1Kp+mp1`=H41h$X9+`d|w15j_WV z?bh|5Z@q3nnFy+Lpqnirq=B`W{$ zORNB>OMUK5n>KYUq82Thaq*Ss4Mn?m?df)XPmimt3-z^k-VPDU0SFO-926Yt)~oZx z6Hh$wzyk!*nNFZ@pASIn!j|J}AvDb*WBIaWet!U0pCN;XxLhvN3SO!^a)=NF$r!tF z=#9gN4@VV3Re@~4Ai&Y==RpF3t(-jvHelK)k#GrkHE(j#-eoK1zx5w53>kS-F?a{s04L9H7JDb{} zQ)fiPD!;X5^XBXCyfab{Ly>RQUte}|g)`QzEe-f}T_;PzvUjXmD}$y>(-^KUf(r{u zM!0ijX~8+|UtM<9qyp144a0CWV*zdE+&QZ^Y;^Ik&ewHKj7WU(^?w{aab(ocQT1Z$ zYjB@QKeKSrA_ms5PD9RJn|E*fY}QN@R+Kjy;D%uxK6Kax-2(YkAgN)!!9xd^D!&QU zrcL|g`VEhsI<#f?Ca>_`KWSpyX6+eh>Eea&y!D0!%cFU0pwM5mZ0TZ489GD|7NG6e zy?e**9THX;yc*!-DjYq0*nr{H!U$}|mX&Y3^|}qy<8sG&W17@$l$4lAAeXLlh*A}B z77A>bWm7Vt0MfwQw{B}0ri9IuV&EpE3EQQ62M-;ph$NIjmVmx-+ZK~-4&4TqjDvB; zm;hs7RaR4o6s|QC;d?=dPzX?n5JF>&m-{%q5w71n_0;fT!!0tu&9_rj`i?S!P~cZ$ zkhnE(T6)^LwQF$!g{#AeTSho$PegdsWqLvp|0=oa@S3HT2*CvxPd)Wi+qSJk3V)f9 z4F}i&0gQoT;K(1r14laF&;usEc=yBg^B2PeEQO%ewtJW1w~c}gxZO}3zJOES9!4IEOOlYel}-WzTm)2vxDP1kgxxipc`YoP$|B&EEKj(Xgs(}PXZ2RpE#U(GRhs>uUk(SlmrUl2hg-$!$E!erDdj` zK6BCqcHbRi-4La)&-(KKj2?b-9X-ko6%Q{=W7hoHHdGiyk~b>B748AN&<)#OyneNh zOa>;vOJS5UWk5*=#XuLk^VJ3Uc7zBKG5|t^kTwBOQjvl*sYUC?|M$?S|uf_CW$J9TG6@xP}eQlV@(7^kCmxMnDHRhXR*MZ=3{%6Wh1H^u%Le{N6u5 zgY?{bNp-b|sDTeYIQI9?WN+I8DGx1Ko{&`M{(n3RA&QS4{dD?gEItAL;G~~D7T&kB zOX#_|xrmHhx@fVJGY}aTad6vqk}^Ij7HR24`31pXkQC{f#u($Af9FBjRZIJG1~=cZ zXUDFy2P~)>byJ2KVCO`G`26#^+js6}5H_s5;=)IN{n$-E8m_|);PrQ2pSO5k>*j68 z-8Mmk+l2k>U;eQ7_VHwdXM@TUCzuc(mjqd4(|K@_ z;EEuQ>Vyap$^i%w$^lrpav2;Lffxh8VH?V6k2T?*`!aJg-I}MYrGJg~C#TC43{ec6 zu6%ZkJAd_(5O%(1Zp+5dmPzK5vgPFjSxzuQ^)1+-xFLz$-nAfuRzW zExl^bqHmdzz%D*6fhr$PT_$<%>(06Q5i345UZY*d31zTz&^eqPuAj^50{ z^nn8hzVO2H*LUws3Pj-IhtV6H0IIC3Ua*;Mk}fWGtlrSAPaiPAFyX+&htI!s|8IT+ zq6O2Y-E!C6AcY7WmJDWQdy?wGXHWV2^CKsYPi)czC8dargasTsj@ZONTDrGgo8YmO zf4%U3zy00QXx9XV)~2^V?A31o+Oz;?2-q7w_&7{=cNsKr-^PtjBjHnrj}PiMPzC(k zwr*|Qx<%dYJ$8RKC*0$0*{|=erAq~=iX#10zF1r_REO9V=#Gk&x&OhT096w31tPVn z+gHxnw_JiGRE;a53ZU{W39d>Ck3I?itU?nOAEYk@pDAS#-`_F2tb4Kz+*59kg0*Yp*pzhU*Dtq zly&n100(D`MMp&D`SLX3wxq>@q`1V~{M^d#$@%j``BjL3ueyTpOP!6bYVHtYym|9x zbLV~Ds%;C1Y}HK&#;TewdZpKw(yaM9 z%1|SmYPe)pN$n+~Dh3i~tonaZy=avpsytP8>1x|DJ#E&bGVoQ(5+`rD1ZST7c2=O z?sxKW9R$1O-U%Q~f9>tD4^4!WP7R;tiS=Nr`|rMow`vL-oIQJuLLZVFBaDCim*?xaqJ}>_S?<`kc-r*gci)9( zjnZFy148Gqv9^?cU+K}sOPC_VAK1Tg73Z9!Qmh8EQnBx>@ISyg!4}FD6Pc@u_of_V z5Cj`lsa}enNZAq0t&`i)!VPKJAPEYO@cMiu1Pb#;uHCXuhgK&lsZ-Z3E7z?`Nj-in z^+-6v!rc+Q2lSt}bY4oz(c{Nc6d=GgKmglzZcWe4vS7Aq-d1pa_;|{$-Mb6~o7HXH zq)z?)`}gN5AA{bxW2fD_cN;KseLe-I!5!rZvx7mm+v_VTc0;#d5!n9y`_81C@aP`P zf`ZB^&fUIsn?L9WxB zBu(f_$y%R`B4E$H{Urs(;n7jx3XEU9wqHUp&iU=Pj~OsvKxRg!5c;%f(`L?`nVp^O z@x-A5ugi@JEZeT+X|7q3WsKF(42U>!cG-$IL{#Hd(3bMDo=Zko*5-^9RZ6PkWge`W zlz4u{D*%jD+2@r;6uW3*RgTMfRpX&@MY-IZ6rAxNRIz^rfK^)x05&gQd&7m^a#y;c-6XAsVy4SskdR# zBCcekh7I%5vu%=Ymsb&4hQS5rs3jzW6(;5{S(z5dvY{?%O;!4}3gRv>LAHQ4d*yry zn6NbHd(RvJ;DR0>9g`mL0hX`Cmz0!54qz_04OngfQZc|MeKwdI;@)0kvNF=vp4z!T z^@I)Sh9(6|Idw9if)>aIPtDFIi}ICEgRXH`Ttb2csh~IJ~73^70$wAO49dDVh1k9Xt&PrXd{7Vho4Lv$CI>VQ5!(6#>rGm-90CGVaG-y!PlKED!gh8dH zr77Fi;g(<{ZW*~`*;1cTLQpY0Jc8@@-h1z~FJ_<3PYVJ&_wEY7wqOf5umBS_NdN@m zOmNOL>8b2x&@^4zHok8#u#!>-zmz06!~X~&LOB2-LOB2qvP_W$2t;5Sjvqbs>tFxs z^Lg_*;}vz6Yj1s4FwB@SW7x18JrS<+`kR8{;NPJQP~Dx*$_^yq^Of4t_IkX!OD9Fj zmNojb8D~|E0pb$!hkEgL*dF`ml(OyqmnZ-BX=j`alX+dR2ofb}=@H2dkXMYe`N4qS)1w^% z0cc(n1rhF9`pyT##{M|%;K7A+=RNe?U#9>5FC#{cjO^KE%NO&)q9Z$W?2Hl<5fP9E z!ozVq)6C9ig0Eh;Zoq*4uIRXvtJde{1WOs_;MMTgaZ;HV|}H5L)n0;FXDy_8M0u*>g>{7 
z9a006Py>4o+`MHofow<%GC3(ZGA8=av7=m(2D@%#oyN_ZZQi}Zge2GvwfNDaM}IVL zW{Kr@FCMz5`dTUlV|3 zgA^d(0C`Rns=~P-Qk-h14vM;}IRFF-3k(0~qbW{UV7UQIkhUdsy~Z4Xp51y8G0Ub~gFDX4r~N%ePNHS4@eRhS<&hRR%C9l!kemBDb8p;|j` zb%-1x5U;YL!uZ#YodUxT208}^0|OK`uHVpmz(Cl-UsxQL5G5EO6+B^+pL`O!0D(oj zO3&tp>uyAZr=2*}x^4qla^;o|FW`q8j4w?cO{=+Nn*`i-Dr z?Bt&#A`*pxZXG+@jF1ppa(wTcH{QJWmrtJAxZP#3&1=@xt6%@dNe`tgT$pxjdEai` zi}DJh+P8N+(@|j&TQ+RyIc6k0ZpVJGq-3JPfKvp9s|qQS+%{mFZni7sU@?Nk*kv2j zGOB2euDeQ1KL7^Nyirr1&zF&%&Oi<8CI|h2;u7DkgS#1!=!xmux35zaKes3^&sPBO zwr$(xW#pQLr7KsiF0k`B(}HAj&UM8$Z`lOMR!v)H7vwpeeNShd6j{3Ej5KJG6dk+w z;Lgn5nH-vevSF9l{zZ!xk!9%+CYb;$D)BjDg9ogjOLtqA83A{%?mbSVojIO<8emG= zia~&ESZg3c?q zPzX?n5Ue~IAXbb1;Dh&_lz@YW4rs10sU&I%&cM!|&0M>B_3d}x2Ex^aU9=iN3P8CT zz!->NOQ{Qi^H3!%pUSm%c3EAKe^o9m(RZbQ(lurQFw{h?ex^*7*=LuVmHL5~U1p3a zsUY~`sWXO;xxM#|H$qDI<)$So29CN}xZMa41W+#=K?x;Lik$R}uzC&H>D<=sx@?&< zuj}>Q20!r-fQ4`Tr&aS7Ert%t&C1%hX?=$gL$LG2f_d|9dHRXK{uHmxk)89(%P&9q z%U@`2*Yw~0wf~So_dfb){`wtz)@>{m?8nbNdvM0*DT^0t;G8+9g0qE%h4N@h+ZL@h zuUH-K3EQ_~H4{Q-LV!uC)O!3XyX7QTQgHIPS8iE$I4{KjxwyuaQs9?*3MWUv00Jq~ zCQz8>k+wacN3WD)DftCC24qfQ0g) z5*9Sjxd+46KnZbirIw);=DXlNA6b`OQ~+o(U<%BJvLFj!6#28<;H{FHre+s1+}`G>LOozkbrlkt2^CJ#JZ6 zZJvmAm;2c9rd!m##Ao2d+4&Dyt<|J`F-GH&_V4&T)|n zL6?fSQuskZmA34{AR`V7!*9JE3UC2I3tS&E@>aMR4C~Xk-nsknMNiS)#!Fc8~+}zL;y&d&7<`r7%6r)22n6GiT0_5lA_6 zLPAEubN~2B2M->cziW+4cuZ+}xQichLMVV&Qs}F%R;els*qiSsHuV~kT( z4W8H){2;663~+L!%>57MaUmi6`A^?nb8=@Xf>wnSoH4=atA1Sqcw*8cD_5=BcH#iz zx=@mx)6NGC8oXfj3X?1s*zF^4+qh{MiT@0&Dl!TdcrCly!(v|io1 z+qT@cXSW|x!XDP4my3&)dp1gF2Bg>Jt{M(yqQZYq z05Pr@h!n3$Ygd^q?MrIKfk9!crexrda> zw&gd<0SFO7{nu6iLWKOLLXn~ZUa=^m4;cGRT5Pv&UYp|DDA{%^?j+?arNtD9ZOAW z(g>PB(1ze}3z(QUZN{iEw<5w75uE^EVEGsGhK?W0BEn$+8Ci2apL5%jk7VypSvza~ z9Zx-ZV9_$ozU+;vv2zO2WOzLdsnXe*{`2Q!+IO1OzGFBPs;v7wzLcL^R9d7!K1Tg z`U0lra+yZ(vd(*A-{MVsY?H#vs?f`fi#FR$BG1Gr8-?qDfYF0`J8HTy{uxAqGGC? 
zX+$az3@|W7wqZ%Zg;W6smsBPyDC@u_K_p2#S9z8(fp1Agn;NGb^&79h(YIf3CBb$4 z?-C*u0u&+yQ>b+~!3G&d=|>-YFloX)5z$eQ07FSpu{SyvIxo9<0frwgAwtk)ZgK`$ z>88+KQAu&Iux-<(!6SylFV}pz#A)MY2P`na27Kz1H(nY4)5%Ekjv99t7!EC5e{Ao8 zQIGs&=(w>vW-el-M%TM;MMC0jk4%QosMET|9g`l!smylWuV+H!?>ZFMu5I3$EfemW zh$i(udFj<#Mve@RiGt<^c!|#!+ow;@UVY~L?eD$&^zJ@%2>E<0DjHe2kcf+kN!z|d zZc}a>u8y6Ml41l5UDpmy`Oq*7jf>KtpD|7htXf?%V&8Eh2`5ybBu7xU=dN3t8z^F2 zC?zj0F{{2RCImR+%NAe&(;%1-rnEJ%AcBesMN)&v2^M?xFa<`W1ekzU3M&W@ph1HW zBxMj%!qTCG2w?eC%D^lr4#pIbl7=Dyh6)H|Ljho5j<bYP44w^$j1(&(BbiU9n2V zI74)F41kD;@CFSUbnMuvQF3z2R?YJZ@^|jsIe-5A>$_d=^?Dg&hG85(e)8zyBepa+ z6V)Q})i<^QoMepMdv^c*Z!bLk^iyB)Fr^4rix444Plyn;757s6!RuTT04&I~jEptw z)^Y+EV5-E%jeVJ!2iI?ff+L^{@*!lQ`u(550T0VRn+^kr zj$FB71&fXb)0Qt=21Bl0yB>xO3**P#6CE9^B`2?(IcxLWxjiOKnDg)dtbP04+a7X0j@J$gQ)0@tttzyT~viQu=;iD zM@PjxIBDW9e({UVn>PKw|M#iWt8vz>S+BhE&nJKJ_#JoL=G5nT?k|7duwmWX@4V^A zYp=cLM3KGm#%sU--ESX#^s({d?{3zt37{^>w}642Jb7yE+O-D{9{#Exj+_D4eBM`x zP!2$dP!2%#0C%Mn0M7V}|9J6tzk7y299k_}p-M3jgHY(uqV>)lyAU+??AzCAKz9&e z6Lbaw6hT>V>EN{*Y54=l0r;MU0EMuX(m1b_$6vk$!Cz1m799-*lp6DK~Ff7*TMfT>U?A_viQ@+k_qge%{hKzo@6rW^y?6b_jyDbe=*54Gd*ne^y#!cT z@&44tPI;d`y;2sgY%z2&!T}S=^;v0Ir&77nF!Qs|?!5bMBsVJFwkWiDSC9Tnwmn7nJ{iY_oPPyN`dbW!2vw?Le+>fb%$4EUxYK%7;+aiM&t;Fgkb>wzft z)O#=NFFa*{DNaxXC=Rgm6?}=;7y={##mb{ubOi?mrD|HQ;Sda*pa4ivl{u=4Q*`Bs z^XkdUjB%nGj1vW_$vUj9R7EDX8(9(b5Klk2xE2!MdR;+g;c;Lcw?-m+oCxHvo~b$j!a|=8;=huIVx6)}qXuCdrM1J9eg>J)2Tc+_8O0myx5l zPybA}RZMuKV{&Z$d``E{T@DoP@7TBBwiV0MKmAk_8Wf0~_og${Rv}rE%7h}F?Myj* zwD6Q&HVY*I@XLDrFFu)-y$w(NWFy&pjR`gMuG`6bGCF{-9E@$-Xj zugfW9r|DYbCXHsyn11TiNvA~8BR~CVUQT{#X%K)U`%14Kwv>vLWm^m8&;RMqe-o61u zAwq_|<}cPeEaTH%{pIjX?IuHnW8Z=Dk*m22}DXOHDCpT`_vQ_S;t;uy7>{_?c6Bf~J z&>*K>fLnJJY}pzpEp6YnU2azP&d+8=khW*TR%XgW8#Z`dZWpl%$2lZ}P>>sI6#`lJmKz^En5y8IPl|-KK|FM|LWSUGvnawn{@0_ zAYIdQ@^T(~{IT4e!s^}hzW+G@AwmW~h!FH&wgDV8=xf%l*|~cc1g`7Y1#S-_BH|k- ztGr_9ZurgRbLK*!MybTLohlF_L=_e0GQCBsCBOviRdeP;0TK~zFG@@Zao4yB5P-`K zTY@+w0F|0@l#2WaSUbL4aBSU51k~{1w3l0^YmLLTJz`+52MMcP}tLYA^ z(v|>qC82nnt_huhF$WZ3ZnrIMRG^g7bpZ!iiZiZKR9SmiZ$j+(2d*G|LSP@Vn|s`4HItXhXI%P`z- zuLJ=QU>5~{TNOVQi@@2bJqMa}xDE{Po9pH;>^E{0lnECH2Q=4~4`%he;||2aVdk6f z-RZ>TM@NAGtNhs8mM-huyQl69p2$MIBrpXxM?k^DbGIExXwecr|B-E5TMr&|YVRIl z%H;k%Au#igFL&zDp?*?5qomZS(4sYIjDp-RmoFJ{&xE|KyRuHCHfxaVX_SoU$n>q- z>NjaweDt_c>QC&}6~WS->(_Vf(lzVE>HPF800FW&!6m^I*`#bGnGmRiVwdg3!HIqS zV*jWNXY7Ijpg3?&`xmgowM_E;tIQ)2>>Kfru~22i!iJG8lxdXl$+Q5Vl%+)_5eyqI0AwlFknaoBuXvV5(-pm?p?hB z7Rt4MMR2=1bm#~Vhm=sT4I35;GIC%k1OSCQA9xT^Q4oLz5azw{K74`hx7?`5Mj@c4 zzVSBl3K7l8=l}O#|Blmn9Dvt_k`ifJu+gMr*W>{`m%aWTGW>UpzO_~JRuT0Y#P{mC zYSqdG@4tt}4M&U|4Y%vS&OPmJ8lIMV=FrO3Fic&D6*Ff=wP<16_P#}n5ET>U4TsB} zdg`PA+%z1Bz9Z%R&s75s^S2MgF&IPyS1uskE8PV)gVMDxQI6-90< zeG4rME?t)#B*1~7QQ#|xt>1tNSI7Q+Aptj_2vS1VV%j!^284w};pDzUEjxD_{;Nme zh68+rQXn%8?HfW7f@;*g_rlH1(^+%>-|Rad`{{(oeva4}=`zIqc4QI2~ ztzHKXSeW!lU&`t73{y+N z1~evva_Z~vz@#Qkn!|6xQd#>_O0$Z<7)t%0emo7ue)s~ZJN6-3hu`mNm~`i@w?P6; z5_YawvF7sy2r47D;N*eBDDh8t;uqO__mylv)S^WTrz|64Vw2l}5+Y;(ga~0vN%U3cDMbnd zUDp6uw#C3|)BScH&U8so5J6>a-MnM&{6%P)^rN5LgCf9(Q4{a2+p#VDIJWD+m(%CK z4+Iqg))((hL!OBMZCJ1pk{Nyz7Rs>|Y`~@qu6}k=3LYoo6>HnlvN*m2LWFwiDW!<0 za+n!W`3`JCiH#s&Lj@%i%mV+?&*r0)j_lYE8;zPZvx`d(uG$0xdY#x_KN^Zc?3=y{ zK_Cpb-g|dslSass$PB{eTD@>76oe2H9-Ry?(;G%0$p!GOD>hrj0q8*e@Vo!`Tf~GR zA}s&#$)aUD5gQ2wM8$vkuXm8`?>cnIfLlf*uXx+?bxjBML2fQA`}nqfJ2!4i8a~L= zu9 z*h{ysEkKD0$sw+R&RM*wTm=YJ=E!&C{Kz?xfKsr9;6i8soGoKKE+#fQI`St!dGPT^ zAN#`}p84C~pWC=`{VV@^>Am;gY~H+4Vqy&E&;;Y05fS552G}{d2A00LfXA!7_WHkW z=-JaIiF4Rw;cLLT&Y4c2KmF-X(x$R!3r%z6e<1@nM92UL5#qmIWm7Rozyd#8ls$Lx 
z!ngnQBJzUo{Qi051P}(e0m60NdyFiraqi~O++*$<58VaE`t%+MjW=lB7&gHK7!b6n zsN~dvgYXBdM)^xez*PBYSp*aaAwqm>RioVRNDAdBFotrw)Wra8?)x9YZ*pR=odIpw zkfCs~HeEVHXJOGXT2zGQ_CNtlz^lVTW@eVmD@Kv>#74u#_ODpAblN8{F>t_8IB*~* zuyOV>1b|``>`WQ?z&$Yy>cYVQ1afwqMyAoZZ{NcFJV>mX@)z7r=oZ2?Xl*x$NkD_Hix$&)$fZnt~e!bQHkg7ENg z##nZC_O^9vJ-SO0(SQ4F6SPcQWBDxj`PL;t7=TnzKOALF*L5MdQj#(5>=Vcl2?anG zE=!tWVeayWvL4 z)?Hv6>BTr(YWoceNEEBZUo0TbnzU#Irmvm52&!Vufdk^)77SKJ)ow%LyCj6VAU_zS zEuCJZ=OYjd!*T=+D7KGm+KC{}Mdo1ixUulKz&T6`78F{Fpa6oMU7WY~7znKfbb*GA z|Cxoo8HmG;6UU19sE=NICs0%Z7ut^)GVI~|P-Ma8$g{HdoRp=32ol+=4g!P4oN$ zAma?R0;j4zb(h<%M~D62hi{cY5{O+>37*w!-hT_Kzk()ob`H)3=R77RCOR@&jl?1ABqP3R>WR0$oVBqH?KxDkpYc zu8}L9QlumzBO-?m9mF})m~aNK^hGi!sgA{4X$y0&VVT1-jM zln+0MON>J#Z_+4v-?CLGD82Qz+Yu231qY{ld_vO}C?pu*ej6NM+L2v*G;%YS0O!MR z9mBoh&~!lbA36*+L4j9X2XoJ4tow2)h`}aU(wAFMJuTp(3{ARtgf5KK2oY7;zw8I0 z0EO?ar@^<^8oudd)VFwjBKmeStTs5d1#B9kAS* z?$&kkOPF?qC(J4;2@j7(&;Zv!C`ivMsg52VhI44HupL{sLjplSM~kLS%o0Cr zIKaYv_dV>@Jg@)|BD7b2^P4^MzJ%Z4jESVUj>8A1A4-8uw?F!5FgF*(+b1_0K5PVh z=+mVKe0~V5n)=!Ch3f}Cc=wSFo8;-#o9?~W$jH`PH@m)Tx5SQ}N>Wpi6#Lm{pT#vu zj!aCN@Z9e<&tHI)GxOhj&#UQ!pP0O1<3{7`8M~xp+1u~7A3MhMm9|W75)~empOwQ1 ziS4c{%r7`TbIz%wDVgc%F)^{b_wI=Z59b7pbDbDdB&AK9E2V@ch+X)^6sy%Uo?-yF zWC*xv&E8F?&CFs12p~whCJpJUVU}weYtf?F_1(L*yROaP!2>2wet7S`JqHf%`{0B3 z-hB7vC!hT3eUrwGxMg@;VhkyO!!isdB>>&6LlHQb=E(P^CJq;HU<~a$wEOGd{?@Nw zUz_CDNkHdJIKse`lD4#_PMx}B@p5no;rf2(0E7rt0SXahZJ-I+W0-r3cB=bZ1H6Cdrb z)@Dqc0yLw_P5>3SI!2U%{*lY3!U%vv1RCOUCR=l>{E}mX{q$hxfu}(WKoPe3w*LP9 zL^ND66u4OMctEcyyL>r);>5tvu<_{x2mms|(rYfckhgHtT0oIWC_v!E&Z>s^%oHG? zx4SDWI-DR#X^0#TG1orvT7|bW^EM?gHS_5hN)BP`-WI*Em{*3g^YL zEKSp+k9Oi3MD@+-`T(SUH~@zS)JmxfQNaEkR=YX~@jVX&cmL!kVBKmUpee5;F?-_qx8I+?ZQGK2wqkGZ{^|MccWn<| zG6^^U0&Lv_r{3PR;qk|SB0&V{m4zpt`OguD6M{5>YL&OHL#EJW-?`Rv};WGNt83~_J4FKQ0J_m{b=0FYqG{ z@W4QOkF9Hvvk4#vcXwajUp@pcNYX}L{>OllN3xF=X zcoE!e#`p||k^upRh)~Ob37nt{3Rt*#E#Ef+b{2S`2w-T!U#|xT3oL|(MM(ffgxdTo z-L-YNb1k-*A}b3DB#7u`S+@s#LgA6T2m}y5Gzpx9I3=w{AV&iau*HWaMUTzuTXzYS zN@@cTp9*+xhDy9hh{PxcVkinBz%6EJ@fx!CiHu@65I-%qpYWMV$F0UaoatyJE5Y%j zFox&2PvcDA8p24SFboiXr>>0FerL-;(x<`68gtBuXqD(Nq0b6Z6Aig z4j7YU`NrwrMCA~Czlzr~ZsL)nZ_#-C^{{Z<2cICQBs?4l*m4s$fFy=GyK1kN;^;@u z9qos8Igjq&?+ZBLftN0T*=emk!$9LiD1Tj`*WpwvRUiWyC3D(soZAI1>3Z=~rp;Qm z0suIdmsZNKMW@q&)h1n0MuUq7FKoZ#9ta44zLB2FIv_x+QO{X02WJZ?!|w8r&vcwA zAYwxdk&)mAP>@ySRie!b1RVVE1NV>#OqRuq7sKGt?!UbW23<>TLCCa>g55`xvL{81 zOBA|#!WK?&3=UQ7`*{7skDUAPBXF~Y`ByeQ`$MHn1?~OD&aTG#dJtjF9d{Z6gU-J8 znr#HbcWsstbI9x&mSK~5*c6>MKKINyf2|+Mi{^?7%<*4USxGUp)oxQNm55MTmc^_o zRmYAx-ENlW8Ja=)v@`%;{K5IZENDKef^echC}bX)_~Ont`o$3rL>9O&Cy@>ZJZ_?d z@)`b??+8WEqh+>ufkX5!fWUDcS{gD1*BlgHmS^!@>}nL*s9}efr|Xi&w7}pFe+o&z@cD)~}i|b4o~P0A}n7 z2+$!D3c>Ne59?LrAS*LdtJRVu@kL|4<&6ASzx~yVFZ{)7wO)IGe)DBa#wb8zGA6`V z3;oGYjL=eXeu4$U!Z}}g<&_f$_5&eo*szh}l)cBx`YN01ipyvj335T0W4*~^F5opkb83dCRTmUQp zMT5x!?PlPx4=_ZB0z-pcfL;ss03wk#c~a!~ba08lUbBH^PM-!c0<{Q>FNCtR9uJi+7M#~WiP7jDc@WiOCU39SE6>lzFAPEcL z76;pUBrNLzY-M?0+ExN?mQkdOr;`FnW8b{gg@~PfI_2d?8v)`!Dp>{MjY+I)@@fdujXq% zSQzT;!B>Ii8sganBMC14|Q-5N}c?!i!1OU2n-ffWU~kqL_Aq2Y3g zvwhGv7qHZ}Vay5c5Cmyz>m6+B8XbI4OjKG{HnxLtp6EAE)=Dvrh?6O@Z~p1N?HEKVM+kXw|XIZ1L&BT zbZ*nlXDiXTLlyiM)R4&N3t?9X_+iz=`#0-wdVcZBF|?{p4^sK5b7W3 zzhEw}Wb`pHN|~IXD67Xc?6jq=Un3=2r)eYtdEFiqK%q(LYUB;eJt5ve^Z)9MI-lhH z;iVT~R0jhU%uQhYUn0J*4MhWk8vx0Rf{auMqImO$jh|H8r3lH@ zh@w)db$V@RXmEU7d}>O1M#lJwITL2ioIZcv+#h`Zdq4l#Pyg%Be}3)NSC5}KQdpS3 zYuAT6cD(neKmFl9pZVT@{^$1}c;LP}?$|tI=A_xPr%j%mjm3CFLIOY0#6)5A_1_M~ z5;SbTcl+wqtKo}RqLADyKt@ML8v+9Go(ccNr5lqm8KVG=$(W30rW1hVc(KLYRa{*e z6c#aY;X+q=M`LBfynD9*0WjG& 
zSEnyt0)BL2bRwwb*D$3^bhdY#KX-B2#@i@=ZBKirTB$UQkEi^!AQMjQ-o50W%@VAY zhCq1w`U~=}WG|Sii;Vz+0T#gN+5PU@n;v}-O>AE%GuhnVPU}u40KmbqzaJ<88Avu!KN}ymA?^{RAI)ec` zJoqfcqb207=N@S@x9F!v6Gro@vRklalbIEkB_)!6fB zw>M#8in_sTC#f5pp6igB9NllJtO*DW(T4>S*O)rwO?mD~@_N!$k_VI8MX34}CtX-MZz9AB_-F(_oj_2IF0r$m(JK_ge->d- z87KfRjF?US1_L1g!C*&!UuD+|L9uZ`iSgHdb&Gc6uqGl1L`aMtN6O{ko3$=BZb@{U z&(FXc!os~~3!VtHPibMO`r@*bj7;9mg4+wEOyt<+n!3cSOi-#5V#Y}nCXtdnODJ~k z`0icH@7#o^Bkvzw9H1cyuPi(?99XtF_o}~28<3N}3Cr}#32@`_4hT?>j`rYt@9MPr z`A=-eb0a5aZ=RNo2Q(L!C8Vd?28?^(*?I31PwR71^cg9@!DN-+KtX9!Ys;*yn?ZH$ zGf+`dnwp%f>kUj!O@nAHbi20v$5TN1CStO!+jdvY>2ujLWBCQS;`89Ex9>dGr< zni{cM2J>n8Ffj0g@kE=xb{jg zq=j?26yp2m_c?(BS|;a}tjLaj&xn6zjx~lrO^RV?CMGs!*|Oz+etry1->h*cl=!|( zuE1sn2uUNr&0z2+XiBBj;PEPzN}*Jc1WEWLBqRiPX=#ap=W)YRM7e?j5p?3XjwFG)?l#1I=@{zYuL`bt0yHYaNdF*k5D^gWv3RD>nGagJYR^gVl3)dMaeeEdOUB}ARe03O zZFf>ESgKp1a?%Y+@g=*D!Jq~F$t4eN0g5Qzd%EsKA#fm%51V`MZMyg{5JX#X8w~KE zQEYnZTOgx>2YY1;*Z^d5eP}45RScU(SO*IvwPeF4aC-#1Gax9)VYUJ%hJ*)#(q|SY zX&@O>Q62PKTk|UcD0N_fQmJvY4zME*+kiA3T?k@R3Ph%-rg_xiu$!IWEy{P z@*qNgSHIN4`q)i8ptyTX7vIDHA3DNd?8mB18rm5Cq-P&eiqml``NUU-}fef#8ym@$Uq% z1EclRMMdxpgVvfh@Rx%@CYQ^b8XA1+kO+)4zmnZ2PW|Ey=yU)N>%Q~w{D-$gH{bqY zE)W8!Bz^VRd;8e|i#9T1%l-F(hOl0!Ed1*S(nQ9EgiM+^Y06Y+?y7qCFm$`^xwX^o z-lUtAwf8T7wKsM`)!>m=|HfCDYuB#buwesQ?ze8;a>pHaE?c&2!Tbd?XHB0tDJwNCF*YVDIyxdMDm*eWOeQB8 zh9E|jZ4wxUl*uTcMIZ2$Afz4C2k-!r0(`~o*1!D!h9dexfN)`x;*${lQjANStQ z9TyKD2~JC2wqPm9w0$MD%bJkN~j}F=|E$DD*X}Hn7B{^A`k@ z5eR8aQlrg%{LoRptwY*p2=GE+V6b3yj8g7P3koF|BxAy&Vu2vCv7(bg4k8I&5p3Sd zib^2KxTF;D2of?DDVNp8s5M%(pLDw2d->oI@Nf}zJ#w-6H(6X0*a(?NvS|NMMkT`Tb5?Cd%C#@~R2z=)`g_uW6LD$H2CaKen4xC1ma zRa65jH3AQF7A#)2YTdQ#dU(+4j~qGXJB-@?$m4*5`%XB$bC0C0as0ab@6iPL<2row z+UsDlfKD?76XEkfizG=?zVdCNw0XU?wY9#*B-U=;28*yQ+ z8BE%|VaBBC?BIylV+OSX)X-me^~~wh;E{A57&Jfe!=E#TkiLQ%rQRPLqF-`SNO%}f zaDL~$k=7PPT&&e%$wTefJ%OFpqx4?R+lmv6|*9_sDOo-@Dd zy*+41E`H-Zr_*(1&mIIY0~M7k@4f%Tkt5BO704y;@9CMc?lx=xaQms#(a|w&M~@}P zC1@0?iFa4G*2Ybq;`MslTiP&kKp_#jKp?Jb@?W!O zst7=7<%ceuywY0g27!|z^#49Z4)L#|B#i>ZQ60c!H61>B__x3P%`?wDvw6#=U;X-* zfBNI^FI+fRURH)HiJTuFAqZ%i9F36nMK1V)g(MAGys_PyP5IXCQ)EoCv$LOl_D3SF zwIFy{4?{4Y&jUcRBHP^3eBt7SQS=YrXc?0+1z=3ZgdoUkuV1O$`T*vE<$!PZjI>&Y zEW<?&wPhUj`6nj@BaO3J_JnEoxf6Z_#99~ z&ivVwRs$ULmsFkl+s;vNQFG$5&^-ixvU%U$3}ITJ39xu2&Noj4M#hT$BOp>hfw8N9 zT=Y1QF~F!QPMrrHK*4lZH+wzYgry6J@BnZM&Nj&dpwkB=VORpk0f?Ygj$gSLAvHT- z0~*Gm5n4gu4k$=)v%#UEAzA~FQvdz6%~fYEbk-r31<+ySryATmDBz(VJ{^`E2PCL6 zV%B~CL68A3AexQ%D)qyIsiHwzA~RodKHqD0NR1vJs#4M5<@WyiFW@9Vue|-)$F339 z)fC`iHc87`eAm{(qC&TM1lxVqao3hdAAv9(2z+l-1EAm-H~@k?{@QymWCpE#$$fW# zAb?i@DMo?-5K<>(g6PAQX7}lRhs-TqKJAO?@9Gf-oM7{WruI%4&<`T1v7vFsc~dvxT6bXw0}?_Pz3Z?pu-F#tbA~L%ADywFM+%6QV}@3 zpi<)aubz}@@lKsJD=s}%x@&ek;9AXTht-B$AaDpEgOr8SBr|_eGgNiruZuUX4MN(F zO`yj`24to~Ti?fTzUOL|R01Y0V&dw>>hKU4@JgPmdFhN!U3mV2w7eb&T|DK1Kl}t# zs;dXizWT@KK}JHSx!}+l=(TuSI>Ap7yLJJ1__*lU-rioYSS5$axG0DZUbbd+Y)(#d zUA<{|=;GUN+dA68Vwo^y5)eZAqPgfX5ID|1A#w8Ux674^>D#t8*4E+2;KCbkFT3-u zprD`$b7qHTWuN}bD-pq=u`8C>)YOCq28Bo90_NbX16<8(Oz z01JCn=Uzgd8zBOse~#lxlK65Xiat9XqBK-}#3>{QiX(UaYNcaJeLZ z!6-F|APG?rKc!}Z@LAg}GA0sNQxpS&1Ehdp5P5;42wD(%_#DEEoQ#r<<_7&gg#O25 zOaT~^F%ikHQ6{dde#lL4hsRRsDV4pj2j#&-%`@KV?+<{@SJu zPduVkt2>Who_*VPK)jMk)t3}>NN%+0X)KJG-_)@lbpvL0E^Q- zFf0oV-Sq8;hstV&&i?EvGsn-K3qXwY_cfK2fl~;Lid(XE8xQ~^b`>Ef1A~1we!(?j z0U#-~i3auUp5QiOI)insV zfB`~I$0F66mf*I)3P3?_UT$AkH*kr_BZd2roa%q-x2}Kk(+8z)HXB)039o zc^Ak?VZhFtTp&VmQOW7UM}dIw#Brda?LGY;J^u>0I0y|}yJ3@3rII?ad%+I4z53ff zbk;Ng55UN$ty~92$HjN{fxz;d3jii&PTRO)GYr_eN-Nht^eE{35AXlD_{a%xijC(k z#Y~?#e(@aEZm+Jb2BY!P+aCf=f!P{t2sBL2nlpFqU{7yvS%oS%=z~{Y0?vEpl{ZA1 
z%K!LduBBzfXv%-*y{uVt634~On>!Es26|gNj(+%&xwbYnHKn7it?$ICu;7r&vhpkM z@0fANUFgK9+PxR;{jA%AJaru4%=H_zDmBBR|H;!Q-+AZVKmFg4lKgo8q1!fWpa|LLd7Ox%@Fln- zZq61$04RW*6FFi1oF!9Iv+L^{4N;Nv@80MwYnRCxhz)=Lg+H%YvBK+cm0Y;A`tc{g zW`Fnj=QiB4RTC6=_Q=tBix*Jok?r~AehRfAH~>_#*6OOb$rB|$x67CGM@4ltH%4Wr z0t+Am=rDSX78&{k0^pjR+Yap)^I~Vt1cn184FX)*eMsf6%UV7cym)}nTu`V{szRq^ zfdWw4E2={R0+r#RKtTE7vtE~b%KDYSg7oeP|Ir(-PF^%SeoCfvZeu^BhkAOgat^@X z50n7?HeNwf{z|ZMqSGz192x0LW@>UB+PQbl9a|+tO8`(JPz2QUi2_fgMp&$7U7$Z$ z0aO5X->_xSb*8{#GpDax0wXqHDbZvl!UBf+x`Sflfdb&T7yj^@@BHY;pe4WwhhKY7 z9~?M)?Fv8{?Vn>=tDSYa`+NK2rsn_+k_>vm%K}4zp8{lH?H}?B3zFvLzHasP0(1-> zKXH6H(%F>&qOZj+0zq^Z6-8mZZ>-_kVJHsv!29p6fB1e-5uDvit7M}rEMP^>;VdmE zn6hG_^s9$s?Pdl!PAUpaZlD;TsG_%b>w^N57R_#Gs!pCf0lY%Xl?tUo5svrK|cWncOZ)#2bCX>-Hc|5qe&Zeel zbRAf|Kmq7v-365g-udwHAO95m6`rj6rD@ei%{*7wL_B5-}GPoGK8&T$WqoIY_9 z(?JH#o7}VKbVO1jWX6Ko*;QN`m5~V?YwPU3dO3ee&O|TA_V)H6BPcR9_UPXI{{H&V zm?)#km>d)5r_-0=6$1mJva|D#9g7MLbJ!iD<$Hqy0)~c$Ig!U;1`k62>w>udAHvdq z7WH3%8$yu8XQcilZfX|@QsU5gy>3$Uiwye1yDyjYHV%Q!O9D>TzQjUsV=^WJG3EgnlQH=mwJUi57NDTJDzB7~y=+I<_HTV#6B+?><-)t}rZ@ry zyat7zAtbWmQUUmBFeOVzWi<>MJ1*rPf8$N?a8Z*}g>EA_y}&`)C|oHcz0^jW(qYk(vZ=1dRG$pQ|lPUV4}i%m~g>Vvt~J^;vG zx-c?49GoI>V5#W6&U@;6%dVDEeoEfLT87L- zpoWalA9(rQfx323DA%mIT~a>?s4ObTJ8=wLTzyIDU`H>IgwEgJ*3t(aFYs{X%<0lo z=fD9`)2F4cSq2PQ+tiqU<{S|p4u0~WnD~;zhiZ{ErR*&8%Oye$+)kD`e7DC@}X8 zaSk_4FgaNhrGwpKS7TFeXD`qcuyEy4ZuONbK){ss8@7&vmrE@J_drLRiT>(n%^vTntA=%tOQ6M8D5|VDa@4;&+gIEzP(ha)ri6;%I z$zTO`#15=9s>&m$8;+gDo%OeEoj!TGZ}7xujPJXzzTQ<+H;Sv4Y`9%Q{|*n=W5xk< z94jLzXd7xSD2q-`1{pYex{kj64s@9!6O$hQ)}x?OG@ZC4*ZNBnx3O`NvCwU@G`FpP z`rAO$uFk&WM~_vVI0d~t5KPj%`GyJE^(W4BlvU$gV1&<{Jx3;!!7z9J5k0wdEkKZ3Qd9qB#058}r*3a?H z{aU0nrZph<`_>!p7zd3&fY-}j*L8qndCtR8H1j#~(~nVJMF3E%l}U+7k}B)< zkkr2=?i!OZ3ecF02|*IqUcaUya3Ht|>UhwD;}#+YT6VfegauOaMZ7_s`EEYafEt z*kUQHg8>1;wA+68xF7=%Va1(Wzz|v*DlUMnz#~#w$8#24+0Nqb0()u&Efk@ zco+~cY4xh{OBaqt@tVtOE*(A%I#Qhx4H5n@WM@JH?|b60k%k@^aKH#F<+P6ocJ~gI z9XjJKX$(tAgQ%dSv}D2I1P@3X3%t=~t?C9Z6(17=IvV_C+RRjN!1=%J;`?ks!q&&X z4KX3W0PC>t#hYT{3}KO2k{LvRuz=AhYEw;nM}C#mo0q5km7tcu(-$pBpF3+b|ETmzLjKX?r5B4p0I>03^x#f7Ca0HEgXQ&gr4^d+NC;F1 z&d%1)%>pm?&aeJ}xEy!@B{Ni1Hk#|RwWi_4AO0H5c%~eJz}(z+c?dIo@X0?h#dz4c9d-rNBKL8nG(rlo+DW^*F#}yZpy@Wmb0l_En&vQNINiV8o(~i@E*B-^*CP zy86J;BhS4GZVwC%L<9uKq~}0~@x=453#=<~^GXEhOYgpC(&S0tc9}~`kL*8CyXznr zO;d6v1;oT;&z-Sy>+M6sLn@g(z)#oJ(b?PHnRoWW_<4)Yz4}Js&X26d5r2(eTWMKk zem(-A`j7V52Ti2Y+gV;UWzE{ouFea`j!D*1o=;e?0Ha8pPDkmXLpGaT5JZVG@imiM z%T7?f`WG(2(_tC@u=wOKc)8JbLBO}juQ5pGuWL^<%@9C0wl>t&HFS1#|M8Fi^`oEu z_}L%+=v&`G(+)GGY{Cke?n1nKCGRpu5M}(A-g5fACK) z*z?L15iwq?%i7uv1PoO+Gjb&W#HVI&UU?gMMQ9l~@Y@%Fqo4f2k3cRfKXwT$uBCU~ zwc?&TK!pGL!4ILw1Ul{Qk3R|mG@ZQC)zrTA+3x|#80$L0!h@XJ{`7YtR09MIRM(w4 zdJ=dM7;u^Fg53)!EVP^})g(qL0)id=7Vwh;0S9*P0xyi8l_k0*eCci-{>yWJz?R7P zxYV4SQNq#(&p+Q-Pyz(3d+=V+slf>#K>g)%+#y55rY>6wqGSj+#79zr2EMuf&0qW; z7}7d0u;+!Bp=U&&l5yL29tM|>!(()uKYj|7N>ZowkBM-at@~bmz4^!m@PHyHC_5#i zt+}n@;w7+of?^ZMU<3MIUjD&PKmfPRQB`mi#{#CGX;028+^O~JL_(6l4 z2MQR2X5GrQK+&L9`Xxkz0`m49wKVkrfFUR>CO#RQ5D*#)EVQC)#SD=t@%pIH!MZkZ z2zT6dFQrt1k^~yS#qN0Xb>Kwsz>EdUbLPwjHO_dO`u++bsIoW;eZAV?FBi%Y(1+cqFcY4yyY4~<;Eaa(I;UGs@z zMQpSxEe`Db-+uik6DmbKtX#A-EHDIEaP}DifOT_{GsJ8Mr#C!4e(N1~fdHbf$TWlm zuUflCkN%%-6SSLFJaGS%&1;)a=MIcePhebxXd9L$RVMl$#ipTGPK~s52 zi7+@^fPn-~S9(VFl*|brfZ$>G?b;2U-4Ggy)w@WBh5^%uufJw0FOSKb*jm%5kBHg$ z=yyaf2Vvm|+248fue}5P5D{iGnZq+C%-*;;GAJZ@?Mj26?&5pzrKP0>1q6l#2Ddf0 zjGHzCizrXW6i)y~qfw<&p-oii=)mvSyv0itN(BHE?%C7W*o5sqNJ;_V@MT|H$2xAj zWCQn6|7pR=2=5-=V+9XEGCuV{7$vxUofQBYC;|wIqFK%h02D~8&5Eqs?V^Cj+q8fG zf#3Z4x6eKI!lv7|Jo3n+Kl;&6&!5RXe)P;>pOI%pg1lbp>oey#rUM|!>gAVT*8A&4 
z;Njm+#$+r8U`)nh00{EriBl_9tYl~f&k0|Tlti}*$>CUm^8?k)xXk0E^)O@xmAa$2u)e!(>DFyNm?475mK7D0mzCdp z|AXMCZaZ;q+{Ez^8`6HVFgiMxOb+QStls(NdryD&Y0&H5e&w(C{@_XQgUc`Pn=omD zW@;QbSx_;bFC zRb5;4sbZ+AZZC=uvqYG^ZrMZtg>Y<8Fs3l3J~SVF7n+J7-BHU^R+ni?w;r%nXYr$6*s zA;C`^vcC4a=O4c7ej+&n2w(6tNetIEFFi>JM>H8wF7hB;tpFo@uC9D3`cCG(fa0#u~G4zx^5Ug_|Naq7Co;DxUIik9Y% zd0W?j1Q?<0!$TQq8T5p4;N}FDbC|7t&234urve8<9qmCOAz)Af3JGG%;Hz)F@vWae z12PhsUzK~gue*0bTDn3c zE6dB%Cr;3X1UHpeCazu!{qM34Pr?G)0}XGLyrA{-bGV&Yb_b#P#{YOq ztyEon>s=s;2cLdgqfp0(Mt2kxoqqluP^;qO|=89}q?#3f539T2Fy@B5aVU>x;bD>rNw%ob3=?)Toiyl-F3idjoH-3crQ zrr|$7`wV!!6DCc)ck_1erOi2Y?tEus8}7s|p33pwzP!rttn{b9`=1aQ41w}{@4C0< zaxs`-#>B~*kT4*?Y#e?U(`$O*{%5}zoS7)$PKP7^qr+h065>;4PMZZZfL7IBSOvW! zI4`VkPv~^IsL+`h%d4ee#QDmve~Tf=;ACHY?qz821s!0*N4MPtZcY&u2y(jW(5Yj8 zcnw5&@TsSQ3?V**t(==Poe)THv4)@^(9$h=MQ71}!T~54LaxM}%Cah#)eag)LjM*Q zjBsFd|N7@YgMJrb@O$LvKOqt$^4>krbhZ%Wq_w9nFga=6Lk|GJiMMvu9?t_2*4%^m zSPw49oIO7{JN?YdA66W?1P%Zj%%3#}

zCk+7>!9ealh~JrvUSgsXgq8dOc8CZnBZl;6d4n*1XItjYdaSX2v8O{VR|iLSqPG^J zMCohY&>RzPA_bVVo^{AiWxeB-=Axd6jsa^rBp3=4G_HR@HC-E->hB%YhNz<}nV%S4 z&!%_ZRB?#;DJMq>>P+fLey*gcGCZFc1}>O*>HsY>yBGfBHGzo2NWTKy#I&3YQVTIFaRMZb!Jp4w~Wg8@3mjSJjTp~mE3fok+>~Yp@T|2 zon*M1udnYtIvN<%WB9ps;I7u|B=>EWz|TznA2Mj##852Ri*jC7#Lzw~Jr|RvND`kdWS?7A6kB&nrzAT0_v@lXLG5EAe;p^bmauYMa zL9{$y>G)hS(fGb*qrhZT;DD5GlGFjrjZIT>DiiI30;TA@D5U?qoZFwB46f$pEgx3B z);lf##>}XS%;_g{{oYr^#vr|9WezC?mP$^0ie*GCawXW$lgH7PTb;h-d(l}TGwezq zF3FG&^tsq7!evb-63E1^^!@mYCEX+f1KW(khZAPz{ix^l_H`ri+ii}c;}x&TBHXm$lN9dLzjaQZ75)sNI)BQS6@h_4h@m5`<)OP^1_3Ub z*M5L;t^;cVtn+;`8K!-l5^EZkR3(Ylt?L#O??FYacZ5A)DUT;Ha$k^4tTm_L)41PD zKNs1045Swv9vynmT{n};awZvxN0I)9LM#)RktK^S;C z?UD%}GbhBA0r7df4}RCRe6>5+t$na*w&uRV^CtSJ3vFlyrv(qQxLV}3KV z1HH#lCYAMPMaGy^GZ)@xVMZngdTNlmR>WjvqiTxKjL6#Ag!1dIfwms$9~AvqzexLC z36Y|r?z_&c?gV6A(zNP!Wovpj*xHnmqFZHn*1r;X`z#TNi*zU?GS-=VuMWI01u`hs zU*7FHw_i0QSvzh65d2DReqOBF-iaKMFYNmkPT(H97(>u3v;&Powy5rR1w>(tAX6+f ze}uItfrnE9T$B936Ad8WDKe+TA1DS!9RA{cs5GZRcARA~IQICyJ71}DpVqw0>7)UA z(ZJ>9i(3u}55Ie+#yDk@GLNy8rmS?1^&2V7pec2r9dwhF_4}8Q8~gK~1}w9nA=K@V zU_0JH>ynWmyB)RAQn=ap#?(#4LevjOhc>FRl!=NogBz)-XPMD;{!a^p3DdgYWsZu) zvt#A>s$hz(VTxaz)&r?J>wen)=5X_YRT7*`LgPm^Z9RYZWhOyvD6ReZqri!EK9Fx) zLKg^N6aWwAuKFQ$W!--Vxm(8uWB@ObW2D~GCk%oi5+f`cZ7cSsmKG2kRNh9ahsR(m zfw)ye+>Z0cGH81pF@hR+ekEeMFq^atd~4XDeB&^2*l}|{MXT;4_Hi(dY{mtqRD#dU zY3ZG^B@KXasfk_9Qa}Cji^_+-lf$Vjx6AaU{>ZK7<08zXEJ09&iD4eP^5IEQ%~|cO zk?mZ$Q&<%;-7_3C1^`-=21UhKj&HktD=)h7`U8>n`9t%oNNe;4%2iV45W7kGyVK_Z zX`8X9L(=`dsK{!Mck<|WhlkYfn0f%zy zp~`t({=Fr1xhQe@|5_o~j*Ck*B5~YK8pXjyJ-Yee7?nJ)PDK`3E-u3vobQT^Z&bp2 zGxrQ5lq7SF8xAvuUv*tfztO6iQfrFwvM!>rjg2)n8MyZPZ|bf{XNN_HWzX;GSBGlK z!M))5mP9Jzy`$Q9U8i+HIKGx7p;RAcB7{_)QBKOrLQ*^} zoFW_cY43OQ*83L3Qt@y?^K=F}d#B<;z;3}BgEXk3F0D#& z2460ewVsManoORJ_SUiMxbtMKSNl`%?^qh~_$>?HXK7rq^QFnav@#~t`*@`7C(;Te z;9+I`!=gEu`f)0ffiJF(7^e_BUv5eL9TT`IZ77Yi09%aEA|*xJBV}HfZb&kJ`D9;% zO^Unq+qYqTD>0vwqMptyFV5#dvC|NmZJU}E2W%sPGQB23zRK0sztzJpq55L@(TBjz zEpLOHh;`Ak5uM#mIbY49Y>X>PqG*MX+hB%fJOC{?LaV#;{mJ*EEV3l2#2J~^uRb}< zG*GbMV$+A=!nX&UOAQCx7&Cm#ED;58LtQku6_Z-8FA`Q&0euJotjmS}Fmp5Dj!_KD zNYpAq^G@r2UQVC)@BnEPMyuy)`e;vw0Y4b$GCN#AX+RiXGEB6>LNFX9i3c22$|QVF zdn3iK%G7QXnP+QdIaWJYf24D?-+j%3^~*BSZ9Z9l>7w^7JAb-aN6b6aC?|(q!2F{N zqfEmheVtf%d-1(ne+SVB#wrUS{n;H=(vB)pdP$X@K5Z@veUp@O@m`v4{hoC4d*0_^ z()s<@?@#6EMFFPXe^9lh3t1Dyt`|3DW25)w=U&-;1l+0VkPSq@N|_XB3iDZua5p8s z(=t)oc#qsq{G6;YkRH?zAssAkt?JLZCLGkX^&owZRmGI1IX2#xLSDIE*F;eQAf^O@ ze;;W^{*xdPZ+&-iJu8J#X&%lWG0J5giTR^mP!9KQcfRIWuA960x{r8vT87ojBiQ>i zUh|~EAx-%gJ~x}3KtqehzyL8{b<-z6br?v%&hkt|@iQb18fUp57Cn9_i|HsWYs|n1 zK-2!HYPE$e;B`U{gKl#pKp%#lsLy=+LE_^{j9F$5*|?d}>Y0(1TokYjj{LcKUmThg z6e0Igvu`~jh^Csrr-Sv|`yQA?PlP@{K+}js_S(ZE;7-}hpwsutEb!08%-6Rlt~Sh! z{ZLc_ctKa+Qm^P^PyZYLjR<~+1ulb99&Bw0bQePsif#0&EE>}QUkdE7RBj7d<7$Vh zGdX{-Q5jcZMPq;i-{VXW8b(`u`*XF-Xlson!=Pl1b0k%H9YVm4p%JW-WO5Eq(yKm|0e`V)LhheBEV`bZ{ zpenw{fWsB>mJ;z)Z98x_GkM%%`tXNGld~kFCsC z1ODlg;J;W>;1WdT33AZxx!!)Oh`W`j{Z&gYAY0Sx7~(47UbX)8)9m+s`U#?;wYcsL zxfF|H##`&SkV0m11%m!VtjCS>Z-G~bPXNVJKv;PV=iO?vNj<=LK>!_MJ#*~zqosPK z*6$(vdX$}i#s2QW-k3{+uV9fl^lc6n@%cxN3Ng=2J~6;skW`#4ptIfUcpELJIxf&vQor+(SannIHz-^-Rx4ZF&E?V4jGd~*L&Ebsa9Q_Gs? zfq9e!fJsXaY_SP_%oVm?F986G3sMpN6gx@#cIMY_q~i?( z{LeHY6h@v(=sppeFQpD9;e#=FQ_H1Llm}3vRt8zD4vSycF}MP>$0SuEeo6#ftW1h_ zLI4!Jj;bvm*1x-tP8%y~!(hZ3*2*QCO(k!2G6bU%!<&-AHZ&DKgU#O&$l}RK z?rWhjDuU2l#qA`=z*nTG0x?nWf=D1)V^dQhP_jc|#9xm*zw7a|?9DvX07Y3wkQEPR zLHTb@HbJ8P(%`*fMv5YaDT5A%I&GrO`zd7$&gu4CAIGz`CdS*vMoQWwxDG80quzc! 
zqGs(pXU?zr%0LkZ${Xj4iC>VH<2hh5&$-(6r}{nhdb~KU=06v_TE(sQ=qA^~y&;p8 zJfnt=KV$^_G{j<_``isf)XIP9ObS>)0(G}J#TcO=e>`qI{(x_iGLav;G?clMw2cl? z)HtzEWxR!^!Y*8bYQ5qiIYyp`%MJ4U;N1vK=TA*@S5B4Dp%A#H9efEJZ5P?OtsAzz z!wDs&+-R)>^3*5cN-|U$G#roXN#AUZ5&?Ns(n!FTh@^!gGVE9OpOeeWs2wP zf8czTWf%13e6hMJHcx7G6gz1=lqhQzK~6ZJ!8esBe8LFU;h#t2R7{@rJS{1I@$ zk?5Z+*jc57)ATlKe5vY$=%Pz)mUmSZYZu>JOfSb8R!U)o_yc5)Yn@hu5gra+j%`76 zePC=EOyaO#6uw!ur!S`4yoSec4yuI@EOjAsWj;TKO8;H%4Idva+nHx8{36ymIc@(B z`3wu5*%=S|S;1)?4yfe!SV-mC&~l2_bzE{mG$de>LOWD!lfyJsP&E)EMYv`cX)RtJ zCr3voC}A>^ec5C`#CTF0QV2~-~GOecztLhEYiGLv+e&FS-#(Ku@AgVz5ks2 z@V6Ubzq(BF{HT@JwX`w3%6QhwaBz@X&om{=?xVZOPVji;^{vsnkl-&R< zlA@GXOyL6P=*&1Pc-xq;nK+J=;WnQV@c8=5^zE5T_P#t;x zI$Iw^H6Lk#y#jqirhAt~6Aa_*u}U(;Z}q;mLqj9dzO==Xyo8j~Z4et`(fAr8u+UbXpL)@_a->XZu0Y;rB7Y=b(AR4+;v~JV_hUVT*|eQ6gc`KAK{%Y|MGzDbHeEkCor| zgTpKHCimGnVUxEB-(P9zbOw^A#y5LbOq7+EEgw{NF617;I#dnr^SMRcyAiz)aO_QO z&ttO{zrAZ^#M4>=T->v|qcHeetFMw3DXCe|bNcgT!t*vSZpQ*GuSy1CS>x%Hxale9 z$# z<;7>kkHTo<47S+}d}5$pN(^M1+dUNl)U@CF=;Lxb+Ua*gGhs{tHP#l-#mMt9c-o!s z_}2TSI3u8i0e+rjYQ!>e`<3Ti_|!e)ER}+AZ4@d^6U$YYYw1#3aD%-T`#67V?`iC0 z=cAFCzlW$OWyIi-2q%&&fmjXtji|{Bxcq~Nqo$e?asQXd;4oMe$b&)-V4?w$w*i7_ z3;Gp2M^e2wRw+Jh4Q;ziep&mQb%dcI67+s|mfC&0Eb+LD zi%tIp0Iof704SC70A8iS(;BU*ZeIHJTg1O>PceQNJO=2t)aZ=tX;}%;z6^(gD|FTR zd&lV|l2HEo=x#=k!^x)%xst$q0|TD-3cemwKKo6Nr;!qW7#6Lt1~P{_yb)~%%&Ac0 zFcd}!PE80yK_Zw;b5iX}r6wZQvmUsESm8*cvAVyJxZP8G-g&kN@*er~X^=N^>%3q% zX{}B&YM36#WmiWCOQfvh#pnSg0{eXGr*JGLbg?mrSt=1;Z`@Tf*($XG>YC;t^5ma( z9}j;@o5c?~eo1af6MR|(nH{dVWiRDIjWY*&>jWnagi^)vwi$Y@Ny!D8!6G^<*qNk6czfP@(cnzIcl)BkLpE%ZF|9i$N-^a` z_$xwe@6UNYvM2dOzbAgxn^3hMziTG)%_hbAr3tOdM;=mm#xe)ZF3J;)up$eW3uCIb z=gJ?j)aAB9f;Bz|&#Y^*DH(;L^|<^9dE|8kCTq zXe!wNVv55_nj-~zXaROBfJK0CVJ$S{XR1nEMi}{L_q?XbnN6P(ORIwKLvG2NirUA+ zJNQH_t&~}u*32r9g# zhCK8ga6I$6&3YHPQkG;XTTBQz6^$9?vnCz3oJ#UxC_fy+LbJrqf(p*(0xL0L@{)=1 z7#!jZBiY98zkR81i2Wje^jqj(x^BC>4#xZ)oH+SxzT37NwUB+X0rLEJrIOotZ=5;+ z98y)r3+91C&IfYm?br(OfcOw$wEFAOHc%fCAiEm;Vx?~!9EJy`7@H8lZgF1ka^1S` z^h4yX!Woz}0Xf$7`s_n>30w6yjy#`Lzf##ZF9{*H71PT`A+Lgb#7bXkfHX8q+G-}< z&*|&;^U9p-cV`JfDPI}@u%1_u=qiSdxdF!UR@4+gD){`c|8bj<8R9;#^C`y*EgIuR z^Z$;D4z*8JsSIFVy%-@g>PhYw^fy)S!|#skrO8#=36r@Q8UJ~?^VKNWeGRQf;vlLyu5z9MA5yf3cJePg@$5A!QaFSMw*G562Sg-3Zg`u9iiL;fcS@ zZAo(yN~5bas#;1cM1T}7BaT_elN033awp46{DBT?ZiF92rr0cG>E|o+F2qow-?Gm8 zY*1vOMuKtRD=Tsr`1qQ|f+699d&p;*4~AIszl6R@+CziaDN>B!l#z5^_M`SUyvYkZ zYT}%(15CN53+>&SQ#FlDa3aQPo{!VJA3q^&psp9wp%g}pB1`^lvcc6kYX9~(xh0%y zjg(k&Cm(PzpaeW(>%fNnCq1TyKqC9ggt$d663ulLv}J^R5VXoj-h{*EzA^O8d6O_cYRva=|n?;0Un!gX@5UG>qZ`26db@&si&zxFy%j94aL#i(&M_b&Y1_d3 zK=?-Gq_cZ=O8TTT^F!!f*gGv{1t}?b*qWj(aK;aZ-yOS^;qH^Q@*b}bAxx#EBNg|9iv8SOo;=qK<;&Fb(k>X#Xlccpu zY-k~K3>XZ=iyow-0X*pK>suvj6`LaF6<|@;gSWV>B};|~1BS+V7!CyUJXllMsR9B4 z5`{<5qiMhl_m(M=6Qj)3KDRd)yqID7*@HO+V$0twOdc!8lw$0|u4FN9t`brYS-O8Z zOaLWG$&@j*ui%YJoMown&DWzWqZikHZO>__`1FFo0-llt4T3?iTjg)*Zy zhAgIk0GfkSF{LClq2(Gf?3RvU0PITj27Lgnc9iWBjY7p82KHgl1|4>f!Ho(-wO3@A zI(k7#Dt_z4xBWm}=894#^8#`Ugh4f;1)7357yhYFmh~%Np`ezgKpLADF7kShnF>6{ z)6lfUn!d@!IOj59O5@IqSC=L`fRJrY3vR+Jm&k2bTFP@99*Q+ya1^JppsK;B`{D+` z$4953Ef6!KM!{B!iH(gx8KG8446hQT%}?H(ju~SNX9k|nOV9j)F>C5P|6N0Shah53<$y5mv(NF<_;ZW`c9vDOhob<$&W>7F6 z6~k;*4&{oW178xO#OMoDvGiIm0pLl@C7O;-tf`joO5CdnEXibK*iuKWN%b9zWgL#4 z`9qqAV8t-r28nwmyDFX4R`cUOFwo?*E;;_|YW+&}{+M80No(G~O$(;-^H&Wb_j~S! 
zWiY%Ls9Uj)IxluuWkzBCh&7$#RdyT-9I%w-yQX$F+}Os*{am?WBLdZT;18iGH4c6G z(V~1N!hUHR4YKdATNz}+-9Xge1KdB1CMo9<3E+vL)5HuWgvn~-N!i9VS>}@yyJ(kG z`2PlDgfq7LL^bVp+%PJzglBS4)M7$Di+B(yDp111z*1q?@;CLz$eS(b!+7p_#VCIM z0|C8!{O#riCwpk^cBazV*?bTK9z0_tg3z#bPes_v87o81(wjd9_g?~DEF7+>ZftC9 zYN}ew&Si8)dksQWH{gM=785oQIb$l!_tmxyq0cYo8xss#H%HI|@Ss*~M7DWA^yFc; zB;;hUc)AdlU~}MY+he-UL#3+?Y=S6+CDs4b%c!#hURJVs&2s+F=I`F2El^g8fyu>>YZHZ`i-YXS4> zYMrG=9t7s|z8i@q$k*Zz)zvjJ>xa#sJXvQdU$~;u)6?^Ic8>EgH8geRM6N#*Jl+&~ zaMXYI9VNBnk_3OGu-kNX(D9~c{dxutNX!E;^k{+NOGCpau{K*0Br+YZc;d0ZOb~** z4VY9_Q&Z!LryI;`clxjJO!ue0`5nuEg(8Au`lfVea*vpj|VUj_`&I5o@&ORe)E z;N$05$_miISQWR-sar%no}3=bwo+r zBhWJsY)RxUc>UY{=Y@<@?Cv?#!6#iFC8hneX>jLB{$uqWVQAf?k`nuPel!R^(0uMS z71_8LcHOM@#_fd(S9c*MlSAnE^fw48X}Z1Iz%R%@x)db4y5M}bgW)R9MOOHsai z#lQ3z@a6S%4JSX?s>RZ-xvJ|OiHW3ryA-Uo|4~S-BW*KXE$_xo?GQ04k4D=1B$CmT z`q(Kez!a3!;qY;r-dz);o0VrsT3x4^C>5TKE9Pn@8oayzF*+v6t|iu}gRSvq$7iOB z)i?*jUd;K=?KeBAN%e?fLX{qZoLGYM%D7b6p}#dFzw=Yd?43NO{cqH!D+=|}W^1rb z82YkYnjwQg4EBA$ea>!UFkANd+3sY;+`j>6K&X8_?0Qd+=Icn<-|s}JHIJy&iekv4 zopdpECog>5EW%B%0&61nmfA-Sx$l^n=shtv#n%nTNvf%a{MY$&K(J*nnfB;Zn8oP) zZDi>7D+0y*c^@0^z6HiaK_BDE4}&v@Ic~LT?by(F&P_)@`5!XZls}g&7RmKj5sP3d z2Ny>Q+_U`lyCa%%f3C60$140{g(zUFL^J!Sy$X8{#LC^pTugw*^Fv1>phEooKLoP= zey6Z@yw&-3<`RDDdY>mn{6yq>J;osX=kwtbj>h;lsq1%lR7;^s&y0QujNU^>(0Jh& zS%3d_sV8lb`$K_)xQukKVcs^O#_=5zDu4&iS{agDB22PQbtUZmfGUaX(R++2`!h&{ zz%><8NND+nHodV2?kvMUtz(rfyQ_+G1V4M9-?e_Z7w`bvWartuO~(ULWl*>{KC!C& zc0}T+LPtx`^9jZ=rEBOnS1+>pU-Yt^{>v_khr2+I#m&v}NC^AAz!Z6&SICzTTWiR) ze@lhroSPWB+E6u!IjSRi;KmK_rP?zlu-MY9KQ;PQ zi>>~1evL_zi%n%^g(@bEEdJAr1W>h0If7EAV|^Hy_-AmF{lK5arkUWJA+Utp?%(L? z&Xa91+$oTE0VEZPJskw*(jOT~5DpNbG8~^EHb(vqs_ml#<2^Gz*p4))EdeA=KvLfr zLwIJ^6Z`L$HZ*Zaf1~s0tI7eZU2h_zaK06n-3B1DTcfPdu*iOf;{WJB{a&pIx0~kt zB(WH%hXKE zO@T1Dm!Vtnb>r>kyXO?TD?iP?Az@7|7tK4B378w$unck{ZGA|FGA)v67hmpB4gKid1I3NMVxA<=5?{cH6_tAvp zg&>)te4V95Re;n^KoU$iRqT-7L#n|_che&)VC=jM_X8u$B<8)kW_+HGycciz_U-8A zcsS=#Xz=V1rJ9kGt}bbxfrDSqQ@zBDp0Y zY=*Auc$Gx~qB3?RhD*~!$ID9}50@}V;}gf>rKG^r(#Ag3efo*sUTl4SHJ#3YA`c?^ zVWd$q$Q1-Cs&Anrrmn>wjuipocc+c$cBK-=t)o$B|WE^Zq1z{t*=oAOX*jyOdR)!skzA5 z9)0s{XLb9wPSm3w9U=SyiKZIBK(|mG4$9w&iD@oBk`hB4k{oRGBLncJ6<{gCMe*aO zQ#w?UxX{D(7-IUiPl+ zf6vW6P4i1mHeR(D?IyrXp4MkcitD#Ew>bo4v=RY{rI684d5kIv z3iOGQOW}6>+n49sQl~`$i=hKw!RXm32w2n}9<^cp9Wwgob5+~9<@5B@tqq*VdQfd%bj^U(~7+GjDQ!!HuZQ+h->UuTjS&h9SVtdx_ z@|iNWSv?(P?D~x_$xnNscfqLmNm2mIZ_`$2y-6&9c=uWkEFbRGMeqE;{(hsg6tX5T zHN3y*Nmf1m@f!5?77tk%A)2kpM~keNNk|~cAeK1sWKdN5Dqo$dj~oN7HojFu;l~>| zW!F#1E1-UAS9)xmn|YzL`;EJ-dvJF`5#R28DB;>PGNvD9 zE`GSibw9U5+=uq^iTvq?digBgW4YI!neqTctVRj?0vEP{H9fN|a=TUgE>C4w;qW>A z1!Q>_&km_{B@~W5&J9>30L8;>qwz7-3ukjIULD}Jv<%fA;h=0?sYq?^Ti(~l!H*au zJSj=ZD7*GgGuLaJ`d3wFtgnCB(FoT@dzn#R| zawi)(p5D!cs)gdpTzLI^dXK+-kXHvs{7RL*J*jD22;L!NuK4f-0kN&UOnO1-6w>|~ zdyarclfXKHA}j)?y<7T#DB`Msn_{ym4>!hCd&Hw z+p<9{)sWZ!e&uKE zY@~Ndi@jXin0hOhxv)+5-_-0fYnq+q~L2b8Bt#2)z1IFz)WE$$q0zu%gx z65`d*$L|&xx>s@XS)khXE?E$&U!WTKCMD$r1oZ7Gf|@?|83^PCPfDjhiE!3PxsCT| zn|b<@&6@`bYOr*%y*-_j+ggr(AG&V>B4vnBgdPz^LBOsI5tc#Q6UTiPUI(oKH3Z!m zCGDv$Y*XewMa*yg2SxfAat=w>hsv%;GF0-yHk@56W#0)H-&MD>L_)1Xu|s=}w`$-> z`=K1d&ml@#kuK@O#A!%)oaWVu*huTeDrq!Awd7YA6b>!b6}J~7mXK6`Efxv({whL4 zi~seH+N;-74}e@R=-IfzGi0k=I{C{4Z)xGVwL+NNt!)M<>f0ZQ$QdElac*7x0Xv_ znqD*bY8iByesmPC$)%v+N)D22_Q3B@e{3%*pU>2?sus4q=&Og=X_khi z;Q(pABoWWN?EBG8DvP1XvhUj1;o|L7ZwWNH$^a}d&_iz#ayH}GoR01`;3L>$D0%<; ze%gN9=Dyws6%2+z8C?-|kyLHU>%@9zje9AB!44ewMN1NRyO>#|-VV3y{+^nVyP_)J z{Y2_cq`wEj_rOdSXIcgi)lZc-|0zQb0!1y=uT$Eo-%zb_v&$q!LGLw{E zBAo=%l#N>5%OmggV>rlSt!ERR8|OogCFb13UB;nBH{E})&di*58SSjX*G^}|Hc&o) z{Ue>`G{b64oj`u{8M`+cr{UR`O7b(5 
zRc)j0m-dopd+6=Jpe7n??w-fonVQnO^1+Zz)V_4`JHVAsO)UkB8z{LIZ*#L!l|!v5 zE?dbjh16guu-cgtME;tKot+s-w3xCmZ{%^#Bk|nhh+v-FnFsL*v&WK&`u!Q_7i|Al zTT6m})T7Mo?scN4(zyF{x9CJAnRTxM*8~k@4ZAp0VGW(8tl}&xFF45TwZnOUf6GT3 zwE;s(!%~=;t6M{I;fMLP#s&YmIO?iLD=}*=Y5qz@7mT1{-*;>5WGwiLAqiSmfoDOE zw{IYg4=|;<%t18yLeDg4w-u8gY^6QczHDdlcb+YKT|WKs?S)#$AzT&s5(N?Gr=1-q zCyJD@jDY_}#-oUOXD1BJDm119!|Khx9&7TquDyB)gaZBOvGL}5!)q=8`$7)XkZ;xW zy*Ku3Jsp-kxC6l8Z56gqfZK1qT^$gDZJ}OH3g06oG%Phif^AYm&lg9AXf%pxtXY{Z z&>(o*G0c?BTy#^yh2<+*dHGa7cpaT!bH|9@1RgI1hb6jemC-I@fQ`r*KD3F zQo#=%ly-Foe0j2i^~T+vi|ZMfnjQgBCy)q3-Uf;1@23U=LD3xlxx(^N&^B}aQIM72gbh&B@ujyAiEwlHkVKP~kw(ERG02dh zN|3HaV}o07#BHBtT7|_5{3dl2%nQd}I#>+UktD|DgPW8K(FLpztk*S!TIx!TH#IU0 z@WZa%S6H9#iUjPNn|aRRcMDa!WzkTjGS~TD(q>kPI$6^wje5gP97cbJU(d0;;j*B9 z!6(Xix1oMdCWAunTnLE~cKe$G?8fQr1D#&^6nB0pkrT0nEep0Z6>WHM_7r1E>pVE{ zDfoqvoroByNb0M{w|Oc5|K6T>S6Jz@|ITOlNuBtt(vz|qXiIOe%mIOp5OgfhOS{Q&UHQw>cUZ(X6$^) z=ZlnbH~FgKzv67!iWfcR_JF#A%Gaw%Bnjk+-M42%si(`cv9Jz*{vH26pGqtOS7Jf= zdi4!5JT?t6A7!c4W}k!rx3<=tN6QnnOsX2Pd!2;RqJ69ZW@|HqTYGJ)dtl`LBP1q21ke#}ebo06gt z1QsnB9>4*rqkOPRFihu&V+Scv)!e*;_h2QZYlYOH)rA(mJ!J=%sVXB&$YZf_DUL{5V z8CQ=99M@B^lH9)9Ul_Q@T&e+~+?oqG`b{6OVCVQ0{*;5Nrj*9_Udloz7anLob*8!D zo%jn%$|qShPsmOG>#tr-5!YqLD00^Ql~Gew4F)mTWT%t|<~~7~bO`9SBI@{gboFQ| zq@JB@JH7)O+f^eGwmVbqEqwU^fHk&;^bCr0)8(9C>*A(6t#HA8v#wQsqu+@@5pjvy z&KNv}k;T@^*8R89l+tyDh3a=Aw&(^c+EqZ?iyz$PW4zU;)Y;|9R3q0)@;pnts7f2{<7$h!$d1H114#h zaZ6HB*X~_NBFl3ZrJhwa$@vJVVPfZ~tUg}h5R!bld-_!&mfYTZhn>7a%AvkGhLI&p zCmfWDcH`DRHuO8e`uOt;_+b&GQ4vZ|u%=^O2vyVZKXNCr#?;kL%f!TF@UxtlqD&OR z>r~9aqS>)zkd6jIvl2wQxfHmTB&T&r0QD_B7N@g*$r1n~5{8i6OV@)}7iepJ^Iv_M z<3VzH*MhT>^lNP`%kg?Pue7`eRx~EXhp!Wa7{y&^BJ%M@!S&M>4YMvww`W71i4$u6K(`P7z`Q3i zspmbdGrlwc8+L&kaL_I3$*iNbG&4{FdAXJ>_sFH2C#j6_`?WCwdJ#*Ak!XVm=HT)*-`F@)U*>qxT;6?8YFee?Cet3l(zqf07 zIPn97uYh+=`E{!>K|Nj()=yvc?WVWMdyP1KvYBA46lk^G5$U_*FhiBvh>e3v(|4js zlwXVv4X}X=9}Iur~)-$st8P z$?0^6WVrKuw`R_>y%i@|QHSiYP1D!r^BoY7M5O1%D002Y7WWx8am?+y2cl6j#P^~5 zTf$n{dFR{XGS#^LS=VImS~AQ{by;dFC0cUn87aO# zA4mt!_R}J%63Q+~gbpSxjj={3aOnRO$1eJv=eAy*pvZwM1p>kuL(xUUZ+xW})aULk z1N5c|QX#quZ0mc~&mA_r!F;NF@pN zVtws~eoo7qJGQA&I_TQTtBJG4gwrz>7|K=8{LW(FucYCFBsbb}Ycv%B1IVivD5NuK z*l*rZV2A4@sktW&)mJQhC8~GG7hA}KR;+aXAM~WFCRsQP#es4 z>Y8GBWYc-^xvE*ZHXSrxp&>NoG*{wzuK6N50>SnV7Ep6rXk@t*o*sP4hCkJLuz2g^ z>-RuH{ZsxZcM*oCIHaCfj7UDkPwK|b{iZ&JjHUVKqvr?5M;by#PYmv}ev(JSx9-QN ze-S5>aq8d$6s?w;)G-b>GnM0#uwZ$#>49yjb&k74z%T9LhX?aL>j^#@Upn4e0d&My z$2{6jp7=7p&tutj$Jzxi%F4==sZ7H76Gfz!J~hld&AGr%0L*sy<$9(tRKSeNYh34j z3hX)tPeZA>Y*<=sGJWm&!CpXik9v28xIq2pC$9C0&i8z1Btgdy^5)z(|0W*dZ+rf8 zfdqqeD0G|6xYAJ@_R0)&vEKsp?_IlZg6d>aG?zuGm_5H8-crs_{mh2BN{wCttk|qq zH2I{ZqksVj1H*I8FEwU)_Lm*_F2KRi#s~X4joUDD>n5{_kNM6N_W|piH26;ZxC4)Z zFn_~8qnbgfSwn3Ki+6QZrO?e&E1#dAqM0MT_PMJ4YGcRKd@hbz_CG)w%ocmf$KQ*TxTYp~z#;gy z$)#V^f)hB4loB=w!JYbEzdlc?!a;y<=l7>}O(F!^S9v0i`<=1jBnzyN$3@=xB^0O} z7u1ny8`|;wMxiiZ#^{o5>3NHy2Q%6&yfF6}5)Z)M4zI4-qERf|i^QGPy%=^td1w|B z%DVi?FQh>`!H~+ME;&~?xKNDQnkD{tb6-gh{snQLg`!U3_b+WZhxv;yv5` z$MNy;7ALa=%Ps!f(r(kEBkdQpMNMd;S@hF4S`srM$4Q^dH1%qRa~c>XEr%G~%FuWn zy*Zv+e`tLKF3K-oK`=$79z9%zmA`lsFiC^omAWIM46Q|-?G&O8zPDx5-TEn^#ld4IZ5dwU*o!W5ml3+EEL;0z` zOISBJen}svXlG5DpTgKJYz$s*7DNX8;X5oU%X_Fyh+vnGiA$sXvO<{i4nU12_QfS5IWHI5}< zb~|e=c`$k~UL4ajwb;TYaasKjnCdLOk64zcae5S!R^QATdh#Fex=R+Wd#4O7kgA@p zpE5lLg14h?3hK}3n0c!i*}nr7g4k?FT>Do51TM8uk<#Dhiapm>>Y-{t#g9GFn^X?k z{jZNLWVrwkHXd?+pYCTm&V0B({WM2FEwVaX2irH2#47u2x=iuW%|%yl_&vyL*KL0% zV>dt!r?(1Od26Q}NrVqS7VRHtOht6g2OVdm8Cix-)bBS5eKKCP#WCUGrZxpk^HJ=} zt34WzE`J{G6Xu~CHEoK#dd@9hm`0P-LzW$yscSf`>UcQ-RX{?!; zey7{NwR~}vWdGF8EJBr1Uq8;8X6y?!Y|QY1x*cAn{t*`a!9 
zaC%xAD_aD&0(Xg;C?u~A=z+Adaq z{}3W)7(JgmXqyS%si2VC)kUXsmn47@qI&sw@NK{_n1{RRlMQb?Up?x-l}Rl%X+9YA z|N8b&9sV1yf(Fc6e4h{Q%n=D#M^%ywuLS7p5;gU-N5tTCMNW^{fuWa*iWY(ILh085 z%IEdETFTa>^&!?9jbeD^3CaS}4*?7fLV-#RQtO@R4;|=~paRjb?s{hmcSWM@9MAT) z{i~e=Rv#nh#?`C>t2{!r;;4%sOS_M(gu_$$v7gGzZ299M3imS#q?%T*fd@VhLz3^O z0ZatntvuS-woCFX^eroO%|z6lT@%5~3dZT5^h;AmRbpjrk#N0I3kB!dI%gY(kNTg0 z(Ea1X5>~nMZ$1mvWrD@UL3g0baPZW}uZzJup|=x88hm`{{4ZUqwZqaoPQ}Nx*b&pD z^>y0g%^}h7%qNLh~58bKb?@NQy$9k0kHJ2U50k5T3;i9pzp~-$^cLm_- z^F?4VKfGf8Tq%7LMLPUa22NNIfY*tts-K*#oZihnU%YF8wyu!R|7eUu&M(h{RtQ~E z4Bjo#L{Mqsln< zx|#`-AcRAKnpWfa^MbCj587`FY72kPTrJukat@GSDmZbEt!-+)u4p#Ux%3;X&BppM zus-{1GYoFp{+RQkLYk*t=>bMKr2CHt^hC-r@AkhEHYc8_o4b?3Al@YLl;mznC#wxM zWsau_7`!V=hDWv!c&nfC(#vXZx66e-3dDT?l+$lG0QMId@EM_vVoYwNt0{5U^x{0q zbSuT(YXlYP2W>bk@Syu4O##b5^G*D~^Rum2_>&Q1m)>~hJ^wr=i$Mpuh$5aY1o|!V z4SW8&gl?SIW4~wB!^d63?TqT1GatZkY!m9B=L3&)fj@Cg!i;b5 zNXy_Wfc(D-gmb8|c`1yoq>f(Tzw?hk9;I$uwgww6QPen85>v^XO;hX0V9_1{OiF2Jiswa%n`E? zrOuX~13Hh$Sz--&BG@@D?*j(H5&|Sp2$J3po4?2ld;gQ}2+-o} zAKQW?kh%bw(adD@48$@*fvjsBmV`$%RiDW;wcj4pff2CM59?zI(7bf!Lq`1WImx_G zf4J70SB`z&?sQSGS{#Nw-Z|K9>djV{Bo6S8fPMzuk`` zR6@a`4fq^p1hd+g0oTx-fcJA=c$Ym2dfv-r@*yK?1@heuZwYx20QU9%N&~|riz|>! zL#Ptj!V8%3jsP!KUAl`{sORGf#YK{F5C1pXx4(yZVz%PEVln{G-Z)Uq)mf9?r&LMzt>h`E_3u1ftnW zt9tu_>+!7zN%ZIwT+6=Z(_y3g77XUaSnj;ARo}3Xm);T#bYgTs)ArzGq$+TcHH+OB zW3)>`wdsQJJzkc#T`}dJ#1P{5>gW#hYZk%Ud zE^t9oM!c;7&6g8@ht_yYsKfAa*Gp3+?}Mdn;80`!lwS?1V;74nyR1OO3Fj0_#_2Dp zpXd%TL16E;U0(2wS`t%acdatjO2!nJRGyqf9uC$pNGFUS+7GQ>QorRg55(%Lua6z& z3J~f>Gs`$8fhl$I9F5F}-t9UP>t&g(2ls$b>ZBT_Cq49*b%53S_qwBP5>G4;G&kT} zl9YS&O9v}cH|UfZ%7fn&d0M1=pOY2=x*|E1TMC=o%@)H3Z?a$_em~bPq6LJ=F%053eP;P~Im4LD_4N!2xn31zx?&?fK` zz!+rb*lPYEy1J;TNhP^fCd`qmt5WRxnyoDs5*^Wt9wzd?Rjw3cAYSGFQ>F|4PG{!U zHY{xy^87vxAx%G}!XEDCggF7#q(Deilj|Aq?=W$c&xfxAu{c9TQcqVt?40jq~#=CA0W&J(P#XR<`4N=ewj(iW;&Cj5r;fTkGw~ zj}lxg1Iz_3C?GI|X6&by3&DNJ;gk%=LP{=7oOa|^?}Ie+9(B<|^mN6V8P|Z-hU0b{bWr?CdHpy?mpyS0c`iG}t5x@+e zE1UV!`QjS|#y^zz)lXS6&ub zY5O1}5lO}%T&+79&dnJK?D_0jYN-D6bI(hYQ=yzSVnxA+_3v-5&XY9X-d)b%^%EuS zddU`gF>R;AN0K?L^~MTjaQQkGV0byqFx2#dd=y5OK@Lb!Yli6&GM9y|hY1mc^-eaI z7l&&W2j#}fO1i2=OizckjaZ&vWeY-n9*R<;U-OatV^a+HFl>8vqyQ^AtEa-6luxsT zxeGfYjfx6kPqTnVgA(SwzyUNs2dkg;55+@)D?Y@Qh+>i@BidxB(cl?m{ zY4oI#>LA*g;O|=UB$IPMSrDDwOHnGUAf{=D`nxW~YrQLCbCTEbrcLg*gMmq*&7f6hFZB8lo zzj1la>IUMj&Ec#Ns|p}RV6$jPy_rrAL}N|r_Sm%onwH^P;XGMHSbpc(5u%b2@v4XQ zg$>sm#xIi$AToojl{%;*^9mtrGP7jcgiK0#!|Q~ZfBUDVt^*R5ysCBfKVE-&9RAAl zHUY#Nc$HnU6nbD${&C`|IE)YJ*9Xk8fC3uL|1y zUBs_}2B1bJ^@Q!uAd!rb=(Y=t>IBvGEtyG0`khS=v zF(?p#yxbO&yrYrz5>JKD3^9Fod=g3h`tm@r&`znJ?5b!I)6}CQPC2b?+p!lF5g&?& zKzHIYFHvYuPZ5>Lr&$8O^CVp9>^A`5ZK1g+vqHogh(g^NG%Smpv;}}d_|>nxjgjQ+ zA?pJx&$|D5ncb>wx>GSJ`YDd989lBQu+$k-kv$_TY4%UJ|0#1P2*bey5=%fi}f8bT|FLx*I8}G2Kg4UrZzsITJ@` zDZ3CrQfQo-@>>wU>TK2JWllEpCpBme}qIqP7h0^?VP04ktkDL1lk>g`a$A81m7)7oyGeE zUTRy88C~UE|MhWrYv#k|suHa(o~WY=$;A~Lvk5_@e%WG#nn&Twcdo_&hm=y2j?W(P zGd0IPB4mhY`QwQmPmxvFd;eZWN4dlCwG2)ujO2&-l%}t=26~o1joJm|a(EM8%IE$; zwH$%az1KMT@_KVP_p?DqXZ^;~(FPU9i`AA-0RybTbWT}@IgIz~A-snjSLbon3@NT?+y`G_61`^W$V+i&Ac#@LwS!5rIkOp zOYcxRd_#MHq<-?mxn$pOGdCb!7+K#inUsP-`6(U)%8Uc@g95GGr7ngh!vOEniHsIz zvt7LbuW;HZ=!n<)cV!`{Anb7DQ-MT@nAq!+Ju#2@rq$Q04;rN!QhwNnyi|PL+59hh z_gm1h=`PCATRlI#=}6r*X<)pu!m8wnu3pXW6h(?`vFEI2UNnUgQ$rKauC9PeH5=?_ z0}&7Ezk?=XY{7roH+2-4ZNA6%d-{a?{MtBbI48BTOTtuG2`0t_@je#h6|8Z~B`F!W zv2Slk&p8rnh}V5$Xb5<51mtdSe40h6yE>J(bbS+9BVvC4u!9x^)c&+W#?r)OppX(PVvwFaJK?9**GD}GU{p8AD`%ddn5iO-01Hq*6mmCz z`_Y3Cg5@YuL9dDo^lXdS-iJ6TQk(HhM_1GyFtLD{!&GSm(+jM6laG4GeMUzch% zZi(yUFP0utWnB3t<`BCl5l6$|$Mt4t2C5t0Srfpv0$iTIJAQr4ai0%Ig%@SL`C9%s 
z@x830e=3`Lefb|6{6RrInxP!I_HW-5QsbRt$yir^W)d>G&Z&EX<-hir{AL1dnb_W) zVffKgblb_-&|hDur7n;2aGGe#1VTMW@$CXFI8lNmzEX^8oR`0|gOn+Wr z(tzw?m_iJd>^tfEn}4Wy+&h4{sZJ7&|2-H8;j3nx1UD_6``4a~t|?>kRXr0EDNn2n zzWrH=`c|1$?kFzH{h+${iXv=_-z;-%pb`nn!STT9;q$H7?!rVSX_PCoP)HBYXDx=9P#x z!lXc}13xOpo&tr3Kz_k^@uNwnpTY2v-Z_Th*zHX~3V2A?#S@=T%tK-!bzP{GU`sfy zN+b;pKHs-U{S@O5(u>HSj}dKDPmwF+`7`rnv!;(#13hXfA2TLc>^`^Gi$G&y?v0Vm z5LRu@*=~N<`;e~=nl^mFWozCy@?V9SWP`+i9tU?|))^OzG)VbPNaVNou6M6r#-^5r z1}Fc|e%`TD(LVflr3)^Pzgk^n`{UP$40Eiw?v8N zb`KDZL4M`ciLA20tM$}fx{6q66e7M5U}mrY%9e$*uj(^Wx(VHe3*wDxS9(T`lE9GV ze;HisNERH5e4Y$)3nLU26cj{>5I<)8mKb1V7EHA@-N077cL3V%F|<$@C{wg&1IPZmVfO~lYaY#Rv634oKv zLs=8w>qQHu$e6bGoSWdbuZ{(as zfN{47-Y!;B;>4u&MSQN`#qg#Hl0{P7`SA`%3zfIw;S(G)y<6Hqf>WU~S&mRJii?S# zP5)zyv{KWU+fmaZ0$ZOdR3lUWX@gL-YEsT)eR2q$b{#9okgfTuRPO^C5%0!FTZg*r zd#N4{5aE(|rDo2Ogc8;T#4++b_XckU#YOIqI={f=P8?f>Jr0CyZ@C$Ot+g-Q4g{RSn89z+WMfBRb78w38hWgn=Rhq z%ru{`cpe8uD6H`CQ7kMs0H#y+Wd8<{Nn>U!O78NHUxxn_ywC=T6~(7f)FH{; z(WMFm_#ky>Icj0$6MUtj>YwKjmbduuAEB%A&aX>yQ&6MI92^&3*RSK_S65$8ABEF_ zH0>cQu-6eO`lI-&aAa-$i0t`0Oto;TLQzKZ{W4&cl7MuhSS(#0=2TojYg0pa5ifW1 zJ&BVLj7CL4B7du}m5Tzj9A}ROtiJs_=-J+wX5t}{h{D3_>`%tzQZy$!OP;Gmajn5; zE?uf#z!7>mR8L~FiNaGwc*#+_Z_Pv*g zB@mHnMa^^PNjhUD7!+aQzqk%q{Jx&lYNU^qg0k#laChN)>Uem%_g+?;H407|Z<;AE z%a@jCwcTDt#nU@FfV+7_l{mkjZ2wYXNWHOp6pa~cc#0U#m7FrB{$r<eTQzuY-LBS{za^aX40x>b2>Xz?+6DVb)rzQd& z%#kzdpmMGWvd~Its30{`ZwK^$kEp)<@6q~v5gCPbwaOW3LO=|JXMR$3XVLik=ldq6 zroazb{V(1uWcxbL1NSB(Urze<$OWnGOJ^)?e9qbtq7drK#9$1q`EN&%UEp*_yR75i z`HbA*{Vp<@R7q0@Ne3k*#m6g<9O=F~!V(<-zSteOBTNr&ywPv$rD{h3Lbw(SFuM-P z0gTaOZ7GK?@~LD^X|nj$@)5v0UElk60@NOv)Z8!{{BGZ$ z^K8qJ^b~~D*86qhR)74A@V{R!{>gwIP38+a#Ar4b)ol%m^}Uh{2r(mH9+)oM{^ba_ z862ZWpX}#RODducGi^F#D9PZYs>||DBi(EpoQeGTC@NAsKDjqBK4U#~wuWF%V624P z9got4DDb!uxcJS(f0(Xy5klULv?uK5oK4s3dq#>c$iN8C+rIC{Vora5T44`JlgjdI9J%}>I(~5HhMawMjE0~*hD2J6 zI=ugWZx26Cn!=qmwkvTZVV~Yr#UF}qqyE&sbb1c7n-v)4S&otkUvKF##oDZ=6E#&a zJ?}NCq_Z-{e_(VR&mkxM6X~Z=#)BNjN5{BwTeM&acY51O?nZ!KziGUhYR3{#@Qf#! zxw5=qx6!coVzet(??jZBl_A#!WZX=)zZjOePU(ZZ_RleJS3o16oMggxds~bSavpdU zbA5KFdV3?Mr~I+JLD-YLppCBqi$N-=j%!f<5uFqvY*5N;npLBV*Yk5Ul3^`mA2umB z@rYId^pjCxm4zEH4fghH`XOKYFLi@e5!SD{ViHKYfakxhhc1U*K@(OJaIk3RUawTS z$B)$V1x-VcyR|#GI8_=Y5d!<^&QW&7-d8L;B+P|nP?9*KAuIQZ3BW0`)plE!LqQB` zI%RGZ{I=D(w1GYAzndHgf3L~)E9$T{93YPY7fi57g!G0-O{MB~br!oRIrP3@JY6w@|JqJ4KoCO=WsowWz$h_*MK*oW;q%l~$3pDRgid0bQYb3S z&0a`T1zk;8SI)p|&UFU|+4sSJcsVc&)P*<0GafB_oruIj9$Gs274OM*{SE}PThsS^ z`Dp!52#-IBJ#3@C;slvgp3L|iH}}^|W|y8Nz^q^Dmo;&$zx9gYG$j-Ld-`!}<6j%|$faKHCD%Bt`6 z5{E^<_(>QTq)P5KlRBpbtzrDKkp<|!?I`%K`QUTAS5h08u4G-NB#}|N>O8;he|uPf zpB0&^jE2uaXz#+aMc28h88MNn6?{8@iEyG=gC6`!bDV@v(`}|_`y~YvP-X2X>cbj- zFPrD_be{bjkcgF^xWX2eMfK|qJ5q@RI}>cwS+eJ^Iuw$y)FnHYl=FVYQkb#`1*ec z|C6nm+n@YZzOibb4AgnGijD%7@A29T6BYsrpX1HNq554j&--=cSkMdk>XEit=hTZt3nkJI?rZ7MCJf6ee1cF01BhqW333NDg=2{{C=hRJ3aa^~$#GKbI?Z?7r?}mQyFf;NMNDniGL< zo;5duD)6i*@p7S5sK{pMdV^HK_#>T=h<|^Shz2I?C-%QO@d+9&zI&@v5ctDin$Pu@ zz28cxZ}W&EZQauPPFic=cz}f*2f%1!=i5ZH9Xv7J3bAgt!Bn3FOf<_o?qn ztp2kyp`iH3_uYo~TSnSi_L`YNUlUy-&LyD3s`=&8riCGfc&jVIVn+2V_b_Hvi@MdCPwD z^OLt-#}jjQNw*two;T7}{rN&M3E$&0GzwlQQNVuS{*R>d4y5|~|Mw! 
z`|BYOa*+4I~VHfIm`=R=3%uEWLVrNm~Xt~pwF^oz)XD)af%buMx>h?0v z1xIplHS1~G_vW3gAK4S%NyNe+XTJ}|fJ-7ZKqZ?S5fNz`yg7g@02|UW;_3AqT)~Zd zv%fS0t_sq8#f!whfnW(Z4IVI_b)zkOsoHKZ+aq0U#e-Z6By$NJ_n_GEv#!GjB4=5)MZ*Q$Qop60|7T!Fsn@*PAZeSj{93L+x@i8qWU zBVuvOC2eU$B~oeqJs6Xo1ENcuG>o~TbN*}t^hHYIfs2ekY^^J1kn};Rv9|N-sG6@@ z1fkdA`WPSf;uNS_r0GNs=UcI5ZXA%d!wJ0>$nGw0q<>zg6}x#uA(swyg9IkP3~ zWMpO&Qk@^gRso8%bu5slm?cr(>2)Lg#`f!lx-|F)NryYXg<$v04mAfJt!c^yi{ui6 z@xW0`6dP;26?Hpv^^;Sg{k%G4{)hA)wVZ6qoc_J#pr?ls^#8xWhjK@^)FVxvw#0p&vLFo3ScCp%n(cm z8#m29LBS`qjUUG?sbLwB>1-GPvL{lCxe0|^L8wuKwT`mSe{`|kb)9i)^gsGONv4*M zES065qlJhgU*BECa275|OLwYT8lxqL=oe>)ZUN`(vc7%yM&tiN$P>JQ$>eyF* zk5?UEH*taC%Jkd5na^uu%iI@Z9^ygaSmvpKgOI7Jkp+#D*?3o2=Qig)$)O`WL{T9|mK837 zd)7VcIy7S9*iO*~}0oQr2W-kdI2KBT;2YHpM1h=Hn4w3<6?< z37`J4T;8=Ynt9$wXHL%)%3D7h>-LPpN)G+az|>_;=~e)~cTqs+RY1$7=8J*jK|F(h zD;6f4j#y>%azy~ll@#k(-*?e1&IK;L!jNv`@Kc||PuaMn3D7Jut+TPr`*e`N z-Y1<6eXiYk^K*n9jFX)Ki;s~*%E@8uk3PqM&ALLs)*~*#NBq1b*;=vNUx7@ruw0l+ ztXJ}JpfEbxW5)X!oP-dNWr|@rj6=o579eIgD;NP2LU#6@UJ!rlPHlNP)-`_8O)sP* zBY}|;iHNxW`r`d3u!;+ktOEP07KNk%RYPR5Mt{p z$%HJz(WLD|ud6}JQNv}z{h`+4wFFB0;yH6Dq2bTJ%pk0gP~-&sJuASFy=YyF)zeGI$_>5CT?$|y&ZOgF?z_>Yu|mhH z^Y|hF$vJWL09q@ZZG4L50N-H<`*RTsnd?_$4?bl+<%7Ms<&I;5cinu|L|;^J2U1bH z&pR5TUIep{q4AuG6HB3P2ft09KlPI!8}`t1XV$k~rY00ccDvdowVpaXVJ5hxY97&z}|- zGq1hVy_GR0NiQMUY`ITs1+wqgCUido=tuo$PA2s%w-J34Clc~QJA-*bAAl3OT+e(_ zs$yVY)HatBmtz^9jf`)Z;}7bJwuQtI=sQy;kjcOX_n&kN@$2LYX@&;%m43s4x)qDa z)Qk;0`{p-)8EVxwjk^wH@?i>3eE#&ZNDalIt~ws8uv$?o{C6dvK3xOr=1LiPRr^0< zXCWbz=k{#xq-zVm<#zMMO1zv^x^*=vCdN2~qsA9cEPbw=-pd%Q9HMJvv=vMnkDMa+!W2t1NY?zVF9WLo*|QM2ap)GIDRl#|Aq{U*D&62sRypO2LusW) zmA(}3{rmvuTqP@LA^(ILsH8BO8k_-40d6~zC2IHYrK4zUVJoYM)F+>ZKBoS=00J_k7;O(=x zjR%@IHPbxXnrmfxm3{e9m{~etcJA!r-^@#6f_^xJ)ncAtLF>L2qohlrgrB1&cGKiY^QV1bM}i3(?_Zqv~Z69DdS1_@r`AX>}gH;7a_9yGqV zFA#3COzH-aI}#*# zf19+#5FaTi3tZoVWLsp_5s47lA+Uh<-|$^8D&n`Kwc}{w+eSz*#ISE9lTTg_<5A2W z_ibFu>pE0Rt%EIoz-AvEUZX~-uD7>0;3sUQ_-!ldG#L<*(T@X)==2A>R*eM^S^JXw z83`>pBczspc=9iEH&$ZjzO*DeA9D(pQlgpqJ(_Yc>hlS?QDwalEErd#%sZ}iE9L&N zY*i~GOV~v>TK!P7mgX8p#>f9Z2`A(kl9*Un?&Z6?`0`hV)~mP~z#GZW_cgknoiG0h z@5S;_ncK~|2VAv*6JNGGHuactf|(*%9m_uwuOSL&;jMm8%IeFNaD#`O&Av`X?j2xoRIcCgxi zj$PiDR?zHsmav^0A$5Za#qG0=gN-q|nin$ysz7mp-^nrEoxCK#L`^|(`7$BxEb+pG z@g2a-qIh^=0otRo>d?Qjnao!!uwV_HZ-mAcs`9UwFx}T0Q0Z*ZIKm}0Ai5zFamVirMF57IxhChbNn+V zY9(Hm=-iw4T6yk3Hj;R>8(s73UFM5)(T#hy6+CshbY^K6i+&X1tO4(%a5D6rC;o>S zY}H^lSJ^mCeluv%9uus=4To8OK622=^2&bYqea~>;=KJi936B*r|o^pMoZ${`lr$T z{e}?Pqj;T`(Ohe}8{|-uT0s(OEMUR(F$AgDBOrM{_FhItL?}kHEIFDN!Agi&>ziIt zBe!pnLxlRHSlZXY@#=T&?fILs>_h$Z;+utuAOe?ITPs4VpSyp*sMYF`-XnWt!nbe1 zpMf9vD`n7i<%uAYNkQA*{tXNE)aHl4oXp@;uelb*V2^4W%X^%&hMJkf{etvP+a62c_d1seZikUp^>}RW|fSc3};x zgQK*{fnxc0YT>7DBGf6MA$FgY-hAe+)gf-?+SNo)@2TImq{O2F7aA0B7JAS(I^U#< zONB(JsDS1{nq7_`d5IXm3n2yk0k?=EyWziRsV~8g{-bH`r)z{JkZ9zL*4^rhq-{q255_%9VCr&^i}_0ZXI?lomd`iJ#d=gT9sk&+r< zBTi6*9c>F(PX-5Hqt=uH3M!Nu{r*jFgGg+^d3GbQ`(uJZ{F|P0?y(My?nM|U0P`~T zDi~x%zsUN#n#xHP4-eEj*ebNXlSvfFD3p8aPINz8L9<7|$^#+=-pNgc!VC{f|IH$U z0(?;?VIR@6sA&9cx3~|!#Ai~|(HY%tLXW~!fuW9HhvkQ(-oFlI zQi6up5X{>bGrQBT8%qiViR-g;5%t_#usT@!s8rS2eRiARCLvFZTB5E5P%xj^_k_|N znw9d$621|0oqn2n{r6)m;RIs@@=>oGIaqd2Y%^ba?3Z$_4JGIoDn>K0~DOm>BlOfCTN0L75^9<|B>l9(>s^n=lu!< zhx=SFed~ z=l>=%ife;oDcumhdCi$Obxlnf2`%XhqlEE?xR#Zn0B+>nIrA#kbnglUw@QIXozH%t zm<{Mu4Ruf!7K-HPax^N_X+b~P3|qfixV~H{E2kJ*5aOJ2{4hfP@C19ljw zsRnI72Nb099N2#);bfA8bNtTIsiFqwi)nZ)?bTOu^<9ZFc}c@?0&z&|Ar(G$jB%JT zH{9#iV)I7Mf_vi=LJ9_5Q)5^*U9pne@*^(sO-&5DP6#}znbsyKw28Noh23VIPz0iW zWU+lde8jwRwq>Esp3WVN<*(d=z(v0u=(&al+w_rM{|7sT1uH!}6%Q8vE9L7D&9Y)E 
z*Jp20-;d6#HlyiBkxf}LAU{l6K}|Y0Yk0_m`2~(`A6VI|p!@>B+V;)}neE;nFeqwHB@6|8W zZTo{YdPJw<+}Bfidyj-VSqm+`TquA75cX>>JBb1$B;zIzJK}kE85d@Fh%Nz*wCC^S z3c>%8aWIy%=)YSWBh@$dx#xK<8F8PejuDb+84_5Va%E->=H0;Rz|!s8R~V z(9qB*LmH-`G&=R+BixB!cx8Hm$Xn~h9Afxgbl5OAB4z;dvop_SD>B$V)Yu*Z*M&#j zU@2%z%>MT$=j7k||Fkd}m&is|M3)>?OU2I7Hd-tU;LN&(iGKqoaGhtBdeluXJAjmuO$aYVU8RkIreI&?hp)_U!rpgf z!20eu{;Dx~(b4P!Ca~6SCFwg_0-N)e?+I(5)#B4lvIK&=KN5OCUvmA*XIJ`UJh(3i z&wZ9%hL@)0DcB{ss}w}i3OrA-J{|hT^Pd_eedX=ROXCLHUf7M}f3-0xy>vna8kHR5 zJVUkl>h_gfLc9-MH#6{Q`O~h?wqM2=zdH1oc`1+a_i%sn4;m!njmKD7>o^9M>NhN} zg~kKl&uSO9(Xk*?H1Sg62MswYK`ELe_s@0a?iXh=$F1f>RZ2BIM z0&dS)MOG5sV1~OaRV1u}?8=%)4^WMvsGwXP_pS(PwAAIWD2V)yf`Ba%`R|3j=s-qh z8d~bMjU69QRn&&q#Q!{SCV*KQ*??NcPxnGY;s2aQakrWYl``3+m@2a%xM1f+?YUG0 z?)^KlMum_MD_yLF%a%N*C%3RG`T?BZQZZD@%HvwV!PhtQUISiVsFUfVT_9$I*y^a! zwXp+Q#jY$ur!SqG>Q`5!6dnSNL_;Pc1u5^yNEzMJiAWOw<+RJAd*-b?H41&Ky&nxX zC;IHWlMR{db#MH*6eXZ;Iy+~JxqlNb^bf%fw33MQjWP54OF_in^E@%*4=A&m{l2S} zRA7?zHnnK3FAg+sle)~^pt4@@YBwS1$*&XNIkBFtE4a7dD|$Z31SM_{PSyD6)Hp{D znKTdZqu!Hur69dc(|V zo3H9S?bkUnsGF)MJ@SQ9FdkQrgVdaWQq~aT!g;ocngEIq3ccGeJ1h&L?_FKWp3B`3 zn0Ttxh=3x54$8{OrPh)XJI=3Kr$6|IiaRkKxi^Il++rYwZW8VKlvFsRKdw0TsEtOW zLnD6beEM+8xP>dZQyaSRHejc!b;tO=XI>EY2H!aM$k!f{U}|1iG%kTpdg`h@(0MbA z7Ae2j?f8-m?VyG0R=VZu27>9P9@8A4NB$1tWK$OFqeMbN8VrwveTu#}`+Br_BMw4L z97%X%R)0QK=0n`tkciMK(O^1VH~{D#beu9X=4Ck{3o}eYE#yDtjpA#Bg|LIzPTU_( z%z$92?sFgt8P|CQKJE#mH{=6D4i|x=7mI^@E9A3YffGAh=g>e_2O}bc$dD`j2E>o- zXP*KD7SqSoR5NxRNZhC{pDp^2&za_i89~<@d^(amtu~K;~x%&1XywJM7!3Dam$SiwZSu0d{yhT} z^bay1qy~z~{I36|H%$GGzU!$@6w)Xwm7O0CN;@v?4+(-a<;P;R(?_L3wvr-hMGy&_ z`;VI*O_SuKmbpdNTOLh7553 zmn~@R%Uktema4k-NQA!f|3nt3k&H2yicKJ}pi~%HSw7f}`2%$w!%n3qYnk^`0j}MC z@skhi8N_e>yVd<1Trf}|Ywd-9vj%GW96-{A2I@XkiZ=89p1j+B9fNfWv<-!R9Hx-E z_**X6ZaG0n1GO}h%d4uinkamvag!EWz#YVi&6P{8{n34TgjN6zHM15Duh8q!LP=EPkBOBWzl6T2BsW zdi{ITn#~$H{!?w{jGY-BjWV1W)#EZsN1@fF5kVG->kA=;_}tKV$nX8I)5q$}xH_4$ z#Psr_mo0y4J~rSHl3{7((E1_azkq0Tz4}!%Jn4z}YKSsHrM*oCq<*DJv{g zeTSZ#m5C>Ok}Y2%d0F@M>Yyj!ta|od?1K|vIQjgZdVee6LRi#WemYb1f^^U!O2xlhgyH?1oy z8;m!F$fk{pfCAmfBwIV}G|RV&%7<*C1-+ z4Wi{H4I|2Tgd<@Us{!N=K37!H)?#r9|L^g0HEZ8IOd;zT!?Y@1^YPk=S%Znp`(NE;_cab%@< zmEl$fKQU}J=u!XwiWv|Ld|4R>;ohgxKH5e}aKi)7@2 z5+iGx+o_TO6ZF3rIOyzS7PmgNEefze=p~Xzj3(V@Eag-%DrK20fr(g4$V2Sd{J*^1ofU}Un@U2yN326@vZ$RQqy z@cNba$#fYl!eEZ0Jm8GR9? 
zkb5xJ;mSNV{`TZgqj%Lm>)%tN{~h=V4mR+>aCN2N@IXI8tdRwQOfaR4Lz*lIq56X0 z{tn<40+F>M^BvirCwZ(o=wz}eA zmx2)A$ebUzp}A4`?oB9+M)B^~c87a*huqPss)$!#>x(LBioWdq{YY_rF*N312GI%5 z%L^0me{rfoNun3e7i3;nGCTRhj$m%3uB*k;@EU}W&As+QzZ57*B(v(c4Eg7C>wq}{ zCU98R{@hsV6W7wuHg|XtDE39gh~rY8;_&+Q|K{6+b5M530}}{>1cCp&JEd>nJAn3| z50+O;A1(%4(%Xonhjrq%F%9_xXx`|s+SJww=VEI6C;?ver(BAsv-k4r zkD#be8|`P)nDS=spB`F)+F7ywFwp=dFd14JsD1B&<|I2FF(8ky({ zh?U0zWu92Wx~I~W@R!HxC}m||EG(|UKJ2i@@}tL$pMkT5lb;-Ep=f3N+xFM}x<}y# z8!G7K!}GZ*&RZ6=@oBCy5a)jn>;Sm0rSsxc5R9JPnJ>jrogDw@#Kpm476B*hKX)T( zB2Jn=ZE!FGn14HovC;()gmCP}xR`;tlX2sELVlD&_kiF(=T!!e|oz`SOFx` zN-E-!2vSQi0vhjO7Fl+ocrnGDwu7pLKkp}#4W2mNpduVz#=>%w2%!oBHT?F?yF9(; znG6h18~Mw{LZ2%JX^AsS?fvFp-mS zYNH_diLcnmegmML0mQ{j5t(tLRbpY06>ztnK~Qx~=xIhRM!P_NE?0upx;?WAAu>yn=E3%*8@4e!zu0)9ZTRD3&1n~7&x^GfV5P+o#Zvu+z0f+7HkB@P~M^^M#?TO zds4?2D-AQ(_bnE&^^+_sWB_E#tveup5s!yPgn@k(H33+Y2S7befY)4$I)#2)D5MPG zK^cV43dK&eD1Tkxoh#e7#?4+4}n;z~p6n zJq#x`Rd3cJ*NFa^qd*%h#txsYpF6lu}<&j|2{tQ1=}_ z=5hb?GaLR08ShHUO4BneW`)F$ubWV3Ji^J02z4_x3u_k>7gJKQv;Sw&KK=AV=o(Ey zGGG`9$KIIj%+$}Vrp9)G=VaojuIx;=74c_#C}7(tiP{aO-~Be*Ca${ig#z6GZ6lVD)SMeAjc!{ngYLotGYkl4Bu1eX6Aze!7;^ikVoe(4CLi z(dOso*KU2v37GQ9X$OdkS%<3h`#WteTF-jZuW|s2lLP#E|AAQ4*<9noNQPKqT$zX- zuEZS@3WfT;eS?#6P4S3XZJtbtqb`Ch?s2hre#n1vwl+`@m8g<24VinSY<2y=8*&HB61Xm3a(2AE;)rq1y}tE* z@4>WO%30oXVG99+cVCrHmXvN;F^QKCpkP>X%ZsHqJlZzN6Ig+_K3gl7D6+!Z>QCQm z=x9+j{V}(s{lPgD-E1`oj*P3Z%h+{ivS#4FDeR@2Wf zxGpCkHfd--SN+o55<-n=)avMwz-iGP`s9Aj=~-iX?nr>jY(rlgQxVu9wiWq8pt7NP z9oK;YyveV3=O|vZ?9m!T{wnJUBV!PGI!Ha~-MRDHI%Qi~BE|mXj0-3{9!{Tj96;0| zP=+X0a)02E;cNTb7Qvc^un*fj{9@(mizOdModR#|z5P0bQTLp}K%h7o{3H@zZJ)rP z@DN65nmT=$Ojkd;bg(QUUn`OSWB*gre@`mTMh1PHqNb1U{hfBq^A$6GJSveo_gHmG zxhLX+)haWT{15XXcfHc%}2cQ?T1TqwGH!*^Bp43{Wh@cFbASK`1g9rRj7&Ko4;hD+>&JD05j&~;-DI# z(DmbAFv<7kSyonf`pvz38KM``y1KeeRs-M^&s#(jUxKX^7)TPXj9MZfrM(chPLVKM zqe+HzaX#lZ8Zq=~Ia5k%c`_Q*b#xO_x&ryOc#dP-;v6F(!x@&~x$ZO)uYhvnr4qhz z^6x(YwG9YLfcD&rcLOVi;D&sirEM1MJbAtneU~FSTRe-Vq_`p+)pM2dv0skdF@4(D zmQ67vT=zVE%ecelGg66#mA^2f7#435`MD=V_Y+ixNWk5}J_Ro~sjR~7pIWl8>93AP z8KRU^03CLDuW8`)vNAd!mmcbzt{6}S)xP%MG>5Wc^vuixaY>EZo;GA`l$JPUbk->5 z!^VZU�rEl=V-3cAKI9;Y5h$s+a(V719RiMpdtJ>*@g8fo1f0mV{@OaDTHNn2NzH zuGNgDG9X@`->oHqEwAZPhX$&!<_TAdFVR4sd^u%$+3`Z1lPvPNv2~zJsrURyc_8gz zeYxvx>RB26xGg5d=!@G^%Kxg<1}F*n_4f@MS3mwWa|)NUm2f)yrfaxA+?nIEBrW*h z{0R6JOyV}bDu6I`;WFrdzB~6pDV-bu0Aj|C$I=fnJ)fm}kN$oD?B+x0Rb|7hZL*&J z>E=cbP^L~pBWaPmw2ECq+@CQF3YIoegh4Ubrz`KCT_5BeCmsE=bxLA`%cFF!?;Gj|+6q_iZeKi?hj-PQWkrr1TLaX)WEMJkn_r!gp9t=~2+1VuC|yxhl{#TTVI z{g^R*oAGjb%oT4-%1|1|#(xzY@~iVl=ZE;6cp+|A13QSN?EqCzkjxAFmIF1czYi7V z9d`Crv)}9na`%74{f#Ia%!))&>8N`V=CP1Cr$3G$(B2-W$CE+bNOsq;T#E^D{aR`@ zlF*kNua2jj$PB4qvp}P+!W&OQi>?~j7ALBeS2EWZa&>Vffuwx5UPKv&t10MeYxBgl z?nhU@st4 z{N8*nYlRE^Qrl`9&-7QJbd-4ZezTqmSw1V`Z#N{wcQISa11M+bfkp}^6GBWc ze10D0T>5~{zcxE`5spE2aLo9{+M4s7_IB*6eu?dtPshCBj+Y|*Q6i39B2}1 zlJDNdce2{*Wqving(t%S4A7OK8QQ!wi*GPD>0SCO>dvBpDEw)@>u!wVT`G&e*Hf(v zw5?UVWQ$|Q=irXF)k(BZ)TPSo&3MP7`(9Q7z8qPmSEk!XM^mm9S)?LdsuzKac!uN? zI3cDE(J2u)45dEPsK9me;?Vy>8`R4iPPl|ehO>PBnZ-6HIyc*hClW`GG&hgMesq5D zYG=4Pb42QV*SN{~8HCk}W?kq3I~iMbUOLOt7m9i3zSa)EPX;T}ksl5Rql-fNp5^|W zGjF({Fh5&9?zlo{aNr|0sIR@Um*PIty`{WeSD({*)uIs3%=bo&O*;j5W1-U%eY=X_ zhwQG|Y-pFg|IV&XNqE{|4a56UJ2%bkm%JP+J#)q9;&hdlrOQ2YLRS>LU<2!Weev(s z!sVQ5iXqHW`R>+y2$jv=D%Zx>)PTc}<`+lDHDy>E@?fgR3U}~OoZj&2aE?LZy277nfxGcVP^Ru=i%c-Q)%&=vcXSE&dIVeo2JUCg5|L{ zppp(}qe5zI+C(zSZ~Z8+nIA`HE#k!92FdkLrv?Z)Ya_lKGs(JP?<)O%xd z%_xdMBAc5Mp%P1JX3Gc{Qb&x1D-QgnTVgP(Saz(?c)$LICN&a?%wr|HgMW80{1JK! 
zf;;eCCj$JAN+t{8EfRcRM_E?QCxmYuyjHp*$W5gQRwcjX9C=SZOyvwzNTM5nAmoc| zKD0Q^@ahRgr5W9R;0=p=b=S~I&z2hhUEJH)vwx{kziX^N#cpJM5@oAW#6!sBs$wu0 z_8QCoUhE^Dsi`sAe!VWb0!60A3EqqF{dei$!;2n~b(g7F&p;IU;DS|8plI-yPmU$~ zX_^O%leAPwv!+R~Eww%Z{}sj;PeA+@!O4r16lORj8f%Tgw_An6-O|Bl9hd!Nl;s_b z*WlVjqNR<+WMNr_$Zcar!B>M6ffFecLI~&7h~G}KWkBPimHr9$gcGB{E4fvKVF~*2 zyOP<)z4I9$#1hhvq^zQ)pW#=ounx84+>$w}Ik#r063(D;K_+G5(irS>MWYnQpFx5k z+RAZe129T%xEgZrMmCLb&nNThn~56WZ3;+NN#MW>Ap()|8uUxQNWgnwRmR7nG%xSG zs?fwRLx{0qqT`C0SlJ2a#}FzcQai%}DFCGuePx*W!GI@~tgD=wu;t5$J~^ZzKUZDZ z+C`2l9A}dx13{%}>y&}}gm~Esm9K9KyVNteE;vyCN+~27l>2tQO#HE?%59stJE9dR z4Tl*=*s6lbox8hFV~s@ub!BPm7ho6Fmk2&NLjgVa0-UZlEzc z`xYchnTNwnR@92@5&q-HI~TJXRI^LJB#}nhoz;`&|8_gK-YebmIsB3{A8_q=Jb(U& zE9c_$%SH}3(Hj5+`OjMK12wVW&#d{2Ljalu5z`_FgNI~`k6wAad1mn}A4j$Es17G< zKRlf>yC6B#P^i0RD)R=4Nq${ar{~~vdoEt43oa6A~q8wT<|n!^^?b0^*B8U$J}81(=~nDg6*^77U9 ztkqiyF6A5N$#d^V35niUzCVm(t#%Nf*(ZU3jLR?9vNowS70O}47jqq#lbVUj3U?p8Z{0sg3kJd40?%l7!^RC935A|9HwQQNk$ zR~PTp-%v0Itx$V@D&PAFA+sX}6DJ4RG^J=bhO@}r!f=YuZ=fukQD^QH>hn7Xx1>+5 z{3JXNdUg6pu<4cqH{rowAfjI14X-x#_XsPyaXg&hL87y7)ha2IWpmy^VJ0nlZzd%w zsmD1?h<+*Zk5f+DTE@`)gi`j@(I6X|ovy)gy6q_`j7g54-*ijpmHa0hTsS01=-?UQ z!C&zVFe47D8qAX5a@4{~6gP91Y=}SdnJ5el35MSMsshKqn3@QuGkg8s-h@3R;e8l> zro6cCkdw)m>Lp?taW#xe@9f3JyHjrjldRRoC^IBM;;u{&!b8I-+3Gth%&}?S25-)! zLCp8Vi0_}To;&To)ul4HJVoyn1co7dd_H`TeR$ruAxh6p_~{OnJET|OyPlwghQ;2% zEq@=!8p8qI2MEg*reKXu#mr1=F7n?UTbZ6W{qfo;zG17iLwg0?noH_JWGir88Mt^j zxg7+O#_)p!N^u{uVWVrlr}t^~k|;NSa)~r;=D`Q&bFLT79+M{Lmi4FN%-@RM$qGvS z>!ph1Cg2|MeUgX`{|Dio=l#NSZM{2FaKu|C8Q^1(y?egH22G%chge`C#1V2t$*B}9 z0^AKvT`v?IlaV)lTDsTDH))NZM6wi5Mh!D&d)#!}BpWgtucL@U;|lJgs}b}2DspmO z9IO=kP0h#WqoqH6c9v&-^w-LLek`>%8<=ez6Fu;4)X09~zV*93pw&aK6ny}*4L#j^ z8E`tfqM>USWtS|&gCH@yTk-7W?CWo{1O^u)3ju4H=H{`?3{muqd)XmTPk+#j7d>{X zP<*P@{UE&QL`tx~`w2$UKBmlA-;A^3D-jE#8?(ms`7Lo53!*UJj#?#mG5p*1!{f(h zX8RMo-OsASf*>JINav=s8>mpxEW`N+SEUz43i)- z;V66+HId&+juTp}+B(Qk#_&ThWfU~gR2-4tEY zh$9a}2&J^A&Y4}rAKL+IJsNggOVBI(@}gB@0?J(v&r5sHHBAMny~lDffBKS-G_0u5 zlp98}G970x1n&@83W{%7D~(D#i!)heK!j=QK;-bTL3f@cvYSkd{;XtbU?Wgw4TAMl zFNMsXU2xa_`s6?+28B10+zop1#~GH{S_ad8mWux1_^D0^vW6zomJ_8iHBpJ!e?UEvSYT8+(Jc(EsgYvf zE%TsZ^;y0zhqfaSB!X$l`23>!nLUM&fecMO>6Kvv+s+4Xbfeo5i1&zB77_|HakpW) z`SfoLgl)nHBCSQLtzd|uvHm*={hl4K)^e`<9%SWGlOS>pTVK!Rf23uwhT~ifi)yysA0>Tpu3TnxmnUM10FY1NGEZP;FdP(lpNmGM+ZfiV2*D%NPc# z82pw_Fdfh_yuI*VknnAg?x#?DN}7o8n82Wl!niS*&ptK6bhHK_qu#M7Y^WIQe)Ix8uQ z{Q;i{)+_#7qd-(ww3(ZqUy{VZBHFe)VKo3j5G*B|#P9MrJ+I8#&(Ld_`t_=v597-&&Pnakq

Y) z*&9MJc}tZ8?Eyc8p&{|Z^oI3U;nee#5=VfB!U-XG?zR|k@VD|4i^pnnadvu@&!|?( zZpfX1I!IK|H^@}I|3T@ewm(iv*Pn0wknC_BS&6MQYJNTU^Uo$Cu=V1XVuG9c4bfNh zPuLU!3c_6st+i)ol*dUirCc8i%KDg%QuG!kUR`8;k-)WByZ*>XbtKGv{VtNPxH+L0 zv!{^qzRXZbw)PiJ{y1Gig5meVmwEe4TB^J+&*hp+h}ciX9FS2V#QkFcc5wcQ#m{X! zrZTvaAzPzZL|0Rnd4Ox#U{J%n(ssM-+UGl9lwIx}TvKwO2qmxnEcpALN?#R}iDW9W zZPtml{S;29k6iLwXRrfYdF|z*X~(kX!ukc&x$kc|C0N|F|J3f^9dvrIvEchry?oan zH)B-%_0d;du@c!xRhQd%sQl0c{xZfy!5bV9oh0h-FTzj?Qt;bW)JIX@IrAW8 zc~^n1-zZ2>7la@8^?qxQOC*i+)oJPESL6-Hz1R1FuqZ@^)JH&spgn4**Ga?a&bb`y zyN$Pxlx^gs?pmhXe)#N24(6>`$4EZ^GK)v}?M`>_Bhk5ed(In=`n*$hnWy6!ny*Xs zI$s~Akij^KcPXF3 zKIsm}3cvbETytnymTn>WJewg0%O#2z#k5&A6uOTaz*jJK)~^U7LX60ZOs?UC;X z42`tbG0TAcupjx8fnR8aZ@>yy&zJY`jbH@d`)dC~SiRFPcR!xv`%&e$3}OkOFjox4 z8KBEa0uk^yVtE%#-u75^6WnuN+#wOnlfN$RJT2GsU0&&zlf=o8?OEV?VgLU6$d}5} zdPn_3m-p+wT_#qmIF(SNKpy>b>XDE6`DgSBtkF?Z}nE8;eh z1cSV=&};5R2vObpKQTo2D$FMRp%0G^VTS~W^oyU^d@*Hh_=AD%fvm_M5R8_hCW^ny)OY*4oS-&da|__mt$2B~c^miW5PgNVs-Q4_v;k<~~LdU6f(sfM(Q?$Y%JL|I*ZdVf{M`LiyzBB#(^PbC?zt z1YSrHj+WTL-t)bZ&yjrQ9CM4&9^0;LS#Fq)3+d{WLj+MKl{zoz^|6Oj^3gS7aS?Gk z&s0fXBV5TwtIH8e4Z3xz=AYD~43fhVdWrMYN2}^7usQLE3DIQvT<{&9qX@{QCX`P>GOry^Nrx&oQq9f5cozgxX555^z-!c*zNF`5~43gU>kAkN>9ub=KM zvsHDRW$>$a9{qXA^{&q*>)897xgIs6D>e;+zj@O)%he=<@QC zXn27xB0heY^WQv;=k-_V%dhejQgyZ?#*k`8SgzJbeZ5Mt{CSB~_V5ysFPZZf>xm0T zRUI82WBxm$(Teg4iSLMK%F8f%UnLn$Z6LBavkez?*VXkO#iX(h!XLR78F*dy=|U2T z@u(P&1ngay1B%giICd4vj>H4(ok%i0U05^Y{l86vma31( zZ~BgZ<^d;*!4&k?Z{B#)cDVkUWr9JxpHgB?e+GTWMyI-s{%*z`0y^;(Kn*tkt{u`4 z-~2Oq{kwR~(Th-~=W}^DguW)2E$+=ic|iZ@QZ^TJzyuhl^ zC&Ow~yh{2gl6-^!OV;so&Eol}S`EWa3Uj=0%V?(?lBVf}6&x@n(J|RG{oYG5Dq*TM zf;2O1j4I9T09c3&M&(H|4k2mXdqQ+qa_{@Dub*g`XbFOpIQ$!au1s_W6?A3>2XP~k`gcE4N74$91VtJ<|9FKap3z+=%_d3 z<`;;7H&mzg0i;Cl7mAVT@Pq6&`6xJiM9L!dD;&KNcbd*=7^|NR!0^xkaAJ=HT%qb zAD>+EsEf2%Xs^_((&Fil9bG*R+?iSHo$60)b)L#m(Vz9c%*V0N+cNf@3?g)Xarbj@ z<)4_1iC|ucD&D^OnR;)#D*G9!a(bF_do}U@E=b&Rfx+^{1UL0gO*h`vkh!1Bmx@bq z6AxFCRnnCoKej5tl@y=V|E5qB&PM%t z-}X#Fuf2nglEW*=I*NT~>XpWhxd;WG^A;Hy&)VHFE$_NoC9mTCiJ$e`|I#09*Mc@8 zFkkJ)(qDlw0(D=hWj@})^p@YV(NU)|gy@L9D1TMo{q-h_dqMo}e0BTHGQk|>JT-R6 z#&y$sOhEB?NJhFXzUb8C!eq|x&TD@VsCpC-aP2dHyi-Fe(|uH!yCo|}av$&0m}$Vw zsMz*XZuKNw)NACe4YZ20L@uW?+iFSso|r++rP)zJN=go;+O^!N*2~N)O`e@Poq7`3 zU@LC5Z?sO!UYvWgSGrbzD$<#;p=ohn-U(kTB}Dy`6<%0180J*fyhg|7Y;Rf7T^7XX zZXrHTP@*_OBpH~L!}4>#p6`0Y+pg`pNkJ)G^M*^tyKvz?A>Pb%J%wmSmBgR1TItlZ zH7r=a!-n(N2W~6_=?~GNFgcplG6S5rKB$Aq{$F8d0TosB{rfvKQbRWkUD8N*NeKf8 zqI8Qx3J6FIB_)kWrvf4=-K9u(Hwe;F(hR)o@Be?b)?4e%TIbH(Su=CbIrq%j=kB}r z=L_{a1>JTN+F(v&-Mx%^ML46+tdv+f>7fC}`sCcls_fD=R{e;eJ`qH^UBOrbn8rPF zwfa13xECZ4V~0aizxhd{3CdF?ly-s^dMwb=U#|Ch8rRuFC@N>NF=l#FZ`9IFQn63&(xLdPF4%L&Jjg zQmIE9mtJ0S$ew3kwALqm=E(HE-LpGT!p%iu-g>-ed`Ci8)_TA<<2J37oAqWPT{ZGz zM5gW;yZbYI6&JV`K}6BYo3xUEh#TRyup&=UgD-z|6k7-H5&*bs0KFmSFCwt1XB@PG zCn!c!-DZ;O7ZqG(O=tj`gI+y8&%z4yIyekd3+xhBz+gI_&xa}*KR85wk zv=XjHrviHv*W(x(Rm+9z4OErJ6dDch`GIa8sqRBd6Z>bI%rEp>(-I#6CD+UY?woSEItXB0r706y z0zKT&pUxQ+aY6uZ2PLprknaaFuVa^*H@E6yB~DpiRDNXVh5_%69xJ6dO#i7_H~^)b z#k`i?eu6S@fgsA@aJDKWB2!Y+@B_dOm(7W{of~i&rPd&oRU*GE5Qp#h;N>ni3(89UXx4gK_NYYi zuIqWKU$j@JOttmiv+~RNBxeLI6sZc?=0r2rzcZaFri9ECE1+R^1Pgk5GH_RV$=u8; z$(af-5SqCzoBwIQ$ZSLjUr_Idbkv5G=^loMFZ9eW6&NP;G3`VGYLU^@_lc}^Ld@l# zNaq>2gsECdk=ghehQuIUx?9CoqmIDf!xy)`;CkyA1@dAOI0k&osuX7y$Lq~|`Cpma zJ4HY6KNf2;QrSD9I@-LD=3hI0NLB0GRv zcPX6)D;dsm_C+P%YPn{k42H(qqYg6aMU%gO z@g`4PqW3Z%&Z{{YhzRD|6vQt{WRHFpvK=-8(b z!K3A8l8cJ6g`J(U-p3SuW(Mk9`Ov3)xWG=LLI)cHef^G4LV+>)-iqqWNbfiM6a0}x zn#J>@R=K;gGa^Tm9R=#uJS_G%sAzV^@kBo3o#n*u}-4WK#0ABqfQQSS}4;q_{eQrR&AF78d&a=Vki**X|-~ 
z5H=&=@8kK`iC%GiKG@c4^!Vubo|khYlq5740ou@H6YB$U)^Cc`PDewb&#@sfk@}LVe3d5 z=6oM&ny`0-eEM4CC2``(m3QUt3|!7nBmKD!)DsoObq{1KR2wI6M#lWP1+eAbwQDY3 zmC#^kP+7fqs9(@=GHT~>l54h|n4q1JYkM<9nvr9ASYIDx`AMTk=^-^oR_Anw8pqyF zbNN4KK`~SFS3f>0s>V*+O2|V_C11e*W++vvNaAQ&Uz?GEn#quyfSfs*m@$Km#-$q# z7MIuEuA1) ziL)u1t0X77kl203JY(qhr;A>e{4UeVX?(AF#7tS0Ft+SvpS$oo7IG3IbB`BA6P@BG z2As^YPbNTovvX$`x~@DBJR+^8l_-CD^QDMDvwxpmP}JEw01R9N>6LfmAKQ{Y&-H9& z)6O?e+`RAXd12tW%u%@JUjj!{2}$8Ozu@uFmnoz=bG1k+0fY#!wcRMuiO(3UEM@%A zct&qAu&mPM0`Jzs>A{F05ZByVlN9@x;C!x7og`}SP`&cuDae40Sn;<7!vLGcLh@(i zn)4=h#1EAy=p}Xr%|j@pNsVT0P6@*C$_rT z-`N$t9nT7~$HW9G-DgJYOr=yuhk+c@ac7ocwvZB8+y>Prz^lI&l@iw~bkZ>VUF=8k z(9}G%6$3um0JS_fNm(VRJX7;WA_NfBXyU`AoWDX1hetA!Pnzg)9L{Ag;zNz{mm`QA z*T=7)Ycgn9$gc;m9I-CfqoLudFvq{?qps}=JERX~AU+?w1i7EW-b7CMQ!oGeybFI8 z9FL6pmQjvS8O4cu5-#9JQEqP2l z0R!I3F3Qy)ap+JCsx?05oA2 zsiF?7WegoLwl|&M%L@7Wg6mvm<3oz@nuz!M@+-qrc9)^1XOG#~ z+VJ#bbw5#&X4|wdgz3L@B#xrapu$qM6#9gp)Cu_x!wf}ZryxdB>5(E|gIBYv1YGXd z(TwyhVN6zrIGLEfYG@%so8(*#iu#m-x`g-{id`oWIPz&t+$)<%0!MZ0?sRLIUZGH9 zlBP_}pe%7iD4t4>5E`a``LSnM(8CX`t~A9h0igVFh0NByGKPFEfl!*=I}+fF9mxr4 zBCh`P)aS}V>RTgHu08Lm%M1}SqoXqwmlXxHSAqN1j77R&kY1WISER2NN>9L1<=k5WyYzFh z>(-Ca{C9gw6bvoi$Lk=Mm4O1*fiC)*=mMXnvOOhQ0 z5JiK~O!WdJMjr|LFx(XW7~eHmmCx@b89BT`E$=anATyAp^w4wBEMCjoqb2EE+tea? zEUAOlf_x{bwd>?a;zxcPyi=>r63Mi*@Y(4fiAwLQQpDZ0f97awA#ot_;xg|eXDdqz z9W=hD*q7AD_Ig9s5bhE%w z1;*_`aYVLUepT4&L+$or;0;iNhNSU?9DSy=mKXddS`E7G zD73ujhi=hPaIY~UGgGD_2t>;wY(wl-;{mSyOL7f`4mBP-{pK}IJAR2vw&drU?UWQH z3>&3uVn4zhDSC|1r$wF!5?VX`IPdRIi?Yn9@OJZ|PQ2v;hg`8PDU#;wn5R_*7NJ4$ z#=$|^e*I)qNG&-R1Rcjr*meCdsv6&lV!yzD>lWb3WSse_d24{g$QN4C)uwg&)_~8v zgWCWxZ{;rQ{(ErDA!tn4D%|Vf#oPUH>l8lJgq+M!Y74wRB=x@jSuuC=`Y>4qkKtmg z-<9cz)1t>gt(?i(B{v#Za_$2iI1%gQQ(ijyw<@FnGsPKzZ){M(?NdPK*Da#AVkr8K=4=TccsmeLZq7_UU!q^i zazI@+hh-O6B>PgEyXmr$4b(VXT(wxS=ery1Qo;80=g`P2Xn)*zXV&uQo%ef)z-i#k zHTlJ|@BPWW+1FJpS*;g(9Dz zXvv!RBn2ciFZy@ee&zegKQ+o1{gcMekd%q0ch2hnnMevNN*evmuy2EX&eG9@?ZEYm zvK*6(q`WL8N-*pHJg5HtvE21o+g+jM?$}Loq-1mm0*MPI@?t!0TRR%yySV{%Dx+1$ zn?Uu@fV)%LRG;p5XRXKmAH_(FIG!_{fp;=qmC1Oot(~6d$cYQM_O19I^SYR(i~aD- zbTW!*>+mieXZ@~*j|7FKe_xy5I(nSgFEx3BZh$4vz3l#(C_ot&Mw8@ynA%Xpq|#Zd z{4L3Fu+FV^h58)%RO)AcV9iK$t(Xi8t7&PQR=_j^-F5anAF$`KtyKQvoS_g1TYccO z?!tU==gAy%bU)X8v075{FmM%7)_Tz@CN|27p@;Y7^J*ogCT z)F^iLwP;3JdAaJRe+<8qPkoHex0PSMd}(R%+lWZ$vq9S#NI}8Y9fpe=5OSTlH5yF* zf&G}P7JcKN!7S7HD*r8zv1SgxlwqVVFDr9$c3>VYqEEm>j+%3ob#V>~`>3ddNq#}? z?b}}rXZsiFT(Fa)4_VXUWVSP56J zf4+CVW1Gx4qd+HaqF{oK%g%l7GT8d3 zYmg#8RO2eMW02NvlD@pz{XDD9u;|$PR|GNJXs2D-_yZj2I>JbRUfd2t`L+ZP?c8Jw z#XRtnHm=|oh|b;vxfWn3`s6vi6cJ;2_oW;_UN2_r8l|;(^fx)S3TJj9(ok%DF_4cd z7l#>h14;TqxZ0hMpZ`!9p+C+FMy<>2)tO)BVlu}t%C$8#L{x?2xa5RcQW253Q9vQM z>lJA}edBD{2tsL4pfV2+VidF^A=fdvrNW@#QsDk2r&VX+Nl$WBt|s;5wcl?VYXWM^jMs{n-1F-OL=Rx~{#F9NPW@ zn~aVp;b$SNaC~0io%!JP6@l3BU*R(fUXvD|iJu9zk8aK%iHZqY+gEVg6Ib150s`U~ zHLE8{R|<7Q-*~zB8ZXzQIkLspK#}m6t|L(Sf}h3^r+lwp^fh4;PY=}=9oi3nRQD*K zpNFMQftJ$m5BG0BuNk$vQy>XGXa35ZsgZyx;&4Go6xHO4tAl41v;~5~35n^O#j9wV z0D0=HON;Jsjh(LGU~|-N8<;&LAt!fuj~Xh4;jn9`@0YUeNX&*iEN5EiAzD zMc-D&VWCT-{!Vy$hF|=dJ3V5-yY1DnUpl8>_{ADxeer}}f!<>*Qdu-Z1(}lTv!Ch? 
z&YIl7QMK=3^S$iksv>*HF{>Qq^AGS2iG^w=s|)zgpZpU-C>Ft#ixWY4_uBLi!St-a&uo`H%>ixD{7AmU9Jc78s;=2 zj7Iij!Y3FFKq zS0?%>ukg9J&D#8~hO?xqEdit|EuhAJ>L}_l%>9{O3IkIShY9>!PyYg!6=w*k=CzJshMU zkq=&7p~_VYrX&AOvD>AKeDX=)TOK+#GPEz#23TMDsK&$`665^;oMQB=dqJq(5BOKE7f#;}cYS9t|`4&9+}E<3RqFP*d7B1b`W zpjGQ;ko97Pt-%5qAtG^JGsV^u^77G1H>}E;U&m=#_fX=LJx}7|7H==BUCO}) zL>eOGn@!qi+;)+QgzH19k_m0D{mBX;SOnBUHplrY-5OD(xJp>Voc2t0wtM%n&Xo!N zAkzK(&jO+bw?F`&MNvf3Aq$o0i_a7a{RRPt_jS8F+7jM+sD?2uamjQ_9X4ZTT(2+U z)j#IWLV%C4ru%^BMBz7dc|(n}RvzMsFG-aws*0a+0YBB;(o)Csm0@S6SEjXZ4epRN zIZPdc0l6Otn6ZYhzTi}!?cG&bkEm}IVQ&znO1Uh9*<8!h(=UWtW}io(H!zTR^UWyj zt`o|Eskgx?(-d-T2dx*}(O5)fF5*~#B^meUvY|xn9%4e}UIM1je-pnV&XOQsRHsm- zt^HF)t_R)w3y7IE{i|4hRem~?ZERoSFVw(X^6>(n;rn(a{NYBnQY^T9WUX}=RQ0$BgY7u?*D`@ehw%0 zj)h2RI$j58bq1cFJ4ig)oA^01iuJQH9+yLu;)fPHf?yDgW#eDZZXA2W(mP1XNC+0}F z|8DVdUob{1)1^gUG@S_?7pXG2Tq7z1-IMx-+F5UN>$G_eK%-)4NHTVZy2(h7Lx<~q zW~O}wVyvCW?A;%|yjQ~9n57Zv4*z&5XlDI0_KXx4ej1AV>(;tw3u>yOkz6gCy)=

Ezfqmk}0R{kpo7bbXAjpuVTk7jq&R9&t;J%H(r zF$wUsQNkcx@N|%sDI0j_hj3y}@IL9|5WC$8IsV1rJl`<(Y^DbL0N>H0%SFxkX0tS! zuc=q;ym339i*$tcP!WbXEJU0=_C~V#&HnsxA4i6m6JfB$W`AIxUH7OhWE7r=T>l=% zy!7GER8~0ef6cd{@%SBc>)I2oMHd055oE7L^AdmjP-s%bixugPQkR8KR!&rqw|~5$h_VKA+ZanDol~ zU1k;5;uXa1H+3LuAmo%rx(3N&fhFY1F240%ki!Mb?`ytN_%6*ZtzYG=*9jNZW$R!h zDeebxw-)NQEbv@uAi>Qv3PXlPG3x7cfUX+hf(PECza9I{ThV&G3ks&|nWE;pPTF%s z9D-fi_4S0h8gW_|%kE@Y>nxQ&Id0tU$^}>m#k(&pfNKoEaj9wQMKj1t`3NF=K(8h! ztn4JcjEo34Ba)w%3{-+`zawm$0uTAmLNl#fF$@etNCg1SNt}*-nv4u??qv+uwk?0B zgHDiO;{Ru{QI=Es^@f@HmhNZut9C zK&7_TrL7zXw*`HC9OQfvXS-9eb7EW6GIq(;gZuG-jLOoJzwWtPHOD(VH@+>+L-U|m zuj4|UDc9T056_-en0?Ur91Pn=&4%V;m;b~^UM%$iIDPp4J;Qvlk<1_KN$@3%?6V9q&C!HONsA+`e5b$h=cEuNzRR% z6+{fu@D-y9K?qP(ewSv`$r=jPLjyNxR1GpOIi8L(B{{r&_3A;vKBA)5W%g+8C^7MC zd0W$dUL)@iFQ_v@t@3F#LrHnGQ?;~az7|oRBi~|sn$n|X?PSW!%q;-q)>es1KBDFQ zUnix?D21g=%LK~k)hYM(@5_U&T5Lix=f#>Gkkla)&~h9`IjyWK?sMX!c0}+Xcr;Fe z2h6SY+3j~u%&tb_s2zQ%+-uRKdZAKslc%O|TZ-7*P0z^i3T*Q2490AW z*#Sp}``=kX#m!k7X$h)Pnd(9REJ+7taedGNfeR@cdOTD%cwD=BufQk92wxwctFrp# zCeY*k(d@0mRLR!D0<-)PG$^b6%=YZibL9tB7>=kFa+v-{-9Zkx#M>F3a|e0o^PteS z%R<}156b(4?JEBDZ)_OXr-C5uxt$+u(HQAHE zd6p@kr@Ubu^65aei(V{k=&-{iR``*nU|e%j6nV>MX=y4zsZD+CBIF_8%PRfFeP4ND%U zuekTL@8I;%CGPNTW;U(cc~6>{cDjhfhmUW)?{5EghyV@oeK2ugzrpP%fn@uc<9B5$ z)v?F)LIi0iDx11CEVPo94@5i^;Ha(qHn6MH&|%P&3oAxpDak>$-pj{t*-s|~%JiCHWFfRCXA!2i3CYY?#})Wf1@krUkQn7iluSGD0gFFf$c^*r!`pvLLJA@-+0GH{>J)=1s%$rFGZ YeEqLm{yoO^lm0!at7t2iD_R8q4;AAv&j0`b literal 0 HcmV?d00001