From 38851372e15e03eed513238d0af31c292b32c040 Mon Sep 17 00:00:00 2001 From: yolain Date: Tue, 4 Jun 2024 23:32:31 +0800 Subject: [PATCH 01/49] Upgrade version 1.1.8 to comfyregistry --- __init__.py | 30 ------------------------------ pyproject.toml | 4 ++-- 2 files changed, 2 insertions(+), 32 deletions(-) diff --git a/__init__.py b/__init__.py index 9c228a7..c6ed820 100644 --- a/__init__.py +++ b/__init__.py @@ -1,7 +1,6 @@ __version__ = "1.1.8" import os -import glob import folder_paths import importlib from pathlib import Path @@ -43,35 +42,6 @@ os.mkdir(styles_path) os.mkdir(samples_path) -#合并autocomplete覆盖到pyssss包 -pyssss_path = os.path.join(comfy_path, "custom_nodes", "ComfyUI-Custom-Scripts", "user") -combine_folder = os.path.join(cwd_path, "autocomplete") -if os.path.exists(combine_folder): - pass -else: - os.mkdir(combine_folder) -if os.path.exists(pyssss_path): - output_file = os.path.join(pyssss_path, "autocomplete.txt") - # 遍历 combine 目录下的所有 txt 文件,读取内容并合并 - merged_content = '' - for file_path in glob.glob(os.path.join(combine_folder, '*.txt')): - with open(file_path, 'r', encoding='utf-8', errors='ignore') as file: - try: - file_content = file.read() - merged_content += file_content + '\n' - except UnicodeDecodeError: - pass - # 备份之前的autocomplete - # bak_file = os.path.join(pyssss_path, "autocomplete.txt.bak") - # if os.path.exists(bak_file): - # pass - # elif os.path.exists(output_file): - # shutil.copy(output_file, bak_file) - if merged_content != '': - # 将合并的内容写入目标文件 autocomplete.txt,并指定编码为 utf-8 - with open(output_file, 'w', encoding='utf-8') as target_file: - target_file.write(merged_content) - # ComfyUI-Easy-PS相关 (需要把模型预览图暴露给PS读取,此处借鉴了 AIGODLIKE-ComfyUI-Studio 的部分代码) from .py.libs.add_resources import add_static_resource from .py.libs.model import easyModelManager diff --git a/pyproject.toml b/pyproject.toml index 0815883..7fd8a5f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,9 @@ [project] name = "comfyui-easy-use" description = 
"To enhance the usability of ComfyUI, optimizations and integrations have been implemented for several commonly used nodes." -version = "1.1.7" +version = "1.1.8" license = "LICENSE" -dependencies = ["diffusers>=0.25.0", "clip_interrogator>=0.6.0", "onnxruntime", "aiohttp"] +dependencies = ["diffusers>=0.25.0", "clip_interrogator>=0.6.0", "sentencepiece==0.2.0", "lark-parser", "onnxruntime"] [project.urls] Repository = "https://github.com/yolain/ComfyUI-Easy-Use" From ed7d5846f7e32a77c9f6074386afa849b9db6821 Mon Sep 17 00:00:00 2001 From: yolain Date: Wed, 5 Jun 2024 14:19:07 +0800 Subject: [PATCH 02/49] adding some creadit in the code --- __init__.py | 2 +- py/easyNodes.py | 668 ++------------------------- py/ic_light/{func.py => __init__.py} | 2 + py/layer_diffuse/__init__.py | 209 +++++++++ py/layer_diffuse/func.py | 207 --------- py/libs/translate.py | 4 +- py/libs/xyplot.py | 2 +- py/xyplot.py | 603 ++++++++++++++++++++++++ 8 files changed, 848 insertions(+), 849 deletions(-) rename py/ic_light/{func.py => __init__.py} (98%) delete mode 100644 py/layer_diffuse/func.py create mode 100644 py/xyplot.py diff --git a/__init__.py b/__init__.py index c6ed820..2326f14 100644 --- a/__init__.py +++ b/__init__.py @@ -42,7 +42,7 @@ os.mkdir(styles_path) os.mkdir(samples_path) -# ComfyUI-Easy-PS相关 (需要把模型预览图暴露给PS读取,此处借鉴了 AIGODLIKE-ComfyUI-Studio 的部分代码) +# 需要把模型预览图暴露给PS读取,此处借鉴了 AIGODLIKE-ComfyUI-Studio 的部分代码 from .py.libs.add_resources import add_static_resource from .py.libs.model import easyModelManager model_config = easyModelManager().models_config diff --git a/py/easyNodes.py b/py/easyNodes.py index ff90628..9bc8b9d 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -19,7 +19,8 @@ from server import PromptServer from nodes import MAX_RESOLUTION, LatentFromBatch, RepeatLatentBatch, NODE_CLASS_MAPPINGS as ALL_NODE_CLASS_MAPPINGS, ConditioningSetMask, ConditioningConcat, CLIPTextEncode, VAEEncodeForInpaint, InpaintModelConditioning from .config import MAX_SEED_NUM, 
BASE_RESOLUTIONS, RESOURCES_DIR, INPAINT_DIR, FOOOCUS_STYLES_DIR, FOOOCUS_INPAINT_HEAD, FOOOCUS_INPAINT_PATCH, BRUSHNET_MODELS, POWERPAINT_CLIP, IPADAPTER_DIR, IPADAPTER_MODELS, DYNAMICRAFTER_DIR, DYNAMICRAFTER_MODELS, IC_LIGHT_MODELS -from .layer_diffuse.func import LayerDiffuse, LayerMethod +from .layer_diffuse import LayerDiffuse, LayerMethod +from .xyplot import XYplot_ModelMergeBlocks, XYplot_CFG, XYplot_Lora, XYplot_Checkpoint, XYplot_Denoise, XYplot_Steps, XYplot_PromptSR, XYplot_Positive_Cond, XYplot_Negative_Cond, XYplot_Positive_Cond_List, XYplot_Negative_Cond_List, XYplot_SeedsBatch, XYplot_Control_Net, XYplot_Sampler_Scheduler from .libs.log import log_node_info, log_node_error, log_node_warn from .libs.adv_encode import advanced_encode @@ -173,26 +174,6 @@ def INPUT_TYPES(s): FUNCTION = 'run' OUTPUT_NODE = True - - def replace_repeat(self, prompt): - prompt = prompt.replace(",", ",") - arr = prompt.split(",") - if len(arr) != len(set(arr)): - all_weight_prompt = re.findall(re.compile(r'[(](.*?)[)]', re.S), prompt) - if len(all_weight_prompt) > 0: - # others_prompt = prompt - # for w_prompt in all_weight_prompt: - # others_prompt = others_prompt.replace('(','').replace(')','') - # print(others_prompt) - return prompt - else: - for i in range(len(arr)): - arr[i] = arr[i].strip() - arr = list(set(arr)) - return ", ".join(arr) - else: - return prompt - def run(self, styles, positive='', negative='', prompt=None, extra_pnginfo=None, my_unique_id=None): values = [] all_styles = {} @@ -227,10 +208,6 @@ def run(self, styles, positive='', negative='', prompt=None, extra_pnginfo=None, if has_prompt == False and positive: positive_prompt = positive + ', ' - # 去重 - # positive_prompt = self.replace_repeat(positive_prompt) if positive_prompt else '' - # negative_prompt = self.replace_repeat(negative_prompt) if negative_prompt else '' - return (positive_prompt, negative_prompt) #prompt @@ -609,6 +586,9 @@ def pm(self, shot="-", shot_weight=1, gender="-", 
body_type="-", body_type_weigh return (prompt, negative_prompt,) +# ---------------------------------------------------------------提示词 结束----------------------------------------------------------------------# + +# ---------------------------------------------------------------潜空间 开始----------------------------------------------------------------------# # 潜空间sigma相乘 class latentNoisy: @classmethod @@ -680,7 +660,6 @@ def run(self, sampler_name, scheduler, steps, start_at_step, end_at_step, source return (new_pipe, samples_out, sigma) - # Latent遮罩复合 class latentCompositeMaskedWithCond: @classmethod @@ -820,7 +799,9 @@ def inject(self,strength, normalize, average, pipe_to_noise=None, noise=None, im samples["samples"] = noised return (samples,) +# ---------------------------------------------------------------潜空间 结束----------------------------------------------------------------------# +# ---------------------------------------------------------------随机种 开始----------------------------------------------------------------------# # 随机种 class easySeed: @classmethod @@ -867,10 +848,9 @@ def INPUT_TYPES(s): def doit(self, **kwargs): return {} -#---------------------------------------------------------------提示词 结束------------------------------------------------------------------------# - -#---------------------------------------------------------------加载器 开始----------------------------------------------------------------------# +# ---------------------------------------------------------------随机种 结束----------------------------------------------------------------------# +# ---------------------------------------------------------------加载器 开始----------------------------------------------------------------------# class setCkptName: @classmethod def INPUT_TYPES(cls): @@ -1807,7 +1787,6 @@ def adv_pipeloader(self, model_name, clip_skip, init_image, resolution, empty_la return (pipe, model, vae) - # lora class loraStack: def __init__(self): @@ -2070,6 +2049,8 @@ def 
load_lllite(self, model, model_name, cond_image, strength, steps, start_perc return (model_lllite,) +# ---------------------------------------------------------------加载器 结束----------------------------------------------------------------------# + #---------------------------------------------------------------Inpaint 开始----------------------------------------------------------------------# # FooocusInpaint @@ -2278,6 +2259,8 @@ def apply(self, pipe, image, mask, powerpaint_model, powerpaint_clip, dtype, fit del pipe return (new_pipe,) +# ---------------------------------------------------------------Inpaint 结束----------------------------------------------------------------------# + #---------------------------------------------------------------适配器 开始----------------------------------------------------------------------# # 风格对齐 @@ -2303,7 +2286,7 @@ def align(self, model, share_norm, share_attn, scale): return (styleAlignBatch(model, share_norm, share_attn, scale),) # 光照对齐 -from .ic_light.func import ICLight, VAEEncodeArgMax +from .ic_light.__init__ import ICLight, VAEEncodeArgMax class icLightApply: @classmethod @@ -3217,6 +3200,8 @@ def apply_advanced(self, pipe, image, instantid_file, insightface, control_net_n return self.run(pipe, image, instantid_file, insightface, control_net_name, cn_strength, cn_soft_weights, weight, start_at, end_at, noise, image_kps, mask, control_net, positive, negative, prompt, extra_pnginfo, my_unique_id) +# ---------------------------------------------------------------适配器 结束----------------------------------------------------------------------# + #---------------------------------------------------------------预采样 开始----------------------------------------------------------------------# # 预采样设置(基础) @@ -4305,6 +4290,8 @@ def sampler_dyn_thresh(args): m.set_model_sampler_cfg_function(sampler_dyn_thresh) return (m,) +#---------------------------------------------------------------预采样参数 
结束---------------------------------------------------------------------- + #---------------------------------------------------------------采样器 开始---------------------------------------------------------------------- # 完整采样器 @@ -5406,6 +5393,8 @@ def callback(step, x0, x, total_steps): return (new_pipe, out,) +#---------------------------------------------------------------采样器 结束---------------------------------------------------------------------- + #---------------------------------------------------------------修复 开始----------------------------------------------------------------------# # 高清修复 @@ -6010,9 +5999,10 @@ def doit(self, model_name, device_mode, sam_detection_hint, sam_dilation, sam_th pipe = (sam_model, sam_detection_hint, sam_dilation, sam_threshold, sam_bbox_expansion, sam_mask_hint_threshold, sam_mask_hint_use_negative) return (pipe,) -#---------------------------------------------------------------Pipe 开始----------------------------------------------------------------------# +#---------------------------------------------------------------修复 结束---------------------------------------------------------------------- -# pipeIn +#---------------------------------------------------------------节点束 开始----------------------------------------------------------------------# +# 节点束输入 class pipeIn: def __init__(self): pass @@ -6097,7 +6087,7 @@ def flush(self, pipe=None, model=None, pos=None, neg=None, latent=None, vae=None return (new_pipe,) -# pipeOut +# 节点束输出 class pipeOut: def __init__(self): pass @@ -6129,8 +6119,7 @@ def flush(self, pipe, my_unique_id=None): return pipe, model, pos, neg, latent, vae, clip, image, seed - -# pipeEdit +# 编辑节点束 class pipeEdit: def __init__(self): pass @@ -6252,7 +6241,7 @@ def flush(self, clip_skip, optional_positive, positive_token_normalization, posi return (new_pipe, model,pos, neg, latent, vae, clip, image) -# pipeToBasicPipe +# 节点束到基础节点束(pipe to ComfyUI-Impack-pack's basic_pipe) class pipeToBasicPipe: @classmethod def 
INPUT_TYPES(s): @@ -6274,7 +6263,7 @@ def doit(self, pipe, my_unique_id=None): del pipe return (new_pipe,) -# pipeBatchIndex +# 批次索引 class pipeBatchIndex: @classmethod def INPUT_TYPES(s): @@ -6654,606 +6643,7 @@ def plot(self, pipe, grid_spacing, output_individuals, flip_xy, X=None, Y=None, return pipeXYPlot().plot(grid_spacing, output_individuals, flip_xy, x_axis, x_values, y_axis, y_values, new_pipe) -#---------------------------------------------------------------XY Inputs 开始----------------------------------------------------------------------# -def load_preset(filename): - path = os.path.join(RESOURCES_DIR, filename) - path = os.path.abspath(path) - preset_list = [] - - if os.path.exists(path): - with open(path, 'r') as file: - for line in file: - preset_list.append(line.strip()) - - return preset_list - else: - return [] -def generate_floats(batch_count, first_float, last_float): - if batch_count > 1: - interval = (last_float - first_float) / (batch_count - 1) - values = [str(round(first_float + i * interval, 3)) for i in range(batch_count)] - else: - values = [str(first_float)] if batch_count == 1 else [] - return "; ".join(values) - -def generate_ints(batch_count, first_int, last_int): - if batch_count > 1: - interval = (last_int - first_int) / (batch_count - 1) - values = [str(int(first_int + i * interval)) for i in range(batch_count)] - else: - values = [str(first_int)] if batch_count == 1 else [] - # values = list(set(values)) # Remove duplicates - # values.sort() # Sort in ascending order - return "; ".join(values) - -# Seed++ Batch -class XYplot_SeedsBatch: - - @classmethod - def INPUT_TYPES(cls): - return {"required": { - "batch_count": ("INT", {"default": 3, "min": 1, "max": 50}), }, - } - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, batch_count): - - axis = "advanced: Seeds++ Batch" - xy_values = {"axis": axis, "values": batch_count} - return (xy_values,) - -# 
Step Values -class XYplot_Steps: - parameters = ["steps", "start_at_step", "end_at_step",] - - @classmethod - def INPUT_TYPES(cls): - return { - "required": { - "target_parameter": (cls.parameters,), - "batch_count": ("INT", {"default": 3, "min": 0, "max": 50}), - "first_step": ("INT", {"default": 10, "min": 1, "max": 10000}), - "last_step": ("INT", {"default": 20, "min": 1, "max": 10000}), - "first_start_step": ("INT", {"default": 0, "min": 0, "max": 10000}), - "last_start_step": ("INT", {"default": 10, "min": 0, "max": 10000}), - "first_end_step": ("INT", {"default": 10, "min": 0, "max": 10000}), - "last_end_step": ("INT", {"default": 20, "min": 0, "max": 10000}), - } - } - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, target_parameter, batch_count, first_step, last_step, first_start_step, last_start_step, - first_end_step, last_end_step,): - - axis, xy_first, xy_last = None, None, None - - if target_parameter == "steps": - axis = "advanced: Steps" - xy_first = first_step - xy_last = last_step - elif target_parameter == "start_at_step": - axis = "advanced: StartStep" - xy_first = first_start_step - xy_last = last_start_step - elif target_parameter == "end_at_step": - axis = "advanced: EndStep" - xy_first = first_end_step - xy_last = last_end_step - - values = generate_ints(batch_count, xy_first, xy_last) - return ({"axis": axis, "values": values},) if values is not None else (None,) - -class XYplot_CFG: - - @classmethod - def INPUT_TYPES(cls): - return { - "required": { - "batch_count": ("INT", {"default": 3, "min": 0, "max": 50}), - "first_cfg": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0}), - "last_cfg": ("FLOAT", {"default": 9.0, "min": 0.0, "max": 100.0}), - } - } - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, batch_count, first_cfg, last_cfg): - axis = "advanced: CFG Scale" 
- values = generate_floats(batch_count, first_cfg, last_cfg) - return ({"axis": axis, "values": values},) if values else (None,) - -# Step Values -class XYplot_Sampler_Scheduler: - parameters = ["sampler", "scheduler", "sampler & scheduler"] - - @classmethod - def INPUT_TYPES(cls): - samplers = ["None"] + comfy.samplers.KSampler.SAMPLERS - schedulers = ["None"] + comfy.samplers.KSampler.SCHEDULERS - inputs = { - "required": { - "target_parameter": (cls.parameters,), - "input_count": ("INT", {"default": 1, "min": 1, "max": 30, "step": 1}) - } - } - for i in range(1, 30 + 1): - inputs["required"][f"sampler_{i}"] = (samplers,) - inputs["required"][f"scheduler_{i}"] = (schedulers,) - - return inputs - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, target_parameter, input_count, **kwargs): - axis, values, = None, None, - if target_parameter == "scheduler": - axis = "advanced: Scheduler" - schedulers = [kwargs.get(f"scheduler_{i}") for i in range(1, input_count + 1)] - values = [scheduler for scheduler in schedulers if scheduler != "None"] - elif target_parameter == "sampler": - axis = "advanced: Sampler" - samplers = [kwargs.get(f"sampler_{i}") for i in range(1, input_count + 1)] - values = [sampler for sampler in samplers if sampler != "None"] - else: - axis = "advanced: Sampler&Scheduler" - samplers = [kwargs.get(f"sampler_{i}") for i in range(1, input_count + 1)] - schedulers = [kwargs.get(f"scheduler_{i}") for i in range(1, input_count + 1)] - values = [] - for sampler, scheduler in zip(samplers, schedulers): - sampler = sampler if sampler else 'None' - scheduler = scheduler if scheduler else 'None' - values.append(sampler +', '+ scheduler) - values = "; ".join(values) - return ({"axis": axis, "values": values},) if values else (None,) - -class XYplot_Denoise: - - @classmethod - def INPUT_TYPES(cls): - return { - "required": { - "batch_count": ("INT", {"default": 3, "min": 0, 
"max": 50}), - "first_denoise": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.1}), - "last_denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.1}), - } - } - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, batch_count, first_denoise, last_denoise): - axis = "advanced: Denoise" - values = generate_floats(batch_count, first_denoise, last_denoise) - return ({"axis": axis, "values": values},) if values else (None,) - -# PromptSR -class XYplot_PromptSR: - - @classmethod - def INPUT_TYPES(cls): - inputs = { - "required": { - "target_prompt": (["positive", "negative"],), - "search_txt": ("STRING", {"default": "", "multiline": False}), - "replace_all_text": ("BOOLEAN", {"default": False}), - "replace_count": ("INT", {"default": 3, "min": 1, "max": 30 - 1}), - } - } - - # Dynamically add replace_X inputs - for i in range(1, 30): - replace_key = f"replace_{i}" - inputs["required"][replace_key] = ("STRING", {"default": "", "multiline": False, "placeholder": replace_key}) - - return inputs - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, target_prompt, search_txt, replace_all_text, replace_count, **kwargs): - axis = None - - if target_prompt == "positive": - axis = "advanced: Positive Prompt S/R" - elif target_prompt == "negative": - axis = "advanced: Negative Prompt S/R" - - # Create base entry - values = [(search_txt, None, replace_all_text)] - - if replace_count > 0: - # Append additional entries based on replace_count - values.extend([(search_txt, kwargs.get(f"replace_{i+1}"), replace_all_text) for i in range(replace_count)]) - return ({"axis": axis, "values": values},) if values is not None else (None,) - -# XYPlot Pos Condition -class XYplot_Positive_Cond: - - @classmethod - def INPUT_TYPES(cls): - inputs = { - "optional": { - "positive_1": ("CONDITIONING",), - 
"positive_2": ("CONDITIONING",), - "positive_3": ("CONDITIONING",), - "positive_4": ("CONDITIONING",), - } - } - - return inputs - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, positive_1=None, positive_2=None, positive_3=None, positive_4=None): - axis = "advanced: Pos Condition" - values = [] - cond = [] - # Create base entry - if positive_1 is not None: - values.append("0") - cond.append(positive_1) - if positive_2 is not None: - values.append("1") - cond.append(positive_2) - if positive_3 is not None: - values.append("2") - cond.append(positive_3) - if positive_4 is not None: - values.append("3") - cond.append(positive_4) - - return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) - -# XYPlot Neg Condition -class XYplot_Negative_Cond: - - @classmethod - def INPUT_TYPES(cls): - inputs = { - "optional": { - "negative_1": ("CONDITIONING"), - "negative_2": ("CONDITIONING"), - "negative_3": ("CONDITIONING"), - "negative_4": ("CONDITIONING"), - } - } - - return inputs - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, negative_1=None, negative_2=None, negative_3=None, negative_4=None): - axis = "advanced: Neg Condition" - values = [] - cond = [] - # Create base entry - if negative_1 is not None: - values.append(0) - cond.append(negative_1) - if negative_2 is not None: - values.append(1) - cond.append(negative_2) - if negative_3 is not None: - values.append(2) - cond.append(negative_3) - if negative_4 is not None: - values.append(3) - cond.append(negative_4) - - return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) - -# XYPlot Pos Condition List -class XYplot_Positive_Cond_List: - @classmethod - def INPUT_TYPES(cls): - return { - "required": { - "positive": ("CONDITIONING",), - } - } - - INPUT_IS_LIST = True - RETURN_TYPES = 
("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, positive): - axis = "advanced: Pos Condition" - values = [] - cond = [] - for index, c in enumerate(positive): - values.append(str(index)) - cond.append(c) - - return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) - -# XYPlot Neg Condition List -class XYplot_Negative_Cond_List: - @classmethod - def INPUT_TYPES(cls): - return { - "required": { - "negative": ("CONDITIONING",), - } - } - - INPUT_IS_LIST = True - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, negative): - axis = "advanced: Neg Condition" - values = [] - cond = [] - for index, c in enumerate(negative): - values.append(index) - cond.append(c) - - return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) - -# XY Plot: ControlNet -class XYplot_Control_Net: - parameters = ["strength", "start_percent", "end_percent"] - @classmethod - def INPUT_TYPES(cls): - def get_file_list(filenames): - return [file for file in filenames if file != "put_models_here.txt" and "lllite" not in file] - - return { - "required": { - "control_net_name": (get_file_list(folder_paths.get_filename_list("controlnet")),), - "image": ("IMAGE",), - "target_parameter": (cls.parameters,), - "batch_count": ("INT", {"default": 3, "min": 1, "max": 30}), - "first_strength": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 10.0, "step": 0.01}), - "last_strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}), - "first_start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}), - "last_start_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}), - "first_end_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}), - "last_end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, 
"step": 0.01}), - "strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}), - "start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}), - "end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}), - }, - } - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, control_net_name, image, target_parameter, batch_count, first_strength, last_strength, first_start_percent, - last_start_percent, first_end_percent, last_end_percent, strength, start_percent, end_percent): - - axis, = None, - - values = [] - - if target_parameter == "strength": - axis = "advanced: ControlNetStrength" - - values.append([(control_net_name, image, first_strength, start_percent, end_percent)]) - strength_increment = (last_strength - first_strength) / (batch_count - 1) if batch_count > 1 else 0 - for i in range(1, batch_count - 1): - values.append([(control_net_name, image, first_strength + i * strength_increment, start_percent, - end_percent)]) - if batch_count > 1: - values.append([(control_net_name, image, last_strength, start_percent, end_percent)]) - - elif target_parameter == "start_percent": - axis = "advanced: ControlNetStart%" - - percent_increment = (last_start_percent - first_start_percent) / (batch_count - 1) if batch_count > 1 else 0 - values.append([(control_net_name, image, strength, first_start_percent, end_percent)]) - for i in range(1, batch_count - 1): - values.append([(control_net_name, image, strength, first_start_percent + i * percent_increment, - end_percent)]) - - # Always add the last start_percent if batch_count is more than 1. 
- if batch_count > 1: - values.append((control_net_name, image, strength, last_start_percent, end_percent)) - - elif target_parameter == "end_percent": - axis = "advanced: ControlNetEnd%" - - percent_increment = (last_end_percent - first_end_percent) / (batch_count - 1) if batch_count > 1 else 0 - values.append([(control_net_name, image, image, strength, start_percent, first_end_percent)]) - for i in range(1, batch_count - 1): - values.append([(control_net_name, image, strength, start_percent, - first_end_percent + i * percent_increment)]) - - if batch_count > 1: - values.append([(control_net_name, image, strength, start_percent, last_end_percent)]) - - - return ({"axis": axis, "values": values},) - - -#Checkpoints -class XYplot_Checkpoint: - - modes = ["Ckpt Names", "Ckpt Names+ClipSkip", "Ckpt Names+ClipSkip+VAE"] - - @classmethod - def INPUT_TYPES(cls): - - checkpoints = ["None"] + folder_paths.get_filename_list("checkpoints") - vaes = ["Baked VAE"] + folder_paths.get_filename_list("vae") - - inputs = { - "required": { - "input_mode": (cls.modes,), - "ckpt_count": ("INT", {"default": 3, "min": 0, "max": 10, "step": 1}), - } - } - - for i in range(1, 10 + 1): - inputs["required"][f"ckpt_name_{i}"] = (checkpoints,) - inputs["required"][f"clip_skip_{i}"] = ("INT", {"default": -1, "min": -24, "max": -1, "step": 1}) - inputs["required"][f"vae_name_{i}"] = (vaes,) - - inputs["optional"] = { - "optional_lora_stack": ("LORA_STACK",) - } - return inputs - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, input_mode, ckpt_count, **kwargs): - - axis = "advanced: Checkpoint" - - checkpoints = [kwargs.get(f"ckpt_name_{i}") for i in range(1, ckpt_count + 1)] - clip_skips = [kwargs.get(f"clip_skip_{i}") for i in range(1, ckpt_count + 1)] - vaes = [kwargs.get(f"vae_name_{i}") for i in range(1, ckpt_count + 1)] - - # Set None for Clip Skip and/or VAE if not correct modes - for i in 
range(ckpt_count): - if "ClipSkip" not in input_mode: - clip_skips[i] = 'None' - if "VAE" not in input_mode: - vaes[i] = 'None' - - # Extend each sub-array with lora_stack if it's not None - values = [checkpoint.replace(',', '*')+','+str(clip_skip)+','+vae.replace(',', '*') for checkpoint, clip_skip, vae in zip(checkpoints, clip_skips, vaes) if - checkpoint != "None"] - - optional_lora_stack = kwargs.get("optional_lora_stack") if "optional_lora_stack" in kwargs else [] - - xy_values = {"axis": axis, "values": values, "lora_stack": optional_lora_stack} - return (xy_values,) - -#Loras -class XYplot_Lora: - - modes = ["Lora Names", "Lora Names+Weights"] - - @classmethod - def INPUT_TYPES(cls): - loras = ["None"] + folder_paths.get_filename_list("loras") - - inputs = { - "required": { - "input_mode": (cls.modes,), - "lora_count": ("INT", {"default": 3, "min": 0, "max": 10, "step": 1}), - "model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), - "clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), - } - } - - for i in range(1, 10 + 1): - inputs["required"][f"lora_name_{i}"] = (loras,) - inputs["required"][f"model_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}) - inputs["required"][f"clip_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}) - - inputs["optional"] = { - "optional_lora_stack": ("LORA_STACK",) - } - return inputs - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, input_mode, lora_count, model_strength, clip_strength, **kwargs): - - axis = "advanced: Lora" - # Extract values from kwargs - loras = [kwargs.get(f"lora_name_{i}") for i in range(1, lora_count + 1)] - model_strs = [kwargs.get(f"model_str_{i}", model_strength) for i in range(1, lora_count + 1)] - clip_strs = [kwargs.get(f"clip_str_{i}", clip_strength) for i in range(1, lora_count + 1)] - 
- # Use model_strength and clip_strength for the loras where values are not provided - if "Weights" not in input_mode: - for i in range(lora_count): - model_strs[i] = model_strength - clip_strs[i] = clip_strength - - # Extend each sub-array with lora_stack if it's not None - values = [lora.replace(',', '*')+','+str(model_str)+','+str(clip_str) for lora, model_str, clip_str - in zip(loras, model_strs, clip_strs) if lora != "None"] - - optional_lora_stack = kwargs.get("optional_lora_stack") if "optional_lora_stack" in kwargs else [] - - print(values) - xy_values = {"axis": axis, "values": values, "lora_stack": optional_lora_stack} - return (xy_values,) - -# 模型叠加 -class XYplot_ModelMergeBlocks: - - @classmethod - def INPUT_TYPES(s): - checkpoints = folder_paths.get_filename_list("checkpoints") - vae = ["Use Model 1", "Use Model 2"] + folder_paths.get_filename_list("vae") - - preset = ["Preset"] # 20 - preset += load_preset("mmb-preset.txt") - preset += load_preset("mmb-preset.custom.txt") - - default_vectors = "1,0,0; \n0,1,0; \n0,0,1; \n1,1,0; \n1,0,1; \n0,1,1; " - return { - "required": { - "ckpt_name_1": (checkpoints,), - "ckpt_name_2": (checkpoints,), - "vae_use": (vae, {"default": "Use Model 1"}), - "preset": (preset, {"default": "preset"}), - "values": ("STRING", {"default": default_vectors, "multiline": True, "placeholder": 'Support 2 methods:\n\n1.input, middle, out in same line and insert values seperated by "; "\n\n2.model merge block number seperated by ", " in same line and insert values seperated by "; "'}), - }, - "hidden": {"my_unique_id": "UNIQUE_ID"} - } - - RETURN_TYPES = ("X_Y",) - RETURN_NAMES = ("X or Y",) - FUNCTION = "xy_value" - - CATEGORY = "EasyUse/XY Inputs" - - def xy_value(self, ckpt_name_1, ckpt_name_2, vae_use, preset, values, my_unique_id=None): - - axis = "advanced: ModelMergeBlocks" - if ckpt_name_1 is None: - raise Exception("ckpt_name_1 is not found") - if ckpt_name_2 is None: - raise Exception("ckpt_name_2 is not found") - - models 
= (ckpt_name_1, ckpt_name_2) - - xy_values = {"axis":axis, "values":values, "models":models, "vae_use": vae_use} - return (xy_values,) +#---------------------------------------------------------------节点束 结束---------------------------------------------------------------------- # 显示推理时间 class showSpentTime: @@ -7371,6 +6761,8 @@ def generate(self, positive, negative, model, aspect_ratio, seed, denoise, optio output_image = stableAPI.generate_sd3_image(positive, negative, aspect_ratio, seed=seed, mode=mode, model=model, strength=denoise, image=optional_image) return (output_image,) +#---------------------------------------------------------------API 结束---------------------------------------------------------------------- + NODE_CLASS_MAPPINGS = { # seed 随机种 diff --git a/py/ic_light/func.py b/py/ic_light/__init__.py similarity index 98% rename from py/ic_light/func.py rename to py/ic_light/__init__.py index 53c57c3..5b5c30c 100644 --- a/py/ic_light/func.py +++ b/py/ic_light/__init__.py @@ -1,3 +1,5 @@ +#credit to huchenlei for this module +#from https://github.com/huchenlei/ComfyUI-IC-Light-Native import torch import numpy as np from typing import Tuple, TypedDict, Callable diff --git a/py/layer_diffuse/__init__.py b/py/layer_diffuse/__init__.py index e69de29..b85200b 100644 --- a/py/layer_diffuse/__init__.py +++ b/py/layer_diffuse/__init__.py @@ -0,0 +1,209 @@ +#credit to huchenlei for this module +#from https://github.com/huchenlei/ComfyUI-layerdiffuse +import torch +import comfy.model_management +import copy +from typing import Optional +from enum import Enum +from comfy.utils import load_torch_file +from comfy.conds import CONDRegular +from comfy_extras.nodes_compositing import JoinImageWithAlpha +from .model import ModelPatcher, TransparentVAEDecoder, calculate_weight_adjust_channel +from .attension_sharing import AttentionSharingPatcher +from ..config import LAYER_DIFFUSION, LAYER_DIFFUSION_DIR, LAYER_DIFFUSION_VAE +from ..libs.utils import to_lora_patch_dict, 
get_local_filepath, get_sd_version + +load_layer_model_state_dict = load_torch_file +class LayerMethod(Enum): + FG_ONLY_ATTN = "Attention Injection" + FG_ONLY_CONV = "Conv Injection" + FG_TO_BLEND = "Foreground" + FG_BLEND_TO_BG = "Foreground to Background" + BG_TO_BLEND = "Background" + BG_BLEND_TO_FG = "Background to Foreground" + EVERYTHING = "Everything" + +class LayerDiffuse: + + def __init__(self) -> None: + self.vae_transparent_decoder = None + self.frames = 1 + + def get_layer_diffusion_method(self, method, has_blend_latent): + method = LayerMethod(method) + if method == LayerMethod.BG_TO_BLEND and has_blend_latent: + method = LayerMethod.BG_BLEND_TO_FG + elif method == LayerMethod.FG_TO_BLEND and has_blend_latent: + method = LayerMethod.FG_BLEND_TO_BG + return method + + def apply_layer_c_concat(self, cond, uncond, c_concat): + def write_c_concat(cond): + new_cond = [] + for t in cond: + n = [t[0], t[1].copy()] + if "model_conds" not in n[1]: + n[1]["model_conds"] = {} + n[1]["model_conds"]["c_concat"] = CONDRegular(c_concat) + new_cond.append(n) + return new_cond + + return (write_c_concat(cond), write_c_concat(uncond)) + + def apply_layer_diffusion(self, model: ModelPatcher, method, weight, samples, blend_samples, positive, negative, image=None, additional_cond=(None, None, None)): + control_img: Optional[torch.TensorType] = None + sd_version = get_sd_version(model) + model_url = LAYER_DIFFUSION[method.value][sd_version]["model_url"] + + if image is not None: + image = image.movedim(-1, 1) + + try: + ModelPatcher.calculate_weight = calculate_weight_adjust_channel(ModelPatcher.calculate_weight) + except: + pass + + if method in [LayerMethod.FG_ONLY_CONV, LayerMethod.FG_ONLY_ATTN] and sd_version == 'sd1': + self.frames = 1 + elif method in [LayerMethod.BG_TO_BLEND, LayerMethod.FG_TO_BLEND, LayerMethod.BG_BLEND_TO_FG, LayerMethod.FG_BLEND_TO_BG] and sd_version == 'sd1': + self.frames = 2 + batch_size, _, height, width = samples['samples'].shape + if 
batch_size % 2 != 0: + raise Exception(f"The batch size should be a multiple of 2. 批次大小需为2的倍数") + control_img = image + elif method == LayerMethod.EVERYTHING and sd_version == 'sd1': + batch_size, _, height, width = samples['samples'].shape + self.frames = 3 + if batch_size % 3 != 0: + raise Exception(f"The batch size should be a multiple of 3. 批次大小需为3的倍数") + if model_url is None: + raise Exception(f"{method.value} is not supported for {sd_version} model") + + model_path = get_local_filepath(model_url, LAYER_DIFFUSION_DIR) + layer_lora_state_dict = load_layer_model_state_dict(model_path) + work_model = model.clone() + if sd_version == 'sd1': + patcher = AttentionSharingPatcher( + work_model, self.frames, use_control=control_img is not None + ) + patcher.load_state_dict(layer_lora_state_dict, strict=True) + if control_img is not None: + patcher.set_control(control_img) + else: + layer_lora_patch_dict = to_lora_patch_dict(layer_lora_state_dict) + work_model.add_patches(layer_lora_patch_dict, weight) + + # cond_contact + if method in [LayerMethod.FG_ONLY_ATTN, LayerMethod.FG_ONLY_CONV]: + samp_model = work_model + elif sd_version == 'sdxl': + if method in [LayerMethod.BG_TO_BLEND, LayerMethod.FG_TO_BLEND]: + c_concat = model.model.latent_format.process_in(samples["samples"]) + else: + c_concat = model.model.latent_format.process_in(torch.cat([samples["samples"], blend_samples["samples"]], dim=1)) + samp_model, positive, negative = (work_model,) + self.apply_layer_c_concat(positive, negative, c_concat) + elif sd_version == 'sd1': + if method in [LayerMethod.BG_TO_BLEND, LayerMethod.BG_BLEND_TO_FG]: + additional_cond = (additional_cond[0], None) + elif method in [LayerMethod.FG_TO_BLEND, LayerMethod.FG_BLEND_TO_BG]: + additional_cond = (additional_cond[1], None) + + work_model.model_options.setdefault("transformer_options", {}) + work_model.model_options["transformer_options"]["cond_overwrite"] = [ + cond[0][0] if cond is not None else None + for cond in additional_cond 
+ ] + samp_model = work_model + + return samp_model, positive, negative + + def join_image_with_alpha(self, image, alpha): + out = image.movedim(-1, 1) + if out.shape[1] == 3: # RGB + out = torch.cat([out, torch.ones_like(out[:, :1, :, :])], dim=1) + for i in range(out.shape[0]): + out[i, 3, :, :] = alpha + return out.movedim(1, -1) + + def image_to_alpha(self, image, latent): + pixel = image.movedim(-1, 1) # [B, H, W, C] => [B, C, H, W] + decoded = [] + sub_batch_size = 16 + for start_idx in range(0, latent.shape[0], sub_batch_size): + decoded.append( + self.vae_transparent_decoder.decode_pixel( + pixel[start_idx: start_idx + sub_batch_size], + latent[start_idx: start_idx + sub_batch_size], + ) + ) + pixel_with_alpha = torch.cat(decoded, dim=0) + # [B, C, H, W] => [B, H, W, C] + pixel_with_alpha = pixel_with_alpha.movedim(1, -1) + image = pixel_with_alpha[..., 1:] + alpha = pixel_with_alpha[..., 0] + + alpha = 1.0 - alpha + new_images, = JoinImageWithAlpha().join_image_with_alpha(image, alpha) + return new_images, alpha + + def make_3d_mask(self, mask): + if len(mask.shape) == 4: + return mask.squeeze(0) + + elif len(mask.shape) == 2: + return mask.unsqueeze(0) + + return mask + + def masks_to_list(self, masks): + if masks is None: + empty_mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") + return ([empty_mask],) + + res = [] + + for mask in masks: + res.append(mask) + + return [self.make_3d_mask(x) for x in res] + + def layer_diffusion_decode(self, layer_diffusion_method, latent, blend_samples, samp_images, model): + alpha = [] + if layer_diffusion_method is not None: + sd_version = get_sd_version(model) + if sd_version not in ['sdxl', 'sd1']: + raise Exception(f"Only SDXL and SD1.5 model supported for Layer Diffusion") + method = self.get_layer_diffusion_method(layer_diffusion_method, blend_samples is not None) + sd15_allow = True if sd_version == 'sd1' and method in [LayerMethod.FG_ONLY_ATTN, LayerMethod.EVERYTHING, LayerMethod.BG_TO_BLEND, 
LayerMethod.BG_BLEND_TO_FG] else False + sdxl_allow = True if sd_version == 'sdxl' and method in [LayerMethod.FG_ONLY_CONV, LayerMethod.FG_ONLY_ATTN, LayerMethod.BG_BLEND_TO_FG] else False + if sdxl_allow or sd15_allow: + if self.vae_transparent_decoder is None: + model_url = LAYER_DIFFUSION_VAE['decode'][sd_version]["model_url"] + if model_url is None: + raise Exception(f"{method.value} is not supported for {sd_version} model") + decoder_file = get_local_filepath(model_url, LAYER_DIFFUSION_DIR) + self.vae_transparent_decoder = TransparentVAEDecoder( + load_torch_file(decoder_file), + device=comfy.model_management.get_torch_device(), + dtype=(torch.float16 if comfy.model_management.should_use_fp16() else torch.float32), + ) + if method in [LayerMethod.EVERYTHING, LayerMethod.BG_BLEND_TO_FG, LayerMethod.BG_TO_BLEND]: + new_images = [] + sliced_samples = copy.copy({"samples": latent}) + for index in range(len(samp_images)): + if index % self.frames == 0: + img = samp_images[index::self.frames] + alpha_images, _alpha = self.image_to_alpha(img, sliced_samples["samples"][index::self.frames]) + alpha.append(self.make_3d_mask(_alpha[0])) + new_images.append(alpha_images[0]) + else: + new_images.append(samp_images[index]) + else: + new_images, alpha = self.image_to_alpha(samp_images, latent) + else: + new_images = samp_images + else: + new_images = samp_images + + + return (new_images, samp_images, alpha) \ No newline at end of file diff --git a/py/layer_diffuse/func.py b/py/layer_diffuse/func.py deleted file mode 100644 index e5cbfb5..0000000 --- a/py/layer_diffuse/func.py +++ /dev/null @@ -1,207 +0,0 @@ -import torch -import comfy.model_management -import copy -from typing import Optional -from enum import Enum -from comfy.utils import load_torch_file -from comfy.conds import CONDRegular -from comfy_extras.nodes_compositing import JoinImageWithAlpha -from .model import ModelPatcher, TransparentVAEDecoder, calculate_weight_adjust_channel -from .attension_sharing import 
AttentionSharingPatcher -from ..config import LAYER_DIFFUSION, LAYER_DIFFUSION_DIR, LAYER_DIFFUSION_VAE -from ..libs.utils import to_lora_patch_dict, get_local_filepath, get_sd_version - -load_layer_model_state_dict = load_torch_file -class LayerMethod(Enum): - FG_ONLY_ATTN = "Attention Injection" - FG_ONLY_CONV = "Conv Injection" - FG_TO_BLEND = "Foreground" - FG_BLEND_TO_BG = "Foreground to Background" - BG_TO_BLEND = "Background" - BG_BLEND_TO_FG = "Background to Foreground" - EVERYTHING = "Everything" - -class LayerDiffuse: - - def __init__(self) -> None: - self.vae_transparent_decoder = None - self.frames = 1 - - def get_layer_diffusion_method(self, method, has_blend_latent): - method = LayerMethod(method) - if method == LayerMethod.BG_TO_BLEND and has_blend_latent: - method = LayerMethod.BG_BLEND_TO_FG - elif method == LayerMethod.FG_TO_BLEND and has_blend_latent: - method = LayerMethod.FG_BLEND_TO_BG - return method - - def apply_layer_c_concat(self, cond, uncond, c_concat): - def write_c_concat(cond): - new_cond = [] - for t in cond: - n = [t[0], t[1].copy()] - if "model_conds" not in n[1]: - n[1]["model_conds"] = {} - n[1]["model_conds"]["c_concat"] = CONDRegular(c_concat) - new_cond.append(n) - return new_cond - - return (write_c_concat(cond), write_c_concat(uncond)) - - def apply_layer_diffusion(self, model: ModelPatcher, method, weight, samples, blend_samples, positive, negative, image=None, additional_cond=(None, None, None)): - control_img: Optional[torch.TensorType] = None - sd_version = get_sd_version(model) - model_url = LAYER_DIFFUSION[method.value][sd_version]["model_url"] - - if image is not None: - image = image.movedim(-1, 1) - - try: - ModelPatcher.calculate_weight = calculate_weight_adjust_channel(ModelPatcher.calculate_weight) - except: - pass - - if method in [LayerMethod.FG_ONLY_CONV, LayerMethod.FG_ONLY_ATTN] and sd_version == 'sd1': - self.frames = 1 - elif method in [LayerMethod.BG_TO_BLEND, LayerMethod.FG_TO_BLEND, 
LayerMethod.BG_BLEND_TO_FG, LayerMethod.FG_BLEND_TO_BG] and sd_version == 'sd1': - self.frames = 2 - batch_size, _, height, width = samples['samples'].shape - if batch_size % 2 != 0: - raise Exception(f"The batch size should be a multiple of 2. 批次大小需为2的倍数") - control_img = image - elif method == LayerMethod.EVERYTHING and sd_version == 'sd1': - batch_size, _, height, width = samples['samples'].shape - self.frames = 3 - if batch_size % 3 != 0: - raise Exception(f"The batch size should be a multiple of 3. 批次大小需为3的倍数") - if model_url is None: - raise Exception(f"{method.value} is not supported for {sd_version} model") - - model_path = get_local_filepath(model_url, LAYER_DIFFUSION_DIR) - layer_lora_state_dict = load_layer_model_state_dict(model_path) - work_model = model.clone() - if sd_version == 'sd1': - patcher = AttentionSharingPatcher( - work_model, self.frames, use_control=control_img is not None - ) - patcher.load_state_dict(layer_lora_state_dict, strict=True) - if control_img is not None: - patcher.set_control(control_img) - else: - layer_lora_patch_dict = to_lora_patch_dict(layer_lora_state_dict) - work_model.add_patches(layer_lora_patch_dict, weight) - - # cond_contact - if method in [LayerMethod.FG_ONLY_ATTN, LayerMethod.FG_ONLY_CONV]: - samp_model = work_model - elif sd_version == 'sdxl': - if method in [LayerMethod.BG_TO_BLEND, LayerMethod.FG_TO_BLEND]: - c_concat = model.model.latent_format.process_in(samples["samples"]) - else: - c_concat = model.model.latent_format.process_in(torch.cat([samples["samples"], blend_samples["samples"]], dim=1)) - samp_model, positive, negative = (work_model,) + self.apply_layer_c_concat(positive, negative, c_concat) - elif sd_version == 'sd1': - if method in [LayerMethod.BG_TO_BLEND, LayerMethod.BG_BLEND_TO_FG]: - additional_cond = (additional_cond[0], None) - elif method in [LayerMethod.FG_TO_BLEND, LayerMethod.FG_BLEND_TO_BG]: - additional_cond = (additional_cond[1], None) - - 
work_model.model_options.setdefault("transformer_options", {}) - work_model.model_options["transformer_options"]["cond_overwrite"] = [ - cond[0][0] if cond is not None else None - for cond in additional_cond - ] - samp_model = work_model - - return samp_model, positive, negative - - def join_image_with_alpha(self, image, alpha): - out = image.movedim(-1, 1) - if out.shape[1] == 3: # RGB - out = torch.cat([out, torch.ones_like(out[:, :1, :, :])], dim=1) - for i in range(out.shape[0]): - out[i, 3, :, :] = alpha - return out.movedim(1, -1) - - def image_to_alpha(self, image, latent): - pixel = image.movedim(-1, 1) # [B, H, W, C] => [B, C, H, W] - decoded = [] - sub_batch_size = 16 - for start_idx in range(0, latent.shape[0], sub_batch_size): - decoded.append( - self.vae_transparent_decoder.decode_pixel( - pixel[start_idx: start_idx + sub_batch_size], - latent[start_idx: start_idx + sub_batch_size], - ) - ) - pixel_with_alpha = torch.cat(decoded, dim=0) - # [B, C, H, W] => [B, H, W, C] - pixel_with_alpha = pixel_with_alpha.movedim(1, -1) - image = pixel_with_alpha[..., 1:] - alpha = pixel_with_alpha[..., 0] - - alpha = 1.0 - alpha - new_images, = JoinImageWithAlpha().join_image_with_alpha(image, alpha) - return new_images, alpha - - def make_3d_mask(self, mask): - if len(mask.shape) == 4: - return mask.squeeze(0) - - elif len(mask.shape) == 2: - return mask.unsqueeze(0) - - return mask - - def masks_to_list(self, masks): - if masks is None: - empty_mask = torch.zeros((64, 64), dtype=torch.float32, device="cpu") - return ([empty_mask],) - - res = [] - - for mask in masks: - res.append(mask) - - return [self.make_3d_mask(x) for x in res] - - def layer_diffusion_decode(self, layer_diffusion_method, latent, blend_samples, samp_images, model): - alpha = [] - if layer_diffusion_method is not None: - sd_version = get_sd_version(model) - if sd_version not in ['sdxl', 'sd1']: - raise Exception(f"Only SDXL and SD1.5 model supported for Layer Diffusion") - method = 
self.get_layer_diffusion_method(layer_diffusion_method, blend_samples is not None) - sd15_allow = True if sd_version == 'sd1' and method in [LayerMethod.FG_ONLY_ATTN, LayerMethod.EVERYTHING, LayerMethod.BG_TO_BLEND, LayerMethod.BG_BLEND_TO_FG] else False - sdxl_allow = True if sd_version == 'sdxl' and method in [LayerMethod.FG_ONLY_CONV, LayerMethod.FG_ONLY_ATTN, LayerMethod.BG_BLEND_TO_FG] else False - if sdxl_allow or sd15_allow: - if self.vae_transparent_decoder is None: - model_url = LAYER_DIFFUSION_VAE['decode'][sd_version]["model_url"] - if model_url is None: - raise Exception(f"{method.value} is not supported for {sd_version} model") - decoder_file = get_local_filepath(model_url, LAYER_DIFFUSION_DIR) - self.vae_transparent_decoder = TransparentVAEDecoder( - load_torch_file(decoder_file), - device=comfy.model_management.get_torch_device(), - dtype=(torch.float16 if comfy.model_management.should_use_fp16() else torch.float32), - ) - if method in [LayerMethod.EVERYTHING, LayerMethod.BG_BLEND_TO_FG, LayerMethod.BG_TO_BLEND]: - new_images = [] - sliced_samples = copy.copy({"samples": latent}) - for index in range(len(samp_images)): - if index % self.frames == 0: - img = samp_images[index::self.frames] - alpha_images, _alpha = self.image_to_alpha(img, sliced_samples["samples"][index::self.frames]) - alpha.append(self.make_3d_mask(_alpha[0])) - new_images.append(alpha_images[0]) - else: - new_images.append(samp_images[index]) - else: - new_images, alpha = self.image_to_alpha(samp_images, latent) - else: - new_images = samp_images - else: - new_images = samp_images - - - return (new_images, samp_images, alpha) \ No newline at end of file diff --git a/py/libs/translate.py b/py/libs/translate.py index 4f867d5..8e769b2 100644 --- a/py/libs/translate.py +++ b/py/libs/translate.py @@ -1,3 +1,5 @@ +#credit to shadowcz007 for this module +#from https://github.com/shadowcz007/comfyui-mixlab-nodes/blob/main/nodes/TextGenerateNode.py import re import os import folder_paths @@ 
-125,8 +127,6 @@ def weak_emphasis(self, *args): def embedding(self, *args): print('prompt embedding', args[0]) if len(args) == 1: - # print('prompt embedding',str(args[0])) - # 只传递了一个参数,意味着只有embedding名称没有数字 embedding_name = str(args[0]) return f"embedding:{embedding_name}" elif len(args) > 1: diff --git a/py/libs/xyplot.py b/py/libs/xyplot.py index e1d4f7a..22e0d06 100644 --- a/py/libs/xyplot.py +++ b/py/libs/xyplot.py @@ -5,7 +5,7 @@ from .adv_encode import advanced_encode from .controlnet import easyControlnet from .log import log_node_warn -from ..layer_diffuse.func import LayerDiffuse +from ..layer_diffuse import LayerDiffuse from ..config import RESOURCES_DIR class easyXYPlot(): diff --git a/py/xyplot.py b/py/xyplot.py new file mode 100644 index 0000000..40fba99 --- /dev/null +++ b/py/xyplot.py @@ -0,0 +1,603 @@ +import os +import comfy +import folder_paths +from .config import RESOURCES_DIR +def load_preset(filename): + path = os.path.join(RESOURCES_DIR, filename) + path = os.path.abspath(path) + preset_list = [] + + if os.path.exists(path): + with open(path, 'r') as file: + for line in file: + preset_list.append(line.strip()) + + return preset_list + else: + return [] +def generate_floats(batch_count, first_float, last_float): + if batch_count > 1: + interval = (last_float - first_float) / (batch_count - 1) + values = [str(round(first_float + i * interval, 3)) for i in range(batch_count)] + else: + values = [str(first_float)] if batch_count == 1 else [] + return "; ".join(values) + +def generate_ints(batch_count, first_int, last_int): + if batch_count > 1: + interval = (last_int - first_int) / (batch_count - 1) + values = [str(int(first_int + i * interval)) for i in range(batch_count)] + else: + values = [str(first_int)] if batch_count == 1 else [] + # values = list(set(values)) # Remove duplicates + # values.sort() # Sort in ascending order + return "; ".join(values) + +# Seed++ Batch +class XYplot_SeedsBatch: + + @classmethod + def INPUT_TYPES(cls): + 
return {"required": { + "batch_count": ("INT", {"default": 3, "min": 1, "max": 50}), }, + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, batch_count): + + axis = "advanced: Seeds++ Batch" + xy_values = {"axis": axis, "values": batch_count} + return (xy_values,) + +# Step Values +class XYplot_Steps: + parameters = ["steps", "start_at_step", "end_at_step",] + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "target_parameter": (cls.parameters,), + "batch_count": ("INT", {"default": 3, "min": 0, "max": 50}), + "first_step": ("INT", {"default": 10, "min": 1, "max": 10000}), + "last_step": ("INT", {"default": 20, "min": 1, "max": 10000}), + "first_start_step": ("INT", {"default": 0, "min": 0, "max": 10000}), + "last_start_step": ("INT", {"default": 10, "min": 0, "max": 10000}), + "first_end_step": ("INT", {"default": 10, "min": 0, "max": 10000}), + "last_end_step": ("INT", {"default": 20, "min": 0, "max": 10000}), + } + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, target_parameter, batch_count, first_step, last_step, first_start_step, last_start_step, + first_end_step, last_end_step,): + + axis, xy_first, xy_last = None, None, None + + if target_parameter == "steps": + axis = "advanced: Steps" + xy_first = first_step + xy_last = last_step + elif target_parameter == "start_at_step": + axis = "advanced: StartStep" + xy_first = first_start_step + xy_last = last_start_step + elif target_parameter == "end_at_step": + axis = "advanced: EndStep" + xy_first = first_end_step + xy_last = last_end_step + + values = generate_ints(batch_count, xy_first, xy_last) + return ({"axis": axis, "values": values},) if values is not None else (None,) + +class XYplot_CFG: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "batch_count": ("INT", {"default": 3, "min": 0, 
"max": 50}), + "first_cfg": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0}), + "last_cfg": ("FLOAT", {"default": 9.0, "min": 0.0, "max": 100.0}), + } + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, batch_count, first_cfg, last_cfg): + axis = "advanced: CFG Scale" + values = generate_floats(batch_count, first_cfg, last_cfg) + return ({"axis": axis, "values": values},) if values else (None,) + +# Step Values +class XYplot_Sampler_Scheduler: + parameters = ["sampler", "scheduler", "sampler & scheduler"] + + @classmethod + def INPUT_TYPES(cls): + samplers = ["None"] + comfy.samplers.KSampler.SAMPLERS + schedulers = ["None"] + comfy.samplers.KSampler.SCHEDULERS + inputs = { + "required": { + "target_parameter": (cls.parameters,), + "input_count": ("INT", {"default": 1, "min": 1, "max": 30, "step": 1}) + } + } + for i in range(1, 30 + 1): + inputs["required"][f"sampler_{i}"] = (samplers,) + inputs["required"][f"scheduler_{i}"] = (schedulers,) + + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, target_parameter, input_count, **kwargs): + axis, values, = None, None, + if target_parameter == "scheduler": + axis = "advanced: Scheduler" + schedulers = [kwargs.get(f"scheduler_{i}") for i in range(1, input_count + 1)] + values = [scheduler for scheduler in schedulers if scheduler != "None"] + elif target_parameter == "sampler": + axis = "advanced: Sampler" + samplers = [kwargs.get(f"sampler_{i}") for i in range(1, input_count + 1)] + values = [sampler for sampler in samplers if sampler != "None"] + else: + axis = "advanced: Sampler&Scheduler" + samplers = [kwargs.get(f"sampler_{i}") for i in range(1, input_count + 1)] + schedulers = [kwargs.get(f"scheduler_{i}") for i in range(1, input_count + 1)] + values = [] + for sampler, scheduler in zip(samplers, schedulers): + sampler = 
sampler if sampler else 'None' + scheduler = scheduler if scheduler else 'None' + values.append(sampler +', '+ scheduler) + values = "; ".join(values) + return ({"axis": axis, "values": values},) if values else (None,) + +class XYplot_Denoise: + + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "batch_count": ("INT", {"default": 3, "min": 0, "max": 50}), + "first_denoise": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 1.0, "step": 0.1}), + "last_denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.1}), + } + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, batch_count, first_denoise, last_denoise): + axis = "advanced: Denoise" + values = generate_floats(batch_count, first_denoise, last_denoise) + return ({"axis": axis, "values": values},) if values else (None,) + +# PromptSR +class XYplot_PromptSR: + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "required": { + "target_prompt": (["positive", "negative"],), + "search_txt": ("STRING", {"default": "", "multiline": False}), + "replace_all_text": ("BOOLEAN", {"default": False}), + "replace_count": ("INT", {"default": 3, "min": 1, "max": 30 - 1}), + } + } + + # Dynamically add replace_X inputs + for i in range(1, 30): + replace_key = f"replace_{i}" + inputs["required"][replace_key] = ("STRING", {"default": "", "multiline": False, "placeholder": replace_key}) + + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, target_prompt, search_txt, replace_all_text, replace_count, **kwargs): + axis = None + + if target_prompt == "positive": + axis = "advanced: Positive Prompt S/R" + elif target_prompt == "negative": + axis = "advanced: Negative Prompt S/R" + + # Create base entry + values = [(search_txt, None, replace_all_text)] + + if replace_count > 0: + # Append additional entries based on 
replace_count + values.extend([(search_txt, kwargs.get(f"replace_{i+1}"), replace_all_text) for i in range(replace_count)]) + return ({"axis": axis, "values": values},) if values is not None else (None,) + +# XYPlot Pos Condition +class XYplot_Positive_Cond: + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "optional": { + "positive_1": ("CONDITIONING",), + "positive_2": ("CONDITIONING",), + "positive_3": ("CONDITIONING",), + "positive_4": ("CONDITIONING",), + } + } + + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, positive_1=None, positive_2=None, positive_3=None, positive_4=None): + axis = "advanced: Pos Condition" + values = [] + cond = [] + # Create base entry + if positive_1 is not None: + values.append("0") + cond.append(positive_1) + if positive_2 is not None: + values.append("1") + cond.append(positive_2) + if positive_3 is not None: + values.append("2") + cond.append(positive_3) + if positive_4 is not None: + values.append("3") + cond.append(positive_4) + + return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) + +# XYPlot Neg Condition +class XYplot_Negative_Cond: + + @classmethod + def INPUT_TYPES(cls): + inputs = { + "optional": { + "negative_1": ("CONDITIONING"), + "negative_2": ("CONDITIONING"), + "negative_3": ("CONDITIONING"), + "negative_4": ("CONDITIONING"), + } + } + + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, negative_1=None, negative_2=None, negative_3=None, negative_4=None): + axis = "advanced: Neg Condition" + values = [] + cond = [] + # Create base entry + if negative_1 is not None: + values.append(0) + cond.append(negative_1) + if negative_2 is not None: + values.append(1) + cond.append(negative_2) + if negative_3 is not None: + values.append(2) + cond.append(negative_3) + if negative_4 
is not None: + values.append(3) + cond.append(negative_4) + + return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) + +# XYPlot Pos Condition List +class XYplot_Positive_Cond_List: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "positive": ("CONDITIONING",), + } + } + + INPUT_IS_LIST = True + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, positive): + axis = "advanced: Pos Condition" + values = [] + cond = [] + for index, c in enumerate(positive): + values.append(str(index)) + cond.append(c) + + return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) + +# XYPlot Neg Condition List +class XYplot_Negative_Cond_List: + @classmethod + def INPUT_TYPES(cls): + return { + "required": { + "negative": ("CONDITIONING",), + } + } + + INPUT_IS_LIST = True + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, negative): + axis = "advanced: Neg Condition" + values = [] + cond = [] + for index, c in enumerate(negative): + values.append(index) + cond.append(c) + + return ({"axis": axis, "values": values, "cond": cond},) if values is not None else (None,) + +# XY Plot: ControlNet +class XYplot_Control_Net: + parameters = ["strength", "start_percent", "end_percent"] + @classmethod + def INPUT_TYPES(cls): + def get_file_list(filenames): + return [file for file in filenames if file != "put_models_here.txt" and "lllite" not in file] + + return { + "required": { + "control_net_name": (get_file_list(folder_paths.get_filename_list("controlnet")),), + "image": ("IMAGE",), + "target_parameter": (cls.parameters,), + "batch_count": ("INT", {"default": 3, "min": 1, "max": 30}), + "first_strength": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 10.0, "step": 0.01}), + "last_strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, 
"step": 0.01}), + "first_start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "last_start_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "first_end_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "last_end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}), + "start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}), + "end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}), + }, + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, control_net_name, image, target_parameter, batch_count, first_strength, last_strength, first_start_percent, + last_start_percent, first_end_percent, last_end_percent, strength, start_percent, end_percent): + + axis, = None, + + values = [] + + if target_parameter == "strength": + axis = "advanced: ControlNetStrength" + + values.append([(control_net_name, image, first_strength, start_percent, end_percent)]) + strength_increment = (last_strength - first_strength) / (batch_count - 1) if batch_count > 1 else 0 + for i in range(1, batch_count - 1): + values.append([(control_net_name, image, first_strength + i * strength_increment, start_percent, + end_percent)]) + if batch_count > 1: + values.append([(control_net_name, image, last_strength, start_percent, end_percent)]) + + elif target_parameter == "start_percent": + axis = "advanced: ControlNetStart%" + + percent_increment = (last_start_percent - first_start_percent) / (batch_count - 1) if batch_count > 1 else 0 + values.append([(control_net_name, image, strength, first_start_percent, end_percent)]) + for i in range(1, batch_count - 1): + values.append([(control_net_name, image, strength, first_start_percent + i * percent_increment, + end_percent)]) + 
+ # Always add the last start_percent if batch_count is more than 1. + if batch_count > 1: + values.append((control_net_name, image, strength, last_start_percent, end_percent)) + + elif target_parameter == "end_percent": + axis = "advanced: ControlNetEnd%" + + percent_increment = (last_end_percent - first_end_percent) / (batch_count - 1) if batch_count > 1 else 0 + values.append([(control_net_name, image, image, strength, start_percent, first_end_percent)]) + for i in range(1, batch_count - 1): + values.append([(control_net_name, image, strength, start_percent, + first_end_percent + i * percent_increment)]) + + if batch_count > 1: + values.append([(control_net_name, image, strength, start_percent, last_end_percent)]) + + + return ({"axis": axis, "values": values},) + + +#Checkpoints +class XYplot_Checkpoint: + + modes = ["Ckpt Names", "Ckpt Names+ClipSkip", "Ckpt Names+ClipSkip+VAE"] + + @classmethod + def INPUT_TYPES(cls): + + checkpoints = ["None"] + folder_paths.get_filename_list("checkpoints") + vaes = ["Baked VAE"] + folder_paths.get_filename_list("vae") + + inputs = { + "required": { + "input_mode": (cls.modes,), + "ckpt_count": ("INT", {"default": 3, "min": 0, "max": 10, "step": 1}), + } + } + + for i in range(1, 10 + 1): + inputs["required"][f"ckpt_name_{i}"] = (checkpoints,) + inputs["required"][f"clip_skip_{i}"] = ("INT", {"default": -1, "min": -24, "max": -1, "step": 1}) + inputs["required"][f"vae_name_{i}"] = (vaes,) + + inputs["optional"] = { + "optional_lora_stack": ("LORA_STACK",) + } + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, input_mode, ckpt_count, **kwargs): + + axis = "advanced: Checkpoint" + + checkpoints = [kwargs.get(f"ckpt_name_{i}") for i in range(1, ckpt_count + 1)] + clip_skips = [kwargs.get(f"clip_skip_{i}") for i in range(1, ckpt_count + 1)] + vaes = [kwargs.get(f"vae_name_{i}") for i in range(1, ckpt_count + 1)] + + # Set None 
for Clip Skip and/or VAE if not correct modes + for i in range(ckpt_count): + if "ClipSkip" not in input_mode: + clip_skips[i] = 'None' + if "VAE" not in input_mode: + vaes[i] = 'None' + + # Extend each sub-array with lora_stack if it's not None + values = [checkpoint.replace(',', '*')+','+str(clip_skip)+','+vae.replace(',', '*') for checkpoint, clip_skip, vae in zip(checkpoints, clip_skips, vaes) if + checkpoint != "None"] + + optional_lora_stack = kwargs.get("optional_lora_stack") if "optional_lora_stack" in kwargs else [] + + xy_values = {"axis": axis, "values": values, "lora_stack": optional_lora_stack} + return (xy_values,) + +#Loras +class XYplot_Lora: + + modes = ["Lora Names", "Lora Names+Weights"] + + @classmethod + def INPUT_TYPES(cls): + loras = ["None"] + folder_paths.get_filename_list("loras") + + inputs = { + "required": { + "input_mode": (cls.modes,), + "lora_count": ("INT", {"default": 3, "min": 0, "max": 10, "step": 1}), + "model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + "clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}), + } + } + + for i in range(1, 10 + 1): + inputs["required"][f"lora_name_{i}"] = (loras,) + inputs["required"][f"model_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}) + inputs["required"][f"clip_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}) + + inputs["optional"] = { + "optional_lora_stack": ("LORA_STACK",) + } + return inputs + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, input_mode, lora_count, model_strength, clip_strength, **kwargs): + + axis = "advanced: Lora" + # Extract values from kwargs + loras = [kwargs.get(f"lora_name_{i}") for i in range(1, lora_count + 1)] + model_strs = [kwargs.get(f"model_str_{i}", model_strength) for i in range(1, lora_count + 1)] + clip_strs = 
[kwargs.get(f"clip_str_{i}", clip_strength) for i in range(1, lora_count + 1)] + + # Use model_strength and clip_strength for the loras where values are not provided + if "Weights" not in input_mode: + for i in range(lora_count): + model_strs[i] = model_strength + clip_strs[i] = clip_strength + + # Extend each sub-array with lora_stack if it's not None + values = [lora.replace(',', '*')+','+str(model_str)+','+str(clip_str) for lora, model_str, clip_str + in zip(loras, model_strs, clip_strs) if lora != "None"] + + optional_lora_stack = kwargs.get("optional_lora_stack") if "optional_lora_stack" in kwargs else [] + + print(values) + xy_values = {"axis": axis, "values": values, "lora_stack": optional_lora_stack} + return (xy_values,) + +# 模型叠加 +class XYplot_ModelMergeBlocks: + + @classmethod + def INPUT_TYPES(s): + checkpoints = folder_paths.get_filename_list("checkpoints") + vae = ["Use Model 1", "Use Model 2"] + folder_paths.get_filename_list("vae") + + preset = ["Preset"] # 20 + preset += load_preset("mmb-preset.txt") + preset += load_preset("mmb-preset.custom.txt") + + default_vectors = "1,0,0; \n0,1,0; \n0,0,1; \n1,1,0; \n1,0,1; \n0,1,1; " + return { + "required": { + "ckpt_name_1": (checkpoints,), + "ckpt_name_2": (checkpoints,), + "vae_use": (vae, {"default": "Use Model 1"}), + "preset": (preset, {"default": "preset"}), + "values": ("STRING", {"default": default_vectors, "multiline": True, "placeholder": 'Support 2 methods:\n\n1.input, middle, out in same line and insert values seperated by "; "\n\n2.model merge block number seperated by ", " in same line and insert values seperated by "; "'}), + }, + "hidden": {"my_unique_id": "UNIQUE_ID"} + } + + RETURN_TYPES = ("X_Y",) + RETURN_NAMES = ("X or Y",) + FUNCTION = "xy_value" + + CATEGORY = "EasyUse/XY Inputs" + + def xy_value(self, ckpt_name_1, ckpt_name_2, vae_use, preset, values, my_unique_id=None): + + axis = "advanced: ModelMergeBlocks" + if ckpt_name_1 is None: + raise Exception("ckpt_name_1 is not found") + 
if ckpt_name_2 is None: + raise Exception("ckpt_name_2 is not found") + + models = (ckpt_name_1, ckpt_name_2) + + xy_values = {"axis":axis, "values":values, "models":models, "vae_use": vae_use} + return (xy_values,) \ No newline at end of file From 7a842bd757be26b66b8785f032f9567082aea8c3 Mon Sep 17 00:00:00 2001 From: yolain Date: Wed, 5 Jun 2024 14:20:35 +0800 Subject: [PATCH 03/49] fix:clear the original data when selecting different styles #194 --- web/js/easy/easySelector.js | 1 + 1 file changed, 1 insertion(+) diff --git a/web/js/easy/easySelector.js b/web/js/easy/easySelector.js index 1d1a3a8..22ee309 100644 --- a/web/js/easy/easySelector.js +++ b/web/js/easy/easySelector.js @@ -180,6 +180,7 @@ app.registerExtension({ selector.element.children[1].innerHTML='' if(styles_list_cache[styles_values]){ let tags = styles_list_cache[styles_values] + this.properties["values"] = [] // 重新排序 if(selector.value) tags = tags.sort((a,b)=> selector.value.includes(b.name) - selector.value.includes(a.name)) let list = getTagList(tags, value, language); From 6eed75df2aa15afd1f091598a26d8f091f2ea390 Mon Sep 17 00:00:00 2001 From: yolain Date: Wed, 5 Jun 2024 22:05:21 +0800 Subject: [PATCH 04/49] integration of brushnet code --- __init__.py | 2 +- py/brushnet/__init__.py | 806 ++++++ py/brushnet/config/brushnet.json | 58 + py/brushnet/config/brushnet_xl.json | 63 + py/brushnet/config/powerpaint.json | 57 + py/brushnet/model.py | 1688 ++++++++++++ py/brushnet/model_patch.py | 121 + py/brushnet/powerpaint_utils.py | 467 ++++ py/brushnet/unet_2d_blocks.py | 3908 +++++++++++++++++++++++++++ py/brushnet/unet_2d_condition.py | 1359 ++++++++++ py/dynamiCrafter/__init__.py | 2 + py/easyNodes.py | 83 +- 12 files changed, 8550 insertions(+), 64 deletions(-) create mode 100644 py/brushnet/__init__.py create mode 100644 py/brushnet/config/brushnet.json create mode 100644 py/brushnet/config/brushnet_xl.json create mode 100644 py/brushnet/config/powerpaint.json create mode 100644 
py/brushnet/model.py create mode 100644 py/brushnet/model_patch.py create mode 100644 py/brushnet/powerpaint_utils.py create mode 100644 py/brushnet/unet_2d_blocks.py create mode 100644 py/brushnet/unet_2d_condition.py diff --git a/__init__.py b/__init__.py index 2326f14..fac9e82 100644 --- a/__init__.py +++ b/__init__.py @@ -1,4 +1,4 @@ -__version__ = "1.1.8" +__version__ = "1.1.9" import os import folder_paths diff --git a/py/brushnet/__init__.py b/py/brushnet/__init__.py new file mode 100644 index 0000000..5955fe9 --- /dev/null +++ b/py/brushnet/__init__.py @@ -0,0 +1,806 @@ +#credit to nullquant for this module +#from https://github.com/nullquant/ComfyUI-BrushNet + +import os +import types + +import torch +from accelerate import init_empty_weights, load_checkpoint_and_dispatch + +import comfy + +from .model import BrushNetModel, PowerPaintModel +from .model_patch import add_model_patch_option, patch_model_function_wrapper +from .powerpaint_utils import TokenizerWrapper, add_tokens + +cwd_path = os.path.dirname(os.path.realpath(__file__)) +brushnet_config_file = os.path.join(cwd_path, 'config', 'brushnet.json') +brushnet_xl_config_file = os.path.join(cwd_path, 'config', 'brushnet_xl.json') +powerpaint_config_file = os.path.join(cwd_path, 'config', 'powerpaint.json') + +sd15_scaling_factor = 0.18215 +sdxl_scaling_factor = 0.13025 + +ModelsToUnload = [comfy.sd1_clip.SD1ClipModel, comfy.ldm.models.autoencoder.AutoencoderKL] + +class BrushNet: + + # Check models compatibility + def check_compatibilty(self, model, brushnet): + is_SDXL = False + is_PP = False + if isinstance(model.model.model_config, comfy.supported_models.SD15): + print('Base model type: SD1.5') + is_SDXL = False + if brushnet["SDXL"]: + raise Exception("Base model is SD15, but BrushNet is SDXL type") + if brushnet["PP"]: + is_PP = True + elif isinstance(model.model.model_config, comfy.supported_models.SDXL): + print('Base model type: SDXL') + is_SDXL = True + if not brushnet["SDXL"]: + raise 
Exception("Base model is SDXL, but BrushNet is SD15 type") + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: " + str(type(model.model.model_config))) + + return (is_SDXL, is_PP) + + def check_image_mask(self, image, mask, name): + if len(image.shape) < 4: + # image tensor shape should be [B, H, W, C], but batch somehow is missing + image = image[None, :, :, :] + + if len(mask.shape) > 3: + # mask tensor shape should be [B, H, W] but we get [B, H, W, C], image may be? + # take first mask, red channel + mask = (mask[:, :, :, 0])[:, :, :] + elif len(mask.shape) < 3: + # mask tensor shape should be [B, H, W] but batch somehow is missing + mask = mask[None, :, :] + + if image.shape[0] > mask.shape[0]: + print(name, "gets batch of images (%d) but only %d masks" % (image.shape[0], mask.shape[0])) + if mask.shape[0] == 1: + print(name, "will copy the mask to fill batch") + mask = torch.cat([mask] * image.shape[0], dim=0) + else: + print(name, "will add empty masks to fill batch") + empty_mask = torch.zeros([image.shape[0] - mask.shape[0], mask.shape[1], mask.shape[2]]) + mask = torch.cat([mask, empty_mask], dim=0) + elif image.shape[0] < mask.shape[0]: + print(name, "gets batch of images (%d) but too many (%d) masks" % (image.shape[0], mask.shape[0])) + mask = mask[:image.shape[0], :, :] + + return (image, mask) + + # Prepare image and mask + def prepare_image(self, image, mask): + + image, mask = self.check_image_mask(image, mask, 'BrushNet') + + print("BrushNet image.shape =", image.shape, "mask.shape =", mask.shape) + + if mask.shape[2] != image.shape[2] or mask.shape[1] != image.shape[1]: + raise Exception("Image and mask should be the same size") + + # As a suggestion of inferno46n2 (https://github.com/nullquant/ComfyUI-BrushNet/issues/64) + mask = mask.round() + + masked_image = image * (1.0 - mask[:, :, :, None]) + + return (masked_image, mask) + + # Get origin of the mask + def cut_with_mask(self, mask, 
width, height): + iy, ix = (mask == 1).nonzero(as_tuple=True) + + h0, w0 = mask.shape + + if iy.numel() == 0: + x_c = w0 / 2.0 + y_c = h0 / 2.0 + else: + x_min = ix.min().item() + x_max = ix.max().item() + y_min = iy.min().item() + y_max = iy.max().item() + + if x_max - x_min > width or y_max - y_min > height: + raise Exception("Mask is bigger than provided dimensions") + + x_c = (x_min + x_max) / 2.0 + y_c = (y_min + y_max) / 2.0 + + width2 = width / 2.0 + height2 = height / 2.0 + + if w0 <= width: + x0 = 0 + w = w0 + else: + x0 = max(0, x_c - width2) + w = width + if x0 + width > w0: + x0 = w0 - width + + if h0 <= height: + y0 = 0 + h = h0 + else: + y0 = max(0, y_c - height2) + h = height + if y0 + height > h0: + y0 = h0 - height + + return (int(x0), int(y0), int(w), int(h)) + + # Prepare conditioning_latents + @torch.inference_mode() + def get_image_latents(self, masked_image, mask, vae, scaling_factor): + processed_image = masked_image.to(vae.device) + image_latents = vae.encode(processed_image[:, :, :, :3]) * scaling_factor + processed_mask = 1. 
- mask[:, None, :, :] + interpolated_mask = torch.nn.functional.interpolate( + processed_mask, + size=( + image_latents.shape[-2], + image_latents.shape[-1] + ) + ) + interpolated_mask = interpolated_mask.to(image_latents.device) + + conditioning_latents = [image_latents, interpolated_mask] + + print('BrushNet CL: image_latents shape =', image_latents.shape, 'interpolated_mask shape =', + interpolated_mask.shape) + + return conditioning_latents + + def brushnet_blocks(self, sd): + brushnet_down_block = 0 + brushnet_mid_block = 0 + brushnet_up_block = 0 + for key in sd: + if 'brushnet_down_block' in key: + brushnet_down_block += 1 + if 'brushnet_mid_block' in key: + brushnet_mid_block += 1 + if 'brushnet_up_block' in key: + brushnet_up_block += 1 + return (brushnet_down_block, brushnet_mid_block, brushnet_up_block, len(sd)) + + def get_model_type(self, brushnet_file): + sd = comfy.utils.load_torch_file(brushnet_file) + brushnet_down_block, brushnet_mid_block, brushnet_up_block, keys = self.brushnet_blocks(sd) + del sd + if brushnet_down_block == 24 and brushnet_mid_block == 2 and brushnet_up_block == 30: + is_SDXL = False + if keys == 322: + is_PP = False + print('BrushNet model type: SD1.5') + else: + is_PP = True + print('PowerPaint model type: SD1.5') + elif brushnet_down_block == 18 and brushnet_mid_block == 2 and brushnet_up_block == 22: + print('BrushNet model type: Loading SDXL') + is_SDXL = True + is_PP = False + else: + raise Exception("Unknown BrushNet model") + return is_SDXL, is_PP + + def load_brushnet_model(self, brushnet_file, dtype='float16'): + is_SDXL, is_PP = self.get_model_type(brushnet_file) + with init_empty_weights(): + if is_SDXL: + brushnet_config = BrushNetModel.load_config(brushnet_xl_config_file) + brushnet_model = BrushNetModel.from_config(brushnet_config) + elif is_PP: + brushnet_config = PowerPaintModel.load_config(powerpaint_config_file) + brushnet_model = PowerPaintModel.from_config(brushnet_config) + else: + brushnet_config = 
BrushNetModel.load_config(brushnet_config_file) + brushnet_model = BrushNetModel.from_config(brushnet_config) + if is_PP: + print("PowerPaint model file:", brushnet_file) + else: + print("BrushNet model file:", brushnet_file) + + if dtype == 'float16': + torch_dtype = torch.float16 + elif dtype == 'bfloat16': + torch_dtype = torch.bfloat16 + elif dtype == 'float32': + torch_dtype = torch.float32 + else: + torch_dtype = torch.float64 + + brushnet_model = load_checkpoint_and_dispatch( + brushnet_model, + brushnet_file, + device_map="sequential", + max_memory=None, + offload_folder=None, + offload_state_dict=False, + dtype=torch_dtype, + force_hooks=False, + ) + + if is_PP: + print("PowerPaint model is loaded") + elif is_SDXL: + print("BrushNet SDXL model is loaded") + else: + print("BrushNet SD1.5 model is loaded") + + return ({"brushnet": brushnet_model, "SDXL": is_SDXL, "PP": is_PP, "dtype": torch_dtype},) + + def brushnet_model_update(self, model, vae, image, mask, brushnet, positive, negative, scale, start_at, end_at): + + is_SDXL, is_PP = self.check_compatibilty(model, brushnet) + + if is_PP: + raise Exception("PowerPaint model was loaded, please use PowerPaint node") + + # Make a copy of the model so that we're not patching it everywhere in the workflow. 
+ model = model.clone() + + # prepare image and mask + # no batches for original image and mask + masked_image, mask = self.prepare_image(image, mask) + + batch = masked_image.shape[0] + width = masked_image.shape[2] + height = masked_image.shape[1] + + if hasattr(model.model.model_config, 'latent_format') and hasattr(model.model.model_config.latent_format, + 'scale_factor'): + scaling_factor = model.model.model_config.latent_format.scale_factor + elif is_SDXL: + scaling_factor = sdxl_scaling_factor + else: + scaling_factor = sd15_scaling_factor + + torch_dtype = brushnet['dtype'] + + # prepare conditioning latents + conditioning_latents = self.get_image_latents(masked_image, mask, vae, scaling_factor) + conditioning_latents[0] = conditioning_latents[0].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + conditioning_latents[1] = conditioning_latents[1].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + + # unload vae + del vae + for loaded_model in comfy.model_management.current_loaded_models: + if type(loaded_model.model.model) in ModelsToUnload: + comfy.model_management.current_loaded_models.remove(loaded_model) + loaded_model.model_unload() + del loaded_model + + # prepare embeddings + prompt_embeds = positive[0][0].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + negative_prompt_embeds = negative[0][0].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + + max_tokens = max(prompt_embeds.shape[1], negative_prompt_embeds.shape[1]) + if prompt_embeds.shape[1] < max_tokens: + multiplier = max_tokens // 77 - prompt_embeds.shape[1] // 77 + prompt_embeds = torch.concat([prompt_embeds] + [prompt_embeds[:, -77:, :]] * multiplier, dim=1) + print('BrushNet: negative prompt more than 75 tokens:', negative_prompt_embeds.shape, + 'multiplying prompt_embeds') + if negative_prompt_embeds.shape[1] < max_tokens: + multiplier = max_tokens // 77 - negative_prompt_embeds.shape[1] // 77 + negative_prompt_embeds = torch.concat( + [negative_prompt_embeds] + 
[negative_prompt_embeds[:, -77:, :]] * multiplier, dim=1) + print('BrushNet: positive prompt more than 75 tokens:', prompt_embeds.shape, + 'multiplying negative_prompt_embeds') + + if len(positive[0]) > 1 and 'pooled_output' in positive[0][1] and positive[0][1]['pooled_output'] is not None: + pooled_prompt_embeds = positive[0][1]['pooled_output'].to(dtype=torch_dtype).to(brushnet['brushnet'].device) + else: + print('BrushNet: positive conditioning has not pooled_output') + if is_SDXL: + print('BrushNet will not produce correct results') + pooled_prompt_embeds = torch.empty([2, 1280], device=brushnet['brushnet'].device).to(dtype=torch_dtype) + + if len(negative[0]) > 1 and 'pooled_output' in negative[0][1] and negative[0][1]['pooled_output'] is not None: + negative_pooled_prompt_embeds = negative[0][1]['pooled_output'].to(dtype=torch_dtype).to( + brushnet['brushnet'].device) + else: + print('BrushNet: negative conditioning has not pooled_output') + if is_SDXL: + print('BrushNet will not produce correct results') + negative_pooled_prompt_embeds = torch.empty([1, pooled_prompt_embeds.shape[1]], + device=brushnet['brushnet'].device).to(dtype=torch_dtype) + + time_ids = torch.FloatTensor([[height, width, 0., 0., height, width]]).to(dtype=torch_dtype).to( + brushnet['brushnet'].device) + + if not is_SDXL: + pooled_prompt_embeds = None + negative_pooled_prompt_embeds = None + time_ids = None + + # apply patch to model + brushnet_conditioning_scale = scale + control_guidance_start = start_at + control_guidance_end = end_at + + add_brushnet_patch(model, + brushnet['brushnet'], + torch_dtype, + conditioning_latents, + (brushnet_conditioning_scale, control_guidance_start, control_guidance_end), + prompt_embeds, negative_prompt_embeds, + pooled_prompt_embeds, negative_pooled_prompt_embeds, time_ids, + False) + + latent = torch.zeros([batch, 4, conditioning_latents[0].shape[2], conditioning_latents[0].shape[3]], + device=brushnet['brushnet'].device) + + return (model, positive, 
negative, {"samples": latent},) + + #powperpaint + def load_powerpaint_clip(self, base_clip_file, pp_clip_file): + pp_clip = comfy.sd.load_clip(ckpt_paths=[base_clip_file]) + + print('PowerPaint base CLIP file: ', base_clip_file) + + pp_tokenizer = TokenizerWrapper(pp_clip.tokenizer.clip_l.tokenizer) + pp_text_encoder = pp_clip.patcher.model.clip_l.transformer + + add_tokens( + tokenizer=pp_tokenizer, + text_encoder=pp_text_encoder, + placeholder_tokens=["P_ctxt", "P_shape", "P_obj"], + initialize_tokens=["a", "a", "a"], + num_vectors_per_token=10, + ) + + pp_text_encoder.load_state_dict(comfy.utils.load_torch_file(pp_clip_file), strict=False) + + print('PowerPaint CLIP file: ', pp_clip_file) + + pp_clip.tokenizer.clip_l.tokenizer = pp_tokenizer + pp_clip.patcher.model.clip_l.transformer = pp_text_encoder + + return (pp_clip,) + + def powerpaint_model_update(self, model, vae, image, mask, powerpaint, clip, positive, negative, fitting, function, scale, start_at, end_at, save_memory): + is_SDXL, is_PP = self.check_compatibilty(model, powerpaint) + if not is_PP: + raise Exception("BrushNet model was loaded, please use BrushNet node") + + # Make a copy of the model so that we're not patching it everywhere in the workflow. 
+ model = model.clone() + + # prepare image and mask + # no batches for original image and mask + masked_image, mask = self.prepare_image(image, mask) + + batch = masked_image.shape[0] + # width = masked_image.shape[2] + # height = masked_image.shape[1] + + if hasattr(model.model.model_config, 'latent_format') and hasattr(model.model.model_config.latent_format, + 'scale_factor'): + scaling_factor = model.model.model_config.latent_format.scale_factor + else: + scaling_factor = sd15_scaling_factor + + torch_dtype = powerpaint['dtype'] + + # prepare conditioning latents + conditioning_latents = self.get_image_latents(masked_image, mask, vae, scaling_factor) + conditioning_latents[0] = conditioning_latents[0].to(dtype=torch_dtype).to(powerpaint['brushnet'].device) + conditioning_latents[1] = conditioning_latents[1].to(dtype=torch_dtype).to(powerpaint['brushnet'].device) + + # prepare embeddings + + if function == "object removal": + promptA = "P_ctxt" + promptB = "P_ctxt" + negative_promptA = "P_obj" + negative_promptB = "P_obj" + print('You should add to positive prompt: "empty scene blur"') + # positive = positive + " empty scene blur" + elif function == "context aware": + promptA = "P_ctxt" + promptB = "P_ctxt" + negative_promptA = "" + negative_promptB = "" + # positive = positive + " empty scene" + print('You should add to positive prompt: "empty scene"') + elif function == "shape guided": + promptA = "P_shape" + promptB = "P_ctxt" + negative_promptA = "P_shape" + negative_promptB = "P_ctxt" + elif function == "image outpainting": + promptA = "P_ctxt" + promptB = "P_ctxt" + negative_promptA = "P_obj" + negative_promptB = "P_obj" + # positive = positive + " empty scene" + print('You should add to positive prompt: "empty scene"') + else: + promptA = "P_obj" + promptB = "P_obj" + negative_promptA = "P_obj" + negative_promptB = "P_obj" + + tokens = clip.tokenize(promptA) + prompt_embedsA = clip.encode_from_tokens(tokens, return_pooled=False) + + tokens = 
clip.tokenize(negative_promptA) + negative_prompt_embedsA = clip.encode_from_tokens(tokens, return_pooled=False) + + tokens = clip.tokenize(promptB) + prompt_embedsB = clip.encode_from_tokens(tokens, return_pooled=False) + + tokens = clip.tokenize(negative_promptB) + negative_prompt_embedsB = clip.encode_from_tokens(tokens, return_pooled=False) + + prompt_embeds_pp = (prompt_embedsA * fitting + (1.0 - fitting) * prompt_embedsB).to(dtype=torch_dtype).to( + powerpaint['brushnet'].device) + negative_prompt_embeds_pp = (negative_prompt_embedsA * fitting + (1.0 - fitting) * negative_prompt_embedsB).to( + dtype=torch_dtype).to(powerpaint['brushnet'].device) + + # unload vae and CLIPs + del vae + del clip + for loaded_model in comfy.model_management.current_loaded_models: + if type(loaded_model.model.model) in ModelsToUnload: + comfy.model_management.current_loaded_models.remove(loaded_model) + loaded_model.model_unload() + del loaded_model + + # apply patch to model + + brushnet_conditioning_scale = scale + control_guidance_start = start_at + control_guidance_end = end_at + + if save_memory != 'none': + powerpaint['brushnet'].set_attention_slice(save_memory) + + add_brushnet_patch(model, + powerpaint['brushnet'], + torch_dtype, + conditioning_latents, + (brushnet_conditioning_scale, control_guidance_start, control_guidance_end), + negative_prompt_embeds_pp, prompt_embeds_pp, + None, None, None, + False) + + latent = torch.zeros([batch, 4, conditioning_latents[0].shape[2], conditioning_latents[0].shape[3]], + device=powerpaint['brushnet'].device) + + return (model, positive, negative, {"samples": latent},) +@torch.inference_mode() +def brushnet_inference(x, timesteps, transformer_options, debug): + if 'model_patch' not in transformer_options: + print('BrushNet inference: there is no model_patch key in transformer_options') + return ([], 0, []) + mp = transformer_options['model_patch'] + if 'brushnet' not in mp: + print('BrushNet inference: there is no brushnet key in 
mdel_patch') + return ([], 0, []) + bo = mp['brushnet'] + if 'model' not in bo: + print('BrushNet inference: there is no model key in brushnet') + return ([], 0, []) + brushnet = bo['model'] + if not (isinstance(brushnet, BrushNetModel) or isinstance(brushnet, PowerPaintModel)): + print('BrushNet model is not a BrushNetModel class') + return ([], 0, []) + + torch_dtype = bo['dtype'] + cl_list = bo['latents'] + brushnet_conditioning_scale, control_guidance_start, control_guidance_end = bo['controls'] + pe = bo['prompt_embeds'] + npe = bo['negative_prompt_embeds'] + ppe, nppe, time_ids = bo['add_embeds'] + + #do_classifier_free_guidance = mp['free_guidance'] + do_classifier_free_guidance = len(transformer_options['cond_or_uncond']) > 1 + + x = x.detach().clone() + x = x.to(torch_dtype).to(brushnet.device) + + timesteps = timesteps.detach().clone() + timesteps = timesteps.to(torch_dtype).to(brushnet.device) + + total_steps = mp['total_steps'] + step = mp['step'] + + added_cond_kwargs = {} + + if do_classifier_free_guidance and step == 0: + print('BrushNet inference: do_classifier_free_guidance is True') + + sub_idx = None + if 'ad_params' in transformer_options and 'sub_idxs' in transformer_options['ad_params']: + sub_idx = transformer_options['ad_params']['sub_idxs'] + + # we have batch input images + batch = cl_list[0].shape[0] + # we have incoming latents + latents_incoming = x.shape[0] + # and we already got some + latents_got = bo['latent_id'] + if step == 0 or batch > 1: + print('BrushNet inference, step = %d: image batch = %d, got %d latents, starting from %d' \ + % (step, batch, latents_incoming, latents_got)) + + image_latents = [] + masks = [] + prompt_embeds = [] + negative_prompt_embeds = [] + pooled_prompt_embeds = [] + negative_pooled_prompt_embeds = [] + if sub_idx: + # AnimateDiff indexes detected + if step == 0: + print('BrushNet inference: AnimateDiff indexes detected and applied') + + batch = len(sub_idx) + + if do_classifier_free_guidance: + for i 
in sub_idx: + image_latents.append(cl_list[0][i][None,:,:,:]) + masks.append(cl_list[1][i][None,:,:,:]) + prompt_embeds.append(pe) + negative_prompt_embeds.append(npe) + pooled_prompt_embeds.append(ppe) + negative_pooled_prompt_embeds.append(nppe) + for i in sub_idx: + image_latents.append(cl_list[0][i][None,:,:,:]) + masks.append(cl_list[1][i][None,:,:,:]) + else: + for i in sub_idx: + image_latents.append(cl_list[0][i][None,:,:,:]) + masks.append(cl_list[1][i][None,:,:,:]) + prompt_embeds.append(pe) + pooled_prompt_embeds.append(ppe) + else: + # do_classifier_free_guidance = 2 passes, 1st pass is cond, 2nd is uncond + continue_batch = True + for i in range(latents_incoming): + number = latents_got + i + if number < batch: + # 1st pass, cond + image_latents.append(cl_list[0][number][None,:,:,:]) + masks.append(cl_list[1][number][None,:,:,:]) + prompt_embeds.append(pe) + pooled_prompt_embeds.append(ppe) + elif do_classifier_free_guidance and number < batch * 2: + # 2nd pass, uncond + image_latents.append(cl_list[0][number-batch][None,:,:,:]) + masks.append(cl_list[1][number-batch][None,:,:,:]) + negative_prompt_embeds.append(npe) + negative_pooled_prompt_embeds.append(nppe) + else: + # latent batch + image_latents.append(cl_list[0][0][None,:,:,:]) + masks.append(cl_list[1][0][None,:,:,:]) + prompt_embeds.append(pe) + pooled_prompt_embeds.append(ppe) + latents_got = -i + continue_batch = False + + if continue_batch: + # we don't have full batch yet + if do_classifier_free_guidance: + if number < batch * 2 - 1: + bo['latent_id'] = number + 1 + else: + bo['latent_id'] = 0 + else: + if number < batch - 1: + bo['latent_id'] = number + 1 + else: + bo['latent_id'] = 0 + else: + bo['latent_id'] = 0 + + cl = [] + for il, m in zip(image_latents, masks): + cl.append(torch.concat([il, m], dim=1)) + cl2apply = torch.concat(cl, dim=0) + + conditioning_latents = cl2apply.to(torch_dtype).to(brushnet.device) + + prompt_embeds.extend(negative_prompt_embeds) + prompt_embeds = 
torch.concat(prompt_embeds, dim=0).to(torch_dtype).to(brushnet.device) + + if ppe is not None: + added_cond_kwargs = {} + added_cond_kwargs['time_ids'] = torch.concat([time_ids] * latents_incoming, dim = 0).to(torch_dtype).to(brushnet.device) + + pooled_prompt_embeds.extend(negative_pooled_prompt_embeds) + pooled_prompt_embeds = torch.concat(pooled_prompt_embeds, dim=0).to(torch_dtype).to(brushnet.device) + added_cond_kwargs['text_embeds'] = pooled_prompt_embeds + else: + added_cond_kwargs = None + + if x.shape[2] != conditioning_latents.shape[2] or x.shape[3] != conditioning_latents.shape[3]: + if step == 0: + print('BrushNet inference: image', conditioning_latents.shape, 'and latent', x.shape, 'have different size, resizing image') + conditioning_latents = torch.nn.functional.interpolate( + conditioning_latents, size=( + x.shape[2], + x.shape[3], + ), mode='bicubic', + ).to(torch_dtype).to(brushnet.device) + + if step == 0: + print('BrushNet inference: sample', x.shape, ', CL', conditioning_latents.shape, 'dtype', torch_dtype) + + if debug: print('BrushNet: step =', step) + + if step < control_guidance_start or step > control_guidance_end: + cond_scale = 0.0 + else: + cond_scale = brushnet_conditioning_scale + + return brushnet(x, + encoder_hidden_states=prompt_embeds, + brushnet_cond=conditioning_latents, + timestep = timesteps, + conditioning_scale=cond_scale, + guess_mode=False, + added_cond_kwargs=added_cond_kwargs, + return_dict=False, + debug=debug, + ) + +def add_brushnet_patch(model, brushnet, torch_dtype, conditioning_latents, + controls, + prompt_embeds, negative_prompt_embeds, + pooled_prompt_embeds, negative_pooled_prompt_embeds, time_ids, + debug): + + is_SDXL = isinstance(model.model.model_config, comfy.supported_models.SDXL) + + if is_SDXL: + input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d], + [1, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [3, 
comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [6, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [7, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.attention.SpatialTransformer]] + middle_block = [0, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock] + output_blocks = [[0, comfy.ldm.modules.attention.SpatialTransformer], + [1, comfy.ldm.modules.attention.SpatialTransformer], + [2, comfy.ldm.modules.attention.SpatialTransformer], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [3, comfy.ldm.modules.attention.SpatialTransformer], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [6, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [7, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [8, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock]] + else: + input_blocks = [[0, comfy.ops.disable_weight_init.Conv2d], + [1, comfy.ldm.modules.attention.SpatialTransformer], + [2, comfy.ldm.modules.attention.SpatialTransformer], + [3, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [6, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [7, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.attention.SpatialTransformer], + [9, comfy.ldm.modules.diffusionmodules.openaimodel.Downsample], + [10, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [11, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock]] + middle_block = [0, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock] + output_blocks = [[0, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [1, 
comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.ResBlock], + [2, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [3, comfy.ldm.modules.attention.SpatialTransformer], + [4, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.attention.SpatialTransformer], + [5, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [6, comfy.ldm.modules.attention.SpatialTransformer], + [7, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.attention.SpatialTransformer], + [8, comfy.ldm.modules.diffusionmodules.openaimodel.Upsample], + [9, comfy.ldm.modules.attention.SpatialTransformer], + [10, comfy.ldm.modules.attention.SpatialTransformer], + [11, comfy.ldm.modules.attention.SpatialTransformer]] + + def last_layer_index(block, tp): + layer_list = [] + for layer in block: + layer_list.append(type(layer)) + layer_list.reverse() + if tp not in layer_list: + return -1, layer_list.reverse() + return len(layer_list) - 1 - layer_list.index(tp), layer_list + + def brushnet_forward(model, x, timesteps, transformer_options, control): + if 'brushnet' not in transformer_options['model_patch']: + input_samples = [] + mid_sample = 0 + output_samples = [] + else: + # brushnet inference + input_samples, mid_sample, output_samples = brushnet_inference(x, timesteps, transformer_options, debug) + + # give additional samples to blocks + for i, tp in input_blocks: + idx, layer_list = last_layer_index(model.input_blocks[i], tp) + if idx < 0: + print("BrushNet can't find", tp, "layer in", i, "input block:", layer_list) + continue + model.input_blocks[i][idx].add_sample_after = input_samples.pop(0) if input_samples else 0 + + idx, layer_list = last_layer_index(model.middle_block, middle_block[1]) + if idx < 0: + print("BrushNet can't find", middle_block[1], "layer in middle block", layer_list) + model.middle_block[idx].add_sample_after = mid_sample + + for i, tp in output_blocks: + 
idx, layer_list = last_layer_index(model.output_blocks[i], tp) + if idx < 0: + print("BrushNet can't find", tp, "layer in", i, "outnput block:", layer_list) + continue + model.output_blocks[i][idx].add_sample_after = output_samples.pop(0) if output_samples else 0 + + patch_model_function_wrapper(model, brushnet_forward) + + to = add_model_patch_option(model) + mp = to['model_patch'] + if 'brushnet' not in mp: + mp['brushnet'] = {} + bo = mp['brushnet'] + + bo['model'] = brushnet + bo['dtype'] = torch_dtype + bo['latents'] = conditioning_latents + bo['controls'] = controls + bo['prompt_embeds'] = prompt_embeds + bo['negative_prompt_embeds'] = negative_prompt_embeds + bo['add_embeds'] = (pooled_prompt_embeds, negative_pooled_prompt_embeds, time_ids) + bo['latent_id'] = 0 + + # patch layers `forward` so we can apply brushnet + def forward_patched_by_brushnet(self, x, *args, **kwargs): + h = self.original_forward(x, *args, **kwargs) + if hasattr(self, 'add_sample_after') and type(self): + to_add = self.add_sample_after + if torch.is_tensor(to_add): + # interpolate due to RAUNet + if h.shape[2] != to_add.shape[2] or h.shape[3] != to_add.shape[3]: + to_add = torch.nn.functional.interpolate(to_add, size=(h.shape[2], h.shape[3]), mode='bicubic') + h += to_add.to(h.dtype).to(h.device) + else: + h += self.add_sample_after + self.add_sample_after = 0 + return h + + for i, block in enumerate(model.model.diffusion_model.input_blocks): + for j, layer in enumerate(block): + if not hasattr(layer, 'original_forward'): + layer.original_forward = layer.forward + layer.forward = types.MethodType(forward_patched_by_brushnet, layer) + layer.add_sample_after = 0 + + for j, layer in enumerate(model.model.diffusion_model.middle_block): + if not hasattr(layer, 'original_forward'): + layer.original_forward = layer.forward + layer.forward = types.MethodType(forward_patched_by_brushnet, layer) + layer.add_sample_after = 0 + + for i, block in 
enumerate(model.model.diffusion_model.output_blocks): + for j, layer in enumerate(block): + if not hasattr(layer, 'original_forward'): + layer.original_forward = layer.forward + layer.forward = types.MethodType(forward_patched_by_brushnet, layer) + layer.add_sample_after = 0 \ No newline at end of file diff --git a/py/brushnet/config/brushnet.json b/py/brushnet/config/brushnet.json new file mode 100644 index 0000000..c22ae15 --- /dev/null +++ b/py/brushnet/config/brushnet.json @@ -0,0 +1,58 @@ +{ + "_class_name": "BrushNetModel", + "_diffusers_version": "0.27.0.dev0", + "_name_or_path": "runs/logs/brushnet_randommask/checkpoint-100000", + "act_fn": "silu", + "addition_embed_type": null, + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": null, + "attention_head_dim": 8, + "block_out_channels": [ + 320, + 640, + 1280, + 1280 + ], + "brushnet_conditioning_channel_order": "rgb", + "class_embed_type": null, + "conditioning_channels": 5, + "conditioning_embedding_out_channels": [ + 16, + 32, + 96, + 256 + ], + "cross_attention_dim": 768, + "down_block_types": [ + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "global_pool_conditions": false, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_scale_factor": 1, + "mid_block_type": "MidBlock2D", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "projection_class_embeddings_input_dim": null, + "resnet_time_scale_shift": "default", + "transformer_layers_per_block": 1, + "up_block_types": [ + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D" + ], + "upcast_attention": false, + "use_linear_projection": false +} \ No newline at end of file diff --git a/py/brushnet/config/brushnet_xl.json b/py/brushnet/config/brushnet_xl.json new file mode 100644 index 
0000000..1e0ebb7 --- /dev/null +++ b/py/brushnet/config/brushnet_xl.json @@ -0,0 +1,63 @@ +{ + "_class_name": "BrushNetModel", + "_diffusers_version": "0.27.0.dev0", + "_name_or_path": "runs/logs/brushnetsdxl_randommask/checkpoint-80000", + "act_fn": "silu", + "addition_embed_type": "text_time", + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": 256, + "attention_head_dim": [ + 5, + 10, + 20 + ], + "block_out_channels": [ + 320, + 640, + 1280 + ], + "brushnet_conditioning_channel_order": "rgb", + "class_embed_type": null, + "conditioning_channels": 5, + "conditioning_embedding_out_channels": [ + 16, + 32, + 96, + 256 + ], + "cross_attention_dim": 2048, + "down_block_types": [ + "DownBlock2D", + "DownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "global_pool_conditions": false, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_scale_factor": 1, + "mid_block_type": "MidBlock2D", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "projection_class_embeddings_input_dim": 2816, + "resnet_time_scale_shift": "default", + "transformer_layers_per_block": [ + 1, + 2, + 10 + ], + "up_block_types": [ + "UpBlock2D", + "UpBlock2D", + "UpBlock2D" + ], + "upcast_attention": null, + "use_linear_projection": true +} \ No newline at end of file diff --git a/py/brushnet/config/powerpaint.json b/py/brushnet/config/powerpaint.json new file mode 100644 index 0000000..1ea9700 --- /dev/null +++ b/py/brushnet/config/powerpaint.json @@ -0,0 +1,57 @@ +{ + "_class_name": "BrushNetModel", + "_diffusers_version": "0.27.2", + "act_fn": "silu", + "addition_embed_type": null, + "addition_embed_type_num_heads": 64, + "addition_time_embed_dim": null, + "attention_head_dim": 8, + "block_out_channels": [ + 320, + 640, + 1280, + 1280 + ], + 
"brushnet_conditioning_channel_order": "rgb", + "class_embed_type": null, + "conditioning_channels": 5, + "conditioning_embedding_out_channels": [ + 16, + 32, + 96, + 256 + ], + "cross_attention_dim": 768, + "down_block_types": [ + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D" + ], + "downsample_padding": 1, + "encoder_hid_dim": null, + "encoder_hid_dim_type": null, + "flip_sin_to_cos": true, + "freq_shift": 0, + "global_pool_conditions": false, + "in_channels": 4, + "layers_per_block": 2, + "mid_block_scale_factor": 1, + "mid_block_type": "UNetMidBlock2DCrossAttn", + "norm_eps": 1e-05, + "norm_num_groups": 32, + "num_attention_heads": null, + "num_class_embeds": null, + "only_cross_attention": false, + "projection_class_embeddings_input_dim": null, + "resnet_time_scale_shift": "default", + "transformer_layers_per_block": 1, + "up_block_types": [ + "UpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D" + ], + "upcast_attention": false, + "use_linear_projection": false +} \ No newline at end of file diff --git a/py/brushnet/model.py b/py/brushnet/model.py new file mode 100644 index 0000000..409b7f8 --- /dev/null +++ b/py/brushnet/model.py @@ -0,0 +1,1688 @@ +from dataclasses import dataclass +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +from torch import nn + +from ..libs.utils import install_package +try: + install_package("diffusers", "0.27.2", True, "0.25.0") + + from diffusers.configuration_utils import ConfigMixin, register_to_config + from diffusers.utils import BaseOutput, logging + from diffusers.models.attention_processor import ( + ADDED_KV_ATTENTION_PROCESSORS, + CROSS_ATTENTION_PROCESSORS, + AttentionProcessor, + AttnAddedKVProcessor, + AttnProcessor, + ) + from diffusers.models.embeddings import TextImageProjection, TextImageTimeEmbedding, TextTimeEmbedding, TimestepEmbedding, Timesteps + from diffusers.models.modeling_utils import ModelMixin + from 
diffusers.models.resnet import ResnetBlock2D + from diffusers.models.unets.unet_2d_condition import UNet2DConditionModel + + from diffusers.models.transformers.dual_transformer_2d import DualTransformer2DModel + from diffusers.models.transformers.transformer_2d import Transformer2DModel + + from .unet_2d_blocks import ( + CrossAttnDownBlock2D, + DownBlock2D, + get_down_block, + get_mid_block, + get_up_block, + ) + + from .unet_2d_condition import UNet2DConditionModel + + logger = logging.get_logger(__name__) + + def zero_module(module): + for p in module.parameters(): + nn.init.zeros_(p) + return module + + @dataclass + class BrushNetOutput(BaseOutput): + + up_block_res_samples: Tuple[torch.Tensor] + down_block_res_samples: Tuple[torch.Tensor] + mid_block_res_sample: torch.Tensor + + # BrushNetModel + class BrushNetModel(ModelMixin, ConfigMixin): + """A BrushNet model.""" + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + in_channels: int = 4, + conditioning_channels: int = 5, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str, ...] = ( + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + "DownBlock2D", + ), + mid_block_type: Optional[str] = "UNetMidBlock2D", + up_block_types: Tuple[str, ...] = ( + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + "UpBlock2D", + ), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int, ...] 
= (320, 640, 1280, 1280), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + transformer_layers_per_block: Union[int, Tuple[int, ...]] = 1, + encoder_hid_dim: Optional[int] = None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int, ...]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int, ...]]] = None, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + projection_class_embeddings_input_dim: Optional[int] = None, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + global_pool_conditions: bool = False, + addition_embed_type_num_heads: int = 64, + ): + super().__init__() + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. + num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. 
`down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." + ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." + ) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + # input + conv_in_kernel = 3 + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in_condition = nn.Conv2d( + in_channels + conditioning_channels, block_out_channels[0], kernel_size=conv_in_kernel, + padding=conv_in_padding + ) + + # time + time_embed_dim = block_out_channels[0] * 4 + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + ) + + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + print("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when 
`encoder_hid_dim_type` is set to {encoder_hid_dim_type}." + ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kadinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." + ) + else: + self.encoder_hid_proj = None + + # class embedding + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. 
+ self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kadinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, + time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + self.down_blocks = nn.ModuleList([]) + self.brushnet_down_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + # down + output_channel = block_out_channels[0] + + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + 
for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[i], + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + downsample_padding=downsample_padding, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + self.down_blocks.append(down_block) + + for _ in range(layers_per_block): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + # mid + mid_block_channel = block_out_channels[-1] + + brushnet_block = nn.Conv2d(mid_block_channel, mid_block_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_mid_block = brushnet_block + + self.mid_block = get_mid_block( + mid_block_type, + transformer_layers_per_block=transformer_layers_per_block[-1], + in_channels=mid_block_channel, + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + 
cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[-1], + resnet_groups=norm_num_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_transformer_layers_per_block = (list(reversed(transformer_layers_per_block))) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = reversed_block_out_channels[0] + + self.up_blocks = nn.ModuleList([]) + self.brushnet_up_blocks = nn.ModuleList([]) + + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resolution_idx=i, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=reversed_num_attention_heads[i], + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + ) + self.up_blocks.append(up_block) + 
prev_output_channel = output_channel + + for _ in range(layers_per_block + 1): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + @classmethod + def from_unet( + cls, + unet: UNet2DConditionModel, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + load_weights_from_unet: bool = True, + conditioning_channels: int = 5, + ): + r""" + Instantiate a [`BrushNetModel`] from [`UNet2DConditionModel`]. + + Parameters: + unet (`UNet2DConditionModel`): + The UNet model weights to copy to the [`BrushNetModel`]. All configuration options are also copied + where applicable. + """ + transformer_layers_per_block = ( + unet.config.transformer_layers_per_block if "transformer_layers_per_block" in unet.config else 1 + ) + encoder_hid_dim = unet.config.encoder_hid_dim if "encoder_hid_dim" in unet.config else None + encoder_hid_dim_type = unet.config.encoder_hid_dim_type if "encoder_hid_dim_type" in unet.config else None + addition_embed_type = unet.config.addition_embed_type if "addition_embed_type" in unet.config else None + addition_time_embed_dim = ( + unet.config.addition_time_embed_dim if "addition_time_embed_dim" in unet.config else None + ) + + down_block_types = ["DownBlock2D" for block_name in unet.config.down_block_types] + mid_block_type = "MidBlock2D" + up_block_types = ["UpBlock2D" for block_name in unet.config.down_block_types] + + brushnet = cls( + in_channels=unet.config.in_channels, + conditioning_channels=conditioning_channels, + flip_sin_to_cos=unet.config.flip_sin_to_cos, + freq_shift=unet.config.freq_shift, + down_block_types=down_block_types, + 
mid_block_type=mid_block_type, + up_block_types=up_block_types, + only_cross_attention=unet.config.only_cross_attention, + block_out_channels=unet.config.block_out_channels, + layers_per_block=unet.config.layers_per_block, + downsample_padding=unet.config.downsample_padding, + mid_block_scale_factor=unet.config.mid_block_scale_factor, + act_fn=unet.config.act_fn, + norm_num_groups=unet.config.norm_num_groups, + norm_eps=unet.config.norm_eps, + cross_attention_dim=unet.config.cross_attention_dim, + transformer_layers_per_block=transformer_layers_per_block, + encoder_hid_dim=encoder_hid_dim, + encoder_hid_dim_type=encoder_hid_dim_type, + attention_head_dim=unet.config.attention_head_dim, + num_attention_heads=unet.config.num_attention_heads, + use_linear_projection=unet.config.use_linear_projection, + class_embed_type=unet.config.class_embed_type, + addition_embed_type=addition_embed_type, + addition_time_embed_dim=addition_time_embed_dim, + num_class_embeds=unet.config.num_class_embeds, + upcast_attention=unet.config.upcast_attention, + resnet_time_scale_shift=unet.config.resnet_time_scale_shift, + projection_class_embeddings_input_dim=unet.config.projection_class_embeddings_input_dim, + brushnet_conditioning_channel_order=brushnet_conditioning_channel_order, + conditioning_embedding_out_channels=conditioning_embedding_out_channels, + ) + + if load_weights_from_unet: + conv_in_condition_weight = torch.zeros_like(brushnet.conv_in_condition.weight) + conv_in_condition_weight[:, :4, ...] = unet.conv_in.weight + conv_in_condition_weight[:, 4:8, ...] 
= unet.conv_in.weight + brushnet.conv_in_condition.weight = torch.nn.Parameter(conv_in_condition_weight) + brushnet.conv_in_condition.bias = unet.conv_in.bias + + brushnet.time_proj.load_state_dict(unet.time_proj.state_dict()) + brushnet.time_embedding.load_state_dict(unet.time_embedding.state_dict()) + + if brushnet.class_embedding: + brushnet.class_embedding.load_state_dict(unet.class_embedding.state_dict()) + + brushnet.down_blocks.load_state_dict(unet.down_blocks.state_dict(), strict=False) + brushnet.mid_block.load_state_dict(unet.mid_block.state_dict(), strict=False) + brushnet.up_blocks.load_state_dict(unet.up_blocks.state_dict(), strict=False) + + return brushnet + + @property + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.attn_processors + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. + """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, + processors: Dict[str, AttentionProcessor]): + if hasattr(module, "get_processor"): + processors[f"{name}.processor"] = module.get_processor(return_deprecated_lora=True) + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attn_processor + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. 
+ + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." + ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_default_attn_processor + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. 
+ """ + if all(proc.__class__ in ADDED_KV_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnAddedKVProcessor() + elif all(proc.__class__ in CROSS_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnProcessor() + else: + raise ValueError( + f"Cannot call `set_default_attn_processor` when attention processors are of type {next(iter(self.attn_processors.values()))}" + ) + + self.set_attn_processor(processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attention_slice + def set_attention_slice(self, slice_size: Union[str, int, List[int]]) -> None: + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. 
+ """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. 
+ # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value: bool = False) -> None: + if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D)): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + encoder_hidden_states: torch.Tensor, + brushnet_cond: torch.FloatTensor, + timestep=None, + time_emb=None, + conditioning_scale: float = 1.0, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + guess_mode: bool = False, + return_dict: bool = True, + debug=False, + ) -> Union[BrushNetOutput, Tuple[Tuple[torch.FloatTensor, ...], torch.FloatTensor]]: + + # check channel order + channel_order = self.config.brushnet_conditioning_channel_order + + if channel_order == "rgb": + # in rgb order by default + ... 
+ elif channel_order == "bgr": + brushnet_cond = torch.flip(brushnet_cond, dims=[1]) + else: + raise ValueError(f"unknown `brushnet_conditioning_channel_order`: {channel_order}") + + # prepare attention_mask + if attention_mask is not None: + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + if timestep is None and time_emb is None: + raise ValueError(f"`timestep` and `emb` are both None") + + # print("BN: sample.device", sample.device) + # print("BN: TE.device", self.time_embedding.linear_1.weight.device) + + if timestep is not None: + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. 
+ t_emb = t_emb.to(dtype=sample.dtype) + + # print("t_emb.device =",t_emb.device) + + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + # print('emb.shape', emb.shape) + + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + class_emb = self.class_embedding(class_labels).to(dtype=self.dtype) + emb = emb + class_emb + + if self.config.addition_embed_type is not None: + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + + elif self.config.addition_embed_type == "text_time": + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + + # print('text_embeds', text_embeds.shape, 'time_ids', time_ids.shape, 'time_embeds', time_embeds.shape, 'add__embeds', add_embeds.shape, 'aug_emb', aug_emb.shape) + + emb = emb + aug_emb if aug_emb is not None else emb + else: + emb = time_emb + + # 2. pre-process + + brushnet_cond = torch.concat([sample, brushnet_cond], 1) + sample = self.conv_in_condition(brushnet_cond) + + # 3. 
down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + + down_block_res_samples += res_samples + + # 4. PaintingNet down blocks + brushnet_down_block_res_samples = () + for down_block_res_sample, brushnet_down_block in zip(down_block_res_samples, self.brushnet_down_blocks): + down_block_res_sample = brushnet_down_block(down_block_res_sample) + brushnet_down_block_res_samples = brushnet_down_block_res_samples + (down_block_res_sample,) + + # 5. mid + if self.mid_block is not None: + if hasattr(self.mid_block, "has_cross_attention") and self.mid_block.has_cross_attention: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample = self.mid_block(sample, emb) + + # 6. BrushNet mid blocks + brushnet_mid_block_res_sample = self.brushnet_mid_block(sample) + + # 7. 
up + up_block_res_samples = () + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets):] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, + attention_mask=attention_mask, + return_res_samples=True + ) + else: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + return_res_samples=True + ) + + up_block_res_samples += up_res_samples + + # 8. BrushNet up blocks + brushnet_up_block_res_samples = () + for up_block_res_sample, brushnet_up_block in zip(up_block_res_samples, self.brushnet_up_blocks): + up_block_res_sample = brushnet_up_block(up_block_res_sample) + brushnet_up_block_res_samples = brushnet_up_block_res_samples + (up_block_res_sample,) + + # 6. 
scaling + if guess_mode and not self.config.global_pool_conditions: + scales = torch.logspace(-1, 0, + len(brushnet_down_block_res_samples) + 1 + len(brushnet_up_block_res_samples), + device=sample.device) # 0.1 to 1.0 + scales = scales * conditioning_scale + + brushnet_down_block_res_samples = [sample * scale for sample, scale in + zip(brushnet_down_block_res_samples, + scales[:len(brushnet_down_block_res_samples)])] + brushnet_mid_block_res_sample = brushnet_mid_block_res_sample * scales[ + len(brushnet_down_block_res_samples)] + brushnet_up_block_res_samples = [sample * scale for sample, scale in zip(brushnet_up_block_res_samples, + scales[ + len(brushnet_down_block_res_samples) + 1:])] + else: + brushnet_down_block_res_samples = [sample * conditioning_scale for sample in + brushnet_down_block_res_samples] + brushnet_mid_block_res_sample = brushnet_mid_block_res_sample * conditioning_scale + brushnet_up_block_res_samples = [sample * conditioning_scale for sample in + brushnet_up_block_res_samples] + + if self.config.global_pool_conditions: + brushnet_down_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_down_block_res_samples + ] + brushnet_mid_block_res_sample = torch.mean(brushnet_mid_block_res_sample, dim=(2, 3), keepdim=True) + brushnet_up_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_up_block_res_samples + ] + + if not return_dict: + return (brushnet_down_block_res_samples, brushnet_mid_block_res_sample, brushnet_up_block_res_samples) + + return BrushNetOutput( + down_block_res_samples=brushnet_down_block_res_samples, + mid_block_res_sample=brushnet_mid_block_res_sample, + up_block_res_samples=brushnet_up_block_res_samples + ) + + # PowerPaintModel + class PowerPaintModel(ModelMixin, ConfigMixin): + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + in_channels: int = 4, + conditioning_channels: int = 5, + flip_sin_to_cos: bool = 
True, + freq_shift: int = 0, + down_block_types: Tuple[str, ...] = ( + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D", + ), + mid_block_type: Optional[str] = "UNetMidBlock2DCrossAttn", + up_block_types: Tuple[str, ...] = ( + "UpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + "CrossAttnUpBlock2D", + ), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int, ...] = (320, 640, 1280, 1280), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + transformer_layers_per_block: Union[int, Tuple[int, ...]] = 1, + encoder_hid_dim: Optional[int] = None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int, ...]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int, ...]]] = None, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + projection_class_embeddings_input_dim: Optional[int] = None, + brushnet_conditioning_channel_order: str = "rgb", + conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + global_pool_conditions: bool = False, + addition_embed_type_num_heads: int = 64, + ): + super().__init__() + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. 
The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. + num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." + ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." 
+ ) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + # input + conv_in_kernel = 3 + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in_condition = nn.Conv2d( + in_channels + conditioning_channels, + block_out_channels[0], + kernel_size=conv_in_kernel, + padding=conv_in_padding, + ) + + # time + time_embed_dim = block_out_channels[0] * 4 + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + ) + + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + logger.info("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when `encoder_hid_dim_type` is set to {encoder_hid_dim_type}." + ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kadinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." 
+ ) + else: + self.encoder_hid_proj = None + + # class embedding + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. + self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. 
To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kadinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + self.down_blocks = nn.ModuleList([]) + self.brushnet_down_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + # down + output_channel = block_out_channels[0] + + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + 
num_attention_heads=num_attention_heads[i], + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + downsample_padding=downsample_padding, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + self.down_blocks.append(down_block) + + for _ in range(layers_per_block): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_down_blocks.append(brushnet_block) + + # mid + mid_block_channel = block_out_channels[-1] + + brushnet_block = nn.Conv2d(mid_block_channel, mid_block_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_mid_block = brushnet_block + + self.mid_block = get_mid_block( + mid_block_type, + transformer_layers_per_block=transformer_layers_per_block[-1], + in_channels=mid_block_channel, + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift=resnet_time_scale_shift, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads[-1], + resnet_groups=norm_num_groups, + use_linear_projection=use_linear_projection, + upcast_attention=upcast_attention, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_transformer_layers_per_block = list(reversed(transformer_layers_per_block)) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = 
reversed_block_out_channels[0] + + self.up_blocks = nn.ModuleList([]) + self.brushnet_up_blocks = nn.ModuleList([]) + + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resolution_idx=i, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + num_attention_heads=reversed_num_attention_heads[i], + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + for _ in range(layers_per_block + 1): + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + if not is_final_block: + brushnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1) + brushnet_block = zero_module(brushnet_block) + self.brushnet_up_blocks.append(brushnet_block) + + @classmethod + def from_unet( + cls, + unet: UNet2DConditionModel, + brushnet_conditioning_channel_order: str = "rgb", + 
conditioning_embedding_out_channels: Optional[Tuple[int, ...]] = (16, 32, 96, 256), + load_weights_from_unet: bool = True, + conditioning_channels: int = 5, + ): + r""" + Instantiate a [`BrushNetModel`] from [`UNet2DConditionModel`]. + + Parameters: + unet (`UNet2DConditionModel`): + The UNet model weights to copy to the [`BrushNetModel`]. All configuration options are also copied + where applicable. + """ + transformer_layers_per_block = ( + unet.config.transformer_layers_per_block if "transformer_layers_per_block" in unet.config else 1 + ) + encoder_hid_dim = unet.config.encoder_hid_dim if "encoder_hid_dim" in unet.config else None + encoder_hid_dim_type = unet.config.encoder_hid_dim_type if "encoder_hid_dim_type" in unet.config else None + addition_embed_type = unet.config.addition_embed_type if "addition_embed_type" in unet.config else None + addition_time_embed_dim = ( + unet.config.addition_time_embed_dim if "addition_time_embed_dim" in unet.config else None + ) + + brushnet = cls( + in_channels=unet.config.in_channels, + conditioning_channels=conditioning_channels, + flip_sin_to_cos=unet.config.flip_sin_to_cos, + freq_shift=unet.config.freq_shift, + # down_block_types=['DownBlock2D','DownBlock2D','DownBlock2D','DownBlock2D'], + down_block_types=[ + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D", + ], + # mid_block_type='MidBlock2D', + mid_block_type="UNetMidBlock2DCrossAttn", + # up_block_types=['UpBlock2D','UpBlock2D','UpBlock2D','UpBlock2D'], + up_block_types=["UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D"], + only_cross_attention=unet.config.only_cross_attention, + block_out_channels=unet.config.block_out_channels, + layers_per_block=unet.config.layers_per_block, + downsample_padding=unet.config.downsample_padding, + mid_block_scale_factor=unet.config.mid_block_scale_factor, + act_fn=unet.config.act_fn, + norm_num_groups=unet.config.norm_num_groups, + 
norm_eps=unet.config.norm_eps, + cross_attention_dim=unet.config.cross_attention_dim, + transformer_layers_per_block=transformer_layers_per_block, + encoder_hid_dim=encoder_hid_dim, + encoder_hid_dim_type=encoder_hid_dim_type, + attention_head_dim=unet.config.attention_head_dim, + num_attention_heads=unet.config.num_attention_heads, + use_linear_projection=unet.config.use_linear_projection, + class_embed_type=unet.config.class_embed_type, + addition_embed_type=addition_embed_type, + addition_time_embed_dim=addition_time_embed_dim, + num_class_embeds=unet.config.num_class_embeds, + upcast_attention=unet.config.upcast_attention, + resnet_time_scale_shift=unet.config.resnet_time_scale_shift, + projection_class_embeddings_input_dim=unet.config.projection_class_embeddings_input_dim, + brushnet_conditioning_channel_order=brushnet_conditioning_channel_order, + conditioning_embedding_out_channels=conditioning_embedding_out_channels, + ) + + if load_weights_from_unet: + conv_in_condition_weight = torch.zeros_like(brushnet.conv_in_condition.weight) + conv_in_condition_weight[:, :4, ...] = unet.conv_in.weight + conv_in_condition_weight[:, 4:8, ...] 
= unet.conv_in.weight + brushnet.conv_in_condition.weight = torch.nn.Parameter(conv_in_condition_weight) + brushnet.conv_in_condition.bias = unet.conv_in.bias + + brushnet.time_proj.load_state_dict(unet.time_proj.state_dict()) + brushnet.time_embedding.load_state_dict(unet.time_embedding.state_dict()) + + if brushnet.class_embedding: + brushnet.class_embedding.load_state_dict(unet.class_embedding.state_dict()) + + brushnet.down_blocks.load_state_dict(unet.down_blocks.state_dict(), strict=False) + brushnet.mid_block.load_state_dict(unet.mid_block.state_dict(), strict=False) + brushnet.up_blocks.load_state_dict(unet.up_blocks.state_dict(), strict=False) + + return brushnet.to(unet.dtype) + + @property + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.attn_processors + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. + """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]): + if hasattr(module, "get_processor"): + processors[f"{name}.processor"] = module.get_processor(return_deprecated_lora=True) + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attn_processor + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. 
+ + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." + ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_default_attn_processor + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. 
+ """ + if all(proc.__class__ in ADDED_KV_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnAddedKVProcessor() + elif all(proc.__class__ in CROSS_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnProcessor() + else: + raise ValueError( + f"Cannot call `set_default_attn_processor` when attention processors are of type {next(iter(self.attn_processors.values()))}" + ) + + self.set_attn_processor(processor) + + # Copied from diffusers.models.unets.unet_2d_condition.UNet2DConditionModel.set_attention_slice + def set_attention_slice(self, slice_size: Union[str, int, List[int]]) -> None: + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. 
+ """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. 
+ # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value: bool = False) -> None: + if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D)): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + brushnet_cond: torch.FloatTensor, + conditioning_scale: float = 1.0, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + guess_mode: bool = False, + return_dict: bool = True, + debug=False, + ) -> Union[BrushNetOutput, Tuple[Tuple[torch.FloatTensor, ...], torch.FloatTensor]]: + """ + The [`BrushNetModel`] forward method. + + Args: + sample (`torch.FloatTensor`): + The noisy input tensor. + timestep (`Union[torch.Tensor, float, int]`): + The number of timesteps to denoise an input. + encoder_hidden_states (`torch.Tensor`): + The encoder hidden states. + brushnet_cond (`torch.FloatTensor`): + The conditional input tensor of shape `(batch_size, sequence_length, hidden_size)`. + conditioning_scale (`float`, defaults to `1.0`): + The scale factor for BrushNet outputs. + class_labels (`torch.Tensor`, *optional*, defaults to `None`): + Optional class labels for conditioning. Their embeddings will be summed with the timestep embeddings. 
+ timestep_cond (`torch.Tensor`, *optional*, defaults to `None`): + Additional conditional embeddings for timestep. If provided, the embeddings will be summed with the + timestep_embedding passed through the `self.time_embedding` layer to obtain the final timestep + embeddings. + attention_mask (`torch.Tensor`, *optional*, defaults to `None`): + An attention mask of shape `(batch, key_tokens)` is applied to `encoder_hidden_states`. If `1` the mask + is kept, otherwise if `0` it is discarded. Mask will be converted into a bias, which adds large + negative values to the attention scores corresponding to "discard" tokens. + added_cond_kwargs (`dict`): + Additional conditions for the Stable Diffusion XL UNet. + cross_attention_kwargs (`dict[str]`, *optional*, defaults to `None`): + A kwargs dictionary that if specified is passed along to the `AttnProcessor`. + guess_mode (`bool`, defaults to `False`): + In this mode, the BrushNet encoder tries its best to recognize the input content of the input even if + you remove all prompts. A `guidance_scale` between 3.0 and 5.0 is recommended. + return_dict (`bool`, defaults to `True`): + Whether or not to return a [`~models.brushnet.BrushNetOutput`] instead of a plain tuple. + + Returns: + [`~models.brushnet.BrushNetOutput`] **or** `tuple`: + If `return_dict` is `True`, a [`~models.brushnet.BrushNetOutput`] is returned, otherwise a tuple is + returned where the first element is the sample tensor. + """ + # check channel order + channel_order = self.config.brushnet_conditioning_channel_order + + if channel_order == "rgb": + # in rgb order by default + ... 
+ elif channel_order == "bgr": + brushnet_cond = torch.flip(brushnet_cond, dims=[1]) + else: + raise ValueError(f"unknown `brushnet_conditioning_channel_order`: {channel_order}") + + if debug: print('BrushNet CA: attn mask') + + # prepare attention_mask + if attention_mask is not None: + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + if debug: print('BrushNet CA: time') + + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. 
+ t_emb = t_emb.to(dtype=sample.dtype) + + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + class_emb = self.class_embedding(class_labels).to(dtype=self.dtype) + emb = emb + class_emb + + if self.config.addition_embed_type is not None: + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + + elif self.config.addition_embed_type == "text_time": + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + + emb = emb + aug_emb if aug_emb is not None else emb + + if debug: print('BrushNet CA: pre-process') + + + # 2. pre-process + brushnet_cond = torch.concat([sample, brushnet_cond], 1) + sample = self.conv_in_condition(brushnet_cond) + + if debug: print('BrushNet CA: down') + + # 3. 
down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + if debug: print('BrushNet CA (down block with XA): ', type(downsample_block)) + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + debug=debug, + ) + else: + if debug: print('BrushNet CA (down block): ', type(downsample_block)) + sample, res_samples = downsample_block(hidden_states=sample, temb=emb, debug=debug) + + down_block_res_samples += res_samples + + if debug: print('BrushNet CA: PP down') + + # 4. PaintingNet down blocks + brushnet_down_block_res_samples = () + for down_block_res_sample, brushnet_down_block in zip(down_block_res_samples, self.brushnet_down_blocks): + down_block_res_sample = brushnet_down_block(down_block_res_sample) + brushnet_down_block_res_samples = brushnet_down_block_res_samples + (down_block_res_sample,) + + if debug: print('BrushNet CA: PP mid') + + # 5. mid + if self.mid_block is not None: + if hasattr(self.mid_block, "has_cross_attention") and self.mid_block.has_cross_attention: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + ) + else: + sample = self.mid_block(sample, emb) + + if debug: print('BrushNet CA: mid') + + # 6. BrushNet mid blocks + brushnet_mid_block_res_sample = self.brushnet_mid_block(sample) + + if debug: print('BrushNet CA: PP up') + + # 7. 
up + up_block_res_samples = () + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, + attention_mask=attention_mask, + return_res_samples=True, + ) + else: + sample, up_res_samples = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + return_res_samples=True, + ) + + up_block_res_samples += up_res_samples + + if debug: print('BrushNet CA: up') + + # 8. BrushNet up blocks + brushnet_up_block_res_samples = () + for up_block_res_sample, brushnet_up_block in zip(up_block_res_samples, self.brushnet_up_blocks): + up_block_res_sample = brushnet_up_block(up_block_res_sample) + brushnet_up_block_res_samples = brushnet_up_block_res_samples + (up_block_res_sample,) + + if debug: print('BrushNet CA: scaling') + + # 6. 
scaling + if guess_mode and not self.config.global_pool_conditions: + scales = torch.logspace( + -1, + 0, + len(brushnet_down_block_res_samples) + 1 + len(brushnet_up_block_res_samples), + device=sample.device, + ) # 0.1 to 1.0 + scales = scales * conditioning_scale + + brushnet_down_block_res_samples = [ + sample * scale + for sample, scale in zip( + brushnet_down_block_res_samples, scales[: len(brushnet_down_block_res_samples)] + ) + ] + brushnet_mid_block_res_sample = ( + brushnet_mid_block_res_sample * scales[len(brushnet_down_block_res_samples)] + ) + brushnet_up_block_res_samples = [ + sample * scale + for sample, scale in zip( + brushnet_up_block_res_samples, scales[len(brushnet_down_block_res_samples) + 1 :] + ) + ] + else: + brushnet_down_block_res_samples = [ + sample * conditioning_scale for sample in brushnet_down_block_res_samples + ] + brushnet_mid_block_res_sample = brushnet_mid_block_res_sample * conditioning_scale + brushnet_up_block_res_samples = [sample * conditioning_scale for sample in brushnet_up_block_res_samples] + + if self.config.global_pool_conditions: + brushnet_down_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_down_block_res_samples + ] + brushnet_mid_block_res_sample = torch.mean(brushnet_mid_block_res_sample, dim=(2, 3), keepdim=True) + brushnet_up_block_res_samples = [ + torch.mean(sample, dim=(2, 3), keepdim=True) for sample in brushnet_up_block_res_samples + ] + + if debug: print('BrushNet CA: finish') + + if not return_dict: + return (brushnet_down_block_res_samples, brushnet_mid_block_res_sample, brushnet_up_block_res_samples) + + return BrushNetOutput( + down_block_res_samples=brushnet_down_block_res_samples, + mid_block_res_sample=brushnet_mid_block_res_sample, + up_block_res_samples=brushnet_up_block_res_samples, + ) + +except ImportError: + BrushNetModel = None + PowerPaintModel = None + print("\33[33mModule 'diffusers' load failed. 
If you don't have it installed, do it:\033[0m") + print("\33[33mpip install diffusers\033[0m") \ No newline at end of file diff --git a/py/brushnet/model_patch.py b/py/brushnet/model_patch.py new file mode 100644 index 0000000..5e13a06 --- /dev/null +++ b/py/brushnet/model_patch.py @@ -0,0 +1,121 @@ +import torch +import comfy + +# Check and add 'model_patch' to model.model_options['transformer_options'] +def add_model_patch_option(model): + if 'transformer_options' not in model.model_options: + model.model_options['transformer_options'] = {} + to = model.model_options['transformer_options'] + if "model_patch" not in to: + to["model_patch"] = {} + return to + + +# Patch model with model_function_wrapper +def patch_model_function_wrapper(model, forward_patch, remove=False): + def brushnet_model_function_wrapper(apply_model_method, options_dict): + to = options_dict['c']['transformer_options'] + + control = None + if 'control' in options_dict['c']: + control = options_dict['c']['control'] + + x = options_dict['input'] + timestep = options_dict['timestep'] + + # check if there are patches to execute + if 'model_patch' not in to or 'forward' not in to['model_patch']: + return apply_model_method(x, timestep, **options_dict['c']) + + mp = to['model_patch'] + unet = mp['unet'] + + all_sigmas = mp['all_sigmas'] + sigma = to['sigmas'][0].item() + total_steps = all_sigmas.shape[0] - 1 + step = torch.argmin((all_sigmas - sigma).abs()).item() + + mp['step'] = step + mp['total_steps'] = total_steps + + # comfy.model_base.apply_model + xc = model.model.model_sampling.calculate_input(timestep, x) + if 'c_concat' in options_dict['c'] and options_dict['c']['c_concat'] is not None: + xc = torch.cat([xc] + [options_dict['c']['c_concat']], dim=1) + t = model.model.model_sampling.timestep(timestep).float() + # execute all patches + for method in mp['forward']: + method(unet, xc, t, to, control) + + return apply_model_method(x, timestep, **options_dict['c']) + + if 
"model_function_wrapper" in model.model_options and model.model_options["model_function_wrapper"]: + print('BrushNet is going to replace existing model_function_wrapper:', + model.model_options["model_function_wrapper"]) + model.set_model_unet_function_wrapper(brushnet_model_function_wrapper) + + to = add_model_patch_option(model) + mp = to['model_patch'] + + if isinstance(model.model.model_config, comfy.supported_models.SD15): + mp['SDXL'] = False + elif isinstance(model.model.model_config, comfy.supported_models.SDXL): + mp['SDXL'] = True + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: ", type(model.model.model_config)) + + if 'forward' not in mp: + mp['forward'] = [] + + if remove: + if forward_patch in mp['forward']: + mp['forward'].remove(forward_patch) + else: + mp['forward'].append(forward_patch) + + mp['unet'] = model.model.diffusion_model + mp['step'] = 0 + mp['total_steps'] = 1 + + # apply patches to code + if comfy.samplers.sample.__doc__ is None or 'BrushNet' not in comfy.samplers.sample.__doc__: + comfy.samplers.original_sample = comfy.samplers.sample + comfy.samplers.sample = modified_sample + + if comfy.ldm.modules.diffusionmodules.openaimodel.apply_control.__doc__ is None or \ + 'BrushNet' not in comfy.ldm.modules.diffusionmodules.openaimodel.apply_control.__doc__: + comfy.ldm.modules.diffusionmodules.openaimodel.original_apply_control = comfy.ldm.modules.diffusionmodules.openaimodel.apply_control + comfy.ldm.modules.diffusionmodules.openaimodel.apply_control = modified_apply_control + + +# Model needs current step number and cfg at inference step. It is possible to write a custom KSampler but I'd like to use ComfyUI's one. 
+# The first versions had modified_common_ksampler, but it broke custom KSampler nodes +def modified_sample(model, noise, positive, negative, cfg, device, sampler, sigmas, model_options={}, + latent_image=None, denoise_mask=None, callback=None, disable_pbar=False, seed=None): + ''' Modified by BrushNet nodes''' + cfg_guider = comfy.samplers.CFGGuider(model) + cfg_guider.set_conds(positive, negative) + cfg_guider.set_cfg(cfg) + + ### Modified part ###################################################################### + to = add_model_patch_option(model) + to['model_patch']['all_sigmas'] = sigmas + ####################################################################################### + + return cfg_guider.sample(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed) + +# To use Controlnet with RAUNet it is much easier to modify apply_control a little +def modified_apply_control(h, control, name): + '''Modified by BrushNet nodes''' + if control is not None and name in control and len(control[name]) > 0: + ctrl = control[name].pop() + if ctrl is not None: + if h.shape[2] != ctrl.shape[2] or h.shape[3] != ctrl.shape[3]: + ctrl = torch.nn.functional.interpolate(ctrl, size=(h.shape[2], h.shape[3]), mode='bicubic').to( + h.dtype).to(h.device) + try: + h += ctrl + except: + print.warning("warning control could not be applied {} {}".format(h.shape, ctrl.shape)) + return h \ No newline at end of file diff --git a/py/brushnet/powerpaint_utils.py b/py/brushnet/powerpaint_utils.py new file mode 100644 index 0000000..02c3926 --- /dev/null +++ b/py/brushnet/powerpaint_utils.py @@ -0,0 +1,467 @@ +import copy +import random + +import torch +import torch.nn as nn +from transformers import CLIPTokenizer +from typing import Any, List, Optional, Union + + +class TokenizerWrapper: + """Tokenizer wrapper for CLIPTokenizer. Only support CLIPTokenizer + currently. 
This wrapper is modified from https://github.com/huggingface/dif + fusers/blob/e51f19aee82c8dd874b715a09dbc521d88835d68/src/diffusers/loaders. + py#L358 # noqa. + + Args: + from_pretrained (Union[str, os.PathLike], optional): The *model id* + of a pretrained model or a path to a *directory* containing + model weights and config. Defaults to None. + from_config (Union[str, os.PathLike], optional): The *model id* + of a pretrained model or a path to a *directory* containing + model weights and config. Defaults to None. + + *args, **kwargs: If `from_pretrained` is passed, *args and **kwargs + will be passed to `from_pretrained` function. Otherwise, *args + and **kwargs will be used to initialize the model by + `self._module_cls(*args, **kwargs)`. + """ + + def __init__(self, tokenizer: CLIPTokenizer): + self.wrapped = tokenizer + self.token_map = {} + + def __getattr__(self, name: str) -> Any: + if name in self.__dict__: + return getattr(self, name) + # if name == "wrapped": + # return getattr(self, 'wrapped')#super().__getattr__("wrapped") + + try: + return getattr(self.wrapped, name) + except AttributeError: + raise AttributeError( + "'name' cannot be found in both " + f"'{self.__class__.__name__}' and " + f"'{self.__class__.__name__}.tokenizer'." + ) + + def try_adding_tokens(self, tokens: Union[str, List[str]], *args, **kwargs): + """Attempt to add tokens to the tokenizer. + + Args: + tokens (Union[str, List[str]]): The tokens to be added. + """ + num_added_tokens = self.wrapped.add_tokens(tokens, *args, **kwargs) + assert num_added_tokens != 0, ( + f"The tokenizer already contains the token {tokens}. Please pass " + "a different `placeholder_token` that is not already in the " + "tokenizer." + ) + + def get_token_info(self, token: str) -> dict: + """Get the information of a token, including its start and end index in + the current tokenizer. + + Args: + token (str): The token to be queried. 
+ + Returns: + dict: The information of the token, including its start and end + index in current tokenizer. + """ + token_ids = self.__call__(token).input_ids + start, end = token_ids[1], token_ids[-2] + 1 + return {"name": token, "start": start, "end": end} + + def add_placeholder_token(self, placeholder_token: str, *args, num_vec_per_token: int = 1, **kwargs): + """Add placeholder tokens to the tokenizer. + + Args: + placeholder_token (str): The placeholder token to be added. + num_vec_per_token (int, optional): The number of vectors of + the added placeholder token. + *args, **kwargs: The arguments for `self.wrapped.add_tokens`. + """ + output = [] + if num_vec_per_token == 1: + self.try_adding_tokens(placeholder_token, *args, **kwargs) + output.append(placeholder_token) + else: + output = [] + for i in range(num_vec_per_token): + ith_token = placeholder_token + f"_{i}" + self.try_adding_tokens(ith_token, *args, **kwargs) + output.append(ith_token) + + for token in self.token_map: + if token in placeholder_token: + raise ValueError( + f"The tokenizer already has placeholder token {token} " + f"that can get confused with {placeholder_token} " + "keep placeholder tokens independent" + ) + self.token_map[placeholder_token] = output + + def replace_placeholder_tokens_in_text( + self, text: Union[str, List[str]], vector_shuffle: bool = False, prop_tokens_to_load: float = 1.0 + ) -> Union[str, List[str]]: + """Replace the keywords in text with placeholder tokens. This function + will be called in `self.__call__` and `self.encode`. + + Args: + text (Union[str, List[str]]): The text to be processed. + vector_shuffle (bool, optional): Whether to shuffle the vectors. + Defaults to False. + prop_tokens_to_load (float, optional): The proportion of tokens to + be loaded. If 1.0, all tokens will be loaded. Defaults to 1.0. + + Returns: + Union[str, List[str]]: The processed text. 
+ """ + if isinstance(text, list): + output = [] + for i in range(len(text)): + output.append(self.replace_placeholder_tokens_in_text(text[i], vector_shuffle=vector_shuffle)) + return output + + for placeholder_token in self.token_map: + if placeholder_token in text: + tokens = self.token_map[placeholder_token] + tokens = tokens[: 1 + int(len(tokens) * prop_tokens_to_load)] + if vector_shuffle: + tokens = copy.copy(tokens) + random.shuffle(tokens) + text = text.replace(placeholder_token, " ".join(tokens)) + return text + + def replace_text_with_placeholder_tokens(self, text: Union[str, List[str]]) -> Union[str, List[str]]: + """Replace the placeholder tokens in text with the original keywords. + This function will be called in `self.decode`. + + Args: + text (Union[str, List[str]]): The text to be processed. + + Returns: + Union[str, List[str]]: The processed text. + """ + if isinstance(text, list): + output = [] + for i in range(len(text)): + output.append(self.replace_text_with_placeholder_tokens(text[i])) + return output + + for placeholder_token, tokens in self.token_map.items(): + merged_tokens = " ".join(tokens) + if merged_tokens in text: + text = text.replace(merged_tokens, placeholder_token) + return text + + def __call__( + self, + text: Union[str, List[str]], + *args, + vector_shuffle: bool = False, + prop_tokens_to_load: float = 1.0, + **kwargs, + ): + """The call function of the wrapper. + + Args: + text (Union[str, List[str]]): The text to be tokenized. + vector_shuffle (bool, optional): Whether to shuffle the vectors. + Defaults to False. + prop_tokens_to_load (float, optional): The proportion of tokens to + be loaded. If 1.0, all tokens will be loaded. Defaults to 1.0 + *args, **kwargs: The arguments for `self.wrapped.__call__`. 
+ """ + replaced_text = self.replace_placeholder_tokens_in_text( + text, vector_shuffle=vector_shuffle, prop_tokens_to_load=prop_tokens_to_load + ) + + return self.wrapped.__call__(replaced_text, *args, **kwargs) + + def encode(self, text: Union[str, List[str]], *args, **kwargs): + """Encode the passed text to token index. + + Args: + text (Union[str, List[str]]): The text to be encode. + *args, **kwargs: The arguments for `self.wrapped.__call__`. + """ + replaced_text = self.replace_placeholder_tokens_in_text(text) + return self.wrapped(replaced_text, *args, **kwargs) + + def decode(self, token_ids, return_raw: bool = False, *args, **kwargs) -> Union[str, List[str]]: + """Decode the token index to text. + + Args: + token_ids: The token index to be decoded. + return_raw: Whether keep the placeholder token in the text. + Defaults to False. + *args, **kwargs: The arguments for `self.wrapped.decode`. + + Returns: + Union[str, List[str]]: The decoded text. + """ + text = self.wrapped.decode(token_ids, *args, **kwargs) + if return_raw: + return text + replaced_text = self.replace_text_with_placeholder_tokens(text) + return replaced_text + + def __repr__(self): + """The representation of the wrapper.""" + s = super().__repr__() + prefix = f"Wrapped Module Class: {self._module_cls}\n" + prefix += f"Wrapped Module Name: {self._module_name}\n" + if self._from_pretrained: + prefix += f"From Pretrained: {self._from_pretrained}\n" + s = prefix + s + return s + + +class EmbeddingLayerWithFixes(nn.Module): + """The revised embedding layer to support external embeddings. This design + of this class is inspired by https://github.com/AUTOMATIC1111/stable- + diffusion-webui/blob/22bcc7be428c94e9408f589966c2040187245d81/modules/sd_hi + jack.py#L224 # noqa. + + Args: + wrapped (nn.Emebdding): The embedding layer to be wrapped. + external_embeddings (Union[dict, List[dict]], optional): The external + embeddings added to this layer. Defaults to None. 
+ """ + + def __init__(self, wrapped: nn.Embedding, external_embeddings: Optional[Union[dict, List[dict]]] = None): + super().__init__() + self.wrapped = wrapped + self.num_embeddings = wrapped.weight.shape[0] + + self.external_embeddings = [] + if external_embeddings: + self.add_embeddings(external_embeddings) + + self.trainable_embeddings = nn.ParameterDict() + + @property + def weight(self): + """Get the weight of wrapped embedding layer.""" + return self.wrapped.weight + + def check_duplicate_names(self, embeddings: List[dict]): + """Check whether duplicate names exist in list of 'external + embeddings'. + + Args: + embeddings (List[dict]): A list of embedding to be check. + """ + names = [emb["name"] for emb in embeddings] + assert len(names) == len(set(names)), ( + "Found duplicated names in 'external_embeddings'. Name list: " f"'{names}'" + ) + + def check_ids_overlap(self, embeddings): + """Check whether overlap exist in token ids of 'external_embeddings'. + + Args: + embeddings (List[dict]): A list of embedding to be check. + """ + ids_range = [[emb["start"], emb["end"], emb["name"]] for emb in embeddings] + ids_range.sort() # sort by 'start' + # check if 'end' has overlapping + for idx in range(len(ids_range) - 1): + name1, name2 = ids_range[idx][-1], ids_range[idx + 1][-1] + assert ids_range[idx][1] <= ids_range[idx + 1][0], ( + f"Found ids overlapping between embeddings '{name1}' " f"and '{name2}'." + ) + + def add_embeddings(self, embeddings: Optional[Union[dict, List[dict]]]): + """Add external embeddings to this layer. + Use case: + Args: + embeddings (Union[dict, list[dict]]): The external embeddings to + be added. Each dict must contain the following 4 fields: 'name' + (the name of this embedding), 'embedding' (the embedding + tensor), 'start' (the start token id of this embedding), 'end' + (the end token id of this embedding). 
For example: + `{name: NAME, start: START, end: END, embedding: torch.Tensor}` + """ + if isinstance(embeddings, dict): + embeddings = [embeddings] + + self.external_embeddings += embeddings + self.check_duplicate_names(self.external_embeddings) + self.check_ids_overlap(self.external_embeddings) + + # set for trainable + added_trainable_emb_info = [] + for embedding in embeddings: + trainable = embedding.get("trainable", False) + if trainable: + name = embedding["name"] + embedding["embedding"] = torch.nn.Parameter(embedding["embedding"]) + self.trainable_embeddings[name] = embedding["embedding"] + added_trainable_emb_info.append(name) + + added_emb_info = [emb["name"] for emb in embeddings] + added_emb_info = ", ".join(added_emb_info) + print(f"Successfully add external embeddings: {added_emb_info}.", "current") + + if added_trainable_emb_info: + added_trainable_emb_info = ", ".join(added_trainable_emb_info) + print("Successfully add trainable external embeddings: " f"{added_trainable_emb_info}", "current") + + def replace_input_ids(self, input_ids: torch.Tensor) -> torch.Tensor: + """Replace external input ids to 0. + + Args: + input_ids (torch.Tensor): The input ids to be replaced. + + Returns: + torch.Tensor: The replaced input ids. + """ + input_ids_fwd = input_ids.clone() + input_ids_fwd[input_ids_fwd >= self.num_embeddings] = 0 + return input_ids_fwd + + def replace_embeddings( + self, input_ids: torch.Tensor, embedding: torch.Tensor, external_embedding: dict + ) -> torch.Tensor: + """Replace external embedding to the embedding layer. Noted that, in + this function we use `torch.cat` to avoid inplace modification. + + Args: + input_ids (torch.Tensor): The original token ids. Shape like + [LENGTH, ]. + embedding (torch.Tensor): The embedding of token ids after + `replace_input_ids` function. + external_embedding (dict): The external embedding to be replaced. + + Returns: + torch.Tensor: The replaced embedding. 
+ """ + new_embedding = [] + + name = external_embedding["name"] + start = external_embedding["start"] + end = external_embedding["end"] + target_ids_to_replace = [i for i in range(start, end)] + ext_emb = external_embedding["embedding"] + + # do not need to replace + if not (input_ids == start).any(): + return embedding + + # start replace + s_idx, e_idx = 0, 0 + while e_idx < len(input_ids): + if input_ids[e_idx] == start: + if e_idx != 0: + # add embedding do not need to replace + new_embedding.append(embedding[s_idx:e_idx]) + + # check if the next embedding need to replace is valid + actually_ids_to_replace = [int(i) for i in input_ids[e_idx: e_idx + end - start]] + assert actually_ids_to_replace == target_ids_to_replace, ( + f"Invalid 'input_ids' in position: {s_idx} to {e_idx}. " + f"Expect '{target_ids_to_replace}' for embedding " + f"'{name}' but found '{actually_ids_to_replace}'." + ) + + new_embedding.append(ext_emb) + + s_idx = e_idx + end - start + e_idx = s_idx + 1 + else: + e_idx += 1 + + if e_idx == len(input_ids): + new_embedding.append(embedding[s_idx:e_idx]) + + return torch.cat(new_embedding, dim=0) + + def forward(self, input_ids: torch.Tensor, external_embeddings: Optional[List[dict]] = None): + """The forward function. + + Args: + input_ids (torch.Tensor): The token ids shape like [bz, LENGTH] or + [LENGTH, ]. + external_embeddings (Optional[List[dict]]): The external + embeddings. If not passed, only `self.external_embeddings` + will be used. Defaults to None. + + input_ids: shape like [bz, LENGTH] or [LENGTH]. 
+ """ + assert input_ids.ndim in [1, 2] + if input_ids.ndim == 1: + input_ids = input_ids.unsqueeze(0) + + if external_embeddings is None and not self.external_embeddings: + return self.wrapped(input_ids) + + input_ids_fwd = self.replace_input_ids(input_ids) + inputs_embeds = self.wrapped(input_ids_fwd) + + vecs = [] + + if external_embeddings is None: + external_embeddings = [] + elif isinstance(external_embeddings, dict): + external_embeddings = [external_embeddings] + embeddings = self.external_embeddings + external_embeddings + + for input_id, embedding in zip(input_ids, inputs_embeds): + new_embedding = embedding + for external_embedding in embeddings: + new_embedding = self.replace_embeddings(input_id, new_embedding, external_embedding) + vecs.append(new_embedding) + + return torch.stack(vecs) + + +def add_tokens( + tokenizer, text_encoder, placeholder_tokens: list, initialize_tokens: list = None, + num_vectors_per_token: int = 1 +): + """Add token for training. + + # TODO: support add tokens as dict, then we can load pretrained tokens. + """ + if initialize_tokens is not None: + assert len(initialize_tokens) == len( + placeholder_tokens + ), "placeholder_token should be the same length as initialize_token" + for ii in range(len(placeholder_tokens)): + tokenizer.add_placeholder_token(placeholder_tokens[ii], num_vec_per_token=num_vectors_per_token) + + # text_encoder.set_embedding_layer() + embedding_layer = text_encoder.text_model.embeddings.token_embedding + text_encoder.text_model.embeddings.token_embedding = EmbeddingLayerWithFixes(embedding_layer) + embedding_layer = text_encoder.text_model.embeddings.token_embedding + + assert embedding_layer is not None, ( + "Do not support get embedding layer for current text encoder. " "Please check your configuration." 
+ ) + initialize_embedding = [] + if initialize_tokens is not None: + for ii in range(len(placeholder_tokens)): + init_id = tokenizer(initialize_tokens[ii]).input_ids[1] + temp_embedding = embedding_layer.weight[init_id] + initialize_embedding.append(temp_embedding[None, ...].repeat(num_vectors_per_token, 1)) + else: + for ii in range(len(placeholder_tokens)): + init_id = tokenizer("a").input_ids[1] + temp_embedding = embedding_layer.weight[init_id] + len_emb = temp_embedding.shape[0] + init_weight = (torch.rand(num_vectors_per_token, len_emb) - 0.5) / 2.0 + initialize_embedding.append(init_weight) + + # initialize_embedding = torch.cat(initialize_embedding,dim=0) + + token_info_all = [] + for ii in range(len(placeholder_tokens)): + token_info = tokenizer.get_token_info(placeholder_tokens[ii]) + token_info["embedding"] = initialize_embedding[ii] + token_info["trainable"] = True + token_info_all.append(token_info) + embedding_layer.add_embeddings(token_info_all) diff --git a/py/brushnet/unet_2d_blocks.py b/py/brushnet/unet_2d_blocks.py new file mode 100644 index 0000000..e332bb9 --- /dev/null +++ b/py/brushnet/unet_2d_blocks.py @@ -0,0 +1,3908 @@ +# Copyright 2024 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from typing import Any, Dict, Optional, Tuple, Union + +import numpy as np +import torch +import torch.nn.functional as F +from torch import nn + +from diffusers.utils import deprecate, is_torch_version, logging +from diffusers.utils.torch_utils import apply_freeu +from diffusers.models.activations import get_activation +from diffusers.models.attention_processor import Attention, AttnAddedKVProcessor, AttnAddedKVProcessor2_0 +from diffusers.models.normalization import AdaGroupNorm +from diffusers.models.resnet import ( + Downsample2D, + FirDownsample2D, + FirUpsample2D, + KDownsample2D, + KUpsample2D, + ResnetBlock2D, + ResnetBlockCondNorm2D, + Upsample2D, +) +from diffusers.models.transformers.dual_transformer_2d import DualTransformer2DModel +from diffusers.models.transformers.transformer_2d import Transformer2DModel + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +def get_down_block( + down_block_type: str, + num_layers: int, + in_channels: int, + out_channels: int, + temb_channels: int, + add_downsample: bool, + resnet_eps: float, + resnet_act_fn: str, + transformer_layers_per_block: int = 1, + num_attention_heads: Optional[int] = None, + resnet_groups: Optional[int] = None, + cross_attention_dim: Optional[int] = None, + downsample_padding: Optional[int] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + attention_type: str = "default", + resnet_skip_time_act: bool = False, + resnet_out_scale_factor: float = 1.0, + cross_attention_norm: Optional[str] = None, + attention_head_dim: Optional[int] = None, + downsample_type: Optional[str] = None, + dropout: float = 0.0, +): + # If attn head dim is not defined, we default it to the number of heads + if attention_head_dim is None: + logger.warning( + f"It is recommended to provide `attention_head_dim` when calling `get_down_block`. 
Defaulting `attention_head_dim` to {num_attention_heads}." + ) + attention_head_dim = num_attention_heads + + down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type + if down_block_type == "DownBlock2D": + return DownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "ResnetDownsampleBlock2D": + return ResnetDownsampleBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + ) + elif down_block_type == "AttnDownBlock2D": + if add_downsample is False: + downsample_type = None + else: + downsample_type = downsample_type or "conv" # default to 'conv' + return AttnDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + downsample_type=downsample_type, + ) + elif down_block_type == "CrossAttnDownBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") + return CrossAttnDownBlock2D( + num_layers=num_layers, + transformer_layers_per_block=transformer_layers_per_block, + in_channels=in_channels, 
+ out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + cross_attention_dim=cross_attention_dim, + num_attention_heads=num_attention_heads, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + ) + elif down_block_type == "SimpleCrossAttnDownBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for SimpleCrossAttnDownBlock2D") + return SimpleCrossAttnDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + attention_head_dim=attention_head_dim, + resnet_time_scale_shift=resnet_time_scale_shift, + skip_time_act=resnet_skip_time_act, + output_scale_factor=resnet_out_scale_factor, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + ) + elif down_block_type == "SkipDownBlock2D": + return SkipDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + downsample_padding=downsample_padding, + resnet_time_scale_shift=resnet_time_scale_shift, + ) + elif down_block_type == "AttnSkipDownBlock2D": + return AttnSkipDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + dropout=dropout, + 
def get_mid_block(
    mid_block_type: str,
    temb_channels: int,
    in_channels: int,
    resnet_eps: float,
    resnet_act_fn: str,
    resnet_groups: int,
    output_scale_factor: float = 1.0,
    transformer_layers_per_block: int = 1,
    num_attention_heads: Optional[int] = None,
    cross_attention_dim: Optional[int] = None,
    dual_cross_attention: bool = False,
    use_linear_projection: bool = False,
    mid_block_only_cross_attention: bool = False,
    upcast_attention: bool = False,
    resnet_time_scale_shift: str = "default",
    attention_type: str = "default",
    resnet_skip_time_act: bool = False,
    cross_attention_norm: Optional[str] = None,
    attention_head_dim: Optional[int] = 1,
    dropout: float = 0.0,
):
    """Factory mapping a mid-block type name to a constructed mid-block module.

    Args:
        mid_block_type: One of ``"UNetMidBlock2DCrossAttn"``,
            ``"UNetMidBlock2DSimpleCrossAttn"``, ``"UNetMidBlock2D"``,
            ``"MidBlock2D"`` or ``None`` (no mid block).
        The remaining arguments are forwarded to the chosen block's
        constructor; not every argument is used by every block type.

    Returns:
        The constructed mid-block ``nn.Module``, or ``None`` when
        ``mid_block_type`` is ``None``.

    Raises:
        ValueError: If ``mid_block_type`` is not a recognized type name.
    """
    if mid_block_type == "UNetMidBlock2DCrossAttn":
        return UNetMidBlock2DCrossAttn(
            transformer_layers_per_block=transformer_layers_per_block,
            in_channels=in_channels,
            temb_channels=temb_channels,
            dropout=dropout,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            output_scale_factor=output_scale_factor,
            resnet_time_scale_shift=resnet_time_scale_shift,
            cross_attention_dim=cross_attention_dim,
            num_attention_heads=num_attention_heads,
            resnet_groups=resnet_groups,
            dual_cross_attention=dual_cross_attention,
            use_linear_projection=use_linear_projection,
            upcast_attention=upcast_attention,
            attention_type=attention_type,
        )
    elif mid_block_type == "UNetMidBlock2DSimpleCrossAttn":
        return UNetMidBlock2DSimpleCrossAttn(
            in_channels=in_channels,
            temb_channels=temb_channels,
            dropout=dropout,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            output_scale_factor=output_scale_factor,
            cross_attention_dim=cross_attention_dim,
            attention_head_dim=attention_head_dim,
            resnet_groups=resnet_groups,
            resnet_time_scale_shift=resnet_time_scale_shift,
            skip_time_act=resnet_skip_time_act,
            only_cross_attention=mid_block_only_cross_attention,
            cross_attention_norm=cross_attention_norm,
        )
    elif mid_block_type == "UNetMidBlock2D":
        # num_layers=0 / add_attention=False: a single-resnet mid block with
        # no attention layers.
        return UNetMidBlock2D(
            in_channels=in_channels,
            temb_channels=temb_channels,
            dropout=dropout,
            num_layers=0,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            output_scale_factor=output_scale_factor,
            resnet_groups=resnet_groups,
            resnet_time_scale_shift=resnet_time_scale_shift,
            add_attention=False,
        )
    elif mid_block_type == "MidBlock2D":
        return MidBlock2D(
            in_channels=in_channels,
            temb_channels=temb_channels,
            dropout=dropout,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            output_scale_factor=output_scale_factor,
            resnet_time_scale_shift=resnet_time_scale_shift,
            resnet_groups=resnet_groups,
            use_linear_projection=use_linear_projection,
        )
    elif mid_block_type is None:
        return None
    else:
        raise ValueError(f"unknown mid_block_type : {mid_block_type}")
def get_up_block(
    up_block_type: str,
    num_layers: int,
    in_channels: int,
    out_channels: int,
    prev_output_channel: int,
    temb_channels: int,
    add_upsample: bool,
    resnet_eps: float,
    resnet_act_fn: str,
    resolution_idx: Optional[int] = None,
    transformer_layers_per_block: int = 1,
    num_attention_heads: Optional[int] = None,
    resnet_groups: Optional[int] = None,
    cross_attention_dim: Optional[int] = None,
    dual_cross_attention: bool = False,
    use_linear_projection: bool = False,
    only_cross_attention: bool = False,
    upcast_attention: bool = False,
    resnet_time_scale_shift: str = "default",
    attention_type: str = "default",
    resnet_skip_time_act: bool = False,
    resnet_out_scale_factor: float = 1.0,
    cross_attention_norm: Optional[str] = None,
    attention_head_dim: Optional[int] = None,
    upsample_type: Optional[str] = None,
    dropout: float = 0.0,
) -> nn.Module:
    """Factory mapping an up-block type name to a constructed up-block module.

    A leading ``"UNetRes"`` prefix on ``up_block_type`` is stripped for
    backward compatibility. Arguments are forwarded to the chosen block's
    constructor; not every argument is used by every block type.

    Raises:
        ValueError: If ``up_block_type`` is unknown, or if
            ``cross_attention_dim`` is missing for a cross-attention block.
    """
    # If attn head dim is not defined, we default it to the number of heads
    if attention_head_dim is None:
        logger.warning(
            f"It is recommended to provide `attention_head_dim` when calling `get_up_block`. Defaulting `attention_head_dim` to {num_attention_heads}."
        )
        attention_head_dim = num_attention_heads

    up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type
    if up_block_type == "UpBlock2D":
        return UpBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            prev_output_channel=prev_output_channel,
            temb_channels=temb_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            resnet_groups=resnet_groups,
            resnet_time_scale_shift=resnet_time_scale_shift,
        )
    elif up_block_type == "ResnetUpsampleBlock2D":
        return ResnetUpsampleBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            prev_output_channel=prev_output_channel,
            temb_channels=temb_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            resnet_groups=resnet_groups,
            resnet_time_scale_shift=resnet_time_scale_shift,
            skip_time_act=resnet_skip_time_act,
            output_scale_factor=resnet_out_scale_factor,
        )
    elif up_block_type == "CrossAttnUpBlock2D":
        if cross_attention_dim is None:
            raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlock2D")
        return CrossAttnUpBlock2D(
            num_layers=num_layers,
            transformer_layers_per_block=transformer_layers_per_block,
            in_channels=in_channels,
            out_channels=out_channels,
            prev_output_channel=prev_output_channel,
            temb_channels=temb_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            resnet_groups=resnet_groups,
            cross_attention_dim=cross_attention_dim,
            num_attention_heads=num_attention_heads,
            dual_cross_attention=dual_cross_attention,
            use_linear_projection=use_linear_projection,
            only_cross_attention=only_cross_attention,
            upcast_attention=upcast_attention,
            resnet_time_scale_shift=resnet_time_scale_shift,
            attention_type=attention_type,
        )
    elif up_block_type == "SimpleCrossAttnUpBlock2D":
        if cross_attention_dim is None:
            raise ValueError("cross_attention_dim must be specified for SimpleCrossAttnUpBlock2D")
        return SimpleCrossAttnUpBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            prev_output_channel=prev_output_channel,
            temb_channels=temb_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            resnet_groups=resnet_groups,
            cross_attention_dim=cross_attention_dim,
            attention_head_dim=attention_head_dim,
            resnet_time_scale_shift=resnet_time_scale_shift,
            skip_time_act=resnet_skip_time_act,
            output_scale_factor=resnet_out_scale_factor,
            only_cross_attention=only_cross_attention,
            cross_attention_norm=cross_attention_norm,
        )
    elif up_block_type == "AttnUpBlock2D":
        # When no upsampling is requested, force upsample_type off entirely.
        if add_upsample is False:
            upsample_type = None
        else:
            upsample_type = upsample_type or "conv"  # default to 'conv'

        return AttnUpBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            prev_output_channel=prev_output_channel,
            temb_channels=temb_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            resnet_groups=resnet_groups,
            attention_head_dim=attention_head_dim,
            resnet_time_scale_shift=resnet_time_scale_shift,
            upsample_type=upsample_type,
        )
    elif up_block_type == "SkipUpBlock2D":
        return SkipUpBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            prev_output_channel=prev_output_channel,
            temb_channels=temb_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            resnet_time_scale_shift=resnet_time_scale_shift,
        )
    elif up_block_type == "AttnSkipUpBlock2D":
        return AttnSkipUpBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            prev_output_channel=prev_output_channel,
            temb_channels=temb_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            attention_head_dim=attention_head_dim,
            resnet_time_scale_shift=resnet_time_scale_shift,
        )
    elif up_block_type == "UpDecoderBlock2D":
        return UpDecoderBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            resnet_groups=resnet_groups,
            resnet_time_scale_shift=resnet_time_scale_shift,
            temb_channels=temb_channels,
        )
    elif up_block_type == "AttnUpDecoderBlock2D":
        return AttnUpDecoderBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            resnet_groups=resnet_groups,
            attention_head_dim=attention_head_dim,
            resnet_time_scale_shift=resnet_time_scale_shift,
            temb_channels=temb_channels,
        )
    elif up_block_type == "KUpBlock2D":
        return KUpBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            temb_channels=temb_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
        )
    elif up_block_type == "KCrossAttnUpBlock2D":
        return KCrossAttnUpBlock2D(
            num_layers=num_layers,
            in_channels=in_channels,
            out_channels=out_channels,
            temb_channels=temb_channels,
            resolution_idx=resolution_idx,
            dropout=dropout,
            add_upsample=add_upsample,
            resnet_eps=resnet_eps,
            resnet_act_fn=resnet_act_fn,
            cross_attention_dim=cross_attention_dim,
            attention_head_dim=attention_head_dim,
        )

    raise ValueError(f"{up_block_type} does not exist.")
class AutoencoderTinyBlock(nn.Module):
    """
    Tiny Autoencoder residual block used in [`AutoencoderTiny`].

    A plain conv stack (conv -> act -> conv -> act -> conv) is summed with a
    skip path (a 1x1 conv when the channel count changes, identity otherwise)
    and the sum is passed through a final ReLU.

    Args:
        in_channels (`int`): The number of input channels.
        out_channels (`int`): The number of output channels.
        act_fn (`str`):
            The activation function to use. Supported values are `"swish"`,
            `"mish"`, `"gelu"`, and `"relu"`.

    Returns:
        `torch.FloatTensor`: A tensor with the same spatial shape as the input
        tensor, but with the number of channels equal to `out_channels`.
    """

    def __init__(self, in_channels: int, out_channels: int, act_fn: str):
        super().__init__()
        activation = get_activation(act_fn)
        # Main path: three 3x3 convs with the activation interleaved.
        main_layers = [
            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
            activation,
            nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
            activation,
            nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
        ]
        self.conv = nn.Sequential(*main_layers)
        # Skip path: project channels with a 1x1 conv only when they differ.
        if in_channels != out_channels:
            self.skip = nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False)
        else:
            self.skip = nn.Identity()
        self.fuse = nn.ReLU()

    def forward(self, x: torch.FloatTensor) -> torch.FloatTensor:
        shortcut = self.skip(x)
        return self.fuse(self.conv(x) + shortcut)
class UNetMidBlock2D(nn.Module):
    """
    A 2D UNet mid-block [`UNetMidBlock2D`] with multiple residual blocks and
    optional attention blocks.

    The block holds ``num_layers + 1`` resnets and ``num_layers`` (optional)
    attention layers; each attention layer is applied between consecutive
    resnets.

    Args:
        in_channels (`int`): The number of input channels.
        temb_channels (`int`): The number of temporal embedding channels.
        dropout (`float`, *optional*, defaults to 0.0): The dropout rate.
        num_layers (`int`, *optional*, defaults to 1): The number of residual blocks.
        resnet_eps (`float`, *optional*, 1e-6): The epsilon value for the resnet blocks.
        resnet_time_scale_shift (`str`, *optional*, defaults to `default`):
            The type of normalization to apply to the time embeddings
            (`"default"` or `"spatial"`).
        resnet_act_fn (`str`, *optional*, defaults to `swish`): The activation function for the resnet blocks.
        resnet_groups (`int`, *optional*, defaults to 32):
            The number of groups for group normalization in the resnet blocks.
        attn_groups (`Optional[int]`, *optional*, defaults to None): The number of groups for the attention blocks.
        resnet_pre_norm (`bool`, *optional*, defaults to `True`):
            Whether to use pre-normalization for the resnet blocks.
        add_attention (`bool`, *optional*, defaults to `True`): Whether to add attention blocks.
        attention_head_dim (`int`, *optional*, defaults to 1):
            Dimension of a single attention head; the head count is derived
            from this and `in_channels`.
        output_scale_factor (`float`, *optional*, defaults to 1.0): The output scale factor.

    Returns:
        `torch.FloatTensor`: The output of the last residual block, shape
        `(batch_size, in_channels, height, width)`.
    """

    def __init__(
        self,
        in_channels: int,
        temb_channels: int,
        dropout: float = 0.0,
        num_layers: int = 1,
        resnet_eps: float = 1e-6,
        resnet_time_scale_shift: str = "default",  # default, spatial
        resnet_act_fn: str = "swish",
        resnet_groups: int = 32,
        attn_groups: Optional[int] = None,
        resnet_pre_norm: bool = True,
        add_attention: bool = True,
        attention_head_dim: int = 1,
        output_scale_factor: float = 1.0,
    ):
        super().__init__()
        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)
        self.add_attention = add_attention

        if attn_groups is None:
            # Spatial time-embedding norm uses no group norm in attention.
            attn_groups = resnet_groups if resnet_time_scale_shift == "default" else None

        # there is always at least one resnet
        if resnet_time_scale_shift == "spatial":
            resnets = [
                ResnetBlockCondNorm2D(
                    in_channels=in_channels,
                    out_channels=in_channels,
                    temb_channels=temb_channels,
                    eps=resnet_eps,
                    groups=resnet_groups,
                    dropout=dropout,
                    time_embedding_norm="spatial",
                    non_linearity=resnet_act_fn,
                    output_scale_factor=output_scale_factor,
                )
            ]
        else:
            resnets = [
                ResnetBlock2D(
                    in_channels=in_channels,
                    out_channels=in_channels,
                    temb_channels=temb_channels,
                    eps=resnet_eps,
                    groups=resnet_groups,
                    dropout=dropout,
                    time_embedding_norm=resnet_time_scale_shift,
                    non_linearity=resnet_act_fn,
                    output_scale_factor=output_scale_factor,
                    pre_norm=resnet_pre_norm,
                )
            ]
        attentions = []

        if attention_head_dim is None:
            logger.warning(
                f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {in_channels}."
            )
            attention_head_dim = in_channels

        for _ in range(num_layers):
            if self.add_attention:
                attentions.append(
                    Attention(
                        in_channels,
                        heads=in_channels // attention_head_dim,
                        dim_head=attention_head_dim,
                        rescale_output_factor=output_scale_factor,
                        eps=resnet_eps,
                        norm_num_groups=attn_groups,
                        spatial_norm_dim=temb_channels if resnet_time_scale_shift == "spatial" else None,
                        residual_connection=True,
                        bias=True,
                        upcast_softmax=True,
                        _from_deprecated_attn_block=True,
                    )
                )
            else:
                # Keep a placeholder so attentions and resnets stay aligned.
                attentions.append(None)

            if resnet_time_scale_shift == "spatial":
                resnets.append(
                    ResnetBlockCondNorm2D(
                        in_channels=in_channels,
                        out_channels=in_channels,
                        temb_channels=temb_channels,
                        eps=resnet_eps,
                        groups=resnet_groups,
                        dropout=dropout,
                        time_embedding_norm="spatial",
                        non_linearity=resnet_act_fn,
                        output_scale_factor=output_scale_factor,
                    )
                )
            else:
                resnets.append(
                    ResnetBlock2D(
                        in_channels=in_channels,
                        out_channels=in_channels,
                        temb_channels=temb_channels,
                        eps=resnet_eps,
                        groups=resnet_groups,
                        dropout=dropout,
                        time_embedding_norm=resnet_time_scale_shift,
                        non_linearity=resnet_act_fn,
                        output_scale_factor=output_scale_factor,
                        pre_norm=resnet_pre_norm,
                    )
                )

        self.attentions = nn.ModuleList(attentions)
        self.resnets = nn.ModuleList(resnets)

    def forward(self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None) -> torch.FloatTensor:
        # First resnet, then alternating (attention, resnet) pairs.
        hidden_states = self.resnets[0](hidden_states, temb)
        for attn, resnet in zip(self.attentions, self.resnets[1:]):
            if attn is not None:
                hidden_states = attn(hidden_states, temb=temb)
            hidden_states = resnet(hidden_states, temb)

        return hidden_states
class UNetMidBlock2DCrossAttn(nn.Module):
    """2D UNet mid-block with cross-attention.

    Holds ``num_layers + 1`` resnets interleaved with ``num_layers``
    transformer blocks that cross-attend to ``encoder_hidden_states``.
    Supports gradient checkpointing of the resnets during training.
    """

    def __init__(
        self,
        in_channels: int,
        temb_channels: int,
        dropout: float = 0.0,
        num_layers: int = 1,
        transformer_layers_per_block: Union[int, Tuple[int]] = 1,
        resnet_eps: float = 1e-6,
        resnet_time_scale_shift: str = "default",
        resnet_act_fn: str = "swish",
        resnet_groups: int = 32,
        resnet_pre_norm: bool = True,
        num_attention_heads: int = 1,
        output_scale_factor: float = 1.0,
        cross_attention_dim: int = 1280,
        dual_cross_attention: bool = False,
        use_linear_projection: bool = False,
        upcast_attention: bool = False,
        attention_type: str = "default",
    ):
        super().__init__()

        self.has_cross_attention = True
        self.num_attention_heads = num_attention_heads
        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)

        # support for variable transformer layers per block
        if isinstance(transformer_layers_per_block, int):
            transformer_layers_per_block = [transformer_layers_per_block] * num_layers

        # there is always at least one resnet
        resnets = [
            ResnetBlock2D(
                in_channels=in_channels,
                out_channels=in_channels,
                temb_channels=temb_channels,
                eps=resnet_eps,
                groups=resnet_groups,
                dropout=dropout,
                time_embedding_norm=resnet_time_scale_shift,
                non_linearity=resnet_act_fn,
                output_scale_factor=output_scale_factor,
                pre_norm=resnet_pre_norm,
            )
        ]
        attentions = []

        for i in range(num_layers):
            if not dual_cross_attention:
                attentions.append(
                    Transformer2DModel(
                        num_attention_heads,
                        in_channels // num_attention_heads,
                        in_channels=in_channels,
                        num_layers=transformer_layers_per_block[i],
                        cross_attention_dim=cross_attention_dim,
                        norm_num_groups=resnet_groups,
                        use_linear_projection=use_linear_projection,
                        upcast_attention=upcast_attention,
                        attention_type=attention_type,
                    )
                )
            else:
                attentions.append(
                    DualTransformer2DModel(
                        num_attention_heads,
                        in_channels // num_attention_heads,
                        in_channels=in_channels,
                        num_layers=1,
                        cross_attention_dim=cross_attention_dim,
                        norm_num_groups=resnet_groups,
                    )
                )
            resnets.append(
                ResnetBlock2D(
                    in_channels=in_channels,
                    out_channels=in_channels,
                    temb_channels=temb_channels,
                    eps=resnet_eps,
                    groups=resnet_groups,
                    dropout=dropout,
                    time_embedding_norm=resnet_time_scale_shift,
                    non_linearity=resnet_act_fn,
                    output_scale_factor=output_scale_factor,
                    pre_norm=resnet_pre_norm,
                )
            )

        self.attentions = nn.ModuleList(attentions)
        self.resnets = nn.ModuleList(resnets)

        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states: torch.FloatTensor,
        temb: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
    ) -> torch.FloatTensor:
        if cross_attention_kwargs is not None:
            if cross_attention_kwargs.get("scale", None) is not None:
                logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.")

        hidden_states = self.resnets[0](hidden_states, temb)
        for attn, resnet in zip(self.attentions, self.resnets[1:]):
            if self.training and self.gradient_checkpointing:
                # Only the resnet is checkpointed; the attention block runs
                # normally before it.
                def create_custom_forward(module, return_dict=None):
                    def custom_forward(*inputs):
                        if return_dict is not None:
                            return module(*inputs, return_dict=return_dict)
                        else:
                            return module(*inputs)

                    return custom_forward

                ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {}
                hidden_states = attn(
                    hidden_states,
                    encoder_hidden_states=encoder_hidden_states,
                    cross_attention_kwargs=cross_attention_kwargs,
                    attention_mask=attention_mask,
                    encoder_attention_mask=encoder_attention_mask,
                    return_dict=False,
                )[0]
                hidden_states = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(resnet),
                    hidden_states,
                    temb,
                    **ckpt_kwargs,
                )
            else:
                hidden_states = attn(
                    hidden_states,
                    encoder_hidden_states=encoder_hidden_states,
                    cross_attention_kwargs=cross_attention_kwargs,
                    attention_mask=attention_mask,
                    encoder_attention_mask=encoder_attention_mask,
                    return_dict=False,
                )[0]
                hidden_states = resnet(hidden_states, temb)

        return hidden_states
class UNetMidBlock2DSimpleCrossAttn(nn.Module):
    """2D UNet mid-block using added-KV ("simple") cross-attention.

    Each attention layer uses `Attention` with `added_kv_proj_dim`, choosing
    the SDPA-based processor when `F.scaled_dot_product_attention` is
    available.
    """

    def __init__(
        self,
        in_channels: int,
        temb_channels: int,
        dropout: float = 0.0,
        num_layers: int = 1,
        resnet_eps: float = 1e-6,
        resnet_time_scale_shift: str = "default",
        resnet_act_fn: str = "swish",
        resnet_groups: int = 32,
        resnet_pre_norm: bool = True,
        attention_head_dim: int = 1,
        output_scale_factor: float = 1.0,
        cross_attention_dim: int = 1280,
        skip_time_act: bool = False,
        only_cross_attention: bool = False,
        cross_attention_norm: Optional[str] = None,
    ):
        super().__init__()

        self.has_cross_attention = True

        self.attention_head_dim = attention_head_dim
        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)

        self.num_heads = in_channels // self.attention_head_dim

        # there is always at least one resnet
        resnets = [
            ResnetBlock2D(
                in_channels=in_channels,
                out_channels=in_channels,
                temb_channels=temb_channels,
                eps=resnet_eps,
                groups=resnet_groups,
                dropout=dropout,
                time_embedding_norm=resnet_time_scale_shift,
                non_linearity=resnet_act_fn,
                output_scale_factor=output_scale_factor,
                pre_norm=resnet_pre_norm,
                skip_time_act=skip_time_act,
            )
        ]
        attentions = []

        for _ in range(num_layers):
            # Prefer the PyTorch-2.0 SDPA processor when available.
            processor = (
                AttnAddedKVProcessor2_0() if hasattr(F, "scaled_dot_product_attention") else AttnAddedKVProcessor()
            )

            attentions.append(
                Attention(
                    query_dim=in_channels,
                    cross_attention_dim=in_channels,
                    heads=self.num_heads,
                    dim_head=self.attention_head_dim,
                    added_kv_proj_dim=cross_attention_dim,
                    norm_num_groups=resnet_groups,
                    bias=True,
                    upcast_softmax=True,
                    only_cross_attention=only_cross_attention,
                    cross_attention_norm=cross_attention_norm,
                    processor=processor,
                )
            )
            resnets.append(
                ResnetBlock2D(
                    in_channels=in_channels,
                    out_channels=in_channels,
                    temb_channels=temb_channels,
                    eps=resnet_eps,
                    groups=resnet_groups,
                    dropout=dropout,
                    time_embedding_norm=resnet_time_scale_shift,
                    non_linearity=resnet_act_fn,
                    output_scale_factor=output_scale_factor,
                    pre_norm=resnet_pre_norm,
                    skip_time_act=skip_time_act,
                )
            )

        self.attentions = nn.ModuleList(attentions)
        self.resnets = nn.ModuleList(resnets)

    def forward(
        self,
        hidden_states: torch.FloatTensor,
        temb: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
    ) -> torch.FloatTensor:
        cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {}
        if cross_attention_kwargs.get("scale", None) is not None:
            logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.")

        if attention_mask is None:
            # if encoder_hidden_states is defined: we are doing cross-attn, so we should use cross-attn mask.
            mask = None if encoder_hidden_states is None else encoder_attention_mask
        else:
            # when attention_mask is defined: we don't even check for encoder_attention_mask.
            # this is to maintain compatibility with UnCLIP, which uses 'attention_mask' param for cross-attn masks.
            # TODO: UnCLIP should express cross-attn mask via encoder_attention_mask param instead of via attention_mask.
            # then we can simplify this whole if/else block to:
            # mask = attention_mask if encoder_hidden_states is None else encoder_attention_mask
            mask = attention_mask

        hidden_states = self.resnets[0](hidden_states, temb)
        for attn, resnet in zip(self.attentions, self.resnets[1:]):
            # attn
            hidden_states = attn(
                hidden_states,
                encoder_hidden_states=encoder_hidden_states,
                attention_mask=mask,
                **cross_attention_kwargs,
            )

            # resnet
            hidden_states = resnet(hidden_states, temb)

        return hidden_states
class MidBlock2D(nn.Module):
    """Attention-free 2D mid-block: ``num_layers + 1`` stacked resnets.

    Supports gradient checkpointing of the non-first resnets during training.
    """

    def __init__(
        self,
        in_channels: int,
        temb_channels: int,
        dropout: float = 0.0,
        num_layers: int = 1,
        resnet_eps: float = 1e-6,
        resnet_time_scale_shift: str = "default",
        resnet_act_fn: str = "swish",
        resnet_groups: int = 32,
        resnet_pre_norm: bool = True,
        output_scale_factor: float = 1.0,
        use_linear_projection: bool = False,
    ):
        super().__init__()

        self.has_cross_attention = False
        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)

        # there is always at least one resnet
        resnets = [
            ResnetBlock2D(
                in_channels=in_channels,
                out_channels=in_channels,
                temb_channels=temb_channels,
                eps=resnet_eps,
                groups=resnet_groups,
                dropout=dropout,
                time_embedding_norm=resnet_time_scale_shift,
                non_linearity=resnet_act_fn,
                output_scale_factor=output_scale_factor,
                pre_norm=resnet_pre_norm,
            )
        ]

        for i in range(num_layers):
            resnets.append(
                ResnetBlock2D(
                    in_channels=in_channels,
                    out_channels=in_channels,
                    temb_channels=temb_channels,
                    eps=resnet_eps,
                    groups=resnet_groups,
                    dropout=dropout,
                    time_embedding_norm=resnet_time_scale_shift,
                    non_linearity=resnet_act_fn,
                    output_scale_factor=output_scale_factor,
                    pre_norm=resnet_pre_norm,
                )
            )

        self.resnets = nn.ModuleList(resnets)

        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states: torch.FloatTensor,
        temb: Optional[torch.FloatTensor] = None,
    ) -> torch.FloatTensor:
        # NOTE(review): `scale` is passed on the eager path but dropped on the
        # checkpointed path; harmless while lora_scale is fixed at 1.0, and
        # newer ResnetBlock2D versions no longer accept `scale` — confirm
        # against the vendored resnet implementation before changing.
        lora_scale = 1.0
        hidden_states = self.resnets[0](hidden_states, temb, scale=lora_scale)
        for resnet in self.resnets[1:]:
            if self.training and self.gradient_checkpointing:

                def create_custom_forward(module, return_dict=None):
                    def custom_forward(*inputs):
                        if return_dict is not None:
                            return module(*inputs, return_dict=return_dict)
                        else:
                            return module(*inputs)

                    return custom_forward

                ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {}
                hidden_states = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(resnet),
                    hidden_states,
                    temb,
                    **ckpt_kwargs,
                )
            else:
                hidden_states = resnet(hidden_states, temb, scale=lora_scale)

        return hidden_states
class AttnDownBlock2D(nn.Module):
    """2D down-block with self-attention after each resnet.

    ``num_layers`` (resnet, attention) pairs are followed by an optional
    downsampler selected by ``downsample_type`` (`"conv"`, `"resnet"`, or
    anything else for no downsampling).

    Returns:
        Tuple of the final hidden states and a tuple of every intermediate
        (and downsampled) hidden state, for skip connections.
    """

    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        temb_channels: int,
        dropout: float = 0.0,
        num_layers: int = 1,
        resnet_eps: float = 1e-6,
        resnet_time_scale_shift: str = "default",
        resnet_act_fn: str = "swish",
        resnet_groups: int = 32,
        resnet_pre_norm: bool = True,
        attention_head_dim: int = 1,
        output_scale_factor: float = 1.0,
        downsample_padding: int = 1,
        downsample_type: str = "conv",
    ):
        super().__init__()
        resnets = []
        attentions = []
        self.downsample_type = downsample_type

        if attention_head_dim is None:
            # Fix: the message previously claimed the default was `in_channels`
            # while the code defaults to `out_channels`.
            logger.warning(
                f"It is not recommended to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `out_channels`: {out_channels}."
            )
            attention_head_dim = out_channels

        for i in range(num_layers):
            # First resnet maps in_channels -> out_channels; the rest keep out_channels.
            in_channels = in_channels if i == 0 else out_channels
            resnets.append(
                ResnetBlock2D(
                    in_channels=in_channels,
                    out_channels=out_channels,
                    temb_channels=temb_channels,
                    eps=resnet_eps,
                    groups=resnet_groups,
                    dropout=dropout,
                    time_embedding_norm=resnet_time_scale_shift,
                    non_linearity=resnet_act_fn,
                    output_scale_factor=output_scale_factor,
                    pre_norm=resnet_pre_norm,
                )
            )
            attentions.append(
                Attention(
                    out_channels,
                    heads=out_channels // attention_head_dim,
                    dim_head=attention_head_dim,
                    rescale_output_factor=output_scale_factor,
                    eps=resnet_eps,
                    norm_num_groups=resnet_groups,
                    residual_connection=True,
                    bias=True,
                    upcast_softmax=True,
                    _from_deprecated_attn_block=True,
                )
            )

        self.attentions = nn.ModuleList(attentions)
        self.resnets = nn.ModuleList(resnets)

        if downsample_type == "conv":
            self.downsamplers = nn.ModuleList(
                [
                    Downsample2D(
                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op"
                    )
                ]
            )
        elif downsample_type == "resnet":
            self.downsamplers = nn.ModuleList(
                [
                    ResnetBlock2D(
                        in_channels=out_channels,
                        out_channels=out_channels,
                        temb_channels=temb_channels,
                        eps=resnet_eps,
                        groups=resnet_groups,
                        dropout=dropout,
                        time_embedding_norm=resnet_time_scale_shift,
                        non_linearity=resnet_act_fn,
                        output_scale_factor=output_scale_factor,
                        pre_norm=resnet_pre_norm,
                        down=True,
                    )
                ]
            )
        else:
            self.downsamplers = None

    def forward(
        self,
        hidden_states: torch.FloatTensor,
        temb: Optional[torch.FloatTensor] = None,
        upsample_size: Optional[int] = None,
        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
    ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]:
        cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {}
        if cross_attention_kwargs.get("scale", None) is not None:
            logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.")

        output_states = ()

        for resnet, attn in zip(self.resnets, self.attentions):
            hidden_states = resnet(hidden_states, temb)
            hidden_states = attn(hidden_states, **cross_attention_kwargs)
            output_states = output_states + (hidden_states,)

        if self.downsamplers is not None:
            for downsampler in self.downsamplers:
                # Resnet downsamplers need the time embedding; conv ones don't.
                if self.downsample_type == "resnet":
                    hidden_states = downsampler(hidden_states, temb=temb)
                else:
                    hidden_states = downsampler(hidden_states)

            output_states += (hidden_states,)

        return hidden_states, output_states
`cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states, **cross_attention_kwargs) + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + if self.downsample_type == "resnet": + hidden_states = downsampler(hidden_states, temb=temb) + else: + hidden_states = downsampler(hidden_states) + + output_states += (hidden_states,) + + return hidden_states, output_states + + +class CrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: Union[int, Tuple[int]] = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + downsample_padding: int = 1, + add_downsample: bool = True, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + attention_type: str = "default", + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * num_layers + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + 
output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=transformer_layers_per_block[i], + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + additional_residuals: Optional[torch.FloatTensor] = None, + down_block_add_samples: Optional[torch.FloatTensor] = None, + debug=False, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + + if debug: print(' XAD2: forward') + + if cross_attention_kwargs is not None: + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. 
`scale` will be ignored.") + + output_states = () + + blocks = list(zip(self.resnets, self.attentions)) + + for i, (resnet, attn) in enumerate(blocks): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + else: + if debug: print(' XAD2: resnet hs #', i, hidden_states.shape) + if debug and temb is not None: print(' XAD2: resnet temb #', i, temb.shape) + + hidden_states = resnet(hidden_states, temb) + + if debug: print(' XAD2: attn hs #', i, hidden_states.shape) + if debug and encoder_hidden_states is not None: print(' XAD2: attn ehs #', i, + encoder_hidden_states.shape) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + + # apply additional residuals to the output of the last pair of resnet and attention blocks + if i == len(blocks) - 1 and additional_residuals is not None: + + if debug: print(' XAD2: add res', additional_residuals.shape) + + hidden_states = hidden_states + additional_residuals + + if down_block_add_samples is not None: + + if debug: print(' XAD2: add samples', down_block_add_samples.shape) + + hidden_states = hidden_states + down_block_add_samples.pop(0) + 
+ if debug: print(' XAD2: output', hidden_states.shape) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) # todo: add before or after + + output_states = output_states + (hidden_states,) + + if debug: + print(' XAD2: finish') + for st in output_states: + print(' XAD2: ', st.shape) + + return hidden_states, output_states + + +class DownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None, + down_block_add_samples: Optional[torch.FloatTensor] = None, *args, **kwargs + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, 
...]]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + if kwargs.get("debug", False): print(' D2: forward', hidden_states.shape) + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + + if kwargs.get("debug", False): print(' D2: resnet', hidden_states.shape) + + hidden_states = resnet(hidden_states, temb) + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + if down_block_add_samples is not None: + hidden_states = hidden_states + down_block_add_samples.pop(0) # todo: add before or after + + output_states = output_states + (hidden_states,) + + if kwargs.get("debug", False): print(' D2: finish', hidden_states.shape) + + return hidden_states, output_states + + +class DownEncoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + 
resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + def forward(self, hidden_states: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + hidden_states = resnet(hidden_states, temb=None) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states + + +class AttnDownEncoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + resnets = [] + attentions = [] + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." + ) + attention_head_dim = out_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=None, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups, + 
residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + def forward(self, hidden_states: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb=None) + hidden_states = attn(hidden_states) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states + + +class AttnSkipDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = np.sqrt(2.0), + add_downsample: bool = True, + ): + super().__init__() + self.attentions = nn.ModuleList([]) + self.resnets = nn.ModuleList([]) + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." 
+ ) + attention_head_dim = out_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + self.resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(in_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + self.attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=32, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + if add_downsample: + self.resnet_down = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + down=True, + kernel="fir", + ) + self.downsamplers = nn.ModuleList([FirDownsample2D(out_channels, out_channels=out_channels)]) + self.skip_conv = nn.Conv2d(3, out_channels, kernel_size=(1, 1), stride=(1, 1)) + else: + self.resnet_down = None + self.downsamplers = None + self.skip_conv = None + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + skip_sample: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...], torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. 
Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states) + output_states += (hidden_states,) + + if self.downsamplers is not None: + hidden_states = self.resnet_down(hidden_states, temb) + for downsampler in self.downsamplers: + skip_sample = downsampler(skip_sample) + + hidden_states = self.skip_conv(skip_sample) + hidden_states + + output_states += (hidden_states,) + + return hidden_states, output_states, skip_sample + + +class SkipDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + output_scale_factor: float = np.sqrt(2.0), + add_downsample: bool = True, + downsample_padding: int = 1, + ): + super().__init__() + self.resnets = nn.ModuleList([]) + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + self.resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(in_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + if add_downsample: + self.resnet_down = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + dropout=dropout, + 
time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + down=True, + kernel="fir", + ) + self.downsamplers = nn.ModuleList([FirDownsample2D(out_channels, out_channels=out_channels)]) + self.skip_conv = nn.Conv2d(3, out_channels, kernel_size=(1, 1), stride=(1, 1)) + else: + self.resnet_down = None + self.downsamplers = None + self.skip_conv = None + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + skip_sample: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...], torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + hidden_states = resnet(hidden_states, temb) + output_states += (hidden_states,) + + if self.downsamplers is not None: + hidden_states = self.resnet_down(hidden_states, temb) + for downsampler in self.downsamplers: + skip_sample = downsampler(skip_sample) + + hidden_states = self.skip_conv(skip_sample) + hidden_states + + output_states += (hidden_states,) + + return hidden_states, output_states, skip_sample + + +class ResnetDownsampleBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + skip_time_act: bool = False, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + down=True, + ) + ] + ) + else: + self.downsamplers = None + + 
self.gradient_checkpointing = False + + def forward( + self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None, *args, **kwargs + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states, temb) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class SimpleCrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + add_downsample: bool = True, + skip_time_act: bool = False, + only_cross_attention: bool = False, + 
cross_attention_norm: Optional[str] = None, + ): + super().__init__() + + self.has_cross_attention = True + + resnets = [] + attentions = [] + + self.attention_head_dim = attention_head_dim + self.num_heads = out_channels // self.attention_head_dim + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + processor = ( + AttnAddedKVProcessor2_0() if hasattr(F, "scaled_dot_product_attention") else AttnAddedKVProcessor() + ) + + attentions.append( + Attention( + query_dim=out_channels, + cross_attention_dim=out_channels, + heads=self.num_heads, + dim_head=attention_head_dim, + added_kv_proj_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + bias=True, + upcast_softmax=True, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + processor=processor, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + down=True, + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: 
Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + output_states = () + + if attention_mask is None: + # if encoder_hidden_states is defined: we are doing cross-attn, so we should use cross-attn mask. + mask = None if encoder_hidden_states is None else encoder_attention_mask + else: + # when attention_mask is defined: we don't even check for encoder_attention_mask. + # this is to maintain compatibility with UnCLIP, which uses 'attention_mask' param for cross-attn masks. + # TODO: UnCLIP should express cross-attn mask via encoder_attention_mask param instead of via attention_mask. 
+ # then we can simplify this whole if/else block to: + # mask = attention_mask if encoder_hidden_states is None else encoder_attention_mask + mask = attention_mask + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + else: + hidden_states = resnet(hidden_states, temb) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + + output_states = output_states + (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states, temb) + + output_states = output_states + (hidden_states,) + + return hidden_states, output_states + + +class KDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 4, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + resnet_group_size: int = 32, + add_downsample: bool = False, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + temb_channels=temb_channels, + groups=groups, + groups_out=groups_out, + eps=resnet_eps, + non_linearity=resnet_act_fn, + 
time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + # YiYi's comments- might be able to use FirDownsample2D, look into details later + self.downsamplers = nn.ModuleList([KDownsample2D()]) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None, *args, **kwargs + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + output_states = () + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states, output_states + + +class KCrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + cross_attention_dim: int, + dropout: float = 0.0, + num_layers: int = 4, + resnet_group_size: int = 32, + add_downsample: bool = True, + attention_head_dim: int = 64, + 
add_self_attention: bool = False, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + dropout=dropout, + temb_channels=temb_channels, + groups=groups, + groups_out=groups_out, + eps=resnet_eps, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + attentions.append( + KAttentionBlock( + out_channels, + out_channels // attention_head_dim, + attention_head_dim, + cross_attention_dim=cross_attention_dim, + temb_channels=temb_channels, + attention_bias=True, + add_self_attention=add_self_attention, + cross_attention_norm="layer_norm", + group_size=resnet_group_size, + ) + ) + + self.resnets = nn.ModuleList(resnets) + self.attentions = nn.ModuleList(attentions) + + if add_downsample: + self.downsamplers = nn.ModuleList([KDownsample2D()]) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward( + self, + hidden_states: torch.FloatTensor, + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> Tuple[torch.FloatTensor, Tuple[torch.FloatTensor, ...]]: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. 
`scale` will be ignored.") + + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + + if self.downsamplers is None: + output_states += (None,) + else: + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + return hidden_states, output_states + + +class AttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: int = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + upsample_type: str = "conv", + ): + super().__init__() + resnets = [] + attentions = [] + + self.upsample_type = 
upsample_type + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `in_channels`: {out_channels}." + ) + attention_head_dim = out_channels + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if upsample_type == "conv": + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + elif upsample_type == "resnet": + self.upsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + up=True, + ) + ] + ) + else: + self.upsamplers = None + + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: 
Optional[int] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + for resnet, attn in zip(self.resnets, self.attentions): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + if self.upsample_type == "resnet": + hidden_states = upsampler(hidden_states, temb=temb) + else: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class CrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + transformer_layers_per_block: Union[int, Tuple[int]] = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + num_attention_heads: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + only_cross_attention: bool = False, + upcast_attention: bool = False, + attention_type: str = "default", + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + self.num_attention_heads = num_attention_heads + + if 
isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * num_layers + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=transformer_layers_per_block[i], + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention, + upcast_attention=upcast_attention, + attention_type=attention_type, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + num_attention_heads, + out_channels // num_attention_heads, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + upsample_size: 
Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + return_res_samples: Optional[bool] = False, + up_block_add_samples: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + if cross_attention_kwargs is not None: + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + is_freeu_enabled = ( + getattr(self, "s1", None) + and getattr(self, "s2", None) + and getattr(self, "b1", None) + and getattr(self, "b2", None) + ) + if return_res_samples: + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + + # FreeU: Only operate on the first two stages + if is_freeu_enabled: + hidden_states, res_hidden_states = apply_freeu( + self.resolution_idx, + hidden_states, + res_hidden_states, + s1=self.s1, + s2=self.s2, + b1=self.b1, + b2=self.b2, + ) + + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + else: + hidden_states = resnet(hidden_states, 
temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + return_dict=False, + )[0] + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) + + if return_res_samples: + return hidden_states, output_states + else: + return hidden_states + + +class UpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = 
nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + return_res_samples: Optional[bool] = False, + up_block_add_samples: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." + deprecate("scale", "1.0.0", deprecation_message) + + is_freeu_enabled = ( + getattr(self, "s1", None) + and getattr(self, "s2", None) + and getattr(self, "b1", None) + and getattr(self, "b2", None) + ) + if return_res_samples: + output_states = () + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + + # FreeU: Only operate on the first two stages + if is_freeu_enabled: + hidden_states, res_hidden_states = apply_freeu( + self.resolution_idx, + hidden_states, + res_hidden_states, + s1=self.s1, + s2=self.s2, + b1=self.b1, + b2=self.b2, + ) + + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = 
torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) # todo: add before or after + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + if return_res_samples: + output_states = output_states + (hidden_states,) + if up_block_add_samples is not None: + hidden_states = hidden_states + up_block_add_samples.pop(0) # todo: add before or after + + if return_res_samples: + return hidden_states, output_states + else: + return hidden_states + + +class UpDecoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", # default, spatial + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + temb_channels: Optional[int] = None, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + input_channels = in_channels if i == 0 else out_channels + + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + 
non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.resolution_idx = resolution_idx + + def forward(self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None) -> torch.FloatTensor: + for resnet in self.resnets: + hidden_states = resnet(hidden_states, temb=temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class AttnUpDecoderBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + temb_channels: Optional[int] = None, + ): + super().__init__() + resnets = [] + attentions = [] + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `out_channels`: {out_channels}." 
+ ) + attention_head_dim = out_channels + + for i in range(num_layers): + input_channels = in_channels if i == 0 else out_channels + + if resnet_time_scale_shift == "spatial": + resnets.append( + ResnetBlockCondNorm2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm="spatial", + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + ) + ) + else: + resnets.append( + ResnetBlock2D( + in_channels=input_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=resnet_groups if resnet_time_scale_shift != "spatial" else None, + spatial_norm_dim=temb_channels if resnet_time_scale_shift == "spatial" else None, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.resolution_idx = resolution_idx + + def forward(self, hidden_states: torch.FloatTensor, temb: Optional[torch.FloatTensor] = None) -> torch.FloatTensor: + for resnet, attn in zip(self.resnets, self.attentions): + hidden_states = resnet(hidden_states, temb=temb) + hidden_states = attn(hidden_states, temb=temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return 
hidden_states + + +class AttnSkipUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + output_scale_factor: float = np.sqrt(2.0), + add_upsample: bool = True, + ): + super().__init__() + self.attentions = nn.ModuleList([]) + self.resnets = nn.ModuleList([]) + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + self.resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(resnet_in_channels + res_skip_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + if attention_head_dim is None: + logger.warning( + f"It is not recommend to pass `attention_head_dim=None`. Defaulting `attention_head_dim` to `out_channels`: {out_channels}." 
+ ) + attention_head_dim = out_channels + + self.attentions.append( + Attention( + out_channels, + heads=out_channels // attention_head_dim, + dim_head=attention_head_dim, + rescale_output_factor=output_scale_factor, + eps=resnet_eps, + norm_num_groups=32, + residual_connection=True, + bias=True, + upcast_softmax=True, + _from_deprecated_attn_block=True, + ) + ) + + self.upsampler = FirUpsample2D(in_channels, out_channels=out_channels) + if add_upsample: + self.resnet_up = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + up=True, + kernel="fir", + ) + self.skip_conv = nn.Conv2d(out_channels, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + self.skip_norm = torch.nn.GroupNorm( + num_groups=min(out_channels // 4, 32), num_channels=out_channels, eps=resnet_eps, affine=True + ) + self.act = nn.SiLU() + else: + self.resnet_up = None + self.skip_conv = None + self.skip_norm = None + self.act = None + + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + skip_sample=None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + hidden_states = resnet(hidden_states, temb) + + hidden_states = self.attentions[0](hidden_states) + + if skip_sample is not None: + skip_sample = self.upsampler(skip_sample) + else: + skip_sample = 0 + + if self.resnet_up is not None: + skip_sample_states = self.skip_norm(hidden_states) + skip_sample_states = self.act(skip_sample_states) + skip_sample_states = self.skip_conv(skip_sample_states) + + skip_sample = skip_sample + skip_sample_states + + hidden_states = self.resnet_up(hidden_states, temb) + + return hidden_states, skip_sample + + +class SkipUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_pre_norm: bool = True, + output_scale_factor: float = np.sqrt(2.0), + add_upsample: bool = True, + upsample_padding: int = 1, + ): + super().__init__() + self.resnets = nn.ModuleList([]) + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + self.resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min((resnet_in_channels + res_skip_channels) // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + 
pre_norm=resnet_pre_norm, + ) + ) + + self.upsampler = FirUpsample2D(in_channels, out_channels=out_channels) + if add_upsample: + self.resnet_up = ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=min(out_channels // 4, 32), + groups_out=min(out_channels // 4, 32), + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + use_in_shortcut=True, + up=True, + kernel="fir", + ) + self.skip_conv = nn.Conv2d(out_channels, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + self.skip_norm = torch.nn.GroupNorm( + num_groups=min(out_channels // 4, 32), num_channels=out_channels, eps=resnet_eps, affine=True + ) + self.act = nn.SiLU() + else: + self.resnet_up = None + self.skip_conv = None + self.skip_norm = None + self.act = None + + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + skip_sample=None, + *args, + **kwargs, + ) -> Tuple[torch.FloatTensor, torch.FloatTensor]: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + hidden_states = resnet(hidden_states, temb) + + if skip_sample is not None: + skip_sample = self.upsampler(skip_sample) + else: + skip_sample = 0 + + if self.resnet_up is not None: + skip_sample_states = self.skip_norm(hidden_states) + skip_sample_states = self.act(skip_sample_states) + skip_sample_states = self.skip_conv(skip_sample_states) + + skip_sample = skip_sample + skip_sample_states + + hidden_states = self.resnet_up(hidden_states, temb) + + return hidden_states, skip_sample + + +class ResnetUpsampleBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + skip_time_act: bool = False, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = 
nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + up=True, + ) + ] + ) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, temb) + + return hidden_states + + +class SimpleCrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + resolution_idx: Optional[int] = None, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attention_head_dim: int = 1, + cross_attention_dim: int = 1280, + output_scale_factor: float = 1.0, + add_upsample: bool = True, + skip_time_act: bool = False, + only_cross_attention: bool = False, + cross_attention_norm: Optional[str] = None, + ): + super().__init__() + resnets = [] + attentions = [] + + self.has_cross_attention = True + self.attention_head_dim = attention_head_dim + + self.num_heads = out_channels // self.attention_head_dim + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + 
resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + ) + ) + + processor = ( + AttnAddedKVProcessor2_0() if hasattr(F, "scaled_dot_product_attention") else AttnAddedKVProcessor() + ) + + attentions.append( + Attention( + query_dim=out_channels, + cross_attention_dim=out_channels, + heads=self.num_heads, + dim_head=self.attention_head_dim, + added_kv_proj_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + bias=True, + upcast_softmax=True, + only_cross_attention=only_cross_attention, + cross_attention_norm=cross_attention_norm, + processor=processor, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList( + [ + ResnetBlock2D( + in_channels=out_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + skip_time_act=skip_time_act, + up=True, + ) + ] + ) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: 
+ cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + if attention_mask is None: + # if encoder_hidden_states is defined: we are doing cross-attn, so we should use cross-attn mask. + mask = None if encoder_hidden_states is None else encoder_attention_mask + else: + # when attention_mask is defined: we don't even check for encoder_attention_mask. + # this is to maintain compatibility with UnCLIP, which uses 'attention_mask' param for cross-attn masks. + # TODO: UnCLIP should express cross-attn mask via encoder_attention_mask param instead of via attention_mask. + # then we can simplify this whole if/else block to: + # mask = attention_mask if encoder_hidden_states is None else encoder_attention_mask + mask = attention_mask + + for resnet, attn in zip(self.resnets, self.attentions): + # resnet + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + else: + hidden_states = resnet(hidden_states, temb) + + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=mask, + **cross_attention_kwargs, + ) + + if self.upsamplers is not None: + for upsampler 
in self.upsamplers: + hidden_states = upsampler(hidden_states, temb) + + return hidden_states + + +class KUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + resolution_idx: int, + dropout: float = 0.0, + num_layers: int = 5, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + resnet_group_size: Optional[int] = 32, + add_upsample: bool = True, + ): + super().__init__() + resnets = [] + k_in_channels = 2 * out_channels + k_out_channels = in_channels + num_layers = num_layers - 1 + + for i in range(num_layers): + in_channels = k_in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + groups_out = out_channels // resnet_group_size + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=k_out_channels if (i == num_layers - 1) else out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=groups, + groups_out=groups_out, + dropout=dropout, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([KUpsample2D()]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + upsample_size: Optional[int] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + if len(args) > 0 or kwargs.get("scale", None) is not None: + deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`." 
+ deprecate("scale", "1.0.0", deprecation_message) + + res_hidden_states_tuple = res_hidden_states_tuple[-1] + if res_hidden_states_tuple is not None: + hidden_states = torch.cat([hidden_states, res_hidden_states_tuple], dim=1) + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + if is_torch_version(">=", "1.11.0"): + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb, use_reentrant=False + ) + else: + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), hidden_states, temb + ) + else: + hidden_states = resnet(hidden_states, temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +class KCrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + resolution_idx: int, + dropout: float = 0.0, + num_layers: int = 4, + resnet_eps: float = 1e-5, + resnet_act_fn: str = "gelu", + resnet_group_size: int = 32, + attention_head_dim: int = 1, # attention dim_head + cross_attention_dim: int = 768, + add_upsample: bool = True, + upcast_attention: bool = False, + ): + super().__init__() + resnets = [] + attentions = [] + + is_first_block = in_channels == out_channels == temb_channels + is_middle_block = in_channels != out_channels + add_self_attention = True if is_first_block else False + + self.has_cross_attention = True + self.attention_head_dim = attention_head_dim + + # in_channels, and out_channels for the block (k-unet) + k_in_channels = out_channels if is_first_block else 2 * out_channels + k_out_channels = in_channels + + num_layers = num_layers - 1 + + for i in range(num_layers): + in_channels = k_in_channels if i == 0 else out_channels + groups = in_channels // resnet_group_size + 
groups_out = out_channels // resnet_group_size + + if is_middle_block and (i == num_layers - 1): + conv_2d_out_channels = k_out_channels + else: + conv_2d_out_channels = None + + resnets.append( + ResnetBlockCondNorm2D( + in_channels=in_channels, + out_channels=out_channels, + conv_2d_out_channels=conv_2d_out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=groups, + groups_out=groups_out, + dropout=dropout, + non_linearity=resnet_act_fn, + time_embedding_norm="ada_group", + conv_shortcut_bias=False, + ) + ) + attentions.append( + KAttentionBlock( + k_out_channels if (i == num_layers - 1) else out_channels, + k_out_channels // attention_head_dim + if (i == num_layers - 1) + else out_channels // attention_head_dim, + attention_head_dim, + cross_attention_dim=cross_attention_dim, + temb_channels=temb_channels, + attention_bias=True, + add_self_attention=add_self_attention, + cross_attention_norm="layer_norm", + upcast_attention=upcast_attention, + ) + ) + + self.resnets = nn.ModuleList(resnets) + self.attentions = nn.ModuleList(attentions) + + if add_upsample: + self.upsamplers = nn.ModuleList([KUpsample2D()]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + self.resolution_idx = resolution_idx + + def forward( + self, + hidden_states: torch.FloatTensor, + res_hidden_states_tuple: Tuple[torch.FloatTensor, ...], + temb: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + upsample_size: Optional[int] = None, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + res_hidden_states_tuple = res_hidden_states_tuple[-1] + if res_hidden_states_tuple is not None: + hidden_states = torch.cat([hidden_states, res_hidden_states_tuple], dim=1) + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and 
self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(resnet), + hidden_states, + temb, + **ckpt_kwargs, + ) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn( + hidden_states, + encoder_hidden_states=encoder_hidden_states, + emb=temb, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states) + + return hidden_states + + +# can potentially later be renamed to `No-feed-forward` attention +class KAttentionBlock(nn.Module): + r""" + A basic Transformer block. + + Parameters: + dim (`int`): The number of channels in the input and output. + num_attention_heads (`int`): The number of heads to use for multi-head attention. + attention_head_dim (`int`): The number of channels in each head. + dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. + cross_attention_dim (`int`, *optional*): The size of the encoder_hidden_states vector for cross attention. + attention_bias (`bool`, *optional*, defaults to `False`): + Configure if the attention layers should contain a bias parameter. + upcast_attention (`bool`, *optional*, defaults to `False`): + Set to `True` to upcast the attention computation to `float32`. 
+ temb_channels (`int`, *optional*, defaults to 768): + The number of channels in the token embedding. + add_self_attention (`bool`, *optional*, defaults to `False`): + Set to `True` to add self-attention to the block. + cross_attention_norm (`str`, *optional*, defaults to `None`): + The type of normalization to use for the cross attention. Can be `None`, `layer_norm`, or `group_norm`. + group_size (`int`, *optional*, defaults to 32): + The number of groups to separate the channels into for group normalization. + """ + + def __init__( + self, + dim: int, + num_attention_heads: int, + attention_head_dim: int, + dropout: float = 0.0, + cross_attention_dim: Optional[int] = None, + attention_bias: bool = False, + upcast_attention: bool = False, + temb_channels: int = 768, # for ada_group_norm + add_self_attention: bool = False, + cross_attention_norm: Optional[str] = None, + group_size: int = 32, + ): + super().__init__() + self.add_self_attention = add_self_attention + + # 1. Self-Attn + if add_self_attention: + self.norm1 = AdaGroupNorm(temb_channels, dim, max(1, dim // group_size)) + self.attn1 = Attention( + query_dim=dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + cross_attention_dim=None, + cross_attention_norm=None, + ) + + # 2. 
Cross-Attn + self.norm2 = AdaGroupNorm(temb_channels, dim, max(1, dim // group_size)) + self.attn2 = Attention( + query_dim=dim, + cross_attention_dim=cross_attention_dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + upcast_attention=upcast_attention, + cross_attention_norm=cross_attention_norm, + ) + + def _to_3d(self, hidden_states: torch.FloatTensor, height: int, weight: int) -> torch.FloatTensor: + return hidden_states.permute(0, 2, 3, 1).reshape(hidden_states.shape[0], height * weight, -1) + + def _to_4d(self, hidden_states: torch.FloatTensor, height: int, weight: int) -> torch.FloatTensor: + return hidden_states.permute(0, 2, 1).reshape(hidden_states.shape[0], -1, height, weight) + + def forward( + self, + hidden_states: torch.FloatTensor, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + # TODO: mark emb as non-optional (self.norm2 requires it). + # requires assessing impact of change to positional param interface. + emb: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + ) -> torch.FloatTensor: + cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {} + if cross_attention_kwargs.get("scale", None) is not None: + logger.warning("Passing `scale` to `cross_attention_kwargs` is deprecated. `scale` will be ignored.") + + # 1. 
Self-Attention + if self.add_self_attention: + norm_hidden_states = self.norm1(hidden_states, emb) + + height, weight = norm_hidden_states.shape[2:] + norm_hidden_states = self._to_3d(norm_hidden_states, height, weight) + + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + attn_output = self._to_4d(attn_output, height, weight) + + hidden_states = attn_output + hidden_states + + # 2. Cross-Attention/None + norm_hidden_states = self.norm2(hidden_states, emb) + + height, weight = norm_hidden_states.shape[2:] + norm_hidden_states = self._to_3d(norm_hidden_states, height, weight) + attn_output = self.attn2( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask if encoder_hidden_states is None else encoder_attention_mask, + **cross_attention_kwargs, + ) + attn_output = self._to_4d(attn_output, height, weight) + + hidden_states = attn_output + hidden_states + + return hidden_states diff --git a/py/brushnet/unet_2d_condition.py b/py/brushnet/unet_2d_condition.py new file mode 100644 index 0000000..103cd08 --- /dev/null +++ b/py/brushnet/unet_2d_condition.py @@ -0,0 +1,1359 @@ +# Copyright 2024 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from dataclasses import dataclass +from typing import Any, Dict, List, Optional, Tuple, Union + +import torch +import torch.nn as nn +import torch.utils.checkpoint + +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.loaders import PeftAdapterMixin, UNet2DConditionLoadersMixin +from diffusers.utils import USE_PEFT_BACKEND, BaseOutput, deprecate, logging, scale_lora_layers, unscale_lora_layers +from diffusers.models.activations import get_activation +from diffusers.models.attention_processor import ( + ADDED_KV_ATTENTION_PROCESSORS, + CROSS_ATTENTION_PROCESSORS, + Attention, + AttentionProcessor, + AttnAddedKVProcessor, + AttnProcessor, +) +from diffusers.models.embeddings import ( + GaussianFourierProjection, + GLIGENTextBoundingboxProjection, + ImageHintTimeEmbedding, + ImageProjection, + ImageTimeEmbedding, + TextImageProjection, + TextImageTimeEmbedding, + TextTimeEmbedding, + TimestepEmbedding, + Timesteps, +) +from diffusers.models.modeling_utils import ModelMixin +from .unet_2d_blocks import ( + get_down_block, + get_mid_block, + get_up_block, +) + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +@dataclass +class UNet2DConditionOutput(BaseOutput): + """ + The output of [`UNet2DConditionModel`]. + + Args: + sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`): + The hidden states output conditioned on `encoder_hidden_states` input. Output of last layer of model. + """ + + sample: torch.FloatTensor = None + + +class UNet2DConditionModel(ModelMixin, ConfigMixin, UNet2DConditionLoadersMixin, PeftAdapterMixin): + r""" + A conditional 2D UNet model that takes a noisy sample, conditional state, and a timestep and returns a sample + shaped output. + + This model inherits from [`ModelMixin`]. Check the superclass documentation for it's generic methods implemented + for all models (such as downloading or saving). 
+ + Parameters: + sample_size (`int` or `Tuple[int, int]`, *optional*, defaults to `None`): + Height and width of input/output sample. + in_channels (`int`, *optional*, defaults to 4): Number of channels in the input sample. + out_channels (`int`, *optional*, defaults to 4): Number of channels in the output. + center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. + flip_sin_to_cos (`bool`, *optional*, defaults to `True`): + Whether to flip the sin to cos in the time embedding. + freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. + down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): + The tuple of downsample blocks to use. + mid_block_type (`str`, *optional*, defaults to `"UNetMidBlock2DCrossAttn"`): + Block type for middle of UNet, it can be one of `UNetMidBlock2DCrossAttn`, `UNetMidBlock2D`, or + `UNetMidBlock2DSimpleCrossAttn`. If `None`, the mid block layer is skipped. + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D")`): + The tuple of upsample blocks to use. + only_cross_attention(`bool` or `Tuple[bool]`, *optional*, default to `False`): + Whether to include self-attention in the basic transformer blocks, see + [`~models.attention.BasicTransformerBlock`]. + block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): + The tuple of output channels for each block. + layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. + downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. + mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. + dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. 
+ act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. + norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. + If `None`, normalization and activation layers is skipped in post-processing. + norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. + cross_attention_dim (`int` or `Tuple[int]`, *optional*, defaults to 1280): + The dimension of the cross attention features. + transformer_layers_per_block (`int`, `Tuple[int]`, or `Tuple[Tuple]` , *optional*, defaults to 1): + The number of transformer blocks of type [`~models.attention.BasicTransformerBlock`]. Only relevant for + [`~models.unet_2d_blocks.CrossAttnDownBlock2D`], [`~models.unet_2d_blocks.CrossAttnUpBlock2D`], + [`~models.unet_2d_blocks.UNetMidBlock2DCrossAttn`]. + reverse_transformer_layers_per_block : (`Tuple[Tuple]`, *optional*, defaults to None): + The number of transformer blocks of type [`~models.attention.BasicTransformerBlock`], in the upsampling + blocks of the U-Net. Only relevant if `transformer_layers_per_block` is of type `Tuple[Tuple]` and for + [`~models.unet_2d_blocks.CrossAttnDownBlock2D`], [`~models.unet_2d_blocks.CrossAttnUpBlock2D`], + [`~models.unet_2d_blocks.UNetMidBlock2DCrossAttn`]. + encoder_hid_dim (`int`, *optional*, defaults to None): + If `encoder_hid_dim_type` is defined, `encoder_hidden_states` will be projected from `encoder_hid_dim` + dimension to `cross_attention_dim`. + encoder_hid_dim_type (`str`, *optional*, defaults to `None`): + If given, the `encoder_hidden_states` and potentially other embeddings are down-projected to text + embeddings of dimension `cross_attention` according to `encoder_hid_dim_type`. + attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. + num_attention_heads (`int`, *optional*): + The number of attention heads. 
If not defined, defaults to `attention_head_dim` + resnet_time_scale_shift (`str`, *optional*, defaults to `"default"`): Time scale shift config + for ResNet blocks (see [`~models.resnet.ResnetBlock2D`]). Choose from `default` or `scale_shift`. + class_embed_type (`str`, *optional*, defaults to `None`): + The type of class embedding to use which is ultimately summed with the time embeddings. Choose from `None`, + `"timestep"`, `"identity"`, `"projection"`, or `"simple_projection"`. + addition_embed_type (`str`, *optional*, defaults to `None`): + Configures an optional embedding which will be summed with the time embeddings. Choose from `None` or + "text". "text" will use the `TextTimeEmbedding` layer. + addition_time_embed_dim: (`int`, *optional*, defaults to `None`): + Dimension for the timestep embeddings. + num_class_embeds (`int`, *optional*, defaults to `None`): + Input dimension of the learnable embedding matrix to be projected to `time_embed_dim`, when performing + class conditioning with `class_embed_type` equal to `None`. + time_embedding_type (`str`, *optional*, defaults to `positional`): + The type of position embedding to use for timesteps. Choose from `positional` or `fourier`. + time_embedding_dim (`int`, *optional*, defaults to `None`): + An optional override for the dimension of the projected time embedding. + time_embedding_act_fn (`str`, *optional*, defaults to `None`): + Optional activation function to use only once on the time embeddings before they are passed to the rest of + the UNet. Choose from `silu`, `mish`, `gelu`, and `swish`. + timestep_post_act (`str`, *optional*, defaults to `None`): + The second activation function to use in timestep embedding. Choose from `silu`, `mish` and `gelu`. + time_cond_proj_dim (`int`, *optional*, defaults to `None`): + The dimension of `cond_proj` layer in the timestep embedding. + conv_in_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_in` layer. 
+ conv_out_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_out` layer. + projection_class_embeddings_input_dim (`int`, *optional*): The dimension of the `class_labels` input when + `class_embed_type="projection"`. Required when `class_embed_type="projection"`. + class_embeddings_concat (`bool`, *optional*, defaults to `False`): Whether to concatenate the time + embeddings with the class embeddings. + mid_block_only_cross_attention (`bool`, *optional*, defaults to `None`): + Whether to use cross attention with the mid block when using the `UNetMidBlock2DSimpleCrossAttn`. If + `only_cross_attention` is given as a single boolean and `mid_block_only_cross_attention` is `None`, the + `only_cross_attention` value is used as the value for `mid_block_only_cross_attention`. Default to `False` + otherwise. + """ + + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + sample_size: Optional[int] = None, + in_channels: int = 4, + out_channels: int = 4, + center_input_sample: bool = False, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str] = ( + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "CrossAttnDownBlock2D", + "DownBlock2D", + ), + mid_block_type: Optional[str] = "UNetMidBlock2DCrossAttn", + up_block_types: Tuple[str] = ( + "UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D"), + only_cross_attention: Union[bool, Tuple[bool]] = False, + block_out_channels: Tuple[int] = (320, 640, 1280, 1280), + layers_per_block: Union[int, Tuple[int]] = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + dropout: float = 0.0, + act_fn: str = "silu", + norm_num_groups: Optional[int] = 32, + norm_eps: float = 1e-5, + cross_attention_dim: Union[int, Tuple[int]] = 1280, + transformer_layers_per_block: Union[int, Tuple[int], Tuple[Tuple]] = 1, + reverse_transformer_layers_per_block: Optional[Tuple[Tuple[int]]] = None, + encoder_hid_dim: Optional[int] 
= None, + encoder_hid_dim_type: Optional[str] = None, + attention_head_dim: Union[int, Tuple[int]] = 8, + num_attention_heads: Optional[Union[int, Tuple[int]]] = None, + dual_cross_attention: bool = False, + use_linear_projection: bool = False, + class_embed_type: Optional[str] = None, + addition_embed_type: Optional[str] = None, + addition_time_embed_dim: Optional[int] = None, + num_class_embeds: Optional[int] = None, + upcast_attention: bool = False, + resnet_time_scale_shift: str = "default", + resnet_skip_time_act: bool = False, + resnet_out_scale_factor: float = 1.0, + time_embedding_type: str = "positional", + time_embedding_dim: Optional[int] = None, + time_embedding_act_fn: Optional[str] = None, + timestep_post_act: Optional[str] = None, + time_cond_proj_dim: Optional[int] = None, + conv_in_kernel: int = 3, + conv_out_kernel: int = 3, + projection_class_embeddings_input_dim: Optional[int] = None, + attention_type: str = "default", + class_embeddings_concat: bool = False, + mid_block_only_cross_attention: Optional[bool] = None, + cross_attention_norm: Optional[str] = None, + addition_embed_type_num_heads: int = 64, + ): + super().__init__() + + self.sample_size = sample_size + + if num_attention_heads is not None: + raise ValueError( + "At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19." + ) + + # If `num_attention_heads` is not defined (which is the case for most models) + # it will default to `attention_head_dim`. This looks weird upon first reading it and it is. + # The reason for this behavior is to correct for incorrectly named variables that were introduced + # when this library was created. 
The incorrect naming was only discovered much later in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131 + # Changing `attention_head_dim` to `num_attention_heads` for 40,000+ configurations is too backwards breaking + # which is why we correct for the naming here. + num_attention_heads = num_attention_heads or attention_head_dim + + # Check inputs + self._check_config( + down_block_types=down_block_types, + up_block_types=up_block_types, + only_cross_attention=only_cross_attention, + block_out_channels=block_out_channels, + layers_per_block=layers_per_block, + cross_attention_dim=cross_attention_dim, + transformer_layers_per_block=transformer_layers_per_block, + reverse_transformer_layers_per_block=reverse_transformer_layers_per_block, + attention_head_dim=attention_head_dim, + num_attention_heads=num_attention_heads, + ) + + # input + conv_in_padding = (conv_in_kernel - 1) // 2 + self.conv_in = nn.Conv2d( + in_channels, block_out_channels[0], kernel_size=conv_in_kernel, padding=conv_in_padding + ) + + # time + time_embed_dim, timestep_input_dim = self._set_time_proj( + time_embedding_type, + block_out_channels=block_out_channels, + flip_sin_to_cos=flip_sin_to_cos, + freq_shift=freq_shift, + time_embedding_dim=time_embedding_dim, + ) + + self.time_embedding = TimestepEmbedding( + timestep_input_dim, + time_embed_dim, + act_fn=act_fn, + post_act_fn=timestep_post_act, + cond_proj_dim=time_cond_proj_dim, + ) + + self._set_encoder_hid_proj( + encoder_hid_dim_type, + cross_attention_dim=cross_attention_dim, + encoder_hid_dim=encoder_hid_dim, + ) + + # class embedding + self._set_class_embedding( + class_embed_type, + act_fn=act_fn, + num_class_embeds=num_class_embeds, + projection_class_embeddings_input_dim=projection_class_embeddings_input_dim, + time_embed_dim=time_embed_dim, + timestep_input_dim=timestep_input_dim, + ) + + self._set_add_embedding( + addition_embed_type, + addition_embed_type_num_heads=addition_embed_type_num_heads, + 
addition_time_embed_dim=addition_time_embed_dim, + cross_attention_dim=cross_attention_dim, + encoder_hid_dim=encoder_hid_dim, + flip_sin_to_cos=flip_sin_to_cos, + freq_shift=freq_shift, + projection_class_embeddings_input_dim=projection_class_embeddings_input_dim, + time_embed_dim=time_embed_dim, + ) + + if time_embedding_act_fn is None: + self.time_embed_act = None + else: + self.time_embed_act = get_activation(time_embedding_act_fn) + + self.down_blocks = nn.ModuleList([]) + self.up_blocks = nn.ModuleList([]) + + if isinstance(only_cross_attention, bool): + if mid_block_only_cross_attention is None: + mid_block_only_cross_attention = only_cross_attention + + only_cross_attention = [only_cross_attention] * len(down_block_types) + + if mid_block_only_cross_attention is None: + mid_block_only_cross_attention = False + + if isinstance(num_attention_heads, int): + num_attention_heads = (num_attention_heads,) * len(down_block_types) + + if isinstance(attention_head_dim, int): + attention_head_dim = (attention_head_dim,) * len(down_block_types) + + if isinstance(cross_attention_dim, int): + cross_attention_dim = (cross_attention_dim,) * len(down_block_types) + + if isinstance(layers_per_block, int): + layers_per_block = [layers_per_block] * len(down_block_types) + + if isinstance(transformer_layers_per_block, int): + transformer_layers_per_block = [transformer_layers_per_block] * len(down_block_types) + + if class_embeddings_concat: + # The time embeddings are concatenated with the class embeddings. 
The dimension of the + # time embeddings passed to the down, middle, and up blocks is twice the dimension of the + # regular time embeddings + blocks_time_embed_dim = time_embed_dim * 2 + else: + blocks_time_embed_dim = time_embed_dim + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block[i], + transformer_layers_per_block=transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + temb_channels=blocks_time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim[i], + num_attention_heads=num_attention_heads[i], + downsample_padding=downsample_padding, + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + resnet_out_scale_factor=resnet_out_scale_factor, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + dropout=dropout, + ) + self.down_blocks.append(down_block) + + # mid + self.mid_block = get_mid_block( + mid_block_type, + temb_channels=blocks_time_embed_dim, + in_channels=block_out_channels[-1], + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + output_scale_factor=mid_block_scale_factor, + transformer_layers_per_block=transformer_layers_per_block[-1], + num_attention_heads=num_attention_heads[-1], + cross_attention_dim=cross_attention_dim[-1], + 
dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + mid_block_only_cross_attention=mid_block_only_cross_attention, + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[-1], + dropout=dropout, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + reversed_num_attention_heads = list(reversed(num_attention_heads)) + reversed_layers_per_block = list(reversed(layers_per_block)) + reversed_cross_attention_dim = list(reversed(cross_attention_dim)) + reversed_transformer_layers_per_block = ( + list(reversed(transformer_layers_per_block)) + if reverse_transformer_layers_per_block is None + else reverse_transformer_layers_per_block + ) + only_cross_attention = list(reversed(only_cross_attention)) + + output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=reversed_layers_per_block[i] + 1, + transformer_layers_per_block=reversed_transformer_layers_per_block[i], + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=blocks_time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resolution_idx=i, + resnet_groups=norm_num_groups, + 
cross_attention_dim=reversed_cross_attention_dim[i], + num_attention_heads=reversed_num_attention_heads[i], + dual_cross_attention=dual_cross_attention, + use_linear_projection=use_linear_projection, + only_cross_attention=only_cross_attention[i], + upcast_attention=upcast_attention, + resnet_time_scale_shift=resnet_time_scale_shift, + attention_type=attention_type, + resnet_skip_time_act=resnet_skip_time_act, + resnet_out_scale_factor=resnet_out_scale_factor, + cross_attention_norm=cross_attention_norm, + attention_head_dim=attention_head_dim[i] if attention_head_dim[i] is not None else output_channel, + dropout=dropout, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + if norm_num_groups is not None: + self.conv_norm_out = nn.GroupNorm( + num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps + ) + + self.conv_act = get_activation(act_fn) + + else: + self.conv_norm_out = None + self.conv_act = None + + conv_out_padding = (conv_out_kernel - 1) // 2 + self.conv_out = nn.Conv2d( + block_out_channels[0], out_channels, kernel_size=conv_out_kernel, padding=conv_out_padding + ) + + self._set_pos_net_if_use_gligen(attention_type=attention_type, cross_attention_dim=cross_attention_dim) + + def _check_config( + self, + down_block_types: Tuple[str], + up_block_types: Tuple[str], + only_cross_attention: Union[bool, Tuple[bool]], + block_out_channels: Tuple[int], + layers_per_block: Union[int, Tuple[int]], + cross_attention_dim: Union[int, Tuple[int]], + transformer_layers_per_block: Union[int, Tuple[int], Tuple[Tuple[int]]], + reverse_transformer_layers_per_block: bool, + attention_head_dim: int, + num_attention_heads: Optional[Union[int, Tuple[int]]], + ): + if len(down_block_types) != len(up_block_types): + raise ValueError( + f"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}." 
+ ) + + if len(block_out_channels) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(num_attention_heads, int) and len(num_attention_heads) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `num_attention_heads` as `down_block_types`. `num_attention_heads`: {num_attention_heads}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(attention_head_dim, int) and len(attention_head_dim) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `attention_head_dim` as `down_block_types`. `attention_head_dim`: {attention_head_dim}. `down_block_types`: {down_block_types}." + ) + + if isinstance(cross_attention_dim, list) and len(cross_attention_dim) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `cross_attention_dim` as `down_block_types`. `cross_attention_dim`: {cross_attention_dim}. `down_block_types`: {down_block_types}." + ) + + if not isinstance(layers_per_block, int) and len(layers_per_block) != len(down_block_types): + raise ValueError( + f"Must provide the same number of `layers_per_block` as `down_block_types`. `layers_per_block`: {layers_per_block}. `down_block_types`: {down_block_types}." 
+ ) + if isinstance(transformer_layers_per_block, list) and reverse_transformer_layers_per_block is None: + for layer_number_per_block in transformer_layers_per_block: + if isinstance(layer_number_per_block, list): + raise ValueError("Must provide 'reverse_transformer_layers_per_block` if using asymmetrical UNet.") + + def _set_time_proj( + self, + time_embedding_type: str, + block_out_channels: int, + flip_sin_to_cos: bool, + freq_shift: float, + time_embedding_dim: int, + ) -> Tuple[int, int]: + if time_embedding_type == "fourier": + time_embed_dim = time_embedding_dim or block_out_channels[0] * 2 + if time_embed_dim % 2 != 0: + raise ValueError(f"`time_embed_dim` should be divisible by 2, but is {time_embed_dim}.") + self.time_proj = GaussianFourierProjection( + time_embed_dim // 2, set_W_to_weight=False, log=False, flip_sin_to_cos=flip_sin_to_cos + ) + timestep_input_dim = time_embed_dim + elif time_embedding_type == "positional": + time_embed_dim = time_embedding_dim or block_out_channels[0] * 4 + + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + else: + raise ValueError( + f"{time_embedding_type} does not exist. Please make sure to use one of `fourier` or `positional`." + ) + + return time_embed_dim, timestep_input_dim + + def _set_encoder_hid_proj( + self, + encoder_hid_dim_type: Optional[str], + cross_attention_dim: Union[int, Tuple[int]], + encoder_hid_dim: Optional[int], + ): + if encoder_hid_dim_type is None and encoder_hid_dim is not None: + encoder_hid_dim_type = "text_proj" + self.register_to_config(encoder_hid_dim_type=encoder_hid_dim_type) + logger.info("encoder_hid_dim_type defaults to 'text_proj' as `encoder_hid_dim` is defined.") + + if encoder_hid_dim is None and encoder_hid_dim_type is not None: + raise ValueError( + f"`encoder_hid_dim` has to be defined when `encoder_hid_dim_type` is set to {encoder_hid_dim_type}." 
+ ) + + if encoder_hid_dim_type == "text_proj": + self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim) + elif encoder_hid_dim_type == "text_image_proj": + # image_embed_dim DOESN'T have to be `cross_attention_dim`. To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image_proj"` (Kandinsky 2.1)` + self.encoder_hid_proj = TextImageProjection( + text_embed_dim=encoder_hid_dim, + image_embed_dim=cross_attention_dim, + cross_attention_dim=cross_attention_dim, + ) + elif encoder_hid_dim_type == "image_proj": + # Kandinsky 2.2 + self.encoder_hid_proj = ImageProjection( + image_embed_dim=encoder_hid_dim, + cross_attention_dim=cross_attention_dim, + ) + elif encoder_hid_dim_type is not None: + raise ValueError( + f"encoder_hid_dim_type: {encoder_hid_dim_type} must be None, 'text_proj' or 'text_image_proj'." + ) + else: + self.encoder_hid_proj = None + + def _set_class_embedding( + self, + class_embed_type: Optional[str], + act_fn: str, + num_class_embeds: Optional[int], + projection_class_embeddings_input_dim: Optional[int], + time_embed_dim: int, + timestep_input_dim: int, + ): + if class_embed_type is None and num_class_embeds is not None: + self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim) + elif class_embed_type == "timestep": + self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim, act_fn=act_fn) + elif class_embed_type == "identity": + self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim) + elif class_embed_type == "projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set" + ) + # The projection `class_embed_type` is the same as the timestep `class_embed_type` except + # 1. 
the `class_labels` inputs are not first converted to sinusoidal embeddings + # 2. it projects from an arbitrary input dimension. + # + # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations. + # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings. + # As a result, `TimestepEmbedding` can be passed arbitrary vectors. + self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + elif class_embed_type == "simple_projection": + if projection_class_embeddings_input_dim is None: + raise ValueError( + "`class_embed_type`: 'simple_projection' requires `projection_class_embeddings_input_dim` be set" + ) + self.class_embedding = nn.Linear(projection_class_embeddings_input_dim, time_embed_dim) + else: + self.class_embedding = None + + def _set_add_embedding( + self, + addition_embed_type: str, + addition_embed_type_num_heads: int, + addition_time_embed_dim: Optional[int], + flip_sin_to_cos: bool, + freq_shift: float, + cross_attention_dim: Optional[int], + encoder_hid_dim: Optional[int], + projection_class_embeddings_input_dim: Optional[int], + time_embed_dim: int, + ): + if addition_embed_type == "text": + if encoder_hid_dim is not None: + text_time_embedding_from_dim = encoder_hid_dim + else: + text_time_embedding_from_dim = cross_attention_dim + + self.add_embedding = TextTimeEmbedding( + text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads + ) + elif addition_embed_type == "text_image": + # text_embed_dim and image_embed_dim DON'T have to be `cross_attention_dim`. 
To not clutter the __init__ too much + # they are set to `cross_attention_dim` here as this is exactly the required dimension for the currently only use + # case when `addition_embed_type == "text_image"` (Kandinsky 2.1)` + self.add_embedding = TextImageTimeEmbedding( + text_embed_dim=cross_attention_dim, image_embed_dim=cross_attention_dim, time_embed_dim=time_embed_dim + ) + elif addition_embed_type == "text_time": + self.add_time_proj = Timesteps(addition_time_embed_dim, flip_sin_to_cos, freq_shift) + self.add_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim) + elif addition_embed_type == "image": + # Kandinsky 2.2 + self.add_embedding = ImageTimeEmbedding(image_embed_dim=encoder_hid_dim, time_embed_dim=time_embed_dim) + elif addition_embed_type == "image_hint": + # Kandinsky 2.2 ControlNet + self.add_embedding = ImageHintTimeEmbedding(image_embed_dim=encoder_hid_dim, time_embed_dim=time_embed_dim) + elif addition_embed_type is not None: + raise ValueError(f"addition_embed_type: {addition_embed_type} must be None, 'text' or 'text_image'.") + + def _set_pos_net_if_use_gligen(self, attention_type: str, cross_attention_dim: int): + if attention_type in ["gated", "gated-text-image"]: + positive_len = 768 + if isinstance(cross_attention_dim, int): + positive_len = cross_attention_dim + elif isinstance(cross_attention_dim, tuple) or isinstance(cross_attention_dim, list): + positive_len = cross_attention_dim[0] + + feature_type = "text-only" if attention_type == "gated" else "text-image" + self.position_net = GLIGENTextBoundingboxProjection( + positive_len=positive_len, out_dim=cross_attention_dim, feature_type=feature_type + ) + + @property + def attn_processors(self) -> Dict[str, AttentionProcessor]: + r""" + Returns: + `dict` of attention processors: A dictionary containing all attention processors used in the model with + indexed by its weight name. 
+ """ + # set recursively + processors = {} + + def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]): + if hasattr(module, "get_processor"): + processors[f"{name}.processor"] = module.get_processor(return_deprecated_lora=True) + + for sub_name, child in module.named_children(): + fn_recursive_add_processors(f"{name}.{sub_name}", child, processors) + + return processors + + for name, module in self.named_children(): + fn_recursive_add_processors(name, module, processors) + + return processors + + def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]): + r""" + Sets the attention processor to use to compute attention. + + Parameters: + processor (`dict` of `AttentionProcessor` or only `AttentionProcessor`): + The instantiated processor class or a dictionary of processor classes that will be set as the processor + for **all** `Attention` layers. + + If `processor` is a dict, the key needs to define the path to the corresponding cross attention + processor. This is strongly recommended when setting trainable attention processors. + + """ + count = len(self.attn_processors.keys()) + + if isinstance(processor, dict) and len(processor) != count: + raise ValueError( + f"A dict of processors was passed, but the number of processors {len(processor)} does not match the" + f" number of attention layers: {count}. Please make sure to pass {count} processor classes." 
+ ) + + def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor): + if hasattr(module, "set_processor"): + if not isinstance(processor, dict): + module.set_processor(processor) + else: + module.set_processor(processor.pop(f"{name}.processor")) + + for sub_name, child in module.named_children(): + fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor) + + for name, module in self.named_children(): + fn_recursive_attn_processor(name, module, processor) + + def set_default_attn_processor(self): + """ + Disables custom attention processors and sets the default attention implementation. + """ + if all(proc.__class__ in ADDED_KV_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnAddedKVProcessor() + elif all(proc.__class__ in CROSS_ATTENTION_PROCESSORS for proc in self.attn_processors.values()): + processor = AttnProcessor() + else: + raise ValueError( + f"Cannot call `set_default_attn_processor` when attention processors are of type {next(iter(self.attn_processors.values()))}" + ) + + self.set_attn_processor(processor) + + def set_attention_slice(self, slice_size: Union[str, int, List[int]] = "auto"): + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module splits the input tensor in slices to compute attention in + several steps. This is useful for saving some memory in exchange for a small decrease in speed. + + Args: + slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `"auto"`): + When `"auto"`, input to the attention heads is halved, so attention is computed in two steps. If + `"max"`, maximum amount of memory is saved by running only one slice at a time. If a number is + provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim` + must be a multiple of `slice_size`. 
+ """ + sliceable_head_dims = [] + + def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module): + if hasattr(module, "set_attention_slice"): + sliceable_head_dims.append(module.sliceable_head_dim) + + for child in module.children(): + fn_recursive_retrieve_sliceable_dims(child) + + # retrieve number of attention layers + for module in self.children(): + fn_recursive_retrieve_sliceable_dims(module) + + num_sliceable_layers = len(sliceable_head_dims) + + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = [dim // 2 for dim in sliceable_head_dims] + elif slice_size == "max": + # make smallest slice possible + slice_size = num_sliceable_layers * [1] + + slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size + + if len(slice_size) != len(sliceable_head_dims): + raise ValueError( + f"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different" + f" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}." + ) + + for i in range(len(slice_size)): + size = slice_size[i] + dim = sliceable_head_dims[i] + if size is not None and size > dim: + raise ValueError(f"size {size} has to be smaller or equal to {dim}.") + + # Recursively walk through all the children. 
+ # Any children which exposes the set_attention_slice method + # gets the message + def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]): + if hasattr(module, "set_attention_slice"): + module.set_attention_slice(slice_size.pop()) + + for child in module.children(): + fn_recursive_set_attention_slice(child, slice_size) + + reversed_slice_size = list(reversed(slice_size)) + for module in self.children(): + fn_recursive_set_attention_slice(module, reversed_slice_size) + + def _set_gradient_checkpointing(self, module, value=False): + if hasattr(module, "gradient_checkpointing"): + module.gradient_checkpointing = value + + def enable_freeu(self, s1: float, s2: float, b1: float, b2: float): + r"""Enables the FreeU mechanism from https://arxiv.org/abs/2309.11497. + + The suffixes after the scaling factors represent the stage blocks where they are being applied. + + Please refer to the [official repository](https://github.com/ChenyangSi/FreeU) for combinations of values that + are known to work well for different pipelines such as Stable Diffusion v1, v2, and Stable Diffusion XL. + + Args: + s1 (`float`): + Scaling factor for stage 1 to attenuate the contributions of the skip features. This is done to + mitigate the "oversmoothing effect" in the enhanced denoising process. + s2 (`float`): + Scaling factor for stage 2 to attenuate the contributions of the skip features. This is done to + mitigate the "oversmoothing effect" in the enhanced denoising process. + b1 (`float`): Scaling factor for stage 1 to amplify the contributions of backbone features. + b2 (`float`): Scaling factor for stage 2 to amplify the contributions of backbone features. 
+ """ + for i, upsample_block in enumerate(self.up_blocks): + setattr(upsample_block, "s1", s1) + setattr(upsample_block, "s2", s2) + setattr(upsample_block, "b1", b1) + setattr(upsample_block, "b2", b2) + + def disable_freeu(self): + """Disables the FreeU mechanism.""" + freeu_keys = {"s1", "s2", "b1", "b2"} + for i, upsample_block in enumerate(self.up_blocks): + for k in freeu_keys: + if hasattr(upsample_block, k) or getattr(upsample_block, k, None) is not None: + setattr(upsample_block, k, None) + + def fuse_qkv_projections(self): + """ + Enables fused QKV projections. For self-attention modules, all projection matrices (i.e., query, key, value) + are fused. For cross-attention modules, key and value projection matrices are fused. + + + + This API is 🧪 experimental. + + + """ + self.original_attn_processors = None + + for _, attn_processor in self.attn_processors.items(): + if "Added" in str(attn_processor.__class__.__name__): + raise ValueError("`fuse_qkv_projections()` is not supported for models having added KV projections.") + + self.original_attn_processors = self.attn_processors + + for module in self.modules(): + if isinstance(module, Attention): + module.fuse_projections(fuse=True) + + def unfuse_qkv_projections(self): + """Disables the fused QKV projection if enabled. + + + + This API is 🧪 experimental. + + + + """ + if self.original_attn_processors is not None: + self.set_attn_processor(self.original_attn_processors) + + def unload_lora(self): + """Unloads LoRA weights.""" + deprecate( + "unload_lora", + "0.28.0", + "Calling `unload_lora()` is deprecated and will be removed in a future version. 
Please install `peft` and then call `disable_adapters().", + ) + for module in self.modules(): + if hasattr(module, "set_lora_layer"): + module.set_lora_layer(None) + + def get_time_embed( + self, sample: torch.Tensor, timestep: Union[torch.Tensor, float, int] + ) -> Optional[torch.Tensor]: + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + # This would be a good case for the `match` statement (Python 3.10+) + is_mps = sample.device.type == "mps" + if isinstance(timestep, float): + dtype = torch.float32 if is_mps else torch.float64 + else: + dtype = torch.int32 if is_mps else torch.int64 + timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device) + elif len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + # `Timesteps` does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=sample.dtype) + return t_emb + + def get_class_embed(self, sample: torch.Tensor, class_labels: Optional[torch.Tensor]) -> Optional[torch.Tensor]: + class_emb = None + if self.class_embedding is not None: + if class_labels is None: + raise ValueError("class_labels should be provided when num_class_embeds > 0") + + if self.config.class_embed_type == "timestep": + class_labels = self.time_proj(class_labels) + + # `Timesteps` does not contain any weights and will always return f32 tensors + # there might be better ways to encapsulate this. 
+ class_labels = class_labels.to(dtype=sample.dtype) + + class_emb = self.class_embedding(class_labels).to(dtype=sample.dtype) + return class_emb + + def get_aug_embed( + self, emb: torch.Tensor, encoder_hidden_states: torch.Tensor, added_cond_kwargs: Dict[str, Any] + ) -> Optional[torch.Tensor]: + aug_emb = None + if self.config.addition_embed_type == "text": + aug_emb = self.add_embedding(encoder_hidden_states) + elif self.config.addition_embed_type == "text_image": + # Kandinsky 2.1 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_image' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`" + ) + + image_embs = added_cond_kwargs.get("image_embeds") + text_embs = added_cond_kwargs.get("text_embeds", encoder_hidden_states) + aug_emb = self.add_embedding(text_embs, image_embs) + elif self.config.addition_embed_type == "text_time": + # SDXL - style + if "text_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `text_embeds` to be passed in `added_cond_kwargs`" + ) + text_embeds = added_cond_kwargs.get("text_embeds") + if "time_ids" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'text_time' which requires the keyword argument `time_ids` to be passed in `added_cond_kwargs`" + ) + time_ids = added_cond_kwargs.get("time_ids") + time_embeds = self.add_time_proj(time_ids.flatten()) + time_embeds = time_embeds.reshape((text_embeds.shape[0], -1)) + add_embeds = torch.concat([text_embeds, time_embeds], dim=-1) + add_embeds = add_embeds.to(emb.dtype) + aug_emb = self.add_embedding(add_embeds) + elif self.config.addition_embed_type == "image": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} 
has the config param `addition_embed_type` set to 'image' which requires the keyword argument `image_embeds` to be passed in `added_cond_kwargs`" + ) + image_embs = added_cond_kwargs.get("image_embeds") + aug_emb = self.add_embedding(image_embs) + elif self.config.addition_embed_type == "image_hint": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs or "hint" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `addition_embed_type` set to 'image_hint' which requires the keyword arguments `image_embeds` and `hint` to be passed in `added_cond_kwargs`" + ) + image_embs = added_cond_kwargs.get("image_embeds") + hint = added_cond_kwargs.get("hint") + aug_emb = self.add_embedding(image_embs, hint) + return aug_emb + + def process_encoder_hidden_states( + self, encoder_hidden_states: torch.Tensor, added_cond_kwargs: Dict[str, Any] + ) -> torch.Tensor: + if self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "text_proj": + encoder_hidden_states = self.encoder_hid_proj(encoder_hidden_states) + elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "text_image_proj": + # Kandinsky 2.1 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'text_image_proj' which requires the keyword argument `image_embeds` to be passed in `added_conditions`" + ) + + image_embeds = added_cond_kwargs.get("image_embeds") + encoder_hidden_states = self.encoder_hid_proj(encoder_hidden_states, image_embeds) + elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "image_proj": + # Kandinsky 2.2 - style + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'image_proj' which requires the keyword argument `image_embeds` to be passed in `added_conditions`" + ) + image_embeds = 
added_cond_kwargs.get("image_embeds") + encoder_hidden_states = self.encoder_hid_proj(image_embeds) + elif self.encoder_hid_proj is not None and self.config.encoder_hid_dim_type == "ip_image_proj": + if "image_embeds" not in added_cond_kwargs: + raise ValueError( + f"{self.__class__} has the config param `encoder_hid_dim_type` set to 'ip_image_proj' which requires the keyword argument `image_embeds` to be passed in `added_conditions`" + ) + image_embeds = added_cond_kwargs.get("image_embeds") + image_embeds = self.encoder_hid_proj(image_embeds) + encoder_hidden_states = (encoder_hidden_states, image_embeds) + return encoder_hidden_states + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + class_labels: Optional[torch.Tensor] = None, + timestep_cond: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + cross_attention_kwargs: Optional[Dict[str, Any]] = None, + added_cond_kwargs: Optional[Dict[str, torch.Tensor]] = None, + down_block_additional_residuals: Optional[Tuple[torch.Tensor]] = None, + mid_block_additional_residual: Optional[torch.Tensor] = None, + down_intrablock_additional_residuals: Optional[Tuple[torch.Tensor]] = None, + encoder_attention_mask: Optional[torch.Tensor] = None, + return_dict: bool = True, + down_block_add_samples: Optional[Tuple[torch.Tensor]] = None, + mid_block_add_sample: Optional[Tuple[torch.Tensor]] = None, + up_block_add_samples: Optional[Tuple[torch.Tensor]] = None, + ) -> Union[UNet2DConditionOutput, Tuple]: + r""" + The [`UNet2DConditionModel`] forward method. + + Args: + sample (`torch.FloatTensor`): + The noisy input tensor with the following shape `(batch, channel, height, width)`. + timestep (`torch.FloatTensor` or `float` or `int`): The number of timesteps to denoise an input. + encoder_hidden_states (`torch.FloatTensor`): + The encoder hidden states with shape `(batch, sequence_length, feature_dim)`. 
+ class_labels (`torch.Tensor`, *optional*, defaults to `None`): + Optional class labels for conditioning. Their embeddings will be summed with the timestep embeddings. + timestep_cond: (`torch.Tensor`, *optional*, defaults to `None`): + Conditional embeddings for timestep. If provided, the embeddings will be summed with the samples passed + through the `self.time_embedding` layer to obtain the timestep embeddings. + attention_mask (`torch.Tensor`, *optional*, defaults to `None`): + An attention mask of shape `(batch, key_tokens)` is applied to `encoder_hidden_states`. If `1` the mask + is kept, otherwise if `0` it is discarded. Mask will be converted into a bias, which adds large + negative values to the attention scores corresponding to "discard" tokens. + cross_attention_kwargs (`dict`, *optional*): + A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under + `self.processor` in + [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py). + added_cond_kwargs: (`dict`, *optional*): + A kwargs dictionary containing additional embeddings that if specified are added to the embeddings that + are passed along to the UNet blocks. + down_block_additional_residuals: (`tuple` of `torch.Tensor`, *optional*): + A tuple of tensors that if specified are added to the residuals of down unet blocks. + mid_block_additional_residual: (`torch.Tensor`, *optional*): + A tensor that if specified is added to the residual of the middle unet block. + down_intrablock_additional_residuals (`tuple` of `torch.Tensor`, *optional*): + additional residuals to be added within UNet down blocks, for example from T2I-Adapter side model(s) + encoder_attention_mask (`torch.Tensor`): + A cross-attention mask of shape `(batch, sequence_length)` is applied to `encoder_hidden_states`. If + `True` the mask is kept, otherwise if `False` it is discarded. 
Mask will be converted into a bias, + which adds large negative values to the attention scores corresponding to "discard" tokens. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`~models.unets.unet_2d_condition.UNet2DConditionOutput`] instead of a plain + tuple. + + Returns: + [`~models.unets.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: + If `return_dict` is True, an [`~models.unets.unet_2d_condition.UNet2DConditionOutput`] is returned, + otherwise a `tuple` is returned where the first element is the sample tensor. + """ + # By default samples have to be AT least a multiple of the overall upsampling factor. + # The overall upsampling factor is equal to 2 ** (# num of upsampling layers). + # However, the upsampling interpolation output size can be forced to fit any upsampling size + # on the fly if necessary. + default_overall_up_factor = 2 ** self.num_upsamplers + + # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` + forward_upsample_size = False + upsample_size = None + + for dim in sample.shape[-2:]: + if dim % default_overall_up_factor != 0: + # Forward upsample size to force interpolation output size. + forward_upsample_size = True + break + + # ensure attention_mask is a bias, and give it a singleton query_tokens dimension + # expects mask of shape: + # [batch, key_tokens] + # adds singleton query_tokens dimension: + # [batch, 1, key_tokens] + # this helps to broadcast it as a bias over attention scores, which will be in one of the following shapes: + # [batch, heads, query_tokens, key_tokens] (e.g. torch sdp attn) + # [batch * heads, query_tokens, key_tokens] (e.g. 
xformers or classic attn) + if attention_mask is not None: + # assume that mask is expressed as: + # (1 = keep, 0 = discard) + # convert mask into a bias that can be added to attention scores: + # (keep = +0, discard = -10000.0) + attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0 + attention_mask = attention_mask.unsqueeze(1) + + # convert encoder_attention_mask to a bias the same way we do for attention_mask + if encoder_attention_mask is not None: + encoder_attention_mask = (1 - encoder_attention_mask.to(sample.dtype)) * -10000.0 + encoder_attention_mask = encoder_attention_mask.unsqueeze(1) + + # 0. center input if necessary + if self.config.center_input_sample: + sample = 2 * sample - 1.0 + + # 1. time + t_emb = self.get_time_embed(sample=sample, timestep=timestep) + emb = self.time_embedding(t_emb, timestep_cond) + aug_emb = None + + class_emb = self.get_class_embed(sample=sample, class_labels=class_labels) + if class_emb is not None: + if self.config.class_embeddings_concat: + emb = torch.cat([emb, class_emb], dim=-1) + else: + emb = emb + class_emb + + aug_emb = self.get_aug_embed( + emb=emb, encoder_hidden_states=encoder_hidden_states, added_cond_kwargs=added_cond_kwargs + ) + if self.config.addition_embed_type == "image_hint": + aug_emb, hint = aug_emb + sample = torch.cat([sample, hint], dim=1) + + emb = emb + aug_emb if aug_emb is not None else emb + + if self.time_embed_act is not None: + emb = self.time_embed_act(emb) + + encoder_hidden_states = self.process_encoder_hidden_states( + encoder_hidden_states=encoder_hidden_states, added_cond_kwargs=added_cond_kwargs + ) + + # 2. 
pre-process + sample = self.conv_in(sample) + + # 2.5 GLIGEN position net + if cross_attention_kwargs is not None and cross_attention_kwargs.get("gligen", None) is not None: + cross_attention_kwargs = cross_attention_kwargs.copy() + gligen_args = cross_attention_kwargs.pop("gligen") + cross_attention_kwargs["gligen"] = {"objs": self.position_net(**gligen_args)} + + # 3. down + # we're popping the `scale` instead of getting it because otherwise `scale` will be propagated + # to the internal blocks and will raise deprecation warnings. this will be confusing for our users. + if cross_attention_kwargs is not None: + cross_attention_kwargs = cross_attention_kwargs.copy() + lora_scale = cross_attention_kwargs.pop("scale", 1.0) + else: + lora_scale = 1.0 + + if USE_PEFT_BACKEND: + # weight the lora layers by setting `lora_scale` for each PEFT layer + scale_lora_layers(self, lora_scale) + + is_controlnet = mid_block_additional_residual is not None and down_block_additional_residuals is not None + # using new arg down_intrablock_additional_residuals for T2I-Adapters, to distinguish from controlnets + is_adapter = down_intrablock_additional_residuals is not None + # maintain backward compatibility for legacy usage, where + # T2I-Adapter and ControlNet both use down_block_additional_residuals arg + # but can only use one or the other + is_brushnet = down_block_add_samples is not None and mid_block_add_sample is not None and up_block_add_samples is not None + if not is_adapter and mid_block_additional_residual is None and down_block_additional_residuals is not None: + deprecate( + "T2I should not use down_block_additional_residuals", + "1.3.0", + "Passing intrablock residual connections with `down_block_additional_residuals` is deprecated \ + and will be removed in diffusers 1.3.0. `down_block_additional_residuals` should only be used \ + for ControlNet. Please make sure use `down_intrablock_additional_residuals` instead. 
", + standard_warn=False, + ) + down_intrablock_additional_residuals = down_block_additional_residuals + is_adapter = True + + down_block_res_samples = (sample,) + + if is_brushnet: + sample = sample + down_block_add_samples.pop(0) + + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "has_cross_attention") and downsample_block.has_cross_attention: + # For t2i-adapter CrossAttnDownBlock2D + additional_residuals = {} + if is_adapter and len(down_intrablock_additional_residuals) > 0: + additional_residuals["additional_residuals"] = down_intrablock_additional_residuals.pop(0) + + i = len(down_block_add_samples) + + if is_brushnet and len(down_block_add_samples) > 0: + additional_residuals["down_block_add_samples"] = [down_block_add_samples.pop(0) + for _ in range( + len(downsample_block.resnets) + (downsample_block.downsamplers != None))] + + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + **additional_residuals, + ) + else: + additional_residuals = {} + + i = len(down_block_add_samples) + + if is_brushnet and len(down_block_add_samples) > 0: + additional_residuals["down_block_add_samples"] = [down_block_add_samples.pop(0) + for _ in range( + len(downsample_block.resnets) + (downsample_block.downsamplers != None))] + + sample, res_samples = downsample_block(hidden_states=sample, temb=emb, **additional_residuals) + if is_adapter and len(down_intrablock_additional_residuals) > 0: + sample += down_intrablock_additional_residuals.pop(0) + + down_block_res_samples += res_samples + + if is_controlnet: + new_down_block_res_samples = () + + for down_block_res_sample, down_block_additional_residual in zip( + down_block_res_samples, down_block_additional_residuals + ): + down_block_res_sample = down_block_res_sample + down_block_additional_residual + 
new_down_block_res_samples = new_down_block_res_samples + (down_block_res_sample,) + + down_block_res_samples = new_down_block_res_samples + + # 4. mid + if self.mid_block is not None: + if hasattr(self.mid_block, "has_cross_attention") and self.mid_block.has_cross_attention: + sample = self.mid_block( + sample, + emb, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + cross_attention_kwargs=cross_attention_kwargs, + encoder_attention_mask=encoder_attention_mask, + ) + else: + sample = self.mid_block(sample, emb) + + # To support T2I-Adapter-XL + if ( + is_adapter + and len(down_intrablock_additional_residuals) > 0 + and sample.shape == down_intrablock_additional_residuals[0].shape + ): + sample += down_intrablock_additional_residuals.pop(0) + + if is_controlnet: + sample = sample + mid_block_additional_residual + + if is_brushnet: + sample = sample + mid_block_add_sample + + # 5. up + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets):] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block and forward_upsample_size: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "has_cross_attention") and upsample_block.has_cross_attention: + additional_residuals = {} + + i = len(up_block_add_samples) + + if is_brushnet and len(up_block_add_samples) > 0: + additional_residuals["up_block_add_samples"] = [up_block_add_samples.pop(0) + for _ in range( + len(upsample_block.resnets) + (upsample_block.upsamplers != None))] + + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + cross_attention_kwargs=cross_attention_kwargs, + upsample_size=upsample_size, 
+ attention_mask=attention_mask, + encoder_attention_mask=encoder_attention_mask, + **additional_residuals, + ) + else: + additional_residuals = {} + + i = len(up_block_add_samples) + + if is_brushnet and len(up_block_add_samples) > 0: + additional_residuals["up_block_add_samples"] = [up_block_add_samples.pop(0) + for _ in range( + len(upsample_block.resnets) + (upsample_block.upsamplers != None))] + + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + upsample_size=upsample_size, + **additional_residuals, + ) + + # 6. post-process + if self.conv_norm_out: + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + + if USE_PEFT_BACKEND: + # remove `lora_scale` from each PEFT layer + unscale_lora_layers(self, lora_scale) + + if not return_dict: + return (sample,) + + return UNet2DConditionOutput(sample=sample) diff --git a/py/dynamiCrafter/__init__.py b/py/dynamiCrafter/__init__.py index ddec9fc..84023ba 100644 --- a/py/dynamiCrafter/__init__.py +++ b/py/dynamiCrafter/__init__.py @@ -1,3 +1,5 @@ +#credit to ExponentialML for this module +#from https://github.com/ExponentialML/ComfyUI_Native_DynamiCrafter import os import torch import comfy diff --git a/py/easyNodes.py b/py/easyNodes.py index 9bc8b9d..23d50bb 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -2095,55 +2095,20 @@ def apply(self, model, latent, head, patch): return (m,) # brushnet -def get_files_with_extension(folder_name, extension=['.safetensors']): - - try: - folders = folder_paths.get_folder_paths(folder_name) - except: - folders = [] - - if not folders: - folders = [os.path.join(folder_paths.models_dir, folder_name)] - if not os.path.isdir(folders[0]): - folders = [os.path.join(folder_paths.base_path, folder_name)] - if not os.path.isdir(folders[0]): - return {} - - filtered_folders = [] - for x in folders: - if not os.path.isdir(x): - continue - the_same = False - for y in filtered_folders: - if 
os.path.samefile(x, y): - the_same = True - break - if not the_same: - filtered_folders.append(x) - - if not filtered_folders: - return {} - - output = {} - for x in filtered_folders: - files, folders_all = folder_paths.recursive_search(x, excluded_dir_names=[".git"]) - filtered_files = folder_paths.filter_files_extensions(files, extension) - - for f in filtered_files: - output[f] = x - - return output +from .brushnet import BrushNet class applyBrushNet: + def get_files_with_extension(folder='inpaint', extensions='.safetensors'): + return [file for file in folder_paths.get_filename_list(folder) if file.endswith(extensions)] + @classmethod def INPUT_TYPES(s): - s.inpaint_files = get_files_with_extension('inpaint') return { "required": { "pipe": ("PIPE_LINE",), "image": ("IMAGE",), "mask": ("MASK",), - "brushnet": ([file for file in s.inpaint_files],), + "brushnet": (s.get_files_with_extension(),), "dtype": (['float16', 'bfloat16', 'float32', 'float64'], ), "scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0}), "start_at": ("INT", {"default": 0, "min": 0, "max": 10000}), @@ -2162,17 +2127,15 @@ def apply(self, pipe, image, mask, brushnet, dtype, scale, start_at, end_at): vae = pipe['vae'] positive = pipe['positive'] negative = pipe['negative'] + cls = BrushNet() if brushnet in backend_cache.cache: log_node_info("easy brushnetApply", f"Using {brushnet} Cached") _, brushnet_model = backend_cache.cache[brushnet][1] else: - if "BrushNetLoader" not in ALL_NODE_CLASS_MAPPINGS: - raise Exception("BrushNetLoader not found,please install ComfyUI-BrushNet") - cls = ALL_NODE_CLASS_MAPPINGS['BrushNetLoader'] - brushnet_model, = cls.brushnet_loading(self, brushnet, dtype) + brushnet_file = os.path.join(folder_paths.get_full_path("inpaint", brushnet)) + brushnet_model, = cls.load_brushnet_model(brushnet_file, dtype) backend_cache.update_cache(brushnet, 'brushnet', (False, brushnet_model)) - cls = ALL_NODE_CLASS_MAPPINGS['BrushNet'] - m, positive, negative, latent = 
cls().model_update(model=model, vae=vae, image=image, mask=mask, + m, positive, negative, latent = cls.brushnet_model_update(model=model, vae=vae, image=image, mask=mask, brushnet=brushnet_model, positive=positive, negative=negative, scale=scale, start_at=start_at, end_at=end_at) @@ -2186,21 +2149,20 @@ def apply(self, pipe, image, mask, brushnet, dtype, scale, start_at, end_at): del pipe return (new_pipe,) -#powerpaint +# #powerpaint class applyPowerPaint: + def get_files_with_extension(folder='inpaint', extensions='.safetensors'): + return [file for file in folder_paths.get_filename_list(folder) if file.endswith(extensions)] @classmethod def INPUT_TYPES(s): - s.models_files = get_files_with_extension('inpaint') - s.inpaint_files = get_files_with_extension('inpaint', ['.bin']) - s.clip_files = get_files_with_extension('clip') return { "required": { "pipe": ("PIPE_LINE",), "image": ("IMAGE",), "mask": ("MASK",), - "powerpaint_model": ([file for file in s.models_files],), - "powerpaint_clip": ([file for file in s.inpaint_files],), + "powerpaint_model": (s.get_files_with_extension(),), + "powerpaint_clip": (s.get_files_with_extension(extensions='.bin'),), "dtype": (['float16', 'bfloat16', 'float32', 'float64'],), "fitting": ("FLOAT", {"default": 1.0, "min": 0.3, "max": 1.0}), "function": (['text guided', 'shape guided', 'object removal', 'context aware', 'image outpainting'],), @@ -2221,31 +2183,26 @@ def apply(self, pipe, image, mask, powerpaint_model, powerpaint_clip, dtype, fit vae = pipe['vae'] positive = pipe['positive'] negative = pipe['negative'] + + cls = BrushNet() # load powerpaint clip if powerpaint_clip in backend_cache.cache: log_node_info("easy powerpaintApply", f"Using {powerpaint_clip} Cached") _, ppclip = backend_cache.cache[powerpaint_clip][1] else: - if "PowerPaintCLIPLoader" not in ALL_NODE_CLASS_MAPPINGS: - raise Exception("PowerPaintCLIPLoader not found,please install ComfyUI-Brushnet") - cls = ALL_NODE_CLASS_MAPPINGS['PowerPaintCLIPLoader'] 
model_url = POWERPAINT_CLIP['base_fp16']['model_url'] base_clip = get_local_filepath(model_url, os.path.join(folder_paths.models_dir, 'clip')) - base = os.path.basename(base_clip) - ppclip, = cls.ppclip_loading(self, base, powerpaint_clip) + ppclip, = cls.load_powerpaint_clip(base_clip, os.path.join(folder_paths.get_full_path("inpaint", powerpaint_clip))) backend_cache.update_cache(powerpaint_clip, 'ppclip', (False, ppclip)) # load powerpaint model if powerpaint_model in backend_cache.cache: log_node_info("easy powerpaintApply", f"Using {powerpaint_model} Cached") _, powerpaint = backend_cache.cache[powerpaint_model][1] else: - if "BrushNetLoader" not in ALL_NODE_CLASS_MAPPINGS: - raise Exception("BrushNetLoader not found,please install ComfyUI-Brushnet") - brushnet_cls = ALL_NODE_CLASS_MAPPINGS['BrushNetLoader'] - powerpaint, = brushnet_cls().brushnet_loading(powerpaint_model, dtype) + powerpaint_file = os.path.join(folder_paths.get_full_path("inpaint", powerpaint_model)) + powerpaint, = cls.load_brushnet_model(powerpaint_file, dtype) backend_cache.update_cache(powerpaint_model, 'powerpaint', (False, powerpaint)) - cls = ALL_NODE_CLASS_MAPPINGS['PowerPaint'] - m, positive, negative, latent = cls().model_update(model=model, vae=vae, image=image, mask=mask, powerpaint=powerpaint, + m, positive, negative, latent = cls.powerpaint_model_update(model=model, vae=vae, image=image, mask=mask, powerpaint=powerpaint, clip=ppclip, positive=positive, negative=negative, fitting=fitting, function=function, scale=scale, start_at=start_at, end_at=end_at, save_memory=save_memory) From d713a14e98f9129c3edbb326ff35c4ca4e05d925 Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 6 Jun 2024 11:40:39 +0800 Subject: [PATCH 05/49] add:easy apply inpaint --- py/brushnet/model_patch.py | 18 +++- py/config.py | 8 +- py/easyNodes.py | 131 ++++++++++++++++++++++++++++++ py/libs/sampler.py | 48 +---------- web/js/easy/easyDynamicWidgets.js | 35 +++++++- 5 files changed, 192 insertions(+), 48 
deletions(-) diff --git a/py/brushnet/model_patch.py b/py/brushnet/model_patch.py index 5e13a06..fe8d3f2 100644 --- a/py/brushnet/model_patch.py +++ b/py/brushnet/model_patch.py @@ -118,4 +118,20 @@ def modified_apply_control(h, control, name): h += ctrl except: print.warning("warning control could not be applied {} {}".format(h.shape, ctrl.shape)) - return h \ No newline at end of file + return h + +def add_model_patch(model): + to = add_model_patch_option(model) + mp = to['model_patch'] + if "brushnet" in mp: + if isinstance(model.model.model_config, comfy.supported_models.SD15): + mp['SDXL'] = False + elif isinstance(model.model.model_config, comfy.supported_models.SDXL): + mp['SDXL'] = True + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: ", type(model.model.model_config)) + + mp['unet'] = model.model.diffusion_model + mp['step'] = 0 + mp['total_steps'] = 1 \ No newline at end of file diff --git a/py/config.py b/py/config.py index 0442bb1..f668fd5 100644 --- a/py/config.py +++ b/py/config.py @@ -77,8 +77,12 @@ } } POWERPAINT_CLIP = { - "base_fp16":{ - "model_url":"https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/text_encoder/model.fp16.safetensors" + "base_fp16": { + "model_url": "https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/text_encoder/model.fp16.safetensors" + }, + "v2.1": { + "model_url": "https://huggingface.co/JunhaoZhuang/PowerPaint-v2-1/resolve/main/PowerPaint_Brushnet/diffusion_pytorch_model.safetensors", + "clip_url": "https://huggingface.co/JunhaoZhuang/PowerPaint-v2-1/resolve/main/PowerPaint_Brushnet/pytorch_model.bin", } } diff --git a/py/easyNodes.py b/py/easyNodes.py index 23d50bb..1260483 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -2216,6 +2216,135 @@ def apply(self, pipe, image, mask, powerpaint_model, powerpaint_clip, dtype, fit del pipe return (new_pipe,) +class applyInpaint: + @classmethod + def INPUT_TYPES(s): + return { + 
"required": { + "pipe": ("PIPE_LINE",), + "image": ("IMAGE",), + "mask": ("MASK",), + "inpaint_mode": (('normal', 'fooocus_inpaint', 'brushnet_random', 'brushnet_segmentation', 'powerpaint'),), + "encode": (('vae_encode_inpaint', 'inpaint_model_conditioning', 'different_diffusion'),), + "grow_mask_by": ("INT", {"default": 6, "min": 0, "max": 64, "step": 1}), + "dtype": (['float16', 'bfloat16', 'float32', 'float64'],), + "fitting": ("FLOAT", {"default": 1.0, "min": 0.3, "max": 1.0}), + "function": (['text guided', 'shape guided', 'object removal', 'context aware', 'image outpainting'],), + "scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0}), + "start_at": ("INT", {"default": 0, "min": 0, "max": 10000}), + "end_at": ("INT", {"default": 10000, "min": 0, "max": 10000}), + }, + } + + RETURN_TYPES = ("PIPE_LINE",) + RETURN_NAMES = ("pipe",) + CATEGORY = "EasyUse/Inpaint" + FUNCTION = "apply" + + def inpaint_model_conditioning(self, pipe, image, vae, mask, grow_mask_by): + if grow_mask_by >0: + mask, = GrowMask().expand_mask(mask, grow_mask_by, False) + positive, negative, latent = InpaintModelConditioning().encode(pipe['positive'], pipe['negative'], image, + vae, mask) + pipe['positive'] = positive + pipe['negative'] = negative + pipe['samples'] = latent + + return pipe + + def get_brushnet_model(self, type, model): + model_type = 'sdxl' if isinstance(model.model.model_config, comfy.supported_models.SDXL) else 'sd1' + if type == 'brushnet_random': + brush_model = BRUSHNET_MODELS['random_mask'][model_type]['model_url'] + if model_type == 'sdxl': + pattern = 'brushnet.random.mask.sdxl.*\.(safetensors|bin)$' + else: + pattern = 'brushnet.random.mask.*\.(safetensors|bin)$' + elif type == 'brushnet_segmentation': + brush_model = BRUSHNET_MODELS['segmentation_mask'][model_type]['model_url'] + if model_type == 'sdxl': + pattern = 'brushnet.segmentation.mask.sdxl.*\.(safetensors|bin)$' + else: + pattern = 'brushnet.segmentation.mask.*\.(safetensors|bin)$' + + + 
brushfile = [e for e in folder_paths.get_filename_list('inpaint') if re.search(pattern, e, re.IGNORECASE)] + brushname = brushfile[0] if brushfile else None + if not brushname: + from urllib.parse import urlparse + get_local_filepath(brush_model, INPAINT_DIR) + parsed_url = urlparse(brush_model) + brushname = os.path.basename(parsed_url.path) + return brushname + + def get_powerpaint_model(self, model): + model_type = 'sdxl' if isinstance(model.model.model_config, comfy.supported_models.SDXL) else 'sd1' + if model_type == 'sdxl': + raise Exception("Powerpaint not supported for SDXL models") + + powerpaint_model = POWERPAINT_CLIP['v2.1']['model_url'] + powerpaint_clip = POWERPAINT_CLIP['v2.1']['clip_url'] + + from urllib.parse import urlparse + get_local_filepath(powerpaint_model, os.path.join(INPAINT_DIR, 'powerpaint')) + model_parsed_url = urlparse(powerpaint_model) + clip_parsed_url = urlparse(powerpaint_clip) + model_name = os.path.join("powerpaint",os.path.basename(model_parsed_url.path)) + clip_name = os.path.join("powerpaint",os.path.basename(clip_parsed_url.path)) + return model_name, clip_name + + def apply(self, pipe, image, mask, inpaint_mode, encode, grow_mask_by, dtype, fitting, function, scale, start_at, end_at): + new_pipe = { + **pipe, + } + del pipe + if inpaint_mode in ['brushnet_random', 'brushnet_segmentation']: + brushnet = self.get_brushnet_model(inpaint_mode, new_pipe['model']) + new_pipe, = applyBrushNet().apply(new_pipe, image, mask, brushnet, dtype, scale, start_at, end_at) + elif inpaint_mode == 'powerpaint': + powerpaint_model, powerpaint_clip = self.get_powerpaint_model(new_pipe['model']) + new_pipe, = applyPowerPaint().apply(new_pipe, image, mask, powerpaint_model, powerpaint_clip, dtype, fitting, function, scale, start_at, end_at) + + vae = new_pipe['vae'] + if encode == 'vae_encode_inpaint': + latent, = VAEEncodeForInpaint().encode(vae, image, mask, grow_mask_by) + new_pipe['samples'] = latent + if inpaint_mode == 'fooocus_inpaint': + 
model, = applyFooocusInpaint().apply(new_pipe['model'], new_pipe['samples'], + list(FOOOCUS_INPAINT_HEAD.keys())[0], + list(FOOOCUS_INPAINT_PATCH.keys())[0]) + new_pipe['model'] = model + elif encode == 'inpaint_model_conditioning': + if inpaint_mode == 'fooocus_inpaint': + latent, = VAEEncodeForInpaint().encode(vae, image, mask, grow_mask_by) + new_pipe['samples'] = latent + model, = applyFooocusInpaint().apply(new_pipe['model'], new_pipe['samples'], + list(FOOOCUS_INPAINT_HEAD.keys())[0], + list(FOOOCUS_INPAINT_PATCH.keys())[0]) + new_pipe['model'] = model + new_pipe = self.inpaint_model_conditioning(new_pipe, image, vae, mask, 0) + else: + new_pipe = self.inpaint_model_conditioning(new_pipe, image, vae, mask, grow_mask_by) + elif encode == 'different_diffusion': + if inpaint_mode == 'fooocus_inpaint': + latent, = VAEEncodeForInpaint().encode(vae, image, mask, grow_mask_by) + new_pipe['samples'] = latent + model, = applyFooocusInpaint().apply(new_pipe['model'], new_pipe['samples'], + list(FOOOCUS_INPAINT_HEAD.keys())[0], + list(FOOOCUS_INPAINT_PATCH.keys())[0]) + new_pipe['model'] = model + new_pipe = self.inpaint_model_conditioning(new_pipe, image, vae, mask, 0) + else: + new_pipe = self.inpaint_model_conditioning(new_pipe, image, vae, mask, grow_mask_by) + cls = ALL_NODE_CLASS_MAPPINGS['DifferentialDiffusion'] + if cls is not None: + model, = cls().apply(new_pipe['model']) + new_pipe['model'] = model + else: + raise Exception("Differential Diffusion not found,please update comfyui") + + + return (new_pipe,) # ---------------------------------------------------------------Inpaint 结束----------------------------------------------------------------------# #---------------------------------------------------------------适配器 开始----------------------------------------------------------------------# @@ -6766,6 +6895,7 @@ def generate(self, positive, negative, model, aspect_ratio, seed, denoise, optio "easy applyFooocusInpaint": applyFooocusInpaint, "easy applyBrushNet": 
applyBrushNet, "easy applyPowerPaint": applyPowerPaint, + "easy applyInpaint": applyInpaint, # latent 潜空间 "easy latentNoisy": latentNoisy, "easy latentCompositeMaskedWithCond": latentCompositeMaskedWithCond, @@ -6877,6 +7007,7 @@ def generate(self, positive, negative, model, aspect_ratio, seed, denoise, optio "easy applyFooocusInpaint": "Easy Apply Fooocus Inpaint", "easy applyBrushNet": "Easy Apply BrushNet", "easy applyPowerPaint": "Easy Apply PowerPaint", + "easy applyInpaint": "Easy Apply Inpaint", # latent 潜空间 "easy latentNoisy": "LatentNoisy", "easy latentCompositeMaskedWithCond": "LatentCompositeMaskedWithCond", diff --git a/py/libs/sampler.py b/py/libs/sampler.py index 73c28cc..cd7381c 100644 --- a/py/libs/sampler.py +++ b/py/libs/sampler.py @@ -5,7 +5,8 @@ from nodes import MAX_RESOLUTION from PIL import Image from typing import Dict, List, Optional, Tuple, Union, Any -from .utils import get_sd_version +from ..brushnet.model_patch import add_model_patch + class easySampler: def __init__(self): self.last_helds: dict[str, list] = { @@ -68,14 +69,6 @@ def emptyLatent(self, resolution, empty_latent_width, empty_latent_height, batch samples = ({"samples": latent_c}, {"samples": latent_b}) return samples - def add_model_patch_option(self, model): - if 'transformer_options' not in model.model_options: - model.model_options['transformer_options'] = {} - to = model.model_options['transformer_options'] - if "model_patch" not in to: - to["model_patch"] = {} - return to - def common_ksampler(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent, denoise=1.0, disable_noise=False, start_step=None, last_step=None, force_full_denoise=False, @@ -114,22 +107,7 @@ def callback(step, x0, x, total_steps): ####################################################################################### # brushnet - transformer_options = model.model_options['transformer_options'] if "transformer_options" in model.model_options else {} - if 'model_patch' in 
transformer_options and 'brushnet' in transformer_options['model_patch']: - to = self.add_model_patch_option(model) - mp = to['model_patch'] - if isinstance(model.model.model_config, comfy.supported_models.SD15): - mp['SDXL'] = False - elif isinstance(model.model.model_config, comfy.supported_models.SDXL): - mp['SDXL'] = True - else: - print('Base model type: ', type(model.model.model_config)) - raise Exception("Unsupported model type: ", type(model.model.model_config)) - - mp['unet'] = model.model.diffusion_model - mp['step'] = 0 - mp['total_steps'] = 1 - + add_model_patch(model) # ####################################################################################### samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, @@ -172,29 +150,11 @@ def custom_ksampler(self, model, seed, steps, cfg, _sampler, sigmas, positive, n ####################################################################################### # brushnet - to = None - transformer_options = model.model_options['transformer_options'] if "transformer_options" in model.model_options else {} - if 'model_patch' in transformer_options and 'brushnet' in transformer_options['model_patch']: - to = self.add_model_patch_option(model) - mp = to['model_patch'] - if isinstance(model.model.model_config, comfy.supported_models.SD15): - mp['SDXL'] = False - elif isinstance(model.model.model_config, comfy.supported_models.SDXL): - mp['SDXL'] = True - else: - print('Base model type: ', type(model.model.model_config)) - raise Exception("Unsupported model type: ", type(model.model.model_config)) - - mp['unet'] = model.model.diffusion_model - mp['step'] = 0 - mp['total_steps'] = 1 - # + add_model_patch(model) ####################################################################################### def callback(step, x0, x, total_steps): preview_bytes = None - if to is not None and "model_patch" in to: - to['model_patch']['step'] = step + 1 if previewer: preview_bytes = 
previewer.decode_latent_to_preview_image(preview_format, x0) pbar.update_absolute(step + 1, total_steps, preview_bytes) diff --git a/web/js/easy/easyDynamicWidgets.js b/web/js/easy/easyDynamicWidgets.js index 17926b3..2729cd1 100644 --- a/web/js/easy/easyDynamicWidgets.js +++ b/web/js/easy/easyDynamicWidgets.js @@ -357,6 +357,38 @@ function widgetLogic(node, widget) { } updateNodeHeight(node) } + + if(widget.name === 'inpaint_mode'){ + switch (widget.value){ + case 'normal': + case 'fooocus_inpaint': + toggleWidget(node, findWidgetByName(node, 'dtype')) + toggleWidget(node, findWidgetByName(node, 'fitting')) + toggleWidget(node, findWidgetByName(node, 'function')) + toggleWidget(node, findWidgetByName(node, 'scale')) + toggleWidget(node, findWidgetByName(node, 'start_at')) + toggleWidget(node, findWidgetByName(node, 'end_at')) + break + case 'brushnet_random': + case 'brushnet_segmentation': + toggleWidget(node, findWidgetByName(node, 'dtype'), true) + toggleWidget(node, findWidgetByName(node, 'fitting')) + toggleWidget(node, findWidgetByName(node, 'function')) + toggleWidget(node, findWidgetByName(node, 'scale'), true) + toggleWidget(node, findWidgetByName(node, 'start_at'), true) + toggleWidget(node, findWidgetByName(node, 'end_at'), true) + break + case 'powerpaint': + toggleWidget(node, findWidgetByName(node, 'dtype'), true) + toggleWidget(node, findWidgetByName(node, 'fitting'),true) + toggleWidget(node, findWidgetByName(node, 'function'),true) + toggleWidget(node, findWidgetByName(node, 'scale'), true) + toggleWidget(node, findWidgetByName(node, 'start_at'), true) + toggleWidget(node, findWidgetByName(node, 'end_at'), true) + break + } + updateNodeHeight(node) + } } function widgetLogic2(node, widget) { @@ -646,6 +678,7 @@ app.registerExtension({ case 'easy ipadapterApply': case 'easy ipadapterApplyADV': case 'easy ipadapterApplyEncoder': + case 'easy applyInpaint': getSetters(node) break case "easy wildcards": @@ -1150,7 +1183,7 @@ const getSetWidgets = 
['rescale_after_model', 'rescale', 'num_loras', 'num_controlnet', 'mode', 'toggle', 'resolution', 'target_parameter', 'input_count', 'replace_count', 'downscale_mode', 'range_mode','text_combine_mode', 'input_mode', 'lora_count','ckpt_count', 'conditioning_mode', 'preset', 'use_tiled', 'use_batch', 'num_embeds', - "easing_mode", "guider", "scheduler" + "easing_mode", "guider", "scheduler", "inpaint_mode", ] function getSetters(node) { From c4f100fbab1c4f38956c673087451539f258484f Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 6 Jun 2024 12:05:24 +0800 Subject: [PATCH 06/49] rename:POWERPAINT_CLIPS to POWERPAINT_MODELS --- py/config.py | 2 +- py/easyNodes.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/py/config.py b/py/config.py index f668fd5..6ef3580 100644 --- a/py/config.py +++ b/py/config.py @@ -76,7 +76,7 @@ } } } -POWERPAINT_CLIP = { +POWERPAINT_MODELS = { "base_fp16": { "model_url": "https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/text_encoder/model.fp16.safetensors" }, diff --git a/py/easyNodes.py b/py/easyNodes.py index 1260483..3f78f87 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -18,7 +18,7 @@ from server import PromptServer from nodes import MAX_RESOLUTION, LatentFromBatch, RepeatLatentBatch, NODE_CLASS_MAPPINGS as ALL_NODE_CLASS_MAPPINGS, ConditioningSetMask, ConditioningConcat, CLIPTextEncode, VAEEncodeForInpaint, InpaintModelConditioning -from .config import MAX_SEED_NUM, BASE_RESOLUTIONS, RESOURCES_DIR, INPAINT_DIR, FOOOCUS_STYLES_DIR, FOOOCUS_INPAINT_HEAD, FOOOCUS_INPAINT_PATCH, BRUSHNET_MODELS, POWERPAINT_CLIP, IPADAPTER_DIR, IPADAPTER_MODELS, DYNAMICRAFTER_DIR, DYNAMICRAFTER_MODELS, IC_LIGHT_MODELS +from .config import MAX_SEED_NUM, BASE_RESOLUTIONS, RESOURCES_DIR, INPAINT_DIR, FOOOCUS_STYLES_DIR, FOOOCUS_INPAINT_HEAD, FOOOCUS_INPAINT_PATCH, BRUSHNET_MODELS, POWERPAINT_MODELS, IPADAPTER_DIR, IPADAPTER_MODELS, DYNAMICRAFTER_DIR, DYNAMICRAFTER_MODELS, IC_LIGHT_MODELS from .layer_diffuse 
import LayerDiffuse, LayerMethod from .xyplot import XYplot_ModelMergeBlocks, XYplot_CFG, XYplot_Lora, XYplot_Checkpoint, XYplot_Denoise, XYplot_Steps, XYplot_PromptSR, XYplot_Positive_Cond, XYplot_Negative_Cond, XYplot_Positive_Cond_List, XYplot_Negative_Cond_List, XYplot_SeedsBatch, XYplot_Control_Net, XYplot_Sampler_Scheduler @@ -2190,7 +2190,7 @@ def apply(self, pipe, image, mask, powerpaint_model, powerpaint_clip, dtype, fit log_node_info("easy powerpaintApply", f"Using {powerpaint_clip} Cached") _, ppclip = backend_cache.cache[powerpaint_clip][1] else: - model_url = POWERPAINT_CLIP['base_fp16']['model_url'] + model_url = POWERPAINT_MODELS['base_fp16']['model_url'] base_clip = get_local_filepath(model_url, os.path.join(folder_paths.models_dir, 'clip')) ppclip, = cls.load_powerpaint_clip(base_clip, os.path.join(folder_paths.get_full_path("inpaint", powerpaint_clip))) backend_cache.update_cache(powerpaint_clip, 'ppclip', (False, ppclip)) @@ -2282,8 +2282,8 @@ def get_powerpaint_model(self, model): if model_type == 'sdxl': raise Exception("Powerpaint not supported for SDXL models") - powerpaint_model = POWERPAINT_CLIP['v2.1']['model_url'] - powerpaint_clip = POWERPAINT_CLIP['v2.1']['clip_url'] + powerpaint_model = POWERPAINT_MODELS['v2.1']['model_url'] + powerpaint_clip = POWERPAINT_MODELS['v2.1']['clip_url'] from urllib.parse import urlparse get_local_filepath(powerpaint_model, os.path.join(INPAINT_DIR, 'powerpaint')) From 37cf2facd7a96053f07f0d7d9b01bdfb0a68297d Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 6 Jun 2024 12:10:33 +0800 Subject: [PATCH 07/49] add:easy applyInpaint to swap menu --- web/js/easy/easyExtraMenu.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/web/js/easy/easyExtraMenu.js b/web/js/easy/easyExtraMenu.js index 0aeb885..50c415a 100644 --- a/web/js/easy/easyExtraMenu.js +++ b/web/js/easy/easyExtraMenu.js @@ -9,7 +9,7 @@ const controlnet = ['easy controlnetLoader', 'easy controlnetLoaderADV', 'easy i const ipadapter 
= ['easy ipadapterApply', 'easy ipadapterApplyADV', 'easy ipadapterStyleComposition', 'easy ipadapterApplyFromParams'] const positive_prompt = ['easy positive', 'easy wildcards'] const imageNode = ['easy loadImageBase64', 'LoadImage', 'LoadImageMask'] -const brushnet = ['easy applyBrushNet', 'easy applyPowerPaint'] +const brushnet = ['easy applyBrushNet', 'easy applyPowerPaint', 'easy applyInpaint'] const widgetMapping = { "positive_prompt":{ "text": "positive", @@ -68,6 +68,8 @@ const widgetMapping = { }, "brushnet":{ "dtype": "dtype", + "fitting": "fitting", + "function": "function", "scale": "scale", "start_at": "start_at", "end_at": "end_at" From 4fc23b305d188e4c27a82853fe3a2750379fa268 Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 6 Jun 2024 12:19:01 +0800 Subject: [PATCH 08/49] fix:load faceid portrait sdxl models error #195 --- py/easyNodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py/easyNodes.py b/py/easyNodes.py index 3f78f87..0ba64c3 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -2561,7 +2561,7 @@ def get_ipadapter_file(self, preset, is_sdxl, node_name): # if v11 is not found, try with the old version if not [e for e in ipadapter_list if re.search(pattern, e, re.IGNORECASE)]: pattern = 'portrait.sd15\.(safetensors|bin)$' - is_insightface = True + is_insightface = True elif preset == "faceid": if is_sdxl: pattern = 'faceid.sdxl\.(safetensors|bin)$' From 1899e21b7c9f46050e2b3687f714957de1671164 Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 6 Jun 2024 12:32:13 +0800 Subject: [PATCH 09/49] Upgrade to v1.1.9 --- README.en.md | 13 +++++++++++++ README.md | 13 +++++++++++++ 2 files changed, 26 insertions(+) diff --git a/README.en.md b/README.en.md index 69e2aa1..1b3884c 100644 --- a/README.en.md +++ b/README.en.md @@ -31,8 +31,21 @@ - Forcibly cleared the memory usage of the comfy UI model are supported - Stable Diffusion 3 multi-account API nodes are supported +## Installation +Clone the repo into the **custom_nodes** 
directory and install the requirements: +```shell +#1. Clone the repo +git clone https://github.com/yolain/ComfyUI-Easy-Use +#2. Install the requirements +Double-click install.bat to install the required dependencies +``` + ## Changelog +**v1.1.9** + +- Added `easy applyInpaint` - All inpainting mode in this node + **v1.1.8** - Added `easy controlnetStack` diff --git a/README.md b/README.md index 9fd23a6..5accd16 100644 --- a/README.md +++ b/README.md @@ -38,8 +38,21 @@ - 支持IC-Light的应用 [示例参考](https://github.com/yolain/ComfyUI-Yolain-Workflows?tab=readme-ov-file#2-5-ic-light) | [代码整合来源](https://github.com/huchenlei/ComfyUI-IC-Light) | [技术参考](https://github.com/lllyasviel/IC-Light) - 中文提示词自动识别,使用[opus-mt-zh-en模型](https://huggingface.co/Helsinki-NLP/opus-mt-zh-en) +## 安装 +将存储库克隆到 **custom_nodes** 目录并安装依赖 +```shell +#1. git下载 +git clone https://github.com/yolain/ComfyUI-Easy-Use +#2. 安装依赖 +双击install.bat安装依赖 +``` + ## 更新日志 +**v1.1.9** + +- 增加 `easy applyInpaint` - 局部重绘全模式节点 (相比与之前的kSamplerInpating节点逻辑会更合理些) + **v1.1.8** - 增加中文提示词自动翻译,使用[opus-mt-zh-en模型](https://huggingface.co/Helsinki-NLP/opus-mt-zh-en), 默认已对wildcard、lora正则处理, 其他需要保留的中文,可使用`@你的提示词@`包裹 (若依赖安装完成后报错, 请重启),测算大约会占0.3GB显存 From fcf5d18d206e301bd4ee99f0734771262a2a660b Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 6 Jun 2024 16:12:50 +0800 Subject: [PATCH 10/49] add:accelerate to requirements.txt --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 1967577..dc9c3b4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ diffusers>=0.25.0 +accelerate>=0.25.0 clip_interrogator>=0.6.0 sentencepiece==0.2.0 lark-parser From 9639c3a85e4007b415b1f2c7f24e766bc89dd52f Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 6 Jun 2024 17:59:55 +0800 Subject: [PATCH 11/49] change encode default to none in easy applyInpaint --- py/brushnet/model_patch.py | 16 ---------------- py/easyNodes.py | 10 ++++++++-- py/libs/sampler.py | 31 
++++++++++--------------------- web/js/easy/easyExtraMenu.js | 14 +++++++------- 4 files changed, 25 insertions(+), 46 deletions(-) diff --git a/py/brushnet/model_patch.py b/py/brushnet/model_patch.py index fe8d3f2..cc28b37 100644 --- a/py/brushnet/model_patch.py +++ b/py/brushnet/model_patch.py @@ -119,19 +119,3 @@ def modified_apply_control(h, control, name): except: print.warning("warning control could not be applied {} {}".format(h.shape, ctrl.shape)) return h - -def add_model_patch(model): - to = add_model_patch_option(model) - mp = to['model_patch'] - if "brushnet" in mp: - if isinstance(model.model.model_config, comfy.supported_models.SD15): - mp['SDXL'] = False - elif isinstance(model.model.model_config, comfy.supported_models.SDXL): - mp['SDXL'] = True - else: - print('Base model type: ', type(model.model.model_config)) - raise Exception("Unsupported model type: ", type(model.model.model_config)) - - mp['unet'] = model.model.diffusion_model - mp['step'] = 0 - mp['total_steps'] = 1 \ No newline at end of file diff --git a/py/easyNodes.py b/py/easyNodes.py index 0ba64c3..926239c 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -2225,7 +2225,7 @@ def INPUT_TYPES(s): "image": ("IMAGE",), "mask": ("MASK",), "inpaint_mode": (('normal', 'fooocus_inpaint', 'brushnet_random', 'brushnet_segmentation', 'powerpaint'),), - "encode": (('vae_encode_inpaint', 'inpaint_model_conditioning', 'different_diffusion'),), + "encode": (('none', 'vae_encode_inpaint', 'inpaint_model_conditioning', 'different_diffusion'), {"default": "none"}), "grow_mask_by": ("INT", {"default": 6, "min": 0, "max": 64, "step": 1}), "dtype": (['float16', 'bfloat16', 'float32', 'float64'],), "fitting": ("FLOAT", {"default": 1.0, "min": 0.3, "max": 1.0}), @@ -2306,7 +2306,13 @@ def apply(self, pipe, image, mask, inpaint_mode, encode, grow_mask_by, dtype, fi new_pipe, = applyPowerPaint().apply(new_pipe, image, mask, powerpaint_model, powerpaint_clip, dtype, fitting, function, scale, start_at, 
end_at) vae = new_pipe['vae'] - if encode == 'vae_encode_inpaint': + if encode == 'none': + if inpaint_mode == 'fooocus_inpaint': + model, = applyFooocusInpaint().apply(new_pipe['model'], new_pipe['samples'], + list(FOOOCUS_INPAINT_HEAD.keys())[0], + list(FOOOCUS_INPAINT_PATCH.keys())[0]) + new_pipe['model'] = model + elif encode == 'vae_encode_inpaint': latent, = VAEEncodeForInpaint().encode(vae, image, mask, grow_mask_by) new_pipe['samples'] = latent if inpaint_mode == 'fooocus_inpaint': diff --git a/py/libs/sampler.py b/py/libs/sampler.py index cd7381c..04f52e5 100644 --- a/py/libs/sampler.py +++ b/py/libs/sampler.py @@ -1,11 +1,12 @@ import comfy +import comfy.model_management +import comfy.samplers import torch import numpy as np import latent_preview from nodes import MAX_RESOLUTION from PIL import Image from typing import Dict, List, Optional, Tuple, Union, Any -from ..brushnet.model_patch import add_model_patch class easySampler: def __init__(self): @@ -105,18 +106,8 @@ def callback(step, x0, x, total_steps): batch_inds = latent["batch_index"] if "batch_index" in latent else None noise = comfy.sample.prepare_noise(latent_image, seed, batch_inds) - ####################################################################################### - # brushnet - add_model_patch(model) - # - ####################################################################################### - samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, - latent_image, - denoise=denoise, disable_noise=disable_noise, start_step=start_step, - last_step=last_step, - force_full_denoise=force_full_denoise, noise_mask=noise_mask, callback=callback, - disable_pbar=disable_pbar, seed=seed) - + kSampler = comfy.samplers.KSampler(model=model, steps=steps, device=device) + samples = kSampler.sample(noise, positive, negative, cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, 
denoise_mask=noise_mask, sigmas=None, callback=callback, disable_pbar=disable_pbar, seed=seed) out = latent.copy() out["samples"] = samples return out @@ -148,20 +139,18 @@ def custom_ksampler(self, model, seed, steps, cfg, _sampler, sigmas, positive, n pbar = comfy.utils.ProgressBar(steps) - ####################################################################################### - # brushnet - add_model_patch(model) - ####################################################################################### - def callback(step, x0, x, total_steps): preview_bytes = None if previewer: preview_bytes = previewer.decode_latent_to_preview_image(preview_format, x0) pbar.update_absolute(step + 1, total_steps, preview_bytes) - samples = comfy.sample.sample_custom(model, noise, cfg, _sampler, sigmas, positive, negative, latent_image, - noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, - seed=seed) + # samples = comfy.sample.sample_custom(model, noise, cfg, _sampler, sigmas, positive, negative, latent_image, + # noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, + # seed=seed) + + samples = comfy.samplers.sample(model, noise, positive, negative, cfg, device, _sampler, sigmas, latent_image=latent_image, + denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) out = latent.copy() out["samples"] = samples diff --git a/web/js/easy/easyExtraMenu.js b/web/js/easy/easyExtraMenu.js index 50c415a..19a8a3f 100644 --- a/web/js/easy/easyExtraMenu.js +++ b/web/js/easy/easyExtraMenu.js @@ -9,7 +9,7 @@ const controlnet = ['easy controlnetLoader', 'easy controlnetLoaderADV', 'easy i const ipadapter = ['easy ipadapterApply', 'easy ipadapterApplyADV', 'easy ipadapterStyleComposition', 'easy ipadapterApplyFromParams'] const positive_prompt = ['easy positive', 'easy wildcards'] const imageNode = ['easy loadImageBase64', 'LoadImage', 'LoadImageMask'] -const brushnet = ['easy applyBrushNet', 'easy applyPowerPaint', 'easy applyInpaint'] 
+const inpaint = ['easy applyBrushNet', 'easy applyPowerPaint', 'easy applyInpaint'] const widgetMapping = { "positive_prompt":{ "text": "positive", @@ -66,7 +66,7 @@ const widgetMapping = { "base64_data":"base64_data", "channel": "channel" }, - "brushnet":{ + "inpaint":{ "dtype": "dtype", "fitting": "fitting", "function": "function", @@ -109,7 +109,7 @@ const inputMapping = { "attn_mask":"attn_mask", "optional_ipadapter":"optional_ipadapter" }, - "brushnet":{ + "inpaint":{ "pipe": "pipe", "image": "image", "mask": "mask" @@ -152,7 +152,7 @@ const outputMapping = { "masks":"masks", "ipadapter":"ipadapter" }, - "brushnet":{ + "inpaint":{ "pipe": "pipe", } }; @@ -580,9 +580,9 @@ app.registerExtension({ if (imageNode.includes(nodeData.name)) { addMenu("↪️ Swap LoadImage", 'load_image', imageNode, nodeType) } - // Swap Brushnet - if (brushnet.includes(nodeData.name)) { - addMenu("↪️ Swap BrushNet", 'brushnet', brushnet, nodeType) + // Swap inpaint + if (inpaint.includes(nodeData.name)) { + addMenu("↪️ Swap InpaintNode", 'inpaint', inpaint, nodeType) } } }); From d18fec0e1634dfa592e26df582d2ac975214a087 Mon Sep 17 00:00:00 2001 From: yolain Date: Fri, 7 Jun 2024 11:27:15 +0800 Subject: [PATCH 12/49] fix:sampling missing add some parameters #198 --- py/easyNodes.py | 1 - py/image.py | 36 +++++++++++++++++++++++++++--------- py/libs/sampler.py | 6 ++++-- 3 files changed, 31 insertions(+), 12 deletions(-) diff --git a/py/easyNodes.py b/py/easyNodes.py index 926239c..406a299 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -2349,7 +2349,6 @@ def apply(self, pipe, image, mask, inpaint_mode, encode, grow_mask_by, dtype, fi else: raise Exception("Differential Diffusion not found,please update comfyui") - return (new_pipe,) # ---------------------------------------------------------------Inpaint 结束----------------------------------------------------------------------# diff --git a/py/image.py b/py/image.py index 599255c..b696848 100644 --- a/py/image.py +++ b/py/image.py 
@@ -12,6 +12,7 @@ from torchvision.transforms import Resize, CenterCrop, GaussianBlur from torchvision.transforms.functional import to_pil_image from .libs.log import log_node_info +from .libs.utils import AlwaysEqualProxy from .libs.image import pil2tensor, tensor2pil, ResizeMode, get_new_bounds, RGB2RGBA, image2mask from .libs.colorfix import adain_color_fix, wavelet_color_fix from .libs.chooser import ChooserMessage, ChooserCancelled @@ -1411,10 +1412,15 @@ def to_base64(self, image, ): return {"result": (base64_str,)} class removeLocalImage: + + def __init__(self): + self.hasFile = False + @classmethod def INPUT_TYPES(s): return { "required": { + "any": (AlwaysEqualProxy("*"),), "file_name": ("STRING",{"default":""}), }, } @@ -1424,15 +1430,27 @@ def INPUT_TYPES(s): FUNCTION = "remove" CATEGORY = "EasyUse/Image" - def remove(self, file_name): - hasFile = False - for file in os.listdir(folder_paths.input_directory): - name_without_extension, file_extension = os.path.splitext(file) - if name_without_extension == file_name or file == file_name: - os.remove(os.path.join(folder_paths.input_directory, file)) - hasFile = True - break - if hasFile: + + + def remove(self, any, file_name): + self.hasFile = False + def listdir(path, dir_name=''): + for file in os.listdir(path): + file_path = os.path.join(path, file) + if os.path.isdir(file_path): + dir_name = os.path.basename(file_path) + listdir(file_path, dir_name) + else: + file = os.path.join(dir_name, file) + name_without_extension, file_extension = os.path.splitext(file) + if name_without_extension == file_name or file == file_name: + os.remove(os.path.join(folder_paths.input_directory, file)) + self.hasFile = True + break + + listdir(folder_paths.input_directory, '') + + if self.hasFile: PromptServer.instance.send_sync("easyuse-toast", {"content": "Removed SuccessFully", "type":'success'}) else: PromptServer.instance.send_sync("easyuse-toast", {"content": "Removed Failed", "type": 'error'}) diff --git 
a/py/libs/sampler.py b/py/libs/sampler.py index 04f52e5..1bf9dd3 100644 --- a/py/libs/sampler.py +++ b/py/libs/sampler.py @@ -98,7 +98,6 @@ def callback(step, x0, x, total_steps): preview_bytes = previewer.decode_latent_to_preview_image(preview_format, x0) pbar.update_absolute(step + 1, total_steps, preview_bytes) - if disable_noise: noise = torch.zeros(latent_image.size(), dtype=latent_image.dtype, layout=latent_image.layout, device="cpu") @@ -106,7 +105,10 @@ def callback(step, x0, x, total_steps): batch_inds = latent["batch_index"] if "batch_index" in latent else None noise = comfy.sample.prepare_noise(latent_image, seed, batch_inds) - kSampler = comfy.samplers.KSampler(model=model, steps=steps, device=device) + comfy.model_management.load_model_gpu(model) + model_patcher = comfy.model_patcher.ModelPatcher(model.model, load_device=device, + offload_device=comfy.model_management.unet_offload_device()) + kSampler = comfy.samplers.KSampler(model_patcher, steps=steps, device=model.load_device, sampler=sampler_name, scheduler=scheduler, denoise=denoise, model_options=model.model_options) samples = kSampler.sample(noise, positive, negative, cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=None, callback=callback, disable_pbar=disable_pbar, seed=seed) out = latent.copy() out["samples"] = samples From 81b3f67068b7b82f85843a15c1e3ac05c4105026 Mon Sep 17 00:00:00 2001 From: yolain Date: Sat, 8 Jun 2024 00:46:58 +0800 Subject: [PATCH 13/49] fix:lora missing #202 --- py/libs/sampler.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/py/libs/sampler.py b/py/libs/sampler.py index 1bf9dd3..26962f3 100644 --- a/py/libs/sampler.py +++ b/py/libs/sampler.py @@ -106,9 +106,9 @@ def callback(step, x0, x, total_steps): noise = comfy.sample.prepare_noise(latent_image, seed, batch_inds) comfy.model_management.load_model_gpu(model) - model_patcher = 
comfy.model_patcher.ModelPatcher(model.model, load_device=device, - offload_device=comfy.model_management.unet_offload_device()) - kSampler = comfy.samplers.KSampler(model_patcher, steps=steps, device=model.load_device, sampler=sampler_name, scheduler=scheduler, denoise=denoise, model_options=model.model_options) + # model_patcher = comfy.model_patcher.ModelPatcher(model.model, load_device=device, + # offload_device=comfy.model_management.unet_offload_device()) + kSampler = comfy.samplers.KSampler(model, steps=steps, device=model.load_device, sampler=sampler_name, scheduler=scheduler, denoise=denoise, model_options=model.model_options) samples = kSampler.sample(noise, positive, negative, cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=None, callback=callback, disable_pbar=disable_pbar, seed=seed) out = latent.copy() out["samples"] = samples From 42ab155f80018e4e07f0f3e5b258b52d9ad33e68 Mon Sep 17 00:00:00 2001 From: yolain Date: Sat, 8 Jun 2024 17:59:02 +0800 Subject: [PATCH 14/49] fix:align_your_steps can not working #204 --- py/libs/sampler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py/libs/sampler.py b/py/libs/sampler.py index 26962f3..4943dfc 100644 --- a/py/libs/sampler.py +++ b/py/libs/sampler.py @@ -151,7 +151,7 @@ def callback(step, x0, x, total_steps): # noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, # seed=seed) - samples = comfy.samplers.sample(model, noise, positive, negative, cfg, device, _sampler, sigmas, latent_image=latent_image, + samples = comfy.samplers.sample(model, noise, positive, negative, cfg, device, _sampler, sigmas, latent_image=latent_image, model_options=model.model_options, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=seed) out = latent.copy() From e6e0d6e9289d2b06c0948e48755ab2418ea298f1 Mon Sep 17 00:00:00 2001 From: yolain Date: Sun, 9 Jun 2024 12:01:09 
+0800 Subject: [PATCH 15/49] add:align_your_steps of scheduler in preSampling(DynamicCFG) --- README.md | 2 +- py/easyNodes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 5accd16..692048b 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ # ComfyUI Easy Use -[![Bilibili Badge](https://img.shields.io/badge/1.0版本-00A1D6?style=for-the-badge&logo=bilibili&logoColor=white&link=https://www.bilibili.com/video/BV1Wi4y1h76G)](https://www.bilibili.com/video/BV1Wi4y1h76G) +[![Bilibili Badge](https://img.shields.io/badge/1.1版本-00A1D6?style=for-the-badge&logo=bilibili&logoColor=white&link=https://www.bilibili.com/video/BV1w6421F7Uv)](https://www.bilibili.com/video/BV1w6421F7Uv) [![Bilibili Badge](https://img.shields.io/badge/基本介绍-00A1D6?style=for-the-badge&logo=bilibili&logoColor=white&link=https://www.bilibili.com/video/BV1vQ4y1G7z7)](https://www.bilibili.com/video/BV1vQ4y1G7z7/) diff --git a/py/easyNodes.py b/py/easyNodes.py index 406a299..d8ad41a 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -4257,7 +4257,7 @@ def INPUT_TYPES(cls): "cfg_mode": (DynThresh.Modes,), "cfg_scale_min": ("FLOAT", {"default": 3.5, "min": 0.0, "max": 100.0, "step": 0.5}), "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), - "scheduler": (comfy.samplers.KSampler.SCHEDULERS,), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS+['align_your_steps'],), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), }, From 40fb1c0f62c70ee5f8e95a07176248392d769a6e Mon Sep 17 00:00:00 2001 From: yolain Date: Sun, 9 Jun 2024 14:31:34 +0800 Subject: [PATCH 16/49] fix:unable to translate cn words before or after theinclusion of @ --- py/libs/translate.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/py/libs/translate.py b/py/libs/translate.py index 8e769b2..16c2298 100644 --- a/py/libs/translate.py +++ 
b/py/libs/translate.py @@ -165,14 +165,23 @@ def schedule(self, *args): def word(self, word): # Translate each word using the dictionary - if re.search(r'__.*?__', str(word)): - return str(word).rstrip('.') - elif re.search(r'@.*?@', str(word)): - return str(word).replace('@', '').rstrip('.') - elif detect_language(str(word)) == "cn": - return translate(str(word)).rstrip('.') + word = str(word) + match_cn = re.search(r'@.*?@', word) + if re.search(r'__.*?__', word): + return word.rstrip('.') + elif match_cn: + chinese = match_cn.group() + before = word.split('@', 1) + before = before[0] if len(before) > 0 else '' + before = translate(str(before)).rstrip('.') if before else '' + after = word.rsplit('@', 1) + after = after[len(after)-1] if len(after) > 1 else '' + after = translate(after).rstrip('.') if after else '' + return before + chinese.replace('@', '').rstrip('.') + after + elif detect_language(word) == "cn": + return translate(word).rstrip('.') else: - return str(word).rstrip('.') + return word.rstrip('.') #定义Prompt文法 From 5aa4f17187c0db1c42247aaf886a21ce272bb073 Mon Sep 17 00:00:00 2001 From: yolain Date: Mon, 10 Jun 2024 23:52:38 +0800 Subject: [PATCH 17/49] fix:replaced with the original kSampelr writeup #202 --- py/brushnet/model_patch.py | 16 ++++++++++++++++ py/libs/sampler.py | 18 +++++++++++++----- 2 files changed, 29 insertions(+), 5 deletions(-) diff --git a/py/brushnet/model_patch.py b/py/brushnet/model_patch.py index cc28b37..fe8d3f2 100644 --- a/py/brushnet/model_patch.py +++ b/py/brushnet/model_patch.py @@ -119,3 +119,19 @@ def modified_apply_control(h, control, name): except: print.warning("warning control could not be applied {} {}".format(h.shape, ctrl.shape)) return h + +def add_model_patch(model): + to = add_model_patch_option(model) + mp = to['model_patch'] + if "brushnet" in mp: + if isinstance(model.model.model_config, comfy.supported_models.SD15): + mp['SDXL'] = False + elif isinstance(model.model.model_config, 
comfy.supported_models.SDXL): + mp['SDXL'] = True + else: + print('Base model type: ', type(model.model.model_config)) + raise Exception("Unsupported model type: ", type(model.model.model_config)) + + mp['unet'] = model.model.diffusion_model + mp['step'] = 0 + mp['total_steps'] = 1 \ No newline at end of file diff --git a/py/libs/sampler.py b/py/libs/sampler.py index 4943dfc..107e937 100644 --- a/py/libs/sampler.py +++ b/py/libs/sampler.py @@ -7,6 +7,7 @@ from nodes import MAX_RESOLUTION from PIL import Image from typing import Dict, List, Optional, Tuple, Union, Any +from ..brushnet.model_patch import add_model_patch class easySampler: def __init__(self): @@ -105,11 +106,18 @@ def callback(step, x0, x, total_steps): batch_inds = latent["batch_index"] if "batch_index" in latent else None noise = comfy.sample.prepare_noise(latent_image, seed, batch_inds) - comfy.model_management.load_model_gpu(model) - # model_patcher = comfy.model_patcher.ModelPatcher(model.model, load_device=device, - # offload_device=comfy.model_management.unet_offload_device()) - kSampler = comfy.samplers.KSampler(model, steps=steps, device=model.load_device, sampler=sampler_name, scheduler=scheduler, denoise=denoise, model_options=model.model_options) - samples = kSampler.sample(noise, positive, negative, cfg, latent_image=latent_image, start_step=start_step, last_step=last_step, force_full_denoise=force_full_denoise, denoise_mask=noise_mask, sigmas=None, callback=callback, disable_pbar=disable_pbar, seed=seed) + ####################################################################################### + # brushnet + add_model_patch(model) + # + ####################################################################################### + samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, + latent_image, + denoise=denoise, disable_noise=disable_noise, start_step=start_step, + last_step=last_step, + force_full_denoise=force_full_denoise, 
noise_mask=noise_mask, + callback=callback, + disable_pbar=disable_pbar, seed=seed) out = latent.copy() out["samples"] = samples return out From b6b6bbfae4d56e7a12f7ec765fdf9da9de20a69c Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 13 Jun 2024 02:41:05 +0800 Subject: [PATCH 18/49] support for sd3_medium_incl_clips in easy loader --- README.md | 1 + py/easyNodes.py | 7 +++-- py/libs/adv_encode.py | 73 ++++++++++++++++++++++++++++++++++++++++++- py/libs/sampler.py | 8 +++-- py/libs/utils.py | 4 ++- 5 files changed, 85 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 692048b..c594138 100644 --- a/README.md +++ b/README.md @@ -51,6 +51,7 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use **v1.1.9** +- 支持 sd3_medium_incl_clips 模型 - 增加 `easy applyInpaint` - 局部重绘全模式节点 (相比与之前的kSamplerInpating节点逻辑会更合理些) **v1.1.8** diff --git a/py/easyNodes.py b/py/easyNodes.py index d8ad41a..bd0400e 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -938,13 +938,14 @@ def adv_pipeloader(self, ckpt_name, config_name, vae_name, clip_skip, # Clean models from loaded_objects easyCache.update_loaded_objects(prompt) - # Create Empty Latent - samples = sampler.emptyLatent(resolution, empty_latent_width, empty_latent_height, batch_size) - # Load models log_node_warn("正在加载模型...") model, clip, vae, clip_vision, lora_stack = easyCache.load_main(ckpt_name, config_name, vae_name, lora_name, lora_model_strength, lora_clip_strength, optional_lora_stack, model_override, clip_override, vae_override, prompt) + # Create Empty Latent + sd3 = True if get_sd_version(model) == 'sd3' else False + samples = sampler.emptyLatent(resolution, empty_latent_width, empty_latent_height, batch_size, sd3=sd3) + # Prompt to Conditioning positive_embeddings_final, positive_wildcard_prompt, model, clip = prompt_to_cond('positive', model, clip, clip_skip, lora_stack, positive, positive_token_normalization, positive_weight_interpretation, a1111_prompt_style, my_unique_id, prompt, easyCache) 
negative_embeddings_final, negative_wildcard_prompt, model, clip = prompt_to_cond('negative', model, clip, clip_skip, lora_stack, negative, negative_token_normalization, negative_weight_interpretation, a1111_prompt_style, my_unique_id, prompt, easyCache) diff --git a/py/libs/adv_encode.py b/py/libs/adv_encode.py index fbebf48..b2b2846 100644 --- a/py/libs/adv_encode.py +++ b/py/libs/adv_encode.py @@ -4,6 +4,11 @@ from comfy import model_management from comfy.sdxl_clip import SDXLClipModel, SDXLRefinerClipModel, SDXLClipG +try: + from comfy.sd3_clip import SD3ClipModel, SDT5XXLModel +except: + SD3ClipModel, SDT5XXLModel = None, None + pass from nodes import NODE_CLASS_MAPPINGS, ConditioningConcat def _grouper(n, iterable): @@ -238,6 +243,9 @@ def encode_token_weights_l(model, token_weight_pairs): l_out, pooled = model.clip_l.encode_token_weights(token_weight_pairs) return l_out, pooled +def encode_token_weights_t5(model, token_weight_pairs): + return model.t5xxl.encode_token_weights(token_weight_pairs) + def encode_token_weights(model, token_weight_pairs, encode_func): if model.layer_idx is not None: @@ -258,6 +266,14 @@ def prepareXL(embs_l, embs_g, pooled, clip_balance): else: return embs_g, pooled +def prepareSD3(out, pooled, clip_balance): + lg_w = 1 - max(0, clip_balance - .5) * 2 + t5_w = 1 - max(0, .5 - clip_balance) * 2 + if out.shape[0] > 1: + return torch.cat([out[0] * lg_w, out[1] * t5_w], dim=-1), pooled + else: + return out, pooled + def advanced_encode(clip, text, token_normalization, weight_interpretation, w_max=1.0, clip_balance=.5, apply_to_pooled=True, width=1024, height=1024, crop_w=0, crop_h=0, target_width=1024, target_height=1024, a1111_prompt_style=False, steps=1): @@ -283,7 +299,62 @@ def advanced_encode(clip, text, token_normalization, weight_interpretation, w_ma for text in pass3: tokenized = clip.tokenize(text, return_word_ids=True) - if isinstance(clip.cond_stage_model, (SDXLClipModel, SDXLRefinerClipModel, SDXLClipG)): + if SD3ClipModel 
and isinstance(clip.cond_stage_model, SD3ClipModel): + lg_out = None + pooled = None + out = None + + if len(tokenized['l']) > 0 or len(tokenized['g']) > 0: + if 'l' in tokenized: + lg_out, l_pooled = advanced_encode_from_tokens(tokenized['l'], + token_normalization, + weight_interpretation, + lambda x: encode_token_weights(clip, x, encode_token_weights_l), + w_max=w_max, return_pooled=True,) + else: + l_pooled = torch.zeros((1, 768), device=model_management.intermediate_device()) + + if 'g' in tokenized: + g_out, g_pooled = advanced_encode_from_tokens(tokenized['g'], + token_normalization, + weight_interpretation, + lambda x: encode_token_weights(clip, x, encode_token_weights_g), + w_max=w_max, return_pooled=True) + if lg_out is not None: + lg_out = torch.cat([lg_out, g_out], dim=-1) + else: + lg_out = torch.nn.functional.pad(g_out, (768, 0)) + else: + g_out = None + g_pooled = torch.zeros((1, 1280), device=model_management.intermediate_device()) + + if lg_out is not None: + lg_out = torch.nn.functional.pad(lg_out, (0, 4096 - lg_out.shape[-1])) + out = lg_out + pooled = torch.cat((l_pooled, g_pooled), dim=-1) + + # t5xxl not working, need to fix + if 't5xxl' in tokenized and clip.cond_stage_model.t5xxl is not None: + t5_out, t5_pooled = advanced_encode_from_tokens(tokenized['t5xxl'], + token_normalization, + weight_interpretation, + lambda x: encode_token_weights(clip, x, encode_token_weights_t5), + w_max=w_max, return_pooled=True) + if lg_out is not None: + out = torch.cat([lg_out, t5_out], dim=-2) + else: + out = t5_out + + if out is None: + out = torch.zeros((1, 77, 4096), device=model_management.intermediate_device()) + + if pooled is None: + pooled = torch.zeros((1, 768 + 1280), device=model_management.intermediate_device()) + + embeddings_final, pooled = prepareSD3(out, pooled, clip_balance) + cond = [[embeddings_final, {"pooled_output": pooled}]] + + elif isinstance(clip.cond_stage_model, (SDXLClipModel, SDXLRefinerClipModel, SDXLClipG)): embs_l = None 
embs_g = None pooled = None diff --git a/py/libs/sampler.py b/py/libs/sampler.py index 107e937..b7aee37 100644 --- a/py/libs/sampler.py +++ b/py/libs/sampler.py @@ -51,7 +51,7 @@ def safe_split(to_split: str, delimiter: str) -> List[str]: parts.append('None') return parts - def emptyLatent(self, resolution, empty_latent_width, empty_latent_height, batch_size=1, compression=0): + def emptyLatent(self, resolution, empty_latent_width, empty_latent_height, batch_size=1, compression=0, sd3=False): if resolution != "自定义 x 自定义": try: width, height = map(int, resolution.split(' x ')) @@ -59,8 +59,10 @@ def emptyLatent(self, resolution, empty_latent_width, empty_latent_height, batch empty_latent_height = height except ValueError: raise ValueError("Invalid base_resolution format.") - - if compression == 0: + if sd3: + latent = torch.ones([batch_size, 16, empty_latent_height // 8, empty_latent_width // 8], device=self.device) * 0.0609 + samples = {"samples": latent} + elif compression == 0: latent = torch.zeros([batch_size, 4, empty_latent_height // 8, empty_latent_width // 8], device=self.device) samples = {"samples": latent} else: diff --git a/py/libs/utils.py b/py/libs/utils.py index f53fe70..62d1a1e 100644 --- a/py/libs/utils.py +++ b/py/libs/utils.py @@ -90,7 +90,9 @@ def find_tags(string: str, sep="/") -> list[str]: def get_sd_version(model): base: BaseModel = model.model model_config: comfy.supported_models.supported_models_base.BASE = base.model_config - if isinstance(model_config, comfy.supported_models.SDXL): + if isinstance(model_config, comfy.supported_models.SD3): + return 'sd3' + elif isinstance(model_config, comfy.supported_models.SDXL): return 'sdxl' elif isinstance( model_config, (comfy.supported_models.SD15, comfy.supported_models.SD20) From aea8e139546c39e3a2eed1d2e87347e9a6470ba5 Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 13 Jun 2024 09:21:15 +0800 Subject: [PATCH 19/49] fix:get sd version --- py/libs/utils.py | 6 +++--- 1 file changed, 3 
insertions(+), 3 deletions(-) diff --git a/py/libs/utils.py b/py/libs/utils.py index 62d1a1e..8c1ecf9 100644 --- a/py/libs/utils.py +++ b/py/libs/utils.py @@ -90,9 +90,7 @@ def find_tags(string: str, sep="/") -> list[str]: def get_sd_version(model): base: BaseModel = model.model model_config: comfy.supported_models.supported_models_base.BASE = base.model_config - if isinstance(model_config, comfy.supported_models.SD3): - return 'sd3' - elif isinstance(model_config, comfy.supported_models.SDXL): + if isinstance(model_config, comfy.supported_models.SDXL): return 'sdxl' elif isinstance( model_config, (comfy.supported_models.SD15, comfy.supported_models.SD20) @@ -102,6 +100,8 @@ def get_sd_version(model): model_config, (comfy.supported_models.SVD_img2vid) ): return 'svd' + elif isinstance(model_config, comfy.supported_models.SD3): + return 'sd3' else: return 'unknown' From 9811cd79d0bdb675cbe98a11e658d6184da032a5 Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 13 Jun 2024 14:24:55 +0800 Subject: [PATCH 20/49] add:TIMESTEP for set timesteprange conditioning and combine conditioning in advanced encode --- py/libs/adv_encode.py | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/py/libs/adv_encode.py b/py/libs/adv_encode.py index b2b2846..8fe8721 100644 --- a/py/libs/adv_encode.py +++ b/py/libs/adv_encode.py @@ -1,5 +1,6 @@ import torch import numpy as np +import re import itertools from comfy import model_management @@ -9,7 +10,7 @@ except: SD3ClipModel, SDT5XXLModel = None, None pass -from nodes import NODE_CLASS_MAPPINGS, ConditioningConcat +from nodes import NODE_CLASS_MAPPINGS, ConditioningConcat, ConditioningZeroOut, ConditioningSetTimestepRange, ConditioningCombine def _grouper(n, iterable): it = iter(iterable) @@ -286,6 +287,25 @@ def advanced_encode(clip, text, token_normalization, weight_interpretation, w_ma else: raise Exception(f"[smzNodes Not Found] you need to install 'ComfyUI-smzNodes'") + time_start = 0 + 
time_end = 1 + match = re.search(r'TIMESTEP.*$', text) + if match: + timestep = match.group() + timestep = timestep.split(' ') + timestep = timestep[0] + text = text.replace(timestep, '') + value = timestep.split(':') + if len(value) >= 3: + time_start = float(value[1]) + time_end = float(value[2]) + elif len(value) == 2: + time_start = float(value[1]) + time_end = 1 + elif len(value) == 1: + time_start = 0.1 + time_end = 1 + pass3 = [x.strip() for x in text.split("BREAK")] pass3 = [x for x in pass3 if x != ''] @@ -394,6 +414,13 @@ def advanced_encode(clip, text, token_normalization, weight_interpretation, w_ma else: conditioning = cond + # setTimeRange + if time_start > 0 or time_end < 1: + conditioning_2, = ConditioningSetTimestepRange().set_range(conditioning, 0, time_start) + conditioning_1, = ConditioningZeroOut().zero_out(conditioning) + conditioning_1, = ConditioningSetTimestepRange().set_range(conditioning_1, time_start, time_end) + conditioning, = ConditioningCombine().combine(conditioning_1, conditioning_2) + return conditioning From fe32eda5396613bb1dcf4fcdac953603008ffa10 Mon Sep 17 00:00:00 2001 From: yolain Date: Fri, 14 Jun 2024 23:02:28 +0800 Subject: [PATCH 21/49] fix:fooocus inpaint not working in latest comfy version #211 --- README.en.md | 2 ++ README.md | 3 ++- py/libs/adv_encode.py | 8 ++++---- py/libs/fooocus.py | 12 ++++++++---- 4 files changed, 16 insertions(+), 9 deletions(-) diff --git a/README.en.md b/README.en.md index 1b3884c..e99faa7 100644 --- a/README.en.md +++ b/README.en.md @@ -30,6 +30,7 @@ - Background removal nodes for the RMBG-1.4 model supporting BriaAI, [BriaAI Guide](https://huggingface.co/briaai/RMBG-1.4) - Forcibly cleared the memory usage of the comfy UI model are supported - Stable Diffusion 3 multi-account API nodes are supported +- Support Stable Diffusion 3 model ## Installation Clone the repo into the **custom_nodes** directory and install the requirements: @@ -44,6 +45,7 @@ Double-click install.bat to install the 
required dependencies **v1.1.9** +- Support for Stable Diffusion 3 model - Added `easy applyInpaint` - All inpainting mode in this node **v1.1.8** diff --git a/README.md b/README.md index c594138..d729e23 100644 --- a/README.md +++ b/README.md @@ -37,6 +37,7 @@ - 支持Stable Diffusion 3 多账号API节点 - 支持IC-Light的应用 [示例参考](https://github.com/yolain/ComfyUI-Yolain-Workflows?tab=readme-ov-file#2-5-ic-light) | [代码整合来源](https://github.com/huchenlei/ComfyUI-IC-Light) | [技术参考](https://github.com/lllyasviel/IC-Light) - 中文提示词自动识别,使用[opus-mt-zh-en模型](https://huggingface.co/Helsinki-NLP/opus-mt-zh-en) +- 支持 sd3 模型 ## 安装 将存储库克隆到 **custom_nodes** 目录并安装依赖 @@ -51,7 +52,7 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use **v1.1.9** -- 支持 sd3_medium_incl_clips 模型 +- 支持 sd3 模型 - 增加 `easy applyInpaint` - 局部重绘全模式节点 (相比与之前的kSamplerInpating节点逻辑会更合理些) **v1.1.8** diff --git a/py/libs/adv_encode.py b/py/libs/adv_encode.py index 8fe8721..66031fd 100644 --- a/py/libs/adv_encode.py +++ b/py/libs/adv_encode.py @@ -6,9 +6,9 @@ from comfy import model_management from comfy.sdxl_clip import SDXLClipModel, SDXLRefinerClipModel, SDXLClipG try: - from comfy.sd3_clip import SD3ClipModel, SDT5XXLModel + from comfy.sd3_clip import SD3ClipModel, T5XXLModel except: - SD3ClipModel, SDT5XXLModel = None, None + SD3ClipModel, T5XXLModel = None, None pass from nodes import NODE_CLASS_MAPPINGS, ConditioningConcat, ConditioningZeroOut, ConditioningSetTimestepRange, ConditioningCombine @@ -353,7 +353,7 @@ def advanced_encode(clip, text, token_normalization, weight_interpretation, w_ma out = lg_out pooled = torch.cat((l_pooled, g_pooled), dim=-1) - # t5xxl not working, need to fix + # t5xxl if 't5xxl' in tokenized and clip.cond_stage_model.t5xxl is not None: t5_out, t5_pooled = advanced_encode_from_tokens(tokenized['t5xxl'], token_normalization, @@ -414,7 +414,7 @@ def advanced_encode(clip, text, token_normalization, weight_interpretation, w_ma else: conditioning = cond - # setTimeRange + # setTimeStepRange if 
time_start > 0 or time_end < 1: conditioning_2, = ConditioningSetTimestepRange().set_range(conditioning, 0, time_start) conditioning_1, = ConditioningZeroOut().zero_out(conditioning) diff --git a/py/libs/fooocus.py b/py/libs/fooocus.py index 9e69f12..6f5e2df 100644 --- a/py/libs/fooocus.py +++ b/py/libs/fooocus.py @@ -1,3 +1,5 @@ +#credit to Acly for this module +#from https://github.com/Acly/comfyui-inpaint-nodes import torch import comfy from comfy.model_patcher import ModelPatcher @@ -32,16 +34,18 @@ def load_fooocus_patch(self, lora: dict, to_load: dict): loaded_keys.add(key) not_loaded = sum(1 for x in lora if x not in loaded_keys) - log_node_info(self.node_name, - f"{len(loaded_keys)} Lora keys loaded, {not_loaded} remaining keys not found in model." - ) + if not_loaded > 0: + log_node_info(self.node_name, + f"{len(loaded_keys)} Lora keys loaded, {not_loaded} remaining keys not found in model." + ) return patch_dict def calculate_weight_patched(self: ModelPatcher, patches, weight, key): remaining = [] for p in patches: - alpha, v, strength_model = p + alpha = p[0] + v = p[1] is_fooocus_patch = isinstance(v, tuple) and len(v) == 2 and v[0] == "fooocus" if not is_fooocus_patch: From ebe049c2d5260e140d4f9ecd0d7df65388d63a5c Mon Sep 17 00:00:00 2001 From: yolain Date: Fri, 14 Jun 2024 23:07:51 +0800 Subject: [PATCH 22/49] Add dep --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index dc9c3b4..dbc667e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,3 +4,4 @@ clip_interrogator>=0.6.0 sentencepiece==0.2.0 lark-parser onnxruntime +spandrel From b76b3d2fc53a25eec4f9af19548a7181a4242159 Mon Sep 17 00:00:00 2001 From: yolain Date: Sat, 15 Jun 2024 11:06:57 +0800 Subject: [PATCH 23/49] fix:Recursive subcategories nested for models --- README.en.md | 1 + README.md | 1 + web/js/easy/easyContextMenu.js | 47 +++++++++++++++++++++++++++++++--- 3 files changed, 46 insertions(+), 3 deletions(-) diff --git 
a/README.en.md b/README.en.md index e99faa7..e648af7 100644 --- a/README.en.md +++ b/README.en.md @@ -45,6 +45,7 @@ Double-click install.bat to install the required dependencies **v1.1.9** +- Recursive subcategories nested for models - Support for Stable Diffusion 3 model - Added `easy applyInpaint` - All inpainting mode in this node diff --git a/README.md b/README.md index d729e23..33f34e3 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,7 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use **v1.1.9** +- 递归模型子目录嵌套 - 支持 sd3 模型 - 增加 `easy applyInpaint` - 局部重绘全模式节点 (相比与之前的kSamplerInpating节点逻辑会更合理些) diff --git a/web/js/easy/easyContextMenu.js b/web/js/easy/easyContextMenu.js index 9f7ea06..0abbdd6 100644 --- a/web/js/easy/easyContextMenu.js +++ b/web/js/easy/easyContextMenu.js @@ -109,19 +109,60 @@ app.registerExtension({ } } const newValues = []; + const add_sub_folder = (folder, folderName) => { + let subs = [] + let less = [] + const b = folder.map(name=> { + const _folders = {}; + const splitBy = name.indexOf('/') > -1 ? 
'/' : '\\'; + const valueSplit = name.split(splitBy); + if(valueSplit.length > 1){ + const key = valueSplit.shift(); + _folders[key] = _folders[key] || []; + _folders[key].push(valueSplit.join(splitBy)); + } + const foldersCount = Object.values(folders).length; + if(foldersCount > 0){ + let key = Object.keys(_folders)[0] + if(key && _folders[key]) subs.push({key, value:_folders[key][0]}) + else{ + less.push(addContent(name,key)) + } + } + return addContent(name,folderName) + }) + if(subs.length>0){ + let subs_obj = {} + subs.forEach(item => { + subs_obj[item.key] = subs_obj[item.key] || [] + subs_obj[item.key].push(item.value) + }) + return [...Object.entries(subs_obj).map(f => { + return { + content: f[0], + has_submenu: true, + callback: () => {}, + submenu: { + options: add_sub_folder(f[1], f[0]), + } + } + }),...less] + } + else return b + } + for(const [folderName,folder] of Object.entries(folders)){ newValues.push({ content:folderName, has_submenu:true, callback:() => {}, submenu:{ - options:folder.map(f => addContent(f,folderName)), + options:add_sub_folder(folder,folderName), } }); } newValues.push(...folderless.map(f => addContent(f, ''))); - if(specialOps.length > 0) - newValues.push(...specialOps.map(f => addContent(f, ''))); + if(specialOps.length > 0) newValues.push(...specialOps.map(f => addContent(f, ''))); return existingContextMenu.call(this,newValues,options); } return existingContextMenu.apply(this,[...arguments]); From 8cda21d56c77f8ef972ca39559ccb4082a7b87b2 Mon Sep 17 00:00:00 2001 From: yolain Date: Sat, 15 Jun 2024 20:37:12 +0800 Subject: [PATCH 24/49] add:imageBatchToList and imageListToBatch --- README.en.md | 1 + README.md | 3 ++- py/image.py | 46 ++++++++++++++++++++++++++++++++++++++++++++++ web/css/easy.css | 7 +++++++ web/css/theme.css | 2 +- 5 files changed, 57 insertions(+), 2 deletions(-) diff --git a/README.en.md b/README.en.md index e648af7..608324b 100644 --- a/README.en.md +++ b/README.en.md @@ -45,6 +45,7 @@ Double-click 
install.bat to install the required dependencies **v1.1.9** +- Added `easy imageBatchToImageList` and `easy imageListToImageBatch` - Recursive subcategories nested for models - Support for Stable Diffusion 3 model - Added `easy applyInpaint` - All inpainting mode in this node diff --git a/README.md b/README.md index 33f34e3..cfa8b34 100644 --- a/README.md +++ b/README.md @@ -50,8 +50,9 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use ## 更新日志 -**v1.1.9** +**v1.1.9**x +- 增加 `easy imageBatchToImageList` 和 `easy imageListToImageBatch` (修复Impact版的一点小问题) - 递归模型子目录嵌套 - 支持 sd3 模型 - 增加 `easy applyInpaint` - 局部重绘全模式节点 (相比与之前的kSamplerInpating节点逻辑会更合理些) diff --git a/py/image.py b/py/image.py index b696848..551b8eb 100644 --- a/py/image.py +++ b/py/image.py @@ -507,6 +507,48 @@ def join(self, images, mode): image = torch.transpose(torch.transpose(images, 1, 2).reshape(1, n * w, h, c), 1, 2) return (image,) +class imageListToImageBatch: + @classmethod + def INPUT_TYPES(s): + return {"required": { + "images": ("IMAGE",), + }} + + INPUT_IS_LIST = True + + RETURN_TYPES = ("IMAGE",) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Image" + + def doit(self, images): + if len(images) <= 1: + return (images[0],) + else: + image1 = images[0] + for image2 in images[1:]: + if image1.shape[1:] != image2.shape[1:]: + image2 = comfy.utils.common_upscale(image2.movedim(-1, 1), image1.shape[2], image1.shape[1], "lanczos", + "center").movedim(1, -1) + image1 = torch.cat((image1, image2), dim=0) + return (image1,) + + +class imageBatchToImageList: + @classmethod + def INPUT_TYPES(s): + return {"required": {"image": ("IMAGE",), }} + + RETURN_TYPES = ("IMAGE",) + OUTPUT_IS_LIST = (True,) + FUNCTION = "doit" + + CATEGORY = "EasyUse/Image" + + def doit(self, image): + images = [image[i:i + 1, ...] 
for i in range(image.shape[0])] + return (images,) + # 图像拆分 class imageSplitList: @classmethod @@ -1512,6 +1554,8 @@ def IS_CHANGED(self, image): "easy imageRatio": imageRatio, "easy imageToMask": imageToMask, "easy imageConcat": imageConcat, + "easy imageListToImageBatch": imageListToImageBatch, + "easy imageBatchToImageList": imageBatchToImageList, "easy imageSplitList": imageSplitList, "easy imageSplitGrid": imageSplitGrid, "easy imagesSplitImage": imagesSplitImage, @@ -1545,6 +1589,8 @@ def IS_CHANGED(self, image): "easy imageToMask": "ImageToMask", "easy imageHSVMask": "ImageHSVMask", "easy imageConcat": "imageConcat", + "easy imageListToImageBatch": "Image List To Image Batch", + "easy imageBatchToImageList": "Image Batch To Image List", "easy imageSplitList": "imageSplitList", "easy imageSplitGrid": "imageSplitGrid", "easy imagesSplitImage": "imagesSplitImage", diff --git a/web/css/easy.css b/web/css/easy.css index 1a0ed05..7c69a7b 100644 --- a/web/css/easy.css +++ b/web/css/easy.css @@ -112,4 +112,11 @@ hr{ } ::-webkit-scrollbar-thumb:hover { background-color: transparent; +} + +[data-theme="dark"] .workspace_manager .chakra-card{ + background-color:var(--comfy-menu-bg)!important; +} +.workspace_manager .chakra-card{ + width: 400px; } \ No newline at end of file diff --git a/web/css/theme.css b/web/css/theme.css index ea452fd..cafc025 100644 --- a/web/css/theme.css +++ b/web/css/theme.css @@ -5,4 +5,4 @@ --error-color: #ff4d4f; --warning-color: #faad14; --font-family: Inter, -apple-system, BlinkMacSystemFont, Helvetica Neue, sans-serif; -} \ No newline at end of file +} From 4a9112d2fa9b59bbf9421f8fc5ae5d57396b2893 Mon Sep 17 00:00:00 2001 From: "zhenjie.ye" Date: Sun, 16 Jun 2024 05:24:39 +0800 Subject: [PATCH 25/49] Fix: ipadapterApplyEncoder & ipadapterApplyEmbeds Clip_Vision missing error --- py/easyNodes.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/py/easyNodes.py b/py/easyNodes.py index bd0400e..8df64ef 
100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -2889,10 +2889,11 @@ def __init__(self): def INPUT_TYPES(cls): ipa_cls = cls() normal_presets = ipa_cls.normal_presets - max_embeds_num = 3 + max_embeds_num = 4 inputs = { "required": { "model": ("MODEL",), + "clip_vision": ("CLIP_VISION",), "image1": ("IMAGE",), "preset": (normal_presets,), "num_embeds": ("INT", {"default": 2, "min": 1, "max": max_embeds_num}), @@ -2912,8 +2913,8 @@ def INPUT_TYPES(cls): inputs["optional"]["neg_embeds"] = ("EMBEDS",) return inputs - RETURN_TYPES = ("MODEL", "IPADAPTER", "EMBEDS", "EMBEDS", ) - RETURN_NAMES = ("model", "ipadapter", "pos_embed", "neg_embed", ) + RETURN_TYPES = ("MODEL", "CLIP_VISION","IPADAPTER", "EMBEDS", "EMBEDS", ) + RETURN_NAMES = ("model", "clip_vision","ipadapter", "pos_embed", "neg_embed",) CATEGORY = "EasyUse/Adapter" FUNCTION = "apply" @@ -2943,16 +2944,16 @@ def batch(self, embeds, method): def apply(self, **kwargs): model = kwargs['model'] + clip_vision = kwargs['clip_vision'] preset = kwargs['preset'] if 'optional_ipadapter' in kwargs: ipadapter = kwargs['optional_ipadapter'] else: - model, ipadapter = self.load_model(model, preset, 0, 'CPU', clip_vision=None, optional_ipadapter=None, cache_mode='none') + model, ipadapter = self.load_model(model, preset, 0, 'CPU', clip_vision=clip_vision, optional_ipadapter=None, cache_mode='none') if "IPAdapterEncoder" not in ALL_NODE_CLASS_MAPPINGS: self.error() encoder_cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterEncoder"] - pos_embeds = kwargs["pos_embeds"] if "pos_embeds" in kwargs else [] neg_embeds = kwargs["neg_embeds"] if "neg_embeds" in kwargs else [] for i in range(1, kwargs['num_embeds'] + 1): @@ -2961,14 +2962,14 @@ def apply(self, **kwargs): kwargs[f"mask{i}"] = kwargs[f"mask{i}"] if f"mask{i}" in kwargs else None kwargs[f"weight{i}"] = kwargs[f"weight{i}"] if f"weight{i}" in kwargs else 1.0 - pos, neg = encoder_cls().encode(ipadapter, kwargs[f"image{i}"], kwargs[f"weight{i}"], kwargs[f"mask{i}"], 
clip_vision=None) + pos, neg = encoder_cls().encode(ipadapter, kwargs[f"image{i}"], kwargs[f"weight{i}"], kwargs[f"mask{i}"], clip_vision=clip_vision) pos_embeds.append(pos) neg_embeds.append(neg) pos_embeds = self.batch(pos_embeds, kwargs['combine_method']) neg_embeds = self.batch(neg_embeds, kwargs['combine_method']) - return (model, ipadapter, pos_embeds, neg_embeds) + return (model,clip_vision, ipadapter, pos_embeds, neg_embeds) class ipadapterApplyEmbeds(ipadapter): def __init__(self): @@ -2982,6 +2983,7 @@ def INPUT_TYPES(cls): return { "required": { "model": ("MODEL",), + "clip_vision": ("CLIP_VISION",), "ipadapter": ("IPADAPTER",), "pos_embed": ("EMBEDS",), "weight": ("FLOAT", {"default": 1.0, "min": -1, "max": 3, "step": 0.05}), @@ -3002,12 +3004,12 @@ def INPUT_TYPES(cls): CATEGORY = "EasyUse/Adapter" FUNCTION = "apply" - def apply(self, model, ipadapter, pos_embed, weight, weight_type, start_at, end_at, embeds_scaling, attn_mask=None, neg_embed=None,): + def apply(self, model, ipadapter, clip_vision, pos_embed, weight, weight_type, start_at, end_at, embeds_scaling, attn_mask=None, neg_embed=None,): if "IPAdapterEmbeds" not in ALL_NODE_CLASS_MAPPINGS: self.error() cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterEmbeds"] - model, image = cls().apply_ipadapter(model, ipadapter, pos_embed, weight, weight_type, start_at, end_at, neg_embed=neg_embed, attn_mask=attn_mask, clip_vision=None, embeds_scaling=embeds_scaling) + model, image = cls().apply_ipadapter(model, ipadapter, pos_embed, weight, weight_type, start_at, end_at, neg_embed=neg_embed, attn_mask=attn_mask, clip_vision=clip_vision, embeds_scaling=embeds_scaling) return (model, ipadapter) From 107826d13426f10bd8d718f692810141dabd6293 Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 20 Jun 2024 19:39:46 +0800 Subject: [PATCH 26/49] fix:easy showAnything not considering API mode #220 --- py/image.py | 6 +++++- py/logic.py | 10 +++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/py/image.py 
b/py/image.py index 551b8eb..407000e 100644 --- a/py/image.py +++ b/py/image.py @@ -795,7 +795,11 @@ def chooser(self, prompt=None, my_unique_id=None, extra_pnginfo=None, **kwargs): mode = kwargs.pop('mode', 'Always Pause') last_choosen = None if mode == 'Keep Last Selection': - if id and extra_pnginfo[0] and "workflow" in extra_pnginfo[0]: + if not extra_pnginfo: + print("Error: extra_pnginfo is empty") + elif (not isinstance(extra_pnginfo[0], dict) or "workflow" not in extra_pnginfo[0]): + print("Error: extra_pnginfo[0] is not a dict or missing 'workflow' key") + else: workflow = extra_pnginfo[0]["workflow"] node = next((x for x in workflow["nodes"] if str(x["id"]) == id), None) if node: diff --git a/py/logic.py b/py/logic.py index 3830bed..e47cc14 100644 --- a/py/logic.py +++ b/py/logic.py @@ -75,7 +75,7 @@ class Int: @classmethod def INPUT_TYPES(s): return { - "required": {"value": ("INT", {"default": 0})}, + "required": {"value": ("INT", {"default": 0, "min": -999999, "max": 999999,})}, } RETURN_TYPES = ("INT",) @@ -144,7 +144,7 @@ class Float: @classmethod def INPUT_TYPES(s): return { - "required": {"value": ("FLOAT", {"default": 0, "step": 0.01})}, + "required": {"value": ("FLOAT", {"default": 0, "step": 0.01, "min": -999999, "max": 999999,})}, } RETURN_TYPES = ("FLOAT",) @@ -478,7 +478,11 @@ def log_input(self, unique_id=None, extra_pnginfo=None, **kwargs): values.append(str(val)) pass - if unique_id and extra_pnginfo and "workflow" in extra_pnginfo[0]: + if not extra_pnginfo: + print("Error: extra_pnginfo is empty") + elif (not isinstance(extra_pnginfo[0], dict) or "workflow" not in extra_pnginfo[0]): + print("Error: extra_pnginfo[0] is not a dict or missing 'workflow' key") + else: workflow = extra_pnginfo[0]["workflow"] node = next((x for x in workflow["nodes"] if str(x["id"]) == unique_id[0]), None) if node: From 2523183f2152bfee09d6de3fad6a3d6fcfe212ad Mon Sep 17 00:00:00 2001 From: yolain Date: Fri, 21 Jun 2024 16:58:41 +0800 Subject: [PATCH 27/49] 
Add gits scheduler support --- README.en.md | 30 +- README.md | 3 +- py/easyNodes.py | 55 +-- py/libs/sampler.py | 628 +++++++++++++++++++++++++++++- web/js/easy/easyDynamicWidgets.js | 7 +- 5 files changed, 652 insertions(+), 71 deletions(-) diff --git a/README.en.md b/README.en.md index 608324b..ba7be51 100644 --- a/README.en.md +++ b/README.en.md @@ -9,7 +9,7 @@ **ComfyUI-Easy-Use** is a simplified node integration package, which is extended on the basis of [tinyterraNodes](https://github.com/TinyTerra/ComfyUI_tinyterraNodes), and has been integrated and optimized for many mainstream node packages to achieve the purpose of faster and more convenient use of ComfyUI. While ensuring the degree of freedom, it restores the ultimate smooth image production experience that belongs to Stable Diffusion. - +[![ComfyUI-Yolain-Workflows](https://github.com/yolain/ComfyUI-Easy-Use/assets/73304135/9a3f54bc-a677-4bf1-a196-8845dd57c942)](https://github.com/yolain/ComfyUI-Yolain-Workflows) ## Introduce @@ -45,6 +45,7 @@ Double-click install.bat to install the required dependencies **v1.1.9** +- Added **gitsScheduler** - Added `easy imageBatchToImageList` and `easy imageListToImageBatch` - Recursive subcategories nested for models - Support for Stable Diffusion 3 model @@ -357,33 +358,6 @@ Disclaimer: Opened source was not easy. 
I have a lot of respect for the contribu | easy imageChooser | [cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) | Preview Chooser | | easy styleAlignedBatchAlign | [style_aligned_comfy](https://github.com/chrisgoringe/cg-image-picker) | styleAlignedBatchAlign | -## Workflow Examples - -### Text to image - - - -### Image to image + controlnet - - - -### SDTurbo + HiresFix + SVD - - - -### LayerDiffusion -#### SD15 - - -#### SDXL - - -### StableCascade -#### Text to image - - -#### Image to image - ## Credits diff --git a/README.md b/README.md index cfa8b34..45a1ac5 100644 --- a/README.md +++ b/README.md @@ -50,8 +50,9 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use ## 更新日志 -**v1.1.9**x +**v1.1.9** +- 增加 新的调度器 **gitsScheduler** - 增加 `easy imageBatchToImageList` 和 `easy imageListToImageBatch` (修复Impact版的一点小问题) - 递归模型子目录嵌套 - 支持 sd3 模型 diff --git a/py/easyNodes.py b/py/easyNodes.py index 8df64ef..0cc80d5 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -27,7 +27,7 @@ from .libs.wildcards import process_with_loras, get_wildcard_list, process from .libs.utils import find_wildcards_seed, is_linked_styles_selector, easySave, get_local_filepath, AlwaysEqualProxy, get_sd_version from .libs.loader import easyLoader -from .libs.sampler import easySampler, alignYourStepsScheduler +from .libs.sampler import easySampler, alignYourStepsScheduler, gitsScheduler from .libs.xyplot import easyXYPlot from .libs.controlnet import easyControlnet from .libs.conditioning import prompt_to_cond, set_cond @@ -37,6 +37,8 @@ sampler = easySampler() easyCache = easyLoader() + +new_schedulers = ['align_your_steps', 'gits'] # ---------------------------------------------------------------提示词 开始----------------------------------------------------------------------# # 正面提示词 @@ -3311,7 +3313,7 @@ def INPUT_TYPES(cls): "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), "sampler_name": 
(comfy.samplers.KSampler.SAMPLERS,), - "scheduler": (comfy.samplers.KSampler.SCHEDULERS + ['align_your_steps'],), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS + new_schedulers,), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), }, @@ -3389,7 +3391,7 @@ def INPUT_TYPES(cls): "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), - "scheduler": (comfy.samplers.KSampler.SCHEDULERS + ['align_your_steps'],), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS + new_schedulers,), "start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}), "end_at_step": ("INT", {"default": 10000, "min": 0, "max": 10000}), "add_noise": (["enable", "disable"],), @@ -3478,7 +3480,7 @@ def INPUT_TYPES(cls): "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), - "scheduler": (comfy.samplers.KSampler.SCHEDULERS+['align_your_steps'],), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS+new_schedulers,), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), }, @@ -3624,13 +3626,14 @@ def __init__(self): @classmethod def INPUT_TYPES(cls): - return {"required": - {"pipe": ("PIPE_LINE",), + return {"required": { + "pipe": ("PIPE_LINE",), "guider": (['CFG','DualCFG','IP2P+DualCFG','Basic'],{"default":"Basic"}), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), "cfg_negative": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), "sampler_name": (comfy.samplers.KSampler.SAMPLERS + ['inversed_euler'],), - "scheduler": (comfy.samplers.KSampler.SCHEDULERS + ['karrasADV','exponentialADV','polyExponential', 'sdturbo', 'vp', 'alignYourSteps'],), + "scheduler": 
(comfy.samplers.KSampler.SCHEDULERS + ['karrasADV','exponentialADV','polyExponential', 'sdturbo', 'vp', 'alignYourSteps', 'gits'],), + "coeff": ("FLOAT", {"default": 1.20, "min": 0.80, "max": 1.50, "step": 0.05}), "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 1000.0, "step": 0.01, "round": False}), "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 1000.0, "step": 0.01, "round": False}), @@ -3642,7 +3645,7 @@ def INPUT_TYPES(cls): "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "add_noise": (["enable", "disable"], {"default": "enable"}), "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), - }, + }, "optional": { "image_to_latent": ("IMAGE",), "latent": ("LATENT",), @@ -3735,7 +3738,7 @@ def add_model_patch_option(self, model): to["model_patch"] = {} return to - def settings(self, pipe, guider, cfg, cfg_negative, sampler_name, scheduler, steps, sigma_max, sigma_min, rho, beta_d, beta_min, eps_s, flip_sigmas, denoise, add_noise, seed, image_to_latent=None, latent=None, optional_sampler=None, optional_sigmas=None, prompt=None, extra_pnginfo=None, my_unique_id=None): + def settings(self, pipe, guider, cfg, cfg_negative, sampler_name, scheduler, coeff, steps, sigma_max, sigma_min, rho, beta_d, beta_min, eps_s, flip_sigmas, denoise, add_noise, seed, image_to_latent=None, latent=None, optional_sampler=None, optional_sigmas=None, prompt=None, extra_pnginfo=None, my_unique_id=None): # 图生图转换 vae = pipe["vae"] @@ -3762,13 +3765,12 @@ def settings(self, pipe, guider, cfg, cfg_negative, sampler_name, scheduler, ste case 'sdturbo': sigmas, = self.get_custom_cls('SDTurboScheduler').get_sigmas(model, steps, denoise) case 'alignYourSteps': - try: - model_type = get_sd_version(model) - if model_type == 'unknown': - raise Exception("This Model not supported") - sigmas, = alignYourStepsScheduler().get_sigmas(model_type.upper(), steps, denoise) - except: 
- raise Exception("Please update your ComfyUI") + model_type = get_sd_version(model) + if model_type == 'unknown': + raise Exception("This Model not supported") + sigmas, = alignYourStepsScheduler().get_sigmas(model_type.upper(), steps, denoise) + case 'gits': + sigmas, = gitsScheduler().get_sigmas(coeff, steps, denoise) case _: sigmas, = self.get_custom_cls('BasicScheduler').get_sigmas(model, scheduler, steps, denoise) @@ -4095,7 +4097,7 @@ def INPUT_TYPES(cls): "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), "sampler_name": (comfy.samplers.KSampler.SAMPLERS, {"default": "euler"}), - "scheduler": (comfy.samplers.KSampler.SCHEDULERS+ ['align_your_steps'], {"default": "normal"}), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS+ new_schedulers, {"default": "normal"}), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), }, @@ -4260,7 +4262,7 @@ def INPUT_TYPES(cls): "cfg_mode": (DynThresh.Modes,), "cfg_scale_min": ("FLOAT", {"default": 3.5, "min": 0.0, "max": 100.0, "step": 0.5}), "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), - "scheduler": (comfy.samplers.KSampler.SCHEDULERS+['align_your_steps'],), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS+new_schedulers,), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "seed": ("INT", {"default": 0, "min": 0, "max": MAX_SEED_NUM}), }, @@ -4399,7 +4401,7 @@ def INPUT_TYPES(cls): "steps": ("INT", {"default": 20, "min": 1, "max": 10000}), "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0}), "sampler_name": (comfy.samplers.KSampler.SAMPLERS,), - "scheduler": (comfy.samplers.KSampler.SCHEDULERS+['align_your_steps'],), + "scheduler": (comfy.samplers.KSampler.SCHEDULERS+new_schedulers,), "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}), "image_output": (["Hide", "Preview", "Preview&Choose", 
"Save", "Hide&Save", "Sender", "Sender&Save"],), "link_id": ("INT", {"default": 0, "min": 0, "max": sys.maxsize, "step": 1}), @@ -4534,13 +4536,14 @@ def process_sample_state(pipe, samp_model, samp_clip, samp_samples, samp_vae, sa noise = samp_custom['noise'] if 'noise' in samp_custom else None samp_samples, _ = sampler.custom_advanced_ksampler(noise, guider, _sampler, sigmas, samp_samples) elif scheduler == 'align_your_steps': - try: - model_type = get_sd_version(samp_model) - if model_type == 'unknown': - raise Exception("This Model not supported") - sigmas, = alignYourStepsScheduler().get_sigmas(model_type.upper(), steps, denoise) - except: - raise Exception("Please update your ComfyUI") + model_type = get_sd_version(samp_model) + if model_type == 'unknown': + raise Exception("This Model not supported") + sigmas, = alignYourStepsScheduler().get_sigmas(model_type.upper(), steps, denoise) + _sampler = comfy.samplers.sampler_object(sampler_name) + samp_samples = sampler.custom_ksampler(samp_model, samp_seed, steps, cfg, _sampler, sigmas, samp_positive, samp_negative, samp_samples, disable_noise=disable_noise, preview_latent=preview_latent) + elif scheduler == 'gits': + sigmas, = gitsScheduler().get_sigmas(coeff=1.2, steps=steps, denoise=denoise) _sampler = comfy.samplers.sampler_object(sampler_name) samp_samples = sampler.custom_ksampler(samp_model, samp_seed, steps, cfg, _sampler, sigmas, samp_positive, samp_negative, samp_samples, disable_noise=disable_noise, preview_latent=preview_latent) else: diff --git a/py/libs/sampler.py b/py/libs/sampler.py index b7aee37..9c646aa 100644 --- a/py/libs/sampler.py +++ b/py/libs/sampler.py @@ -273,6 +273,19 @@ def get_output_sdxl(self, sdxl_pipe: dict) -> Tuple: sdxl_pipe.get("seed") ) +def loglinear_interp(t_steps, num_steps): + """ + Performs log-linear interpolation of a given array of decreasing numbers. 
+ """ + xs = np.linspace(0, 1, len(t_steps)) + ys = np.log(t_steps[::-1]) + + new_xs = np.linspace(0, 1, num_steps) + new_ys = np.interp(new_xs, xs, ys) + + interped_ys = np.exp(new_ys)[::-1].copy() + return interped_ys + class alignYourStepsScheduler: NOISE_LEVELS = { @@ -282,32 +295,617 @@ class alignYourStepsScheduler: 0.3798540708, 0.2332364134, 0.1114188177, 0.0291671582], "SVD": [700.00, 54.5, 15.886, 7.977, 4.248, 1.789, 0.981, 0.403, 0.173, 0.034, 0.002]} + def get_sigmas(self, model_type, steps, denoise): - def loglinear_interp(self, t_steps, num_steps): - """ - Performs log-linear interpolation of a given array of decreasing numbers. - """ - xs = np.linspace(0, 1, len(t_steps)) - ys = np.log(t_steps[::-1]) + total_steps = steps + if denoise < 1.0: + if denoise <= 0.0: + return (torch.FloatTensor([]),) + total_steps = round(steps * denoise) - new_xs = np.linspace(0, 1, num_steps) - new_ys = np.interp(new_xs, xs, ys) + sigmas = self.NOISE_LEVELS[model_type][:] + if (steps + 1) != len(sigmas): + sigmas = loglinear_interp(sigmas, steps + 1) - interped_ys = np.exp(new_ys)[::-1].copy() - return interped_ys + sigmas = sigmas[-(total_steps + 1):] + sigmas[-1] = 0 + return (torch.FloatTensor(sigmas),) - def get_sigmas(self, model_type, steps, denoise): +class gitsScheduler: + + NOISE_LEVELS = { + 0.80: [ + [14.61464119, 7.49001646, 0.02916753], + [14.61464119, 11.54541874, 6.77309084, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 3.07277966, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 2.05039096, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 2.05039096, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 3.07277966, 1.56271636, + 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 3.07277966, + 1.56271636, 
0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 5.85520077, + 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 5.85520077, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, + 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, + 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.07277966, 1.56271636, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, + 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.1956799, 1.98035145, 0.86115354, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.14220476, 4.86714602, 3.1956799, 1.98035145, 0.86115354, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.07277966, 1.84880662, + 0.83188516, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.07277966, + 1.84880662, 0.83188516, 0.02916753], + 
[14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.75677586, + 2.84484982, 1.78698075, 0.803307, 0.02916753], + ], + 0.85: [ + [14.61464119, 7.49001646, 0.02916753], + [14.61464119, 7.49001646, 1.84880662, 0.02916753], + [14.61464119, 11.54541874, 6.77309084, 1.56271636, 0.02916753], + [14.61464119, 11.54541874, 7.11996698, 3.07277966, 1.24153244, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.09240818, 2.84484982, 0.95350921, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.09240818, 2.84484982, 0.95350921, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.58536053, 3.1956799, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 5.58536053, 3.1956799, 1.84880662, 0.803307, + 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 4.65472794, 3.07277966, + 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 8.75849152, 7.49001646, 6.14220476, 4.65472794, + 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, + 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.14220476, 4.65472794, 3.07277966, 1.84880662, 0.803307, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.14220476, 4.86714602, 3.60512662, 2.6383388, 1.56271636, 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 
12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, + 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, 0.72133851, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.24142551, + 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, 1.56271636, + 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, 2.45070267, + 1.56271636, 0.72133851, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.90732002, 10.31284904, 9.75859547, + 9.24142551, 8.75849152, 8.30717278, 7.88507891, 7.49001646, 6.77309084, 5.85520077, 4.65472794, 3.46139455, + 2.45070267, 1.56271636, 0.72133851, 0.02916753], + ], + 0.90: [ + [14.61464119, 6.77309084, 0.02916753], + [14.61464119, 7.49001646, 1.56271636, 0.02916753], + [14.61464119, 7.49001646, 3.07277966, 0.95350921, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.54230714, 0.89115214, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.54230714, 0.89115214, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.09240818, 3.07277966, 1.61558151, 0.69515091, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.11996698, 4.86714602, 3.07277966, 1.61558151, 0.69515091, + 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 2.95596409, 1.61558151, + 0.69515091, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.24153244, + 0.57119018, 
0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, + 2.19988537, 1.24153244, 0.57119018, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 5.85520077, 4.45427561, + 3.1956799, 2.19988537, 1.24153244, 0.57119018, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 4.86714602, + 3.75677586, 2.84484982, 1.84880662, 1.08895338, 0.52423614, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.14220476, + 4.86714602, 3.75677586, 2.84484982, 1.84880662, 1.08895338, 0.52423614, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.44769001, + 5.58536053, 4.45427561, 3.32507086, 2.45070267, 1.61558151, 0.95350921, 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 6.44769001, 5.58536053, 4.45427561, 3.32507086, 2.45070267, 1.61558151, 0.95350921, 0.45573691, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, + 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, 0.45573691, + 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.30717278, + 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, 0.95350921, + 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, + 8.30717278, 7.49001646, 6.77309084, 5.85520077, 4.86714602, 3.91689563, 3.07277966, 2.27973175, 1.56271636, + 0.95350921, 0.45573691, 0.02916753], + [14.61464119, 13.76078796, 12.96784878, 12.2308979, 11.54541874, 10.31284904, 9.24142551, 8.75849152, + 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, 
4.45427561, 3.60512662, 2.95596409, 2.19988537, + 1.51179266, 0.89115214, 0.43325692, 0.02916753], + ], + 0.95: [ + [14.61464119, 6.77309084, 0.02916753], + [14.61464119, 6.77309084, 1.56271636, 0.02916753], + [14.61464119, 7.49001646, 2.84484982, 0.89115214, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.36326075, 0.803307, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.56271636, 0.64427125, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.95596409, 1.56271636, 0.64427125, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 3.07277966, 1.91321158, 1.08895338, 0.50118381, + 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.91321158, 1.08895338, + 0.50118381, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.91321158, + 1.08895338, 0.50118381, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.19988537, 1.41535246, + 0.803307, 0.38853383, 0.02916753], + [14.61464119, 12.2308979, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.46139455, 2.6383388, 1.84880662, + 1.24153244, 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.46139455, + 2.6383388, 1.84880662, 1.24153244, 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 6.14220476, 4.86714602, 3.75677586, + 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], + [14.61464119, 12.96784878, 10.90732002, 8.75849152, 7.49001646, 6.44769001, 5.58536053, 4.65472794, + 3.60512662, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, + 4.65472794, 3.60512662, 2.95596409, 2.19988537, 1.56271636, 1.05362725, 0.64427125, 0.32104823, + 
0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.44769001, 5.58536053, + 4.65472794, 3.75677586, 3.07277966, 2.45070267, 1.78698075, 1.24153244, 0.83188516, 0.50118381, 0.22545385, + 0.02916753], + [14.61464119, 12.96784878, 11.54541874, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, + 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.36326075, 1.72759056, 1.24153244, 0.83188516, 0.50118381, + 0.22545385, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, + 5.85520077, 5.09240818, 4.45427561, 3.60512662, 2.95596409, 2.36326075, 1.72759056, 1.24153244, 0.83188516, + 0.50118381, 0.22545385, 0.02916753], + [14.61464119, 13.76078796, 12.2308979, 10.90732002, 9.24142551, 8.30717278, 7.49001646, 6.77309084, + 5.85520077, 5.09240818, 4.45427561, 3.75677586, 3.07277966, 2.45070267, 1.91321158, 1.46270394, 1.05362725, + 0.72133851, 0.43325692, 0.19894916, 0.02916753], + ], + 1.00: [ + [14.61464119, 1.56271636, 0.02916753], + [14.61464119, 6.77309084, 0.95350921, 0.02916753], + [14.61464119, 6.77309084, 2.36326075, 0.803307, 0.02916753], + [14.61464119, 7.11996698, 3.07277966, 1.56271636, 0.59516323, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.41535246, 0.57119018, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.86115354, 0.38853383, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.86115354, 0.38853383, + 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 4.86714602, 3.07277966, 1.98035145, 1.24153244, 0.72133851, + 0.34370604, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.07277966, 1.98035145, 1.24153244, + 0.72133851, 0.34370604, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.27973175, 1.51179266, + 0.95350921, 0.54755926, 0.25053367, 0.02916753], + [14.61464119, 11.54541874, 
7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, + 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, + 1.61558151, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, + 2.12350607, 1.56271636, 1.08895338, 0.72133851, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, + 2.19988537, 1.61558151, 1.162866, 0.803307, 0.50118381, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 5.85520077, 4.65472794, 3.75677586, 3.07277966, + 2.45070267, 1.84880662, 1.36964464, 1.01931262, 0.72133851, 0.45573691, 0.25053367, 0.09824532, + 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.46139455, + 2.84484982, 2.19988537, 1.67050016, 1.24153244, 0.92192322, 0.64427125, 0.43325692, 0.25053367, 0.09824532, + 0.02916753], + [14.61464119, 11.54541874, 8.75849152, 7.49001646, 6.14220476, 5.09240818, 4.26497746, 3.60512662, + 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 0.38853383, 0.22545385, + 0.09824532, 0.02916753], + [14.61464119, 12.2308979, 9.24142551, 8.30717278, 7.49001646, 6.14220476, 5.09240818, 4.26497746, + 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, 0.38853383, + 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 12.2308979, 9.24142551, 8.30717278, 7.49001646, 6.77309084, 5.85520077, 5.09240818, + 4.26497746, 3.60512662, 2.95596409, 2.45070267, 1.91321158, 1.51179266, 1.12534678, 0.83188516, 0.59516323, + 0.38853383, 0.22545385, 0.09824532, 0.02916753], + ], + 1.05: [ + [14.61464119, 0.95350921, 0.02916753], + [14.61464119, 6.77309084, 0.89115214, 0.02916753], + 
[14.61464119, 6.77309084, 2.05039096, 0.72133851, 0.02916753], + [14.61464119, 6.77309084, 2.84484982, 1.28281462, 0.52423614, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.61558151, 0.803307, 0.34370604, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.56271636, 0.803307, 0.34370604, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.95350921, 0.52423614, 0.22545385, + 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 1.98035145, 1.24153244, 0.74807048, 0.41087446, + 0.17026083, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.27973175, 1.51179266, 0.95350921, 0.59516323, 0.34370604, + 0.13792117, 0.02916753], + [14.61464119, 7.49001646, 5.09240818, 3.46139455, 2.45070267, 1.61558151, 1.08895338, 0.72133851, + 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.09240818, 3.46139455, 2.45070267, 1.61558151, 1.08895338, + 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.36326075, 1.61558151, + 1.08895338, 0.72133851, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.72759056, + 1.24153244, 0.86115354, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, + 1.61558151, 1.162866, 0.83188516, 0.59516323, 0.38853383, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.84484982, 2.19988537, + 1.67050016, 1.28281462, 0.95350921, 0.72133851, 0.52423614, 0.34370604, 0.19894916, 0.09824532, + 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.36326075, + 1.84880662, 1.41535246, 1.08895338, 0.83188516, 0.61951244, 0.45573691, 0.32104823, 0.19894916, 
0.09824532, + 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.65472794, 3.60512662, 2.95596409, 2.45070267, + 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.57119018, 0.43325692, 0.29807833, 0.19894916, + 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.30717278, 7.11996698, 5.85520077, 4.65472794, 3.60512662, 2.95596409, + 2.45070267, 1.91321158, 1.51179266, 1.20157266, 0.95350921, 0.74807048, 0.57119018, 0.43325692, 0.29807833, + 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 8.30717278, 7.11996698, 5.85520077, 4.65472794, 3.60512662, 2.95596409, + 2.45070267, 1.98035145, 1.61558151, 1.32549286, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.41087446, + 0.29807833, 0.19894916, 0.09824532, 0.02916753], + ], + 1.10: [ + [14.61464119, 0.89115214, 0.02916753], + [14.61464119, 2.36326075, 0.72133851, 0.02916753], + [14.61464119, 5.85520077, 1.61558151, 0.57119018, 0.02916753], + [14.61464119, 6.77309084, 2.45070267, 1.08895338, 0.45573691, 0.02916753], + [14.61464119, 6.77309084, 2.95596409, 1.56271636, 0.803307, 0.34370604, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.61558151, 0.89115214, 0.4783645, 0.19894916, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.84880662, 1.08895338, 0.64427125, 0.34370604, 0.13792117, + 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.84484982, 1.61558151, 0.95350921, 0.54755926, 0.27464288, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.91321158, 1.24153244, 0.803307, 0.4783645, 0.25053367, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.05039096, 1.41535246, 0.95350921, 0.64427125, + 0.41087446, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.27973175, 1.61558151, 1.12534678, 0.803307, 0.54755926, + 0.36617002, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.32507086, 2.45070267, 1.72759056, 1.24153244, 
0.89115214, + 0.64427125, 0.45573691, 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.09240818, 3.60512662, 2.84484982, 2.05039096, 1.51179266, 1.08895338, 0.803307, + 0.59516323, 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.09240818, 3.60512662, 2.84484982, 2.12350607, 1.61558151, 1.24153244, + 0.95350921, 0.72133851, 0.54755926, 0.41087446, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.08895338, + 0.83188516, 0.64427125, 0.50118381, 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.1956799, 2.45070267, 1.91321158, 1.51179266, 1.20157266, + 0.95350921, 0.74807048, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, + 0.02916753], + [14.61464119, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, 1.72759056, + 1.36964464, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.43325692, 0.34370604, 0.25053367, 0.17026083, + 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, + 1.72759056, 1.36964464, 1.08895338, 0.86115354, 0.69515091, 0.54755926, 0.43325692, 0.34370604, 0.25053367, + 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 11.54541874, 7.49001646, 5.85520077, 4.45427561, 3.46139455, 2.84484982, 2.19988537, + 1.72759056, 1.36964464, 1.08895338, 0.89115214, 0.72133851, 0.59516323, 0.4783645, 0.38853383, 0.29807833, + 0.22545385, 0.17026083, 0.09824532, 0.02916753], + ], + 1.15: [ + [14.61464119, 0.83188516, 0.02916753], + [14.61464119, 1.84880662, 0.59516323, 0.02916753], + [14.61464119, 5.85520077, 1.56271636, 0.52423614, 0.02916753], + [14.61464119, 5.85520077, 1.91321158, 0.83188516, 0.34370604, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.24153244, 0.59516323, 0.25053367, 0.02916753], + 
[14.61464119, 5.85520077, 2.84484982, 1.51179266, 0.803307, 0.41087446, 0.17026083, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.56271636, 0.89115214, 0.50118381, 0.25053367, 0.09824532, + 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.84880662, 1.12534678, 0.72133851, 0.43325692, 0.22545385, + 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.07277966, 1.91321158, 1.24153244, 0.803307, 0.52423614, 0.34370604, 0.19894916, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 2.95596409, 1.91321158, 1.24153244, 0.803307, 0.52423614, 0.34370604, + 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.05039096, 1.36964464, 0.95350921, 0.69515091, 0.4783645, + 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.803307, 0.59516323, + 0.43325692, 0.29807833, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.803307, 0.59516323, + 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, + 0.74807048, 0.59516323, 0.45573691, 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.78698075, 1.32549286, 1.01931262, 0.803307, + 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.78698075, 1.32549286, 1.01931262, 0.803307, + 0.64427125, 0.52423614, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, + 0.72133851, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, + 
0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, + 0.72133851, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.86714602, 3.1956799, 2.45070267, 1.84880662, 1.41535246, 1.12534678, 0.89115214, + 0.72133851, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + ], + 1.20: [ + [14.61464119, 0.803307, 0.02916753], + [14.61464119, 1.56271636, 0.52423614, 0.02916753], + [14.61464119, 2.36326075, 0.92192322, 0.36617002, 0.02916753], + [14.61464119, 2.84484982, 1.24153244, 0.59516323, 0.25053367, 0.02916753], + [14.61464119, 5.85520077, 2.05039096, 0.95350921, 0.45573691, 0.17026083, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.24153244, 0.64427125, 0.29807833, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.803307, 0.45573691, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 0.95350921, 0.59516323, 0.36617002, 0.19894916, + 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.67050016, 1.08895338, 0.74807048, 0.50118381, 0.32104823, + 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.24153244, 0.83188516, 0.59516323, 0.41087446, + 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 1.98035145, 1.36964464, 0.95350921, 0.69515091, 0.50118381, + 0.36617002, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.46139455, 2.36326075, 1.56271636, 1.08895338, 0.803307, 0.59516323, 0.45573691, + 0.34370604, 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 6.77309084, 3.46139455, 2.45070267, 1.61558151, 1.162866, 0.86115354, 0.64427125, 0.50118381, + 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 
0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, + 0.64427125, 0.50118381, 0.38853383, 0.29807833, 0.22545385, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, + 0.64427125, 0.50118381, 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.12350607, 1.51179266, 1.08895338, 0.83188516, + 0.64427125, 0.50118381, 0.41087446, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.20157266, 0.92192322, + 0.72133851, 0.57119018, 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, + 0.74807048, 0.59516323, 0.4783645, 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 7.49001646, 4.65472794, 3.07277966, 2.19988537, 1.61558151, 1.24153244, 0.95350921, + 0.74807048, 0.59516323, 0.50118381, 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.25: [ + [14.61464119, 0.72133851, 0.02916753], + [14.61464119, 1.56271636, 0.50118381, 0.02916753], + [14.61464119, 2.05039096, 0.803307, 0.32104823, 0.02916753], + [14.61464119, 2.36326075, 0.95350921, 0.43325692, 0.17026083, 0.02916753], + [14.61464119, 2.84484982, 1.24153244, 0.59516323, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.51179266, 0.803307, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.36326075, 1.24153244, 0.72133851, 0.41087446, 0.22545385, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.36964464, 
0.83188516, 0.52423614, 0.34370604, 0.19894916, + 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 0.98595673, 0.64427125, 0.43325692, 0.27464288, + 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.67050016, 1.08895338, 0.74807048, 0.52423614, 0.36617002, + 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.803307, 0.59516323, 0.45573691, 0.34370604, + 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.24153244, 0.86115354, 0.64427125, 0.4783645, 0.36617002, + 0.27464288, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.84880662, 1.28281462, 0.92192322, 0.69515091, 0.52423614, + 0.41087446, 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.72133851, 0.54755926, + 0.43325692, 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.72133851, 0.57119018, + 0.45573691, 0.36617002, 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.95596409, 1.91321158, 1.32549286, 0.95350921, 0.74807048, 0.59516323, 0.4783645, + 0.38853383, 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.41535246, 1.05362725, 0.803307, 0.61951244, 0.50118381, + 0.41087446, 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.41535246, 1.05362725, 0.803307, 0.64427125, 0.52423614, + 0.43325692, 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 
0.02916753], + [14.61464119, 5.85520077, 3.07277966, 2.05039096, 1.46270394, 1.08895338, 0.83188516, 0.66947293, + 0.54755926, 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.30: [ + [14.61464119, 0.72133851, 0.02916753], + [14.61464119, 1.24153244, 0.43325692, 0.02916753], + [14.61464119, 1.56271636, 0.59516323, 0.22545385, 0.02916753], + [14.61464119, 1.84880662, 0.803307, 0.36617002, 0.13792117, 0.02916753], + [14.61464119, 2.36326075, 1.01931262, 0.52423614, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.36964464, 0.74807048, 0.41087446, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.56271636, 0.89115214, 0.54755926, 0.34370604, 0.19894916, 0.09824532, + 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 0.95350921, 0.61951244, 0.41087446, 0.27464288, 0.17026083, + 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.36964464, 0.83188516, 0.54755926, 0.36617002, 0.25053367, + 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.41535246, 0.92192322, 0.64427125, 0.45573691, 0.34370604, + 0.25053367, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.56271636, 1.01931262, 0.72133851, 0.50118381, 0.36617002, 0.27464288, + 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.05362725, 0.74807048, 0.54755926, 0.41087446, + 0.32104823, 0.25053367, 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.77538133, 0.57119018, 0.43325692, + 0.34370604, 0.27464288, 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.59516323, 0.45573691, 0.36617002, + 0.29807833, 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 
2.84484982, 1.61558151, 1.08895338, 0.803307, 0.59516323, 0.4783645, 0.38853383, + 0.32104823, 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.83188516, 0.64427125, 0.50118381, 0.41087446, + 0.34370604, 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.72759056, 1.162866, 0.83188516, 0.64427125, 0.52423614, 0.43325692, + 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.78698075, 1.24153244, 0.92192322, 0.72133851, 0.57119018, + 0.45573691, 0.38853383, 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.84484982, 1.78698075, 1.24153244, 0.92192322, 0.72133851, 0.57119018, 0.4783645, + 0.41087446, 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + ], + 1.35: [ + [14.61464119, 0.69515091, 0.02916753], + [14.61464119, 0.95350921, 0.34370604, 0.02916753], + [14.61464119, 1.56271636, 0.57119018, 0.19894916, 0.02916753], + [14.61464119, 1.61558151, 0.69515091, 0.29807833, 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.83188516, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.162866, 0.64427125, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.36964464, 0.803307, 0.50118381, 0.32104823, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.41535246, 0.83188516, 0.54755926, 0.36617002, 0.25053367, 0.17026083, + 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.95350921, 0.64427125, 0.45573691, 0.32104823, 0.22545385, + 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 
1.56271636, 0.95350921, 0.64427125, 0.45573691, 0.34370604, 0.25053367, + 0.19894916, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 1.01931262, 0.72133851, 0.52423614, 0.38853383, 0.29807833, + 0.22545385, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 1.01931262, 0.72133851, 0.52423614, 0.41087446, 0.32104823, + 0.25053367, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.61558151, 1.05362725, 0.74807048, 0.54755926, 0.43325692, 0.34370604, + 0.27464288, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.72759056, 1.12534678, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.29807833, + 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 3.07277966, 1.72759056, 1.12534678, 0.803307, 0.59516323, 0.4783645, 0.38853383, 0.32104823, + 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.45070267, 1.51179266, 1.01931262, 0.74807048, 0.57119018, 0.45573691, + 0.36617002, 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.41087446, + 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.43325692, + 0.36617002, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 5.85520077, 2.6383388, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.45573691, + 0.38853383, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 
0.09824532, 0.02916753], + ], + 1.40: [ + [14.61464119, 0.59516323, 0.02916753], + [14.61464119, 0.95350921, 0.34370604, 0.02916753], + [14.61464119, 1.08895338, 0.43325692, 0.13792117, 0.02916753], + [14.61464119, 1.56271636, 0.64427125, 0.27464288, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.803307, 0.43325692, 0.22545385, 0.09824532, 0.02916753], + [14.61464119, 2.05039096, 0.95350921, 0.54755926, 0.34370604, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.72133851, 0.43325692, 0.27464288, 0.17026083, 0.09824532, + 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.34370604, 0.25053367, 0.17026083, + 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.52423614, 0.36617002, 0.27464288, 0.19894916, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.54755926, 0.38853383, 0.29807833, 0.22545385, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.41535246, 0.86115354, 0.59516323, 0.43325692, 0.32104823, 0.25053367, + 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.64427125, 0.45573691, 0.34370604, 0.27464288, + 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.64427125, 0.4783645, 0.36617002, 0.29807833, 0.25053367, + 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.98595673, 0.69515091, 0.52423614, 0.41087446, 0.34370604, + 0.29807833, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.72133851, 0.54755926, 0.43325692, 0.36617002, + 0.32104823, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.05362725, 0.74807048, 
0.57119018, 0.45573691, 0.38853383, + 0.34370604, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.41087446, 0.36617002, + 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.61951244, 0.50118381, 0.43325692, 0.38853383, + 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.61558151, 1.08895338, 0.803307, 0.64427125, 0.52423614, 0.45573691, 0.41087446, + 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + ], + 1.45: [ + [14.61464119, 0.59516323, 0.02916753], + [14.61464119, 0.803307, 0.25053367, 0.02916753], + [14.61464119, 0.95350921, 0.34370604, 0.09824532, 0.02916753], + [14.61464119, 1.24153244, 0.54755926, 0.25053367, 0.09824532, 0.02916753], + [14.61464119, 1.56271636, 0.72133851, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.803307, 0.45573691, 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 1.91321158, 0.95350921, 0.57119018, 0.36617002, 0.25053367, 0.17026083, 0.09824532, + 0.02916753], + [14.61464119, 2.19988537, 1.08895338, 0.64427125, 0.41087446, 0.27464288, 0.19894916, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.34370604, 0.25053367, 0.19894916, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.24153244, 0.74807048, 0.50118381, 0.36617002, 0.27464288, 0.22545385, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.54755926, 0.41087446, 0.32104823, 0.25053367, 0.19894916, + 0.17026083, 
0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.27464288, 0.22545385, + 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.83188516, 0.59516323, 0.45573691, 0.36617002, 0.29807833, + 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.28281462, 0.83188516, 0.59516323, 0.45573691, 0.36617002, 0.32104823, + 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.69515091, 0.52423614, 0.41087446, 0.34370604, + 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.51179266, 0.95350921, 0.69515091, 0.52423614, 0.43325692, 0.36617002, + 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 0.98595673, 0.72133851, 0.54755926, 0.45573691, 0.38853383, + 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.74807048, 0.57119018, 0.4783645, 0.41087446, 0.36617002, + 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.84484982, 1.56271636, 1.01931262, 0.74807048, 0.59516323, 0.50118381, 0.43325692, + 0.38853383, 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + ], + 1.50: [ + [14.61464119, 0.54755926, 0.02916753], + [14.61464119, 0.803307, 0.25053367, 0.02916753], + [14.61464119, 0.86115354, 0.32104823, 0.09824532, 0.02916753], + [14.61464119, 1.24153244, 0.54755926, 
0.25053367, 0.09824532, 0.02916753], + [14.61464119, 1.56271636, 0.72133851, 0.36617002, 0.19894916, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.803307, 0.45573691, 0.27464288, 0.17026083, 0.09824532, 0.02916753], + [14.61464119, 1.61558151, 0.83188516, 0.52423614, 0.34370604, 0.25053367, 0.17026083, 0.09824532, + 0.02916753], + [14.61464119, 1.84880662, 0.95350921, 0.59516323, 0.38853383, 0.27464288, 0.19894916, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.95350921, 0.59516323, 0.41087446, 0.29807833, 0.22545385, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 1.84880662, 0.95350921, 0.61951244, 0.43325692, 0.32104823, 0.25053367, 0.19894916, + 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.19988537, 1.12534678, 0.72133851, 0.50118381, 0.36617002, 0.27464288, 0.22545385, + 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.19988537, 1.12534678, 0.72133851, 0.50118381, 0.36617002, 0.29807833, 0.25053367, + 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.29807833, 0.25053367, + 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.57119018, 0.43325692, 0.34370604, 0.29807833, 0.27464288, + 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.59516323, 0.45573691, 0.36617002, 0.32104823, 0.29807833, + 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.36326075, 1.24153244, 0.803307, 0.59516323, 0.45573691, 0.38853383, 0.34370604, 0.32104823, + 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, 0.09824532, + 0.02916753], + [14.61464119, 2.45070267, 1.32549286, 0.86115354, 0.64427125, 0.50118381, 0.41087446, 
0.36617002, + 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, 0.13792117, + 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.36964464, 0.92192322, 0.69515091, 0.54755926, 0.45573691, 0.41087446, + 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + [14.61464119, 2.45070267, 1.41535246, 0.95350921, 0.72133851, 0.57119018, 0.4783645, 0.43325692, 0.38853383, + 0.36617002, 0.34370604, 0.32104823, 0.29807833, 0.27464288, 0.25053367, 0.22545385, 0.19894916, 0.17026083, + 0.13792117, 0.09824532, 0.02916753], + ], + } + + def get_sigmas(self, coeff, steps, denoise): total_steps = steps if denoise < 1.0: if denoise <= 0.0: return (torch.FloatTensor([]),) total_steps = round(steps * denoise) - sigmas = self.NOISE_LEVELS[model_type][:] - if (steps + 1) != len(sigmas): - sigmas = self.loglinear_interp(sigmas, steps + 1) + if steps <= 20: + sigmas = self.NOISE_LEVELS[round(coeff, 2)][steps-2][:] + else: + sigmas = self.NOISE_LEVELS[round(coeff, 2)][-1][:] + sigmas = loglinear_interp(sigmas, steps + 1) sigmas = sigmas[-(total_steps + 1):] sigmas[-1] = 0 - return (torch.FloatTensor(sigmas),) \ No newline at end of file + return (torch.FloatTensor(sigmas), ) \ No newline at end of file diff --git a/web/js/easy/easyDynamicWidgets.js b/web/js/easy/easyDynamicWidgets.js index 2729cd1..9b97ab1 100644 --- a/web/js/easy/easyDynamicWidgets.js +++ b/web/js/easy/easyDynamicWidgets.js @@ -333,6 +333,7 @@ function widgetLogic(node, widget) { toggleWidget(node, findWidgetByName(node, 'beta_d')) toggleWidget(node, findWidgetByName(node, 'beta_min')) toggleWidget(node, findWidgetByName(node, 'eps_s')) + toggleWidget(node, findWidgetByName(node, 'coeff')) if(widget.value != 'exponentialADV'){ toggleWidget(node, findWidgetByName(node, 'rho'), true) }else{ @@ -346,7 +347,9 @@ function widgetLogic(node, widget) { toggleWidget(node, 
findWidgetByName(node, 'beta_d'),true) toggleWidget(node, findWidgetByName(node, 'beta_min'),true) toggleWidget(node, findWidgetByName(node, 'eps_s'),true) - }else{ + toggleWidget(node, findWidgetByName(node, 'coeff')) + } + else{ toggleWidget(node, findWidgetByName(node, 'denoise'),true) toggleWidget(node, findWidgetByName(node, 'sigma_max')) toggleWidget(node, findWidgetByName(node, 'sigma_min')) @@ -354,6 +357,8 @@ function widgetLogic(node, widget) { toggleWidget(node, findWidgetByName(node, 'beta_min')) toggleWidget(node, findWidgetByName(node, 'eps_s')) toggleWidget(node, findWidgetByName(node, 'rho')) + if(widget.value == 'gits') toggleWidget(node, findWidgetByName(node, 'coeff'), true) + else toggleWidget(node, findWidgetByName(node, 'coeff')) } updateNodeHeight(node) } From 171cac3db69b9842ab7edea35ac17d58eb33192d Mon Sep 17 00:00:00 2001 From: yolain Date: Sat, 22 Jun 2024 18:09:10 +0800 Subject: [PATCH 28/49] Add strong style transfer to weight_type in easy ipadapterApplyADV --- py/easyNodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py/easyNodes.py b/py/easyNodes.py index 0cc80d5..c9d9301 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -2495,7 +2495,7 @@ def __init__(self): 'FACEID PLUS V2', 'FACEID PORTRAIT (style transfer)' ] - self.weight_types = ["linear", "ease in", "ease out", 'ease in-out', 'reverse in-out', 'weak input', 'weak output', 'weak middle', 'strong middle', 'style transfer', 'composition'] + self.weight_types = ["linear", "ease in", "ease out", 'ease in-out', 'reverse in-out', 'weak input', 'weak output', 'weak middle', 'strong middle', 'style transfer', 'composition', 'strong style transfer'] self.presets = self.normal_presets + self.faceid_presets From 3d5fb30592d50f000915f943d48c28c1a8cabb72 Mon Sep 17 00:00:00 2001 From: yolain Date: Tue, 25 Jun 2024 12:32:08 +0800 Subject: [PATCH 29/49] fix:can not refresh node when empty widget --- web/js/easy/easyExtraMenu.js | 24 +++++++++++++----------- 1 file 
changed, 13 insertions(+), 11 deletions(-) diff --git a/web/js/easy/easyExtraMenu.js b/web/js/easy/easyExtraMenu.js index 19a8a3f..f0d2c44 100644 --- a/web/js/easy/easyExtraMenu.js +++ b/web/js/easy/easyExtraMenu.js @@ -438,12 +438,14 @@ const reloadNode = function (node) { function handleLinks() { // re-convert inputs - for (let w of oldNode.widgets) { - if (w.type === 'converted-widget') { - const WidgetToConvert = newNode.widgets.find((nw) => nw.name === w.name); - for (let i of oldNode.inputs) { - if (i.name === w.name) { - convertToInput(newNode, WidgetToConvert, i.widget); + if(oldNode.widgets) { + for (let w of oldNode.widgets) { + if (w.type === 'converted-widget') { + const WidgetToConvert = newNode.widgets.find((nw) => nw.name === w.name); + for (let i of oldNode.inputs) { + if (i.name === w.name) { + convertToInput(newNode, WidgetToConvert, i.widget); + } } } } @@ -461,7 +463,7 @@ const reloadNode = function (node) { // fix widget values let values = oldNode.widgets_values; - if (!values) { + if (!values && newNode.widgets?.length>0) { newNode.widgets.forEach((newWidget, index) => { const oldWidget = oldNode.widgets[index]; if (newWidget.name === oldWidget.name && newWidget.type === oldWidget.type) { @@ -472,7 +474,7 @@ const reloadNode = function (node) { return; } let pass = false - const isIterateForwards = values.length <= newNode.widgets.length; + const isIterateForwards = values?.length <= newNode.widgets?.length; let vi = isIterateForwards ? 
0 : values.length - 1; function evalWidgetValues(testValue, newWidg) { if (testValue === true || testValue === false) { @@ -504,15 +506,15 @@ const reloadNode = function (node) { } vi++ if (!isIterateForwards) { - vi = values.length - (newNode.widgets.length - 1 - wi); + vi = values.length - (newNode.widgets?.length - 1 - wi); } } }; - if (isIterateForwards) { + if (isIterateForwards && newNode.widgets?.length>0) { for (let wi = 0; wi < newNode.widgets.length; wi++) { updateValue(wi); } - } else { + } else if(newNode.widgets?.length>0){ for (let wi = newNode.widgets.length - 1; wi >= 0; wi--) { updateValue(wi); } From ad0653c324734358fed62f1bf5aa0effa18dc49d Mon Sep 17 00:00:00 2001 From: yolain Date: Tue, 25 Jun 2024 19:10:08 +0800 Subject: [PATCH 30/49] fix:the svg icon is not correct size in the bottom-left toolbar #224 --- web/css/toolbar.css | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/web/css/toolbar.css b/web/css/toolbar.css index d297d65..ce944a6 100644 --- a/web/css/toolbar.css +++ b/web/css/toolbar.css @@ -35,6 +35,10 @@ color:white; transition: all 0.3s ease-in-out; } +.easyuse-toolbar-icon svg{ + width: 14px; + height: 14px; +} .easyuse-toolbar-tips{ visibility: hidden; opacity: 0; From 1af06474ee39a60191295e0c477f1df28256fe2d Mon Sep 17 00:00:00 2001 From: yolain Date: Tue, 25 Jun 2024 19:13:27 +0800 Subject: [PATCH 31/49] Upgrade stable version v1.1.9 to comfyregistry --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7fd8a5f..3a6cda2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,9 @@ [project] name = "comfyui-easy-use" description = "To enhance the usability of ComfyUI, optimizations and integrations have been implemented for several commonly used nodes." 
-version = "1.1.8" +version = "1.1.9" license = "LICENSE" -dependencies = ["diffusers>=0.25.0", "clip_interrogator>=0.6.0", "sentencepiece==0.2.0", "lark-parser", "onnxruntime"] +dependencies = ["diffusers>=0.25.0", "accelerate>=0.25.0", "clip_interrogator>=0.6.0", "sentencepiece==0.2.0", "lark-parser", "onnxruntime", "spandrel"] [project.urls] Repository = "https://github.com/yolain/ComfyUI-Easy-Use" From 44a5ed1f7f4f7ce890ee71e3b626feb1624c6e38 Mon Sep 17 00:00:00 2001 From: yolain Date: Tue, 25 Jun 2024 23:13:22 +0800 Subject: [PATCH 32/49] Add layer_weights in easy ipadapterApplyADV --- README.en.md | 4 ++ README.md | 4 ++ __init__.py | 4 +- py/easyNodes.py | 98 ++++++++++++++++++++++++++++++++----------------- 4 files changed, 74 insertions(+), 36 deletions(-) diff --git a/README.en.md b/README.en.md index ba7be51..d22cdf1 100644 --- a/README.en.md +++ b/README.en.md @@ -43,6 +43,10 @@ Double-click install.bat to install the required dependencies ## Changelog +**v1.2.0** + +- Added **layer_weights** in `easy ipadapterApplyADV` + **v1.1.9** - Added **gitsScheduler** diff --git a/README.md b/README.md index 45a1ac5..8d0ea94 100644 --- a/README.md +++ b/README.md @@ -50,6 +50,10 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use ## 更新日志 +**v1.2.0** + +- 增加 **layer_weights** 属性在 `easy ipadapterApplyADV` 节点 + **v1.1.9** - 增加 新的调度器 **gitsScheduler** diff --git a/__init__.py b/__init__.py index fac9e82..fe82b14 100644 --- a/__init__.py +++ b/__init__.py @@ -1,4 +1,4 @@ -__version__ = "1.1.9" +__version__ = "1.2.0" import os import folder_paths @@ -42,7 +42,7 @@ os.mkdir(styles_path) os.mkdir(samples_path) -# 需要把模型预览图暴露给PS读取,此处借鉴了 AIGODLIKE-ComfyUI-Studio 的部分代码 +# Model thumbnails from .py.libs.add_resources import add_static_resource from .py.libs.model import easyModelManager model_config = easyModelManager().models_config diff --git a/py/easyNodes.py b/py/easyNodes.py index c9d9301..aca2b47 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -2495,7 +2495,7 
@@ def __init__(self): 'FACEID PLUS V2', 'FACEID PORTRAIT (style transfer)' ] - self.weight_types = ["linear", "ease in", "ease out", 'ease in-out', 'reverse in-out', 'weak input', 'weak output', 'weak middle', 'strong middle', 'style transfer', 'composition', 'strong style transfer'] + self.weight_types = ["linear", "ease in", "ease out", 'ease in-out', 'reverse in-out', 'weak input', 'weak output', 'weak middle', 'strong middle', 'style transfer', 'composition', 'strong style transfer', 'style and composition', 'style transfer precise'] self.presets = self.normal_presets + self.faceid_presets @@ -2603,6 +2603,21 @@ def get_ipadapter_file(self, preset, is_sdxl, node_name): return ipadapter_file, ipadapter_name, is_insightface, lora_pattern + def get_lora_pattern(self, file): + basename = os.path.basename(file) + lora_pattern = None + if re.search(r'faceid.sdxl\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.sdxl.lora\.safetensors$' + elif re.search(r'faceid.sd15\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.sd15.lora\.safetensors$' + elif re.search(r'faceid.plus.sd15\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.plus.sd15.lora\.safetensors$' + elif re.search(r'faceid.plusv2.sdxl\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.plusv2.sdxl.lora\.safetensors$' + elif re.search(r'faceid.plusv2.sd15\.(safetensors|bin)$', basename, re.IGNORECASE): + lora_pattern = 'faceid.plusv2.sd15.lora\.safetensors$' + return lora_pattern + def get_lora_file(self, preset, pattern, model_type, model, model_strength, clip_strength, clip=None): lora_list = folder_paths.get_filename_list("loras") lora_files = [e for e in lora_list if re.search(pattern, e, re.IGNORECASE)] @@ -2639,8 +2654,15 @@ def ipadapter_model_loader(self, file): def load_model(self, model, preset, lora_model_strength, provider="CPU", clip_vision=None, optional_ipadapter=None, cache_mode='none', node_name='easy 
ipadapterApply'): pipeline = {"clipvision": {'file': None, 'model': None}, "ipadapter": {'file': None, 'model': None}, "insightface": {'provider': None, 'model': None}} + ipadapter, insightface, is_insightface, lora_pattern = None, None, None, None if optional_ipadapter is not None: pipeline = optional_ipadapter + if not clip_vision: + clip_vision = pipeline['clipvision']['model'] + ipadapter = pipeline['ipadapter']['model'] + if 'insightface' in pipeline: + insightface = pipeline['insightface']['model'] + lora_pattern = self.get_lora_pattern(pipeline['ipadapter']['file']) # 1. Load the clipvision model if not clip_vision: @@ -2658,29 +2680,30 @@ def load_model(self, model, preset, lora_model_strength, provider="CPU", clip_vi if cache_mode in ["all", "clip_vision only"]: backend_cache.update_cache(clipvision_name, 'clip_vision', (False, clip_vision)) pipeline['clipvision']['file'] = clipvision_file - pipeline['clipvision']['model'] = clip_vision + pipeline['clipvision']['model'] = clip_vision # 2. 
Load the ipadapter model is_sdxl = isinstance(model.model, comfy.model_base.SDXL) - ipadapter_file, ipadapter_name, is_insightface, lora_pattern = self.get_ipadapter_file(preset, is_sdxl, node_name) - model_type = 'sdxl' if is_sdxl else 'sd15' - if ipadapter_file is None: - model_url = IPADAPTER_MODELS[preset][model_type]["model_url"] - ipadapter_file = get_local_filepath(model_url, IPADAPTER_DIR) - ipadapter_name = os.path.basename(model_url) - if ipadapter_file == pipeline['ipadapter']['file']: - ipadapter = pipeline['ipadapter']['model'] - elif cache_mode in ["all", "ipadapter only"] and ipadapter_name in backend_cache.cache: - log_node_info("easy ipadapterApply", f"Using IpAdapterModel {ipadapter_name} Cached") - _, ipadapter = backend_cache.cache[ipadapter_name][1] - else: - ipadapter = self.ipadapter_model_loader(ipadapter_file) - pipeline['ipadapter']['file'] = ipadapter_file - log_node_info("easy ipadapterApply", f"Using IpAdapterModel {ipadapter_name}") - if cache_mode in ["all", "ipadapter only"]: - backend_cache.update_cache(ipadapter_name, 'ipadapter', (False, ipadapter)) + if not ipadapter: + ipadapter_file, ipadapter_name, is_insightface, lora_pattern = self.get_ipadapter_file(preset, is_sdxl, node_name) + model_type = 'sdxl' if is_sdxl else 'sd15' + if ipadapter_file is None: + model_url = IPADAPTER_MODELS[preset][model_type]["model_url"] + ipadapter_file = get_local_filepath(model_url, IPADAPTER_DIR) + ipadapter_name = os.path.basename(model_url) + if ipadapter_file == pipeline['ipadapter']['file']: + ipadapter = pipeline['ipadapter']['model'] + elif cache_mode in ["all", "ipadapter only"] and ipadapter_name in backend_cache.cache: + log_node_info("easy ipadapterApply", f"Using IpAdapterModel {ipadapter_name} Cached") + _, ipadapter = backend_cache.cache[ipadapter_name][1] + else: + ipadapter = self.ipadapter_model_loader(ipadapter_file) + pipeline['ipadapter']['file'] = ipadapter_file + log_node_info("easy ipadapterApply", f"Using IpAdapterModel 
{ipadapter_name}") + if cache_mode in ["all", "ipadapter only"]: + backend_cache.update_cache(ipadapter_name, 'ipadapter', (False, ipadapter)) - pipeline['ipadapter']['model'] = ipadapter + pipeline['ipadapter']['model'] = ipadapter # 3. Load the lora model if needed if lora_pattern is not None: @@ -2689,18 +2712,19 @@ def load_model(self, model, preset, lora_model_strength, provider="CPU", clip_vi # 4. Load the insightface model if needed if is_insightface: - icache_key = 'insightface-' + provider - if provider == pipeline['insightface']['provider']: - insightface = pipeline['insightface']['model'] - elif cache_mode in ["all", "insightface only"] and icache_key in backend_cache.cache: - log_node_info("easy ipadapterApply", f"Using InsightFaceModel {icache_key} Cached") - _, insightface = backend_cache.cache[icache_key][1] - else: - insightface = insightface_loader(provider) - if cache_mode in ["all", "insightface only"]: - backend_cache.update_cache(icache_key, 'insightface',(False, insightface)) - pipeline['insightface']['provider'] = provider - pipeline['insightface']['model'] = insightface + if not insightface: + icache_key = 'insightface-' + provider + if provider == pipeline['insightface']['provider']: + insightface = pipeline['insightface']['model'] + elif cache_mode in ["all", "insightface only"] and icache_key in backend_cache.cache: + log_node_info("easy ipadapterApply", f"Using InsightFaceModel {icache_key} Cached") + _, insightface = backend_cache.cache[icache_key][1] + else: + insightface = insightface_loader(provider) + if cache_mode in ["all", "insightface only"]: + backend_cache.update_cache(icache_key, 'insightface',(False, insightface)) + pipeline['insightface']['provider'] = provider + pipeline['insightface']['model'] = insightface return (model, pipeline,) @@ -2796,6 +2820,7 @@ def INPUT_TYPES(cls): "attn_mask": ("MASK",), "clip_vision": ("CLIP_VISION",), "optional_ipadapter": ("IPADAPTER",), + "layer_weights": ("STRING", {"default": "", 
"multiline": True, "placeholder": "Mad Scientist Layer Weights"}), } } @@ -2804,10 +2829,15 @@ def INPUT_TYPES(cls): CATEGORY = "EasyUse/Adapter" FUNCTION = "apply" - def apply(self, model, image, preset, lora_strength, provider, weight, weight_faceidv2, weight_type, combine_embeds, start_at, end_at, embeds_scaling, cache_mode, use_tiled, use_batch, sharpening, weight_style=1.0, weight_composition=1.0, image_style=None, image_composition=None, expand_style=False, image_negative=None, clip_vision=None, attn_mask=None, optional_ipadapter=None): + def apply(self, model, image, preset, lora_strength, provider, weight, weight_faceidv2, weight_type, combine_embeds, start_at, end_at, embeds_scaling, cache_mode, use_tiled, use_batch, sharpening, weight_style=1.0, weight_composition=1.0, image_style=None, image_composition=None, expand_style=False, image_negative=None, clip_vision=None, attn_mask=None, optional_ipadapter=None, layer_weights=None): images, masks = image, [None] model, ipadapter = self.load_model(model, preset, lora_strength, provider, clip_vision=clip_vision, optional_ipadapter=optional_ipadapter, cache_mode=cache_mode) - if use_tiled: + if layer_weights: + if "IPAdapterMS" not in ALL_NODE_CLASS_MAPPINGS: + self.error() + cls = ALL_NODE_CLASS_MAPPINGS["IPAdapterAdvanced"] + model, images = cls().apply_ipadapter(model, ipadapter, weight=weight, weight_type=weight_type, start_at=start_at, end_at=end_at, combine_embeds=combine_embeds, weight_faceidv2=weight_faceidv2, image=image, image_negative=image_negative, weight_style=weight_style, weight_composition=weight_composition, image_style=image_style, image_composition=image_composition, expand_style=expand_style, clip_vision=clip_vision, attn_mask=attn_mask, insightface=None, embeds_scaling=embeds_scaling, layer_weights=layer_weights) + elif use_tiled: if use_batch: if "IPAdapterTiledBatch" not in ALL_NODE_CLASS_MAPPINGS: self.error() From 70b8b9f289cae3d1695b4c4729ceec6e502c1424 Mon Sep 17 00:00:00 2001 From: 
yolain Date: Wed, 26 Jun 2024 15:30:11 +0800 Subject: [PATCH 33/49] Add easy slider control for ipadapterMS --- README.en.md | 1 + README.md | 1 + py/easyNodes.py | 34 +++++++ web/css/index.css | 3 +- web/css/sliderControl.css | 70 +++++++++++++++ web/js/easy/easySliderControl.js | 150 +++++++++++++++++++++++++++++++ 6 files changed, 258 insertions(+), 1 deletion(-) create mode 100644 web/css/sliderControl.css create mode 100644 web/js/easy/easySliderControl.js diff --git a/README.en.md b/README.en.md index d22cdf1..8c7ad71 100644 --- a/README.en.md +++ b/README.en.md @@ -45,6 +45,7 @@ Double-click install.bat to install the required dependencies **v1.2.0** +- Added **easy sliderControl** - Slider control node, which can currently be used to control the parameters of ipadapterMS (double-click the slider to reset to default) - Added **layer_weights** in `easy ipadapterApplyADV` **v1.1.9** diff --git a/README.md b/README.md index 8d0ea94..4c6d0b5 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,7 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use **v1.2.0** +- 增加 **easy sliderControl** - 滑块控制节点,当前可用于控制ipadapterMS的参数 (双击滑块可重置为默认值) - 增加 **layer_weights** 属性在 `easy ipadapterApplyADV` 节点 **v1.1.9** diff --git a/py/easyNodes.py b/py/easyNodes.py index aca2b47..fd05246 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -6849,6 +6849,38 @@ def notify(self, pipe, names=None, unique_id=None, extra_pnginfo=None): return {"ui": {"text": names}, "result": (ckpt_name, vae_name, lora_name)} +class sliderControl: + @classmethod + def INPUT_TYPES(s): + return { + "required": { + "mode": (['ipadapter layer weights'],), + "model_type": (['sdxl', 'sd1x'],), + }, + "hidden": { + "prompt": "PROMPT", + "my_unique_id": "UNIQUE_ID", + "extra_pnginfo": "EXTRA_PNGINFO", + }, + } + + RETURN_TYPES = ("STRING",) + RETURN_NAMES = ("layer_weights",) + + FUNCTION = "control" + OUTPUT_NODE = True + + CATEGORY = "EasyUse/Util" + + def control(self, mode, model_type, prompt=None, 
my_unique_id=None, extra_pnginfo=None): + values = '' + if my_unique_id in prompt: + print(prompt[my_unique_id]) + if 'values' in prompt[my_unique_id]["inputs"]: + values = prompt[my_unique_id]["inputs"]['values'] + + return (values,) + #---------------------------------------------------------------API 开始----------------------------------------------------------------------# from .libs.stability import stableAPI class stableDiffusion3API: @@ -6995,6 +7027,7 @@ def generate(self, positive, negative, model, aspect_ratio, seed, denoise, optio # others 其他 "easy showSpentTime": showSpentTime, "easy showLoaderSettingsNames": showLoaderSettingsNames, + "easy sliderControl": sliderControl, "dynamicThresholdingFull": dynamicThresholdingFull, # api 相关 "easy stableDiffusion3API": stableDiffusion3API, @@ -7107,6 +7140,7 @@ def generate(self, positive, negative, model, aspect_ratio, seed, denoise, optio # others 其他 "easy showSpentTime": "Show Spent Time", "easy showLoaderSettingsNames": "Show Loader Settings Names", + "easy sliderControl": "Easy Slider Control", "dynamicThresholdingFull": "DynamicThresholdingFull", # api 相关 "easy stableDiffusion3API": "Stable Diffusion 3 (API)", diff --git a/web/css/index.css b/web/css/index.css index b6e1950..5c148a2 100644 --- a/web/css/index.css +++ b/web/css/index.css @@ -7,4 +7,5 @@ @import "toast.css"; @import "account.css"; @import "chooser.css"; -@import "toolbar.css"; \ No newline at end of file +@import "toolbar.css"; +@import "sliderControl.css"; \ No newline at end of file diff --git a/web/css/sliderControl.css b/web/css/sliderControl.css new file mode 100644 index 0000000..5884772 --- /dev/null +++ b/web/css/sliderControl.css @@ -0,0 +1,70 @@ +.easyuse-slider{ + width:100%; + height:100%; + display: flex; + flex-direction: row; + justify-content: space-between; + position: relative; +} +.easyuse-slider-item{ + height: inherit; + min-width: 25px; + justify-content: center; + display: flex; + flex-direction: column; + align-items: 
center; +} +.easyuse-slider-item.positive .easyuse-slider-item-label{ + color: var(--success-color); +} +.easyuse-slider-item.negative .easyuse-slider-item-label{ + color: var(--error-color); +} +.easyuse-slider-item-input{ + height:15px; + font-size: 10px; + color: var(--input-text); +} +.easyuse-slider-item-label{ + height:15px; + border: none; + color: var(--descrip-text); + font-size: 8px; +} +.easyuse-slider-item-scroll { + width: 5px; + height: calc(100% - 30px); + background: var(--comfy-input-bg); + border-radius: 10px; + position: relative; +} +.easyuse-slider-item-bar{ + width: 10px; + height: 10px; + background: linear-gradient(to bottom, var(--input-text), var(--descrip-text)); + border-radius:100%; + box-shadow: 0 2px 10px var(--bg-color); + position: absolute; + top: 0; + left:-2.5px; + cursor: pointer; + z-index:1; +} +.easyuse-slider-item-area{ + width: 100%; + border-radius:20px; + position: absolute; + bottom: 0; + background: var(--bg-color); + z-index:0; +} +.easyuse-slider-item.positive .easyuse-slider-item-area{ + background: var(--success-color); +} +.easyuse-slider-item.negative .easyuse-slider-item-area{ + background: var(--error-color); +} + +[data-theme="dark"] .easyuse-slider-item-scroll{ + background: var(--comfy-menu-bg); +} \ No newline at end of file diff --git a/web/js/easy/easySliderControl.js b/web/js/easy/easySliderControl.js new file mode 100644 index 0000000..cfa055f --- /dev/null +++ b/web/js/easy/easySliderControl.js @@ -0,0 +1,150 @@ +import { app } from "../../../../scripts/app.js"; +import { api } from "../../../../scripts/api.js"; +import { $el } from "../../../../scripts/ui.js"; +import { $t } from "../common/i18n.js"; + + +const calculatePercent = (value, min, max) => ((value-min)/(max-min)*100) + +const getLayerDefaultValue = (index) => { + switch (index){ + case 3: + return 2.5 + case 6: + return 1 + default: + return 0 + } +} +const setSliderValue = (_this, layer_total, refresh=false) => { + let sliders = [] + let 
arrays = Array.from({length: layer_total}, (v, i) => ({default: layer_total == 12 ? getLayerDefaultValue(i) : 0, min: -1, max: 3, step: 0.05, value:layer_total == 12 ? getLayerDefaultValue(i) : 0, mute:false, solo:false})) + _this.setProperty("values", Array.from({length: layer_total}, (v, i) => i+':'+arrays[i]['value'])) + for (let i = 0; i < layer_total; i++) { + let scroll = $el('div.easyuse-slider-item-scroll') + let value = $el('div.easyuse-slider-item-input', {textContent: arrays[i]['value']}) + let label = $el('div.easyuse-slider-item-label', {textContent: 'L'+i}) + let girdTotal = (arrays[i]['max'] - arrays[i]['min']) / arrays[i]['step'] + let area = $el('div.easyuse-slider-item-area', {style:{ height: calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%'}}) + let bar = $el('div.easyuse-slider-item-bar', { + style:{ top: (100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%'}, + onmousedown: (e) => { + let event = e || window.event; + var y = event.clientY - bar.offsetTop; + document.onmousemove = (e) => { + let event = e || window.event; + let top = event.clientY - y; + if(top < 0){ + top = 0; + } + else if(top > scroll.offsetHeight - bar.offsetHeight){ + top = scroll.offsetHeight - bar.offsetHeight; + } + // top到最近的girdHeight值 + let girlHeight = (scroll.offsetHeight - bar.offsetHeight)/ girdTotal + top = Math.round(top / girlHeight) * girlHeight; + bar.style.top = Math.floor(top/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; + area.style.height = Math.floor((scroll.offsetHeight - bar.offsetHeight - top)/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; + value.innerText = parseFloat(parseFloat(arrays[i]['max'] - (arrays[i]['max']-arrays[i]['min']) * (top/(scroll.offsetHeight - bar.offsetHeight))).toFixed(2)) + arrays[i]['value'] = value.innerText + _this.properties['values'][i] = i+':'+value.innerText + window.getSelection ? 
window.getSelection().removeAllRanges() : document.selection.empty(); + } + }, + ondblclick:_=>{ + bar.style.top = (100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%' + area.style.height = calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%' + value.innerText = arrays[i]['default'] + arrays[i]['value'] = arrays[i]['default'] + _this.properties['values'][i] = i+':'+value.innerText + } + }) + document.onmouseup = _=> document.onmousemove = null; + + scroll.replaceChildren(bar,area) + let item_div = $el('div.easyuse-slider-item',[ + value, + scroll, + label + ]) + if(i == 3 ) layer_total == 12 ? item_div.classList.add('negative') : item_div.classList.remove('negative') + else if(i == 6) layer_total == 12 ? item_div.classList.add('positive') : item_div.classList.remove('positive') + sliders.push(item_div) + } + if(values_div) values_div.replaceChildren(...sliders) + else{ + values_div = $el('div.easyuse-slider', sliders) + sliders_value = _this.addDOMWidget('values',"btn",values_div) + } + + Object.defineProperty(sliders_value, 'value', { + set: function() {}, + get: function() { + return _this.properties.values.join(','); + } + }); + return {sliders, arrays} +} + +let values_div = null +let sliders_value = null +app.registerExtension({ + name: 'comfy.easyUse.sliderControl', + async beforeRegisterNodeDef(nodeType, nodeData, app) { + if(nodeData.name == 'easy sliderControl'){ + // 创建时 + const onNodeCreated = nodeType.prototype.onNodeCreated; + nodeType.prototype.onNodeCreated = function() { + onNodeCreated && onNodeCreated.call(this); + const mode = this.widgets[0]; + const model_type = this.widgets[1]; + let layer_total = model_type.value == 'sdxl' ? 
12 : 16 + let _this = this + mode.callback = async()=>{ + switch (mode.value) { + case 'ipadapter layer weights': + nodeData.output_name = ['layer_weights'] + _this.outputs[0]['name'] = 'layer_weights' + _this.outputs[0]['label'] = 'layer_weights' + break + } + } + + model_type.callback = async()=>{ + if(values_div) { + layer_total = model_type.value == 'sdxl' ? 12 : 16 + setSliderValue(_this, layer_total, true) + } + _this.setSize(model_type.value == 'sdxl' ? [375,320] : [455,320]) + } + + + let {sliders, arrays} = setSliderValue(_this, layer_total) + setTimeout(_=>{ + let values_widgets_index = this.widgets.findIndex((w) => w.name == 'values'); + if(values_widgets_index != -1){ + let old_values_widget = this.widgets[values_widgets_index]; + let old_value = old_values_widget.value.split(',') + for (let i = 0; i < layer_total; i++) { + let value = parseFloat(parseFloat(old_value[i].split(':')[1]).toFixed(2)) + arrays[i]['value'] = value + _this.properties['values'][i] = old_value[i] + // todo: 修改bar位置等 + if(value !=0){ + let item_div = sliders[i] + let input = item_div.getElementsByClassName('easyuse-slider-item-input')[0] + let bar = item_div.getElementsByClassName('easyuse-slider-item-bar')[0] + let area = item_div.getElementsByClassName('easyuse-slider-item-area')[0] + input.textContent = value + bar.style.top = (100-calculatePercent(value,arrays[i]['min'],arrays[i]['max'])) + '%' + area.style.height = calculatePercent(value,arrays[i]['min'],arrays[i]['max']) + '%' + } + } + } + _this.setSize(model_type.value == 'sdxl' ? 
[375,320] : [455,320]) + },1) + return onNodeCreated; + } + } + } +}) \ No newline at end of file From eb11a51e011ce4c661b1b2f2897fba1c3585c918 Mon Sep 17 00:00:00 2001 From: yolain Date: Wed, 26 Jun 2024 15:34:20 +0800 Subject: [PATCH 34/49] Upgrade to v1.2.0 beta --- py/brushnet/powerpaint_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py/brushnet/powerpaint_utils.py b/py/brushnet/powerpaint_utils.py index 02c3926..6722bdb 100644 --- a/py/brushnet/powerpaint_utils.py +++ b/py/brushnet/powerpaint_utils.py @@ -346,7 +346,7 @@ def replace_embeddings( start = external_embedding["start"] end = external_embedding["end"] target_ids_to_replace = [i for i in range(start, end)] - ext_emb = external_embedding["embedding"] + ext_emb = external_embedding["embedding"].to(embedding.device) # do not need to replace if not (input_ids == start).any(): From bd144b9ba46aaf6b5729d047bc9f439e4d3f7dbc Mon Sep 17 00:00:00 2001 From: yolain Date: Wed, 26 Jun 2024 15:51:51 +0800 Subject: [PATCH 35/49] Change Slider Control scroll background --- web/css/sliderControl.css | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/web/css/sliderControl.css b/web/css/sliderControl.css index 5884772..42e749b 100644 --- a/web/css/sliderControl.css +++ b/web/css/sliderControl.css @@ -55,7 +55,7 @@ border-radius:20px; position: absolute; bottom: 0; - background: var(--bg-color); + background: var(--input-text); z-index:0; } .easyuse-slider-item.positive .easyuse-slider-item-area{ @@ -64,7 +64,3 @@ .easyuse-slider-item.negative .easyuse-slider-item-area{ background: var(--error-color); } - -[data-theme="dark"] .easyuse-slider-item-scroll{ - background: var(--comfy-menu-bg); -} \ No newline at end of file From 0e6ea6400789911352d043c409655e90aca4ae75 Mon Sep 17 00:00:00 2001 From: yolain Date: Wed, 26 Jun 2024 16:06:51 +0800 Subject: [PATCH 36/49] add sliderControl to the easy ipadadpterApplyADV slot suggestion --- web/js/easy/easySuggestion.js | 3 ++- 1 file 
changed, 2 insertions(+), 1 deletion(-) diff --git a/web/js/easy/easySuggestion.js b/web/js/easy/easySuggestion.js index 822c719..ca1b8d9 100644 --- a/web/js/easy/easySuggestion.js +++ b/web/js/easy/easySuggestion.js @@ -183,7 +183,8 @@ const suggestions = { }, "easy ipadapterApplyADV":{ "to":{ - "COMBO": [...["Reroute", "easy promptLine"]] + "STRING": [...["Reroute", "easy sliderControl"], ...propmts], + "COMBO": [...["Reroute", "easy promptLine"]] } }, "easy ipadapterStyleComposition":{ From f903b4e5a5a2c5dfb90ad1cb8f973677373c8317 Mon Sep 17 00:00:00 2001 From: yolain Date: Wed, 26 Jun 2024 16:12:35 +0800 Subject: [PATCH 37/49] Rename sd1x to sd1 in easy sliderControl --- py/easyNodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py/easyNodes.py b/py/easyNodes.py index fd05246..0029c8a 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -6855,7 +6855,7 @@ def INPUT_TYPES(s): return { "required": { "mode": (['ipadapter layer weights'],), - "model_type": (['sdxl', 'sd1x'],), + "model_type": (['sdxl', 'sd1'],), }, "hidden": { "prompt": "PROMPT", From c704c2d280e22863bd7b8e03a5ce22ad131bfd40 Mon Sep 17 00:00:00 2001 From: yolain Date: Wed, 26 Jun 2024 16:55:16 +0800 Subject: [PATCH 38/49] Fix easy slider control can not show multiple --- web/js/easy/easySliderControl.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/js/easy/easySliderControl.js b/web/js/easy/easySliderControl.js index cfa055f..eb89a5c 100644 --- a/web/js/easy/easySliderControl.js +++ b/web/js/easy/easySliderControl.js @@ -71,7 +71,7 @@ const setSliderValue = (_this, layer_total, refresh=false) => { else if(i == 6) layer_total == 12 ? 
item_div.classList.add('positive') : item_div.classList.remove('positive') sliders.push(item_div) } - if(values_div) values_div.replaceChildren(...sliders) + if(refresh) values_div.replaceChildren(...sliders) else{ values_div = $el('div.easyuse-slider', sliders) sliders_value = _this.addDOMWidget('values',"btn",values_div) From 5305a94e6b175ec653189454b5ebcfccdda70425 Mon Sep 17 00:00:00 2001 From: yolain Date: Wed, 26 Jun 2024 22:02:37 +0800 Subject: [PATCH 39/49] Fix slider control value not correct when choose sd1 and refresh the page --- web/js/easy/easySliderControl.js | 157 ++++++++++++++++++------------- 1 file changed, 90 insertions(+), 67 deletions(-) diff --git a/web/js/easy/easySliderControl.js b/web/js/easy/easySliderControl.js index eb89a5c..17fc509 100644 --- a/web/js/easy/easySliderControl.js +++ b/web/js/easy/easySliderControl.js @@ -2,6 +2,7 @@ import { app } from "../../../../scripts/app.js"; import { api } from "../../../../scripts/api.js"; import { $el } from "../../../../scripts/ui.js"; import { $t } from "../common/i18n.js"; +import { sleep } from "../common/utils.js"; const calculatePercent = (value, min, max) => ((value-min)/(max-min)*100) @@ -16,60 +17,67 @@ const getLayerDefaultValue = (index) => { return 0 } } -const setSliderValue = (_this, layer_total, refresh=false) => { - let sliders = [] - let arrays = Array.from({length: layer_total}, (v, i) => ({default: layer_total == 12 ? getLayerDefaultValue(i) : 0, min: -1, max: 3, step: 0.05, value:layer_total == 12 ? 
getLayerDefaultValue(i) : 0, mute:false, solo:false})) - _this.setProperty("values", Array.from({length: layer_total}, (v, i) => i+':'+arrays[i]['value'])) - for (let i = 0; i < layer_total; i++) { - let scroll = $el('div.easyuse-slider-item-scroll') - let value = $el('div.easyuse-slider-item-input', {textContent: arrays[i]['value']}) - let label = $el('div.easyuse-slider-item-label', {textContent: 'L'+i}) - let girdTotal = (arrays[i]['max'] - arrays[i]['min']) / arrays[i]['step'] - let area = $el('div.easyuse-slider-item-area', {style:{ height: calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%'}}) - let bar = $el('div.easyuse-slider-item-bar', { - style:{ top: (100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%'}, - onmousedown: (e) => { + +const addLayer = (_this, layer_total, arrays, sliders, i) => { + let scroll = $el('div.easyuse-slider-item-scroll') + let value = $el('div.easyuse-slider-item-input', {textContent: arrays[i]['value']}) + let label = $el('div.easyuse-slider-item-label', {textContent: 'L'+i}) + let girdTotal = (arrays[i]['max'] - arrays[i]['min']) / arrays[i]['step'] + let area = $el('div.easyuse-slider-item-area', {style:{ height: calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%'}}) + let bar = $el('div.easyuse-slider-item-bar', { + style:{ top: (100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%'}, + onmousedown: (e) => { + let event = e || window.event; + var y = event.clientY - bar.offsetTop; + document.onmousemove = (e) => { let event = e || window.event; - var y = event.clientY - bar.offsetTop; - document.onmousemove = (e) => { - let event = e || window.event; - let top = event.clientY - y; - if(top < 0){ - top = 0; - } - else if(top > scroll.offsetHeight - bar.offsetHeight){ - top = scroll.offsetHeight - bar.offsetHeight; - } - // top到最近的girdHeight值 - let girlHeight = (scroll.offsetHeight - bar.offsetHeight)/ girdTotal - 
top = Math.round(top / girlHeight) * girlHeight; - bar.style.top = Math.floor(top/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; - area.style.height = Math.floor((scroll.offsetHeight - bar.offsetHeight - top)/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; - value.innerText = parseFloat(parseFloat(arrays[i]['max'] - (arrays[i]['max']-arrays[i]['min']) * (top/(scroll.offsetHeight - bar.offsetHeight))).toFixed(2)) - arrays[i]['value'] = value.innerText - _this.properties['values'][i] = i+':'+value.innerText - window.getSelection ? window.getSelection().removeAllRanges() : document.selection.empty(); + let top = event.clientY - y; + if(top < 0){ + top = 0; + } + else if(top > scroll.offsetHeight - bar.offsetHeight){ + top = scroll.offsetHeight - bar.offsetHeight; } - }, - ondblclick:_=>{ - bar.style.top = (100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%' - area.style.height = calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%' - value.innerText = arrays[i]['default'] - arrays[i]['value'] = arrays[i]['default'] + // top到最近的girdHeight值 + let girlHeight = (scroll.offsetHeight - bar.offsetHeight)/ girdTotal + top = Math.round(top / girlHeight) * girlHeight; + bar.style.top = Math.floor(top/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; + area.style.height = Math.floor((scroll.offsetHeight - bar.offsetHeight - top)/(scroll.offsetHeight - bar.offsetHeight)* 100) + '%'; + value.innerText = parseFloat(parseFloat(arrays[i]['max'] - (arrays[i]['max']-arrays[i]['min']) * (top/(scroll.offsetHeight - bar.offsetHeight))).toFixed(2)) + arrays[i]['value'] = value.innerText _this.properties['values'][i] = i+':'+value.innerText + window.getSelection ? 
window.getSelection().removeAllRanges() : document.selection.empty(); } - }) - document.onmouseup = _=> document.onmousemove = null; + }, + ondblclick:_=>{ + bar.style.top = (100-calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max'])) + '%' + area.style.height = calculatePercent(arrays[i]['default'],arrays[i]['min'],arrays[i]['max']) + '%' + value.innerText = arrays[i]['default'] + arrays[i]['value'] = arrays[i]['default'] + _this.properties['values'][i] = i+':'+value.innerText + } + }) + document.onmouseup = _=> document.onmousemove = null; - scroll.replaceChildren(bar,area) - let item_div = $el('div.easyuse-slider-item',[ - value, - scroll, - label - ]) - if(i == 3 ) layer_total == 12 ? item_div.classList.add('negative') : item_div.classList.remove('negative') - else if(i == 6) layer_total == 12 ? item_div.classList.add('positive') : item_div.classList.remove('positive') - sliders.push(item_div) + scroll.replaceChildren(bar,area) + let item_div = $el('div.easyuse-slider-item',[ + value, + scroll, + label + ]) + if(i == 3 ) layer_total == 12 ? item_div.classList.add('negative') : item_div.classList.remove('negative') + else if(i == 6) layer_total == 12 ? item_div.classList.add('positive') : item_div.classList.remove('positive') + sliders.push(item_div) + return item_div +} + +const setSliderValue = (_this, type, refresh=false, values_div, sliders_value) => { + let layer_total = type == 'sdxl' ? 12 : 16 + let sliders = [] + let arrays = Array.from({length: layer_total}, (v, i) => ({default: layer_total == 12 ? getLayerDefaultValue(i) : 0, min: -1, max: 3, step: 0.05, value:layer_total == 12 ? 
getLayerDefaultValue(i) : 0})) + _this.setProperty("values", Array.from({length: layer_total}, (v, i) => i+':'+arrays[i]['value'])) + for (let i = 0; i < layer_total; i++) { + addLayer(_this, layer_total, arrays, sliders, i) } if(refresh) values_div.replaceChildren(...sliders) else{ @@ -83,11 +91,10 @@ const setSliderValue = (_this, layer_total, refresh=false) => { return _this.properties.values.join(','); } }); - return {sliders, arrays} + return {sliders, arrays, values_div, sliders_value} } -let values_div = null -let sliders_value = null + app.registerExtension({ name: 'comfy.easyUse.sliderControl', async beforeRegisterNodeDef(nodeType, nodeData, app) { @@ -100,6 +107,8 @@ app.registerExtension({ const model_type = this.widgets[1]; let layer_total = model_type.value == 'sdxl' ? 12 : 16 let _this = this + let values_div = null + let sliders_value = null mode.callback = async()=>{ switch (mode.value) { case 'ipadapter layer weights': @@ -112,33 +121,47 @@ app.registerExtension({ model_type.callback = async()=>{ if(values_div) { - layer_total = model_type.value == 'sdxl' ? 12 : 16 - setSliderValue(_this, layer_total, true) + let r2 = setSliderValue(_this, model_type.value, true, values_div, sliders_value) + values_div = r2.values_div + sliders_value = r2.sliders_value } _this.setSize(model_type.value == 'sdxl' ? [375,320] : [455,320]) } - - let {sliders, arrays} = setSliderValue(_this, layer_total) + let r1 = setSliderValue(_this, model_type.value, false, values_div, sliders_value) + let sliders = r1.sliders + let arrays = r1.arrays + values_div = r1.values_div + sliders_value = r1.sliders_value setTimeout(_=>{ let values_widgets_index = this.widgets.findIndex((w) => w.name == 'values'); if(values_widgets_index != -1){ let old_values_widget = this.widgets[values_widgets_index]; let old_value = old_values_widget.value.split(',') + let layer_total = _this.widgets[1].value == 'sdxl' ? 
12 : 16 for (let i = 0; i < layer_total; i++) { let value = parseFloat(parseFloat(old_value[i].split(':')[1]).toFixed(2)) - arrays[i]['value'] = value - _this.properties['values'][i] = old_value[i] - // todo: 修改bar位置等 - if(value !=0){ - let item_div = sliders[i] - let input = item_div.getElementsByClassName('easyuse-slider-item-input')[0] - let bar = item_div.getElementsByClassName('easyuse-slider-item-bar')[0] - let area = item_div.getElementsByClassName('easyuse-slider-item-area')[0] - input.textContent = value - bar.style.top = (100-calculatePercent(value,arrays[i]['min'],arrays[i]['max'])) + '%' - area.style.height = calculatePercent(value,arrays[i]['min'],arrays[i]['max']) + '%' + let item_div = sliders[i] || null + // 存在层即修改 + if(arrays[i]){ + arrays[i]['value'] = value + _this.properties['values'][i] = old_value[i] + }else{ + arrays.push({default: layer_total == 12 ? getLayerDefaultValue(i) : 0, min: -1, max: 3, step: 0.05, value:layer_total == 12 ? getLayerDefaultValue(i) : 0}) + _this.properties['values'].push(i+':'+arrays[i]['value']) + // 添加缺失层 + item_div = addLayer(_this, layer_total, arrays, sliders, i) + values_div.appendChild(item_div) } + // todo: 修改bar位置等 + let input = item_div.getElementsByClassName('easyuse-slider-item-input')[0] + let bar = item_div.getElementsByClassName('easyuse-slider-item-bar')[0] + let area = item_div.getElementsByClassName('easyuse-slider-item-area')[0] + if(i == 3 ) layer_total == 12 ? item_div.classList.add('negative') : item_div.classList.remove('negative') + else if(i == 6) layer_total == 12 ? item_div.classList.add('positive') : item_div.classList.remove('positive') + input.textContent = value + bar.style.top = (100-calculatePercent(value,arrays[i]['min'],arrays[i]['max'])) + '%' + area.style.height = calculatePercent(value,arrays[i]['min'],arrays[i]['max']) + '%' } } _this.setSize(model_type.value == 'sdxl' ? 
[375,320] : [455,320]) From bea00593ebf0c9f9bd8fd52675aa94ddd57979e4 Mon Sep 17 00:00:00 2001 From: yolain Date: Thu, 27 Jun 2024 11:09:37 +0800 Subject: [PATCH 40/49] Fix FooocusInpaint crash comfy after change params #226 --- py/easyNodes.py | 10 ++--- py/libs/fooocus.py | 95 ++++++++++++++++++++++++---------------------- 2 files changed, 53 insertions(+), 52 deletions(-) diff --git a/py/easyNodes.py b/py/easyNodes.py index 0029c8a..84dd840 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -2058,7 +2058,6 @@ def load_lllite(self, model, model_name, cond_image, strength, steps, start_perc # FooocusInpaint from .libs.fooocus import InpaintHead, InpaintWorker -inpaint_head_model = None class applyFooocusInpaint: @classmethod @@ -2079,13 +2078,10 @@ def INPUT_TYPES(s): def apply(self, model, latent, head, patch): - global inpaint_head_model - head_file = get_local_filepath(FOOOCUS_INPAINT_HEAD[head]["model_url"], INPAINT_DIR) - if inpaint_head_model is None: - inpaint_head_model = InpaintHead() - sd = torch.load(head_file, map_location='cpu') - inpaint_head_model.load_state_dict(sd) + inpaint_head_model = InpaintHead() + sd = torch.load(head_file, map_location='cpu') + inpaint_head_model.load_state_dict(sd) patch_file = get_local_filepath(FOOOCUS_INPAINT_PATCH[patch]["model_url"], INPAINT_DIR) inpaint_lora = comfy.utils.load_torch_file(patch_file, safe_load=True) diff --git a/py/libs/fooocus.py b/py/libs/fooocus.py index 6f5e2df..26c4ea9 100644 --- a/py/libs/fooocus.py +++ b/py/libs/fooocus.py @@ -1,29 +1,69 @@ #credit to Acly for this module #from https://github.com/Acly/comfyui-inpaint-nodes import torch +import torch.nn.functional as F import comfy +from comfy.model_base import BaseModel from comfy.model_patcher import ModelPatcher from comfy.model_management import cast_to_device from .log import log_node_warn, log_node_error, log_node_info # Inpaint +original_calculate_weight = ModelPatcher.calculate_weight +injected_model_patcher_calculate_weight = False + 
class InpaintHead(torch.nn.Module): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.head = torch.nn.Parameter(torch.empty(size=(320, 5, 3, 3), device='cpu')) + self.head = torch.nn.Parameter(torch.empty(size=(320, 5, 3, 3), device="cpu")) def __call__(self, x): - x = torch.nn.functional.pad(x, (1, 1, 1, 1), "replicate") - return torch.nn.functional.conv2d(input=x, weight=self.head) + x = F.pad(x, (1, 1, 1, 1), "replicate") + return F.conv2d(x, weight=self.head) + +def calculate_weight_patched(self: ModelPatcher, patches, weight, key): + remaining = [] + + for p in patches: + alpha = p[0] + v = p[1] + + is_fooocus_patch = isinstance(v, tuple) and len(v) == 2 and v[0] == "fooocus" + if not is_fooocus_patch: + remaining.append(p) + continue + + if alpha != 0.0: + v = v[1] + w1 = cast_to_device(v[0], weight.device, torch.float32) + if w1.shape == weight.shape: + w_min = cast_to_device(v[1], weight.device, torch.float32) + w_max = cast_to_device(v[2], weight.device, torch.float32) + w1 = (w1 / 255.0) * (w_max - w_min) + w_min + weight += alpha * cast_to_device(w1, weight.device, weight.dtype) + else: + pass + # log_node_warn(self.node_name, + # f"Shape mismatch {key}, weight not merged ({w1.shape} != {weight.shape})" + # ) + + if len(remaining) > 0: + return original_calculate_weight(self, remaining, weight, key) + return weight + +def inject_patched_calculate_weight(): + global injected_model_patcher_calculate_weight + if not injected_model_patcher_calculate_weight: + print( + "[comfyui-inpaint-nodes] Injecting patched comfy.model_patcher.ModelPatcher.calculate_weight" + ) + ModelPatcher.calculate_weight = calculate_weight_patched + injected_model_patcher_calculate_weight = True class InpaintWorker: def __init__(self, node_name): self.node_name = node_name if node_name is not None else "" - self.original_calculate_weight = ModelPatcher.calculate_weight - if not hasattr(ModelPatcher, "original_calculate_weight"): - 
ModelPatcher.original_calculate_weight = self.original_calculate_weight - self.injected_model_patcher_calculate_weight = False def load_fooocus_patch(self, lora: dict, to_load: dict): patch_dict = {} @@ -40,47 +80,12 @@ def load_fooocus_patch(self, lora: dict, to_load: dict): ) return patch_dict - def calculate_weight_patched(self: ModelPatcher, patches, weight, key): - remaining = [] - - for p in patches: - alpha = p[0] - v = p[1] - - is_fooocus_patch = isinstance(v, tuple) and len(v) == 2 and v[0] == "fooocus" - if not is_fooocus_patch: - remaining.append(p) - continue - - if alpha != 0.0: - v = v[1] - w1 = cast_to_device(v[0], weight.device, torch.float32) - if w1.shape == weight.shape: - w_min = cast_to_device(v[1], weight.device, torch.float32) - w_max = cast_to_device(v[2], weight.device, torch.float32) - w1 = (w1 / 255.0) * (w_max - w_min) + w_min - weight += alpha * cast_to_device(w1, weight.device, weight.dtype) - else: - pass - # log_node_warn(self.node_name, - # f"Shape mismatch {key}, weight not merged ({w1.shape} != {weight.shape})" - # ) - - if len(remaining) > 0: - return self.original_calculate_weight(self, remaining, weight, key) - return weight - - def inject_patched_calculate_weight(self): - if not self.injected_model_patcher_calculate_weight: - log_node_info(self.node_name,"Injecting patched comfy.model_patcher.ModelPatcher.calculate_weight") - ModelPatcher.calculate_weight = self.calculate_weight_patched - self.injected_model_patcher_calculate_weight = True def patch(self, model, latent, patch): - base_model = model.model + base_model: BaseModel = model.model latent_pixels = base_model.process_latent_in(latent["samples"]) noise_mask = latent["noise_mask"].round() - latent_mask = torch.nn.functional.max_pool2d(noise_mask, (8, 8)).round().to(latent_pixels) + latent_mask = F.max_pool2d(noise_mask, (8, 8)).round().to(latent_pixels) inpaint_head_model, inpaint_lora = patch feed = torch.cat([latent_mask, latent_pixels], dim=1) @@ -104,5 +109,5 @@ def 
input_block_patch(h, transformer_options): if not_patched_count > 0: log_node_error(self.node_name, f"Failed to patch {not_patched_count} keys") - self.inject_patched_calculate_weight() + inject_patched_calculate_weight() return (m,) \ No newline at end of file From 1171a299b3219fef627ab2314289b747a48c47be Mon Sep 17 00:00:00 2001 From: yolain Date: Fri, 28 Jun 2024 18:26:07 +0800 Subject: [PATCH 41/49] Fix checkpoints and loras do not work together in xyplot ADV #227 --- py/libs/xyplot.py | 30 +++++++++++++++--------------- web/js/common/utils.js | 7 +++++++ web/js/easy/easy.js | 2 +- 3 files changed, 23 insertions(+), 16 deletions(-) diff --git a/py/libs/xyplot.py b/py/libs/xyplot.py index 22e0d06..d581b4c 100644 --- a/py/libs/xyplot.py +++ b/py/libs/xyplot.py @@ -288,21 +288,6 @@ def sample_plot_image(self, plot_image_vars, samples, preview_latent, latents_pl if plot_image_vars['clip_skip'] != 0: clip.clip_layer(plot_image_vars['clip_skip']) - # Lora - if self.x_type == "Lora" or self.y_type == "Lora": - model = model if model is not None else plot_image_vars["model"] - clip = clip if clip is not None else plot_image_vars["clip"] - - xy_values = x_value if self.x_type == "Lora" else y_value - lora_name, lora_model_strength, lora_clip_strength = xy_values.split(",") - lora_stack = [{"lora_name": lora_name, "model": model, "clip" :clip, "model_strength": float(lora_model_strength), "clip_strength": float(lora_clip_strength)}] - if 'lora_stack' in plot_image_vars: - lora_stack = lora_stack + plot_image_vars['lora_stack'] - - if lora_stack is not None and lora_stack != []: - for lora in lora_stack: - model, clip = self.easyCache.load_lora(lora) - # CheckPoint if self.x_type == "Checkpoint" or self.y_type == "Checkpoint": xy_values = x_value if self.x_type == "Checkpoint" else y_value @@ -349,6 +334,21 @@ def sample_plot_image(self, plot_image_vars, samples, preview_latent, latents_pl if "negative_cond" in plot_image_vars: negative = negative + 
plot_image_vars["negative_cond"] + # Lora + if self.x_type == "Lora" or self.y_type == "Lora": + model = model if model is not None else plot_image_vars["model"] + clip = clip if clip is not None else plot_image_vars["clip"] + + xy_values = x_value if self.x_type == "Lora" else y_value + lora_name, lora_model_strength, lora_clip_strength = xy_values.split(",") + lora_stack = [{"lora_name": lora_name, "model": model, "clip" :clip, "model_strength": float(lora_model_strength), "clip_strength": float(lora_clip_strength)}] + if 'lora_stack' in plot_image_vars: + lora_stack = lora_stack + plot_image_vars['lora_stack'] + + if lora_stack is not None and lora_stack != []: + for lora in lora_stack: + model, clip = self.easyCache.load_lora(lora) + # 提示词 if "Positive" in self.x_type or "Positive" in self.y_type: if self.x_type == 'Positive Prompt S/R' or self.y_type == 'Positive Prompt S/R': diff --git a/web/js/common/utils.js b/web/js/common/utils.js index 0ea442c..9a988a7 100644 --- a/web/js/common/utils.js +++ b/web/js/common/utils.js @@ -20,6 +20,13 @@ export function addCss(href, base=true) { document.head.appendChild(link); } +export function addMeta(name, content) { + const meta = document.createElement("meta"); + meta.setAttribute("name", name); + meta.setAttribute('content', content); + document.head.appendChild(meta); +} + export function deepEqual(obj1, obj2) { if (typeof obj1 !== typeof obj2) { return false diff --git a/web/js/easy/easy.js b/web/js/easy/easy.js index d5a0b28..6a66af6 100644 --- a/web/js/easy/easy.js +++ b/web/js/easy/easy.js @@ -1,6 +1,6 @@ import { api } from "../../../../scripts/api.js"; import { app } from "../../../../scripts/app.js"; -import {deepEqual, addCss, isLocalNetwork} from "../common/utils.js"; +import {deepEqual, addCss, addMeta, isLocalNetwork} from "../common/utils.js"; import {quesitonIcon, rocketIcon, groupIcon, rebootIcon, closeIcon} from "../common/icon.js"; import {$t} from '../common/i18n.js'; import {toast} from 
"../common/toast.js"; From 2c8856ca80aa31c4703a029022aa6d9ce2d19b57 Mon Sep 17 00:00:00 2001 From: yolain Date: Sat, 29 Jun 2024 18:30:50 +0800 Subject: [PATCH 42/49] Change some UI styles to match the new comfy menu --- README.en.md | 6 +++++ README.md | 6 +++++ web/css/easy.css | 15 +++++++---- web/css/groupmap.css | 4 ++- web/css/theme.css | 6 +++-- web/js/common/icon.js | 20 +++++++++++++++ web/js/easy/easy.js | 48 ++++++++++++++++++++++++++++++------ web/js/easy/easyInterface.js | 14 +++++------ 8 files changed, 96 insertions(+), 23 deletions(-) diff --git a/README.en.md b/README.en.md index 8c7ad71..8279b76 100644 --- a/README.en.md +++ b/README.en.md @@ -387,3 +387,9 @@ Disclaimer: Opened source was not easy. I have a lot of respect for the contribu [ComfyUI-Custom-Scripts](https://github.com/pythongosssss/ComfyUI-Custom-Scripts) - pyssss🐍 [cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) - Image Preview Chooser + +## 🌟Stargazers + +My gratitude extends to the generous souls who bestow a star. Your support is much appreciated! + +[![Stargazers repo roster for @yolain/ComfyUI-Easy-Use](https://reporoster.com/stars/yolain/ComfyUI-Easy-Use)](https://github.com/yolain/ComfyUI-Easy-Use/stargazers) diff --git a/README.md b/README.md index 4c6d0b5..1a39802 100644 --- a/README.md +++ b/README.md @@ -402,3 +402,9 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use [cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) - 图片选择器 [ComfyUI-BrushNet](https://github.com/nullquant/ComfyUI-BrushNet) - BrushNet 内补节点 + +## 🌟Stargazers + +My gratitude extends to the generous souls who bestow a star. Your support is much appreciated! 
+ +[![Stargazers repo roster for @yolain/ComfyUI-Easy-Use](https://reporoster.com/stars/yolain/ComfyUI-Easy-Use)](https://github.com/yolain/ComfyUI-Easy-Use/stargazers) \ No newline at end of file diff --git a/web/css/easy.css b/web/css/easy.css index 7c69a7b..60dece2 100644 --- a/web/css/easy.css +++ b/web/css/easy.css @@ -26,15 +26,14 @@ textarea{ backdrop-filter: blur(8px) brightness(120%); } .comfy-menu{ - top:38%; border-radius:16px; box-shadow:0 0 1px var(--descrip-text); backdrop-filter: blur(8px) brightness(120%); } .comfy-menu button,.comfy-modal button { - font-size: 16px; - padding:6px 0; - margin-bottom:8px; + font-size: 14px; + padding:4px 0; + margin-bottom:4px; } .comfy-menu button.comfy-settings-btn{ font-size: 12px; @@ -43,7 +42,7 @@ textarea{ margin-bottom: 4px; } .comfy-menu-btns button,.comfy-list-actions button{ - font-size: 12px; + font-size: 10px; } .comfy-menu > button, .comfy-menu-btns button, @@ -52,6 +51,7 @@ textarea{ border-width:1px; } + dialog{ border:1px solid var(--border-color); background:transparent; @@ -71,6 +71,9 @@ dialog{ hr{ border:1px solid var(--border-color); } +#comfy-dev-save-api-button{ + justify-content: center; +} #shareButton{ background:linear-gradient(to left,var(--theme-color),var(--theme-color-light))!important; color:white!important; @@ -78,10 +81,12 @@ hr{ #queue-button{ position:relative; overflow:hidden; + min-height:30px; z-index:1; } #queue-button:after{ + clear: both; content:attr(data-attr); background:green; color:#FFF; diff --git a/web/css/groupmap.css b/web/css/groupmap.css index cd138dc..75aa867 100644 --- a/web/css/groupmap.css +++ b/web/css/groupmap.css @@ -9,7 +9,9 @@ color: var(--descrip-text); background-color: var(--comfy-menu-bg); padding: 10px 4px; - border: 1px solid var(--border-color);z-index: 999999999;padding-top: 0; + border: 1px solid var(--border-color); + z-index: 399; + padding-top: 0; } #easyuse_groups_map .icon{ width: 12px; diff --git a/web/css/theme.css b/web/css/theme.css index 
cafc025..d2754c8 100644 --- a/web/css/theme.css +++ b/web/css/theme.css @@ -1,6 +1,8 @@ :root { - --theme-color:#3f3eed; - --theme-color-light: #008ecb; + /*--theme-color:#3f3eed;*/ + /*--theme-color-light: #008ecb;*/ + --theme-color:#236692; + --theme-color-light: #3485bb; --success-color: #52c41a; --error-color: #ff4d4f; --warning-color: #faad14; diff --git a/web/js/common/icon.js b/web/js/common/icon.js index dd9aad4..e71a0a3 100644 --- a/web/js/common/icon.js +++ b/web/js/common/icon.js @@ -1,3 +1,23 @@ +export const logoIcon = ` + + + + + + + + + + + + + + + + + +` + export const quesitonIcon = `` export const rocketIcon = `` export const groupIcon = `` diff --git a/web/js/easy/easy.js b/web/js/easy/easy.js index 6a66af6..bfcc6e7 100644 --- a/web/js/easy/easy.js +++ b/web/js/easy/easy.js @@ -1,7 +1,7 @@ import { api } from "../../../../scripts/api.js"; import { app } from "../../../../scripts/app.js"; import {deepEqual, addCss, addMeta, isLocalNetwork} from "../common/utils.js"; -import {quesitonIcon, rocketIcon, groupIcon, rebootIcon, closeIcon} from "../common/icon.js"; +import {logoIcon, quesitonIcon, rocketIcon, groupIcon, rebootIcon, closeIcon} from "../common/icon.js"; import {$t} from '../common/i18n.js'; import {toast} from "../common/toast.js"; import {$el, ComfyDialog} from "../../../../scripts/ui.js"; @@ -172,7 +172,7 @@ function createGroupMap(){ buttons.append(go_btn) let see_btn = document.createElement('button') let defaultStyle = `cursor:pointer;font-size:10px;;padding:2px;border: 1px solid var(--border-color);border-radius:4px;width:36px;` - see_btn.style = isGroupMute ? `background-color:var(--error-text);color:var(--input-text);` + defaultStyle : (isGroupShow ? `background-color:#006691;color:var(--input-text);` + defaultStyle : `background-color: var(--comfy-input-bg);color:var(--descrip-text);` + defaultStyle) + see_btn.style = isGroupMute ? `background-color:var(--error-text);color:var(--input-text);` + defaultStyle : (isGroupShow ? 
`background-color:var(--theme-color);color:var(--input-text);` + defaultStyle : `background-color: var(--comfy-input-bg);color:var(--descrip-text);` + defaultStyle) see_btn.innerText = isGroupMute ? mute_text : (isGroupShow ? show_text : hide_text) let pressTimer let firstTime =0, lastTime =0 @@ -384,11 +384,12 @@ class GuideDialog { } } -const getEnableToolBar = _ => app.ui.settings.getSettingValue(toolBarId, true) - +// toolbar const toolBarId = "Comfy.EasyUse.toolBar" +const getEnableToolBar = _ => app.ui.settings.getSettingValue(toolBarId, true) +const getNewMenuPosition = _ => app.ui.settings.getSettingValue('Comfy.UseNewMenu', 'Disabled') -let enableToolBar = getEnableToolBar() +let enableToolBar = getEnableToolBar() && getNewMenuPosition() == 'Disabled' let disableRenderInfo = localStorage['Comfy.Settings.Comfy.EasyUse.disableRenderInfo'] ? true : false export function addToolBar(app) { app.ui.settings.addSetting({ @@ -404,15 +405,17 @@ export function addToolBar(app) { }, }); } - let note = null let toolbar = null function showToolBar(){ - toolbar.style.display = 'flex' + if(toolbar) toolbar.style.display = 'flex' } function hideToolBar(){ - toolbar.style.display = 'none' + if(toolbar) toolbar.style.display = 'none' } +const changeNewMenuPosition = app.ui.settings.settingsLookup?.['Comfy.UseNewMenu'] +if(changeNewMenuPosition) changeNewMenuPosition.onChange = v => v == 'Disabled' ? 
showToolBar() : hideToolBar() + app.registerExtension({ name: "comfy.easyUse", @@ -521,6 +524,35 @@ app.registerExtension({ addToolBar(app) }, + async setup() { + // New style menu button + if(app.menu?.actionsGroup){ + const groupMap = new (await import('../../../../scripts/ui/components/button.js')).ComfyButton({ + icon:'list-box', + action:()=> createGroupMap(), + tooltip: "EasyUse Group Map", + // content: "EasyUse Group Map", + classList: "comfyui-button comfyui-menu-mobile-collapse" + }); + app.menu?.actionsGroup.element.after(groupMap.element); + // const easyNewMenu = $el('div.easyuse-new-menu',[ + // $el('div.easyuse-new-menu-intro',[ + // $el('div.easyuse-new-menu-logo',{innerHTML:logoIcon}), + // $el('div.easyuse-new-menu-title',[ + // $el('div.title',{textContent:'ComfyUI-Easy-Use'}), + // $el('div.desc',{textContent:'Version:'}) + // ]) + // ]) + // ]) + // app.menu?.actionsGroup.element.after(new (await import('../../../../scripts/ui/components/splitButton.js')).ComfySplitButton({ + // primary: groupMap, + // mode:'click', + // position:'absolute', + // horizontal: 'right' + // },easyNewMenu).element); + } + + }, beforeRegisterNodeDef(nodeType, nodeData, app) { if (nodeData.name.startsWith("easy")) { const origOnConfigure = nodeType.prototype.onConfigure; diff --git a/web/js/easy/easyInterface.js b/web/js/easy/easyInterface.js index 6c7e8b6..6e80a99 100644 --- a/web/js/easy/easyInterface.js +++ b/web/js/easy/easyInterface.js @@ -5,8 +5,8 @@ import {addPreconnect, addCss} from "../common/utils.js"; const locale = localStorage['AGL.Locale'] || localStorage['Comfy.Settings.AGL.Locale'] || 'en-US' -const customThemeColor = "#3f3eed" -const customThemeColorLight = "#008ecb" +const customThemeColor = "#236692" +const customThemeColorLight = "#3485bb" // 增加Slot颜色 const customPipeLineLink = "#7737AA" const customPipeLineSDXLLink = "#7737AA" @@ -28,9 +28,9 @@ localStorage.setItem('Comfy.Settings.easyUse.customLinkColors', JSON.stringify(c // 增加自定义主题 const ui = 
{ - "version": 101, + "version": 102, "id": "obsidian", - "name": "黑曜石", + "name": "Obsidian", "colors": { "node_slot": { "CLIP": "#FFD500", @@ -105,7 +105,7 @@ try{ custom_theme.obsidian = ui let ui2 = JSON.parse(JSON.stringify(ui)) ui2.id = 'obsidian_dark' - ui2.name = '黑曜石-深' + ui2.name = 'Obsidian Dark' ui2.colors.litegraph_base.BACKGROUND_IMAGE = dark_bg ui2.colors.litegraph_base.CLEAR_BACKGROUND_COLOR = '#000' custom_theme[ui2.id] = ui2 @@ -559,7 +559,7 @@ try{ ctx.fill(); if(show_text && !w.disabled) ctx.stroke(); - ctx.fillStyle = w.value ? customThemeColorLight : "#333"; + ctx.fillStyle = w.value ? customThemeColor : "#333"; ctx.beginPath(); ctx.arc( widget_width - margin * 2, y + H * 0.5, H * 0.25, 0, Math.PI * 2 ); ctx.fill(); @@ -593,7 +593,7 @@ try{ var nvalue = (w.value - w.options.min) / range; if(nvalue < 0.0) nvalue = 0.0; if(nvalue > 1.0) nvalue = 1.0; - ctx.fillStyle = w.options.hasOwnProperty("slider_color") ? w.options.slider_color : (active_widget == w ? "#333" : customThemeColorLight); + ctx.fillStyle = w.options.hasOwnProperty("slider_color") ? w.options.slider_color : (active_widget == w ? 
"#333" : customThemeColor); ctx.beginPath(); ctx.roundRect(margin, y, nvalue * (widget_width - margin * 2), H, [H*0.25]); ctx.fill(); From 5fadf6a704d685da276f46cd674cd63e70965692 Mon Sep 17 00:00:00 2001 From: yolain Date: Sat, 29 Jun 2024 21:51:56 +0800 Subject: [PATCH 43/49] Fix empty character in one line do not add to promptLine --- py/easyNodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py/easyNodes.py b/py/easyNodes.py index 84dd840..84672af 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -331,7 +331,7 @@ def INPUT_TYPES(s): def generate_strings(self, prompt, start_index, max_rows, workflow_prompt=None, my_unique_id=None): lines = prompt.split('\n') - lines = [zh_to_en([v])[0] if has_chinese(v) else v for v in lines] + lines = [zh_to_en([v])[0] if has_chinese(v) else v for v in lines if v] start_index = max(0, min(start_index, len(lines) - 1)) From 9317d2fd5d09450e4bc954e7af8eae270c0fc8e0 Mon Sep 17 00:00:00 2001 From: yolain Date: Sat, 29 Jun 2024 23:14:37 +0800 Subject: [PATCH 44/49] Add the 1216x832 resolution preset #232 --- py/config.py | 4 +++- py/easyNodes.py | 12 ++++++------ py/libs/sampler.py | 3 ++- web/js/easy/easyDynamicWidgets.js | 5 ++++- 4 files changed, 15 insertions(+), 9 deletions(-) diff --git a/py/config.py b/py/config.py index 6ef3580..3050f86 100644 --- a/py/config.py +++ b/py/config.py @@ -3,7 +3,7 @@ from pathlib import Path BASE_RESOLUTIONS = [ - ("自定义", "自定义"), + ("width", "height"), (512, 512), (512, 768), (576, 1024), @@ -15,6 +15,7 @@ (768, 1536), (816, 1920), (832, 1152), + (832, 1216), (896, 1152), (896, 1088), (1024, 1024), @@ -23,6 +24,7 @@ (1080, 1920), (1440, 2560), (1088, 896), + (1216, 832), (1152, 832), (1152, 896), (1280, 768), diff --git a/py/easyNodes.py b/py/easyNodes.py index 84672af..747cd10 100644 --- a/py/easyNodes.py +++ b/py/easyNodes.py @@ -891,7 +891,7 @@ class fullLoader: @classmethod def INPUT_TYPES(cls): - resolution_strings = [f"{width} x {height}" for width, height in 
BASE_RESOLUTIONS] + resolution_strings = [f"{width} x {height} (custom)" if width == 'width' and height == 'height' else f"{width} x {height}" for width, height in BASE_RESOLUTIONS] a1111_prompt_style_default = False return {"required": { @@ -997,7 +997,7 @@ def adv_pipeloader(self, ckpt_name, config_name, vae_name, clip_skip, class a1111Loader(fullLoader): @classmethod def INPUT_TYPES(cls): - resolution_strings = [f"{width} x {height}" for width, height in BASE_RESOLUTIONS] + resolution_strings = [f"{width} x {height} (custom)" if width == 'width' and height == 'height' else f"{width} x {height}" for width, height in BASE_RESOLUTIONS] a1111_prompt_style_default = False checkpoints = folder_paths.get_filename_list("checkpoints") loras = ["None"] + folder_paths.get_filename_list("loras") @@ -1052,7 +1052,7 @@ def a1111loader(self, ckpt_name, vae_name, clip_skip, class comfyLoader(fullLoader): @classmethod def INPUT_TYPES(cls): - resolution_strings = [f"{width} x {height}" for width, height in BASE_RESOLUTIONS] + resolution_strings = [f"{width} x {height} (custom)" if width == 'width' and height == 'height' else f"{width} x {height}" for width, height in BASE_RESOLUTIONS] return { "required": { "ckpt_name": (folder_paths.get_filename_list("checkpoints"),), @@ -1103,7 +1103,7 @@ def __init__(self): @classmethod def INPUT_TYPES(s): - resolution_strings = [f"{width} x {height}" for width, height in BASE_RESOLUTIONS] + resolution_strings = [f"{width} x {height} (custom)" if width == 'width' and height == 'height' else f"{width} x {height}" for width, height in BASE_RESOLUTIONS] return {"required": { "stage_c": (folder_paths.get_filename_list("unet") + folder_paths.get_filename_list("checkpoints"),), @@ -1512,7 +1512,7 @@ class svdLoader: @classmethod def INPUT_TYPES(cls): - resolution_strings = [f"{width} x {height}" for width, height in BASE_RESOLUTIONS] + resolution_strings = [f"{width} x {height} (custom)" if width == 'width' and height == 'height' else f"{width} x 
{height}" for width, height in BASE_RESOLUTIONS] def get_file_list(filenames): return [file for file in filenames if file != "put_models_here.txt" and "svd" in file.lower()] @@ -1632,7 +1632,7 @@ def __init__(self): @classmethod def INPUT_TYPES(cls): - resolution_strings = [f"{width} x {height}" for width, height in BASE_RESOLUTIONS] + resolution_strings = [f"{width} x {height} (custom)" if width == 'width' and height == 'height' else f"{width} x {height}" for width, height in BASE_RESOLUTIONS] return {"required": { "model_name": (list(DYNAMICRAFTER_MODELS.keys()),), diff --git a/py/libs/sampler.py b/py/libs/sampler.py index 9c646aa..4cfba84 100644 --- a/py/libs/sampler.py +++ b/py/libs/sampler.py @@ -52,7 +52,8 @@ def safe_split(to_split: str, delimiter: str) -> List[str]: return parts def emptyLatent(self, resolution, empty_latent_width, empty_latent_height, batch_size=1, compression=0, sd3=False): - if resolution != "自定义 x 自定义": + print(resolution) + if resolution not in ["自定义 x 自定义", 'width x height (custom)']: try: width, height = map(int, resolution.split(' x ')) empty_latent_width = width diff --git a/web/js/easy/easyDynamicWidgets.js b/web/js/easy/easyDynamicWidgets.js index 9b97ab1..8cd90a7 100644 --- a/web/js/easy/easyDynamicWidgets.js +++ b/web/js/easy/easyDynamicWidgets.js @@ -177,7 +177,10 @@ function widgetLogic(node, widget) { } if (widget.name === 'resolution') { - if (widget.value === "自定义 x 自定义") { + if(widget.value === "自定义 x 自定义"){ + widget.value = 'width x height (custom)' + } + if (widget.value === "自定义 x 自定义" || widget.value === 'width x height (custom)') { toggleWidget(node, findWidgetByName(node, 'empty_latent_width'), true) toggleWidget(node, findWidgetByName(node, 'empty_latent_height'), true) } else { From 62cd949d62e063debc488df64d4181a5182b7805 Mon Sep 17 00:00:00 2001 From: yolain Date: Sun, 30 Jun 2024 21:23:02 +0800 Subject: [PATCH 45/49] Add crystools ui display on the comfy new menu --- README.md | 1 + web/css/toolbar.css | 13 
+++++++++++ web/js/easy/easy.js | 53 ++++++++++++++++++++++++++++++++++++++++----- 3 files changed, 62 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 1a39802..c51d1c7 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,7 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use **v1.2.0** +- 当新菜单的位置在上或者下时增加上 crystools 的显示,推荐开两个就好(如果后续crystools有更新UI适配我可能会删除掉) - 增加 **easy sliderControl** - 滑块控制节点,当前可用于控制ipadapterMS的参数 (双击滑块可重置为默认值) - 增加 **layer_weights** 属性在 `easy ipadapterApplyADV` 节点 diff --git a/web/css/toolbar.css b/web/css/toolbar.css index ce944a6..89a7243 100644 --- a/web/css/toolbar.css +++ b/web/css/toolbar.css @@ -214,4 +214,17 @@ } .markdown-body .link{ color:var(--theme-color-light) +} + +#comfyui-menu-monitor{ + width:120px; +} +#comfyui-menu-monitor #crystools-monitor-container{ + margin:0 auto!important; +} +#comfyui-menu-monitor #crystools-monitor-container > div{ + margin:2px 0!important; +} +#comfyui-menu-monitor #crystools-monitor-container > div > div > div{ + padding:0 4px!important; } \ No newline at end of file diff --git a/web/js/easy/easy.js b/web/js/easy/easy.js index bfcc6e7..210362a 100644 --- a/web/js/easy/easy.js +++ b/web/js/easy/easy.js @@ -387,8 +387,16 @@ class GuideDialog { // toolbar const toolBarId = "Comfy.EasyUse.toolBar" const getEnableToolBar = _ => app.ui.settings.getSettingValue(toolBarId, true) -const getNewMenuPosition = _ => app.ui.settings.getSettingValue('Comfy.UseNewMenu', 'Disabled') +const getNewMenuPosition = _ => { + try{ + return app.ui.settings.getSettingValue('Comfy.UseNewMenu', 'Disabled') + }catch (e){ + return 'Disabled' + } +} +let note = null +let toolbar = null let enableToolBar = getEnableToolBar() && getNewMenuPosition() == 'Disabled' let disableRenderInfo = localStorage['Comfy.Settings.Comfy.EasyUse.disableRenderInfo'] ? 
true : false export function addToolBar(app) { @@ -405,16 +413,49 @@ export function addToolBar(app) { }, }); } -let note = null -let toolbar = null function showToolBar(){ if(toolbar) toolbar.style.display = 'flex' } function hideToolBar(){ if(toolbar) toolbar.style.display = 'none' } +let monitor = null +function setCrystoolsUI(position){ + const crystools = document.getElementById('crystools-root')?.children || null + if(crystools?.length>0){ + if(!monitor){ + for (let i = 0; i < crystools.length; i++) { + if (crystools[i].id === 'crystools-monitor-container') { + monitor = crystools[i]; + break; + } + } + } + if(monitor){ + if(position == 'Disabled'){ + let replace = true + for (let i = 0; i < crystools.length; i++) { + if (crystools[i].id === 'crystools-monitor-container') { + replace = false + break; + } + } + document.getElementById('crystools-root').appendChild(monitor) + } + else { + let monitor_div = document.getElementById('comfyui-menu-monitor') + if(!monitor_div) app.menu.settingsGroup.element.before($el('div',{id:'comfyui-menu-monitor'},monitor)) + else monitor_div.appendChild(monitor) + } + } + } +} const changeNewMenuPosition = app.ui.settings.settingsLookup?.['Comfy.UseNewMenu'] -if(changeNewMenuPosition) changeNewMenuPosition.onChange = v => v == 'Disabled' ? showToolBar() : hideToolBar() +if(changeNewMenuPosition) changeNewMenuPosition.onChange = v => { + v == 'Disabled' ? 
showToolBar() : hideToolBar() + setCrystoolsUI(v) +} + app.registerExtension({ @@ -534,7 +575,9 @@ app.registerExtension({ // content: "EasyUse Group Map", classList: "comfyui-button comfyui-menu-mobile-collapse" }); - app.menu?.actionsGroup.element.after(groupMap.element); + app.menu.actionsGroup.element.after(groupMap.element); + setCrystoolsUI(getNewMenuPosition()) + // const easyNewMenu = $el('div.easyuse-new-menu',[ // $el('div.easyuse-new-menu-intro',[ // $el('div.easyuse-new-menu-logo',{innerHTML:logoIcon}), From 2a9a39e137cfab274f44c7ff0838961d7e3ca541 Mon Sep 17 00:00:00 2001 From: yolain Date: Mon, 1 Jul 2024 23:48:35 +0800 Subject: [PATCH 46/49] Add easy hunyuanDiTLoader --- .github/FUNDING.yml | 3 + README.en.md | 3 + README.md | 2 + prestartup_script.py | 1 + py/dit/__init__.py | 2 + py/dit/config.py | 120 + py/dit/hunyuanDiT/config.py | 46 + py/dit/hunyuanDiT/config_clip.json | 34 + py/dit/hunyuanDiT/config_mt5.json | 33 + py/dit/hunyuanDiT/loader.py | 240 + py/dit/hunyuanDiT/models/attn_layers.py | 377 + py/dit/hunyuanDiT/models/embedders.py | 111 + py/dit/hunyuanDiT/models/models.py | 428 + py/dit/hunyuanDiT/models/norm_layers.py | 68 + py/dit/hunyuanDiT/models/poolers.py | 39 + py/dit/hunyuanDiT/models/posemb_layers.py | 225 + py/dit/hunyuanDiT/mt5_tokenizer/config.json | 33 + .../mt5_tokenizer/special_tokens_map.json | 1 + py/dit/hunyuanDiT/mt5_tokenizer/spiece.model | Bin 0 -> 4309802 bytes .../mt5_tokenizer/tokenizer_config.json | 1 + py/dit/hunyuanDiT/tokenizer/config.json | 34 + .../tokenizer/special_tokens_map.json | 7 + .../tokenizer/tokenizer_config.json | 16 + py/dit/hunyuanDiT/tokenizer/vocab.txt | 47020 ++++++++++++++++ py/dit/utils.py | 38 + py/easyNodes.py | 148 +- py/libs/loader.py | 84 +- web/js/easy/easyDynamicWidgets.js | 3 +- web/js/easy/easyExtraMenu.js | 2 +- 29 files changed, 49109 insertions(+), 10 deletions(-) create mode 100644 .github/FUNDING.yml create mode 100644 py/dit/__init__.py create mode 100644 py/dit/config.py 
create mode 100644 py/dit/hunyuanDiT/config.py create mode 100644 py/dit/hunyuanDiT/config_clip.json create mode 100644 py/dit/hunyuanDiT/config_mt5.json create mode 100644 py/dit/hunyuanDiT/loader.py create mode 100644 py/dit/hunyuanDiT/models/attn_layers.py create mode 100644 py/dit/hunyuanDiT/models/embedders.py create mode 100644 py/dit/hunyuanDiT/models/models.py create mode 100644 py/dit/hunyuanDiT/models/norm_layers.py create mode 100644 py/dit/hunyuanDiT/models/poolers.py create mode 100644 py/dit/hunyuanDiT/models/posemb_layers.py create mode 100644 py/dit/hunyuanDiT/mt5_tokenizer/config.json create mode 100644 py/dit/hunyuanDiT/mt5_tokenizer/special_tokens_map.json create mode 100644 py/dit/hunyuanDiT/mt5_tokenizer/spiece.model create mode 100644 py/dit/hunyuanDiT/mt5_tokenizer/tokenizer_config.json create mode 100644 py/dit/hunyuanDiT/tokenizer/config.json create mode 100644 py/dit/hunyuanDiT/tokenizer/special_tokens_map.json create mode 100644 py/dit/hunyuanDiT/tokenizer/tokenizer_config.json create mode 100644 py/dit/hunyuanDiT/tokenizer/vocab.txt create mode 100644 py/dit/utils.py diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..b9ba58b --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +# These are supported funding model platforms + +custom: ["https://afdian.net/a/yolain"] \ No newline at end of file diff --git a/README.en.md b/README.en.md index 8279b76..68654aa 100644 --- a/README.en.md +++ b/README.en.md @@ -388,6 +388,9 @@ Disclaimer: Opened source was not easy. I have a lot of respect for the contribu [cg-image-picker](https://github.com/chrisgoringe/cg-image-picker) - Image Preview Chooser +[ComfyUI_ExtraModels](https://github.com/city96/ComfyUI_ExtraModels) - DiT custom nodes + + ## 🌟Stargazers My gratitude extends to the generous souls who bestow a star. Your support is much appreciated! 
diff --git a/README.md b/README.md index c51d1c7..e57ee4a 100644 --- a/README.md +++ b/README.md @@ -404,6 +404,8 @@ git clone https://github.com/yolain/ComfyUI-Easy-Use [ComfyUI-BrushNet](https://github.com/nullquant/ComfyUI-BrushNet) - BrushNet 内补节点 +[ComfyUI_ExtraModels](https://github.com/city96/ComfyUI_ExtraModels) - DiT架构相关节点(Pixart、混元DiT等) + ## 🌟Stargazers My gratitude extends to the generous souls who bestow a star. Your support is much appreciated! diff --git a/prestartup_script.py b/prestartup_script.py index c3a59be..600b0c9 100644 --- a/prestartup_script.py +++ b/prestartup_script.py @@ -29,6 +29,7 @@ def add_folder_path_and_extensions(folder_name, full_folder_paths, extensions): add_folder_path_and_extensions("mediapipe", [os.path.join(model_path, "mediapipe")], set(['.tflite','.pth'])) add_folder_path_and_extensions("inpaint", [os.path.join(model_path, "inpaint")], folder_paths.supported_pt_extensions) add_folder_path_and_extensions("prompt_generator", [os.path.join(model_path, "prompt_generator")], folder_paths.supported_pt_extensions) +add_folder_path_and_extensions("t5", [os.path.join(model_path, "t5")], folder_paths.supported_pt_extensions) add_folder_path_and_extensions("checkpoints_thumb", [os.path.join(model_path, "checkpoints")], image_suffixs) add_folder_path_and_extensions("loras_thumb", [os.path.join(model_path, "loras")], image_suffixs) \ No newline at end of file diff --git a/py/dit/__init__.py b/py/dit/__init__.py new file mode 100644 index 0000000..352103a --- /dev/null +++ b/py/dit/__init__.py @@ -0,0 +1,2 @@ +#credit to huchenlei for this module +#from https://github.com/city96/ComfyUI_ExtraModels/ \ No newline at end of file diff --git a/py/dit/config.py b/py/dit/config.py new file mode 100644 index 0000000..07d9410 --- /dev/null +++ b/py/dit/config.py @@ -0,0 +1,120 @@ +""" +List of all DiT model types / settings +""" +sampling_settings = { + "beta_schedule" : "sqrt_linear", + "linear_start" : 0.0001, + "linear_end" : 0.02, + 
"timesteps" : 1000, +} + +dit_conf = { + "XL/2": { # DiT_XL_2 + "unet_config": { + "depth" : 28, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1152, + }, + "sampling_settings" : sampling_settings, + }, + "XL/4": { # DiT_XL_4 + "unet_config": { + "depth" : 28, + "num_heads" : 16, + "patch_size" : 4, + "hidden_size" : 1152, + }, + "sampling_settings" : sampling_settings, + }, + "XL/8": { # DiT_XL_8 + "unet_config": { + "depth" : 28, + "num_heads" : 16, + "patch_size" : 8, + "hidden_size" : 1152, + }, + "sampling_settings" : sampling_settings, + }, + "L/2": { # DiT_L_2 + "unet_config": { + "depth" : 24, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1024, + }, + "sampling_settings" : sampling_settings, + }, + "L/4": { # DiT_L_4 + "unet_config": { + "depth" : 24, + "num_heads" : 16, + "patch_size" : 4, + "hidden_size" : 1024, + }, + "sampling_settings" : sampling_settings, + }, + "L/8": { # DiT_L_8 + "unet_config": { + "depth" : 24, + "num_heads" : 16, + "patch_size" : 8, + "hidden_size" : 1024, + }, + "sampling_settings" : sampling_settings, + }, + "B/2": { # DiT_B_2 + "unet_config": { + "depth" : 12, + "num_heads" : 12, + "patch_size" : 2, + "hidden_size" : 768, + }, + "sampling_settings" : sampling_settings, + }, + "B/4": { # DiT_B_4 + "unet_config": { + "depth" : 12, + "num_heads" : 12, + "patch_size" : 4, + "hidden_size" : 768, + }, + "sampling_settings" : sampling_settings, + }, + "B/8": { # DiT_B_8 + "unet_config": { + "depth" : 12, + "num_heads" : 12, + "patch_size" : 8, + "hidden_size" : 768, + }, + "sampling_settings" : sampling_settings, + }, + "S/2": { # DiT_S_2 + "unet_config": { + "depth" : 12, + "num_heads" : 6, + "patch_size" : 2, + "hidden_size" : 384, + }, + "sampling_settings" : sampling_settings, + }, + "S/4": { # DiT_S_4 + "unet_config": { + "depth" : 12, + "num_heads" : 6, + "patch_size" : 4, + "hidden_size" : 384, + }, + "sampling_settings" : sampling_settings, + }, + "S/8": { # DiT_S_8 + "unet_config": { + "depth" : 12, + 
"num_heads" : 6, + "patch_size" : 8, + "hidden_size" : 384, + }, + "sampling_settings" : sampling_settings, + }, +} \ No newline at end of file diff --git a/py/dit/hunyuanDiT/config.py b/py/dit/hunyuanDiT/config.py new file mode 100644 index 0000000..c9dda29 --- /dev/null +++ b/py/dit/hunyuanDiT/config.py @@ -0,0 +1,46 @@ +"""List of all HYDiT model types / settings""" +sampling_settings = { + "beta_schedule" : "linear", + "linear_start" : 0.00085, + "linear_end" : 0.03, + "timesteps" : 1000, +} + +from argparse import Namespace +hydit_args = Namespace(**{ # normally from argparse + "infer_mode": "torch", + "norm": "layer", + "learn_sigma": True, + "text_states_dim": 1024, + "text_states_dim_t5": 2048, + "text_len": 77, + "text_len_t5": 256, +}) + +hydit_conf = { + "G/2": { # Seems to be the main one + "unet_config": { + "depth" : 40, + "num_heads" : 16, + "patch_size" : 2, + "hidden_size" : 1408, + "mlp_ratio" : 4.3637, + "input_size": (1024//8, 1024//8), + "args": hydit_args, + }, + "sampling_settings" : sampling_settings, + }, +} + +dtypes = ["default", "auto (comfy)", "FP32", "FP16", "BF16"] +devices = ["auto", "cpu", "gpu"] + + +# these are the same as regular DiT, I think +from ..config import dit_conf +for name in ["XL/2", "L/2", "B/2"]: + hydit_conf[name] = { + "unet_config": dit_conf[name]["unet_config"].copy(), + "sampling_settings": sampling_settings, + } + hydit_conf[name]["unet_config"]["args"] = hydit_args \ No newline at end of file diff --git a/py/dit/hunyuanDiT/config_clip.json b/py/dit/hunyuanDiT/config_clip.json new file mode 100644 index 0000000..f629874 --- /dev/null +++ b/py/dit/hunyuanDiT/config_clip.json @@ -0,0 +1,34 @@ +{ + "_name_or_path": "hfl/chinese-roberta-wwm-ext-large", + "architectures": [ + "BertModel" + ], + "attention_probs_dropout_prob": 0.1, + "bos_token_id": 0, + "classifier_dropout": null, + "directionality": "bidi", + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 1024, + 
"initializer_range": 0.02, + "intermediate_size": 4096, + "layer_norm_eps": 1e-12, + "max_position_embeddings": 512, + "model_type": "bert", + "num_attention_heads": 16, + "num_hidden_layers": 24, + "output_past": true, + "pad_token_id": 0, + "pooler_fc_size": 768, + "pooler_num_attention_heads": 12, + "pooler_num_fc_layers": 3, + "pooler_size_per_head": 128, + "pooler_type": "first_token_transform", + "position_embedding_type": "absolute", + "torch_dtype": "float32", + "transformers_version": "4.22.1", + "type_vocab_size": 2, + "use_cache": true, + "vocab_size": 47020 +} diff --git a/py/dit/hunyuanDiT/config_mt5.json b/py/dit/hunyuanDiT/config_mt5.json new file mode 100644 index 0000000..d55cc43 --- /dev/null +++ b/py/dit/hunyuanDiT/config_mt5.json @@ -0,0 +1,33 @@ +{ + "_name_or_path": "mt5", + "architectures": [ + "MT5EncoderModel" + ], + "classifier_dropout": 0.0, + "d_ff": 5120, + "d_kv": 64, + "d_model": 2048, + "decoder_start_token_id": 0, + "dense_act_fn": "gelu_new", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "gated-gelu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": true, + "layer_norm_epsilon": 1e-06, + "model_type": "mt5", + "num_decoder_layers": 24, + "num_heads": 32, + "num_layers": 24, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "tie_word_embeddings": false, + "tokenizer_class": "T5Tokenizer", + "torch_dtype": "float16", + "transformers_version": "4.40.2", + "use_cache": true, + "vocab_size": 250112 +} diff --git a/py/dit/hunyuanDiT/loader.py b/py/dit/hunyuanDiT/loader.py new file mode 100644 index 0000000..c273184 --- /dev/null +++ b/py/dit/hunyuanDiT/loader.py @@ -0,0 +1,240 @@ + + +import os +import torch +import comfy.supported_models_base +import comfy.latent_formats +import comfy.model_patcher +import comfy.model_base +import comfy.utils +import comfy.conds + +from comfy import model_management +from tqdm 
class EXM_HYDiT(comfy.supported_models_base.BASE):
    """ComfyUI model-config adapter for HunYuanDiT.

    Wraps a HYDiT config dict (``unet_config`` / ``sampling_settings``,
    see the sibling config module) in the interface that
    ``comfy.supported_models_base.BASE`` expects. The SDXL latent format
    is reused, so HYDiT latents decode with an SDXL-compatible VAE.
    """
    unet_config = {}
    unet_extra_config = {}
    latent_format = comfy.latent_formats.SDXL

    def __init__(self, model_conf):
        # Pull the two sections out of the plain config dict; missing keys
        # degrade to empty dicts rather than raising.
        self.unet_config = model_conf.get("unet_config", {})
        self.sampling_settings = model_conf.get("sampling_settings", {})
        self.latent_format = self.latent_format()
        # UNET is handled by extension
        self.unet_config["disable_unet_model_creation"] = True

    def model_type(self, state_dict, prefix=""):
        # Always report v-prediction regardless of the checkpoint contents.
        return comfy.model_base.ModelType.V_PREDICTION


class EXM_HYDiT_Model(comfy.model_base.BaseModel):
    """BaseModel subclass that forwards HYDiT-specific conditioning
    (CLIP/T5 contexts and their masks, optional source-size cond) to the
    diffusion model at sampling time via ``extra_conds``."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def extra_conds(self, **kwargs):
        out = super().extra_conds(**kwargs)

        # Required tensors produced by the text encoders; a missing key
        # raises KeyError here, which surfaces encoder wiring mistakes early.
        for name in ["context_t5", "context_mask", "context_t5_mask"]:
            out[name] = comfy.conds.CONDRegular(kwargs[name])

        # Optional original-resolution conditioning (scalar/sequence is
        # converted to a tensor before wrapping).
        src_size_cond = kwargs.get("src_size_cond", None)
        if src_size_cond is not None:
            out["src_size_cond"] = comfy.conds.CONDRegular(torch.tensor(src_size_cond))

        return out
class EXM_HyDiT_Tenc_Temp:
    """Wrapper holding one HYDiT text encoder (mT5 or the CLIP/BERT model),
    its tokenizer, and a ComfyUI ModelPatcher for device management.

    Parameters
    ----------
    no_init : bool
        When True, return immediately with no attributes set; used by
        ``clone()`` to build an empty shell.
    device : str
        "auto", "cpu", "cuda..." or anything else (falls back to the
        default torch device with CPU offload).
    dtype : torch.dtype | None
        Weight dtype; float32 doubles the size estimate.
    model_class : str
        "mT5" selects mT5Model, anything else selects hyCLIPModel.
    **kwargs
        Ignored; accepted for forward compatibility.
        (Fixed: was ``*kwargs``, which rejected keyword extras.)
    """
    def __init__(self, no_init=False, device="cpu", dtype=None, model_class="mT5", **kwargs):
        if no_init:
            return

        # Rough VRAM footprint estimate in bytes for the ModelPatcher
        # (8 GiB for mT5, 2 GiB for CLIP; doubled for fp32).
        size = 8 if model_class == "mT5" else 2
        if dtype == torch.float32:
            size *= 2
        size *= (1024**3)

        if device == "auto":
            self.load_device = model_management.text_encoder_device()
            self.offload_device = model_management.text_encoder_offload_device()
            self.init_device = "cpu"
        elif device == "cpu":
            size = 0  # doesn't matter
            self.load_device = "cpu"
            self.offload_device = "cpu"
            self.init_device = "cpu"
        elif device.startswith("cuda"):
            print("Direct CUDA device override!\nVRAM will not be freed by default.")
            size = 0  # not used
            self.load_device = device
            self.offload_device = device
            self.init_device = device
        else:
            self.load_device = model_management.get_torch_device()
            self.offload_device = "cpu"
            self.init_device = "cpu"

        self.dtype = dtype
        self.device = self.load_device
        if model_class == "mT5":
            self.cond_stage_model = mT5Model(
                device=self.load_device,
                dtype=self.dtype,
            )
            tokenizer_args = {"subfolder": "t2i/mt5"}  # web (unused while loading locally)
            tokenizer_path = os.path.join(  # local
                os.path.dirname(os.path.realpath(__file__)),
                "mt5_tokenizer",
            )
        else:
            self.cond_stage_model = hyCLIPModel(
                device=self.load_device,
                dtype=self.dtype,
            )
            tokenizer_args = {"subfolder": "t2i/tokenizer"}  # web (unused while loading locally)
            tokenizer_path = os.path.join(  # local
                os.path.dirname(os.path.realpath(__file__)),
                "tokenizer",
            )
        # Alternative: fetch the tokenizer from the hub instead of the bundled copy.
        # self.tokenizer = AutoTokenizer.from_pretrained(
        #     "Tencent-Hunyuan/HunyuanDiT",
        #     **tokenizer_args
        # )
        self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path)
        self.patcher = comfy.model_patcher.ModelPatcher(
            self.cond_stage_model,
            load_device=self.load_device,
            offload_device=self.offload_device,
            current_device=self.load_device,
            size=size,
        )

    def clone(self):
        # Shallow clone: shares the encoder/tokenizer, clones only the patcher.
        n = EXM_HyDiT_Tenc_Temp(no_init=True)
        n.patcher = self.patcher.clone()
        n.cond_stage_model = self.cond_stage_model
        n.tokenizer = self.tokenizer
        return n

    def load_sd(self, sd):
        # Delegate state-dict loading to the wrapped encoder.
        return self.cond_stage_model.load_sd(sd)

    def get_sd(self):
        return self.cond_stage_model.state_dict()

    def load_model(self):
        # Move the encoder onto its load device (no-op for pure-CPU setups).
        if self.load_device != "cpu":
            model_management.load_model_gpu(self.patcher)
        return self.patcher

    def add_patches(self, patches, strength_patch=1.0, strength_model=1.0):
        return self.patcher.add_patches(patches, strength_patch, strength_model)

    def get_key_patches(self):
        return self.patcher.get_key_patches()
# MT5 model
class mT5Model(torch.nn.Module):
    """Frozen mT5 text encoder for HYDiT's long-prompt branch.

    The architecture is built (with uninitialized weights) from the bundled
    ``config_mt5.json``; real weights are supplied later via ``load_sd``.
    """
    def __init__(self, textmodel_json_config=None, device="cpu", max_length=256, freeze=True, dtype=None):
        super().__init__()
        self.device = device
        self.dtype = dtype
        self.max_length = max_length
        if textmodel_json_config is None:
            # Default to the config file shipped next to this module.
            textmodel_json_config = os.path.join(
                os.path.dirname(os.path.realpath(__file__)),
                f"config_mt5.json"
            )
        config = T5Config.from_json_file(textmodel_json_config)
        # no_init_weights skips random init; weights come from load_sd later.
        with modeling_utils.no_init_weights():
            self.transformer = T5EncoderModel(config)
        # NOTE: `to` is overridden below, so this moves/casts only the
        # inner transformer, not this wrapper module itself.
        self.to(dtype)
        if freeze:
            self.freeze()

    def freeze(self):
        # Switch to eval mode and stop gradients for every parameter.
        self.transformer = self.transformer.eval()
        for param in self.parameters():
            param.requires_grad = False

    def load_sd(self, sd):
        # strict=False tolerates missing/extra keys in checkpoint exports.
        return self.transformer.load_state_dict(sd, strict=False)

    def to(self, *args, **kwargs):
        # Delegate device/dtype moves to the wrapped transformer.
        return self.transformer.to(*args, **kwargs)
def rotate_half(x):
    # Interpret the last dim as (D//2) complex pairs (a, b) and map each to
    # (-b, a), i.e. multiplication by i — the real-valued half of RoPE.
    x_real, x_imag = x.float().reshape(*x.shape[:-1], -1, 2).unbind(-1)  # [B, S, H, D//2]
    return torch.stack([-x_imag, x_real], dim=-1).flatten(3)


def apply_rotary_emb(
        xq: torch.Tensor,
        xk: Optional[torch.Tensor],
        freqs_cis: Union[torch.Tensor, Tuple[torch.Tensor]],
        head_first: bool = False,
) -> Tuple[torch.Tensor, torch.Tensor]:
    """
    Apply rotary embeddings to input tensors using the given frequency tensor.

    This function applies rotary embeddings to the given query 'xq' and key 'xk' tensors using the provided
    frequency tensor 'freqs_cis'. The input tensors are reshaped as complex numbers, and the frequency tensor
    is reshaped for broadcasting compatibility. The resulting tensors contain rotary embeddings and are
    returned as real tensors.

    Args:
        xq (torch.Tensor): Query tensor to apply rotary embeddings. [B, S, H, D]
        xk (torch.Tensor): Key tensor to apply rotary embeddings.   [B, S, H, D]
            May be None, in which case the second return value is None.
        freqs_cis (Union[torch.Tensor, Tuple[torch.Tensor]]): Precomputed frequency tensor for complex exponentials.
            A (cos, sin) tuple selects the real-valued path; a single tensor
            is treated as complex values.
        head_first (bool): head dimension first (except batch dim) or not.

    Returns:
        Tuple[torch.Tensor, torch.Tensor]: Tuple of modified query tensor and key tensor with rotary embeddings.
    """
    xk_out = None
    if isinstance(freqs_cis, tuple):
        # Real-valued rotation: x' = x*cos + rotate_half(x)*sin, computed in
        # float32 and cast back to the input dtype.
        cos, sin = reshape_for_broadcast(freqs_cis, xq, head_first)  # [S, D]
        cos, sin = cos.to(xq.device), sin.to(xq.device)
        xq_out = (xq.float() * cos + rotate_half(xq.float()) * sin).type_as(xq)
        if xk is not None:
            xk_out = (xk.float() * cos + rotate_half(xk.float()) * sin).type_as(xk)
    else:
        # Complex path: view pairs of channels as complex numbers and rotate
        # by elementwise multiplication with freqs_cis.
        xq_ = torch.view_as_complex(xq.float().reshape(*xq.shape[:-1], -1, 2))  # [B, S, H, D//2]
        freqs_cis = reshape_for_broadcast(freqs_cis, xq_, head_first).to(xq.device)  # [S, D//2] --> [1, S, 1, D//2]
        xq_out = torch.view_as_real(xq_ * freqs_cis).flatten(3).type_as(xq)
        if xk is not None:
            xk_ = torch.view_as_complex(xk.float().reshape(*xk.shape[:-1], -1, 2))  # [B, S, H, D//2]
            xk_out = torch.view_as_real(xk_ * freqs_cis).flatten(3).type_as(xk)

    return xq_out, xk_out
+ """ + def __init__(self, + dim, + num_heads, + qkv_bias=True, + qk_norm=False, + attn_drop=0.0, + proj_drop=0.0, + device=None, + dtype=None, + norm_layer=nn.LayerNorm, + ): + factory_kwargs = {'device': device, 'dtype': dtype} + super().__init__() + self.dim = dim + self.num_heads = num_heads + assert self.dim % num_heads == 0, "self.kdim must be divisible by num_heads" + self.head_dim = self.dim // num_heads + assert self.head_dim % 8 == 0 and self.head_dim <= 128, "Only support head_dim <= 128 and divisible by 8" + + self.Wqkv = nn.Linear(dim, 3 * dim, bias=qkv_bias, **factory_kwargs) + # TODO: eps should be 1 / 65530 if using fp16 + self.q_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + self.k_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity() + self.inner_attn = FlashSelfAttention(attention_dropout=attn_drop) + self.out_proj = nn.Linear(dim, dim, bias=qkv_bias, **factory_kwargs) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, freqs_cis_img=None): + """ + Parameters + ---------- + x: torch.Tensor + (batch, seqlen, hidden_dim) (where hidden_dim = num heads * head dim) + freqs_cis_img: torch.Tensor + (batch, hidden_dim // 2), RoPE for image + """ + b, s, d = x.shape + + qkv = self.Wqkv(x) + qkv = qkv.view(b, s, 3, self.num_heads, self.head_dim) # [b, s, 3, h, d] + q, k, v = qkv.unbind(dim=2) # [b, s, h, d] + q = self.q_norm(q).half() # [b, s, h, d] + k = self.k_norm(k).half() + + # Apply RoPE if needed + if freqs_cis_img is not None: + qq, kk = apply_rotary_emb(q, k, freqs_cis_img) + assert qq.shape == q.shape and kk.shape == k.shape, f'qq: {qq.shape}, q: {q.shape}, kk: {kk.shape}, k: {k.shape}' + q, k = qq, kk + + qkv = torch.stack([q, k, v], dim=2) # [b, s, 3, h, d] + context = self.inner_attn(qkv) + out = self.out_proj(context.view(b, s, d)) + out = self.proj_drop(out) + + out_tuple = (out,) + + return out_tuple + + +class 
class FlashCrossMHAModified(nn.Module):
    """
    Cross-attention (image queries attending to text keys/values) backed by
    flash-attn's FlashCrossAttention kernel. Use QK Normalization.
    """
    def __init__(self,
                 qdim,
                 kdim,
                 num_heads,
                 qkv_bias=True,
                 qk_norm=False,
                 attn_drop=0.0,
                 proj_drop=0.0,
                 device=None,
                 dtype=None,
                 norm_layer=nn.LayerNorm,
                 ):
        factory_kwargs = {'device': device, 'dtype': dtype}
        super().__init__()
        self.qdim = qdim
        self.kdim = kdim
        self.num_heads = num_heads
        assert self.qdim % num_heads == 0, "self.qdim must be divisible by num_heads"
        self.head_dim = self.qdim // num_heads
        # Constraint imposed by the flash-attn kernel.
        assert self.head_dim % 8 == 0 and self.head_dim <= 128, "Only support head_dim <= 128 and divisible by 8"

        self.scale = self.head_dim ** -0.5

        self.q_proj = nn.Linear(qdim, qdim, bias=qkv_bias, **factory_kwargs)
        # Fused K/V projection from the text hidden size to 2 * qdim.
        self.kv_proj = nn.Linear(kdim, 2 * qdim, bias=qkv_bias, **factory_kwargs)

        # TODO: eps should be 1 / 65530 if using fp16
        self.q_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity()
        self.k_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6) if qk_norm else nn.Identity()

        self.inner_attn = FlashCrossAttention(attention_dropout=attn_drop)
        self.out_proj = nn.Linear(qdim, qdim, bias=qkv_bias, **factory_kwargs)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x, y, freqs_cis_img=None):
        """
        Parameters
        ----------
        x: torch.Tensor
            (batch, seqlen1, hidden_dim) (where hidden_dim = num_heads * head_dim)
        y: torch.Tensor
            (batch, seqlen2, hidden_dim2)
        freqs_cis_img: torch.Tensor
            (batch, hidden_dim // num_heads), RoPE for image
        """
        b, s1, _ = x.shape     # [b, s1, D]
        _, s2, _ = y.shape     # [b, s2, 1024]

        q = self.q_proj(x).view(b, s1, self.num_heads, self.head_dim)       # [b, s1, h, d]
        kv = self.kv_proj(y).view(b, s2, 2, self.num_heads, self.head_dim)  # [b, s2, 2, h, d]
        k, v = kv.unbind(dim=2)  # [b, s2, h, d]
        # fp16 casts for the flash-attn kernel. NOTE(review): k is cast but v
        # is not, so torch.stack below assumes v is already half — confirm.
        q = self.q_norm(q).half()  # [b, s1, h, d]
        k = self.k_norm(k).half()  # [b, s2, h, d]

        # Apply RoPE if needed (queries only; keys come from text tokens).
        if freqs_cis_img is not None:
            qq, _ = apply_rotary_emb(q, None, freqs_cis_img)
            assert qq.shape == q.shape, f'qq: {qq.shape}, q: {q.shape}'
            q = qq  # [b, s1, h, d]
        kv = torch.stack([k, v], dim=2)  # [b, s1, 2, h, d]
        context = self.inner_attn(q, kv)  # [b, s1, h, d]
        context = context.view(b, s1, -1)  # [b, s1, D]

        out = self.out_proj(context)
        out = self.proj_drop(out)

        # Returned as a 1-tuple to mirror the other attention variants.
        out_tuple = (out,)

        return out_tuple
class Attention(nn.Module):
    """
    Plain (non-flash) scaled-dot-product self-attention with optional QK
    normalization and RoPE support.

    We rename some layer names (Wqkv / out_proj) to align with flash
    attention so state dicts stay interchangeable between the variants.
    """
    def __init__(self, dim, num_heads, qkv_bias=True, qk_norm=False, attn_drop=0., proj_drop=0.,
                 norm_layer=nn.LayerNorm,
                 ):
        super().__init__()
        self.dim = dim
        self.num_heads = num_heads
        assert self.dim % num_heads == 0, 'dim should be divisible by num_heads'
        self.head_dim = self.dim // num_heads
        # This assertion is aligned with flash attention
        assert self.head_dim % 8 == 0 and self.head_dim <= 128, "Only support head_dim <= 128 and divisible by 8"
        self.scale = self.head_dim ** -0.5

        # qkv --> Wqkv (fused projection, flash-attn naming)
        self.Wqkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
        # TODO: eps should be 1 / 65530 if using fp16
        if qk_norm:
            self.q_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6)
            self.k_norm = norm_layer(self.head_dim, elementwise_affine=True, eps=1e-6)
        else:
            self.q_norm = nn.Identity()
            self.k_norm = nn.Identity()
        self.attn_drop = nn.Dropout(attn_drop)
        self.out_proj = nn.Linear(dim, dim)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x, freqs_cis_img=None):
        """Standard softmax attention over `x` (batch, seq, dim); returns a
        1-tuple to mirror the flash-attn variants."""
        bsz, seq_len, channels = x.shape

        # Project once, then split heads: [3, b, h, s, d]
        packed = self.Wqkv(x).reshape(bsz, seq_len, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)
        q, k, v = packed.unbind(0)          # each [b, h, s, d]
        q = self.q_norm(q)
        k = self.k_norm(k)

        # Apply RoPE if needed (head-first layout here)
        if freqs_cis_img is not None:
            qq, kk = apply_rotary_emb(q, k, freqs_cis_img, head_first=True)
            assert qq.shape == q.shape and kk.shape == k.shape, \
                f'qq: {qq.shape}, q: {q.shape}, kk: {kk.shape}, k: {k.shape}'
            q, k = qq, kk

        # Scaled dot-product attention, computed explicitly.
        scores = (q * self.scale) @ k.transpose(-2, -1)   # [b, h, s, s]
        scores = scores.softmax(dim=-1)
        scores = self.attn_drop(scores)
        mixed = scores @ v                                # [b, h, s, d]

        # Merge heads back and project out.
        mixed = mixed.transpose(1, 2).reshape(bsz, seq_len, channels)
        out = self.out_proj(mixed)
        out = self.proj_drop(out)

        return (out,)
+ """ + def __init__( + self, + img_size=224, + patch_size=16, + in_chans=3, + embed_dim=768, + norm_layer=None, + flatten=True, + bias=True, + ): + super().__init__() + if isinstance(img_size, int): + img_size = to_2tuple(img_size) + elif isinstance(img_size, (tuple, list)) and len(img_size) == 2: + img_size = tuple(img_size) + else: + raise ValueError(f"img_size must be int or tuple/list of length 2. Got {img_size}") + patch_size = to_2tuple(patch_size) + self.img_size = img_size + self.patch_size = patch_size + self.grid_size = (img_size[0] // patch_size[0], img_size[1] // patch_size[1]) + self.num_patches = self.grid_size[0] * self.grid_size[1] + self.flatten = flatten + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size, bias=bias) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def update_image_size(self, img_size): + self.img_size = img_size + self.grid_size = (img_size[0] // self.patch_size[0], img_size[1] // self.patch_size[1]) + self.num_patches = self.grid_size[0] * self.grid_size[1] + + def forward(self, x): + # B, C, H, W = x.shape + # _assert(H == self.img_size[0], f"Input image height ({H}) doesn't match model ({self.img_size[0]}).") + # _assert(W == self.img_size[1], f"Input image width ({W}) doesn't match model ({self.img_size[1]}).") + x = self.proj(x) + if self.flatten: + x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = self.norm(x) + return x + + +def timestep_embedding(t, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param t: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an (N, D) Tensor of positional embeddings. 
+ """ + # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) + * torch.arange(start=0, end=half, dtype=torch.float32) + / half + ).to(device=t.device) # size: [dim/2], 一个指数衰减的曲线 + args = t[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat( + [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 + ) + else: + embedding = repeat(t, "b -> b d", d=dim) + return embedding + + +class TimestepEmbedder(nn.Module): + """ + Embeds scalar timesteps into vector representations. + """ + def __init__(self, hidden_size, frequency_embedding_size=256, out_size=None): + super().__init__() + if out_size is None: + out_size = hidden_size + self.mlp = nn.Sequential( + nn.Linear(frequency_embedding_size, hidden_size, bias=True), + nn.SiLU(), + nn.Linear(hidden_size, out_size, bias=True), + ) + self.frequency_embedding_size = frequency_embedding_size + + def forward(self, t): + t_freq = timestep_embedding(t, self.frequency_embedding_size).type(self.mlp[0].weight.dtype) + t_emb = self.mlp(t_freq) + return t_emb diff --git a/py/dit/hunyuanDiT/models/models.py b/py/dit/hunyuanDiT/models/models.py new file mode 100644 index 0000000..e481206 --- /dev/null +++ b/py/dit/hunyuanDiT/models/models.py @@ -0,0 +1,428 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from timm.models.vision_transformer import Mlp + +from .attn_layers import Attention, FlashCrossMHAModified, FlashSelfMHAModified, CrossAttention +from .embedders import TimestepEmbedder, PatchEmbed, timestep_embedding +from .norm_layers import RMSNorm +from .poolers import AttentionPool +from .posemb_layers import get_2d_rotary_pos_embed, get_fill_resize_and_crop + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + + +class FP32_Layernorm(nn.LayerNorm): + def forward(self, inputs: 
class FP32_SiLU(nn.SiLU):
    """SiLU that always evaluates in float32 for numerical stability, then
    casts the result back to the caller's dtype (e.g. fp16 inputs)."""

    def forward(self, inputs: torch.Tensor) -> torch.Tensor:
        upcast = inputs.float()
        activated = torch.nn.functional.silu(upcast, inplace=False)
        return activated.to(inputs.dtype)
    def forward(self, x, c=None, text_states=None, freq_cis_img=None, skip=None):
        """One HYDiT transformer block: optional long-skip fusion, then
        shift-modulated self-attention, text cross-attention, and FFN,
        each with a residual connection.

        x: image tokens (B, L, D); c: conditioning vector (B, D);
        text_states: fused CLIP+T5 tokens; freq_cis_img: image RoPE;
        skip: matching encoder-side activation when this is a decoder block.
        """
        # Long Skip Connection: concat with the saved encoder activation,
        # normalize, and project back down to hidden size (U-ViT style).
        if self.skip_linear is not None:
            cat = torch.cat([x, skip], dim=-1)
            cat = self.skip_norm(cat)
            x = self.skip_linear(cat)

        # Self-Attention, conditioned via an additive shift only
        # (no scale/gate as in adaLN — see the `add` design in __init__).
        shift_msa = self.default_modulation(c).unsqueeze(dim=1)
        attn_inputs = (
            self.norm1(x) + shift_msa, freq_cis_img,
        )
        x = x + self.attn1(*attn_inputs)[0]

        # Cross-Attention over the text tokens (unmodulated norm).
        cross_inputs = (
            self.norm3(x), text_states, freq_cis_img
        )
        x = x + self.attn2(*cross_inputs)[0]

        # FFN Layer
        mlp_inputs = self.norm2(x)
        x = x + self.mlp(mlp_inputs)

        return x
+ """ + def __init__(self, final_hidden_size, c_emb_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(final_hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(final_hidden_size, patch_size * patch_size * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + FP32_SiLU(), + nn.Linear(c_emb_size, 2 * final_hidden_size, bias=True) + ) + + def forward(self, x, c): + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class HunYuanDiT(nn.Module): + """ + HunYuanDiT: Diffusion model with a Transformer backbone. + + Parameters + ---------- + args: argparse.Namespace + The arguments parsed by argparse. + input_size: tuple + The size of the input image. + patch_size: int + The size of the patch. + in_channels: int + The number of input channels. + hidden_size: int + The hidden size of the transformer backbone. + depth: int + The number of transformer blocks. + num_heads: int + The number of attention heads. + mlp_ratio: float + The ratio of the hidden size of the MLP in the transformer block. + log_fn: callable + The logging function. 
+ """ + def __init__( + self, args, + input_size=(32, 32), + patch_size=2, + in_channels=4, + hidden_size=1152, + depth=28, + num_heads=16, + mlp_ratio=4.0, + log_fn=print, + **kwargs, + ): + super().__init__() + self.args = args + self.log_fn = log_fn + self.depth = depth + self.learn_sigma = args.learn_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if args.learn_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.hidden_size = hidden_size + self.head_size = hidden_size // num_heads + self.text_states_dim = args.text_states_dim + self.text_states_dim_t5 = args.text_states_dim_t5 + self.text_len = args.text_len + self.text_len_t5 = args.text_len_t5 + self.norm = args.norm + + use_flash_attn = args.infer_mode == 'fa' + if use_flash_attn: + log_fn(f" Enable Flash Attention.") + qk_norm = True # See http://arxiv.org/abs/2302.05442 for details. + + self.mlp_t5 = nn.Sequential( + nn.Linear(self.text_states_dim_t5, self.text_states_dim_t5 * 4, bias=True), + FP32_SiLU(), + nn.Linear(self.text_states_dim_t5 * 4, self.text_states_dim, bias=True), + ) + # learnable replace + self.text_embedding_padding = nn.Parameter( + torch.randn(self.text_len + self.text_len_t5, self.text_states_dim, dtype=torch.float32)) + + # Attention pooling + self.pooler = AttentionPool(self.text_len_t5, self.text_states_dim_t5, num_heads=8, output_dim=1024) + + # Here we use a default learned embedder layer for future extension. 
+ self.style_embedder = nn.Embedding(1, hidden_size) + + # Image size and crop size conditions + self.extra_in_dim = 256 * 6 + hidden_size + + # Text embedding for `add` + self.last_size = input_size + self.x_embedder = PatchEmbed(input_size, patch_size, in_channels, hidden_size) + self.t_embedder = TimestepEmbedder(hidden_size) + self.extra_in_dim += 1024 + self.extra_embedder = nn.Sequential( + nn.Linear(self.extra_in_dim, hidden_size * 4), + FP32_SiLU(), + nn.Linear(hidden_size * 4, hidden_size, bias=True), + ) + + # Image embedding + num_patches = self.x_embedder.num_patches + log_fn(f" Number of tokens: {num_patches}") + + # HUnYuanDiT Blocks + self.blocks = nn.ModuleList([ + HunYuanDiTBlock(hidden_size=hidden_size, + c_emb_size=hidden_size, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + text_states_dim=self.text_states_dim, + use_flash_attn=use_flash_attn, + qk_norm=qk_norm, + norm_type=self.norm, + skip=layer > depth // 2, + ) + for layer in range(depth) + ]) + + self.final_layer = FinalLayer(hidden_size, hidden_size, patch_size, self.out_channels) + self.unpatchify_channels = self.out_channels + + def forward_raw(self, + x, + t, + encoder_hidden_states=None, + text_embedding_mask=None, + encoder_hidden_states_t5=None, + text_embedding_mask_t5=None, + image_meta_size=None, + style=None, + cos_cis_img=None, + sin_cis_img=None, + return_dict=False, + ): + """ + Forward pass of the encoder. + + Parameters + ---------- + x: torch.Tensor + (B, D, H, W) + t: torch.Tensor + (B) + encoder_hidden_states: torch.Tensor + CLIP text embedding, (B, L_clip, D) + text_embedding_mask: torch.Tensor + CLIP text embedding mask, (B, L_clip) + encoder_hidden_states_t5: torch.Tensor + T5 text embedding, (B, L_t5, D) + text_embedding_mask_t5: torch.Tensor + T5 text embedding mask, (B, L_t5) + image_meta_size: torch.Tensor + (B, 6) + style: torch.Tensor + (B) + cos_cis_img: torch.Tensor + sin_cis_img: torch.Tensor + return_dict: bool + Whether to return a dictionary. 
+ """ + + text_states = encoder_hidden_states # 2,77,1024 + text_states_t5 = encoder_hidden_states_t5 # 2,256,2048 + text_states_mask = text_embedding_mask.bool() # 2,77 + text_states_t5_mask = text_embedding_mask_t5.bool() # 2,256 + b_t5, l_t5, c_t5 = text_states_t5.shape + text_states_t5 = self.mlp_t5(text_states_t5.view(-1, c_t5)) + text_states = torch.cat([text_states, text_states_t5.view(b_t5, l_t5, -1)], dim=1) # 2,205,1024 + clip_t5_mask = torch.cat([text_states_mask, text_states_t5_mask], dim=-1) + + clip_t5_mask = clip_t5_mask + text_states = torch.where(clip_t5_mask.unsqueeze(2), text_states, self.text_embedding_padding.to(text_states)) + + _, _, oh, ow = x.shape + th, tw = oh // self.patch_size, ow // self.patch_size + + # ========================= Build time and image embedding ========================= + t = self.t_embedder(t) + x = self.x_embedder(x) + + # Get image RoPE embedding according to `reso`lution. + freqs_cis_img = (cos_cis_img, sin_cis_img) + + # ========================= Concatenate all extra vectors ========================= + # Build text tokens with pooling + extra_vec = self.pooler(encoder_hidden_states_t5) + + # Build image meta size tokens + image_meta_size = timestep_embedding(image_meta_size.view(-1), 256) # [B * 6, 256] + # if self.args.use_fp16: + # image_meta_size = image_meta_size.half() + image_meta_size = image_meta_size.view(-1, 6 * 256) + extra_vec = torch.cat([extra_vec, image_meta_size], dim=1) # [B, D + 6 * 256] + + # Build style tokens + style_embedding = self.style_embedder(style) + extra_vec = torch.cat([extra_vec, style_embedding], dim=1) + + # Concatenate all extra vectors + c = t + self.extra_embedder(extra_vec.to(self.dtype)) # [B, D] + + # ========================= Forward pass through HunYuanDiT blocks ========================= + skips = [] + for layer, block in enumerate(self.blocks): + if layer > self.depth // 2: + skip = skips.pop() + x = block(x, c, text_states, freqs_cis_img, skip) # (N, L, D) + else: + x = 
block(x, c, text_states, freqs_cis_img) # (N, L, D) + + if layer < (self.depth // 2 - 1): + skips.append(x) + + # ========================= Final layer ========================= + x = self.final_layer(x, c) # (N, L, patch_size ** 2 * out_channels) + x = self.unpatchify(x, th, tw) # (N, out_channels, H, W) + + if return_dict: + return {'x': x} + return x + + def calc_rope(self, height, width): + """ + Probably not the best in terms of perf to have this here + """ + th = height // 8 // self.patch_size + tw = width // 8 // self.patch_size + base_size = 512 // 8 // self.patch_size + start, stop = get_fill_resize_and_crop((th, tw), base_size) + sub_args = [start, stop, (th, tw)] + rope = get_2d_rotary_pos_embed(self.head_size, *sub_args) + return rope + + def forward(self, x, timesteps, context, context_mask=None, context_t5=None, context_t5_mask=None, src_size_cond=(1024,1024), **kwargs): + """ + Forward pass that adapts comfy input to original forward function + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + timesteps: (N,) tensor of diffusion timesteps + context: (N, 1, 77, C) CLIP conditioning + context_t5: (N, 1, 256, C) MT5 conditioning + """ + # context_mask = torch.zeros(x.shape[0], 77, device=x.device) + # context_t5_mask = torch.zeros(x.shape[0], 256, device=x.device) + + # style + style = torch.as_tensor([0] * (x.shape[0]), device=x.device) + + # image size - todo separate for cond/uncond when batched + if torch.is_tensor(src_size_cond): + src_size_cond = (int(src_size_cond[0][0]), int(src_size_cond[0][1])) + + image_size = (x.shape[2]//2*16, x.shape[3]//2*16) + size_cond = list(src_size_cond) + [image_size[1], image_size[0], 0, 0] + image_meta_size = torch.as_tensor([size_cond] * x.shape[0], device=x.device) + + # RoPE + rope = self.calc_rope(*image_size) + + # Update x_embedder if image size changed + if self.last_size != image_size: + from tqdm import tqdm + tqdm.write(f"HyDiT: New image size {image_size}") + 
self.x_embedder.update_image_size( + (image_size[0]//8, image_size[1]//8), + ) + self.last_size = image_size + + # Run original forward pass + out = self.forward_raw( + x = x.to(self.dtype), + t = timesteps.to(self.dtype), + encoder_hidden_states = context.to(self.dtype), + text_embedding_mask = context_mask.to(self.dtype), + encoder_hidden_states_t5 = context_t5.to(self.dtype), + text_embedding_mask_t5 = context_t5_mask.to(self.dtype), + image_meta_size = image_meta_size.to(self.dtype), + style = style, + cos_cis_img = rope[0], + sin_cis_img = rope[1], + ) + + # return + out = out.to(torch.float) + if self.learn_sigma: + eps, rest = out[:, :self.in_channels], out[:, self.in_channels:] + return eps + else: + return out + + def unpatchify(self, x, h, w): + """ + x: (N, T, patch_size**2 * C) + imgs: (N, H, W, C) + """ + c = self.unpatchify_channels + p = self.x_embedder.patch_size[0] + # h = w = int(x.shape[1] ** 0.5) + assert h * w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], h, w, p, p, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p, w * p)) + return imgs diff --git a/py/dit/hunyuanDiT/models/norm_layers.py b/py/dit/hunyuanDiT/models/norm_layers.py new file mode 100644 index 0000000..5204ad9 --- /dev/null +++ b/py/dit/hunyuanDiT/models/norm_layers.py @@ -0,0 +1,68 @@ +import torch +import torch.nn as nn + + +class RMSNorm(nn.Module): + def __init__(self, dim: int, elementwise_affine=True, eps: float = 1e-6): + """ + Initialize the RMSNorm normalization layer. + + Args: + dim (int): The dimension of the input tensor. + eps (float, optional): A small value added to the denominator for numerical stability. Default is 1e-6. + + Attributes: + eps (float): A small value added to the denominator for numerical stability. + weight (nn.Parameter): Learnable scaling parameter. 
+ + """ + super().__init__() + self.eps = eps + if elementwise_affine: + self.weight = nn.Parameter(torch.ones(dim)) + + def _norm(self, x): + """ + Apply the RMSNorm normalization to the input tensor. + + Args: + x (torch.Tensor): The input tensor. + + Returns: + torch.Tensor: The normalized tensor. + + """ + return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps) + + def forward(self, x): + """ + Forward pass through the RMSNorm layer. + + Args: + x (torch.Tensor): The input tensor. + + Returns: + torch.Tensor: The output tensor after applying RMSNorm. + + """ + output = self._norm(x.float()).type_as(x) + if hasattr(self, "weight"): + output = output * self.weight + return output + + +class GroupNorm32(nn.GroupNorm): + def __init__(self, num_groups, num_channels, eps=1e-5, dtype=None): + super().__init__(num_groups=num_groups, num_channels=num_channels, eps=eps, dtype=dtype) + + def forward(self, x): + y = super().forward(x).to(x.dtype) + return y + +def normalization(channels, dtype=None): + """ + Make a standard normalization layer. + :param channels: number of input channels. + :return: an nn.Module for normalization. 
+ """ + return GroupNorm32(num_channels=channels, num_groups=32, dtype=dtype) diff --git a/py/dit/hunyuanDiT/models/poolers.py b/py/dit/hunyuanDiT/models/poolers.py new file mode 100644 index 0000000..a4adcac --- /dev/null +++ b/py/dit/hunyuanDiT/models/poolers.py @@ -0,0 +1,39 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class AttentionPool(nn.Module): + def __init__(self, spacial_dim: int, embed_dim: int, num_heads: int, output_dim: int = None): + super().__init__() + self.positional_embedding = nn.Parameter(torch.randn(spacial_dim + 1, embed_dim) / embed_dim ** 0.5) + self.k_proj = nn.Linear(embed_dim, embed_dim) + self.q_proj = nn.Linear(embed_dim, embed_dim) + self.v_proj = nn.Linear(embed_dim, embed_dim) + self.c_proj = nn.Linear(embed_dim, output_dim or embed_dim) + self.num_heads = num_heads + + def forward(self, x): + x = x.permute(1, 0, 2) # NLC -> LNC + x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (L+1)NC + x = x + self.positional_embedding[:, None, :].to(x.dtype) # (L+1)NC + x, _ = F.multi_head_attention_forward( + query=x[:1], key=x, value=x, + embed_dim_to_check=x.shape[-1], + num_heads=self.num_heads, + q_proj_weight=self.q_proj.weight, + k_proj_weight=self.k_proj.weight, + v_proj_weight=self.v_proj.weight, + in_proj_weight=None, + in_proj_bias=torch.cat([self.q_proj.bias, self.k_proj.bias, self.v_proj.bias]), + bias_k=None, + bias_v=None, + add_zero_attn=False, + dropout_p=0, + out_proj_weight=self.c_proj.weight, + out_proj_bias=self.c_proj.bias, + use_separate_proj_weight=True, + training=self.training, + need_weights=False + ) + return x.squeeze(0) diff --git a/py/dit/hunyuanDiT/models/posemb_layers.py b/py/dit/hunyuanDiT/models/posemb_layers.py new file mode 100644 index 0000000..62c83df --- /dev/null +++ b/py/dit/hunyuanDiT/models/posemb_layers.py @@ -0,0 +1,225 @@ +import torch +import numpy as np +from typing import Union + + +def _to_tuple(x): + if isinstance(x, int): + return x, x + else: + return 
x + + +def get_fill_resize_and_crop(src, tgt): # src 来源的分辨率 tgt base 分辨率 + th, tw = _to_tuple(tgt) + h, w = _to_tuple(src) + + tr = th / tw # base 分辨率 + r = h / w # 目标分辨率 + + # resize + if r > tr: + resize_height = th + resize_width = int(round(th / h * w)) + else: + resize_width = tw + resize_height = int(round(tw / w * h)) # 根据base分辨率,将目标分辨率resize下来 + + crop_top = int(round((th - resize_height) / 2.0)) + crop_left = int(round((tw - resize_width) / 2.0)) + + return (crop_top, crop_left), (crop_top + resize_height, crop_left + resize_width) + + +def get_meshgrid(start, *args): + if len(args) == 0: + # start is grid_size + num = _to_tuple(start) + start = (0, 0) + stop = num + elif len(args) == 1: + # start is start, args[0] is stop, step is 1 + start = _to_tuple(start) + stop = _to_tuple(args[0]) + num = (stop[0] - start[0], stop[1] - start[1]) + elif len(args) == 2: + # start is start, args[0] is stop, args[1] is num + start = _to_tuple(start) # 左上角 eg: 12,0 + stop = _to_tuple(args[0]) # 右下角 eg: 20,32 + num = _to_tuple(args[1]) # 目标大小 eg: 32,124 + else: + raise ValueError(f"len(args) should be 0, 1 or 2, but got {len(args)}") + + grid_h = np.linspace(start[0], stop[0], num[0], endpoint=False, dtype=np.float32) # 12-20 中间差值32份 0-32 中间差值124份 + grid_w = np.linspace(start[1], stop[1], num[1], endpoint=False, dtype=np.float32) + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) # [2, W, H] + return grid + +################################################################################# +# Sine/Cosine Positional Embedding Functions # +################################################################################# +# https://github.com/facebookresearch/mae/blob/main/util/pos_embed.py + +def get_2d_sincos_pos_embed(embed_dim, start, *args, cls_token=False, extra_tokens=0): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o 
cls_token) + """ + grid = get_meshgrid(start, *args) # [2, H, w] + # grid_h = np.arange(grid_size, dtype=np.float32) + # grid_w = np.arange(grid_size, dtype=np.float32) + # grid = np.meshgrid(grid_w, grid_h) # here w goes first + # grid = np.stack(grid, axis=0) # [2, W, H] + + grid = grid.reshape([2, 1, *grid.shape[1:]]) + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token and extra_tokens > 0: + pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0) + return pos_embed + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position + pos: a list of positions to be encoded: size (W,H) + out: (M, D) + """ + assert embed_dim % 2 == 0 + omega = np.arange(embed_dim // 2, dtype=np.float64) + omega /= embed_dim / 2. + omega = 1. / 10000**omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb + + +################################################################################# +# Rotary Positional Embedding Functions # +################################################################################# +# https://github.com/facebookresearch/llama/blob/main/llama/model.py#L443 + +def get_2d_rotary_pos_embed(embed_dim, start, *args, use_real=True): + """ + This is a 2d version of precompute_freqs_cis, which is a RoPE for image tokens with 2d structure. 
+ + Parameters + ---------- + embed_dim: int + embedding dimension size + start: int or tuple of int + If len(args) == 0, start is num; If len(args) == 1, start is start, args[0] is stop, step is 1; + If len(args) == 2, start is start, args[0] is stop, args[1] is num. + use_real: bool + If True, return real part and imaginary part separately. Otherwise, return complex numbers. + + Returns + ------- + pos_embed: torch.Tensor + [HW, D/2] + """ + grid = get_meshgrid(start, *args) # [2, H, w] + grid = grid.reshape([2, 1, *grid.shape[1:]]) # 返回一个采样矩阵 分辨率与目标分辨率一致 + pos_embed = get_2d_rotary_pos_embed_from_grid(embed_dim, grid, use_real=use_real) + return pos_embed + + +def get_2d_rotary_pos_embed_from_grid(embed_dim, grid, use_real=False): + assert embed_dim % 4 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_rotary_pos_embed(embed_dim // 2, grid[0].reshape(-1), use_real=use_real) # (H*W, D/4) + emb_w = get_1d_rotary_pos_embed(embed_dim // 2, grid[1].reshape(-1), use_real=use_real) # (H*W, D/4) + + if use_real: + cos = torch.cat([emb_h[0], emb_w[0]], dim=1) # (H*W, D/2) + sin = torch.cat([emb_h[1], emb_w[1]], dim=1) # (H*W, D/2) + return cos, sin + else: + emb = torch.cat([emb_h, emb_w], dim=1) # (H*W, D/2) + return emb + + +def get_1d_rotary_pos_embed(dim: int, pos: Union[np.ndarray, int], theta: float = 10000.0, use_real=False): + """ + Precompute the frequency tensor for complex exponentials (cis) with given dimensions. + + This function calculates a frequency tensor with complex exponentials using the given dimension 'dim' + and the end index 'end'. The 'theta' parameter scales the frequencies. + The returned tensor contains complex values in complex64 data type. + + Args: + dim (int): Dimension of the frequency tensor. + pos (np.ndarray, int): Position indices for the frequency tensor. [S] or scalar + theta (float, optional): Scaling factor for frequency computation. Defaults to 10000.0. 
+ use_real (bool, optional): If True, return real part and imaginary part separately. + Otherwise, return complex numbers. + + Returns: + torch.Tensor: Precomputed frequency tensor with complex exponentials. [S, D/2] + + """ + if isinstance(pos, int): + pos = np.arange(pos) + freqs = 1.0 / (theta ** (torch.arange(0, dim, 2)[: (dim // 2)].float() / dim)) # [D/2] + t = torch.from_numpy(pos).to(freqs.device) # type: ignore # [S] + freqs = torch.outer(t, freqs).float() # type: ignore # [S, D/2] + if use_real: + freqs_cos = freqs.cos().repeat_interleave(2, dim=1) # [S, D] + freqs_sin = freqs.sin().repeat_interleave(2, dim=1) # [S, D] + return freqs_cos, freqs_sin + else: + freqs_cis = torch.polar(torch.ones_like(freqs), freqs) # complex64 # [S, D/2] + return freqs_cis + + + +def calc_sizes(rope_img, patch_size, th, tw): + """ 计算 RoPE 的尺寸. """ + if rope_img == 'extend': + # 拓展模式 + sub_args = [(th, tw)] + elif rope_img.startswith('base'): + # 基于一个尺寸, 其他尺寸插值获得. + base_size = int(rope_img[4:]) // 8 // patch_size # 基于512作为base,其他根据512差值得到 + start, stop = get_fill_resize_and_crop((th, tw), base_size) # 需要在32x32里面 crop的左上角和右下角 + sub_args = [start, stop, (th, tw)] + else: + raise ValueError(f"Unknown rope_img: {rope_img}") + return sub_args + + +def init_image_posemb(rope_img, + resolutions, + patch_size, + hidden_size, + num_heads, + log_fn, + rope_real=True, + ): + freqs_cis_img = {} + for reso in resolutions: + th, tw = reso.height // 8 // patch_size, reso.width // 8 // patch_size + sub_args = calc_sizes(rope_img, patch_size, th, tw) # [左上角, 右下角, 目标高宽] 需要在32x32里面 crop的左上角和右下角 + freqs_cis_img[str(reso)] = get_2d_rotary_pos_embed(hidden_size // num_heads, *sub_args, use_real=rope_real) + log_fn(f" Using image RoPE ({rope_img}) ({'real' if rope_real else 'complex'}): {sub_args} | ({reso}) " + f"{freqs_cis_img[str(reso)][0].shape if rope_real else freqs_cis_img[str(reso)].shape}") + return freqs_cis_img diff --git a/py/dit/hunyuanDiT/mt5_tokenizer/config.json 
b/py/dit/hunyuanDiT/mt5_tokenizer/config.json new file mode 100644 index 0000000..fba9f46 --- /dev/null +++ b/py/dit/hunyuanDiT/mt5_tokenizer/config.json @@ -0,0 +1,33 @@ +{ + "_name_or_path": "mt5", + "architectures": [ + "MT5ForConditionalGeneration" + ], + "classifier_dropout": 0.0, + "d_ff": 5120, + "d_kv": 64, + "d_model": 2048, + "decoder_start_token_id": 0, + "dense_act_fn": "gelu_new", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "gated-gelu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": true, + "layer_norm_epsilon": 1e-06, + "model_type": "mt5", + "num_decoder_layers": 24, + "num_heads": 32, + "num_layers": 24, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "tie_word_embeddings": false, + "tokenizer_class": "T5Tokenizer", + "torch_dtype": "float16", + "transformers_version": "4.40.2", + "use_cache": true, + "vocab_size": 250112 +} diff --git a/py/dit/hunyuanDiT/mt5_tokenizer/special_tokens_map.json b/py/dit/hunyuanDiT/mt5_tokenizer/special_tokens_map.json new file mode 100644 index 0000000..6dc4d43 --- /dev/null +++ b/py/dit/hunyuanDiT/mt5_tokenizer/special_tokens_map.json @@ -0,0 +1 @@ +{"eos_token": "", "unk_token": "", "pad_token": ""} \ No newline at end of file diff --git a/py/dit/hunyuanDiT/mt5_tokenizer/spiece.model b/py/dit/hunyuanDiT/mt5_tokenizer/spiece.model new file mode 100644 index 0000000000000000000000000000000000000000..26a2a78808e998e45d2e0c184265897e6695b914 GIT binary patch literal 4309802 zcmZ6U2Xs`$_s8uDV#TZez(SFxv?L@Hu>=x|R0Tvu#Z9s#OOoA?>?VMy2{rWIy95ZK zg%U`iC;=5qNK{M$mJLk=Y}ot%GkZ6W|9Lql=X}3&@66n}GjHC!X=m-5a{LAKkuvFyMyTfDn5?s68Oe9?-!a5N;2M zum^Os2XwLrbhZaX+5@`T1G?J-qU-_D_JAI?fDpSsLhSwsvHK&$?vD_=KSJ#O2(kMk z#O{v}yFWtg{s^)ABgF2H5W7D@?EVO``y<5ek5IcmLhb$twfiH~?vGHrKSJ&P2(|ko z)b5W^yFWth{s^`EBh>DXP`f`u?fwY0`y((2($Ym z%+)2)Fwq-0qJEyFViA{)n*qBf{>F2)jQb?EZ+b 
z`y;~cj|jUzBJBQ%u=^vz?vDt&KO*e@h_L%3!tRfbc7JrV`=g`XA06%f=xFyxN4q~d z+Wpbd?vIXke{{6_qods)9qs<;X!l1)yFWVG{n63xk4|=fbh7)SlieSk?EdIv_eUqY zKRVg{(aG+QPIiBEviqZx-5;Io{^(@)M<=^KI@$fv$?lKNc7JrX`=hhnAD!*~=xp~# zXS+W-+x^km?vKuPe{{C{qqE%~o$dbUZ1+cJyFWVH{n6R(k4U>eBJKW&wEH8{?vF^j zKO*h^h_w47((aE)yFViB{)n{uBhv1VNV`8G?f!_g`yo4ay4n5F&F+uxc7JrY`=h(vAKmT# z=x+B%ce_8j+x^kq?vL(ve{{F|qr2T7-R=J9ZudubyFa?y{n6d-k0`r8qU`>Nvil>- z?vE(DKcejZh_d@5%I=RSyFa4r{)n>sBg*cND7!zR?EZ+d`yoEBTR9$`5Gok>0C}YMPg~!rTd(f*q(XYj`!YMR{Y%Zcp%bnxj5ys0D3|u-B=k&{&Z%RjP9fOROV9g&IGG>+zx47 z1T#fRl^4DNV&|jMTdD&nR@R1|sC7!D%zg=Cbkk(#2`fBZ{(kRdZHjR&`Sj{90{OOW z+g74_fHki)r(`~DG0zBTwF;)ibGYMV=xL31I&-`{dKs)$F;2c|`eh*3@_cEz0*Hl; zW%9qETkGD4Z_4*0nKtmNfTW3}_Vou)OX-vc(o~vA{`#6gaH5=Od#cu^@}07*Ba9?t zWYrrWRgabZXEZoonw$rznWKD8nYHqCt=1r!*ZxevG>2RM*#uOzQ0@J$oRelpx3d1!)dGw+y=en_8vba%*1*73)cpOg9kyP&swYg<5@A znv^xZSj&=}vMZRV8O>>FONgJWk1(S-5!~`1x6;-Q{;g!6!PS^ z{T_&Q{`EQXecfLI2Bk~WXF%wtI_0nfs9A@}d$TQ)AW;V(>Qtv!79FwJNT+Q13P#yR zO1~F=t<`cR$%|uQnp#r{n`VhBvgEqOF!HQeBror<1Tjvz@1EZRCU$ApPBKP=wTPkZ zLpsUUiEyo`6gsc9-$K=VELBGa}lJCB=;vL#7ulxcdTenj-)&C;^PtK5f zkqT+ck97yCF^STBB81$>NvpYvI>?*LRk!?rEGe~g`$)?Z5VbGPCwrg#vo@Iwncm$Z z(Q!LGHx*pdw7~3I{?&@JE-ZEtNyA@p@Zyw6M|alR(Wlc6`mqHH*2`pAq@`z zX*#E~wn>i~u4xAkx|ipq#S2h1Cea}seK4Yh%dcO7$d8t^xdx~X z375U+EfOW?>-4Adyk%6+#&w5C!VsSbI4sOlXm5zjavCO2u_ zM4mXN!M-%)7yFUdFGgVa1nlH6VA-`ZsROH>0$Z9!7ygGfs@K>GB8U<#d4$wq}$ zo223Az+jWv&GQ)hmldU#eAw{6+MH+_>z{*RK_@x$5=gU&CAq?hDu|HI(-d`+;WI&M zByIlWHCMfD^7bW3y?Q_u#Eg;Y|Falf;I&O)YRni1UCO=|aCKQ<85L=%MmgoTUNDSl zFJC&XaNchB`C#OP?_V6F@X%HnJ{hP6r=#@MLMuw;0?FEFv9S)>ldGzyGv}VpoFl%s zprpsEWO$WI%wUZ~E?-cIGg)#PUmZ{?T`MPh0yQTeV}-IlDxso&eKDw%D^rF8)dwN% zW&S7?Fjg8eT4D7x`FskH0!B;70u7`sx((FS&7kG2Leq+FrFoZg>x_*`;};(dRIV(@$3VL27dvW8=3jTWX0;8vS*( ziDbCw>W|(CS6}v!Q~!ftrrRaw+5m%z`=o7G*zI-e(8|h+%XX9%NkxA`G?NqtP07z$ zG|7x_T^1eV@yUk^prp&o;}3u|nd(V0v^PwP)?CiMY>BG2OYbyTtrNU5 
zJOdi+M-Nnt7w3F9WhmVx_ph=B>YPae^%nZwZdZ)~4l@nQDF<5sHMIzFwbF22V#C`gWTdgP9ZN4&Z49HBNeXM@WTFNj3rI*dQ;@^M2*>fKtH)^J{S`m$qIKS_23aP-1zkynU1f0>S-c13e zvV(G$0Li15+};>Ou})>Kkba@6;7pYHj{!AnN4(S@Xfa-Cn}@(O*(5$l9W@l8Kvwp~(`x9cX3QSt{RE5kuwsKLTnN^sA2<7HsRg89vSN!!hR4%87FC zUmEHC66yc92Aj{yl4f;k6R66S*`XFoa>>{ZFm2}tWN5e&y1$N6r=z9Ez^J&Z#VFh% z!_<+c+ke+k9NAMEM_PgeMrwb&V#P}E$kqf)$hb>Zrdq;ummErm8K+Jtk+rjfIBTQS znQLi*K={MW8z8D>P{ARs+Gy~l zl}qH=_8QFlwND68GmG^}_w1r;L^b2Z4chyieA@1J=@1u8{jb)EIQdg;G`l!9kD9_1{@aA>q>edsuDWOXT{W zEt<{{`2A~cu1(OFEZ^J#Q{7}*Z;4pKM2}2r03(A(Z^3@WcmcSDytCw3WgU^!H7K?Yvq%AOQZ9AoK7lcg8_sf?C;twx1 z@ote7qkOGgIApQPRq{%O#ZnUFy-#53-N&Wu93)syQhDe{i^kv+=Vhq2SlaNxW0U0U z?}TXUsM=VyQ<~JN7qEsQU;R53(m8%!AE>Fu&=s6$0M$q)8S&p|@v{A8J7j%pjmf9o z{UMfUL@MKivEgualG`T>qEwMOc;*#_XUP;Y`YC*gSeEjy89s6 zR|2JpG?iv=gR}(kE_vgy#k@YL{|StInd|IRrI3$&mnrW@8?W?T`*@@*2|S9QyGmzOX#(sKY*Ti6S-Hywg)sXqBDQ`LBP z#PU#i#;b(h>%>G$V!kV%oTL(mOD@j_YFZ=Y$TXdAOwzsCEu= zN|zE?pvTxPuOCpQidpjed%$3;=Y9F@QI*6x$0|3`&)im}ntVOH zcS>QC)-v_9!bAD;@+F|A>~$o{o?n#ub8=08U-c(cjf?TgXa8Pd9@$*))>_$EpR}qE z!)u2p$vvX5Y@2xR1*$nI9va=+W>%cOPWh+>j5aq+!a}WZn$V~SSkNXO>Q$Yfnn+bH zufEn#z{x7sC1HIn&C*S2Qu16-;*kNvtVn#tq{?Q)iNqtC=>TdCb(U!fmfnd?C+Eo> zF1Y6H_IRbYS9Qw^8QMIV1yOI57mIJAr5VSp?QK(_WZ+7cnyEm_HA&e|K226h@bK?S^W;8BPf+w%9XHdEuzVqLbvp2Nt81 zXMY64q+{ddl`^2_b}I8t@s>krX6dDC5_1SCUeGH=hWqjK~zKe!yiZKqk)I(!2v%0*O zC15V~^FA;VeoprE2U!V(%R2-AKM#{O&%o7%EkjyK#1JT%GomVzIxj*rGqQdx!(yX+ zvcWJ6uAU=XURB6v*09My&8>P84rsgxj&sb*`_-l3VAEj0dEH7Cnd!=&Z(1TZoyr?m zXdKQSvks~C`rAvrXXI+!Q<+O-$$OU6pHt*tpDlwwB7`zjPh;XPzFbMgct+q8KY?I$ z#SFRTxI*48)6Ocyp~V+~*vqVzocj?RbhS&){H~I7i`UDr{}g|_aD_Z_&F!^Xp&ew( zjW9A`F64`w6jo1_59(ALnhvd&0FZVVlkerifdIKK@TXhyH z{X^mEfH)@(a!b1os(rR-x7^hc9IT}BfWP*X@bZKR#H2OrbgGrLMUs@5E;OTeE9f1fZvt*WmCc@%5vinXAPIXE1 z`+-^wBTDzA%2yAlh!Mk%Wn>P*gd24i+JS=Ml19?SH|ral%;;i33xkVbs0iBt`~!8#Th3L7ds zoF!8`0X5009cYd2VsYlq4#?oHaFW2W`I7WFB-nt-KHE@J-V*t;2av*;W$1~$!NCe4 z7H{Gkhgu*b9at8%rxlxJyHEMYb=ev672(QY*m15Rhokdb>dnE9V$2Z8vE6}ZW=uoObVRYlbk 
z=|I2wNNJ$QoGgt$Q_ApVJZa}vYD|W``7*p3LXlX7TyYYp)~2}*FO}jiH5#d$8ox#E zsDTC_4v+l#HB24FfPWX`wb|cT@d6_H+v7Sy5SFizJFW+6 zs;&_-@+O#?FrsuHUC7OF3dsj@ffU?s3E45?k+}LQ#J=a`z&#L6hw<=V55m;Zr8%h* z+6amZ-7M+tYicQmbe29XEf(vM7p}0%<+9~rE0K`)?Io<8CgS7;d~YX9QEL7^b-r<>tje@~fFzAYUxCltM#fz*?1DoVGw#Yyg|;D`pjL z_C|=Q^gS{!2pMT#-cG1mSH6?3z+0qRr5k0*TR`&gZ=51iJ_trWD7SwY49>}ymkg%Y zeL<283Z`>_zHy4-S}ISBWC!D1*tJHse2hfhlR*DcaNbgk_sCD*z%UZO47#AObe%l# zEl_LeSY}{C;g_GSSc!b@{|VE~16|m#Ywyx>7cwfWf4vg7+2AqZ4v1#%=EFbs?x11; zif`R($x@hFd7?2?OAy{(UT*`#up_(VSUXG4;Ts{dxtxlUR=LVMkiX5&`>#?X0edA(sm3i zSoSPBl!@cu7|JT_^4Ao4(q+^ND^8?auFX?{W2D?>psD2CT~<}-sm;D45=d?n?qd=QRz9B!|?(iE(XFwh%J zmlG{CA~RSQb zNY#88nbs7q_saQ25FBF`+UG4*LB%3=!JRThZ6D+uEhQ^duQbms-PeQFP@<*DuelcI z-J2~@n^f2It$%J&e0D{yEGkm$-#uP>lv;7(9Q4UqRq)`0w@YT8gyBOb%pdq%Aze}5 zGe9*dimAEP-&;{exa9a>u;6=xaTlMC&Hf`q%`D%^zL0yax;v0)HNLtANU5FFbG@6u zs#f_XQ$yjmsEoER>33SPWDj+F`3h%oJW1Za8@XET7$cJ!!LGDNdgF(hT5_6Kd<&S` z--9<_S&;g-ZS!xRDf`MXYA5i_oM>H+fi?4i3 zljOaw8q2?dUN+K_bn{7QPnEb>Ccd)|IM7+Fb;-B=AZl6+{(b0a)vKAdP`(}n)~Bdq zHa*)9&%yOsFnt2k8N**7gp_Cy56KV1l(-!%gx&d~rR&crG%tkiul&D|g{C=9j3GqR zr-R!eU1BZnN@N!Ix;QxQII>p4M*ztvu&n)~A!@RlPIPp<6^&_Or~GWVy6Y*kpBhFY zo{`efqp)m?e3AkriIHjERHsv#`k+)u>8=bplA)p$Dvo*a*GFq4cHota18Tx2Cd=g4 zU}}B!0X}>33@0PLsmEmkP5s9EWbi~Vmb-oQEy;#x9KNV`$et;1O))e=Zp*fqYpgs! 
z6GkFQ3`E<_2a|SMqTH|wh~;OuE%|!?T#+?aoPqu1%k>tk&XN|n77OhpKW>7NMw7Pk z_jZLx4#;=Af#hg-%U2gv;7m=EA>S!ZqpkmR8BD1UQ$c?zY}rEU{|h8)x!G+p zzh zIJvS1=8}PwCAM^JteootQEMGZY4ZHzs>Z}jaukqUmv>d#h z5=N?4<$S451Zu8TTjhJV3iu{}Iz{zsN_NPCQI=BgBx#-s(}evSX3OIlP_;BAiG7|; zv*1|Da^%0$fX1@f6Xn%e;M$isv%%Bl#s#WqmdjQGO*2}Q^`w-pf~ZLy+RM^4FmgMc zwefV;Qn~3()hyk@dh)$FmL^LY5qU7pfp@)m3(m|}x8wS;}F*i43;`~*z&z=TSTA;C7{ zkt3hNf(_EaYU`KBHChbQnvE(o+VSBh#;eQ{K`X*?S#Ot06I=bT%-vW^^q!%3IS?XcMspI7K?I7?*=f8d|I|N1mUw&nS7ev z2R3dlUh9@cO&}D~3=+sLF$}7y@#(s-BTS1{wTp>?o4UZUodw^62C9e&Tc=EZ6t2#S zW3yAy6DnrUFk1?|k{AgmT}R+U?8_c7tSQBShkGloDLyFA7_1H%pmXXeL8*Uzru^A2 zsF^)eG6#Uw&Y@1}|FlYsA-RKsS|#h{iJ@RMvV1Y0Za2IH$NtyaiX#pfd<57R&g{hG zBtkUF!~{8SSinge-7?w@(ZVIOy*I=IB^ehzTM+WOGS)|^DHMa_=u9xRba?JN`86f$ zVx@kT3LSLv?jUwEpZ{AB$1u)%ev0axDV{7Z7_8>8%9fAA&j+PuHR+$JNQ32C%`>!X zy2dnze2RkDzbi}LHCXK)#Nf?4Q`P*rMe_PAFy*7(XG`d82nAxRuycXxc-bwN62$G> zhsoHLgqwE4d?jCs53EwPn&Q3UU8{IxO1vz26Rb|;c*L??impH=41S>WfR(EkfmTbDm? zxqmaW;J*`sLEcmk-O!JJXk2RGX=(SD#)WuzzT$3d5Jp-+2R!`v*ao=rl z+<2xiQwH1tCP`OK$rQJYuMeZPW8E_D9);BtB(9-CI<^~H0I@g06F){)J{;6emVckn zaIbSTUnY?d^>{Kn@E>_f)yBFcv5zHj@{A?)g{p&lvy1U*i&dF1x@`gdQ zdL1#QCA|F#0a~PrET%{sk5Fy%eJ2}S5R#3f?{&zbkt*UdkdsSZD;`T<3+0gEf#z8x zG8#)O`DsRWR` z>0BR~Whsv|OOwY9S1WDjWaE0UDSvK`f729M zztvK$p2VzC!+cdW=P`O1jMonBkd}LZY8Ep!&b=@SKwmUl&K5#~O@i0qrXr2x9U~R{ zfp^utg@#*JP`2Id4`Yk{7jF|9G%lB~fzF5Ensn8h(#tSSErG6Ra48haFKx_}BZsYs zDIUCh`v-7iMG0?~KMc`SnKI88&xdeqZ}qCgeH7Ho$J#T71g&Mh=A|-=`Zus4{9~mK zUBN0k4JG$vUZ^9^f`c!FnLPamG1&1VXy zO6k8){DRw?N!(Qp1C}s?OS~GW*+)goAJ@P%%Q(I)UcKI;$?Rx}yhWq24a9M$6|MYD z`TZ^xuuq}h-BuJA%h{R#gOY9aWCki9H-+Fi_u=I->mjgKjn~EZ+?$5u9QF_$ZlQ5} zNvyrrc^IM=`EwS_Q(-DIlV9>~M@yE*j-gk&sw~~ZC_eN_2zmL~Je?P%dibS?Jsu1D zLCMNFRxEZcFmPNTRq-0bmzpF8Mk@B_ z=8G>8Z2X5QdN=1l zvCV8*yb#1Otm-_tN#mc`Bt5sN%GvD6GJ1z%vn%D#-C!*y^9`LOvH;F_;uZO5k7{|D zsUKYeRh7qhDJ#^ZlY0U4>`UZ+c$8+WS3qV>QyYXV>uZtR9F-89T4ydkU zwVBEApHz=o^Of@3ZxAdkebX&5e}T0S4j$34zoBYX_XKJF?-e04l)VfkU$!9sdDVTj z>7UG^8*yC^C&`q!5pwV*Flwbm@@X9)$z>#%SM$+ZRO#H3?Q+%aifc+1%4&nvhJLh+ 
z?qX3l!@4K$hLT)HirjdQM)ESUAABE}_USKN>@O_v7iG(}4_NVztY(S1k;XIat}#%D zF6pi~r$^>Jq^kbh4gO7;(!4EHOC8xyT0A@0q)C@oo5%+pAe6Eud#k+F z*@{!PK>iCdGb`+r(T`fPL7bI+LSy+0X39t1!KB^R94a3e4bgHt8OpVON@YprK81>B zHIlgzK!#d+m$H{gqvxPF;+2_$DZ0{vCuo})}`4*r?}EQ97}H%M-V2r;MC(A z25WvTLtBa03Dr`kBumT)75X>im?i7i;w|Aohdi1H!)yj$iSlo%C92vYQKK~;c`_j^ zJ;y;Yj!Dt$G8Kk|$;Sq2R^?=u^$?!8K=yv0qe7s=80k5?M)MaO7y;2Wt#elj5>LFXkGds({uS!8)iC9M6+`oXn$q0}1!#+or6Ic%UBI>Icu zG<#M>?6#Y1An9J5o+J4N2dj4n3%omqsv3*9EuT}hRK8P-UI7Om=iCB+?m_7sLx`3( zI!&H+s;)nOy!>u3o<3gq+4$qTrOgPXw6y2hfbobMsu?mcoh&=kRLmll>7qBLL$S@D zlP!0T2AewSER)6pgU;E>+WKzkmPv@_KhP;HCR(hQQ+~O^%I8YzWGgNkG0aNtqHIF$ zta~#ix#P|J)=HN&nN1j(RnL{!IY48>o?>}?o(8iyzT$Pk9vds8Hdt!09Q`$ntkM`T&B+C83yq>{`f;1a;WT9P z?N%IWcEC;*u;O%fHxL7AiZ}Y?oFSyeSBwlO1Z!IHjC*2=f_f#JVrAL^2*$kFPPQAU zYKa`Re4rE>aQ=1)eIKm-2NgsAQECEI*O43}t`DH3!q@WB<3Oya&Ss}syU!rO`Yob< zo1TP|4#z9lq{)_o6=w)gKONpKf1ia}t-gS1!dos7!f5h!x&9&ub4s@|9b&L?^=>nz zT<3v+DQAmzot>R8>*_+$tH~~qO*bnxrCxojVh2Z=p1KW;xiKCoG7tmCvZIFc!FR#5 zoZNwc-=Do(WAS|;?-{JNvU>Q&1D2|*bR+X@%^MLAC^!WzHAMY0i0=<+3dN`-=2+V{ z2b1{|Pe`7DT3#*=aK&?2tra08;xfx;9Ug(G_r|(Lx}{!gjamApeBS}6wxsyv@d!(? 
z{2-s}>pH@X4@#!Upe`DT9UaXdwIWq+l+O**)GMao$LG6hq*(g-y4@|!53}TR!?2iB zS~org)N+kvxiC6NSq%Mp^HK@vZ%NaSE|u*ApcE#s7VzajNU$la=Noju)5_yq$-X2S za+_!2>I+u4%~xuu3DGhRW>`NIh7s)lTVNo?`*z`OY4V(6rlrMUFcA{HrMvhx8V18u z-f;1(vR(f&RND-@B;p}j&t8xD>UhVc%5(z$ZIk5gkrvHx#>m8Eja#t<=Pq@tY#_5H zT<;K{3dNX<8@9+}>59EfJ>?m!p7Qc-ej-CfF1ob_V=R%Ix6k)iXbhj$2ghEKVp-Iy z8qusb@5)qct}2^8(NgQn@zc@;K@mgIwo4V8@BY!NRn4E5>(4tNRcn=+8^KyZBV0+zawON{W-uZJd2qZNaw=zbRWB zrG${IKX;ip-_e+Co*Xap--Bpj*p|R$14mWSEMc|z2&^^J4|5Wa1w~vuanHwKHLagZ zUO282wpGelgH__;_*0ci(wO>)I0?q$vaRGhG)MxoW>0*fT4wupRuI=rn=cCtrYR2} zE{(qesx9d>rm2Q%F5Sk+!gH#}{(-kH04b=GU0bUzT5#a1?Ku=!>S2L=%$&Bk^YGaIRr1);MC>zb> z;oG353TXem?*vn@$|Z8~E=$Wx)o~)DHjN|uoU+?c&7f?ZoNovtiAyUBW!QaSl3;6P z!$v@TNVz?P%TJm%R@vd1QrZ-#3G`y-`XLp>q)MA+3Y&$=KL)DKAg`=z4l{PLJTRvv zm@4y{{j%@0f@tylIa?&GjTMVolYZ?i*2^RB8Ack1N?OYkAz*D`jL`CBKnFN!1~w-R ziGb*s!b}z%n?%!?XTmg}NWLALcZ8~SgHvT_CrjwhpCqptN@CTM`MN#USqWPV5+uEg zrI^eJ^yFhueI&k~wf#)-GFjJ+kbsSC#noN0mkwfDU&ZF;l{Wpsns@npS@VoaYG%xo z&4Vln&19cpRK}4_()4+Sjs&Uq0x(cgwvXixQ^I$6o-`j0QJ-+wE=z8F1&%Fdjk=-J zk}*%zQJ#+1h-@>6mFE&5!N-6D(^-a^y4t%;atziqSwBjX9WIUMX)v{${R4earzoK2J5hJV3qY zgS84-&Pc++&n{N!rB&+e3Zon`F6q0>67+P+UxsN(0taSp zSOr&8YNoA`t5;j%lUZyo9kc;Xkyt^0r#^!rnsZmJh{LE^NDQrzPpmFn9SbDEXP|HtQS#Ui5Umt1yUND=qtro`HIobjI}rdinVT<(DRQe zHWNQH9tUgNWnb&r*>k1Q6NHcoH?OsoRy`q_B^|N6V<@SN;;cwsZ?G0K#UM4b%eA2AC_@9+WZbKW&gEK->Iqqf8qD zQ%|rns8IfS7OrNc(1RR#9!i>=_}~8$5DU$W>lZI;aK!@oc(_JjaB7w!A9X-AJ(rm~ z@Wp5>)1mGgp{nGZ!Nw6ufRc8_WH}Io9DmuAXeGd$Y^wOvG?IT?j%lEWy-=*E$(|(L z)4}Rt^X{kREY8pnp23>Ttu>k3rP_p`-G`b=I&V68nKuBEe8Vjh^Hd0_Po zyPm!B%zQZZaOQ*S^9;ezR36SI3&AA8CWkDabX*L>BD1lrVu`E`(uA6l?TnjzOEq3g zPAdPhOf_+^xh&1GQj>wQGxJBof(4_8EO`^E#ssDS7Ohidrk0!I~nCXTGI2XUZr0G+s^dR=4=~L)7_^W2M0%7>@zt zYd*ASooh@0$(XBscD<+A!4ayCN5PoF$UUACfApc!z_IyEX?9FCoy;xHGdNhg>t)Pw zRlKxfpV|HS_GeJt8xW{07Sw5!Gb&YyYVhxwB=ILK8qR~i_#_mQ{JDj4qz0^y?T9g{ zyluZW0eCNwng6rEV14B=Ug%OyJ<4tf6dGtI)rSrdrH@%6{INSAt6YzIzVaH~ZV8TjqF4XQO7 zl`1uNSgdz%St_t#4`VK-D3G!Ds@&`H^0J-Q07fO0u5rnlhKl_g7E0R(z@{zdGd4f> 
zpbF9%e*f24aUxH7Xfv>8oHjU6{=M~B4tt}x(Uh1@g>0%XE-!LPdEN4xS#!gI? zIc+UgHdk&6fl<-i()C#fpeCEh))RNQMg6%`WoZNy``t`7HRuG^vU1uxi))Y?bXI9i z$!w|D1+10K135)j7*0$x3;AbPFnLriU_3rYW zF9yw0GFZ(L?cLN%auJuZnC=KRnvxlOAn!Vd>82G_()cm;%rB@HIdZKKxJkSef zT)&%{^E>*fnuCpWdxn6?BqdSOpHs+{ZT|)#cK3f?LzyyLX&@zHDafBYUaor)ifjBu zS^mQHewJ099Zm@Oo2@AqUICjN&Bo(991t~>^*V>#7z5V_Dj{$_=42cJL2DOEqZZfBy5Oa^N@W?PsvPK9gjWH?jJ#R)x3NH8aJ)mg3=t_hbflmf#jfjRn3 z8%)c9Qh46obw;a%`gn>_!?-LfT7Q@PHr^5@GAH)g1gKhXhR9C2V(`y%C6HP^ zzV`Tb{`W|y_XtoAvgM@6FyprT*|O~eF!_3toC)%QAqdXp@0NlO!P-)u4Qw0uzyu(4 z@>XeC4psw`9n96W_!y46+lR|I15x{W0k?#G5)2NJ@t*^Ojd`&&`~pUzqcdDVd8Ki@sBV zD^`9q5UXNoS@pjMlQ~K*>1qg>$2mthd`VvEW;ls48Tt8TAlb0&d^&5jeD*7pB;t7E zbiB4ntp)x)b7j%>P(6m|r0JYAQB5Keyn4Xgbd4$7=hRZF0}$>Ct~KE7WHB2x|QYWb9Ubb3B z8>S|wFwbAFGgM0y<06N549B8)hA}rjYRQ-ZWQwY*M)l|A`SaGv?cJbwrnHD6K4A#) z%;R6$T?K4apDyits-R}-dfC|zY|Pg~e*K~|x%^b|-IH*2-hv%9tTe%|$(44wC zWUpZqiQ`IZWtdB&mTe8(ZTR0vDArbIO0Og!2A$crOb#1N0vR4(Qk=|B);OjdyC+HU z6>9c!><>~ia_1BB8n>lv=3J7mP#>FmS9>&*n(13*UK&`0&}w6w^fW0n z9E;32Ejj~?p`jh6d^C{q;G8^pZwv(Q2PaWu4F$rlB1e3#^maV5yjdthp}gUc>E6sS*% zK#HlcVoh&H2W$H;NiJ0wW+YBa=>hUuB3!px6Lk7tsU#U_mUz6Vj zaT->aoYQc#)#`z7z?uyc&Qfs^YJ9&?-u*TRbLI2zfNC3a`E%ry;p84ii_N&zlfzp^MoeuK!fk%ttz}ZW>4=Hn990eho~E zQx1vh>zqEV`GuV#NvzmmI@L zuV#9I%(`2nun{j_RyBkKEzVo$&*O(G9w0U_Uz*_LZKIz{WM%LwK zFV3bW5G_EOgNs4?G_`oVhr3(~3@4cjJ7>ttt-zW?0`n%HJz`NW_5EsV)iifN-`qy= zw+q+EkWj^r?@BgPBW<6A1)p9|D&J8zMG`;;9okEi zZa~f2OFP?cC?+O5X!V`CLySK+Y?EFFt6mLH&C^jTO63b<%br@d5# zIW8&ct+CHdoqldbvCMl)Y2c0(ZtCi*n&$XbzkXm6c{yC8kjP~%|M&PGeMVHifQS`woIo`AA7vfnT)@fS_-7cQ0e3?*xG z9sbcc#g~?5%H{+xUT4og*Ef&UIOebr1BanWs%I`{_&JC@bbftO2-gPQZ;Y%qEKpTD z<@QlvZ8O}GxK)-Ij_mA|V(H~od~U@Sxj7xIZZbR68yTtzmLHUtuL#YKol#>|H?X@u zQ$8N2$@rKOZS)#AnB76n(WJhvs(2vH$9CX1pp=cBG5Kt~stpp}1PMR=`cd$YJf7xKIX(qbOQNJ@&2_bRT-V0>#42T-S=bT6WHwUieGc$8< z8A?huQzyxQ1z=JN%>N%WD}r4YS;}cnpQJCgXsTzdykRK$c*n49EOCjFsywz`9b5(> z)vvcsmDH7B^<30noDQ5=~oYgSu=&h6#iwJ^1lIqyO_Wwqw#(yv#nHrE`?SP#`?c;zKZRj$S~eL=fTV6}vc3%r{(A~ll8ewsJ7Ldh?1 
z5nSH_2v#2BCeJ;V+6Wq3`Cf~r`C{ayA}EHr=rQjs25a(NxJo_9%yHjT`PMK~8uQCu zM@oW(!=aM8A3|1x`^mB(Oo*5A0~(6Q9(o&SvPh1X*@wVdI({|8Edxs7_~Pu$g=c5a zkhcvrZpxl6OO9Bo?$R}5WWC|)HeU*dRo}J5X%1JM?0FxK8QfH{@tDH$Ig(rs)FPEF zll%%8h2U#-l05#gYNdN*{3k%NJ-ov$VV{D@W}w-}=aXT_Ra8?lUq%`nthhb9<-DPq zc37y~@|h*f@&U`;=rX}^CB z)$}>sn)j6@r1zIcPHDVvrp%T4rxkN2h1_sPBbnLUG`Z{RAfpc5a`iXLXfW*~q`^fP zCRMI7hmF34Xx1zy$iDBD@&KASs_DO~YQ-XMPx$dS2##*sM5g}^R12Tx=FUH1TDSZF zgID_g1vkZH8GGm7U@~JCWQJV*4}=sLjXh+brqHF8Jb%?gwJEU0(<7U%g{zbe5h*vq z)J6}(3&%~+d+Oe9*6jZOpNugAjm0DXw_$1(t9cPJr>-UKjoxq@jaQ~M2P+nqz)eCc>V+zgO6m}$jUVFFVn(S=(%wP)6P*lrB!1C{H!DBB!&^oNp)@URtwI9L*H-%UW`0 z3oscbnZ@#cEj5n$Hf1`=^DtCB(>qO;wT9I`i;UeqG+bZI-IAE3k+`-5kcB^IwTx=7 zxCM(@dkxlzRoki0-^1ZrtZsA_*%46f&&yW_?u_1LLQwGM@AU7;lD|7z3d|ClGi|H7 zSSi$Gu9eo01@$-VG-jWC97;8CEPelzKuj9L8!9@|iWJQdBcq!|-T2{0L)8j){qO-I zKSx;#DMWAC0~&B(mb}mltR> zuz~8qz|5mO^|Y$`H%&LQ)u#<5%bJ-pW!E6Z{#}!$%3!rM@Xnkfe+(u>4JgZzFNeUW zQ#vG05)QkfY`k}njhuv#Eyqe+f%_INyhH%`oX(sk+g}E2(MI_g+=jgZ$J|k@4?2Ls zZ1Zx=zOT#}jcB&|mKq#SU!g z6LQVU&0xz#kzAi;srK{8#fh56h!N6n z5|D!N!pQhyW5vV*an^60laexwVfk!bVF82|mob;I}Wcc;L-YVYtRfae7i1 zhtF9~2-$J5DXCw#0v2?vCqrH|RBajUl)Z*&nR;MKjG2PCypk|u_*jl70kQ1TsyPz5 z8jLRj$1N8af)1Bbx!t8wA&5S@?)C05Uzfa6=KG-=GIOVqIR_m zYbDq1gHo)EGcu*S!Rp??T*;FCmTEflUKR9W<|2s=?d%al*bHH-2^ke z{=DJj8aNcRPd2_~MP<_C#dl%Y5aSswJ>LW4&F0NzuYsiR3T$ROaugQq05;4v%cCvc zCnQ*#j46NpKx6iH%fBCKO#Iiez0^6TdThEr%a!GAgA}2gG3JYgQV-SUj);jt61YO) zSgm9(KgD}5XwPD9oAhx4WHd6@c!^H^r0Vv`A>RIWIl z%o;J`aH-&)$9cyp8L_8$l!YgWX~C(;c+L9y!W zjvaEd!J3fU$(HA>zo{y|O=FM6Z-!Fvv;^7myT&r7Qm^?7jB)WUz7v*T-7Jt?>6>G? zcoM9IjAu~!-t|!QSkO?gXnXW(Vo}{K8Gp#Ymm(N&W3^RjAAf zd?atZ$I|!joghaIC5zLU)1_GhjqA^w6kK?_>j9Oz%(eLjV_DfYwjsm>NnjW9t_M}? zOyMlKqY)Taa&XrTb)ufut1YN@q8>6^oN=nbI{}V^KPD5Ufr|i>9;8_Q(}#E*k1* zC~5LDlB=#rx}mtOGZM0RkNfa%=mOT~k>$u0h9Tk1p%fcTe6D?-|G1@8I#s@lglSo- zwy+h)+g%koR=UPOnnaR$TKtC4(0Mm7$Ov4nGcSrD(jR$vk~|y@*6KJrWxLrR_-s!? 
zG~r|iQyjZ{!?jAHhdd)k4JGo|J$0Fr7_HYyEM5w1*S>& zWqm%*P)ZWd9EX9La))sFJSgbYOpcCHL2BSA)8cfY zi0AEKx)sH2obekHtWneW(H2QvbVW5MPkJtfVnN{Zr0G(P zXSR0lG+5&?*}!VWYAaqhEZ?yPYFcqe>9Q8c28qBy*-gg_0{bEQuOpDE^%qU_7cTG@ zO_dkc6GHu@dB-qT9T#LcA~8wgay2#D=toG0e19vya&PI!L=R4Zx|xn0a|g zJ~shsu7mSe@#RqROdBgd8W^nJ9X$2jKZTPGS7M||+Hp(B9*ZPdS7mX2A9;&hTMfrT zGXt3{Jq*)K!dMmh#!xb$*WY>qh(EsHze@fI;<0>Z-hNVJm_r^+Q_L{bR0GR8KN}o) z0@*W=#s1UcFI4E}j`nq@G@f~v?d0p+P<2%KN_p^%Dw;!cZ4AbSmxf8+Sq(N5UnyTJ z_UC1bd;<=?*>_iM=OGHaKnN~im+(}nb5TiL4c8ZrGlY!H4Qf7FZ&)xUKeonZ%*byE zQD>%`ezwMNQVQ&{-}Svl#^}l%S^p!9vYN9V(LaN+vQ3C&8A$Ni?0os`7qB)JFB>tq z)awrdf{l)|YE%A%YHn`xv$X|=6ZbV{fa?DRR+mI3%g=wS#Kje?zx)Fx5kC_OtQ!CG zpQYH%C4XGiJRqb(@>yc+b}dv(nBt4!#VjA(LI`=(Ox-07ZUvhPUc@|{2D1(-pWX&m zvskCxA*b(xtJU54=x=d1R4wc2lH2Zq;UNy9rb_YuteDB>D!5Gzte9>-Fh0KzO5QHc zKX1BUaZT|K8S#K(v$Zq+K`V+`?JR8s)!~%&qXS=>Fp@F%>OIpGOcA=ald2%(0-zQT z5vs*DcRli}FM~~p8XeJ=-KQ4oJwhHgjP%S;RXorfOnN=L$rJ-M%3vk~AGTPuS2{hS zQM?SrD_eurU4dxr)&X#Hv{ZrE}P&FsRY@Xckn8kTGvgO1TUOgEe z(H*(^ifg7a`IH)BLV`BacVCEts*!y?@=CM{PiJj5zdW97Xt1s1a_IbB!zqJ*d$zpQ zOI6K7o|_}@_OVnsZta!JhSxqFd@Rd#eF?yEf#s`X{UD}N{*S5i0I#ap+VHhq?0s3T zdc9%;D^&z5O~fljP_XMsAqgZWF%7|%&>{5Rq=pazgdRdy0(KEpq^Q>uA%Ft*UjO$y zYqRq|K8EjG)7F%|&+M5sYnW3%-VsdgQ}XPBPC(bTb%bqCE)J69n6??V=^;;-#`bN} z!(mVd)^p!|1RCf%mzw`2PQ`j;SIy&9_LxVG`}$pC-#zYuDBn5N3ZM2sRRw1sh@JQ=6KhBP=-zb~O~iCT7CP47Vp8vw*>P z$g>8|ctBzw`!hQX;6v$zT$tyOmED(LzkIkiL2UUz`>=ONyK{2_z3$-oKo0cXM=4l z1dVr>?%l|*&R$W%lI2eSrtuK9bnyb~DD0VHLmS~@0#C;|;rLp8K(%~@2hrCjddkD& zrrRgN-X=NR=Gb*okkD_2o-IllI;FQh2$E?6=DkM)O8=*`_jkh-Pl(4ddBOO2YM5;X zhI-FUgL*%-Ot6&!bI(&d#3eb>r64xuv34{55v8e%#xIc;X z`5wBjaPGcBov4J8uWr`uyZ}tY#ofBqDf56{$$jijVFI{PENUMGL~C=_bul5{FmmR7 zbGb*1+q1m71me}nOR-f;VT>cj=hO;^492>vfuv_*ET@jiYoK10oCu$toz{h%)4cO7 zChlW5Cljv_>Gn;nB;HUtHfTL@f=!*|?0l`5zAur#S~56Dg?*|7D8rGmw_gdfj303+ zwEdExQ{3QiqP_g8$K5s0j<50%dNYF`hpR4b|(B33SfqIlk1#;VIt0j()r zwtvlN``2DV>Dhe+qcF$KE*t<>QEOcFH6WL>6pO_(yDO1*U;jGWVl zxk83DIfR6~IF-UDp9uQm9p|zyfMjwus1kgt+{saD-S#T@_Os^4Ok-yClV|f z_xsj^V#U+#wdp_HsZ3O{$9) 
z+7&;6QSi>&t@+O`u9_dQZNGqhpe1BR$+E|<9$2$vjlK1|r@?aJ4?&bwJ8~;;)gKs^u|oB`aF8oj()?(=bUhMp?;hGey6Z+bDqw2bLC~{K!scQ?gD@t? zxhkwkoP48LOf3`ksJdkSzJ?xDJ!GLhBCJLpv#Zqd8$rB$ELZfIGomRHFCQDphz+>K zlj@6kLx8hc%C2aJ9KG@93FkVj_EbfNMr>vQs;|d|%-J`{;o%!+Uvm9u!p{FR%}~c`W@h zMZ1Twdf_tnl1HE#vO*GRN=wKIBXh1MV&94TL&z~fE_=(cf8=`lL^;;Vv8{Pw`WtVw z)1HMU^|KdVB-ERQALM~} zBj{D=@oB?`oaY|f4@$l{S=2qHe;Ago`wGYHD=fEBqU5!&u#~~GuW*oEKOl_% zASYG34uqm_Jse=S9t>tIy*JWsE^;vjqITwRFvZ1(cCv^!J*O&q*&1>0nFOBU8%IEC z%!cvf0M%k1sEw3hTcQIV+W2wzDUA#Ok%{?81p)%EMnC zch*{~NnzCPX*e)`dotV~?LNs7E&-~Vf`B5iVy`N;;nQIh6VVaeq?aL-ubU@66n4$J z@WPwv(e&+R<7Pv=hWU)n>7uS&L??n%=D=x~s(v<4&<6t35uLDqQxb}ZV3+!Rac$_P zxUayCB_3K+Jk@S31A8fr8r)>Ji;|FQK5W-QFiKKRqk-h0^7d?=sB~yYELeAs)KJlP3=}ez2e)Z*!~Kb=Pqw#)@e1IqLX#t^eYh$ z#P!EU9K?q_NBhA?#DmuZpH`e=Dp?mwTIS)CgbM@KhiQ7r|6OzgoC4VLoM~rnhEVAv zMYC+LuxlM<7kI`N59&pGOnk$GxDW|%JNEvYVNCrJjd&}hNu0$$`8}vBy_;o0OWhs@ z$129#LF%59aE*S00U9levqH3%JkKDqZD3x6io3`7- zs@F}jdV9cBBe0BmR@9^Nn>@p|?)9kYd~Mmsj&&E-d7pVQULd?M?IBT0+rJi@zUnYY zb_DfAX+Vr=$J#R@-UGdo<5v<)m&Bh37VY>{+ZZI5Ed%4E+vkLnvy8!Z?+3ebSz9?K zanpe?w^;F@7@G?%4tj2U<#IDDhiEb-@X^C*yfeRmd7tIrlCRtUqa5rx_e(-4Dg~3m zW{1JSP$?d$tbdS%!g0OOGs0x*-$%n3zAI+=lDK*;<>Tnoui#RBc#$>v8cfgNBkUjF z08t`7WnO}xSf{jPS4RJ}-+3Y(MH?Dmd5!M$y=U;jTzgiKd~3=p>}O$bJ|<`y5I-QG zx!F)!Ww-qZ@zsp@CETm9PwFh0Yq@6ts}lG91P|H-BZ;qnhSFdh#bQAHB}Sl}Yq4P6 z{ud8K*Ig6sD>3v)&U@Ad{R*ajxIjGVcc30gdC5o;IrhlKe;qR=A=QzoOKs+#VJHh9 zS|$5L67s=MQvJVy-gS>~^8L7a@nm_p;g~J#A(-s`bUe(9h+j*96N?j&colhUr{J>< zjz$=*jlh-wP|o)x2MXrfTf#xd45G2^ zLJx~ou=0Fy7?#5xh;{!LoKiW=vi=f}_T{QYR&;3?Ex&42nw@wV9K|u0n_%aNP;_vU z`n#8VT%N)ttpMlz2afh>`1UImW>m#joHlS3&VZ{Cc#Ai_#rj_ZQzf{`13%2STnnX? 
z%zPfAE9*hf46pu21Qo4fnGBzo)`wDnE|6V*ohRWPGt=^~cfxca)wYU|h~`b_lkw*p zJkil9#8Q50w+4zhksKo4@6*wCvN_EljMf*gAlnE@8B{lPuvGNJA z@)eq0ywKdE#@A2Z74e#J*u{pn2r1|4?9+h8S1Xs=g|~&F8LT8$h`RC^_@~)9cZZyl zgjZYVR&amNJ7(s4%X}=E_gS->Y;tRus#`MMzG~xG%NvdE0i)G{6{R(UhTHS)f@oz+ ztWA5chW$Doo8tRCAf5N-C&KXn-Z8)4AEaP+@{SIUYX)JK)zKrx?ai<3>}1C@QZ()g z@rH<%PPe-s3~46T{GW)DdHhnCwhuvqXJRl*;d8{jPMxwbfPkq^OE=k7-NBw|4%4Ma z9u6b!EMS2hMxGo7i`YOB?_fDz;7G>wN02aPn>4j%PlV}M09)}?7&RxEnNE^D`7{zQ zMu%gIB2YBY55hQM3SkS48>ka&;|(jdp(#+WN8B4g`&19sYAkc6fA)m>TfA%hjO|Ii z5TMcK3cD^HNVf6!@r4->)wp7X^~>})9Qzw2=vr0r;~dTQB=Jj6uF3HvRb?4=S#B6F zR=UdG326K=UvEwGJQ=PKvMou3md?scW+kOXUr)$qlnx7hBZ_()cKG&rAR6T|_5a`n zaIhp4dg1u(MTA~AuDavu$4?6ocw1l*x7O+n@Ep>xwth6g)kB8bg#(dODULYFc%IEJ}{%Bk$6?xvE`Ly*%EkO8CXqC z8V9A6stW5k5$KJ;62xdr6Guth##ljsk4)nxq{(5t?cfwWhEFVB&;DdA&U_ zH^971rx%0uKt)*=|(q%5Sl)E`@mW ze7U^8?Ogyp)}ZrrV0FTxF!k<5B)D!doCfTjW={zEXEbhy077Sl>uzt z+{yyDV}R9J=MmnYX3wt=vx=4R_Iw4((^OR^lcI!IVJI3~zSIT_(;$3(rdY*hh=$=U z)@&=#b>R%F*_%*rn;edFFl1kS%M-FcvC+6;h0mY=J2C!J2NQ z{UqXgbdV#BzxTrZ^?h#}%lsG?Eb-46S3P5O^^5`bqlBd2I=gDIb^Zi`qPthwOM>1W znxUQfKj;Ogo^@n$@zL=Em>qDz#L;1RtNzTB?-|CYROLPh?VLkrURL8c6NmH*g*~qf zEO1{9s4Q?9bf+EgP+YvM;!S?hK~GpSe64j5MjwvcWeR##?!42sAM!}~dVv&&{6Nts-+A1c;S=rG@4>;7$H8lkfBYp0 zHBCscfBobTTkkwU4EUb3K|-j-GQkFWUBuPuAtwjt{~Yo?1Nqh7{|j6X=CH+fSlHvn z&j4)v6;3;H;EpZAZDPcXa(&6+-@}-)3AP{p^spmLQ|A8#rjg>S7?t+O-(l2k#W=sY zvyX?OxE5$r%}zL$5ijY@B0(48-=)q;9wuWRyF`Sr_+{{%2Cd^^@teWUIT=Eu^F4K) zAPw=_>@@q~6tL^XFZEJ=R;}xv>Z%=ibV*cJ#~-WMF#aTiT_mgTz;0o>vezIHrD3?DB12S7-<6nEX&FV zE=2AMaIqhk``L4ns8;wIoFwe(#=Yw;x)d&j%F8q-Z})GHdf?@G_J=Ua@v@p^2d{>B zu1xjh1@^LQkoa#*PI5vd1>20q646DydYN4d^me^B&C+UF>oi*?hCXR5`0lL-rjY2C z{u%a2eMr!j*=)Y9755s)mJYUuu7kP`v64zHf4y;or^YO-$lkgULbgZNOtyP&3Zv?f zF$1(tBP6a+<0jUpmTjAEott_#Jg;2oa*Jn^hD~l>GqCqTD$9lJAV@$NdAwhK6DCJ_ zH%qoIEnrjx8<;%1`8J3v-j@~7R-&ZREim10_n`P>af1CQ>Q&B&aO>8&cX`wte3H}) zZ~}9)^(|d~c>Zwvp%vJx!d;=yw}#Q^Rb2hj2I!e3M3`T6ZyR!ceWDf>M-O>wdO;X{ zcP-!$#PUBUr&C*<9A5n%DF)MeO(?w?)kAzY&{+eX@qyubE2LL^Pv=L_e2b2ZH 
zhu73vtC9qL4$U5J6C#fF-7z-N6Q=VvVM+YoNgl)lW;Y~*z2h6+VC_;oB6{i<;R#XI zuXv)(NDU))%CLjAtZ`#|AuSB6Dzq`ZV6+&f3CXr4Aj}Wu+i~fHc!lDN;RD6v`A@Z* zG935H%(p&K&!JHRZqzv{;)y!yV%oDa;jZz07#(JNG96NBB}@p89eB6BBIa$|gU#=| zb39`8kQKH+-?8oxZ&(2K?0HA7vNCaW>Qhx_Cl`ic+)R*Z%fvlPU8B*hzf(?K#8$J` zIv@%22Qu$R~H1tuBIleYOv>#=}Dv&9+7(U|wJ}InAm@ zLTTCfwq~zU5b}v%d(G%*KE^qH$3|ept{dxVV=GH_aooOuGEn4?>bwaasu5X+Q~ya& zlsz!1!tR{{W}2v<43;8>$6Yv9P4x(J5_2%X@&mMXn#*hEZLk$Hz`WYx$2v4iUNzGL zBOIP+F+0Ev?APXiy;>c3C*NA)lmfLn@Zo%qSu=X1eYU`HtaOqcU*^#c%^&o|oH16i z5Xv*kA;1}nfjkL#Yl{EO7ukoBsPX3wx2u;p*5wZ03VX?^%;}de^&p*cYp~35HX9dX z0<4qpWy@WjiJ{F=VehLQBkXH2@5#>T*m(3^0Vng=%B5=Yf369Gb28I2?NMqr@T+hzDi&L}^2b()O?x)RJS?Bq>Q|25*~M7~GbN`MYYIQ+ckl`t;WQg*kf)SEfp`UrdR zJ@POn9kChC$V|z`HsNa#FC&AWg`a~koG>NYDOH|HFV-4=*b>I8*QCCk_L}Dr_l^FS zh}WMD?;Bqa+4hNc!5bbCyS|8x7xCJ%v8zR| z8$_vod_iU{Gs#u&BA^~|!}Rw=JguyUvbI58rM4MXvIFKVz=Fa8+bB-tn)1m9Ca$q9 zqIy(UabNxSB6QB4HJ0(d%em?z$Hs|x9@uLp+u1we)TO%rR8CY^_n&QdN#g5_OoKU* z%}2*Ol94sn)92XbA351ox@)REwHtzlQ9cT|>s-v$|9sFGE8PpF`?4`jKI3DsXN$oE zb1iP;o+laEX6lQ1!6%+JFs|$TKWNY!xpvWK9?0gm9PzD)LA@3%N^v;0+9M~%2lYw& zVP2HmQZb+W;t(=aso^BcC0~NQ|5(G(bmn16T<=62hK?2Wuh#Y)jAX-}Ucc=yLe(@y z{z>i-BR}@{t&^~8laZX7ZM%b*yy#lCvwBAfr6-zRZBxDiQcQLKiPima+5U}FzHoHQ z^dV7NBchF^x4(5#x5Z<1?e*^<6edSVjlKtaVM)n!VEGRr-y;jufBy*gcI%X5+r>z! 
ztC{{5_GU@S)lp1Bo}BETX>pJES^2Gx@mdj)lf5hFYBypeXZY)DMRv@_22h;?7Mp{{im z@vxrQ$M+LeeoL&y1s=lAK`xmr6ZJQro5VD`2!Xe!&a7}8GglJN>u_1Ay(UU^4{jW3 z&tC%ej_24;k=bQ%Jv_3ryY_O3w*@D;Zun2g?r&)`#Yo5kc2r)~e-U>z^a|)}4_|@6 z)xb@_GFDZZ)g`qrb)`vZ_^n~ z5O{ycTH~IkQ2$zFxThsD+x+~NP{dCxt3m1Z?`DX|u@}Z3{cZ*G9QivsxELrO5Whgw`=H6KHuWwKWFr%EAS=Ba zN`J;GXT~b$a?`L~*UBX{m0WnSq`X}cFO{``CU#uwkf&5tWZT9zaO!ZVq|mNx3l20| zAJ<1`_uAisR2nT^n3e3pqI5}Oqxx622ar;=Hhd2O+fj@p28-;4^Mh19}n6v9q}~OAHBHuoQ<|a64y-b@-B&l zG**n;dr|5cTQJ-@C4u$atYcwoyohI{+g7j|ye!$H;+)xz?*T^T^hmEfT>$@}xECTz zh@XOBRL+o5t)+N$xLnZNi{dm~az5KwKpzYL&$aVBNxez1yvwu+>2S2Uqj7`AjT<&- zbdAlI#7mU_{S~6*xOu+y%K*~2iOlVj?N2c(_<6w`yD<|S)Jg7kH(I?c57mBXV_`3o zHHU~@{S2HEc8uD-mIk>wpz4N-3j0a}$g?d6IgMI1ALk`4 z^J){VVD`OA9ObGDreMJb_6kM0;P#z-C>83|)zW)AtQu#JJqz?w`98wS;`6nMIC^;f zi%_Z_cW7me#CZ9{KZ5jxO?k;<$CgftE!}8aMJfGoMS-1M5a5}M?1MruD#uMd?jPU* ze0STY!aj|Om5+@tZWP*Q0}-OPcCcCNI55~L%@nO#VSR@{smAUSyL4z6lAN1pch}NL zLLYlV6xG?2=XJWUHn1Jj_LfCq(!@xX9S}u9E*r_Udxr-kmsON0BD4<<@L7xMV~54P zQFHRrZP5s*KX+ZyY-KHL*3?cLNf?^)op+TW4WmmX7L5f557gB73dNFfNIcKjO77(t z@9A1JunvMASN>*cvBC*R&=f~LtL*H_VZv5~-6KjVTEi}x8V0c!Zxg3E zOznKPoelOXFm-Q*}K+oa|dtQ{NDJ;^=D|UQj?s|J>9vG!# zB_*u+%OL-KNz|hy8%0}{K)uAO85W)INn@-Xtrn)qxb5_9L5(%JL(u7$HVa(9IKb|T zeVKF1JYjXw%T^lTo($m)!oh>I6s!1O#9i%-JRFcL^kg-|rr8?-mTA>{!k#Qu%T>=V zhP#&SGi=ooPgOl+nr#vGUcqO3a$>H%7KCC*Yd0-f#$EGba~v3ONR<7ZR;AKHvtA64eV=iO2&w7ja{@BLWX_$ zsoNp!ZPO{iQrE$}TD{n!{Vt$dB4J+e-}Qu2c+V`mYeN{ZFH1#rHik5U##f6HvC&O7 zIDpSTZ=+v9iVob;G11z*3ULKn$MrJHAeR(WPDdb7Ebu@&aE?0|v&d!p#IX9!0GE`AMyULD(Z z$9$MG9&dPHS}w*&MQ?c^1GkkNPG!9fry+Bgy*>2~*fY(*1I>Bw20@%0zC+l9VijZU zjBU{PQ-#fF{4x4Q67o5S!=r1qgQ*8!bKTw#({*5~cSw|2hvqD_zdvwXGi`>Qy3?a! 
zfT}rkc0f^5&v3tw2qnL&@z!@Ykcw8#rV--Al}g1dd)8jUyg%4D9AK~2a?Dn7eEYFy zb9Ey-_Y)u~v8a^)vu2{G6)Rq7SA7nq)370@xw`CwQQrIGcigIbNm1d*fJ(dg0GRr| zzrw}~BCc6G$tnWzdj+;%@K{y(ioN|yxVJ*GHWS9RQW1gFxdKtQt~ZL z?AGr+B>v>B=X@5_RZhrb($rbp8z!p5w&}KC5;QoxW`J$@*%LDY^7-Vd|EtrOjxZEh zKT-1Pl$e_-i?<4KuVmAvwp)xo!DWUYFdOh20y1XN#V-0CjJ{f4dT+R8iV~E@QJVEZ zkjkWM{}iRIc8$Y6;JZH|u6%9 zi>TMUEwewo>ppa%N8kfz0Gm6bMb#B$Gwtq^9QWc=`GS9fy_&n2aMbaTWPToA7IyU> zjHKjP!pU&D;f_0P?kOHk&h+0Crt$};tgG2D!JarZOvT>GKzr>pk6N>InKe2a>`KNe zR%-6uPZA##O?Vv-`+pHQ0*29J&N-f3&&@1h8jts4nr#&!WqeLQ-pbF1y0WR+xPtkY zg}hZH%Du7T{!GbLYMQ+-0qNtb_*3nU3!TzW4Kcg_TQtZ{7Dn4Hf_p2q>}6kG9Hd>o zG`6D9eifx*SQqbq36NIKW#%+n81c?x`}I;F6_1r~ij^+C3td$OEqpIS0g&EvG(QPXnjC z;%lZ;B=GV%Kq}u$A4)>j)g$KG?bn07ms|7Y^Q5TiP-+j}0Q7>`6n`HCbdg(=-r^*y8f?1-z2CcMX0zIUMneSZcios* zWHC`!eD@lbw+7h9jXVM);qbU+_CVv>2+R1mTy9$>p=L*zm3yTdKN8N_QPyUyoCQ4ibh7ivQ~^T@p_>e{e^k7>O<(Hx0aEkBad$GY7Xa>cM0 z?+E#QnW<5$e-|9(@P=xi3j3h#7O@ZR4r6W^!6fqMmSHG~03==ovbMvB3P>rb}W!9;);~F+82McR3$P4(T4>;kDqC{&eLJFRF zdsmod@7T_|cL8b;U|G$pzuto$tGZ}~{Sx3KEN5NW-J>xSQqwZ5sVJEpSTVsm3wtGT zLs9o(kEpFIHvYSaqR;0A({HeA9`#f;g9h67Pk21NWg0cGR!>5`y4d9>+qi(|B(rAD zJL>$Wf}{ic+ci&nQrVbu^z5;S2QiViQxd=$?n`WXBG3m=+(>_!HBa)0)k8|{4q@+$ zdJXDZ-DGI+ZMv>7wsu}@?Na-94}@N^dJV0a7&YNS;~YCV6+->f`BHjV*rRpqV~x{b z!HD1kaIL5ZURBQ~_3~s9E-HE`9qctu*G%!esE27QzFijBGtqqQ2~p4Fo`|K3sqI&- zwuu4Or|cWq$Wbsek&WA}Ii3Tysj@J;G#5(!Sm&?qKPK2(*^`IRHMl1&J;R#yhSPY> z8*&?57_@r@_EbNKlXq?&X2ce{0Ldn{e1bK65$t;da?B;8Y3nT+;WYzG?ZyJI=h_uBqT!;Z*xwj=D*9;bTe;eqLk4}v5q7@7ZBUkqy z%i%hkAc+^-jg{w1hd^l*RxoE+(ohH)$&Q_PHWMpo|+<6$}*dWJ(BU4IEhj#-Fq^WvX0CjV56pjY4rG)%#v8idRroiXT&|rRxQfl+|gg({;cS3!1{mt z41|n%R%v)34$Opk#kS92T0CwxoEBy|XN#Z*Wh66MeV`al(V7`OEKEVf!RysA8!TQw&=n#S+mZ<3+D&c$|J3Z|Bv z3m7MexF_OH%fMb^44#uLMVzL>2OHnLzbuEjhB)Mjt)6N>t?;yIjQ5=@!DP98lAXQE z#ro}kIl#KF<*^FnWQR?@9^ln5D#NvLa!Oh$O8Oqj*0nN7&*XKkuwwa??wYmG;Q3v- zqk!gLVb4hBFR|t~*~E38IF&cwr^4jPx5aVmfwUD9zl~N`#M?27GyRSKk1|_7eho4`Kb+r1>dTQeYTTUjW%znpXjI+u>Uf=>=tq%se?|MS6haYG&--Gzb 
z=n>%&Ef;rnV#~{Im8f@1LLP_Tv;+V2`~NS>BD?lODAlYUT40TLhCvT?ZE5$3(oJoW ztx(Xv)DyMy$_(r4j}ZB=+r1JWYka!y3X>;L`B|b~sjai@S22$+qXlj-e`OC6^lExZ?v8NgOhD%7Qi&NX(3vEhm5Ni%>hKqZv{+AAEUJdn%aT0u?7=e5n zV>xR_MT4gi^GeL3pV}9u=A)iP52)8JOZVbU7f0Wk@}YM3{@Soi`s%nE58J`G)rdfH=0p^WYLYSvQRrOuK-dT3#8js5gB)QjO8c%a?;ZJ3k3B^ho0 zU-mN+w9`GmtlIuAN)yt7OscQ`)ze0GIoq1wAQ~2BmA3p(u#aEv@^CYt5B~C`6x5`V z9TfADSXoI+q0f)I;aJ7f(|BvuJswVex@5b(a8T=tF><@`=Ltvx``h@Thlou)(Gzf; zn!O|Jt=L+NSUVbhf2yid($HM5r#`G|2c^bs$veh%jSI?-Z zo-xi2Nk|#n=h(6{9p?445oZC(FAbxW6VGwXGMmjl*Kve8e}5j>*VE)N5SK!_O{D64 zkHN;2oqvJjnwcZ5-GyK@$xLT)ZpuXv3dKP3w#!_O+y7Gg=HC!+{-{d6<0f9(I;ZBTqYaRdbybla-w@~uu*zin8+l0M=5|=n zlj*I33DhaKgmH8ET$(D1vUykq=#0zzfo!%Bs zMqB%3TJHcK#Su!eus7(Vc~&8YaywVXUF@E9PnZdI0gdfzap^AC^@FXv9h5xNcnz2Q z7p!MMD;HREDY35^Z{SL}>QjNZy~bMJSsR60?)-=)=&U zXXPJbm0kWQ66(~G+2$jUc}m?QZ6gD+yObZ-Z$!{TKjSI(yI9cWV=>0r^*G!M>BECk z`~;K=ZW(94JmrvgXp|! zVh`5x>`ZR5+?+%>b=@(HmAsT>Ps=Mx#}Zx-C{;f)VVK>~Bg_gXd26h_xR(@LImjLt z^_K43+Rp9?BVPtt#5!a+x$EP8u;uLU@QGrm22JSjzF zFb$v6H>CHmZnmP9#!AZU`sWDsUfEN~=*WK_?(b|_a$!V;&5~`AluB@Mcn*6`KZpi{ zTW*9l*<&w;lHKL;VRqF^o&+sA%7vsTNviwLbqh`FB?*fs7R*8`1?qd6FFi;Jv-AgMN;sGrh+8RMJ z%-z!8GKPZVV;19{WwFYs_MRmEz(2?}%31c(Fa+K{@mnafi#+)OX1S%pv`VZb{%!i3 zC`BeE=kU3-WCV=7lX05FESHb|Ws<~)*G%g%%0p=#j`wP(WSk`EzJ0PS7o-7l^x3sa zgj5Mk$}b%4SiXVo5GH~O@5GL2jDyab#(G3t5Va8`-F&n{E14TuuCU+LT6qmJ^*#F9qc#ZIq8?^?D;Iswy z7#sq#JW=wE%^PAjEp{BmyykOZBMTRdrxKX`f9q`X%423Y(8IE z2_*|zMq9ZEg*9nt?+bbz;|^Cgul0Cwv-$e#ApZN-IXjajvka?QkBHK1MlHotJeX{_ zK76zw+4oLn&17L9zxG<2wGp{z&vYkh^3d*|&4n9;k(q%Woos+l&& zy6-^ft=AQ!oKxTT3~C0>vm1oHs*xyG9}R0cbG&udSKR9wTRlL_IzN7Z#Ou0k1t0#N zR~Ao2u`qqy@Fv^zk&9~v4z)XWg$cSd<=pKdM+VHYg?qrRSM|_&R`M~F^v#;tmx9z$ z7luYY0aN)1ca+;S5dsb`T4~q*5A4ZvdGD?If0U-#SW8ndTDs*k&jpLSs68O!gO#~J z&*WaY_Er$StBmt_pH_P*Iyu&CpC?T2#kC0QL{O0 ztM~#+!^Dp&U-D%b7AvW+KSVuYC$5A#;xXf%2tE{{xLiH}ZaC_gF=*|C{nxfla-zFw zsgTS|%)lMkZC^ntkppVW>}e6zqI9|ToY#nYCFNw)CVvg5nCjuP?DB8HL6ek}YbW4y zNl2^*3RB`n-$BV(pN*C!qCQ?yX%*ksrYs)Hc|fcCgGY@QQcr|xasZtH_?nojz>0UW 
zUHoIn@m@CHzWz1j@nxrGzjD4Hf`U&y>0r(jq%zhu~$|lOm&jt z7NybSV4h_P)+zSAI2!5WE57jE{di9rt1PqYPH>#bO#9>$9VamEn+U6yeBSnesOQPe zLzof-!PKHFua9lD6z2u@Q7z5oikU}G@_ZASN)-x|FK*FNI2w89KcPGrRTJ!_Ivzrn zDIAsgwGss>OMmyHdCo`gEoC8G%)}C;dc&?|}*MGhJCG3yMvk^P~ zJWqw@-RaVML|v78(>SUBRc&zmoT?~kGkeS9YafHfhx=T6S(xJRC5kh95r0MZX2q6! zv4WUi6il@fF7Q-YOz9_I=r~rq$X*rpL>P`?c0A-F1k^ddj|~gp2UBc}plis7Q^ek_ z<$Op_vFk1-T%BK9B9ovGW-XT3rvhrJ?P-66gx>b&1E`t3n<>IeFk}xQZy7Yn`tF(<*Tz=Pn|_8=I4HTLNB9)PLC0z2i#FkSq&buou4ka!oe(0InpFjP%s3Qk*J(Ev*M z@}Wf+2_zV=n`9*zKVWvD zq+a~C<Dbk)mE&`;IpMb{IMK%C^mSgz*x2InBKj>Uy-&dCkO@aP>p+Y#hiQp*rSipRF zk>$a(^oZ#r! zs+3W+(iR7}YIeHyz1QUjrkC1cVQ+@qWM0a{+rxbb_zhfj?n9vbd4kU8OMU@{z>R@rNO<5*`pM`9*&G5s!Lg>Udi# z?C;Isi_|W96p`ngn979=(;tIVMV%F4b#KlSP;Z>>sVu$QWlx6jv1VnpSR75ht{jCC zhNq>U8ZzIz>?sfWqJOE~EX=5^8fYB^(Ko(@cJyfo`o?_XApv-J@qt`JvX=Gv+R zk3pv0yrZYB73Csg4iK0r7EXz+(`R+pGbnvJm2_tO@)x}!}r@DK_AR%7*c0z3)S+|JaSqB zX54c{d^0gtULGr77XR2>W(|8HR}ZdFw?_l4HI%8sTyvs*7cKDc8(74u-fW4zX7}dV zE-`Nb-IMcWZD7Y#tJ^D#+N6o~PltJzbE-eN2Sa+BL_u}ac=+Cn!cnCczc(KWQ}qLr zCLJ2Gv}WW$`y~^JkAU3xvpPNtffpXl%d{<`o?HB@_OfT-$_=lKV}*V8$jt?u)?>F~ zPfnOT8!P4`xlkH2H{UkqIn2tip9Lu+B8SMiR?!frFw;rZbhY@sfBs*wsgDwK%a4PT64V3&OD0y1!&hAzUMBI0nw2 zig+8hiO9HhY(E5EiFUy;yqH8JV6AhR{VL*p5L-IL{@EW&K}odl0%3wM6lMw5&KclA zS_CW;_R)%?rr3fJy1sngK!oJmGbcF%mvo-EdPtF+1%4$;SR|W8%eI3d!AM>?(w!pk z*IErl?k(0M&wdiC?GV=Z&mLAQDJj-Gf3PH|8!Mh=p9}jCk=Y{uFR`yBQ?atJSymK8 zE}dtW4F`L*AL`o1UJ)f9{EOKgBf#GFyH~JU)MBKEF%`9`!k&ze9a}UijLH-%$6gg@ z2tJr;Lq`Jx?Mo~5s`zkBm^Q-EiSx!o)zWMk?GPqi6>e)M0KEhK?!KoaaCPGD{}Lv_ zsnUaeuqS?hav0b{laPz2z{&LB$RZm$9qjGF;^wON3NZpY@nvM*!rO+}vtnMG&OEW# z%?N2a2P*2%grbQSi1iM!$0Rt`AroTr3he_)C@#uz;g4rIJ~)1rT{H)bj?DDd+e#6y z03KkX_Kmo79J;{P&2_w`FvFJ41AF%Gub{~F_NOG?78#id99>yjf&hi%D=QUt{d`Z& zwJG*ismJBpC4QGF_Av-(Q{G%V%78%=4x}P07s9>XjksxSv#1xmeRX`V{If+!P_btH zC_8sCn0&BnTf5Z7IoOL`u+qg%Z?!Iht|q7A>CQ)juv>4jv4TO~o8q6J(N&=&KE<4G z7gr#0b#xkfPA$#Bil$3t7@EMzu#Ib>s1_@kXeX@)dvAWY%uW@Hk9F*gOoV7h`Yp8w z0{lgP{Ba9Yo2HHJjSWCoTPN$Y?ZS-+g26G)Hj0shcArMP;#fBht_-j|jjnh#%;H`) 
zD!4FI0?(q+4R*z57!_j}=qS}gqTUS{h|jVUUx!n|!J%vH4Pm0j%8O-*z55LhIy`#1 zt$GvediG^`Eoyt-^0@lVEtzC3-wxtpNPXTrU~g_-LRrk?9+E^AZfL^-ST)Kr-wor% z&ogkdf+T9a>@5qe+jfZeNY#QA%NHdKZx{<~^M?>JY@2SkedN(%OP6YfbFV1T($ZUB?%aYe~4PfBn;4SA%Nw&!cF&h9XFDnGUMp}4=qGqdrQnAX?E>q5UKQ z`g69r()xS`QRhrAvWu%7SC!^iYhiDi9z2m{qTV?i6~vVLz`iiN6`$uniZUSh>SBU= z@qQSY$~9Y~0P8UCN5Y97Kw=CD0*g)cXcUXK8lVmmc--yp>iS;F=jl zcEw@jKDayQ*h$jM7c%8c*jWFGN!vC@gQ^lLC}$$M;#9Tw5xE*%^zt+bQBbsB$F zO|r)W8mri#(-%L8qG&R|SS!Bsn2eGwBRJFd1JrBHx*{&m?b07Z$=(I=+qgPOLYHq{ zX3GSSHDesRLG0BfEio&&$2JWUV$>d9iu zm)Kdqg@G-zvKXa%CGcLX*R;N!{)flK@vs&B?f8qA=h${(Z*C6BM!6i*!(uC!Sfk@^ zimO#MJKJs#@Zqw7woTZx?nsY)Eb4l3%P#Zm@y8?Z-#SJEw|sEoXP;!GiE84PZJ8&8 zS#)FdWt=Dt5G%u<)=3@|Ke0d2R*1TS8GZ0q`GIo{;{@ z<0pOY$sX~*1a>4&@t_`6mFYD7=c2SRmmpxre=3ANda#?_bDG1QFWZO!vR1fR@Ysmt zpv>o|!zqT?#$fB&XBxt6e zg0l_mUB-TkT_8%Gn>V#T1ig@?Y;K|Z=LK*INv)a_we}Z6f>&L7Qr?!CdG>-tI+jIo5Lemqu7}UcV6jDByvq3 z5#t(Iz!t3cEf-Y+AN+oc5Fx zp7U=9dtKRLZzx7c7PGMH@9tt{7h+8n-Z5i zQ(AjUmbYwu8^;I7FSUO6fN6Qw-{du`5;yw@ynTA<^4rC=oP%OX*5F>kDK?r+JqES+ zoDPqgVk3oJh3cZx>LF9DGzi7ts{QMJkEv6n(*k^m8E+hWs%~jX$!zj1Q7Bc$9it8! 
zt?TFstA~`^O4(Y zpb`h{^F>jqk5<3;i_ki~qjr7-h&l%)}Dv zp<&~jEG@+;Znou5DNB?*I!5dzL7I-`s&)Lg$mXRYpsW~yS2)5hh=NtX`1maq zmy4>ur|@pLItxM*P=)8}T0=2!uOw|y4}J!&CK*y_qmSXWc)!R-?(O&tr%&7Fgqhsi z*|Nk4^aWNN!az}(OSldPA-iT!HeNCEJR<90T=8IwM7?l)FQ!{kZ#acVS!)?BOeULW z+jGwXQJ;0sh<(`y;$t!^!WV$O+82RqricC7=b%)NW!%E3&3--%Y{`7K&I=)}THn_m z5v9BX)7M(zi(m@jx|Z2CQiOn-H7o3AVHCl=$@4FH9C>$O;SJ)-x0OlIQE__b$c%x9 zi{gi~Vg*Q01rw#zTsu?TIRekkj!Y+Egv${an!dh~Z3o7d026%Ojh zB@X8dfCdjiNtwRZ4i7})EyA0L8B3QzL26tYH68+XEwi>1X4@EXbke?cs(mU(-tvD> z_xvC#6$h8ufT3YR>|7Vxjl(=D1F8s5{?kM$lsC5<@6VKg_Tx}JgI&Z+Wqvrps%v?M z*4s}mBAkNsyRv9_kg$>#zHEdC^x}8rCSjE0mt2;C(*w%5{qL3$_Lr#Fx?Q$@DHBF| zbiy0m)WAlFs))+LHenPvP-pZ-VWl*t;gm`LgyCJSd9)|3`#9=*aY0_IMD48&1m?W>4L!@;X1S1nxf)U4z@6D<$$Mozv0$w$hMy*ZGCL;@%^B z`o*1kBrZULHuOllh>a4XxX6~lwnEq&p&JwJtIIs7dT4)JB&=4SY2OHXtORUJ&R^)U zgxKfN0~N2B3CzI>%v^?TMl<+w)0~u<{jGc*wKmfr5=MfQvW=A zNep!pnMNHDMyIrAt>tnc1+(APkDsxQqQpA1X^K4~Ost*j?DQ2Li<;|D-ES+Q7oU2r zFOBJM=&)c^MHsUy#*1gI_B^U4+eK@DUI9$-a_wnx9^4)HezNaHC^ws_(x+>|s1#dS zEJwLH>pVzPpWy+Hq~}WD?XWf7O?rVfo zAg64zZSU(&b_}GRUEc7N#KidqHjm z=8bIrJ03z#9K5olqQP*OW*fJ`eC*!OJxQ!jB)x}(g0@V;r;4z*R&-0DeX`wys{2o} zF9V#}6C1xB$SDkC%!7hdIQ6xqugzwn^u_z0L;MisFzfUI)E`XNI63RzR|1kYZDjpF z1X6G=r#6=Cbn)f|7X1hqjE3ZBA6vQyPLpvQKWeK2f`7R6ws|ihWUfVjHVv2B^&cbf z%5C4oRA!yMBT3NxbWiLPsOK$PU#t2Q?rpQJ#NH7@A>Bv2^?zW`N{7KYHL+b1&x*75 zRxcJ({MK+`qNh#A&B%+NgS`~ZGVDij>iiBD+3gEcVGVBY*FY(si%*zHREtr5b-^^- zCrqu=bqVfk2O+-3sP7WI1L6PC+8pvISIC~f=nDvC+|t+@e+fiU&cN%(I9-%nqil4Q z1^BMJ?y@(8DFOSch@~9$ND-ccB4Jmb!?y#h$ye~8DRi85tc{bzngoAl>}BVE?XeHcEjl=KxLyAZlvf19Ggdy*uXl}a5Et6#3JT0hmdA}s)^n!0{98{? 
zg2VkL+vp#m=zW8#{lrfm7QdAwYDEDh>VpjZuSL<^uU3&?wVQwT#LNL^T5AznX8*d8 zHe8r0_2TiO3(x!o>f;vkAXc@~ehU(g#xTE+NHC1_199>na56fyp~Q~+6HG=JeIzpT z=_ZP{5!u|I@E62$)%^n4=1V}r77cE*JO2iIrX6*%Ed3uh$1|!~I_!&$Q*54SFt*vC zVEyZy<2-_%bH0TBBkJkdipjQj#L2t5XsMlcyhn}iD`(mQQ6KgXW!byyWQ3b_0a%3(vDX9< z@9SU2Ws+cTS8hh{Wt+u`me&e}ze+{5aor$+0AS40aMBUNY0z(9RTheN!=asV7QN=`7+sAsl4R z(a+s0?CHiHTD3OO3Y&mk5sr<1A?7->oxn?NN>dL`;Br;2WWELFS;^c0o4Jn7kOZGE zy7FiRcMv8?=DMU~Wb&SU3e$VtC$eM z{3uJDI?1(K#ogfAI_)X6yILY~MQ9K%(c^ouL+daH?QwC|6CV8BEz@*`t&Sn~o}{is z&zwvf-v;XH;#DcRZJ0$bZOtAQ=P`KfX8TGoX!7I)yZ#;zd6WBC+X2xuwqUwV5_U~v zG9oXt^!A?az_5av)$^_Se}l;76RdZD({g&*`@$EUdJexf+vj4pigo&qNlZN}E&F~# z(2yksY$!h$b2a2F_v%hi%BxvAiUqR+Lq^$4lAulX(4}^2XUEiq+eSSm1ym24S3PW; zJtYZQ;WJr}<`hwHiHCCScQK!ex8kS{tMnM_G<^smc_-5}dxZTx zpmfhLdi>%?J^Jo7*g{s=Gmk;_(rRk&3VK}JqiUV~$;XkPdp_^qp-+0mOgx7^1ym#G znyvzyD@v?PcHUoq8qCS^Qah%eh6 zX;3e=*o<+!svxOx8cEF`o; z{Ao`R@pgziiN7)%PAMr&RNu+*qztjRkHd&{&kZGbRGh;~=Rrw`AFC|1Q}P^Fj~HT; zg@Wp{J0eMf%!OX z8^w`#PO4?~_0);HwoiX9OwB$zce9PI6=dXThjz>J9=oS3*G*XI#Y|HFsC*wL#-A?xu(r* z-Eg2Q*O7rOrqb7~u|G$IX<|#uS=57)KX&lLtx$w`k%WkSI~we7d=^YNJIqh>&0{?> zYv@z#l5rlH8yxI*VONab*Jaiw2&kFpW6LH%7;d<_;}?fH!zznOt+#V36q>N17kEhOtct+jRY=M|(q%EPQbD*vz zPfQk5|63&?tsYL>BSNR+<)%k5(2KlZ_T|UVgQII24m=(b_Ljv(9=m(V5~$(iveIo5 z8k8XNlC=K7lEgkqPz66W>-j`Ku@s7)xv80ap*$|;rlhp}23lW!L2$4kImKo$W&2jx z>(VuuPo)JRJu-2nJy90Y1g27L7ecAmjxpr&vWTa=j}?`SMNYK`&ctZFs8g7q&w{sgs}9}m@m0{E z2K?YhtX_qu?7{uFNj5!T#7@e#bz*dKN{XGg8i-a0rWaU^u#cPVB{sPd77XWgJ4WIl z?F~s(gAr5hsIY3l9EXSGqcu*o5sRh6AELxOTrt3AtoI;_ZbPqZ6{W7s00!I58=b_x zM!h5ApJ^3c_0V%Q!MqBFfe`*;yiJ(sVe0ayI69o|{QFBlk}4 zgIBlNqEwr+bd9Tk-dqWs-AuLV5_sXcY4{TVTAUhaYBg#L*t2Pf%gvWXJ#AM`aOc`% zTf^{t-kWt_gL+o{YL2#(-hiX3_S)@D5%Q{9o6f!F_R5=3iit4a+V++Q(NNVzL+k@l zZ~ypi++^$WHWK;-#+@k-CLIUmEl<{ctQy8Gr`qaHF41t|o#j(k?rbIa}iA2-Lp$O+ZQGA$Wd1BU)&F+wcS1GS z@By&bjUD(U_Vhux=NVf**mjDNrwom3<{=1Kr1r5BzXbZD*yI+gFXodVc2Ilh!hqWk zBccp7K$`&9tSzyG04HL@zDwB4(jkdqM?A^FMT_kDqhN|^eyfcTM1{0mHgC@U%Hz~b 
z+h}cs(IIo|G`4C|0!H)fEwnyjR4Y+Sv;1Oj{~GQ^<+3#1_gm<(cSrnSeVy+-Gm`II zW6j0Xl>M;mUn3HH46ebEm|hI8N#^<8$HqjR@1fq>I7nphkNUyGWro5Pol|~aQx zH4IAZ+sCf_6Y6@UMraFrvau+aV| zp;zI)Jk&b>9}VK4zxxWt>?;_3ga2dM$02oP<@%AVmK2lQi>^HHXGArXtGQ&?q zqMmMMr=J9*Ui`4?GPZL?y+ls<4ziu%L0Z`mPByC(CXjQ4a#1SBnXm;`EusNfGS7}4 z!(}CQ&&i&Kj!xhe^O-2c{~uN70iRWs{C#!pZEa)k6&00UtVAVpo*-Bs82et&l^&+~sjpNR9DIc3hd=icX( zIWsMq*sW(ldVQJzSZ850ciWS~5GfumzTD16M(K4$%k3r6RLfc7NShV{oQJlwo6d0w zR)To$67b>^Fn+Od=YqXso=Uc2q#P-<_4g6Nte`}raS-aZ+h+0_?&!Yo~{oNO^6Gv?Ly4`E8I9kt3v zUJ6aqiL>`CKIGmHA-hw$P`_6&sE_@Khb{y7hbD=uS&CBqnOHOftED?H_oPgblI@}^ z0O9B%%(!hArkp(sag*_(0Plrg;2`cs;gvAF`y02s+rAZ~&{nPO;;Uj3xv+d+5XJEy zZjG*n)`MTk8X#Z5OK8u}&1u&F>C#Twm~dg|=_xa@!?Yp+3g9x+s04>}02~dI# z#F&q_xDH57>74$ji}u2`63!D0KR4L}8Cz#x4+BjS^Y|D&D?rA(2&4OkXwTRWJCr}( z0HkVsdsK<^XTE2p&Ak!i<1HzzFTuXFr%@v&1=UZSX7}6#?cKSfddG5mTo{=Qxp#=B z61iPsSygtkL*B6CuU6Y_w*b*CSza%mzZF2O8EHC-A1Fj2{c2X&mXMaOvg>by@9oRU z)(obj7?cqqfTr~nkXjQa+297yu2!x>W88Z?I8Y_HzWRtD`BGW)+Y18WuTNOtEtt#!?e8C(ZsNOi71}mf8h( zIi11)?jYJrNoRf5O^{dZ>yjm*jn9qu!11Qz7>b)fQ^fG+#C5}G8pZNQ7tLg2Hzs_t zz%FVGa+PsRx5WM?Ol9@Mm(@?Gwl#uu#@=bnrWQ4grB#R@`&^iM_pM>Oa}|5m^P0oZ zv)R7d)`zsNF#J=r7E)U{#$<88$>d%7ym^fg>KX_qVm+NPR&+{vSv=iUq_FvJcmoymT^qboOXu+PvmLZZf5u7}& zXzz5*h8+7;v}@bTCwc*glm|CVv5Ov#N%?(U%x?X{j&LY{cQIb@jtiit;;P__;K@!7 z^)60ZW`FApq@W`s$Jv}mpuM$^4Rdzs-+7kG=s*8)T2e{)O) zY|G0PgZpZ6!oiJ^b8xPE-E*;A?d3Y7w=lISyltgu*E2tRkZpTDmK2ZTo9*+zfN6T| z28BMF9Rbm^z1!HkA{ha=#!g79S;EnLf-_K!*nupQ|B>j4@4M9wh@=g;XUMi&lL201 zV_aU}CrFvCn^{6%NPp#O=M)AmT4&mNF-gwy{?OD4`!ZzwnJFMh;Q|cY0xOqwvE+`47(IpB6w|I-Y=GMfcES9 zq=Wx|D+VeXB`!s^KEu0Y1hGi6zLk(%(hE1JRMz@!6xYrL4<5a@#}JK zCW!KGyxn?-WYb%0n@G|mW~AX+_;Vru#xR>~-O!%Mf1u}&@E=Y@y zj2vt4W<%3)yGQUz7~9_kSvbDwlIwIqW`QM&CJ}|VuoneT2shQQi45QNX&77HkPkjF z3`^uW{S9$Qj1Tg6M23kmH?|oA!4%5JpwccF1VE)|`D~ji+7oi0Gud8?<4hb(TD344 zaavCt<}jloR9`#T1iEx*-ef zg}C(4^s@TWB{n(E3GAL`*dlTK1A?n>%rger*06~BiIeP+mk|3%?v|EF)ZJ`6>AoYT 
zx36aelcK3FgZ)KF*e!*z%)900=QUwZy?t;UV-S)@KnU<0gg4Qi`=53i6uUaeDjLVVu%qrjNIQAssE7Z(oYm%P?*}f3|-VFg#||9po*% zsoJI9WDQ1p0rJl9@)&515zX&!8S8?>3+LFYqP^eqa*{E?$*|8u@wTEo>oX23bw~^qr)v$1ZTKXY2NuW` zST5c13JluJwM#1!tyxca=bKqi(qvfvjP{mt38TzX#PN=gu3c_tOaXcy4oGGZuvOT_ zX>{pVaoCs}%;%KEijyf1%lr$(p?)s;+Bc%TtYAa0a18Wn%NzcrJ;Yv4dgqE`%N%l$33xe0K3SUi8)(bCzPLjEX@E3RO)Z0Q)S}sBw^vOJ%Ale(!E4Y-fO$;wPkMlY!1Nt*-*WQ-IQJVsL zZi$z3yrRq=To%iAv~sh}jKlWq6Kz=@lP0qEI3h@)+Dy!{ZYx3F2T6+TeMbe@+ndDU z3Uj7s#o=SY)cB57kgjhU>$P0#QV9cn>xLB94H<2XR|7RHbUiX%fO-lN<<_wzMD7)5 zbC@tEneTR^D$k{$#&*#f0JYW3wEIL-+TK}g(>^WWdhu=1mFIKU!g0+9up?lNh0!Ir zgHUddhp+$xVkYKStaB;DS0@_{s)3Z9P)ne-%>rne8hBdYw82yETgQ56xt+QZ$n&aS z_o@|%_A1%>USQS2-XD>IJbcLP3lkrjHqZVU7iJc)sct`QA~j9Gd;>Rt`hB{5vzINu zK39nL21eK(W>hDN!_Q50(PXz-mR)G7n6AQ}>W_z5t}w0U7c-y7`Sg~U7!4=d@i@kC zIK}?36*;X*;;d=@>#?-$WI6NFXC0N#n zK(8E}DIUl*aKO3eT_5W>8EO{g;$?84^(nP#K{ADksS3M}1&v4IuE=owT43TK4 z^R>PEq5a#HE;m7V`@HcJPuaSu^%ohA$_3a2OcM6525-?TpMoeyf!tsC4BGY1f2+ba z#9>dCQZIe(DWhvP*?nJlN~Uv)WH(;WtH6jN$>x0NNxv$@r@rX0`d4@09E*ha|?&acmxdj95?Jzlh zT(($-A1+4UgTkTdGW_VS^alpm=!he14U4#O*K##gbV{q#9} zO&Il4lM@u)sQHnYI7?yqUKU1wjwNq93K_iJCBe2rg?)C+^XgR~CQ$itpzDuW#d;zC ztzqJ^CRo{=^|fb+dJN2BL06kSBTN;(1)t~={C97$((mB-;FRB+#5AI+*h&9_8J<@M zb1|(Kc^Q|A?+-t^Xu5Uy9_WKMCsMOK&$9mwgSN1Q855FkF5tw%xw}g2JCUyPzA^?z z6}$X-|Aps;_QBe2eF!lS)TY4wKOl8g>c>tyxMA5>Yi7yXdc61=+cisVK}aWalD{yd zYZhc;%k63EimEMBbf|jy3j0owCx+LKJUja*fNS;15~A_2i)C+%=RL(zSvwgg{S1d1 zwPo{rNY@ru*qOh=r_m3m*##oKlE#f2+U(zfRKhBWFt8^z4gltADR$M#4mh2hVs8mh zaQ(v7Rwvp^h_J5yIZVf#X@gyNN=z0d+D%VEvK^l~$;OFBhgR6LgsIqVDcL*8-u**7 zQyH$_1^tn6&5`5BPsAYSjt$oNR8K17Dh~BeISm+&6=K|!R#-1_NPAZ^%NFT+K8E?- z06|(>P_rb@ssvE%@Z8n5L$ucyVSHuSr>Dc9)Y`&{wS|?ng@hnK!;>fR@K>J+O|knH zf1-`?Kb!@m!tBBJ$Fp6+yWNl9fhPrBIX6kP+s=U#F22{&UzO}K4zaG{`WwT=lYTL}&f3n2dTol8-{shNh%fibcnDYh#(0LVC>vwyB*v+y-6d!j_F~fk>Lop?5kvGZ~iy!`D-8 zzN>fv^R9qL&C%*9mV6~Nk2yo%k}IzQc)^@&7PFh!SsZF@+0-V8Bv(K7#pZ}67n_^u zcIwrD@KZmB&E(C(|m(``p@Kt`3@i|q%Ip6vL%66<=S7lA=~u6=xyC*yAUf}0^} 
z&G8ZA?7>?-i(D%%8EG}Qxrier%WMFB;)BdLV+|{@>+T4X>rM{wPy{I{2|MESP{fc{ zw5DlyhAG!B*HxR=4S{IM4|SPHGUcc5iF2kDNUfoZ)T7*{r87 zU`qY?z0j_EcMQyC2vX3IkrV8D(OwY8g*5ZNSkkQQG&>;ZVuF5-wxRcfsi(GZRq%D& zWMdwHLt)yA zI)S|>qNOwKH$fhK%VyT6vy-iw*E_)u1>(rYciTmSF*DSlmVVi=! z_Kcu++~5c|*yjm*aZDo>HRH~|z(MP1b&1UqjS}hFBJ353Wo6Sl&7KiPh#4QTb_If5 z3v8TdS15r4nfC;}iXEE*mkT?>oZR0SVv7^J7@}PB(~=0FJ2+gxM&Z99;yd|Ro?V~h z34;$$X94PK#Fdd%qP@OWtT>1D1$q;AO($mFX5nybDClaL%}#+6&T>6!uE!vZq{62~ zOnQgfC;>0#!zC1RERKVI`aKOf8r094Wr_VfV{#IE7k?8#fZH`&Ale^q|A<|l9z$(% ztVM>W%f`vcI?>*Qp7iDyp(wg&mK_a6(TYXUimf&&lavgRhiK9F41z^G!Pt4;AEqtEnp0m z#Pw7I21g!YH`a5iD6?EFcw?D@G^G(Ig%KgbcJUHJ;Y%fgeM;@wwHhU4SM z?@0&O9-o;4SEG>v3q;eNuQsn#zit)uq4a16ODu6A+wn`-kpFCIOxQ8kehX0^K|S)V z*)(L{PaU(dK<+HeC}-;PYFxBy5}%Ll!swhEVbyrr41ijTKhSe##mc~{X(iE~VwnA) zG@CiwGs+Ob>O#P+zxDRetFe@M5zMjY2vh3uc@ynJ(P*-#&>ouu>61|%GI3N+@Llo3 zS8Sc;$-H8lt-rY556J^;gbDuD)_M&JCq+=(s19!`Opg-oCwF*l)QVbtu7|Ro%8tB?|NV4?{To~M6j@Tom zv9!_3@%FNyI<#ty4HK=mM82E97Vy!LC>!f!>#)$pS{Yhb0q>h&GnZo!`$arggLs#A z(qf>uE~#coqLm1{xF0*>oepm!VhZ4@c*t2jqy z6ik!WS9lgG!bhx)S{2KpX~_N|woM%L?2IG7KU6~d!)9|PcaWuvLEWue*r}^MT{4T^ zE7m}xheEcqgL}qWppSueJ?uOoln>%-r-uO3u87SP@P~4I#8}%G%GkCpw^{+pz-*3L zOY3zYlsaBgY(I*oJoa^_+V#~gh?Xw5hedm}-P&54^&sy%!nV&ZxrRnWEShJ0>gwcv>YqJUH-7dd|_|0uE4kfWa zgc~klc*TMdZwBeX0O&bqSzY$B5syBCvFf}!9#C&T@Tqv!rQ4PWq zV~O3@3j0wUY8y~sows>TwkyWi6WamkC}#t_h|#xP#@r&`ro9btwRqqIZHq9CPG(v& z;ytJ9N6)h_Li+ft+|ByC7Z3!|yncr#QqZu;JE8sgw(o2Y?gDwy>_x7$OLv1&FR8Cp zh$LaYe9_y}AAqQ{Zpczw_z|=};%MnoyJnAz>etP&_M#Cf6cif+MIq$qL7RNDLP*bV z8^@GPkjgR!*b$NRqE-)fRKQ;tw#%?+Yx}WFAD0u_ZUX)r+b;C=pSu?p%HbQ^CWSPC zRqg(L@b!hL7!s|R7Ofz1rZ}#8I^X#p_XDXmt7dhYb^X*!$x9z(V?=wu;6y*)4he_5 z8h9TGepP=F5A}CUv$H>oCy#Jl__EKzD8xl=n-tP%d6~9Bv=_@`uC^n>>aNOa-XQy4 z(1%N}LHIDsw41+(aSm-+ePmjhJtPjDqpf3p3Rl{2F}z8=*_{g@Rb#eq>m}k5vSi`J z^qMb0VHNhW}%?Dp-TV@4KRp`)#3eGCUXz=MUUig`-f;(uQ5O8j~|XFT|yA| zB4K|)I$)Q0BqrT8lNYnmQ7}D`o+10XV*pADMth-63L#2l^57plj#Tebf;}ga4%PQ( z*Cr-2O}_SGIOR&SRstxO%*DGx(PT?}#A!?_I?Ec<#Re^rC$1>0WE5FhVX9rP+7^@1|b7 
zcJG8R*i+bId;b%Y9$QPap8tADG@UruT;~>6HA^^i9wDICkMs81EHS+3<6~x9vmb!2 z^sZNQieul096VTHdQw+3)*k+mG(KvA;HX#p1g3&KeZt=qpr>d9OXUM0LYI~eZTSCU z83`lC&!wH~9O8|(I;i(mD6WtxseOYZ(i6$#|az@dQhXol;xZ1tw6qo4dh57DZ1t~z^ z;kkcsdOM+8>qJvPRHq$Vt>{!2v6pAxilznqWX?J1v@o55K3CY7(_O~or`S@@a2XFE zpI|Y=1vOk}Rone%#zeXt{!$3}eON+tycTD9N<{-`EjsM)bwlOQ`>L~DdTic8%Mpz( zN!zAn*qP@5{1-v?qgrP^A{KJ2YFFDd0Teh`z0uOnh4#Mfh)u@@=K*=fn9}gBbQj{W zpm-j)rIIaC3{PD%J`1Pao|-i=Tlzc0)Oe(EOVUfv_w4)d0TNgiN5lvR_F8$j=cWi8 z9~J4^czyUnuvZ|<;q4d2ASWdD)WxyXgXQ3%N|@A~wjZ82!rl!bwJl}y>q|keEx5Hl z-To~O4cGPC3oe27(fr9M8!3cD@n6PX3QhL~E1}haUD8FD#S(L$X@(Gqdp+9A=8N_Q zV2PPzHK8mE^-jAS8J&KhaQ59cS-`c?oS1{G&#s7N#pEx+jtcwlQD~jK$}YbW9#z$j zns2v?_Ns_Joot^9)B3>odx>3s6_Bx=JkTB%iG-+M`iwmpLLOsVW;`h&4E+D)S&OS< z#qL_kDM^a3zD`s53}Dq_m0|?FmTk4yK%ba&%AuI%a6PZ}1ksfvZKh~fk{_Chwq4jC zsP24sxXzR2bIN$y^)ZpIGPI7vUf7tW3R0`0>wG3UthKT{m|NrC?S@$P98Tw23ewrk z0m^J$3@{Gfk0-2IGQcjpF_tf%#m6N##b7rK%vT7y{`k92vVY$UW|$;m3dtR$TR`Ce zmfNi&`$imZA9hVP_0||{&yM#TL6l;1bCvyeTTGV6N^tt^KpKdJk8KxCIyO^NZQ>mO zs`N-xx8CWwf?5Au3t%{mRtAUCk0`;3g&`Yc6*}d#h`1U!cw;`Zp%+oVZsEbrv#m=A zv$KL_;K_G|DMwGW*Y1Hv4?>LLC$o_Y>V|M++t`b!D_UANWQcWX0%WWW%CJo$y#t8a zYVDeNCWR31D;ix{21nQPS*)}aalChFYLcDTJcb9>g(yiX`9R74!8WIJ+g#-y{roL5p7uiigi;``c zIOq^~1u3_4JH`?ai&MXc2gISyyxc?^-3gkO_A6lV`DkZ=*1{`Fg3IarTV%b&bWQRi z1G4Q`VRGNaVfcfO#ANA_?OSYGml)QqLA$l9r_JP)ae+8K8f5CM7sU38N5l4P;p}gj_2>aab+)6X*dKcWPH2gXmAS$Q@iMvDp7}Gt ze-areH1y<0U5X)6#Igk_HaL)+VIK>6N&KLu+r7QOv^RsB@b8MI>kBY|NVZEJ15wAo zY=X!5QA6vG`{vn#W(XuqykId@>gX)3+)Kl#3$30&zr}niWeXzRRHa-EL z!fQ8<;YaC7fcFN@F!Iv0zkQE*u|6aituPsC)?f(pk$`uo&M3Ll-S%ncL@TF7D_2KX z7ul2IcmZswV@>!Bm=@&c4YL1*bbZlGyZ+f&x;zew?4CF#?kMLt&mpHljK=}|M%M{a zOmy8OyX*OoUR!0!q8Y(>xn3YL{Au7$%hGYW*0NGeR|gBH0;>@Y-<9A{$FBMd99ozs z=SY7QpqjcNEA6{bkdf9e-R@3sLA3N$`&6_)_?+O5WM-llfe+S*Eft^$T(a92AzeSb z*cvCncTF;NFg83H?3(Ng?ta@_;?S0MZERs*NIlNVHR{wWQh{nc=e_;XLeYx3cpKWm9;B?}Z5l@>#_T^iLtABMZ_CY-vBzyBau+iP%cv6MF{16xzmQgf+;|073o) zsa&2JeirSuu^Qob&)R2tVv6sXdcX)uf4g9l?yoKIy!Fc$S-~KuWq&kKG#Uom 
zm!oWwpnoSHBLGh*44vG+BS;0c8&+mnzrhahRUffj0q^EKR^}NTH(dB4Ec!CKVMuht z^5}+m!L;w{BA`CO zr9n`wg=3E*GsNjMCL>>q_Q#i($r+9PR}31sf7qw5*tJ6) zu%9~D+6YiYdZeGN6is!SA{3R%pGMIlD;fsVhfXjDC-jFnG>DIw++-{i^wvMz&Nd2B zQzWrpntdCl%+ASAV^RBE2pMzzwlB5khLhB_j3jX~Wm#g7mz6(}V+6e3X!(RFKd2Hj$lACCuw>C2o{8#oG* zj>y5U>l)Gi$f8x_ZC?m$Hq|Vy*<>9@d!}8l+F4^D(U{+)D!XrNELUPpl@$t7Hm2(% zMTTuD(0MCE`F@?vD3`qR94zcF`+{&|%k9J0Ox&!L^7&YcbgiXwtlqJtf-fC33_X z8ym{%hR%u6$GAwpeTt_e*on3W?BWuzkCk4*I`%m+sE)8q?~0_!y3(_5Dl}CSI8as% z_e}%RtiS_zx&T?Y=Jb_FvOMt2jn-^BG(|JZT*iMZt+^O9pS2e=l@XyhDLb(sP-XZG zPtPdEM+sIr>t@2B+*VC(#%xG$9&gutyYE#O?H^}}BB`>rc!a$#8Z8S31tBCZm=nvD zB|DTyg-MD{sg=$3q;mLBy28E?JTa1(zj2>;o+sF~`h%628zs*7yx1DqDAArcSg@|N zp$lC4^~|x>q0}?djs$!JqzF=JbkS0q5YoBuUymoNojb+;E$C$im#K;WurO9mZ&rQl zLKqB@iMC0Q-VFo&mD#n6yq?_1U^^_Dp5qG3&c%>!wQ+n-@$u1X!rSkI%iz;hXo(vY z8xn^)qAeCg=`38{mW84q!r)fhC+LsjNx5Tsw%n7}%ulpoq8S!x+RWW25Dv+i{Mwwd zG=}@?vPIQ0uxro_^YiV3;S<0SZuwTbWO*z>3QiP{3i=C->l+T>BX-sbSk#v%W1-F= zU=~|t->h^=DmF3Jqyj)|>WZe=-$hHsJhrC(vC0Jp*H5%NE1gc~>P#omt~voiBKDE6 zt9oo4cAcv|>9(Pnwk4$NORKDV75o#^X0Wxt+6z&f9ICt|n&Q%HRu8oC0{q6Xr#%$A`oQ;^ z?Gw}6kv5q9jl;qWnq4K#0`Qk(*_&Wc9C1GF_RY`~$NXWGO%y;oHf{!I+f*TvWkk4J zz6F}TUqjn_6Lzqyw>*w$)A^;(bw*NGnTejYIcrw7XCkPX_ckb z#5j9a?wOBS#HejvK#o3Y=Wcg#u$4SSKCet1^+HB+0>{om^g?vS8awwbXm2N#Gs_$) zhHKWFXEXn8pfpQoXm1n>c}f;6C&~m|nAM4Br~;j%810G`1I{_tLQS%?m}*s!ACf zHT(b(O5trFE!7?n^1k55UiW0@gdA?~*AAUjJ9NH%6i=htNOSXt^C$6jt|86QK^)2Z4-X}lNU-IvOzeU6}a zU$>sNOo)^j@?f)YUtGlP(*2%KJ;ej*CP>OO{EAqi06l?UQ2XH%r;{;~Yxt=r>aLB^ zYJn3Yf@lq6Y}sdUTos~)*t>$1lE`Sv``qb6!{*v*(cxIsO_WMbJ#POJo&Q1?o4*)+D-{wi3B#FGHYXLJ845_MjT_eI?PrG zll;bKUv5YstetW3zGZCUFF)Yoonr}DxYafZ`Ys@o)ii>-X&PK!2RHmsaoUw({}Y3j z%XjaC2fc)Y>xc7a?+PO7%Lej4>Y>pa3xVl&%OMA8Nm$}c3_;BXz9o+x_7eDc?arce zo**S0TK<~ZSJ3_p0>=iktdTfKYjMN!PtoDa!!-dLdnAU5MxJI*9R;H|+cR87+*4(f zLk2aaN4P+XKR8lop*J*mWoGh`?kScJu+F{hIU%0PBW-MkNNSEQT5BgCclM6g>|T+s z1V0p;tm)Ta&rhh&wnE;a7>Lg&#*{v#e+kp@+niBg$G#yA1=kggu#3Ni_Lm47nZOtE zR%;}tSAqMzT$?V;3YKQH8O+&nLS&F^D3<*bwvP;4=-TW50-^}v9yvJMDddAPDE*=)$u>j 
zu7Ny5lnIiSEl!WTb=7}8Z2|U^w}~cgUD0cGMRVRqlORn3)qvNm*VODJvf2D<`kTx2 z!1!r@UC>80+d1hhi!S}$6UaXfXP}jWs1RWbuH8vZ16oEJkBjEHFzw{zqxs1oJwAmU zTqeLk&FyPzM4~8~%w})-%u|4B&ql7%h$ahD@ga7`sVqzS++Mit?EZuEj z9B0d_WKNj&__PXpN3?5`!ffZeP!z2wwHwd$BIU{;NwgQq<1e!eVOM}@;~Xr7#*5FhYoa3_EQELfT z9Ft*MlWg731*1q{A%jdXp*PEg(UDCFs}k)^Y}~4$?HA+;D;l;HUjXp-x;y$=iMC0+Fn!}jHuFNDioznV z@*-&OKN+a=G23^sO9#}HTcb;yK31{B+KKj*@-xrlD>FD@Pp=(gI|M1VZpa3^<5Fn< zWCV_mY*d_qHBL^R-FCUBu4mu#nk%3g$OO;)n@I15-pQD`b-EH9s*Vl9R6&&BYH3CR z4zN?K$yJ_rP=SpW>3Inl#EMARmEblY->rdHJ460OapAPaCWsO1^ab{+Fiptqk6$6t z-d_JK+nLwE;qeh{mV@!8Lay2IHM6Z)kgO4G3Z{l4Uwqn5K`*IIW`y%+4nsY0w5r6~ zUkmLI3?p*(8X1sJibr8Jgkjqw+UsePL#*=afZ;L^>tR`x<%{L*?2F6KJlijZH;etw zGQ0nJFav~H?0{F;T91bC z{ZY`_NvtK`5yzG7tJ4obGS-fsWj}`^UJE((eCaMkbX=$QHcq5JGqzgsQHY_=MR$A7 z=;AS!Et;e)TiDinoGeJS8I2&lsdPkk09kXIe%`Yjrhz?&$asWwU& z-42h(9j<7~3ih){T0#>SeLZ!Q-PY7ae0SP0C&+IG^oPII{@&bW2RCm$Jf)07QJ~j= zHT44fTMJJccuzbaKqGSc*_xJ+)O>7NwGF!u+FOx6FwM@mALy+J%pTU-qYuP5I}3Lf z?Hp@OAB<(eWwiBp$RTql8H|q*bS3ENjh6l=F!i?SWiN@OF1aG%kZhVDO{v|mKF2;0 z@QR~rS4P*(u-R?lpk*5SkzcoSTHZ%~2x#*}0-MW; zVud{~kr&e=!}xLplxP zf*(YOU%ga}5N+5~u`&{H=-B6Jptl02Ao^7O4nMVAZ5$L^cd0MvsAd+L!k`+ zA$I+Fo@y@sQDD8jAD3qXJAA6nPB)(m+7&xeRg>wJX%3a0=qt>k1edW z7NW^oTUb(CxYQmO^qR2@Otw!#F`5%~*1i!8x^hKe)>LM{i^F3ekV(YOOp4`ZuXGSM z4#BQh;}#9ARb1RJJ0ZcQ#BtwTZH@_hzx3s)at*s@a*U(zC-d1i#Gy#;a%kH6vmnDR z14H{P`y~_}pBFg6J*lq?u~A|Lc)bv>d?0*_$IAwc6Kdq==E&dMHOe@t;$ zS6HOYt&9-T8^mF>TtU~D*kRo9v!U?>32Yu8h{LwrW2sLiy}u)!``B?Il;DGp2kA6` z_j}K53^eZT=SjEE&)>#2lMoN@*yK_>Ki#wSOU8V3kASONvp8a-GJrlXdImP|i!(jr zk-|~-couYcY17@rtzn0Fh@G1apMGiZqWwiA^&FZ$*A|BKKt6u=3qIO zrTyW%*8Q^CMDLj!!$h3JJEAb^p*O+RttKxfekz#>$cMtH%Y*ex7PInUcn3Y!#rhY3 zd>HQ^^66L%7RzmjcxZT_aK1e~0NT4>W&$h-E*uDlE*&IC>(v6TobI{Vc`txz=iWv3 zph(i~-@+EsF9M#fqc+Co4gym}ZSh!J7t%*wonu=>dpa(ykFXuW-uKb+Y1VMCXT!0V z6^kZmWX1_N zFgfTGLR?ggVE<{7Aib8I%Dt!(0ZPf>{OW+{uxAz!Bjy`ne~fP~wx5Q1wxpT`S(ZLL zCd$mQt4D&+ji04exD+Y3JH_yl*vO370^x9cug$V4#WC^TDSM|`?@?f{=(gMJB_ULj 
zr|8L|(UtAp4fbg$cz`|UC&wUQ6!0VUxk&Gs3`TE;oi-MR*QVP}spElO`pyxyT!<2L zvJ1f}RyGywoS`DkK)<2JX~5&4EQlz1r9(BE77RY)aiY59nW?>fO`X8f_5H^JT&>FUaiRxWdX6pLCS83Sc>wd^&AJSQy#b4!8rCpQ4L+I#_&`+9nb zUA52!HJfl0^O=Cpd4u(15d8C?m}<)O;dX*PG;N7pvIxGngDuT|LUd?SGB<$;3qCT$ z)ZaG^_o*KW`zwPP1Dh0w#Gu&2vsPH6#n5P{D@K+e;N8`v@!d8=kPc|y#-@t&Zf>tz zl^OO^n6_60x36jTMHvag0Wlj#OcyMP;a&51PFD$g(d;K#qM(v#bFk2^8?FbCHLstSLQBH$2q}q|1wJoInlMN!uZ{7 zSHx;;%Vg}IaoB|=>KQ9z(k3{%>Me+>={PG+6HVvJ#RIm`YwUe7=unoH!)!rCEMxcN z0(-I&NUQ4X1#C4 z#ceqlV7j(PoN$cb=CsvXFD+l*6s@CB_>>C<*cs~_2ERok?a68&CF4@Ak^OZ&fKJS$ zmKmabgtp;kU#^|J!G*QOGwfW^l*vLz`-guDAvRW%F`mr|Azjc8Bec>J8OLt69vjI> zb-OA)m}Q3qydSYgoV3}?PvGF~JJI15d=Url^8Kds#r9j^d(Z410cWEXiQ}!)(fI3e zn8>vA)U8Ov-mhkriaD;GC7wUc{Ui5}wvob8sT$uJ7rqXl2aZoKvKvHu!kZi1c$0M% z_8!T^9cZ@g5+i($+Va|D$Hbwmy{~Wr_s_VvcFss#hTEUsM9j;}6}fzot~b{YZM~ou zmEcy4uZu%b`En!Iw+4V(jWJ7_E!x#$D!72)6$Qo>i;q^yxYPqD)W{N|V+CmSjHmoPmba(eC)_Da(nwNpqq~=2 z`0Z|Ob3!5R8ghYeH6c@cZuh^Fn?NXslwDHBdn%7VzT@KHt1Fe(>rV) zueXzT0_YC??#jv5Xlt^|IlL@+$;6gRwa3IkxkH8H53Vn^qA&qLL8H~HqSZ5OnK;xo zfPJyOVdChb*+iPR(|5b*s|`zS^9NoICCfWO^ABUNCpTE`6-2G1g3rw)Ars*O!c_pIl@sLP&SDWsE=n1yTm$n+xp?k>0}Y45lMr z0@W5iFvmg~gL}K67CzMz88wCbCU*J(FrzAePzE*wmkCh_XKk`XJ+BUk;xswWZWM6E z^JT-69>BQW zllhc=8G`IYM!+fGdOi|%$LQ`>K@w);#m1U`2cYg~*>HPHw5!##W})pAq|^HMw|bFe z;kWBmyXjvJ?!D)pd+j9w=~Om5TDHl?3Hr!nLrX^yzr{Jaz(xe^v%dE%sMCeF;iCTl z{o}!AAEzSh6EqjkyPi#?WP4B;Eiss~JkdNe;;UlQGFT|=%B(D#`2!F|KP_S{Q6=EY zu!Ry`vBa+V(eoxp24feTD&#+*E)Ooota84OiXCIET&bu#dHXP1DjK~qvaTdPmV zxES(XNNbIK)=#8E4UDCd>~jHEF%=&UUH<3gMfzpg-jL2?xV8G(^RUXr^MC-AVz_PX zeu1Xaw+Mngi($}7n8L9<7;29RFt2A>%KmKi~c&mOlV)X>(jI>}|-w&*#e_RJ6?{BkjGov>VgV?*#ql zwsy|+AP~r8Tl%|~fC~?{LT$64zY}{-h3-g>)PF#O4`93QP5g2O#) zb23bSpn+{bndP72xiprT&JPeg@uuP4$lCoO#*lL#z7t~52O14;i}vS?BaqL8yve~O z;cHKg;l9~?P`V0x7vy6=aYQhjH*MyCWi6Z5^G}1P!q+abH%@mto44@`(XJLpT9oyz z7?gEr#*iaZC)=54IEJ0@zw;%avs9$P%#PCLh?QNm~NFvtwNsW1Dxy z4__vQSQ=@~&x3=SOiFUCn}GKoe!|MQd+fr%n&9U1W5U3epqH>OM%&|(Q&HY7|D&h> 
zU^2c&A_86)J8@cHfu8HY@~Y}~~$>7#tb+g$>r%Ke)xS)@0Pxy%$RzBGV1#JbhS>p&cAFSVWr+90m0oUWE_KFL zvBc3ui{#JdZgEgCqOb)cLck^V_15QVBvhgZb?ZcjPt1=UD>9OeHltLgs~-f({}qS*&o+SeLDap6 z{VYT^34^u|v0nu|S6gIz82|q_hV-*@$OjKV98lw6U8Adw>FoEq>(YbOqbyB) zSEor+HjxCWv2Mt6+bo(EMhJJu4Q`>J!fDYBYoZ&AZFy5Tw3&14Wb4t)Gs>R(1JSe~ zCBY7f^oN*|os@4qn|rb(-jijby$5^uupL4u5?xU(&t7}OgbJGZX*}Vs752YSas)@) zl`Xuu0nD{qwsblXlfrvLTKizHiKZHCENI3TLf&3RoL$}uh^m89?Lm>QY9jY)>^)&m zsi^V^t%3fV!?g(Jz%Pg)g-d5@JQNF3(vH=3^}UdEOwP8+i8eui9&OdCl~s!-3%f+) z@B#8;2ziT`*;>#0kgC74vIa%$Q6UP!Tx^Q15+Gx6COO957Ia-wf{4i7?}tGp5oY~) zqLmTDM%y0(?7~Ldk0A&a)JYGJ%$wU+!FEPJ2==GZGuLK51foKjAED`2f?gq4dl`+l z{|QDgPS#mV(Nx&KfXVRvAs~>}1Z }4>SViVc{D3aDTXnDJ>Y7g|^`@qeeY;}GU zk5cQ`ueHt{pk1HlO>L!+hE%GZ`>-cEv~0f36CLzRIrs7GA0cE~Jqe?`OFMeXx}oDO z(#cbDcU(_vL7>CD}Rav91p0a-UQfn?6U6{d^*;WC{-#=7dO6>D)4i(q+<6+jiJJ6p4 z%^0@tHB7J5#B+Tg&Boypk;cWLj9|yHnj7ap%FoZODYMIZ0MuQJ$J#rheN;ubIbz=k z>#M=NKAAg|*|46T`cUyy+aQ{!+NQld_h(2dz&=A!l!^ph19lAwieT403Wq9p4&6D* z{v86G8ZEG~y*w+=vMs%|SI||^OxXS-9KI}pXR`*qVW4k1i=`f-z3PJhzVr56;~)uO%p9tHXKq@e4T z8DS-vD@+F@WY|qlL3)d|&ZEU$#bH(lSi$={Y=4nGE0%hxX!khg$H{`UjyqVeQ4VTsT}! z76(s%qdoX6JbsCZ;#6C>p|)^YZDFsTExz7+VioLefX@87W9Jh(>Qlr`xh{6K3p}-Hj1WB3e`=+ErMYe>Q`@- zT^5NY&>`V%ad>>lGW(05hF^w_5lN}FV`f=qf@cccOqSTof`|@ooNk*$qbOdyvhCdy z>B?TyDkC}3(+y-sv0F5Xg>I$KOLB;wnRv{&O^_0qh*`2|IwGM!!Ip}I!bOn0vu)-C zv_mpHwA{PeTJ?ogAInpEk^NQByJlY*+mnlg(Jna!72XNc1`lbo-JgPtdQhQxE9)(! 
z<`#2w5z>d0#Yl8`CKve0}(WZ*@pb|k?+l|2lR42A`B=MTI z&%xvYL`~e+u$l~LFTHD8-(<_qjA6F;vg}J?hOJEBStnIn%Pbhw*S(Fc5=l7+3g=nJ zY^Q5C&bRfV^;sCnR_FaWfD=|$xKm-UV`vrlwK^gZMd9M|tp1SRsLYzRc8#EFmFs1$ z-C0*L!lvmAfOvenhauZ+&r8Aps~fVhZs^dup~d#PI6+5_wnL)5dHCx|vr}_jrn^CT zqP@=JV@hpS9Ok8_*~fz3WjkisCqgKsMGVWB3D!T)3;Jr&82fv^)6qrCZPoy2+P-fU zug$*%sDZAlV##;@Ku?#3726}Cz4Cs1lRpmO;VEnCH?Ok(FCcZb`^jJLO@lmjv~-m2 znw>Wo=xa-&G6mm+E5-60EI+5&&NwEvHZ}z>BBxXGxYu^lOI}+21iTL~v;4);J=@qbLL|hzI@=2V2Jr648<=e$#9{5|iMB5ekI%2L{|Yj1J~V2~q1hYiS1ce( z(&42mt!1GXg0l)aX;~CPG|wB9WlIFThqx#fvDQVgDl;Q5SZ_i9KI>>UVv$hJKCPTg zza$uTa5;Acei8N$8|MMo@!K6k;HW{eqdsjYfKJWg2d!ANH}2Tti8eJ9)sCsOQqf-O zj=8)o3Bw%oL(RIH&>zAYLAs(>XcrHUl~h2lwHgu2&Gf3o1`AVZ{A+EfX8 zCr8)LjIJAQw~mA3)#6%%sl?a`Cvr^HByzr>zZzJOu}F$onOI&(KD#L?#03iTpj+=w z9jq=6kB^yWcTOZdt-Pg4Bik(6pHI7N<{R@TfxUX}(Xa%0^c7EBJ7%pj;?sj6YVcTgJ$+p2}mO)LXqnQ&{*#lAD3!AF8>jWf})FybeeO`RG=%-Nfw#o zgemxFRiXVP+7-&rPGZG!*EG*7qip8ef1K`Vqbn*ceFn6i0;VBzziDz_Y>gPuVzISN01p=3bQ^-Jf3CGZZ=<}G}R1af~}1~%rGa|J0YZ> z73=o@K<)qjD5*axH>l6IvuD9VL(GTrZI%FK>nWuc*xz1_!7hBg%LKhs*o-4S^&BuA z5g5}}?;dWy&y6Qo%_idu^E^KtGlTt_H$q71>2%wa`ADgmmBLKhD?s1K;<~{Cr=#VI zt(j<_3VgC8*t1$`m%IkgM^{_C39-;!B@Vf9$8FW3!!2IDh0DeKMoj8r#yH8ITjEEzIWpZaGkKu{w3@?2zu%02-ZC;FFGyqj5JPREHbRs zVi+1StFT=c&ExBvXc;0;ykEnkYpd*-I8;et)td9#1!d0oU?uScIoz;_PAyR$VU^NQws8VH~%aani4eAu2>08pYwZ}X%|!g zyr!BZ8FsrMGFCOi?UPUz_sBFiB^eqewn~^X2Ql^98q#{`^`iYf?3bLBm2H1m z?Fo}P&A6t@>AJ$%mM7W^WZ5*%R>m=t@(SB3?6)-|S((Whc>a}D?a6CkqcXcHoIP8n zpoY*4evoSg=-)l_EpaWR_fk9EW*KBf>s;6)%Ra4kVSKCZ3vpbTN3;3OOJ5JBbqBZ3 zw}GO)lWL2LZJl5^4c305q5%GJgC~zxPqX(#hfgz=q48-PV=a(omu^Ik#$?=RLj}CI z`r?TU!zeMR>@Jx>-9;=?m-~SLU&OaV`LGSa$pTE5br4WaJz78T&) z9I1YMDzpW{2Wf(x{WdJGmQUqG>m0{@b2&--i!kjTNaW4}D-=@km1T0CT`7o8I+rth z2cU7e#NHQ4vOG6QYxr)Mi{OwLnPdy1{P8i%Y_90AAKZv#(R-fwz|h&9&weV})A4!+v+t&JE|hH;zq6-`;h77C0i7Ta`jy!AS2 zv{U~9_R`w1jrx!v?IU!;t2QwN1zAY`8&8)jz}_K`Cgc6G#b`!Swk-vWLxrhFJHBa+D0YM!0{5ge~p7pVH$wR>Q=MaQnSAFRTI z_844|-XN~GXU_=yH-KpBjx9UZSwuiXk}E=KqFp0qlB@0fk7L=I++`OEk+OEoWNRea 
zJ3L|gQhPEE|4an>)Az>8$>K`By(sKjb`7pprtE`3zkgC}t3=W-fnUUFGU@wN9Lm=A zFtf`G_XDZ+(3&F45bf2brxn&@N=z3f_CdiN7eQM}wlLbqLl-dDuLAri?S>tX2qTxSDB z(xSn-NARKm{ZfGOYw{tdb2(;w^DwlxV}Kk@oc9&j2cEoE@V2xP$CdBN)>_v%R{GUr~eR3%(*W+Qub$YT*WL!cCm5sB*eRt^OVWrc=bCb z?bvK>e~cBv0+Dd$_PjV=2$3dOl=c1zMmTRTou)0Th2yM`m?&~|&2p<1?Qi|Q1%Zih zC5w=&{|DD=;>3u+1;XU08?x0Fil&$O8JJ?3KRe(>^G-Iq#af9Z z1HVI+cG<50`h*+JvK#9tNV3~+w@#sipGU0p?EK%5Fc|U8vu%}KE#zZ2AB(-a1*z%q zk`2}`Oh~>w-ff{!b4=y!`rl)iebpRmBaAY^)vr8(@R2QG$+5~>oYXwlf!SHfgRIlZ zFv9-P5e9Cf8C)~PMeUyLY^z9r`8p5A#szfZrNUmL$SVFXhtu7ywp$!;OBaq>>IG?w z{BWQC2k5KMHrm2~j$KS7T%FGICrOI#oS`RM%hLef(7}uk>n!Yg^yp=SPmf94>CyZy z9Q0;7v(%@~0HOZ2iT0LASEd~={iSCDQ6{=*u{|%^2UC8{TJGg75QE0m4P}c`f3{2< zS0XQ&pRt|7lyhwQBSHD-PH98yQ zpTN+OTb>n&M~1+2!4O*+Lb|F6X6xsigH(#I9fFH~XjeRs@U*Gc|9sC%+IAe7oppi3 zo#S^F;adDfL8`8qPdr_F;6fMj*^$S`Ru=&&_bBJtmtE|1gbT!9T;j}95xYj4S$a~blyWZ*yWc( zdV`XJsnhwF!Jq<*Z4Du{am~{%zQ$7~vM18vI%pq9xRt`_BaZj2A(G)vVM;wbtIA%x9-6lx zEqVJUdr!byki<>x9fDymGK%{`j~5UpPh883(;c$pU+D} zh(%xm^UXp*FCKf|B>PR6mPNRY_10~%oK0FbwCitoQEqZy>nPf@5ygG9-FOGs`?q%J zJg((i8*wP8zG8|sYzXZSBLV03vjlzAvKZh{tIb`WHoB_Nz7x$;%zSr7y4`oT3yyQc z|I~Y2KtPBzTM+_At!_sm) zxv|rE#B&@V8Vx?!_~8tDpot3(ZZ5G4o4R24SXPm53Q$M1YGrf{hUatj%=e1v-NM&3 z+3s!TxsOj_883&h zcFefi(It4L@T4g?t+Iz(0%%cm-BcST+WWj0r#+{%0=j0q<}!UR6L#_A{Akn*`tysf z9gfw$UC6_Fypgxt}!NXkHAw*$Q8I_Qkvk`fbrp9wlQsMtPr0=H(?}Kg+{= z@c}q!%M>anJ1bB36JHj~3&aLA+Y%oHqkTF*(JMuVUAU-3kIEi`(GOVc>43C||?av~;(!Tlz4iNT7w|#!LjSS&ei-*;3 zA;eo-q^Q)G&HadIR|(hL{9xM8;&@s41zfnkx?K$GEZ!CiqjFM2;jC+fc&_E%jx2`0?H(x(uMY14`BF3C&q3tqq(tXn1qWi>qpvAL4W&okqvKGVxSYQjV=Ap z&wOoDQqv_(90qI;*Y#C*Om#QBMx(h+E)UUDNn9f_jbWR zS!|~ZAQ*)0?8~AH3cuCMz_*AwUiBH5Z z%j!ioN!UBky-ziHGR8>DWFT9i7%0g?V;v#J9bjIQZ1V-YDje6@p8rSHd4OkKU4OjR zZQXj*wr=adDgp{P>R$l?Q9uf~>Zn;HAt6aKK&T^w5vBsNH-r@+Bq0f5s{yO6MR0<( z_6;E*0&3N^YHR!de1GTizO9esIp1^Fx#PXRJI*~P0S9oz`-{g24JSf02s#I9_Cj&V zjmc<|?GPY?{f*NPPMcsCJ|54nLx;B3AwX`d7xQ7hN08opeU`l}lFAR|72uyllVPSV z%8z@(1Cp@Gb>frIUUQanW9(dE5~Z^7`B@^YhkRXZ7AEfxr(3sQLwcpO<2w1NI84Um 
z(^&xqiy6-(LbHnUtU}Po0Nv{kwU&!Vv7c<4Y1Q?bmNs@U z8RQ+48X4tFNe!_pYh$a5?W8n^TA6U$&@32QTmB>>?GC#>J*45}ps^m65+I{0@=38) zdf0u$@lv`oDQ7wZahn!p*>4pXhO{eazhINa^Hxn|Lv1y}#6P&T!oEmkR=`3d&3a_S zBV%Zx(8`2q+XK@#99TEaUJ(q_4Z79Kw%K3BA-7~|G&9rb17)M^Rna6wEwKirs`~~z z0rXZfa5F6zW z2~z1iJ>f6N;7Q!|g>a}LhE{Uz$3>npcW5Sl)S>B<=oXuY9F{7Erh+ZI(H|6j)*7b;eVlB zE9{LmfVO*7@Yt7-jpGV!9B=q7Osc$Z=4g9FG;;F#xvjN-#sMA3({}-46|h%*Q?!d% zfM%FuuaFka+(>SI)|2in-dk*c5+JFrx_|tf2jE$7ymbhmFf%>LdOc4F!!{37@wt{K z#CX}Y#;QfCOE#fX7wFcBldQHFKBY$c)YBw~!EPG|jbw##np-45 zKt9$bKNPL$e3i8u4;eN(5cSJ%TQv?CquJqi_O;;`*n_xbwmZApfyp z@NC=8P29<@7LOQ7Jb2HvH9}tJ~*BSH`(H3LQe ztpVy}iu0uIGd&^qkQ2EGxnx!%e?8KQ?C%(1a zjSvMU6-6t)W4{=_Ht2>G*eqeHj#7ob1uFyO3toyQ#d}K# zr6pp^X4|?7Xr$2&)tUrIMg!MjF>q%H<2J3v9$xBUG}*oxFW5QDfd1Qi7}fiHs}=*v zqVW5{A31Na`6^mjOo9%Cc zG(WfU+u}M<7&9E7=dj#t5{s;kPMdoeE!*AoK(gX@Xg_Fx zc2$>w`I&iLQio=8eQg(tjm&#XI61pWfNJw|lM?q(QsvkX^R)d@b>7zFJhM&9i{{$1;zZrRq%g*23X-`tZTEA>awuTR-dVGAqxIY78Gz9eMupd^#i1zqXMW&i z&x{G6^24n*UC_tn&N>v#R@=r90Uu`#d`JHhAi4d9$-3Js@%XXI(Ym@W5XUC6Is@p=f5%dObCStQy! 
zN0(TUUB@5(4jvMxMYbaD67qZ_zuAp8R2-L-1&}?Lh!9y1t+G0CC@r^XVUGR$bpWMB zY8s+Lr5YE+8kcCl_MMo--aU>_*L`nz>>Z1WEK{`CFe8b>hf{wK_QozKEW&^a4yMEk zjZnr-XrDL1R2c_RgG74GaDSJVjSmU?w|La7sCm=m6ubV9AS6q|qrhXLJ)@y%+b7yl zVXxznvJ%VK84~8{IOnT?7#6$`KmIMkC=*SMOGJ7qoqXI7AWo}!jbhD$YL=n&@7i6S zst^9wL?Trxx4VO}TLMxaV+$umG+Co6{=>H+LpN~LiG76=-+@D6yu{Pv+tUMVE}pBx zE)NhHvzSok2$Jiend_`B5yYvbZGG2+WG3Rg_n;Ax8DgZ>2q4$+6n#JYz8`|hIbgZ) z15a~s<2E~K542ahClyNL=X$rG`sRI1o`LjK2BwMD3xH~ zKLbUFQDTs|)75q$kgSg$h{2^!%{F}{={b9ee$UcSJUOl=$z7-aY!#zwH{W{?QkPKx$ z4Dqa|M}1Qe?|Z`M3o`Z&a|h*AX#)m@}dA@^6l2MnP)$DNDKGo zceO783$gli#}|Z3=!*L2Wg=d_#c=`gJT=VD`7&hJNS{~RrGhFum-f}#s@K;Z-dq-K zjduvrfX$oM+sA><=PqQ{-<^MC@hDp$+8Z)2t;nhq@E&@3i=Zk~LkGVa!uaV_qFcXR zknw))9d=Y?_{FYg*Zl%lx~KmGp39z=PF6#Ni9X<7o0kZ>uLuM1mwZDIrF^kqR;m47 z0HKFssW#HIYYlX)a<-lR9dY~@*P)Fq z7D5JI-#_xtkag8;-ADiYUqG6YlUQ~esF;+9;q5XcH`6vIVEbFzS*sw~;^l>(V)6GN z|1Cs|gG&2Y917~%(>@jH?ZBP&aQi$2?ZW2kzX_sHhC?#S_NXZTpwqFqJZnNh@;ER; z49nEh4SOy?*)He){=6T0D2pSx8NEW#3zCCcTPBQ*+b3qCK)x;@cQ1O;@$tA!Jh}Ev z0%OD}&E6NLM>6#Uu-6Ik$mso#v!{d!MEknhR-Y6PjJ&&kB21^Wlr6BnCqt{tGwsw< zAZa2$(bG2JadD^{bA^0`9}N&oeli<%!l{H(Mr1<#Vgd4bFvmU->6OgOW&7IZG%&@r zzBtm}5A-$HTxZ*U1mC+amv7+5g5>bdTKh5)lEZHy<#a-PfZzyYjgTknL$dTUfL>bf zB(5-iC+r>8ljF)lt2q+}`Mx>Q4xbf|o6mr^8_ov%ptxUGwK!TlCmw_MN{j8E!qky3 zZi)TiTnCYRoOcAMm6Xy?Ixn6oQc8PG7!jibo-}*@e20vof#~*c6C~x~>Y294;z5NR z8oe**!pr zLY`}^uF)n5QlQ4}TG76Qj=a#&5!*xBtR1D??(P-}3Y1dDjjVcefckcbrC(-&-FX$U zsP67L*8gXa^hTCc#c~B)P%U*>D846-OZMJGOS&55P0NKjenx~n^tz6A@iidimMsJ> zM(+&}EgoHmj1I6cgEzV9_O+l7Wc(CzJU;tc7*sPGJ0tT&Bj25O+IEp%WIw$_IsH11 zcA$2hrL>8MMW50yggsB*o6xP#UGI^x%Vx7hyR2-ez7ay!BJJ?e$^5xP1~=bf%MOlS1BZ_sT<|JuQX{#bGdd{vo>GW}fqZgi;8q)p9g`o*=0K%UKl7v!})I)HsU3 z#94^I$j{?-Xo`IsGJr9-X`OYsi}=)uON8x?;yvn=6t+@se_|Vxv_E zQgyV~3UdpG=VA0l9O_zxA(?0Hh9*b00IRGf04`!!ISIIIxaq(gcNZ`v49>9LBE1!Q zWahKnoF#_OglO_`8<%gt?+S}T4lk&--rb>MvL}Lq^iP?>qBwW8q4=I+HpMqVXUmQ8A2{$PEi^k%n!ssdc2vH z^o+;M7;c{kc}(6?aQoTeUKj|T%oJQInl|97YETl>Vty|$O-py+PsjS+=TQ$WnPmL} zePk3HJJA|jvN^rciUhrg4BjPj@fYG=*QTxAe?NgBvg=Ha- 
zZ3OT4`Vr`@dQTU-^Oqpii}z&9R@*iGf#kS-fjuz5Q#!+iWh&`6k@gOia65UHhfg*~nSxBkGZgXRTOn&(e#p1-Mi z{zyCh5rVwb?PD`-l%SW2`7#{2J_;t6*3Ap8TC|7VmxR^v%YOx?nn}1kE3kVW1BJ$A zIrBR~AKdv#dC@1R^W*Ut-B`0$1&9@Z>|Uw%2f;9;BFik(pMY`ffPNXO=dzJ=#gm=~ zCS+yacSeBB4bxb9?G2ETHh{-q*Ztaah%TYl#+Gf?Ec&UK^m?pvTI9H~*q(pN^VImP z;%r>dhlL#7A!+|4IR1@_Z*jjA&tu7KIG?GLlHt%)vV7e=&^yYLY`SQAC5wX)YX||& zV;9>8qUljAcMVU03|~Q+_~0VjHi)PESO5-5g{E;(Nm{MP9rhnFG^nDBDmyU^c+9+q zUuNj7ge=;es~{hYojTY9Lf*=Kcyf_x&xwI_ylt$qVgavIv^Gn#3kSvHXUUkqT`LAs z#p=h{E22p|MAvbz3((=*OX>kV|EwS#mJgUh{Q22S3H-q#zHdu~z1Gs1`f%f)Ua~)n zNB){jzY|RZHO;c28S$)GWG}KhVf`9r*|@=wYKn%9dhPxPK|*%Sw?B&Xst-b|P{^f8 zPitCkBeK9=SswW6wcr`qaJ+E_pd?sm7l{$IGDbMcZJ?m%z=d79y&r<*a0V^3e+g1t z2KRZn_Sg`RXaCk1>FV1Qaj4qi$uC;J9B6Xpq)>Xuj|kF|k?o=^0WXGE6=_x=OulTU z(IlA`QgS+=GuMv;WbpRp9mNlFiOY{BYK&RYGsAKAKpspABTmP5S6hW3g|XioWfOh_ zpt`DxLww?UjRj&8Nzhj46J2d%2f1%$iQyXNq1y`m6IDFsRC z7y=fcl-^xzj7ajqE;F^6BjnRW_tebj@aQwK)J*7P4=#dMChXVkTLF*jm&p0{<}g?; zX7q4xy>)yVP7tF8)9&fQ)LrfyP-yvgBAC-s9JT(I;Gk?PWp+82KV~DdLSYEfJYkgm zLp0f-B-OgG#-4Zvs3yd482Tm-WHZ-pf6f!}Ce*HY9@=GLtyy4?CU8q-jxs9TxO5UD@Tnl6d^+)#{GYI4+dYycS{qkur{P2FTN`i05To*S=jFt1OGh zVX0CXU2Z=;GMC78!*~q=U{<(k!Y`XN>UNW|Q>gg&FpD zaXkJo&S0;c0Hj4bckXQe6z%=hL+>a0PV}gdJ_>R%RW745_NkbN z%TK+|a$f?txbN26D?%!dGck^pPoD<#^3rk!=MJ}X#R%V6S;3jYt4Qp*;UC2=QxLhL z;yB7G1-u_pdA-$UI*{tc8kgH9(FDmkC|~#&XLvG>Ni*#iGvmqnvoG%=Nbzi1)m(GrDnq4~=%&(_g1}Zv` zF6PZQTCtg9D*k)w6ami-%-+uZ@CMh6-;RM1Br9%1uY1l5S+B}e-u>%P#V zXg69_t>g2@#i77%O?7m3v5;4^Y3op%6(IJ#Wh3rqi{hcOclf9v{iOGB(?ruGnLMbl z(Tn5p3ez%dfgtf(Uzl!d6G7~B?2w>W@L|1y=(5C9>S~PN|C55sRfaFC1Zbt~WV@mQ zlA6XUSHxD8#41be!DE~aBV&z=qyjrn9HQTOt!)xXHaixfPw~ePlb>$16dYq$F7@D@ ztC^@DT?X(L*s(CvmI{(|i1gQ*mjg5~t1IpL6;3yAT4%o$t&vx?rygDOdP^09Mr9jD zFMKZKmF&sMlbyQ~jP%DYp4fki<3ICg<{J||C>SHIr`ubK-fxXC#UB{A)HWo-24T;_ z9;)=P&Zq|zt%7C{{*P_m7BNZOJa(BCuZCt>@YV#*Ti5fi#!AKVvij#D^VBt9TCc-R zc5fA=53ANKD-SN6Z%-#UXlCPt#LC3+mvZGHl?Ox|p}gem>t^2s)8Ews1rLmYLy zCFn`90J_FL5ccAdQgGXuZGQ_14!*SDK;0%gyP61m^06@WKiibc#iy$<%reAA3wR+t 
zIVIRHr~$L0g0~~F6{U7GL^v|z#lz!XvU6*Q;8MI>^IpC65q2qLQVu1S$Hbuu7!9-O zA(RCdHfave+iYLf#^WEJP>jxybz29=CCK9)@*JV-c7^2&GumWa>8(i&**y&~=qr3691%$vx58`XKFUsLgh8XA5R_%t z2~dT?rqwng1Qco8P$uBDJuOm@2bI(Vn14&+NNy0#h27 z{|lKbiv@|A!eQbz(NvDl;!1l}z$Na%*^>|rL8=wes{gpnFg#lnoU&|L0(VVH9&TR? zlW(kYs@ax!T(-$`tW21&=J7T5_*SP6&!1^O+y+hgoU*-Sr@jn8PL^^t+@C)oNLcim z{n}SNQ6>+G=ZYo(n>1;5NC2UKgO(zi3a^3?nyD!zTff4+VuUfo$;ICj;hY+=D*1a8 z;a;ikTAQD^-QzO}$wl(d1j8V%8pWJ%J;kAL{cP=30jec0KY!Q(?K!39Qhc1WBWH zX?Y@O`-C+6=^qL50wOi^MRw`VI0yfk3`hG=9DYq1+4iTmAj7nbhOn+1P*?g+Tq4|` zWd9QB`SHb?zH5^`{WctLH(i*f+Z-{FG%JbM%c}*Lc=xQ~Iu@-*`sj>zV0lwwMr*5` zFO2jlycx-a8wUl!w0u@FZl*l-t*a?+bN;)5svoaq)=NdSX#S=+1@ zaEZAsk=cn|;&|%~;`Zc>_Z;#beB_=Lc9WnN6)9^<8yEBpX5QKYfA#2`7UW0tbV zlk@B7n)@^=OCIzPb(ktT>=Se)qO*n7iRftH+DUtff`|{N*(8xnL6M3QI>U6`1e+qJ z_XFzI+4h?cJ^q`;HZ737Gp*B~;L=#=giJ@(dz>JRUXX#FQLTVCOZ3Sow^ng{bU${b zo%0dMOW;~L-F__Wm6cndZ0jh725xngEey2w9+&(XK84Dk1iy*N0n!dxC{z9?0sElu zd-lhkYjmU}D=K%35w;N8XG&eL%0`Oo!gSM zZO+fi#IEtX37&i)MvIhh61+Y<`5Z2vsTc1fEnP57o=*QD;9Y>Hts@-Pek=~f$p80# z(Lu(W+gU~n(Az%?4cBa&Dh6>sT{qGeiguBBT~V*^sT~aAMK~1thiLL*^S+K7)3*D8 z-kgzRm7QAPp7>vC7v9+qfo)=Wd)h@8({|1dTUGX<2(lbYshXbDk)(Nd07WS6u z-rFwxdptIZM@iORn9%GuX7Tu_Ob9V#-as!}v{ttf|A6T~xjx(y=G*vhVK6c5tl~#r z6aC0Hi{%yOeWV=`MAS^)Tb=kFG+l<*2vjZwh{rdX&xhS0=(1y*3gzDr%s^<`nq=!k zQe0N&*fsz3JP(W?YiIt;>DJM6?B+l>t;@F)zlTp*sri}d)=$7Q=WJn=Jts`r(Tj(2 zD-Cf`d@Ho6|MrBOY>cyg0p#n7_nH3?LS;2^bN-)?-p*pN{MW&1K-yIAibH)fFyhhW z2c3^KMTgF9?HxgvqA-nWTyY$j22bIDW4v`1B4KvZW*aHm+c_CK4>>k51h$k`+T=ht zPZ(#H{*Z`Nglptfv=nahm?lli@ty^oflZ5S zf?$|~cqS8VWY;I6^d-t(L8PO**`jcT-una?48f$|*efEb#o_9?_UjX&qgF<>WQ_gg zBp^MKku)gDrk)INxwv&pvVFp&%jYcOlvAL+Q6pDLn9~=BoLCpjym`H#HxF9fDAykm z!+S>7)(bPUY~-o1kf~Q+n=6ubPRh<@(mVAu5P4DG!wtO15OhJL4`;U|@UFRhe$V(3 z;Ru$Cb?}ve&O(dkl|XmmdSUA6@jQ@lh24CHr-?PJvi+iw$^f?x&Dph{&Qtx!o*|$7P_~-~{Z$2QfDR z^||<`AjN0u9{*xb$Sj1_5qmJe=CKQ#$JSf2pbs_l()jF@i$T&M%zk!>(>i;-OthCD zTQS{6grLG=rolZMTLRO85uQTCH7JP2ib zy{5ZH&&eTKLi>ZTON1gFhMH`5A_h+n*)a&aDEF~p&avg!!tloIbhDk%CY~V{yXx!$Vb2gV 
zFS^p`ElwB{9HzW0h&1^O+Vif5CM!-=P)@x@$Rpm2&+Xcu$6*00$(@44#p{39us6_ z%)-*d^Kpm`T3N5QA!3nD3JaC#qG`P=uCy5B=q zAVeI<;pFMJIDPF6*V^1bH*Lgq`P=p!5fpC2%r*#CJA@US%Cnp=$nYcQ{D1W0p-4 zNyMRCXW!ie+G{(gsWIPLgpajVtt@Mt)YCaFB@etC-%?7)uS3JCZ+m6;zkfaAG=PA)HiOf~n??O}*ETzTtNr*ax zXEZ;*FP_Ulu2qr+eMZu@CpzbPL%cAYZ?k_2d98MB#MQzU%ez0GMFAK0WeJ$gM`4_x zE>r4eMM}rBVs+~@|4tF#rxsVDxvMt}COIrkY|%aqpD*``gaoJ^*~WdgKD+6dlRhX5wi2HPD1VilupPoPK8ytnlA1TCfYmM+@MDa=O! z|5ITru#Us2j;EpV53h?0X9@I#4k zwqP=Rf%75pTmYOaTGoTWFf~?;iZ-$(RxYN;k%^H-g5Dlobk=sqL-FA5ENA`;rlY%4 zrBKAp#Gd7&9NFx9Q;e{;NZ9C;s?HS;$8*(|xTB!P2JSXbe*~Ilj8#=d21r-h z&&8n-Ub4-y@d0ociC1m`(rFQP@}q&qwdQ?+ZYg`gN<>pXBw|r(Kll}p>={)pr4=ou z8|@Wwh`?%j`JP3#TToMLgMBKJ><2XQ>iaQI$MlERn>=<*5l3PzM!{^608#s*JbuFC z9xy`o)4K|Iqu|$ zk)>c=A^uVvM8LZJ3$|9kvr1~J$+h#IisM~#u|;yLFj;TsXymYHDtRBf>Lf@XwmPI| z8}NRDBU33{KHQv49GXb>Uih;8QZP(O>s)yUr+1!XuYq=BipQp@`f)m4Bs*%{>8aj5{>1OIwFe7i#kA%`+5z86iY@@~Qu+IEmf@%1O z8Vp3jEbjG+MSH&NgK!lq?43OTT{f)KXTwmJ$*(u>{u-=O9Fp%?lxFu0f#&zvT(YgX zcv^FDmHZ%%6O#zIPqR6qX{gA4;>r+!{SrGbhX8emUr*g1;NG&xUDA_+$2LhAB4Myz zk{fcZt+&fWyOcwc2Gd%Xi$NC@WHzninR!4o4yM5R=Mkn(SUyG%ST+iJXR;%c4VzcR z@!aS%mecKjrmwZL@}a3tQw2_WZxuPOC2ttBE z{9>L7L7(6!ZL(-@ag+>r++u4tDH-AG1Fz8FZr*PK_2g#gbB z|41cvsxTrH^76S`pj(S4+M}Y86mgQdk!=?AA23Q|#}$FdksB@i`UrS=+1ppKnH?Yo zNil4oaX2Z!*vg6QsV#S?r{&mis~s5z;3s)!d+YzS^SQ)cVo3sIA#d~i%AXG~x>TvN zuZ9zf%!4@VIB$gK(CuD3;kS_f3zba;JPcu#;Td=cr@b_pRomsyI)i%UL@)en#PG(% z_X#7~UKYoz9;=*VEdd^wHtUnpT08DJLTL$>$+90WgeX_GxxqFDI*%L8m}qZi4%%|< zug}NxWMN=e76WNOREaa~0Ri$jI=&j?VbEcyVc&?ZJt_{;=;;Ex87qlLh^?3#Td~c4 zEspY%DXcF7VBvYQ^(ghEe0w!W?iUO*SPfmL7x8^Ul-8=Tt)o_0$1=~0pIE)!EufFW zY%|9^*XK2Q&FdPI2f>H!y=| zsRcGt5lESn!nxBK;{idPx0VU9&Zl`%_7jDX>yi1RY@uipWhWJ6SgnAM?bIR}EBi`} z(1tQFiX~<{I02sjHoBx(pNXC%atVgZYI{=*mn#R8s4S^J5r+gQd!=OAH883PknB=qzL{nU9 zZc7( zltAaOPyJrBXM;-|du*!1=xtbu^?S)vC8eg?uSFwyewv(6Va@V&F-RJ{z!+~|1{gi# zsI;r65lZiHw_9t!34jAXUO)->V0##gZ~gcHu&$c!nbB}7m~U^I0Yu!$Cx~du;T(1| z24;KB1co-{&LRWf<>GkVlVvI=X_hC>Xj)=p1Ks*UxlIy{NV{e*y=)NhiVtS}zd6Kf 
zDVu0#%qA!(Iq-wjvp)V-+g$Mx>XRyzH${7Lc~if5jzjGY*DSN%g0vMkucP=iY_Ovt zoXKoJ)^NLfuID884%vauIxH@o@;>zqw>TCnqN^jz=GY6^IK3T@IZ?Fp2z39JS*LzK0=lYsJde zg{9VzNSn&C7Vq{harsQUGSE$ni>$wBdJ={FG)oq6@nTqv{$9|_i&d#iG;YmGmV>HV;iBHUyl4&ia6STYjy*N~jZpzBsu^`hfSsG`cvcXJQD+VRA8OgL; zmjP%vUNE9%T4+haRP505Q8raH5ewOT*vHEOh=^L2c6HNM0DXYPs>a5u)#TI8 zr*A3aHK1tkls)UX#iXR+eRJXy#SHht9H0;gUGyy7# zy=MKtWu+|_2T8lIr@C=9bU3pNTX?=fJ;f!1cO)**}CR zb`X7+%PnoX-Ler5ReL(kibQ(UUuD|!LP0`|t1S4M}GG z`2ByylFj{1cE_f8PLa#6ZI&huh3=ke*@?g%TiNIp+7fYmgk&Zs*$zSK7OPug@5O0s za&Ct2W&4tz55srjdB4gU&~Q6_3k)g|xldjxrS|87wC$^4TWEX9X=fYxFXF;WQ zi+GfX<3>Auo74L@PPVH8&5?uUh^8byldCyeTxb6hgM6dYqa3^FWlxvR%JyQ>1Ys#x ztN*(Nz4LZfpkLh_qJA=Fo%MailkqslMh7~L1<5;sZr{GG-SjGP7zvTFt49R9vAW>& z>2Ha!*>-soVYG7Zd#p~RYW>2C`?t=qR|KgcFTdypiT_@`cTz<^(E1p-VPm^9+5JbfVh{U5y! z3mIC<7TT48=2fS45$)qHw!TacLe6{x4pJR^S#LiVgK|*zD754L;9%z(UInfc@BxsN zstxTMVt56T3OR{t7N*pE_M8{J3GE|jK<=QzVX~1kQ9N(YET-g{f+W{Osau1i!jv4X zyUzY2z<)2DI@>uyq{Q%tepfdOdchdfO|$;Oq~v)Ds!Sm+`ePe*ZnGvq7vuqSO3<&t zOGbzyryu3@_L-mtG8*R=R*xjtu>6v)A5o$mVAfKZMJ7Ii!2B1Z#g5r0w#!i8*68w3oiC4yP$M3sYKi zagAHYYc5`I`C@uw4q$jZ8(>Ces;&n&B(lho9e@aH9zS;55nEeg=e`{;XK)hJXxjw7 zKZ*XFChScY**2->Ir+QdG49_QZ701083q&CR&BKl#i0@zXiT*g?H$Nt+2VIW{%KH5v~wrHC5f*7MM{1i)_7U;vZgDCwJ;!3DTsIAx_TX*;Spg-(#c{rRDQ_X1O5pM!%~$I{0e{ z5vbphndNAV5`kg4cY<{gd8{QkJ-<(wHY^;@dimi*;E*DI%!MH^JD2VC76IzTykD(7 zmi^)Y4CKTXrRMn_f)vuVoAn7KFLxe4#=XZbI_U8w6h1N933@AY)F8{goyDOM@K*mPPGuxpO zJ0vbuXq{W#I*;?mLqIpoD;E&ixp`7au{XqKEO9l!>${JIynk6gaMEz~r!c%C_$kqY z#GAwkQBX)uvuw3GAE_BN-Xb)Jxf>{Y!%iBC@-_uMN?A%K&k6hR!a%CcPekX1J>I&6$v2Co{&7d3X{n;2c9}>QI<{6z7cCWm z@>t^-`yvr1nX{^Y2vX+*GaC3`JFCuLc&b<<&;FK2H`R+QQ%t18>T{YsBjCk!;To>} zm!1X1@D_JS(<-|~9K?V4;fL)`(Jp>Lk-HNf8-hQre$n1eBu>i9R^_IAMFJY^iMCcJ zl2Qws*5ufi0qm*cqkY@gUP4;t)AlRTR6#~X<~KCYpKULSLl)0IYp;qV3)Y&I_Vd31 zG-zd>-JTF4h{fe5OBL{b9i;DLrm)v>U`CRCF6cu;H^jVCc=zvcsAKDz8TO-pKzkii zIMI<_?E_*VLf7v0s7T7;ptPN}{RZIq_To@CRgkdA_riJsFGP0rvBmU(IOH<0r|nN9 z%;6UMnr{iANPapz4~^DXTgCEU*gcszq_MNVgW+Wr;2XM3(6f(K;6-zVHG~Ks%)pMr 
za@!{8ZGb{(re*&#j)^hLPWu;_Di@#$wpuiT>sraS3V1<>msi;7-vguByfQi$;BSeY zEhY())w*Q@UJ`GGJK1t!>e+3e)rq8!7)|NP*6ZIOAHrNjb0>-4CwoIo`q%{K2hJdYaaT z1v-oS?J=S$q@a+?H9HP~uvp<`VRpHYH^Xo?2<-*ER1}VMMR2<~Uh1xe%oRDp$cRF* z4G&?*y!$Rl#AR7J%j*9NPv#7IQvx#~r`d0QNM8O4j;?&x@Cp7}JR}GnfL4vSGmnRZ z^pWL`}0B{d-8CS-_|9y%R#1j;9;#x)Xuq zJxFWbTLsAb;7c_&>?CNfcF)}GK~^q&Y#?Hre1pCCUz}y5boCk&hxDygi*3%y(8o@j zCdI0>ALXyTE527*CrljFR@zCYz(ZcDfE^9&YC2>z$q3}Brvm*a(kD|Eqo#`yL|Hal zm#0U?@lGtr;-h?bEIr`Ij=R$biQH-=v z6n3LH{Mfe7VBJ(8;KQ1a%x!H^^oHSWQNUs0z>9!kJVaUQr_C>RrTEn6K=lm!Akar= zj(Nq^Fw!IqDX=}FsX%k_0vU0?|6+$UcC3oCIYIy4b_=gQaxV24(dR@P z)~%wY_`Z-vty#SqueU_;0=N`N!}7uT;xM?_SdF*;69{98mby<+ZPB>IGD5tC*&9jw|_iop>X0Ja_i@Put_d-e9q+*_I~glJ%lm z*;bTb;1rn^??f?Zp?kYnt4KuIF$b@mp9=*2E-6NDhg5klo!N>|&B?L0|V z5x0QP3lPxyLWO-F=;m?r>?iF#QnV$xSiq~;qsV?4f(kM+^K6p<>6HwcgppB$Uvk5js46pRPSTeGI-t2j@%wfXmdP_X?VY$wnDNOR{zD*_!7KlMPSt!Pz z(-E5XVoIt<7obFtphHt??CwrZ?<=mfE~06r0ZGB7h$biDqu5jh2t&G zj+L2Z%WWQi&uX69d@bPd2kS~3UHm&dI5&rd?N)&>3iY6*_bN0$|HhQ(+UmP;deX(5~6Nsg@+&s(lwMAM=n_DnZV6SLgqO=h-(QkTMQ)sM5KYXR*E7Zokiy;o?^ha2^j(Hv+6Eel6&| z65Y4(q+1-XPh`e_PDsExgAAT87WC%g0d&7RSIPpv(W0rA#^ZVZw>TNPUiQCMue;)6zSB9DdfnJ{F~fc&1{q*| z-wqG(c-OYEjDa2x&7VeQ>P8zahPPSQB#weQJqV`CyyC?Tht&!Cu=6JsESqn82p%%z z>#DXu02y|^WXC@YNs+PYYWs<3A0}OK957vwBsoo6hS>H;JglXpk-J6vlb}n$?y{q; ze-uoPd#i0vAm3}WpZp50GG8&z9v1D*#TCD{_dj_Ij?a;gJ!W?bd0$A;oz0x>6wBpF zT)MA#o|OuEYxawNDSe-a$In8)%VrBB`?FR*I(tZ0|COE*jXwTB#BKie8ayNJ!>SJ@^(A{LBDVV-+k$R)zD zE;IPMAvSZg7WN+`5@Q?B&VS1DEn*mU747+AgWMhzL|#lap&i5dRi*6+v3G1OvMZ7Z z_6}zYpIVesSYZ9cJN8SflN%J{LmmZa}aK}IIy2W`=PR^2(~cRJq^CMgPbW1wZSR3=1{^k#6L*u=q{K%T7@K{@f!1k<1ys%LtuD+ep)@v(geMZsQxUs z>vO=A-n6*T@LAjQm=b=Nk*X&qe?+a0o z)}icS4h#Co;K52Q&9L55e*@3OP08n>V4>Yu0Egl;QM3EKXfnV`xU4jFD+GFJ$($hD zJYi%(nT7Au`b6k}Oh)``!eqn)JliK)V>UA1K{c!F_99r`3I(}YHgzb_^XB3;*M2$- z>?WC*>QJ&rsV_2g!m?$n%~B|NS+LsLyz$Kl1F4GtJd3%ISokSO8aMl$J~W@i?A z0!5JCcW+_;dFN%@WhEfmclTs=%iROmF=spPsRX=%nVq}#Mn?_&puQk3=4Wz=7Z&>=(sXiJ}hF()>@+=b<8fxvdyAhtS%2a 
zqQSF<*(oClK*H^-aD8%30Q%b5^`i*!9_J)ylobh+1>ThTrmPhr-L9=~&#{w71N_&) zP~-3oS7)W-5kcy8wom|3_BU48$zz~h%C7uQ?g=nfT@kApZ~X*`d8BrY4JwEB#_k{a z*-9A;!)x(isy!=2RN0{ZK(uF?!z^^uIH1plK@|~SP+p~w=A}|MI(K9YrWad zoak{$+FV|1mk4_MGXEA@#{|}~k&RDamIm$YRbd(wdnhPmO#-Oy_=)IsjIP~aPl=|5 zl5iTBZMVJPNjUk=w21e@C!<0dS3F=}_UFLh@^D3Tie*zg z6B;O&Z}(k-2>fYdv3($#5!<7S?Gx#e+=~NRAzy)DK8^>-!|mT<1<{vqop-}jFb&5# z7frX90BX36FWF$ga(V5K~ zx8+!g5S8ItJI%HP(6pG@`fUOK2^@Z5mi;RvLw}f6*vjbyhPgB-4otWD88E&6kr$G? zXM(9jOKFKcAeu_NvBJiRbfGXKDo^EDCKu0tnz`K2=U7chn~Gt+mqXgVY4)l}#Ld<> ze&FQC6$~Us}n-_$gco~(gi(xZYy}n{=OK9)8_%ZZ;mHqloSlLtLB2d zRk`z&J@XoIs7322tVzyu`tVCHSxhup?4D+$=R5n8ZF6j9pl{<*ah+(Q>{@A8Er8UZ znT(nIn*@-xse(N^%D*7VTFVDzv^9sA9I36h))0np{rW#(zgXx|fuGh?Te%?l_sX`V zBFR6}NV;@U$Z`EhjK*Ry)gjv1Yq(+k?Gm6j2AeH*PxHm_&ScR@%c@5j#q@?K;88R` z_`(V}VR%au4`&lM^n}A0~F0>UZJTjqq z_?i7eP@}L6-JX^4pspMlT`WiqwB)u&1gJ3P;G4@<=%(xoaeOZ5f}bq;Ld>d!MT__E zVjDyvImI-Mvfl~$ucZT5)7Py6l37b>O-tG4meLJ&vpAkv8uog4wPVZ0@-(r^nRZCf zGraW{>%H0oSsa$|EnX5}OUcxhk`=a5kV<7R*}g2AS`67fHQ&xz1Mp75Y6)*(gb9p1 zQ$891=Tf;gIe>@cm8N4A(R>fW%Keys@HyfJ0jn$Qp|wC4@Wa{m(`paI)Y5uZMz;uh z!8&cV$Al>um&^5*U*mvjd9D=*cuYJFRa-$V*t6|?n{5zcFybydGB7Yl8q=?e>4NZ4 zMW5Fp-tN#sv)>B(c;nrDL6*HIhL2soCZkz=d?b#fna)z@lsX5w>0CoE3V>7WwN_hC zh>P+d<^qLiDM1@=ff0zi?>tFlfdV*DpV3uHgtP%5(V*13V z{T4s70$U^=`NS$W#8wsCmH>GIbZAk%UC>A<5*%2#+446)ho3E8tWd-gBMeVzM{vh< z_C{E$CjQIqHi0mnI@yCl-X^-7dTWe5D-K2DxkN@LHY6e>$w_merEK!t(vt9LeClS8 z%?)bflH+ZUgZ67Bv zW<9g*mTg`ZmufFr9|4d2klX||hCr>5?XLm|i2|z}KKv^YH!mNf(Ux?}5)*t7DEKkpT)iaSTeST^0 z3R0U~M$;Ef@yHx*;DXIg-tLi`CzLmrudyzI)F7#8c~d~g;o4+&yb)1t={<<@Hl9RK{pq8soS4`!x_8Sd?(Edcj+dJFLBYw*KiiTp$~M3;oC zeg~J9px_SN0sd8xR_fHLQ(G&19YBq^ps=q4-8{a^{w|u~au1KA)9kD_K;AID)3^pp zvr~Q#$MY;q%C+tZ$llzx2=Ymb)m22TTahp})8P*UQDAK8YTG5+f3(Sw_tU?L;kn{I z*ap5ChYvC4R|_I|eiEOx&joyZMD~_$iAdkDGO021a?r|KGCmviA;G8fC)n%ndE!<)g?uTR7S=YFT|vM19qL!X z74En_9#bQu!(M1QCtp^Tb!_sU7z~MF(Q~2G{^U`0fO67DP7hs!@n*T0J~H(&%CzqjsWNlW zs+so}BG3ujx7t>b6jYR7WN(T_hRFHCID1Es8ltRnpnUzoRcr0c7#uJCerY3g5GL{d 
zjTJUsv=_={Gz0#l5ZJn**0P%kBbFW#R1ZB*WK2}#tG6igpD=Z=TQl}nNLcd zAVnNpu+(OX_OIS;9jrNow3NPJUxg4}3a1aZ?}dDBO4H{Ke^z!v3q0>GtnKMF@1^39 z8{cZ4hTJYhlRVnR9u=u(sGJ+C+-mzmJOt`|t0UyWcBOq9(sb!(Mf*tu8>_fh+zPFp z)=Ak+fn&!DYa(TRJPB42~U;@DX6wJbp`838(?*@@`bLH;nGYqt{*~;!#_w*Ws3Swph?- zqCPx9&SbRg6N~JIrVYmigaFy$hu&^F4DH!*P3BwoJH;crw??vdKm{rhCAt@>qW8;> z5QUK<+ZqKL#6LO;!)u9B81BYwg*cSbv?||f0-efN=dRD3-%?g$Pl)#4s*WxFblpA( zF=BOF?Qf#h5e4{0ee4T>*K^2@RhAQAWaq!yCJ1`9vA_`R2hR8_EJno6I%^O~M!7>c zj{Q);M`8OL?ZYoYWSg8s-{sh)UjY&PQ0WpY3-l-T6Ks)aau_UM(x-gwX=rj;KTh8H8&^4HvSFZKSIt_umk*ePu@BX7jB}FM#?lSj1Gq& z*_N&UhevB#mTyg>iH1u{9VedojYH%qWE!|pkkZI~y*(bnbVbgBDfbwxi4ONC3wqDR zYRBOG!&Zw!WezT%V(UU=^37m_b=kKB1;IyQt?FT68aQu|wTP5JwbdNF+DYFzBw;`H za<2(`=Wwbpf**n{{wFLtVF<<}a&4^;v*58Q5WioG?1F!JCLDv;a6z6FU~}0t&5?P6 zVX?tGeojmIrq~W~k-nvLnf+Nb&5$8Qfa|{pxa>PO@=B|lFy&)7 zhBa0o4lywciIR(eh8?vpu&h(!ky(Lq4`40gs557BW&C9#IC^wC!!A9QU>B-4I#_nK zFwu8z;n~;Q0_xu4S_ti-Z&<#GtUAd zKug(FOBPKK^PrSas|CpigC8imVGAu_+9xf0$7DMcqJs~Q=XCpQqI*YTa=L)s&%^|+TA2&GO{(Pc`*l=NLPCIqyDEMQ9NSgE%B+TRq#} z7w{rK#ai>4i-7#x`r`lT$MKNtTr|>u;;Fb7D!0_10+8(R$Z~sDbeOGa?P$2x+5~Ya zgx4{{ZKZ$@y&+-Ya!@R_aC{@jHaX=InCkh+QkcCTAk8r#qsTg5N~qUFx{th7Efz-EEc3m!)1P1z5O9Xh*hrD0{iU{fi8&DR(WW6bv%a= zJi9$XkQ#i_u+AP4?H$xJjr#2co)RzsD*Biksx?;|X|9E^7cUBsg3M$r;Pb;`qJ- zA4=n~^`)l_rPeAwVhq90-DinZdp2qrWRL%xQ2I$0rtL`qDv*(xo~w4ILGm52 ze7yA)^q<3neNhDUh$FCxJ)?t%$OAY2m4v)kBb`^~;9C3n%^v;W@@h-D1)A1+eGXPZ z%LP2w2QuukjvmCr;C%a604&c7?f6cFP_2Wrv1d|gm4b9{WQ=z;RbOj&cZQ`lV@*6r zG(|KuV8-v=5ODN`)po+I1bB1ADyww7uvZ+CwUm_G%-f)u2;N&_Kf4{$zXIegBSfR@ z;XB|VD;|Dm`N2Y-RRP*3TuVJK4y9suu)uEn1pt9**i;&J=AA&FJ_^!!*7B-w_{yy3 zb6354jJ+)$q8yzt!A9H#O}|DiEcGQk{%#mv9Q{0s1La?If#X^72w}2yPvD(PI2-%Y%2qOj{@%4x%E{c+d^-gt;Y6MtyJY9#6WrgmuKz!fq7n zy<*vx!5@m5L@RYo8bgehUPVIIFRajr(gtNQ~Q?ady)q9>h383Fr|amtXF#`IWSA@JNuT z*rG??am7L2<7>9qxMXTBZs!Y8YDRvVUbfZ>A|PJVqi4DY1EwgQ8rruXC7f`iq7jnmzl>vz!pDHL_@IF{Fp$~Vwp}C*_C(}9;+9?@8Z57vi=}%4U|grz&jrX> zw&{C|)+lD)%(4a{byjtqzLph_dmsXJ!z`W<$&RhxU_b5c5woyqLwi3aNLhNMXQ=|- 
zAp8hqGA>UXAKH%_tE~~BQu?9b2IzPQV|FRElOYT-_4N46zFr2`B5}J$fOPD% zWxe#P0M$Eu$L~M zZ)=|hctyMMuG`)argQstvZ`kwy<#2vYhbSsBg~Nc2x~;{Nk+@Vw6UCN4~pZJ zPviMtzLkmL4V5Fc*;9WBMpUexq}yo%l)7V*T_BPQVXT+U+V_72qN*7?N7~DxLn}Pi z+tv$G0z+yc|5w`^$?(Teuj%zX;S|8lZQMQ+2>pv98XwngDRB5g> zDohG5$j;{-cfC+Zv@+6Pt55TARF=7XZ4jaao=jnHNWdd=ZlJ5VGc({I`-wSYZL?@^ zj(CR5Pyb5{Zw`c?V_o}$y>dHha_zGKv0W!qNY7>xN=Z$dN8k=fz>~7>Pq8J!G~TYc zRu|$iDHYizF^@MWwqsUKe2AT%4fX|OXKXIU`Ql$FmS>e+%%ysF;jn!QBa{zyiMYs? zsIHLvgX(0Wct$E_{6lbLDsQ=6l0$6Tkli$sQjL%|OL_)Ij(FB1j@Pt1{wh1?0=;EY zn8VYpZ5|AliJ#_X`>n7_$KP{9rTslXUbM4@v2O%vWL>6q&UbnM*O0ZMy_6hwKDQP) zWPglunUxFDo;#OdBJ}J59u}>NhTDaLl>Z=>*YKff7J~WlC6-p&n}RMc#(z0qcrVFd zi+9J(cIOMkP`_7{*t4QFnt9@xVb2M9V{i_HDHJOX;bi`9olOuztaQ$ZZK(h?;NEu( zGgKnL@H+FPAcbV%h3=9{i}?x`b+G}_(!4D0sgKUuW@AOG zeQOpaYB~BZ0wP0Bj0JO=fMj5*;vVhSf}v~Xa}m2#n6i-mj$5r#2$7ocBfVKPxiD>S zvHu8YPJGOI6+?Qr^hLZeg5Drn0J5B^7stQ1@r(gQZX4{Fm_BhvGnt-HojUOUm6q4!k&5KOe+zh-=6AY`$c+tVn`p=Y5OP{b=M+mQ$jfT zHg3q*9ZHd4SWxTf9tU8!BB;0arSQFXJ7;kcTBNIxyTzoKE}d+bNE#+n2X`L}kmrfH zL-io~q+k%CYD1!Gn%!RJu`zxevzUNaW!I`*n=DV*YnrH=YyS|`;6(Izd27@IRqv0s zGGW>{+7u1NRP#77NE_@#EES|x2681*ghDe28L=pwVVMGCTZGE|A<yQ38IiETl4mlsITW)vuY8L%|%dv8F?kE^^j6B&?Z_|cRf6MG%@xAqA&e@(4 zj#>cisu|X2G!Oxr=1+}mj7+g~aS$(;kLxF*{RbrFU6#2r2l?j^qd&XXPGdZ~lmUf} zRdG8hNP{xp^LD8BSdf>UBPFC9;g*iB*Xx+m#(8{xUG46+?+76q=&}C&?LI+|t}|zj z`yUdAmX+y98!12&@E&avUs_a8#X$06>>sQRfn>$ni>=k~L(=z09-Lwy1i(jQxt%rM zlQk|MU^4@)W7CNCpUe{lwkd=h8M)o2O(2DTZ)M6&CU;*Gi?VZQwR0vy`)`B-YNeva zZ*T;9(D13Abi)wwY(a^eTWt4=_Lk}vEt@8Ln66wd4s`R?$6cn$3$ z5-FQTZnlJIm6d^m@aLxk!sJ?mb_h@8(6byg!&5$zVLL^tNwNy;^FSx67utn0oiD@S z_lx$H>{VEhtv-HTEJUG8r=h&{y8wf6gY##3DE*4>dHcB_Wi*dlWcLR;kC)G{hiKda z>Z-3v&}G$OtfpIj3ir>3OLb6jon-F{cnkFAEcsh=JmX{7$h&MVv=4;4I@q3oit}~d zbL~9ByaMSN=)1 zg#96#bu<62NA8FTDTZLfg@K)ym=PYQ=H6=eL-$gfxL&N29xO$&x zuV3R(+YwMVHrlv}T}YU>(M;>G2*9xD+10W{YG%bUX=H+^NDLwtXYUwhO(EjLy{!FW z_+BA)WLW>P9B~l1i}W1x?FB(~;e>IvNVI?Suz$-cjP>4QOW=9^^K=hUBB}v__B?98YiWPI9pBuLb>Tal%;E)q<{rH`}}7(v-ToTRJXW 
z&!jAc>BT=nZ;Ti8{?B+ZB~LH*=ZWPKB`_VkqSQ`V2FJ74mk7gQ!=wTuF3pZz()^2OU2cC4WSiUU)+)H_kaf&BU!%gno>Cjo5=@$VhH+tW}Q_B zTEp-6EzxQt?b|m6B?fkD_ym{P{Aw89eeqakzHJglEL5vy!yR!tfL?;-aog1 z6?$(Wb?=5Hkufl=g4NkSSHtx3`rx^YE8Zv8cpl6+tZVEQL62~9+T3IHt88I10&YWW zt)O}iS1NxLO>2;r@8hC@*0}~o7zw)6)O(QO_Kdh88irmoY>60Z`-wWPvt@!_CPS0D z*p?)xe{uA>|v0wj)6`97juGM-j2XATX4JGZ6VJkc~~Olmx~K*+y(iPgheZFbrb@x6;;*kk(k z+E(}|CMzciQkXUyo#`BeMZo^W4_}pvu}bMq}(5F}(HD{3P)caeTtO ztDW^)@2N4^x7ua}fELpN`ek!Ndks>=2)P50x*r^LIe|0D6j7?3W4pVB0er9gk~-sTL=day2TqM}7WJl*Q=u%t_+B z7+1kp?QG4lCwGAl2)bC&q6fEw;!wjLJ#1-+I}pb|MLG7dAVU>vlmqO-*8$!K?ns~s z_a|+|baDH#+gK&&nai9LH-PUZBRtAuse<1T!Kb7r@z=rUP(JF0iQ6)ha(FpPIQ+Vo zZREjdV%ZjHlz09;d>Tr29wv+SG;&&L(}i7 zfb)ZMzDt8fI=QXthf@?lTd+2(;;3|~Acgaj!{urSBT*KHW%mboaQO$NoZTH0WR7Fq z%4SyS?Vlkv=6n~}>F;~aI~UR2ar>oUSmo$gwl11?QbHtM|Fh`H5cHa1Ws*hDJ~5Do z7aK7<`hkN34Z6(!(81ffTplSvEs!)(J<1LXdN!Cfq)m?{!{sk}$w!3KA!yRj#=C{Q z3%bTRq~KKbV>pz0^QWjY#|0Q3vbFl6XfG$uBz;WK%VCGkIe|vwYcpTa54JIOU_3Dcg{-T zXtvP)Buqo-DdFrx(5gYzN*&pJBuGO8sQX3`O zTk`m@4K^+W>AdtG$)LP~G<&?+Q|ZQ{Su`ETb4DI~op}UA6^9J6=S7ko`+-@U`WyzAC-xU={k@3QXFd0i@x|uv^SrQHvStRLv8!+GK(K0G|UH^S$j+tra)OM zw7&|F%}wp@u#1j+#FSjEdAU&+rhQWjbjf`pgi?E6EvMTlCkXXwaxf#~HukDmWY$=j zVgD2DrJ`SgPR2YQ?yW}fK&l@{Pal0ak*&9^ZLrL5%dJmYysXFZ4BOFRkdC}e#jHWSN z>_7ciN7U)gyKJ@)QTZN>!M?`71B?&m+CR@qzxGh{yc4zKtU-`oY{m}8h`%|lOQ{;s zUS7wXVyhSQhKynF28%!TPKbSE-XiPqcY?ikn47rODumS&%P?++w*kC(iG#q1i1+n^ ze}E+7T3uP#U_o!o0R?=d&~Z$1IOfprx>6hjJybs3&iJR(cpS2`MSI3w2imzp`cPvm zR3zgLD~iKxSct_L4a(2sgeYE*{;_rb!+&{9jALMPPk@>pT;tye--d@qS@NHoPVvTsb zY_7HWug7I>F0fJo!kWq#>z(>6K~LI6uU8j`;9Xmyx47#Aq@5o|fwxJJR=ck4_4ecc zIh~0)^czIeI6NjnI%7+#ALuHZ%mo z?vIL3l};=hX{n;gV*g~u$YT3SFf26O?tqR{dY=ZD9;GAjk_Jp7doFj#u%p}GlMy6GU=+SZ8WP3x9QG_ls z-=g=0kRYRf+-98tO*`JLHOK!15C=6v-FBUFrib;<-~nB`0N4dBu|)zdH@ge_jUd^l z73W|Y@CzZAZRY|WrXBiLE5_1EhTV7;n7Oq$zaZuJvzY$dis3(_J383yLOy&WA3J51 zCk`?`nZYBpxdIYCz1X^)?KE1!cKWxS#zMV~5KRJBhqbm#Kx+a`%WF6T)m7BTV!Dj_ z@L`eKQ|z719{Kch9Asv&>OCWXoD>d-CkRluJlxF+bPk(}E4~At;g+3m*NG&`AU@o$ 
z2Rf5w%_--?_a+;NAM0BJj85W5*yn;S4=c+Wd*D2elnq0Tv(Sh$@1A~Fb0SixKdS5^c)eIXwzZUI9 z#o7IpCZTME?hzzo9FgZ^soFCpAHGxU308{{>^=oQae9NqkR5g*AuUZ#D}Qu|DMC6+r~G~*=kP-B2Ro^nw5qicDlM=KJWVkdE4kNiy8JMag>$3N^`R+NJ(6J z?pT0Q^Aut7(vj%G5H)287pE9yXbpgh-cd=*6H();BfN_EvMjQ*FcE53) zj%e@ufyW_9UzW@p14LE7o^SYBKO~eNf4CK#!WGhaVrgWqKp~GV8;f z9^<>eN-Q@|uGP1;M{;njAsgLaJbg)P4jtO!ry>bu6=s+qzpLWNKL9N z_Ls$w>>H*<6Z0yo7l$rxN7-+SCMUj0_&pNvLi1U%j{S*4&YrjK9%bLV9Oy%-2Nybp z`Sx>hJU$xzRw9TXjY|vljyf6GxGCGty@Ejc_dpe&GCXE5*-R138>Mm6j&U|y7}4od zwAipi-T6uw%oIKPSWIO2$mlkRSMNH?vbo~>2PhB+Vh2&m0j;~&{$q$ zvjk{~rjhgH)im#CK-F&CG0rJ9myPin5y*)r4JM&cG*Mgp!BfjVX;~&MMG&1as zvPYr)@eUX+?}18keE!ZPu41V+R2XRnvj*87=tRY0`@CFE07jVe>@Bfea+Wq) z!2Bc0NvzwTsin`)TeBTdz%;#9m_ETk#Q?kMK7gdFsnN^GE&(3hyrp^KNPF~t52XV- zKi1P`1$cPFs^;zGRv%zuS%Vy%{8y0P(zhY%;H(E=1mR-1!A-YS;!v6%sItxZV}Q}j zzsN>(^iVwoL+kEyL7Ju+xlZc@P4i;E7)9hWh3JbRL+t!OcIj*nh*bHDStse~R<;NS{Gd3qgNJwXBo;}^QH9|*!j`mA&sL4B3_OeL6kU@P~&b8{~ zU+e1-<*qh%eK(?n3RTycJqV=2S}1aEv)y`$;o$|GTKmpJaQtJevyh@#e`fAG0yD9(JhHpBNNU~P#H4|akC|zYi1uG8dg&}H&V3jL zVni<&M%rzHh#}jYbL=@GFO{oKE%wKVLtZhy*>8$wY~ri5*8U{mzlSUt{;qli?A7bY z)OSpfw(Egb%%hNG(Vvy&=c2uU{=A!GUhL2v4g#QC&2?BGA>zuJ^#`K;m#nK-{A^{| z-90?~vu)dKZ=mz>4taA=_!6ytSDig5Kqjf2=?)a_1+sY5ZO2S;T+qJ!NEaqy9tT?2 zK1QfkQnK5JA_CLR17dkiG1@i4)(cZsk)DiQ&BHPbG60^r5%T!0~hYlKy&i4&~5XfI1A5GX${6o;fSJ+`b*1~XHsif268 znn3RqIMw@>I7rXtMW@jZ3u?)yo5yHN#eQl}h@;-AnQg}eeJsX{d5}|aIt=;QGaijwxrJ=dej8w- zZjl@(lt1gavUI3q^IUB+#qi#Gcde}!@-k4twl4*djRWs;`=3DQp4HWf)phpBbFHyc zInv4h1<>1@4>`YOVb3hiz1NEW6D&QrtKf|k;rv=HK-_EQ7Rwm?G^eW&oWP&hq zG?mP^O@U4?9hZqls6<@D}J)X!Sz=JL`>|+>}Dz)O;kK z=E0JQO(k2JN`^L-Y_#KnNm&C5avN6_#c-{2QcUkLe1q`bu}usHeVx>r>Da=ls^Z>G z^L5w2AtxsNp`12kh!Z-VN9~0c7sH>G$j&|I{?W6NG0BvODq9+NWfPI+&`E9t~ZQekwim@a-i zBNG$-E5tyWCt~)cNH0u3GdA;=6tu?fm&?M$(#23aFXKJKECETsc_=2S6Prib+v3oe z_=aK-{5M3-O36*N%LfodIgj?SP9j|fEM;(F*iQ@weg#uK@eh^GJ5)O3 zQ0X%38HeM7Wu@qAc}NWZ6=Fvu$4)B(Q;Dt}t*c0HtjJ#GCYvn|*(a)($Y4cvhX8sUoTG!6{rwz9m3I^k;HBYap~2uxpw;vD-p%JR8n(I7vDt 
z25Gr{m}V;n0bEw@X$x$#u-8`Zu=XW`3$VLv9}0W+YHGe~&Ej}t7evpu;`YCg$MJ@e zlS6Cl2QLzX?!_nAS?pNe$G#aIBXi# z&@^hRl?##^TGf{Qk_YJwt7e=1_+_B?>-(rdqFTPuvc+R?^P|Luw3exCD1a7acITJ=;`JV8xhyw%lpP(6Gn&k;$}`H;_F_M z-*~E^#%UD~dFlkb?Yryl<=d~e;{Ng(^nGCpjdly8IXt!uaa7%Sn$?O9^(&3e=ZD%G z;;CyVak2FG0McSb7!wpgq#a{zOo>NIkr}ut0xknBJlbw71ye#1PY)j#?O`0-47CDb za*wn*$_2ddm}0p;|0smAI>4=-fPW_Lyu-d0@Bt1bxuH?S#o;Rr&a{$VCAu~z~l!xT0HqXa1}D<(H$i-f$&T?+C^ygnJb-OX+7 z)=^%H7VDNO;FV+qRa;D$Iu!S}KZy)i9h>CfnD=Lq*BkqrLXcTI&RL^dGk7S*;o>c0 zz}|@MZf<8+jCIIXU?OMiPYRMUfx(Xs<62`r%&@vk(A!gq9oXQYetmRR0k80X2f50f}fXWOL{2}iExapiV}XbO~VOD4^G1YMdt z+F4H_&nTY3mhr|(t(bfM_O?}+*#6q26t47GH_>_U-(HeIw+8^JEHVOIp_&r69!}nYYbWiS{R=ksd;A)IP4Q z`ZtBqLTvu??Vll07RO4#pc(EN0!I(H5#xcpcbBV1VtyFBvqA|zgO)-63^(iXg zNMfrPw6YuMYbu|N?gg~HG!MSF&A~N%DjJ08=mU#ww@Ad$xj)Cu_snmNxpP}yfyIOg z=94AAzEc8Z0*+(TdYPctGHTbkc0?SXIr;>nxVJ8VMPVOowFx3=vc@X(1vUz}^eF%H z81t5eFer=6`*ZyGxF9`#bPhJjLJ(RB@;`j~BF_hNh&a_<44~3(`U@vs7{ildbbIkV ze;&cH%I+=(g7oc*$BLE=IDgq504G%I>{EeIC9dwc!uV|k9R1W#4!CD2G$QV4*gJ0T zIO{0r?aDD@uDvPDFL6+ojB9sW2BNl2n@fxBK>?SBH80cEU6nA%R>qU=6-~C>HcEZ| zTU9`^<&uQ0V7~n*U>atKtR+1ztg*~?uyTY{q1r5mL)8wJ+E9_m(58L+HXZDskhkdz zoCuDr4)HeGJdr+63~1aW>sgq_TLBM=vtnE%4h^7j6`j=};Q7Q-i|w-z#*#I|Zd*wh zHPI#Fi=xRv9w2PGfER(vGY{8LsGBFAw{KUv9o@#NRnAFNOu!j~etIKU!=Vb9SxuYA z*y}4DvGg+;%n8p2JEdL%a5}V zFA*d3T#Vb+gTh|FIEo*`YFmSIv8iKIgsB#aUnA_j0CG9nxAWI}!sexGkCar}C4!1r z$@{pg*8#k)(Pp^bt{2CP>6o8o8G>rLh8mkFnv}VXo6_w40QN0?Z>R&8^&t#X^4!Q% z4diiPyaAeyzq`HtbiK0&a{#zQv?uA2fy)e+^K3QO-U79U#uPxAJpnvysG9C!2RY8$?qbEI=376`NXfWwo`; zo=swOE{iN%*uP*Md)Xg_TqG?N*7w;*MUgW^!yK(+PR zzXA9l&Bgg{pKB}B&&Xs9f(-XQ|5)|Hck+$8)NdXE?G4Dq>p(`>ii z3dNz-ct4h60|gK!^4T@pW(v|yk9V|fBE6kj&T$yE8*GR8G;1MU-|KZ~Z*xAoWH%tm zcmW5;JQf#&4EuGo%YFwL)`Y{kxRc?ic1a%l`R&~1Y_^BR)5m&sJ@%}kyPJF_7Y_yKlYmDr*oT z?NL5|TSfZ-(V(?|3ww*DV}~Zgdj1iHj~G8wxIZWlq;G@97@U9-Wag+SDII3s#Fwnlc z7f9JW9x5uvJW@Mh#5_3_{UOm|IJdZY)s@Uj@xAhWiu^2fvsm7&jZ^GLZ-cyk?QUj+ zB}gDo+xW5F`3{Ksqz&P2B~w8CUboHO5G~2@=g8h?UkJi~l=XbqgP2d&aA&qzkSwAt 
z`X?cb#!SgCwjujGD*NDSKHitT=a5h-TDJ|bd0E-vt<&tDWN3d}*1jxAzNB1WuZFP0 z+ZP=vU1LW=7`E!MjJ}_Uh<#%IQ0pQZv70BX`K)}aJuKLwzAHr$KE%T^4? zlb2tJrbvf3W;|7=;f8M%s`avv$r&!yT3wE))-zOxe%fc=H|h^5b!d1Lls#|yy+tk z$AM?eItIXl*7 z!#)@Ge#%E#|J;vTVM=kP^%e}pv5vt5!MUHnp}08S#c@y{5Y68GK}R%ACWjDX6oMc` zOUq$@@CRYS4wcNXo(ZSZvC~v4I{bEPBP-es+J)O^;(N*2j~8b!FCP;pG%$7+>_igF z-Mu~E)Eb^%fNaNwsX7m?XGL4I>ppFbz%#f~iyZ>1C)bu)wrHxwSmIk#Aml|oiL-3S z-ijSlnSiV+86l6yZMHXL)iipt{qQh}>DyRR4vf8du7wL z!_T4Xt#4H{e=2;y)ar73`b3KSoo(&Nu z*EDWiHi-@u^Ru5V;`j(|fAekjNr;TvjP?1lH8PLgYtdSKGRcYDkFB)n;%GcB=Cdb% zov(@Er58%k_j_M~DV<-LwG~aB*y+dZb^&i7o+M(+Vq}OJ%gDA#qCJ;ARcIpY4Jo-o z%d!sz{6ojObQ{YpeYG$7D_klPxgwov*9OS=XhNOocY=s2OQL_hsNeV0zOug4%O#U#_blp*%MXchpSitUX5KzdiPyXG)EMGP9FdHAsA)f=rW zKsK)2D%+SAxK6HRZ1Oimj($I!9ZrfC&x>rMnB*WWE{;Gx6Qs7h^NHD&{{VQ&H1~E6>aHdC%l*C8!@Y^2v{^}^nayw>8w_oW#0Rm-CpR#K;&0}G)K zZ{2R+5>3-Y_h}{eBSDg5vx>z(TjzlxZsXcCd*C|+hM_u@9ZL4Op3jbL#kNnhw`t>e z>wF%F(GbgG1@H?2iimtUlvt%;s6%xPzF4hB9Da^H2HO#lYVY|A?CWI21Dm3C#u?`m zO13-{OyPAGfbK{~?Z3s|>Q6(2VqH5<6!4}fOc@ZvLXsF>a#*+wy8sR)_sy`;f$Ugl z>qG_#YgVHJZLf<%!5wbD4ZE}71$b3=&9t6Ep;6}VpKSAlscocHG2DJ9NXk@QyY32M z`lgItWv5=~Q6r}qHFlez|EdNuxKK#6&SLqH!&tTqAg#1l#iMwhX|S9x6(WCb>ql9c zfY0`c4W*H$@De-kdoaB=C(7p?t0}ct#8D%UUTxb&dn2L$%g4dK55wz&KZINxki=M6 zEwb6d6ps=ao4NtQVF?blxh47(p`iOL|bjtM7N^ZDu7GVME8!=vVd*wEcC znjA1hTWh5PRI}fswpnCY;K_jCW{z9<5Q~d&ky?ATU3HB^x}|5zK-(s$aot$O0C=tq z$Oiy+471TO94yZNIa0#1l>cDq&3CCJaf!BdKT=clb4xxCvw)SeWFB)o#EML+5zL7L{|*ok&5 zL{7*D9WMq-?c(b^a%4YojQvDVa-(f-_Xz|&4xN{6+sm0PW3SY@D}+2bOtA8h!-*La zkJXK}Hw9_2d=}L)J1Ind+yJk!%dYpJ&e7wtM}>T1OG%9v*Qu*LR>LA=3C?}tQt(>{b!^j`!+pGTf|qTV_02Dm-~8MZ}s zcM>;l;Zo!QVb6qH44ZT#ko*%BYwcsv5>c8aBkYo!fXKp;rCMgJ-6w|PZWwL%i}qrQ zQc(3R4q-=UZL(z{A>%9_Db>}9K}&JaS!COiDOp?>+6^~*F~lz7{-m;Gt#0v&CuCV*X3f}-JF>-5AZzIKY`7#UmVSXx%2vRDw^VvR)PXP{lvba}!H-gQiTGsQ>7y+irfejxRf^c2;;2p~VUbBmpRk0(N)#u91?Qtwxy;}Aj`mvRhd>xFz6v90Cm zRG4}l-8$*$=DBt>z=0f2@uB-`NUIkCT*lGI>HtN7&XyU9=#Dt zHTW2Ft+q=DiH5LAS@QGNeA2M$Wk-@YI`@6%e!{&qaq7Z+^6dva@u9LQ_LOJ}Jw9>f 
z@zGOlm!Q`NXVW?MWr%vbvckG`B#ds%XBV33R|Nya@~pW!Vb?xR3{StWg74c4oxncS z_blX>RBFE#2g&hrV>?8vTOYPh0{h6SW!9y$v->=0D?}n56M+l~90(y-UT^Poc{ z2y;-t<>=v2%GhqWEVyudyMMIX=to7mqwrb!uKDz#J4=_?;EGF`r?@ zAqKBZs_mZwv~G0Gu$sl|Fnlk;@`81S#?`}v;;8Y;HdwQ0>VOdW+}Pdq5YSt`NCuK- z2s2&?W&?cQ!ywNp^3u#4Cl2D#tTDSrfP9bDuCu2^d%nFepqC}+`S5nIJVewR$eqcE zk1`DVJVHblKwmV=6(*lu>vcM3;{_=~PoU?9pv-~-S;1)YC_&zP3|KUiKNG`6kb{%s z0VbB|C4+q}NKQFenEZ!mq|<#0R-cmTFx_uYcK2+W*KBLvKG!k>q(%A;DzLeNG{A!~ zj%}(0ynUj#O6#mC#MXrzMzSyJ;RT_`!(wQU5CZa6D3#syhk}|OyW2B8A^mHxZym>$ zTlcNt=(1c~B3#$brimme_EDRM54RbDJ~!sY7&v^l&v*=$%My31ycdY$?a?tk-L4N2 zWKXYjxlYHPO(t=h=~ga|_r{*d%z_Qc80n2$WFp{@IG3JdGzTkcz2Dl3MqbPJ>v%GL z(78_SMSR+eedl_M3wW1dNR?&XWHIQJoqZl+^B5st3zP*<5F5GCD@nJ$PkPcF^aTtcZ zpR}(s*b?^Xvh!$d*OFtgOXtlo({oo7-p|Ju~N zqwHGIl&Wh^_R2+RKmfE^uf;kK(^hO38o;%PnqQk&~hlY+G^3^S8*5K5cUgu z#%%qWx<3=ce}H^caYc!w-_C-EAh<;?5E(j(g;qY#;9$5QX=tkN6$~zJv@gU&qGlgv z&pJCVMi}i8Ir$vNUn>%akrJJqmPGdNme?y|`WLn6MVl^!@JGilw}ql9t8q&abF}Rj zq>_1hG=F2Zhe>FDF{1*E78KiTWq^#|M8zcAEl3fp_h|Ni3aPP%a-(}qP6)y^TAZh9 zAZosAvOOZwYu+bP@_Aeg@7~0^)rtC2TPuz?F7_XAvMH<~yQIdh%WaLlXDto0)mDq+ zvG>g3#;GBRIZjz@7vvF+Sh)o}xA?Ar&j3yH7dI`Ktmisd?Yc*N&!msG{N02Rc)(!G z5g9c%zx>Fi->;LAG20in_k_bFhkiuVU7PF&`JRrpVp79!k-Z=enRb5Az7Xl%oj?R!hy(+<)-RXYLF@(a&dL^p|Y_yNwiDH^A=5cTdgjHrxpyhH$~GNiPZ~a<28LC(962BgB1voQ9d8#*`ojd zU$LqhU1(J)jLu0N%qruc06F@*2RkX~waF=FjsLSjVDcQm8sVg9{gjs4p8|U@%i_O^ zCWpvQ*;ae$MPTR?S_o%1lf|JtJhj)`o&cf?ZyW46c7NJ_D3VC=49tO?IRrqKpO?-! 
zvSor@CFnzu@hn%Sc5jHGuOOOP?+GGiV`a>Cz2u4WV;RMRSTYBg*f14Ogm&@EgoXhU zsUXn*!^HFav0ERLrHDqckU*bHew2FzX=s)m?5d9jl!>+tmdjru49WQrrrU1?yuVWm zi&>)_5`#wT*4dgxdfRerz?t!{J*am-%ND6IP+nnUM0*3WE|l?}cV6{~JIC0&B0Z3w zAoti-QQ}GacI|091C6~?`(r75YJlR!Vst*T%793z8%q1508(~ODH_OyAs4FU9+~Rq zzFvcqE{BP_8l6kd6IAV^rzUHKs8OH3)(`?n_E-+D+dNfdLt?hQFQ^|+P{XgXi-*DS z24GUkwW|ZpFThqCw&$)B_D07pncXBvL07$EMIwDEAGuYk$1d{*%|X!v^|1=2^*&!* zWs^lCD{B&7^8H!JOT=Gzu1y{8xJPkzq)_l?z;606-#!u6INDrjO`>TeM(Vc60M{`w z5Ko$F++w#K0i-0nim+735n>FncEz0^U(5+sD3%v~U>yfow~zGnwALUldd3Kbk%&)Z zwkGIIC!-{)X4+?hh;yQHlKo^cfCeRc6$43)N_u?5n-<}Zgb}Zt#+w+37 zWADB;I*>QqU|)$;Rckrq8Pl5JVY>A#K^OeGb{(#_cZ5|pW)MCxx)gtQEG!@AU88Ht zC1Ox!G-<81JH|Pru6>w}emveG7dTa1;dBu6)UkY=qS*stAWUNY6nj=QRY@zrD$zd$ zLL)`15%vYQPk`s*#nN$P`jRk3pBP(E=!*n8=C@Q!kQ;m+KZx* z>-dz46Z1-JRtP#&KFz*0nIOu+!4ijo0;Fjwn`$dW`&9Hu3h(tfxvUGpC(A1AwLpKi zWSCtz#j}q*8*H<5L6;;~2UYpPl4Mw|Ee`b2)idm@sh*7bcVyXrK+u!r6rhnha2nXV zrh7pe18u4p`nh+qe~3it%oL{mE2jgfT>>9hSf{upz@Vm-6ClTgM|jh}L(p@@NH{CD zzlq`VSa4~7&J2&TXA7pOOZN`9&SFq-_pbJmNXkMU*&wY6P+iDR@8X$+QqwnP71>Rq z5iyp-1K9Qfa1Wbl;{-xQ8SWk!r8u+z+f}E%R5o@gcEMbaO(6+x zwrfi)CXUD8grLfn2_ptujZ!Y9UK6BNh1yk}I?uxnl`c3`R%&Mm1~2V?P1!OB|41Q- zkcthLbkWhw-*OQ6syLy@Nx0;lEbQZ1qV zqeB9$zF>(?hECvIznmE(N!tg=4{ zQjtvx~(ebJoKtwoib;*|Dcxv=|Z*3b0IpNns&k z6~uQgwSEE=iMEvtq3;t66DNls`o7rsCGfOatJyoX^(%E(MQcPBC8;U7ICObVyeKcm za*bsON}2*rqQ?qQm;Q}aHeIw=TR&U7e<>JAdpvAIL@JAA({-e|Fu>31$2QO1WbX)) zBiaTrdtq5?E)<_@Wy0iw&gD9hDt}43x-PhG*M<3RKg=G%wMEfmH@)V@O?QZ z1Te;D+A0C69$B{H>;}cHiS|y2-Mny}B}7w_u6Xy(wB1#nQ9d_>2SgLJYXg(p8v9g` zKIqFie5R!?_rT~@bGtPPhObMSy<81a2Q_ZyXkP%?cGpl(v zUQFtdSCGmROPduS&hTXD@Z*p{!Hxx-ZQr??xCpy*Tgb>9{Ki!|wo-tKq`Xkf zDbg)9Ac`Z4=rCi1Wr^d(WuR)fM$lUdM-hCNej^5wXLB)nC>ggXR&1+l2|_d;U412z zTv(7`qR=j_bI58Jw>H*B&`ZTJAL_)GA&!<(5tk*z_sa2-otv*q*1@2qBFn_>1w1sb zfE~I$E(TTaz_;nRXm2L@a2!_;L|Bd(v8*&ph=@lRAi&?(v2 z1>ew`MTd4bT0;`Xd1d|KMnZ##+c9)sWuz09;mHd)Qlo#Cu~F51Y0^Q%cf|%4yp?@_~&}+A{>bGI!uRjT9sY`5Y=6DW5}Mg$RkNxpM4v(Qk-A#kh{P%S97eW`@_T)q3F%frEHj*>L^; 
z_-~2hW2uLx2m6&6j9}E=YD7{t(}bQ}{6?@v`k8vB_Ki3cc5?bs`}{ShQ_&PVwGlcr zSmV-MyH%Jrl9nTHbA?D%gcHJ*$(XxGU{vL(u+LxWUp|w}ft5R8(%${ztO3^xxWHVv zbHW=j$c{C?_1x)MYw0;vG}3E-#UkKug4FAd>+QcGRxuaysjtH(?}3Gko)-kjDmEm= ziUOUeZm?HHlhy9wyH+9O20^u}oSKYDRFv5p;`s04$pSkhL{U%lw$pwGN%s9U`L`GF zzU+usk5`lMI;_2H3Q?oq%f|v_nANmtRgrxoM25V&i%9Q;y~EyK!|yZY_a1oT)mA2w z)}z8$XJLyxX3s|3FD8|e5v9}L@CeOwtGG(~tg`Ir+%a~RnBljIn=-po*o%cfSO7zjcl9}|8_nDX8BncHy$;ZR$WC71H<3(N`<=Px^yeSfO)%Kwv zGHQc++gq(c**i*Yji9GxzbX4?Z;C@?R>jui4<0#kbuh+K1u0eb0qkV~+7O*K#`V?^ zlcC?P{hju!KN9HKHqD>jG=GEDh=WMclZ_G9*aGV+bQE&+EyFCb{Xc?b6SI}>jnky=sCrnCe!Tlt}#Pmv{Zl!e>^oHkY zH}lJLVz}67UvsBt4dPJCw8q8UjW-5_Q7t;5%^nZqvrLXYCD@>bWHy!5?97+_v2ZM}qbkFmOKsoy)yMu_9}=z$9>u3_RfMm&Dp z?{2oG$pn5WX|IW+9II9&s2RGg15ZXUmpee>--wqqzDqRZqbps z+y#CBR1?+PPXqZ-hP@zC1xJq56BTISh(k@euHI407I>sE%|$tVa%`>;?UTveaN&p0 z^f}KTc!en7ADzVN5s{CaQakS>Ps<00PP#*g_{D6k?J)u3HVOOud+!mc$3D@L^UJ;tt+1!afmAneT7pdt-6%o!v^m^}8XGAYRWt4P~7|2+_ zyP*_&IRw))efc!z*nBa(K-N7nYPMM%q-&Z#qG^7EeG($@!%8%)(9i4QL!P9?U_yo6 zC=N*yyglN^{82$~h(1L!2tQQ}YWUfjrPk}P$K|?3mo2{(Bq*yW&nAne;dd@!r8pt8$|T?%*gM?8TKG21H`ra;-dLOH~~b=0HKiv2m^>=@vICDDeXub{-CezfR# z`;{18<^c8|Hc41NAZ1B&d&$6zs2ygtG16xlKuYOKGYOPhmRrhJPZTaYYr+0!2i@%RZ>+x36)cqhipJytW+ZWN@w7)lM1=HSo&3QDquvM4Amx&|! 
zR@ccI^mT%WUyxH&AT5-;gw>0YIkIjc6yeeum*-kZNDMx{W`j)-q-f3vY_n)DNoHO- z6nra$Hji6tABpy6;0O;NX6#3Q{MXhLJ$SUrOYB?3@wUX*1$JqKX(*g4O}8t*22fbj zn05AuXm2!>4D2aEFPr(ScJC;gnT(Lmc4=iutNY?t|3+kfc#rh4p(4G!zOr85_3y1X zI&-NLrpU$(h4wqq>NMSxy%7RFoiV|hMYn7O3wWrV$ByY6F+FE4Y8Kl?|M1kYJQl*4 z0!Y7iB$M4Rs}l4Kn&vNTn!l}S{w%y1!t|cx!^9QY3^CNrbLZJ#L@NUsHr`}kgrK6v zh4%e_c{EmhcCly|BKo=(35UTT=h~&VDa7t@TU-0RXfIJ8V@^E&4TFa2*U#FCROM^< zz~>4?l8m#rTj;SP?4)SVzLyl$a_o-(z|mOau>zV8@c+P zuJ{JzvhSLQi=J9x@9~pk$Jz-&|6RNv-9SeNgg5;cu4aOI@?3?g|H^xBYGvcr4{xcNnyqizEiB6zjYcs zpUa-=Zi9r#kKKS~$hrV&&`xaLzI!^M)Q}Ok!0r$rDo#u5ag3cE!itJ{F45}@!h(gy z0RzyKc1`lZV$)j9<8US%=+CE5v44stU*_*zyZKB&_^oj#%!1?ralCDIjLpDS*(+jD zH1kyjCKZMYlI@`~jL?WCt8Cr-jQv&!C?8?-MU&?6=9z~|$Ju*=-ZVL~8~;fNW(Hrj zf1-6f%VSgV+}HryA?Tmit~7lk_r*Uv8x}(87?M|g)2)jbl+1zdbQ>q&*$=>mgRKpL zyVmU5;;WRIE;1E=1%+^)hWZ`w3DCdYcuNE#OVX zS$GGx~ZQvmjl*4vu`6p)TBhrfwtE^gTn)+8#H*l8ESRKsYsELDc{ z#UMpt5iVy$lU0#6NskC1P)=dvq9V%*(LP@`%gRN2TcQGp%A+j~apST6{LZ%tQ8T{U zvhTb*ByFC+TqpX5bFPV2zMQ+yNNlKy7Q(X=8|qmL%S*?G1&IwyfL48x=fiK9M~gP- zdq9nlO{KP3v<6SZa@kbbDd-Y~L*I`=%K+9%Pst?g6D)Z3~JU^b!#z!a36Oec>#<4y0CIHJd;gCJp`hYy zjQNZ9>8Zu=%f=9VymFqM`s3DMCZobZ@ga7aSU*1J`+LSSFVwOv!u49z)Vk)lPJ!yv zhK<%=v^UqjB{m_1@CwfsiAGY?8>EPz5L6Fs!r)2wOC6*aYdtRD&Dd04%Px{_KG{R^Y4)ot0AUo!x6Eq3v(JmC2|z!8J=&ck z1}RVx#${3nB{y}_1gjN;+D9wy$#(9QK*WnKcJUcG&u$e*oq=~gdr*M3&`EF_Co+9Q zAeoJ~Q~~-on?+IFJ`5-$(r0?=Do;qGU|_A%F1;G;ee@s-bJcv$xi_; zeMj^R>~&$pP2*E@JkZDLYwQcrBsOGy)%3YS;eEb4{!*P1My}TS4lzTkb1s1vJZG z3ges`JYat%U)D7O-b!qC((TS0!T;Yhyq?udjZGJy#&2Aa&pr1>A+n1%F16#LH7ayL zzta8^ATNPr=JnB=Jd|GS&O5(;f{2?s$cBmZlF_wh*(Z#=o$(NBKfW2@GvPhg+owW& z$94Fmi4%Rt@pf2DA0IS5$6%-20wY|HY+$C963$jTPh9UIJ=?-s%EjU^Bk_BPPN+B) z%SuM_7ebaV4${P9x>OSIqWY%D&h1<=s7B))`!GbzZd{aOhXrVtSUwi`+d}&fxHl)c zmkE-jV{f}DgixJ&F3R3*M~Dx!;0i9qj)+HM`bgK<|BtQrfbY7x{{P$Bp|o0Sd+Od> zivn(|ZILbW3y4~)HseF`A=7*kmK_<4umnMtpnwDj0YVa1*w%onm7!w6IzAzUAy}<# z)eiOle7(;lpFVz%zYoddyv|+cj?aDHaqc;%bOO2nIDO%@?-gQrtr!wx`LVA`aSGCh z$rs4q#UYQ#4(dkBya`B+a;2|vLcq%;xSpB4-hOhk$7ds}_2Lde@1Mws#Jj@OZ75gn 
ze-llI9NIeLXnDh7Kf(WZNUtR#0xi3xE&ZOQwDEdjx|G+OJe+z>i00_p)y}#Vl8jqx z=U8Xa$b59t#>3CfxAG8lXxk)vN3>TN|0VReoq3xlEW-C(mS_*vZPAMH_DbNilr3v1 zo7+;h$_|J4dS?8gXf@6_Eb*decDrYFqG68Zi4Kmcuui{zbL8E9qBP6rglIC(xg-^> zrDA1E#rT%;#Vr+ETFN)t3ko8027OQLOSKC6kpSnAc`|V_%03ENwN{O>bM7F5kM`F$ z+fRjPkET_ai4pB%I*UoIC_kF#zI7*Dex$CShgr&G?Hb*_v=kNOY+zdcM^l2!2Sm9v?D`??*$fV`6S>tEyvFNDX|za{OHEoF#)gB@%3YD&Rsx8 z2UmfSnT~zJ#7)Qdj&-`*19y~VMB3i{114|g&Gq&>VIO_C(cx~D>(}L}IK2zF7Ch@` zo(E|PIb>v!bX}nG*6CPa(}loz?(9Q7a$J`5|+)r-7dcepdUkHqg^AKfPEYHt-!}9 zzy~Z2L` zGXc+L5L&0-=>=q-h?X#T*k#AYi;uNNV!Z{Cl6z@1-%V8Rc3(93*k}MjJK9l z_#kZXbDeN62^d}&PiEoMLx|jH)rmBG-vIf@OG99^;K^YitNtTxLP(w!!_1|v4e{xy z0bGH-BuH~bnn2rjA=Yk7 zu3aoOrH?~&UZ@t^xB3&zsA#U7@3i;)PzF#Z1xcFqjgj+_dYL@9LrjlTgn^IW1c+6J zmhy@AdqJ;qa_!jU+I2Ql9P;OTXx9&D3&keuY`*xH_KBdlJMTf5p{I2MeEBaui=^JaEwkGMeMXHoD(sN0i+Hrk zA+}kUKIGBqprLq;xlGVIp))(He9SNO2{Dpu7wLELvS1jzvie_Uhs2=?v9erbheK@C zzPLiSZ~V%$kCwMvtY?6f){`ApA3;jf(w8$QsfiC23)xzqTw{fzLs4~&JRq>C;`~3~ zg2<&5MeY2b*lNx3_GfVrj7L}mE@2+YNptDW&MfwVgtbtx+h5e$g_wf_N1+i<~z7S=V_i09)X!ObE*iNqj>%YOTl$1k(05X=D~-Gi># zj#M6=E5}zm#Y32;#V<~`K|=vvp^i6pvPZMp^6!h$o|sJ)gZyc>LD;Mlq9=}SU1^pL z?Tr&zW}aiq#35t!Wb*AZ0TN0ZHepxC+ahCMs|>y70&zU!?nSm*h>Y3W*m=XC5h;2{ zzuo5N08b8Av;k{{Ntj!Z#nR?=Aw_POEraNra)IR2Jh{SN3iR=1)9jo)kH+1WJjiwk zkXrRY)&~WB0*hR3vl5so78%`o>#epl6+D7BM!Kv%E1zKcuz!(VQ~*h>F?F@tel8G+ zGgDW33qlyP{u8!3gsCa{HSHCogx0Beh89hB_YAbt3Oxs6-NPcHT?%%_uJNx0{dtH{w&GoXy9i;-YIFU@ocg%jZm22-u{>XH9IzTihZ{T+B3!~4GL^s z#h}oAOYLEiG*gV^zeUz>g+jM$t<;zKmpWBEaeM7-{!YjhwmVMF3;7JHaEaLf3G&|Gw1~ej4UOy;=B%BjhzP-sl67(x% z6lE~8jjIjzIcA;d)G4ZBQsI6%6g5H+IqqY0(8 zIoDrrRRToq;TEvI`8W{Sim1l9qP?+Ly*+EQg&D)chg#>~K$69-O$>!^{?^l!@GSLP zq6yfu_KmrAxq#Pv$CO+K=2ao)k)>1Z7ovTT7Rc05!X5}|*^)Cp1t^>|$~c4Cwxyyc zt7~MW|M?JuS`~BCX#W&O^kmg|>+(Bj#K5^lmJJc`w&^{}ekp`dc`;5nI3c&V7^G{d zc+y6tq8`|8-GA>PI~S$dy`sr6@@Tb9fIL_jqh!d2X@?lzR8fSFgsEI~mQbk z82jb-=m}v;?Raws`*vAd*uAktiJdEqz?a?G5q;E~guJ1&jbj7XQyew&!aY^?lz=AD zK2|FdDT*;ru93V`_?mW?Mz^KxvC$b^rAJE}EA;W#uk_@F>9@*>i8Ogli|y1vckI~7 
z&J^uUH{d=SEabJsfpyFt4KauF0yi_z$z?07NHp>lt9j?85^-RP)n#D@Tz*01v}K=Q z_}ruH%GJ_3JDy6y^svRws~`?i;G%MLfjuweQe>)tw}pL_X`fSI$q=1^apQINkr3s9 z`mkX6mmspm^SNdEcL2M#@yq+}7!TR|Bn!N20^kmGjP(?tiJSPHi6l3)DQd8i{D`2J z%>=;0p> zD0}n~M2_~iT7^Qwpr}GgyWA$8jJtB#k!??4p@0ZT#uMfzbn4@Vo3Z+8aM==n7$0A#liY(u{XpF29hjKFchQBA8Cf1jMWB&G&J?Qx2$xfgADPV$+ znSzC|O zGpiDlp(mqXMfT(qo&`NB?K#jji*n_$5vJVAG*slot1dv`#2`^>X8y*zL)9O~kzCXPU=f}B+ zxm1|ejdKk@LbT`G7lpb{1bqM(^B|g)*9;Hmh~jB}iWdp`8Y?XmgM%z;v&Ev$`)m24 zR|t^xa5igy5bcx2-~o2yQy?!WosZ6VL6@4NlXN!z3&f#lrbCp{nM6MmgNzHYXJG#h z@uPPJkvq*fGYO`au5X=bD}^aw?>Gz*%(hnrNzDb?GQ1IcTskT{#@WiJAujmdf^-`? zi(n+>G+dwVgwSzlOW-Gy8} zl)Uoog1K!ttALf>RAKT@RyRa1qsr}Banw%ba%*TG3L=7@Cp6OTH zC0><05!8;r=x!INvIOq@(pL48{0v!fHkg8DRSHX~FO_ zD=99>V+*ugEE?o!-3GhpIggFJ{Wur!XK#o>^gHgj!A^)q zwn$N>+)i2UL9O-JdkHiOX!ae^p_glGlk6U@NO3S;Et8!;6-Q!ij8y!_Szj^8o2`IO z$7Tt7zw$}IEn~jDB34+wpS&iu^J-v519xV$WTeZ*LEyu+>ntvsT)1J@&AYtY{CEW{ z%Hg;xgG<$G1?eUkEV27T7#*q~{Dcs;XVIRC_ND;YHmyq7H&;T_4~5L?1!VF)VV}BC zyXD;PHZka8_S~4;%oHMNOW9(ZESmCgOq^y*LpmIjtl%67Kb>ND2X~e_eO(Ik9%_u; zwTf_Wl|%`b?)XM25{t~+-_*f2idLh_t;RZTZdTNKU^=#^wg*}lf_p`KJ{)Lsu2yU# zR>Sh5*%(*Z)51&^FVC}A0{K8^yS5H4;{SO%TIsh5(3F^HU(T9vhoE=+AU&Q<*v=Fu zxqdzROxC-eINnlu@tmRpJ0S+*7Zz{{)d1}sOrPsbG);M*m=wagaVFOk_pAYWAzW+Y zrO~p*Ap%przPk$q$x!;T+eBa1?tFgz(otN^lefiOTSvC#PP%3NgW^yl&iqE&scSti z666%b?7{#KZQ67gyMC7ohH<^LTC>-E;!qu}3b{C`wBl3-L-0B^BETbSHXqr%z&flW zILFzez-J1;+Amd^5Ky@l!UyjSpXe7uhDRaMQn4O*CutVg# zVm0D;FLu$-$i{5+{A4uxNzwk{VAnp1`|BJg<$2;VoO%qhDv?OCe;NKTYXm$isoL=s z-vomuaA!TXQ4mRwPMKg^LlCVq2!+Wv15Bd|VWy1`kCuw`O=|6NL9)+Fd(6g&R$E}{08`gOWS6X|O0M8V8?Vd6(P9Im zpt_7)t!4bb#YEWLIJ3oR+W}`Fl&73c>o{PSFVS$hOL3CS_|E2#*a;0|T( zdP5vrEsQgxglH8?SGqfHa-lO;pG$QB8ET?cisZ#02iwB*u_(uX-KaFR9E z$(ki{f_EVGX7B9)Q*GXHY{HkegMLkm3(NvQje42gF5nF^n9D27G>hT++@pt2m8l3) z-jCV$UL*oSCRdHOi$$xgBFDxyQqa?K=Q+jhc?s+-@29+#}frGuDlUz#zINtAsd9YgM3P7Kf9LliHoxpaDSX{}u>j!SmDyxKk% zL^6!_rC41wCWm zd!1m)#8gFdX@fl#Akzbi$5t(f2u<4%c-I>MuM=}YMbz`3h(`n#|1taL|3uJfnr5TV 
z#UuY+Pe-1J&Uq8)(#Ry>l5*=Uj&~1F7Gw6HFcmE>;LO_&3Xz;clUY2oJN&=NXJK;O z3if$~=!tc;4G)ku&SY~vT@dlocg)SS)gg>8yEJm{c#AL>B`+;nrZW7z1Q`E6r+dda%DE&`ig^4 zm{H5L`$Fs-*}=%N7D4a6tYLEW_(6)%pJis6oqoWxmzHtaQtUH$X55jbnF-K#m)-dg z7nA1+@q;_OaHA~_(eh)g_UZ(vCy(#t9qmm)Z-}Pae3^`E|F)-W-|<%KC>m)oF0pSpmpn$a*Me<6)-r6rIG(bzE=F2}qp`%; z#DLYiKpM+!i(V^4AOq(F`Y%O-UYmQc;>0Ee2X`S#WaoLCy(s7fCSn;y+~>U876Bs` zPuthtYs-TMV71AK)eps^P>Lzcu=9kx6}m`K)b0?2#vUf?gZBy`Qd+!dM_G>5g)r_s zH`t4*lw7WIvOM5@k4(k`FpoW6kc>E?!>z4=ONv!VwBYT^qzB{VXN^q}@ODCjG~b>P zMk)?j7Dc8Xx2GcDi35*_&29Nbsw!J-zc_T*kxk3&zoPlsbZl=I9CUJchFu}j3pr4U zt@$x2%)M8kZB60q7&;X13wzV_D8gN=J=5ZO<)qshkyH*-6cr9azen3JdtSkzE9aEi zaY34#_GJ)c9|CD;ZCGpL1AS!nSR8CS|M0>^*6uL0%gS+b%mxUPA?Jh@OdPufeQaRy ztDddGIO}wzEy>VaKKv7f5%HC|s5E^2D1cK1)NZ@w#-t2<&nV24cN81J_~B#r=q@3b zS~k3YB}fL@&6`(SUMj3>T9!Q(Ag4)_FnO?2(8p9F!#)$D;iB6Ny>0&5F&KQocGb3B z^PL}9ELxWHHgg(3$Q-dKp3afwiW!35{6o`ZtL)4VV9;(Y<+$kz^c@{(}OICpIq_ zqqo@QVtO5~yUzX~hfjR5IF!^px!P()``D##pR;;lPs|$vTPx`G%j1iaZR=C<@7D+DkT7{2 z-TtioLo}^4O81zz{LNF!%bM)jDQ+V&Pb~5sGxV34nU(J zapNcO=;g@q$7bH3$Q5&jm`oj=+S^Kz$T4!{NP8>LnN78}?C+v_qGJIwN+R127yOfO1m97XyQ9oH2}xFrG4P&ul%TiYzyg$y#tMg1 z>XWK`$=Vw1{@9L?^2oxqN0%+O1A;z8v4(xxr=TF%0ZX(_L2saVreQE7L5=E>-P9ZK0gVI*b&wvOZE!-8IJx1sj+&qE-J z8?5E87E}>S8>6GWCDvaYHGG8|IGv+?-gp1ysVJf^i<0{V$r7XGOquJ1d`O}riyzY> zdqphu_E?sj{}u46;tyf6PEPH#f5Y=}mWjUx{K1Ohncyvuj!D>qVrh8knc7eR`nyNZ z(N-Af7(eTG{^OA%%dsB|sIO$Am^Y9Ab%@kCsL`DpU}UB}W;Y6webLS*bFE&$e{`}K z&TF9e#i79COXgU+FQB#3>u%?W^oUx&ak+&sa~5|eZA=84Zq@qAKk^JM<6uwmkR z3l8QBk+4l6KWdxF>q9&Xojk-jr7-lp5(6hAVN zA3fM*oZRuXyQ7MYz`jzZRSHq%IAdXoXeyb8<`*ycjtY7~d&jaXedg=nFwP_Apxi0# z7SG$IH?!n1!BFkS%HVYLM2Hid!JPIDkJDzwO&YAvodVOx4E{+n;*7<8V)-g-~+oj(EYQRieXcI+y=ddD58)27!8-{D!OVOAY>e03LtzvsTxiHJHJH+tj zliwyAoWdk1wMT`c-z#r`w&>VnL&%~l_cHPP`-<)K?|2l(0jo@$wpzJ3)G=$AtfYMX zG?16cmyr|lHGv_^#Q31wU`=AU5FPKh{SLdk9hjP>aYH*vwD;Wr4rN{vB(umM!5V(- z$Heds=-t_V@?B5aTsqdK2fCFl&J59B0$Zd~3?6vAcuuyR`aNiVmpQVdbgO_D$GEje zLs+pMoaPI7MS4Uwu&Y93UCVKVvsBPK8@mB|;rBsEJ}hl?rd@fum&3t9mX!!l4j#b^ 
zcv@5ypq>z5t?BMF2=zf6i;P#WpLkF_&;7u7t%_{EI6g>tO~jjQeViM_q&CIUJ6|i{ z5)W&t;;VQxmC1?qPn~F(Zx?)@CWe=oCQnet(#<>e+W%h(N?e23Is{Yf%7Jd{|Mkx^USNrUKb`VL$Vk1(osP~ zZ7!{EE}a&A+j%B$Fujer8fuUpgiRCA)7{HA0B3GD9i?Ve`K&V}XDvGR`D^TK3QjFq))Yk!_+lw32}(nM1pmQEV2 zP{7-lWp=~<`nOlxIPt>h8plOJy)an~;aaG`nuNUdSqqw-52PY{%It2DbYM&QIz5i8 zn4$NPqr~(A*kEH@2%7{?isuC+^hJ7G7>WKoi?j8=33yW#7wGh}>jfT~xno|j)e2~) zYnYjA*oF@oTP+T4!4;!?J4kKlWwBhu4DNMb6QmUn&st?ay3pz5rYY86G#K2nvDvabJ8<$X5e-zE-F` z#c_d}X60MI6zq6g2m6(v*ElU*m&;9J(BS+qx(%`)TnzFO`UhKIUB#loqVIV>0jk$r zInB0-_Lk)|Bgg(J>;n_c(~Y~f*v~I%<8TDk$i>5jqD2Xkakdk?G#Bnvy)o4Z>7B=NL~f$ zyyUb2SAaB7bVwmS>lb7lud?c8PdiWd8&&kKYmYk3VbFWjv$R4d6KYM=Bn( z5*O`P5>Ml0A=gAR>~TTL=EsNzd8Lqsn(W1{ZZ2JAe-MXeYpGad*IWhdMGlS^ChQ(z zvSKYhfl5CvsPQ|0o*lW`qZSk*uU@VQdk1&UjAv5KZ(ajOV*_PyYbQY4^iQ|jMXJEE zI(t&Ir`okprao+T;GilJ>F1BLrfWTE&wEydJ$z-CI%8lCN{=MeVT5cAto7OH7H?A1-#=1pp;|P!n8oLu13yK zwg^%i?3HsK^ybOHs?{=3d;X0C(lys#ZCyo@=VcGy{WE(|KogBTusj_ZC+2UNEfrq_ ziWg|bc6kSn#T21UBAdGCy_#JoK6O8`X{p^M+PjS-eVk#uDMt7;En{GPn!?>RT84Ba zTpd^wjqt|=JvxJl)sV2-l>>n05Ebq2HC*+y2vURn8(8w*(8@{)7e_!LijrNg%53GJ+*0|^v zSYA~*Ngrul#0XuDp8hi-Zx&|UbJ>=W%38&#mEX&;C|R&PJe>gDk}0NxVO$Cc@LyjWzETvuvM zqV;N7n;LX4_CNseQ~IDYwd*ADKo5pfb`X%Zv8D?FigCWTONkb zLGip5M~uQ}!FPWOrs(T9Yr9djhZW0XR9Og1Zdzoo1zKJ@KNjtci^mEnXy14j9BRxd zqa55kV?D(133$(9w1YT_-aDK9;=^})L`fKDfZ7f*DF0yTH2YjMwTWRpEei`zKXXW2 z*>*T2NX#OROb?6Zci80~>+o|(7lyKDvoijTIQq5>vQm-A#pM}RBv1-(6oX8TmQS#~ zfzIQw?8aYsE(6$HprOG^v4c3oEM~jEN3>V3I;!4fong>YJqt!eYWUX*BRQgW`pGRe zI|Ku9iM_>LpOWd&tY@Y25$t-Q#)A7RwRW`{w=_Hg)TS z5%0)^dG>~Ah8MP*682>X7|p50x4L+wQ5+L=60M=KvGM5kX;u`14mWJJBck~wNl!jz z1G|E}r0leu9D7RG>z2nSV7DM`@pkl97j4=DVt5De$Xkc#U!_v?;OwhwH_zbswE4#; zRoQ)lNdCc^HTH;TepYT0Ei#fi#m0(Bb<$X8+HwJMPcB{N56f{y@9B4rE3xK4XQ1%< zfoPXIk&S8CbGm!H)~c!YYtaa;%~EtDct8xQJfKJyk%xq+BF6zzN%?yZAOZ}~4&qY* z&z6TinU>xYjQGuy=Gth{L}Wu&ZC?nGNu+|Z(7x8IE!_aDvfDUeM2L=>u=$5kf&YoX zvhS#{w_rf9jBK(zEcLB0Lx_$1U#u_`TcU$W2X;h)BFu_tRRC{Ao`uV`{-K{4^bMEfelrLPKkDwk{D8BDlDkxQlT3y?6mw89<`O-tSIkUc0e 
z`rS5!-|f~A*ezk(L{b1bo)}Ya+XM6)1N&6a2VXxPKNRuYHPbG>4<-$r7OiTo5~BB* zjOU}-tS`<_#6qa1WsK!Q0SbsLW=yqxC*jgnvJ(CEAs)40=a!gVD?rrnLU39LOB=Rh ztaZHKBl4oQ#(pc{ox>$`rhWYZFqM7yr#4Dt__;Jpk9>42wdoH!Gr74mxoKWF!l5)3sk7Mpkc66k9Ie^iaZ>lABHNnzw;{8h1N;^TF7_F140kC|tuJp?~$_cB@OpNN~cB2IEF?5Q$$^9nWA|1ZilQ$1Aq3zXDOofoV2g zq&k|Tt;wQ2SKhIYx3wYc@S;ll$HRnaqHB~_Y^>5;^N8nhR5FrCJU zUcz<@P%@S&xyC#5(YC0pR%@)gFl`(>9F;AWO2kkxX@QoCc`X&=S}Impr8r30RGn*o z674_E2Rqxb5RzO|iEn#CT#mG2u5G&t|F0DZDKsN|nLVTs|LG*Fo(0fQF>;WkKt8K}zRsKcCGj(?I@J_f=nXq&QAorI(|!DFSNXwQHh}K)!%j=6Qdj-{ek+;KEAu=1yVcRb<+9Kw#th-;3LUYqlKYdt$ z`bAoRkrTW!ap>|Mc!WzO%)=1UkrX`f>>TTr>Dls8U5&oOBZ6xAwY-=XjV!L5zEX%0 zLBr(9&#rA2gJI9Ed==YHnIZa73>HGYF_*>8FaiBaqSddB6!f;EtYGBU#*0gN$Hq># zB_TDJ^vigZPXM{IHsxy`we!m%e8&{7-;)8L@G#dt79f{l%xiZKb-J~x-WG~R9*+En z+a3YrQR&q5m>?}Q_yId3%R?|vDxJ`~0z5KzbL-e8)Ou%JzH@V-V$L(PYGa9+TGrBy<71PC!3I z{jk@t;y4;YQA2c4ask+!M7EMf@*0NTZV-o_y@3*mexJnGF+`qeBHw@C}EO~9n zMH}Wuv8ZQj{a7m#jXYURON#6r0UxsH!7j3o1LiBj)!it&Hm5C|Z`U@vIu{(ODx1s= zcC9!xedO(8j`axoxaln$Mn3D`8_Qwn2Q88 zP_fI+TVNrIPp(}M?co>LtKv`_R41?!pYOrsCPR^|sm>E8jBGyGX|_TP|4nMPelG=w z@WuaD0im>0dtSU=B-)d9Ey8cOT^R!Nc^1@Fw6|LhCw4YX*xNL^%xeF2XJ&r^7Z_5TTOGYV4&z7ot~m=p?_Y!Y+Bt z<0N7+yCKkrHqEqOiY6<5!d#r#(*e>z!?}%rO_0V*%O4iAQ;PsDSl6`tXnAvySfs|{ zmVE1e8enVLlGaTdY)FDoZ=lTREarYO$RVhva(-e3VtG5UE-0{ZDU91o{F)Ve9-Mfr zjgCiN7K0+Wbisw0{Z;U!`&!Ny%UO5V?H$Lt*u5ofIcKNk4z*li7e6f*gP3th$*G?!56+H;IYsK(xz=(<6l!ASjANmQ>D+lLHwRF+p zL3 zVFxTS@Ud8|a5N#sA*LvMRXnOZ8eM=rq7jHE`q5(R%u!&<3zxW+g2;zXQNHaJU~cN! 
z+rBm$(n}gDl~}g3UB%LGx(Zur(Hp;Jal*%*{R9&a2R{7UY{wO%9^;A5BKwb!*RhZ@ z<(ZEI5%1WX?RNWboX%#pZWQg&7zTFHZyg@mw5)YT8Kwz5X7hweHYCtl@u4;?MB}G& zV)k0w8(?H2W1^i9^#0%zN+o{RRz?C1x?cx~Yf_BK+Ry;8e15obwLO-KoLx|2*ZiKe z-uE1H&tw*!CQjS0rq~V$Q?q=oG4Cpcre*Qxm#{YlJZWD(O#HywmBFF1DDp`w>P|so z>(s}d5b$gZWkheWuy+P8TX>qCZAZjXXPkUUVLvE`Lxvn^ufxX1If7iS>q!|SUG}GR zIdrYql*81w#`*{#Zh{$gxM+S=yfWdU0@E|Eh@lC+Sey0lR)Acj$U{VgNt#?zYiFgx zGSf=3>{el~Y=YxRuIIXlL+s{f$Jj{GNN`gJbb==c=(kqIBTHKyPyyG^F?&_Wvq{#} z@_@uz#37p^jPU(Kv21|9RS58#rfp zNL&iY48O%umK_qku;_)wHhyed37t9jx8CF0FkV*5Y@9GH#+ROpBu)ft#GptXWKFg| zrefzeEiJM?2_t(UwxQaMho-C8Zas(P;5!ACQN0eS_?ZtEBQ&;}@1aUC8D|zSVN?i^ zm`jIk80&dakQU4@kgMwVg!l#b?PBo>kQCUuda^w(nhbb)9mNfnyBuYN)TdGi>y($K**D($GJ|)x0NBBXHuv{JTQ&$&}6!hs*et;rix1^ zk_YDvYz!BPK^wA!TWaqJ&}?}1&B2t!CxR4MoLz$FvM+_az`fl1S@?HsBCDKP_caE1|5Tq<^jouDHTv2RiKAbbdW9mkdmiVI} zF*9&peg0F>Ub?nDTyu3uaqf3J5t_-Vk+Wm6p<_1%sMg|%TCt!H$V7p(w_;WymS-HjR=If=nEiWmIk&0& zU#mOzUy13FxO>KIp|ID9gPo`Ns0^A7#|J~Vkr_M-9G+qfjpt&btzHcM3UN(dV1E%p z&dB{b#^c-I{AAS@J8KT1j3o4gu&F8Fy)cND7$3q=$BEg`<`U+`4v3B;dWb>fXwq!7 z;*(I`TX=IJh}`bnUW%|Q#i1m=LfLk~(*UpLAT2wu6;@_i`fRit1u3j^5wENBl3x{8b>bk)m7RSZNMaeB6t07axq5@s-#~cDA*`=Yo#ntTPg5C?P zKC%*)5yCm6#(IiP5cJumM}IpegkX_L>n{ad66vd3!$J>4wPutx3Xrd?eMeuC8y3NE zIU_rYn4g0p+H>=tL`FyRj9||L6KH4RDGVOMO7~a--SjM?f&|7i< zo&jQsY`Z=rVL7%OA?Dc4sU$<_o_ht6yk8f4QzW9GX_U!U+_BdrompWCp;R}y>>>j$2SOfuo_rm36gv{~7|JQ#anVSr2W!!O!M+Ts`Bg|=6t@A(iQ?7mrfpnrVX}{Y ztGx4a2GaKKX5$qWnnE^K#zvowN#gq6#}8asggq-Bvtx8SgmbEB)q7P9%<$d8EPJ%f z(ne{j_(&<8Evb%I+NCStFp#heS8O)~Fapb|c3%Jti5WH}0AAC~vP}ZCU4kFP8zF!q zw%K_r2?%>L47p0{!mbd{YnjJNJ`W=eFc3I0u8ngL%xcfIEHSBipI%lX(tEdK`%YFb z7=8=erSoOGLj)P0;ahrG5NX=q+`&E(eRjJuAN_y+vwwt0P3v>)oK>CxSHO0YXzvqV z6k$eGn8I^knakm+eJtpOO98BYqMcdm5$LoJXI4A(+Tn&0y}b(+PKxHzdCjF;rTTM! 
z;6!G`BV#i)CpnwTlAD$!H`Q6c)ota-Y+HVHQ)Z$voLj&oFX=*fzjH!rcP13(!r&)y6GQ(c?v zyp5h=WGD#xO@iTP5NTxCgW`CX$|i>8pG=^KC);hZppVR~$nM)WH?_s@&IZut2&;i* z!q!c;tPm?2yr?qr)VI+ZLn2-v73VYACxt!N+_+Ba`fi5dGfi{F^ymhAhK@&9itp_) znk$B`TRdk5m<*xniy{s!+3A)m?F-SgmJYFFcH34E?V2d&D4<-xYnd10z6s(VpvIxapE2G?5`>;QUAcg@GDe=LQEad&FJ6?UofO7fUAY(gr=PCfO0 zUKm+1NpHJ$w&fOQVPJn2K54fLd+3~?&5vwlt+rEN^km0M7usc_>6f9StdmHFBRVT= z^rd}Z{lxM%)zNl;K_upkrKG4N+a408<+3ApT2xsrjhA5}Pv>4XP$aeM$b-KzqDjoj$2v}y*4dMRq4|;x=}>z` z3}hLMQf7|*RY+ru+h3ls{`M7M_?Yv>VxY;m{v>fjKWT3m8J1*T|3Z8cOH#g{m%Iuj zXNu}I$i@m0$Z6bjtgf&8qeHepW3bWCH9+|-#RynsfGuUUEoGDJfFLz$#+F~FKS8Ut zE2vYQRt+}nHCWVrLXwDRVo`M57h9516 zu=tk$)m}*2DxYsfj%d#Yl?fddB?2dM9Eba$t@eyKR4!|nEW_5PQZ!dA*L3(^isL&y z>-4%8ws*eWFVYK$Mp&7R6h{M2DqNdba=j}C@*JtHx39n9bP?Kx=Zp5F$qi$Y8|K?( zAvA|)i`_-jKJwbMzs`CIk}I_@7|LW-6zu_0rn8K^(PiO?k3rwiO0Q z>Y?$GHa`{r!6rfx?aLIkfTZD$SY~*`{ zpVf8Zk%A&VoQF|UK`K3b=V}`(n!f2ez@~;+2ew4_zfTK#v7F2m+W-9-?6r*U3hh^7 zgkdm^#nD2$e7|$#vNpzoAYnP;XmIAq8rb6@968=zegIuy+bE6~l7AXB zda^G%?|siLk;Apn^#T-=$(nPeXs;jchw zxy0^E5=ny(gBW-n7xGN-3#{p8p*TL+^vWXgN?RiyC4W#i_rx=$R-a15N=@b>w~K?A z@+r4dbhx+T8cYL4g@SVb^aWof|+qvRW5_8cc)}=8)@;$V5 zs+Eg&QGKAmrT7vkrQ~9W(OGM0>k+?05%u9f0(}zV-VW-zwj? 
z)_&Xq!!tfnQTE~VN*f{$#g59%vHQ5rEP%2?lfDUN*Bx*e-Yx+jzyMps4ZJo zulmhah~s_FlV7Ipbz+1qtQN++7q?Sd;a$}3Y^DRY-P;&6p=~0yvj!WbaMd&;JD=~M zEf7XNjBwQ2aREjE>HO{aHv1yPCqEfjwI3b!_~|?sv>yw2)zeEdu?Atk5+{g+P9nE0 z<#yf?c*xE89rrBp9uzjan?Asp_NY``w5Sa)?-R%sqyv~ELtYh~IpR^qC zbaM5$WbNumd4IL-Ohw&Mw{wBLCG15>W%5NCIw1j;uhPS;e1aO-Dr8q7K&IXI^ zqQAFc-r5!0CBo#ne--XhCmtxbYsK&uNY>1-eyR9aN=jIkFw(V@EwUQXh|N|iX78lZ zrLjG~;va<3a=WVRI+4^>gSnRt65>u7)RoCOx1caO7>S8}NxO4l>+U9+PNj3C(>GtV z&r}SRT(QQ!`A=xC6-UT%>n=>^uqZ2MWK`4QtHeS$dFQorKK1a(a)T^ktg{Qn@`;%g zwffw0;c|<((JxqU?l%b07M!E3Cd;!v1Ck|uh6QE;7oj&gDArrp$44e#ym&5enZ#0Z zUOk~C^o`FQQpEw>aJ(5{0^9#o;WI(1H!L>XuKJh946bJW6Cehhq}u+#zX_$wn1*DJ zKhtW&2%n-SnO`}T91;WnYL5074Y`{&di z5Z}igE(6ibdNh>{o5(zi2_rk^@$BVPSY)8*{eTM!b=#G&qrx;IkLnXxjsP%Tqxbae zXX~&R0mp}5F)Qf52r}zZo2PUN%hzG59Td}RrB~zjy)QfspYbyK@GL zKgl%vlQ0RR{oGc&{%b#ra^=8}7H%)(wHp!V1V@mzC`2pO9uXM|VW9YXX~OIbB~hmlRPZ8&bVcT$P^axi@1*NKMQ$*Qq-kLb{BIxDXgc46|^ ze4PCaAi^{)!JzIf0^WwXu`GVxUBuw`+_$U!QKSp<=5~8W$Q!pyc9B*|?N5P2zQ<~2 zx7K4>L69!w*;n-5c(%6D`C?KSeLB@D1w4zvX%v=e2gPww6HV(!+uwxAGPz<(vWDZ1 zZ?**w;9G2Sg~=^a`dcXAx#5Y06J*;fj{h_{kS^pAzTNaKSTY+C{Z{zCp~J;Du9w(U zt$1`Sr@;0Jg>fJGtol?8&uBy}mc!EvyXV`myb)Rc*!_Y?vTG&R#bX6rl4LFBd#mi! 
z@4)e2YV`R+5e^N)Wqicq3Za$4m4n3DW_Bt}#L40wvdC@}qAz95poeIRZJoF6$b|8> zP%xAvW97B>e2Bnglec4r-Fg}kyg}Kv&$HWvUF?oG+-yAry{75xoj79?PvuQ}(JqPsLyj-e8x1j~JeP4^~fo1LUh9>w^ym z*t%+|OpaxxA}80bwATc~xMyxHvfZih`>4$&rxT8BOz|vCIttMenL9VxfDnSN6<>px z5I>g@BkW_5G$q@N8VBAQOy3zg4*f*oUj0D@tjyY-0gj#=uKRyI7GJL*uUVXiIxgrb zae)%E&xFG$SUM9MP*(qaPY}7@eA17jn&0#De>Vug8`Q&!1o5B~rj?S~RIUmk(bKatwNvtoD~T+{L9Yi)NGGsio1JPzpbos$%C{d0xX7#wD(pgGO5^0j){FlCbKl4` z+2O~8X(DZ$8F}%MQy=Sbwr7@$_YCa_=LvfY4q&}kq8CD|#B=dJs2gLi33`1G&n-V% zU1=Ae0|)UUeZ)Hh*gKDt(!K&DWiPvI|02s2MAT!mCRufemm9}J`imjp(B=(xIHaNL z4?b0H_nqsREg`aBOxu#%va!tmC=P{SyU7lT)@STq`;SOOInuCA zs-dTy?+~d6Vjm=3kcbaGY+t(o(k013KiAF|R+~i67cs5@)bD#N`h5Kaylr{Niv4hl zh1mX^v+085dtj?og%C>Yirt%40qSgw#paiSWZN}1I%a2F=-J8unw}xoSYL4vB3ZRe z_MIy)a)_I@(%qBoxd3&FxV+Bl0^D7(dkmqyE+#S!lB~y+iD-TeR0f434#;MR5!#)D zE?bnsJQ|qon1YW_n{#5;TKjLv>%PHV?8-|>ipZU1bL46P|Cym}%8%q1VtB@ASf^S4 z5Idg#%3K>PK(5h3W{izWB`%I!e3Xmf?aCKYZ_4MTIL(#o*lF4pal8Po=j&|ArC@$9 znu78;lq||*L3C713WAy5RAa{kLz;P{$-b;DxChIGB0*Z}$WxnbexRkr+bB9D<2kjx zC#*rov@1XI_#M6+PWaL<+rL3KW72`(6VO*7UKP!Zv_!r*OucA;v8nr5$n(axL5Y1X zjKqGOVc)m{7%luJyR+n$vS{I#FFvB`_+XJ}IwD_B0tZ|P(2yT!vqgH=J!RSKqN~6z ziY&6!<6}%5L}7JKK7B%XQYVjC6x?ppLpW<6CYDV?-pBFWES3YGh=CBd-FAchN3>7x znRs*43g$Ogd+_iq-e-xXTAaL9Vln4^LGP@{>RR;FE-7Z{bzNXJ*zjxMQ2Ucc^cFW) z@N#61%@^OhwUBl80YT)ryTecIN7q7or3=$`F0$Rio@sFj+GY9ng*e1+shDm}*ExL) zZXy3J+B21BsecE^A*`C>((66uk#Y6*Yth7HQMZwo7mo@0B;1$RJ&}p0r^KUAdg>d$ zNFbaF>Pf6@Ppq+>AxgB4y6z`Lp+O@1H>+gwr2HwHC??&*=p8V?mI`@IusXE-?QM2u zdpOi2xoN!pxQ%|=Dn!#!zW-(SyFv`4;czX}-Vr!Cn%`J%=iUIOl%cUW%d1{OsvENd zibC-K`E=29wpP%G088<_0^2A?7*l#BUT00>P}YtW9PUiN5#VJpD2lC7IP6^`{X3p1 zRwuQK&V@7wKM|ecY;Iq>|-Xq zKF`j-35Y0#ym-GL&=?c7n?x(xRITsrs{k>I7GwN<<7Prbg}Ak}Tw(9!eXDGZ5P2Rh zU1INw_S#&-8Sllnc*HC;&I;^mAp)aCOpWyr^ju}>+1?OF_?Gg9$hOo%`>QzKcj$<6 zIP}X~+mc`(${YTB3m&ZipxZM4M~gUt+5{NiSPzO{r+Oxw1bBdJ+1`&{Xe=<3FUPWf~nj7x0yD z6_*Cg+quy^-0x7ko3 zvcfw|uDvXPFs<8at>SKI?@vzRM%yRCL~N>IUGHJ*!qiE2xHB~M 
z=-0|A^^YtKpP~I`VGZke}Vlj#KNM~eEV2{;#a53|FdpAsw92411EAnhgKsoucl{i<3JoHX$foN}`gx(1(OyLZ*#`D5t z+g!0w->mz)wMCB}5k4Rsz8b9Mv0j&J4~a+ihbK(2@<3zZ%+Bl%-#fAMV7n!tg62(& z;`VAl{G`!2IW9!Ybk4NvdO%V>J5h{q+4X{86ooV zYyU~5%!n6d*eN}U>O%qd7$=@yZa)^s2Vl3zvV9LRyh-mt#VJ>idKI8jkQeBqr4{y! zXwRv;o{ZrGT^u@Q#}qC}F6`BoO9D-Z=TeZ~w%vkc+PT-g_PS`QJ*cmpelMiokVeNa zb&(C9>S!TdWiu2?BJN%61<|xBO1^aGCn4nEtc7+;Z$c2ChLhvh^94yXocoSzMTg%d z`#T#V>{XEMM>chjgxDom;YbAf=(>4!X`i;3{=(g6h~up!Z$z_F5t8elli8hx;?Nq> z>avXj7q>enWxG`dud1WRvh~SzJVEG7gfI;-hviw37$i?LEzh#00xmdr6yxmM{lKiP z_`OHl)>@o;>nQ2;|KYD+8`*9hE0ebK#HXTdD->HK4vpTe$ZAAVlPIU{wyHnS%SzA1 z6GD@)H&OyqkUSN*XaF3V^zh1UHdD0A-GwtAj(EiIQ*=Uj;6OB|H`)M`VZlFiRLmLMaj|;g>(I>Rdo)CxVCuUb_ zLVQ(_Qe(M!aeGU^MPl;XXkUfEo#m+OpErm=K1*`z(DYnxe(dGB$aS5#NOEZ7Sj!Pj zQ*{|+&xrJ;Z#V1#`WyMH2gCI2SkA~oQ!Zge_tv=5uc>B!W34b zdu!2Nvw|YF>-qMHI9{eqOHh9t_xwXls{0q7XJc&iD?t>0xu4a?7Av{hc%fc;<`9n; z`Io7(GC|KBmvOl)D5r`;*|?&#AKVY^6~XLUj*SzhkVvf&7l^4~T{!6s@^)cwX00Ze6vaT?$WzpI+a&07G|#!?k%sq!zl29^IpwmrXiwj5 zcoD0>6=JwN9LJ>DUqj^9X%p%e9#y{?5gm zn$8Vi*H#P*Hwqxa@y5w^;lt2~zzEkU-)e)!aG3|6{D~|SFWN0P4;!u$a1a5TuY3bE>VAXyT=fLQ(NP0T;cvfRl`w!d{>Z{zRvE^TqRq z8H&-Ige^`bk`po8FHG*4blN44LR0XO%~KDrd%~^|^ln3EG?&D`5GR_brN)^pMA8I? 
zH~%8qWl6-?U;GcwL0J3#uL-AFS%<5Aq>9j2tdJzHX^Eu?lU+WuQiW(28?S)egRDqJ z=GAyOpoGDbE))orVO6+}$ZJ7Ysj^<|JwLqg%XK#uQ`{Z~UOZhxI zAeLvEm0_O>g;86NH=fOQSvnl@&51YR>r{X;lX$CSM>1DX(sPD(ZHA|eeDq9eDW71K zffK3HH#C>ev}xjy9L0+5?0)75l67K7{b*Y%5L!leTQ3T$*-=N$wy#1;Dl66PA7*+= zRJUvFo*2LzqL{nRazS$CElHYX4RwI#Ha-FZxLGfG|1O}aRBx1*n@h5Q{z2M1hFxK~ zo+MKJ$Hb?K&6A$6jiM0=@4|WXwvaa!7s1$OurspZC`-BQonluDdW7Kn?I+@R4|CkY zA;qu6KvwJ|q*;alMRmHNgZ(!ZvrF_K>dLrh)@EeVx{2e}?ucEjp@Lq4J;CmtHcmDy zM8g{MEM8kI5v0VuQ#b@@NM(_Mm6vw~y$hr}WXD3>cpg840mBGHx-3R&#~f&i-aVej z+ZYWFkj@#v5@dFO+Ewsl+a&0@a_rY&p9_awc=U_qr=e%~TkYmtLTKNei?jjG7NQUi zS{YL|KEPtx+|IX&g5H$8;m#awd&Kb8mXarLP!EWM#CP0rhkcyqnMJ;w8tjt*k@!R% zI=cCU(kLA}bhy&$1i}v@+kRBw=w3E#`F5KSU($hn>}8RmvDPw&zL~Ri)sryH;hAA1Ars3T_J}a$pfNl`q~{Z@6js;=;;3ON zrfFx<|1lu)W)_uTOYSits*=tpTY%zN0 z14ScZansg3doh4G)(-6R0Mcc)@TQWsO!G_FXm=O(xr>{)^fZ1kzZc8PN#v9il0#*R zvq$O)8|)9__&`Zd!x6Jp4~OB~_+pkL`Fb}!%9_RXDnt*a{wnO>m2j1qYXe5KWrfio z>iwJ;)auy0YO53NvgmOqmYUN?!r>RG=|Fb$@Cb@)ut~O6d@pL(%-yB-XJMDHORntF z-ZBaX*(R4Pv$$xF%%_p-A7K_#xX4X?NuNkX8Ys={#*jfeb_f0w=nPK2XN>m5&8657 z-4gtFcP&+{NoU%J;v)pV+AeHM{~MqV_egbiX%r zdG^qz3CHF)+J)lNCb5w*yF#=#_n`ikD@6U!rzjB_$$)aGz7FHWK!)b>snP4VF+2uu zF5O^D#iv@4UUfB2EjNCqKO_9@($FiZcMX3mf}R$Pve|J z4RfWVm9V*rfaq8Yw%SzW$FQTo@#fp&5Sg1OdD#7Fxx*vN%38`+TZN!%G(m%z1=n0L zy!_^8pKM+*)-I~>6bOu=Yx`KxBVf_Lhz-?e;&``3*SL(F=K=M8sYJeQj>VI~ZIXp^K3@cLqVQY>%y!CbP=6^yhXs{Eja zwJnd#p>4f_yd86xHA<}aL>NAgNFltaB+@9HAuck>V04veGEdeu$m75YL9Zdx2n!y2 zAw*!yAcG2j4Up>LqQlOZM0^_kzHWAnNKdLuMkeP@;&|P%^=@G9|HRO%!i{!N9C~y2 zc;1BlM*vaW3y@_`c1TG>82OV05xTWvsy!KkTFT2?%EsBj0GSlWVR_(!Da6!II#SWO zRES)6t;S8qM#~VS#JHTZGzgGqK}N2T&}hsh@tMpwI_@E9}0%<V46)1iG0V-5wP3 zf)Z(XNk1a&zbw3x$B;E{KY9vYwD#aV+A=f%TgoQe9b%FV+SKSG-6_QAeRYeC6se&= z_x?^ajfkjOO-mgTbRX_&N<(^V@zQf;LVi}1Tv`*HBtH>yN;X#7-$G7%=Wsw&YCoAt zd~YxYC;j_~7+#Vdj{Z)N?ut&~vEIJLYQ$l<@`@&AF9>+83v$>2{YeX6<;w0v}*=_vjcZ~fRdK2Xx@m+TI8+q16*lS+MzJ8z}RXJY1 z&|V0%{HOnXPFu1_BPm~)WZ1@^fGgw!f|?8ExR=*0>!P=xdI4(hD@ct~jW+mPX!?mc 
zU@N8}+^YU0G0Dn@o6QpRrrNcb>&~sVO`L1mT^i{cPiwAN5*^HOL4o)7nb9>n7wlM! z#$2RghT#~;`q(*7d)9|%)g7C?88azBhS}}c+UP)z%(iNgB;r^TEs!kR6B1!WDuE54 zcLV(Ys5%e$tgGw)*IBD}_te%}TSr^8iVXKGLqGwcf}pjwCL|;e8BfeYs3Qq`$ri#6 z1PGB4LN==!(Ap}37Da3Ii5P~Ub+7ts|L@QDT=MiUzVbSsbJw}!d!IYbJtxsTP9y2< z5SuZ{G*@bQi%HE;;`dV~Bb)6pF9SuFxu~2Xrhir(GDMF?v9|(`;ew0K`F7b-0G|_5-8I(k5~4u2_+`Tc zJ#|4kzel=_dN@QO0e(lVL=c%S?0liUDcYCHIqCRaW-uK}#?E9@fB!OK(2hNFY>mhh zQ;+2TQVeoBvX-~5%b}^AzR9(vx@g}f4sFfPACD?i>`7rHMT`NxdOM5~RLI2{yWgi2 zfm-%qIbz>f;rS68vr>9e-A5csJ3elT)1DXO!oCVYZ6%BB=M^69NLhvT73~sdaPo|n zj~L!|gYkhN6oybragmh>Q`akQu_+=c?24{ktWGqN;s&a1>io88_0}Q|lIF)~q31%> z82)cQ5AYZrxXwg)<_o-C#h0=IK#2gRYJuHCM+OIAS#fiRV1HwhDV z8U9jU6YW*b<1*%bL9g0qoIc|aSqx1n4V;x;PzmsiQc>fvw}rjf{VkaN-f3TnLw1QR z6BD(YZ1`$Vy=w+P8*c~@hU1IF^OoDkAuKhai3J!%7rtnfd z!)gQET^{*hd{5BJjGR~7*VnCK*lek2FAHpjUbJt3+Qbj9w(d2ZNGp9nQM+qQM<`L z64Ub@g3$q%NS}uEow{6RUDp%pV`D%JPui1(l{XuPxuR*cf^5`{ZGJN5tysdh4}=j) z=F?u>WQ#U9-uqg*%K z_ltvI5nRK*BRfP%G(DbZYLNcy-^4lb-7;5CuK_clVh`AtBI%|>k5$=kH$&47I0nwO zVgUjYHA`&52^yWu*Fq$Y((CQ802yVEvXg5eeYZ17vl5HGX<`vN>^7?si3A0lKFccS zPGPSGr#PvcQM{DoWX2=oQ+>7&hf#A!N)9UWMMB>GY0-7*I5ChG4_8?>RlsYwcNxpG zN9w>npTu(0n53}`_7Cw$fQlkM1nL3E*1F94i=-FaEQbvibkP!38+E%kA<4;?bKM-9 zBMyx-f=6KYG&p^*tj?y0CaXQ`Ww^>77Ytd|HOX4{iV%U3H;5(NYQZqrnoAN*tI-XT zRiky{dv&t3a<5O~T#gRy>PEu7G4hzI3T&Ymp5UHzE=~tFfhjrBu*5yY+BR{haF5^F zvm%i`jW-j26^+FD()Vt*?#&*AKUSM0nnGCdvIV3+9~UEhdwO5yCZpaF*DJ{}k$ks( zyTvmZ!BD$Mv=4DzdR@NNAxlZ=w^j=xNp|aYdn*L-fMF9)7`kukNH`W{)Opgn?0jK@{qSkerG4=V zyHqr}Bx;t+H$^`|`mI0xEs|<57I4K7-VdGj49swYBk%86LtzPoWfi+tA>NoWDk77U zak0EAdsj1h9}q^whcElng*HvVhjL^;hgBT4T_Ua?OjSgOoF_~HH;Ao9=4fUkiZwHA zXUGdbV-5Cx0J$kSnU?hz;(EK>fkFJyf;8D%3#M76Xdl9ys>t|r>9ZdBKz(%5wnPvK z@o|`L+XKjw!NuW!^^lyDQE8ShKr3Mmwos&JnW&zhsBX4JNe&AH{k%RSjt{(4DKOjL z#9)NSGDe{g_2Az?F7ObHo6ZwdN6N4Dwh$I=#p>TQ*;Mpb&E1{{+tr+XN7Zm)<)t#*3lP=V2gFR#EPlbQ>(BG11VHXk2Qs0AE?m zkNa|4Cm3p6Ss_QgPl`j{FRf#&-MI(gBH|lxt=*NxticNH-XzY5aR|^VOyXi)bMFyQ z$#s?Tr1z#EeRq7#Qu|bND7Ik{&wuRn7eeH!9SL5SoEsp=H&Z#wze7$RPglUO33mR$5{|7frtpO|hFr 
zB6j)+W{KtiUY+>b6ZWP61!H@aQ=X3l;sGfKl;`auO!bQ%DcZ;YSw(2sFe*Tu*YFFv zR?utA!(ra<*{%~2a8$xm(jhMr!GF6jMt}|mmy5c%zE^xod36#QZ?+skM8(Hhx{Vc} zI!7uN*r%es0DL}b@%nj)nkx0gufOE!UR%h_^KSuW13hx@I3g|iyEFMMm1X@HPP|iW zo|p_SdCiE*-zEk{=5j{UX+N}&4xSXp>{?-oS{gZOxKj{`5_LOV!^|co6AYEx-9}-5 zxfG3T9d2M?$Sz4voa43s6Gm+E6#s&?sCj`9{3gTFR$hK~;!tt4LPqW_A$c>#{Bd*7bJcZR-Z*e@MJdUUCPr_lm zO_(o8t7r0XHTN}W1ma|$YqkJ#;+fgDN5E^buMD01^ZwC+IRxEew>yXnuN83n28dIzN~tC zPb?a-P~NZlv^m(bg{Ad+0S5f=fwo&DqvAoPkRfkD(^Xht^egR|Vo)Z_Cw|VYQjiQW zJ;XUd2y34|^Jv9%drmNn#tDqZgfK<)8)E+o$#C3+yTfEWOdeF*Hx3ey^4oVVz}gJ7 zkIFk(Ev^ytfr9bP$UTfW-ith&{r=k^ioBICoL!CKaq>j`gnnjb*%jh=hQqkU>KS07 za)EAr2L;G|NaLQZwoEYkUExQC!vvnk^1D)FM^B`xUXysD#37BaPmjUAQkd6TkDKl5 z?O*~?=gP7x1k_^oahsd@+89$AqQPdzf?@z|R z{_;!g%|l)S$$PM4KJPG)F~E8PTOGU+EVB#5qD$FPZPt0OEfWKw-l?xSURGnr1U2=I zu?vns(i7dfb+PWENk+zeBRsnuA%@1|#`5U0ZN6;~$E%K3MWMBXI7oc3bnC&=b=Z^v zlJh`aX8c#cbH+MYzAb(aj4=5R*lQw5rn4F@%f9t5PnIHeoG}7kNU_`iq}yz9=ripN zQ482^ivmMeVq7pc+X^wrFIbIQ6(F6Q&dG6ohz=Z_FU8n{V$kaNkD<>#5kjusMfRQd zJs)D~f!2e9)SWXqdrP#0z(q{--sxj8sB6Cb>Yn_81AKqv+gSo+c*T`gA~I}ES~P!Y zO~7w?iOp1qx6VL50((!GDo}LW)Rp#ML2re~)FtNgKJ4JIu+k1{kT{5!h4Su}WN3z7 zz2w>+asKDWq@GLr+dqOA3gx8Ha)f;ZyE(CJD-sLg``&2}i}Y`0NDi07Rxbtx9xQ9H z|BCjk@bAZ~ppU^c$7r3G{^%12T$P{@EqQjVw4bTa~UIg}z(6@;g-VC{| zEw(usKMzBowq2M~IFp-e`vg30c6z2A2w@m8iQ8G9dc>Gq$$V3Qh-^G`EZA4@MB6Tq zX~JRRAY!6sgFPD}W2tWfmiqn?BI|mwY#Jsai}A&U<$2NfW(KEEwbsN>6-q7 z>>ZJeV0;Ag*yh?VL1E3!#r%tNqbs>)%M)L%wHUS3rv%7pRO<@+*}tK^8w$tpsg4x( z7I~+p zzIcq~a&d_g7E7@|CsU?Bh%!r|Jui-D&GDbLCn0Bb_N)IR)W-@cDNLs0#8BI8*kY4J zhtV!`PLuhGcv(EE-zujlqyFO{Erui2WddH%V9wiq_FspG7OXp3Im@~QD5o^2Ff0-D z#^M!tr7aV7Ir7uFQMXDl=$D@T?NBlcrY{HcB*=$>adWu0FXVrHtWOk#J}@E+C#2^peu@(?=EBk52K# z+Ow_Km7EVJ7}aDkrwOcWq5W?Qc5ZKj|LIST(1HY=oNFx-{P9Zi8XhSY^J zK)oZRCU^+vb61{9YL{eWd<30%jTlseWBf_>TY(b`2gWRiH+IHp@O-R{;1@D-iHl1c zamfeoceB~>SKFgvkWU;pAT~q52k5TJwp)l{k-uw!{WYX>Y1uiyD25N$e5?!P7u$z{ zqb;}IuP?VJPbUs7@XF&9Rb{&cDO=B*`MsH7r+(cTWQu+FuJu+WaI$1}p7F@tBEe1N}2YKqJ2_HwyT- zaaXR+s?)?ElY`?HU_%Dlg-tXqvu}LY-v*(hW 
zA?&@Fsb5ZVbhVH9;`6^pDh(7i_`elRhKZVuiOm!3Zb2l?Y}>ll{wUz<%=D2X<-3O; zoe#yN6ObHDm6N{@@|yO>6qS81iBWSZvWtbu9<`2fds?hVfa*}*rbW;TxD73~80OD7 z6;d3pQ)Ixl&dSB1Wiv)vLL}c1N8~Jbd{cMf58!!&vNI-!TrpgzTy6twov`P)cZIep z&haGd2KYYu7*UW@pB1WFD+OtW$WcU-fM>^(BwiAPxK!dew>;@T^rXP7(do8F&|_z1 zv$_9Vn5-fjWbvg95v>8Ut|n2nz_JCsB?o8gSg}qF7A5T)S00+&c=(A;Z#PXl zvVFU~A-?LmW@WSw)o#?z`mtvw$0$5bJX_EuW;CJ36@sIq_8qn;KwA3F9s{j5z!C19 z=7=CdzP-KNPCpmgrNS03ztu%zAa&&VX}vuwNL{(AjoZfp(lyeZ+p0uUV|48nnHsw8 zCmv5aQ1(Co`8@QtV*);Od-A%IM_??J&pr<(rRb!cozIU2y@w(_rV_hK9LkLsFn9J# z2EVi}^5%MHl2gn}1Qvx~i9@BXySj_r|5IrH{n*pO%cbovjJI{-Q6f#!2PKGq3z9#p zDjc4k4+sbF40>JBag3rzpubQE(PWj{`U{W+*Bn)QXW1VGDe=hEGHmKV>jVKe$Q!pr zXRWom@c5_rC%(809?E~~XE3O##v178866xq>0sF^>nRw1h_^+zXjQGMlodWr z;h)P_h^R=FWo)3?bGi+uOb0HnGH6ePj+|6?; zB1l&I*Y00$g%^8PbTMio_J4w2Q68-EBU$nb7&Jovbek)ZbcIrzJ|N(=M3IkIwyapr z{v|w@gXh=F?03Rm>%A4M`|zZ=Z=wBHTyJRI+i;#yL1LOHlewR^>&eh8HrPqCK- zy(~JMi#Xcu+D;w0=!k;5nTQiwlZX0tvtoEA-Mjq41`E=dDOhHFBpH-akY$f2^TNl1 zbaSu2gveC&KpiHi<_q{n$ZhxsOaAYPPmaB@YkR?^&|WwXyRz-q!W7OYAr0{Blfm3S z6xmI})L6>e`flu&5JB4aamb|m2-hOw5 z=lL2Q-DNb|Hh1ypXd2qnqP_f)yJy=`LAroXD$R}yxG?=wv|0Kpgy!mFxTGtg-XV#W zvP8=UnG5PICSOi(O!SIGs3WEGY^P{%1C|E4_OUR7jk5_ypnz?1~p=#p|$@d-GNAyG1~4H=~5KYaJEU9m^vcl zFm<0G0=G@6v+rN&K@5W1>4k2Bh;V4(rm)N|6^9z24mHch2f%S^sZA2-=04?796#ILwI)`1DcUr~a>t>-EjV zuQ|KV?_3K9Au!%&T}68yI+(Vc5EkPTeNS{4db-}-$ad4dejQAgAyM;$oh?XuR8rXb zupAX8?_*m^Fx3N1>!9#aVzWa;x*Da&bbC|S#Y%rr>wP=#dN>TB;dj^#A}It_qeT67 zdqB|F1JN~8GbUKvKNk=@r&b zG@*&66_Le)W%kXR;83_W&+Hl(33`?T<3*S_XOs4sc%Ee>&`SHs&7LSS7c1w3|ZLqP#10yl_5&*HQ)n*)zr`^7DuoYqOyF0w^}zO*>Lu=3q`kFtFcVjEa>Swb-n5m>($ePk>yy;IvW?@!IBvVOVA4zB=;`eI@?sy{-N~Y#ZnxN zxaUJ`_Ezeh3wsg6znh_P>37~G2GvH*pEKkKg^+}zhH;kmP=JZbO)}2-uppywbcQVw zsiDE+Q#o*|u{9y?h*+B5e7q@0L2@D+P5Flc10~iGksj`sAq&bHiPvP5M0WHh4OKZ< zGQs{Wy5pA=ohB5c&c4+PB?Z%0Gl~d57l*Ng8d3vxg#ZXoNZr0^_LLyGvBTP zgIqhgj|a-E)#-sgykwiDi6-KZqP#r(LFd~eA+W7^l;_hAwpu~ za{I>ZMDPMf#tNj^zf2rzCpTxZ&Dy&^7;(p@XOFaz0t|{iL#$e4m}Gg3CR;!#k$fgD 
zEk&=5$+sU4@aTKi+D}9xRi{fXvx`Kl@3!%ySz=d&SVMZ*jUp+G1HN3lH$>Be2L{pfPT zoix}PoM~-lwr;jxia|HTc2C53qzAK5&$d&B!tkk%%>yn7Q*5DFNZ7h9o4d|0g?s|xthx|u zmHBqlT^+gL*`1NRNDNBh!9A9+?c!lTy7$gLHe00EDIet=YY--L&c7&XcQSM|_saIN za4^kP)0C*$=`X`|{#LWYURS6OzeMc>R3q%X;czIIJI)kz|E>|F;_X;9z9|GTT=ZUl zZh*L8lB(4wf{39VWr2O;ZV;{FjvA=X55@34JGQ0CRtS=BAxnr}_dwHQXtTuZhTj2P zB6+6Z+P1eiYP<%z!tN5H_UO#T;+*u)PR2*~C6&|U0rz@#X>odIWB~dsZIpm_)euhB z3hZ$)ysHMpI05EYX}MSkcCd7jo$-5UWX{LTb^2)QBp9}R8?hXI!oJy!;zu7xRdgEB zZ1;tPG|Uq;%_KoDH!GgWF-z%v&ftDu_Y7-nju_rL1K93^Wm-I?dCsrB+W`G8cD5F#bN0Dx~nyc^r4d@MX=q%UTguo z;AaEO#QQo4|DFszRy*0wyr0OFayv)kH;WEUv1U_rz{cNgHb8u@DFrY=SBZg~yXG?4 z?+iH8DEnXNS)2i$r0sf;D`?W20OptoqdbfQy6%tWex1d=O z=yz(@+i1}Yl^FLESY!zyhwwDDMzpsmC+T<#vD(0CE2#)3AwEmS37^L=M|i4^cY4-U zoKQhFdWCjFh~f{#_-l8GgK()uSvF3zH~0{})9jH34r_;<`=&A@)>@W$o+UlY5X=!n z8L;*EM6_psSD^WJPC8iMiadg?7w}4Pub!tZatOvHRkMvA*%8dHq6ym^&x=D7VoCa- zND4`?&)3bYy&@Ln9A7xuejI~Fs=XKw5vk(WZn6!5J~nr=ojVFX z9b1UjZn|AA5B5JcDgZia2E3waZl&6CGM~4lNo_;t?qb zTbE4DD2nHCJSGmA^UNy64hw`Ib}kZmg^jl@j-QT*uOhDLVn>r%$jCCEfn|AUKKISH zh^D#&;`X3O?|M13O0i#M!|*R4pUG{wVALENtvJwyu{?WTG|homdzAYx%IQdSZ`b?m zRzbvNbf#ODfY-KXJPi+f!X1lOG-kP-Se}f-soXeQE9_FGV$dtswu^xn7uKuqb??`OT{cYTO^Y0Y2MTF!gTz9oD&%QB;BgcvU`&ZH9cl9#UQijV0fE7B8W6c zW-qXXf$lx5x9tve%Ifu9paUVWmm8KRJt$Fc8~_9Ov5EVrews3EPGX zcpLS^|Ic#)w$ChUpSr>h2>N#(9>7Vt`|O80(iHDnY~K|0?cS~$J%LH(X6J%roE#3i zZU~SIsCtZU+#*QZWnq(aiD<7x$_Nfks)ZRX2`$Z{v*_0XhdSVpFO{Rp_r!6jd*9K+ zzWJ~xAwr^YoPI~{7l%x-AB3e-E{Bsb_Rfo5CN%yJN7hM7?R{~=fm`JUtn)}c@{C7d z`nQjgP3{Q0Qw&7cN@|8^^`qv%I-4&@!=nF}ZpQ_@T6{e-?bJs*ur8CWi!hNpaVe^OLY>o|-B1iKTV4n|G_ToyQ$jq%Xj?W{7J?24`sWszpqXhm zmBR5#^uXOKuVcg@x183N0=;(_9xx4<%TCAc6+W@NVUm$%Pm_2^lZ{ob)@1xqn54DO zgpnZH>ut7E%Yf8Po@wnY0hhcumNLSAn8aLF#;sQpU)<@!&Q>W*Ifv@D+N+|ypS3D_ zX&lh&QJh}b`h>kF260nTQq0D81g1uo`#OoiM>sOWwg+<0QagJBTx2`2mJ6VP0;K2b zv4=%_=Gie>5O_KS9-2AJ4v1#%z&JIIZU#>TX^2*DOjK9t#oqnmkkQc<8!RQz?UO2O zX-CY;$RSsaT|3FsYOLaaY70V$gG7-Uc!~f&714Rvw8*q;m9G3Zh_9hGk)hFS&xJIY 
zlq$4O1*mAoD7$E~=fzW-YP(JVp>pG+Y?x>&fQB-kV~&tdseN>H$VVNqh@4o{X!#2E zrjUQ}Oe+-w5exO6t6hK&O&h~oH7+MknF99j2A_9&H^(vG17dp5CF!8M;%6u$O@q2fI(fKyqv5 zin~lO92GTkW}`!kTALxRm%^LZ)(v^KK5!EH<~e6vV4K82*!EeAZI5V|LADa?v}s_P z2HTNSupYU|9uPxgK^oO0ueF2XxLEy|q%)=iY5A3-w2P6#?V3#xcA4`yr8s3)M{uON-aCm0>M&xEFj@U;)egH`H1)gW;Ty07F+GIz zU{|_Pv}f9bt->>c(a`1+zD#BXb>+XxJ`&$Y8wx3`1->^M1~T{TW5Y$#x@{#B+e+lx z!+tLgLveQnM?HTOK#t?{sw_pcXA_yovn?Se`>j>>tbkXz>vh+5vA+ttY$+o~*nb4o zi&gc}YPH5rnFGf^kt`HNWF06&EaVz8z+Mok_$}p$mioljDfWgqUOYBNS!#YJMi_s= zaUw^R^XGa(K7Ty{;2>m=IMe~BPu67~G*TrR`5~KX_X)bt2Uem0nIh~mp?VX2w)r7N zWTd&w76wT6dHJKQMG%Sm-e$W+(oIF9(sRbxOUZa6a9n)T{EmX?f)tAjd+Q8MDY7R8 zz1&`C$?P9#J^-nF z*en5W9gebfH(_mJA#B&~UF@)EuT3OCbSrjDOuhv_6~ZFkvF7o|r&ikei{QJoqw}SM zmMI3(Vr5zvy>kS~SdZoY8{+B?Eb`a)y~PC5+Z>r+B$8zCaYtrd15B(dMgPjK7W5{I zPIH^=x)4FuM7duo7F1)`RoI*mMEmwFVNVFWf%?U&1l;62#&x-NE0 zfT)!v)|FT+Kv{R&v(q{)?}%KGnP>M2hObiFets3$LS>4p(W(oq$pZSe*EENhPiw{T zE=)Acj^0#Fwa>+LDKPMAg)2PO!O|z>Y`Sc!Jt7X{r?o!DuhWx4)VL_6*q#>6h|yy* z>AcXf?};S|^bYV}0%Uz`ZsqZ%_z&kOiMke^jF$N0rN zES`@Ztzhuk_lY+QldB=>KBWKpgh zLRyAcbh~cV3+x#o&p#EtmV<%_kn}?Pc8I_kOBG}NrnN+%g0D{EXmhXtiSdV;*to-n z3VMA9vqH6EVK1B?@kp1uB&3ME+11(OCo<6K+9G>jFpT4-1@ifaF7@YP>T_Gq<=^B3 zRd!(&jL^T8OLQxnBaVO0TxxK)RT`r30&8173M!elQ5kBsq27@ zj@#qbFOb9UvfqmI%Aj|XZ6k%r;NZA%RuZE1?Pn83Moq90^4^onjnp;C2!rwMYkh?E>w!7c4C^OE?NZsa{7y7N z9jH3cXvHDmc->^1F50_iFinD=Qf9-)#Pr@6&4PjD(Gqd!cE0V%o(C^ntHkmO zg?tu>ej6sp_=UZ@9xfsz)=joy8=yVYwka#xrqtL!#36^)iV@8JZ9*pwEqIN`>ty>> zJerS2>VYE9jT?dL@M}BUMA06UMcZ7w2}oY7=7$So9h90XI{8hp(s(NvMoz;p%X$PuMMVe?osW}1pv1_)jg5-6mX_NH`^dL@a zvqbyH!Y#$f7z@&e#qy%i_~BS4M9;=&!~xM^$VXR9XKeP!v9{(_Z7t3Ab79KK&Wzi| zqFqMnoXYn6+7NnpJeCfVp}`V$hB(v!opG$X+H-QFQ{)$YCn511V>x;9rj8x z7@H(9dqbFvF-&haZ}l=GV+a-2OHdVFP-pjuj@oy7qMBjBR0J3$uTqb~^ zvIvjD)jtF&H*y)wW7Y+B%TpmjZMBRq)C-a=zqQzDZv}XX!)~?TClOkySM2hL7`(d1 z4hYbuZIj2_8M~of>>kkD4=^eKE|0`zrG~(Kvjj_v~hDElVkgw!~e2<_@z$N}#dq>!diD%k<&v+7h0{bkAYtNKy%ssMTkLH6pm?au=D>y48#EQv7^c%_c9NQ$uJy=dK1%dnk^8+ 
zOO??`+YusYI}jaIy(|Vz8%sz3J|RG>9BNu;pCnVJ=`H!^!ak#)IHR}pXW{vP(_wsp zpo^5KS%mZ2M9n6v7t1 z1BVo*qmlD{F%-Ry-koTF7t}bKHpRa4ya#EQf?})qA8Tqa23dY(H zG030hxt(VM#e(D)d8rvI5PDpOs3QX&3quakPiiUsv0MzVz7A$s_?^73BTr^0J6n)` z?{T;NGLW|ov#ui5p4oaLb-R#%VIyMM#k=Na*$A;zo5p#Gh9;e4?F$@ajXXS8S*I6? zNw5*hJWJ z(UhZWWZM=364)ZU&nLpQ{UEAM{hN&?floBqyE#^G2S+dL+jnz%kKUsq<;t46WjbM#{jteeqLO9 zk&7)3Fy8?9gfD%CF|sB!T09@ZJOxOza$yp1dS7ify$R@;O1gKpTroTcHm{uGaX$5k zc=~$S?9LMHxykak9S~OWve^A*GI2C%j6jbi;*q$BC*=R9aJrYVQi^$`TM|G%CTe@< z5bGrr1#;Rk(OLqOjYlqg5&;so8z-kfYa?m`~XJm!|{N82+%?6x*=f;%oi?qX-}t-xxNC)9yjzk|?7oTq8lmI$dO zXY;6gjR1KfU`Fdo+ac)f#5Hb<4S3sArl;b<*Cq(jt9#a1lSt%>vHE^dv?oeS$xpSn zguRv+TV9N2!MoywfyrXN(04UEm>@*9rCP>uV}ItMSD4eDJE{OiQ%HSzNgt?kA8Uj0{eBKd&VEM$)df? zE}gsDB0>L#aR{q-wf3JRi^Dq9dkd}K5l?hbScc6{mRCFwEoUE}=8Gf_H-!vj9*6PPbbG!WUJ=7q!-wisKS# zvq4qrtnYj9^sBoqw{@Gf2PBtM8FtCP2=j7pPvIfJWx})-OPXqIU5pMe@{F*+p}t2X z+!2B_?QN{LOW!A<4?gtqSR!{8!(~H>J;o_xcX9ZDUYluUBB|2#T^{ailR{GMlJ{X& z*gg=0VvaxFWM_N;9lpsjE~c`C2}>DOWRC>8y>X2#7VQOPMf@oB5{F^$8$W)7>TQfzvCGXcBZzS{?aTTw`yDR=er8Oo)^^ zeC6ow8Np}-XqKY7&Gt%&fu%87e|a|~r(1^>+mJ6kK63c&)&!d^7C9y=*K5xmL7zAn zCvM)Q#I}V<{lA^sMdCvJze z)BmyO%ZIyIVJAjdjm*CMEo6y!+5EzOAm{=_YoC+;4@^Zd9Gq*X2r&OM#_Ez3e1m^FlKqNOMkD)=zxJCb`wUIf z7YDGrjNiUg0ew@oC;XO=7l$!UtM?gbPYby$dGgwHK$vFY2cwpOyV(@?kcxXvUd*xc}RsmgbzJzs{t^&K9nDUHlLs=*BRCygF zNAFvQg-n=fwffp7h)5_mS$halyP);|fgp9kA4{Q~eX4`(*zPShOu$R#vZdMX4S|Kb zE7I)&0q?(bHjobnsQYS;wWbJCo`y_iGUhuq<@TnaW}$(0B!sZH&}!xbK`)Pi9=DrL z^Gp&oTkI~;6c?G@U2jtb$z(`>TOra@_18^O_tQOQWPYQ>z7Rx`4Bnrf{dH&`Pcpj6 zw{*TZbV=gG%_089xk9un#n)<~GKU5EjY8ycyk@)ICEAM{p3cL!G-1zEmS>pVvqEg$ za!e8Jvv^)Q&UafkTV*oA?buI9A7eF14yF-w#rL#0UPundd-j|#wQt|BM*d-LIs+J% zgDawgD0hbWsQ7$D^lYSLhvwEyD-{#rk4>CoJ41xFaeVZ1qxH-ZJM9~ud7`G8+Y#$0 z=w)+9R%FG(jE`MUphH~U5q>qWa<_>@^~dxQ}sTepgK`Pm-x zaMfdWrDz)cg^8TImqxm|J;YMkPfQA?xb6~%vI|=`@dH(^pg{eEF2BQB^3r0l>JR?pSeEC#~fHKd2VDB3GCM2DlA8o&N6cnGW~n7KAk zh}gSk#O;33-fwNo7Pc*GMCsKE#bk)1u<(dT#2d|}G>V8;ChWgS(S4!T?wf6LUZDisG4tU1ij=O 
zO)Im$3#Jiz-eXUSBw4g1e>Q+T*^NjDgptEc!V0>^hJ4Rsql|_X0|DB)y{gnM`#yB| zJ?9CZHd3_C-QxOJoTy!bWpbM;j!S~!up(O^jG$EP*t{BBE9mu%CDz-4WU9<(%k%Bu!r}d;oIatt$o?8HbNEGW zuy6m6P+A;a!-dvez#Ey-wt*49$(|8U@=0ZQhn1WI^hO+<$rG>wnh3}7v=G$$d(Er7qhwa9O`}oq_1Ur z9EX}|_IEKf6gQ*X@?Qb(2@H6QwBqx?e9Y}jst&j8JhFD;(Ur69RWZqth8xG+_=leY z>B-|$>zxi?Oi^xDq2()(xJRaJLZil333^Rb)@{j3R%cwSmIT?|4pBzE2=y3lf zjX1=Y9w?iTF^b@Y@0$H|jr}R4=aW3XsK!pZz;owptPI~HBLqD=&arqEWecc<3hr`eF_WWHd-(_i)g%JPhgI?o+;mlUYKE& zI3!F|H%X`dX+at`rl)Hkg)l16T2W}9g)lUEn$0dEq6@=$`3~zUOhO*(EV17Rg#Knr z!VheX4GIb3siV^6KJ8Iq#?`A6WJ`i`$;o1PgYcVxXK{N@9C~jgZg1muM2Hev*Yf4A zxfl?}1Im5d1YH^wc;a^TFFY=$IMc1K0PT9XZiV#|O}a#Ng{FjJK}ulViL%^NAz~5R z(=P&jeEu@)^-E$gz=jRAF(MHnGTJ^xK$*F-sp*1b#>EJC-V20$j?*t5M`}24w}G zAXYfWo(rj%%2x1H^MD|&kl0vf{}*U%vsm9w@YQh@r~%FvpbD4`;V^8CkT8Wi(MVr zw2j;7+e_OdCN)L_n-TwFh!U;0H%E3{CfOk|eQ+_?&$f4kspO!M_MS+ZB%YJsx>}|N zCR_%CCObT@zJ0?iTP8^TisINpZVO;1LGp^_@S}TPR}UjEIbyt0kfQf&lovJ@$D8dpVyOp9C)w?y zITfbJ=;1VNw4HO=qGTpT>9To@q-)`E_KC{w3NT!f(Pp)0oop2wXdJVR$xoz2ayFpAk03*3$5Rka89g&=0|HFRyAdoBEENbqH=|$3c5HbqTw~s9!^H+!Bn?ZkoJk^7238y zV+{o(VDQ7|)+qZc>|oEisw3}y%y)MRdfmHrlVil(Bx7(qiYLC0@t?xf=+L&Ahw7Hu<<|fib$tieO(H{G>!#Qpf>b3P z+Zp$Z_Dp16(@KS1)=_a8C|I40fT^%-+bZmN-N~`R%Yx)}Y|A#g@7j*MF!aQBRoL^& zYHi@d*e{09xshcZX0mgxgB9i+joWE>>9YI9bfLOjcYSwj5vI(j_sTHv3S2Gco-x!N0N6NDKohs$@gZ)|wGd6vB)j<-r%$t-(QklZ+;EVP5kFuYLPdm%BO zBI8Uy+2`MwOgwqMT_}jehfp~jCYt7nOl7fIuVBk=4~eNCz}78jhPMg1uuPlF?86X> zFFwvkuehlrlxw7X>my9&*L3-X8ArI;wZ$P6;SiDx5>A?LY`Hx zSTX9cZ;C;EWfJuJJ)xO=qO%T8l!6&u`$T*nE(y-|hV^n7-NBXF_yB3_MBO5rAc!Q~ z0v6cf5Y|@8V}}ykD9HGTrUbMFbaZc9#H8|f_P19=BFsIUt-cZ1{Rf)$hV4QZ@XWc; zE);_@ut^oSZUQ7KQ6SAZvSjlh>(}QyNoSEg`j?Y^JO4sRWkJuT6lGUk%qxcdtT6I72eQu zyOv^|`g9bRg*%K?L6?T#yY-eWOa-$MWqhF9O3LhoknF$)dAzWHCX=CWnPDgQ^<;^f zHFl0@dN%r@!8Q7-$UNUddrW+qDA7{E<2lC=^wTK;HXsC zM_})G5&KIlnRUcKjV{GL4>2MW2Fo?jkR8)J-?xHR+B9(K~|SAZCY7? 
z2+|WH47CADLw+)jnFYxx7XuK*w|m0FOO_vRRk@Y}UTm@4c>t1iUk_)?~L zjs?g`+qONmc71=(ht)RML6#%P2utC6w=V?f-uOr*ymbI5h_2(1?ZVy^m@L4Ko9!1z z^LkCkz37<(VNpMJMOcQl8wGtt7pBYR;DcgNUM!1)`qd#awI9l1)Gc>-TA6=9rw7-nQ{8SjJraq*$JvuO951zlGyMWTi79{y~`RX%T#Ae@} zC4?m0jJ404jbZe|tC!hBV!9+6`YisIhG^06?+Mq@e+^7VU<&&zdEgmg&x`3*m17m_ z_qz^!Pdd7Ww+Z{O9Z-~NqXYD3hqfshS{xs1o$nv_M2LzDFxYQGZcfpq71j;^i~hwS6LxBjPS zB#y4`F;*PX^%`Iwgjns97utuSz22R=UVVvu5yB3ysI%C8gi#GHDtK-t;3JqboEVyM ztllSw7+QGPX@3X>ZhfhoN#`sn;qLni_$H18s?SkYxjFDmv_6;x&`|1 z+G$oG+G9pf0(RP5alG~W~Xd(JhR$5h}5rCh%G%HL2&f2^P^eE{52^RNGXvwwy6S2JZ_UM6zyBU z-qB0*=mFhcaY-F1rf;)fj{qXpb(h%vA_-|-$;-_Ys}}UCu?In`;{6a#bKl92&}C_a zQ*LVoozqnyL>?jcb=}4AHXg)c`RWjvk9eJ37s5tyk4i?H1C&x23KClcLqBtRyKky> zOZU9qZYnu4dA;=!^s+Gf!`r(;F+#WCVqw!%D@{h>`F+6%n<@rP&W4qNIZX)Bd8UzK zj|sS>BhVB0k06;vk35#^82#LlaP-mPxs_$g&KG7_wpgJ=?0Gg~SSSMy4d2?piEp6W zmd&(`K%<6Z#iA)G*wUFTh!mLX&a&47;C*_tU6bh{d&>9BwtED8eDHziT7fWCiJnf` zn?hlqk76Ca;8=mR|A^~NG%`D-Ak)4R>tNjaAnO5))4@QN-1zRryC&HMf{34Jdcy7z z?Vptnv$20>^TqPnl}Se?N_N__;&~ZEBGaCqg*=X|;{ItAfz*xL_yW63fXq{AgW;n6 zY-R6!ZbP->-pOLDR{RrvSe@8ZY9ECx_!*dHS7#BdrmJ2qXPXZSl4JYq?e=J(6Ag{F zS~SJ-+O^PL3TYUx#VL3!3h|^E`)M+$7vo_;wr3a5h+Bo|@HuJ;U75r^W%c5P5O);& z^3)vS(gb@~QVTS^FiYndy;Z*2ri=D*l~q)1a|AtK7B%U1W^M;YnI9)D!c=Ks%7a!K z=tIkKB`4aOnduX)y(MC(Y2{UFmjDxl7PKh1ZP8zQOH9r8J?-K+WHg#LMmN&scAGd1 zA6jMaI-4EBg)sYs=FKW`TNrEnM@3;(o+t>2p$W&bIPWj&5G9&W9yCYFsBNHB{D3m^UZK36e zB;m5x!$spE=mo!2@_=%R zvN-O#TL@v8fl&)!ZS$}gE)16+?4O0bTHJBi)+D?WO9-zCO0t%X10#1W^?>oy5eyVk}o zPp!8`p$>s#_N*}FMfTfS5oFsN$r#*fEVM(yUZnyaa$f%@Pk=bOcf+#EE^&y!HR)Q` zweKY(VEJw{&Xs@p5E1+{*9(*A0-DF8cC+{-40`wd1rdduF@LPDZ%l}Q^B%p3EfYL( zN8`u1boo~9Xx1x)4$jo|`*$A(&`FV{lO6jOTB#VcKyDFs3v9EH7Zd%EZpH|nhmXx| zt;n$p9sy~3t6yo?i}twT;AWUO$b7IgS~g@0dUFXgE01n_*5z#CrIZ$ zN;sJxU9i#46YX)+;*MPNy}kmnt1+uRfwImiquqlRxGbef0WA+ z=2daLls?fSFMARUq@$Fy-J7gf&|8DO9#;_mAD)O=yN>YOG1O+dFm14BJ-do60yI?X zmK^(gG9vCW*4Y7Jy5M)i?Y=3HE>&b)ah??@7J}>4rA|{I!mLe^3Fvr{=L$g1a0pIJs zce!R8>n)CV055dy)&L8~>cr_=_LnCO6`ot 
z9PD1s1G~osXz(FLwm~F?o>+BEv@POLD4Qu>S=i+>Jt2pnvuscR1ND^P`dJ>rb$Y7Z z8~`Vk)mAJ(rfntj+Df9|y#?ZUGjLTB=l92si{%}YrFVDt&jzbH6V}=R(a3;y)=CbN zE}H{nR;3?$-)_f+s25wWH2X+^n0vOqFn!M!TR9g-l`h9(rR22rXM z=3~l<;}9q|D$7QTK^;c&8V7|Qq0rYAtdqCdX^(qm$MClGW6>d0&ss|Dx8jgmCf3l> z3~?{-+TC7F#?>n@wqkFJLp|G;J>Isg$vzH{Cq%lcJY@l)RGYSxInl;hiH3D{hL}{l zvusw}B0#k|b-%3fg~(|vc1lE3!}MH!8k<8vCgMFUnvQyP2j=Z~qGex*;WgMd z;l)i%@&gycaUt?#ef%zAkD=>MCYAZ(APOtenYg)GB}fg9u32w8MALJFMoT~84@*2E zr=;^=S#Ey}kSYwsvA`Tbgv^r8`!WGCY2UuthAoA530_=`y3}G}KH$6VuoWU*7@d7n zffqvH;NJF9GF`Fm!2Tml0ld*t@b*f}=VfEQeJT!(*?W-vW)&paYB2Y()k0)@Y|D777R_gqsN0w{ zkn^ROo>%k|zSO!^!l7kQAUX8-BpV^<^&Xs$Ypg}We!8-6f^J5%K(!U(dN!ENozAu2 z&sM_;za%&=XIPcl>1*Kma39G9(zOBlGc?N-^pdiR*vKCX!H3t*Ia;#ZJ{F`ZiI!^X zvbG~CzlgK!Dq+I9^K^NgXbM5UQ^)x9+8<(sT$phqzSuL^XRE?w;n(UdI}&1hlJE(UceC`!%YF#6p9-`={K$==SW_AC$M z_U*f(Ny>QU;TR{1Ys6s0@R6>wzXnj8&L^=~fJ~#e#O3y}AoaVguU-6vr(u?zWRD6U z@F?CUOcYHL+KlrQTN7Y(t2W7gu--$XJ^w`ev7onLI_AvttY-+1jm+osvXTJ#U6eQU zg@R$U;|W2V%7{g;9A7X?hA*eiv)2^KM|EnZy)NV>aD^SDWh`YHC}wLYbIKj__h$k zh05aU!SXkQ5qNm!7@H~DW!kfZD}z!%x^DvC+7A1U zptnK$^aZ-hyei4*i#JJ@2E)WbGL4;aqUlt$^l+~iz-W07d_S4)(1O*6maMmr#G$|X zjkUshNSBcp3S(`GuxeCSpQx+RWyWH0$hc5WD;paCp?f2RwJ=#85#O`PkMpqh)JCxE zOzFTtTI+6L=WMYN_}Gd`7SZ%!UVeNGF4yDsm;$|1v$(>`vPLl|8Sf{#)-2$S*AEr7 zQ=5R^Jo{@n8mhOSisKPR6y;fGL5j*|NBX-E#2hT`zG^}8#V~)NwS;(wAFHvgfj(F= z&)Py728Q0R^=T%JS1?nW2r0r|KF`)B?Ac~B1Lt7r+=FG)50-AS#p0-(BG%6P1zf!2 zYiHZ}EkOT0!fsc7T)tbc7LWK?>&v#q0mKS8k9=RiOS@gF2`To|tsR_<)@^9`*ivzP zP8xixeI-O*oWvB^1=|3$#lg~AyGXPbJVYv;(sq6@gttwpwC>wI?)BW`XNx8i43DnC zFd4unM$RyzWx!Igs1utC`8;b8^bX99$r{^}Aw02Wk&N2x6^vvWQ(*rTU?jDb&2&TA zZ6)K|N}8?n4)|US&zCV{oozQISyD-rz26>Ukrn<3i|qCQn7`-Rx&XL{iP=^GmnbJ4 zJq-KS&i}{Nd%$;HUH|{K+M%tjRnOPjYU{4Hir}_RHp&zbwYJqyk`Kuz+YAu8G8kbA zvX?+)Bm_x92#_#sgVt(A94KIGKQSyBR%_MP4*$>B`&{zr|0h1+zRq3eo_jy{eaE@y zcj@!L`2$j$rN&%AcaT?!G0P5s|M^m z3aJ0e=GcsoiFp@AD|=bc*FWx7lJ(;{yW+P{84Ips+eOI7E>A{h+3mtq#0%+|Jrn@` zClfYPKpl$1M)aW<1c-j*WHvev|E{x~z4a*Y`mRPKk#|Ma!l{g9m3j`&Gwt8z; 
zn6~R)O{!rV?2NX~va(`~=bMEoi_c`Yt~c4Og2-*eFdL^}=%1BL)MfUpD3n~nqU}&B zUw;gLe{CmGKDq~@;+U6*GT3)TBl&xCCR#s*y_$i1jXx3e@xd8aQKp^tTxWg`{<2(qK-cWXr0v}2%j#f zQ-B9oZj(j8Hw39SkyC2#r&6YK`T0*lWJ<}!oC$O2`~FK#q3okAjmMYFu+l$z;d)fX zL(VaRaQ=8K_c2=#;&j#f%6QutAerUxTUY5|2)Kh$%qN zXlNE#=`Ii~Cz~p4w89L5J=1K7g0xe+7L4yreZ^DdGH8}4>>}u$ImT)PX;`G*JfppG zo~;#y3gB&2sXZs;Lp`2R8g{F%iKv0cr!svvVahu+Z~UQ_7Q6ISAiSbW@r2zhz~@2J z9Dmj`4ZR@}Ob#uuw-qF3WSU~T{auiHdDX;Iw@-y!B)knOu{E!Osq*k_zKXAVMkYS# zSq%$N(;U<3BZ6=csqtkBP~EUBt5PrsS!$=#g#G6?c%i}!9VM;9kxfnf6K|n6@1|AqCHT8V2W8v_7)~Za>b;e2+=Q4=)jEV zU6tJ@3N;>la=z^j;X_Z&uyfvoP9r%xlz;Hh!&udMVCq!bLQQBh)JKyP;w6@2E!!fXCJJy`qx>~vD zG`~cjxM>333(3Y=_MzZO3ou+uZ6HUqTDA1O#CYjEN{ibnVV9e}Z~0^Gf_d;oU)X0>?X|cxu3`xO zXg^dRTBBmQZR_nJ5xgCFI4R0V#Se*N*UzS>2zk8D%CIFOcs(cv;8a`~sc@{2Z<_=t z4bM+gc>jLU+dyw?kJuRbp_?E|unXkg+|UU@UV45W#f5Lzin-APV@$v*10_ZtmbTbC z0WyNRU3a}*@F!xaoJT{&c2@uxXJ2DK6%Z#papRgXP0;&P3SHq9^L9HdI`Z&mGb{?u z{WAnaIu^}W>cxl+)pV}i=C-wJDl=uG4FkT!bF^pt%p=*+S@ z)EfBWc2A96h6#49fODz?-dyV`0x~$ba;24>Oxb{v(rN+!d9%q_o65(#Zd5pb^seW- z9)qHT6owxh6UA(vfHRAsqXKIcrkq|3=eQk7#WK&N*-tu%^&aWYE4@R4&K`#ESUsE& z!As-Fs@^_NCBT&t&%I7R;05r5oQ#3vvjjbTK_SKny9y)tA$WBPDd~J{=h`B{uuzH| zfJe)x4@89l99XQ2hE>0V5NI3j8OPbL1&Bp-W3aVbVl^Tlz4odW+oZ5JWi%ci4hZTq zvT?0!f6I6EX1nSTL=94TI6)PYZ@{C01Y5&-fGVRE3q+#M*EpoHuN?+>i@1^=V-E=< z&yrGkN-hXU1GCuLgvi*o4ci@!M*#4XtX(SWFkg8O$j;x@2VR-R@%ntFnpUj-?8y+Y7`w&MyFg`1SF)& zCAKw$H0Z5yd&kET#Mn!f-lFCo}(>8k@xB_6Jem z??~mggV@yHpGu$y?CG2whXk=)`7*cs_d83+0D0VI3R8Ob3?`uv?xkc7jbtu)u1U+y2eLFYwikPe)FHMYwa=ZC&owYGTMH^Ji4|;A_{y(sJ z(tC?>(<`EWBTIKJw=N$6e6UNuHi38V+eC7y?0%Z5EM3^8`T!rrDT3bY{SA9pv1525 zB#5qSC)@N?g5vn&T=v_?qIe~8_pXk+?C+h07GrLztDt6w&5P{O5XD7@tU}}oIymy97!xEU?gY^#uX2dg# zxH)(*K+O5bb5&9hA?PvsfKR<@ot9#p;M9M3RB~OF4ON&?I+CN|5?d|=6Sz_4(_otg z!^EgVbr$T*|IKFqgzmf#h~YnasIUgX+Fi|d=Vt)#Qj9A|+iZj=l*(i`i)l3>D4FN+ zc&aeOA>ZzKX|_DT_r}jWxEbe(0dgz+-U3YIhMYwScA^~u)D!ti$G z@GlZLmzlZ_zD*c5B1e%GHbRh=XSQubjo>jM=XzuUNB2dxK@@L1AAmA8@`wbbyXQkO 
z$rVdwoAsDr7*R1kR(<~s4n_oYWVxBesAeV|vdg?^6b~1BTcR_JT}n%>R8(tj`Ttkxp)!sf*{4B05LW3ct+eh@wJP@n&bffU~!I-tHOL z83gOwz?SCi5WcQ!7rXtG?ml?6gOk>Ji}e!;F;M?c_yqqdqyWmXv5atZh~#zO%k6Ao zmYwm1PD-+IPI6tjT`7t)9obb|h4Bl!Rz!qCS^az4jY44z>$0OY+Lbg}mgvqqox9Fv z3&T+K@+-rZ3ervtUFF)=5SMJKMpN0|5=63*IV2W|_JIgNc6;n9D*17@o9rJ7yNCy|6gcNJpnq7qtdqI1i|I=KwNJNwZgqq zM4xT>aWt|8+HA8#^EyXyptn811KSoKoK-Rk*DkD2?R50IBHn612?tdTJk%Jf&qTAy7QkJV6=4Sop*qY ztm9bz|EnPK>Y0^@kF*bk!XOClTkGwzuR`=ee1f0!!r9he6la`w!7+O+g-4>GVXvn! z+aDf^pH6x>;)CDHLGlfPG-NRQ5Br6HmmVotl-cBvI;#Lx)qI;PNKdd&!u~A>|GyW( zk5u>_RAPlq27E9fY`a2I+J=h=ezXTf&=+SL=1~2t>i#nzg(hMcz#TzEL^z7&v&=~- zg>%I6GlS1{o+xzM!5!o6*CCA2Dyvgi#p^^z)>d2X%&$T6-X96`Jp}odre)iM3cAoZ zFv+o!kdn)R#6bJuqz1QwJ6DYxq;_h~ifVWLw zi8X@WpuW17IwXu_ll3^h;)p78fAzI*K=hRk7B{`z&d3xvd9KA-Ok1)L4ELd$KG zaM%e(E?%PUc|K(6C2I&T1i+@e(%uw+v7?+qA6HmXsjJc2>5~w3q_NUIQ<&*LxoVdG z#KWk)_FK?>B)H25yGs;rMj^LVy;2E=6-MUhzI8SUd;-0nog6kaMDQkLCPtQ6H(@U` zdbPd7dW%Af^H`|b&jhI8Xmi=IWwkaTl^d4{`IZc6`(Wk54u;6cDFW9?pN72emd)mW zJN+Clid{_$-=A{>q+>AOVdo1nZWx~IrZq&ZSR{?(jZ(fvJ7TN|`dO}6W914v-$ikm z%C^r$@gKT$KXFsRFS+_$h%TTk-Ye`1P(NT)V!m!3%Pr;-u0zfhq~$mWFR_aQ$V*_S zuUAM2Z&GOfogj=PYbV${3RA65Nl^k;bs;^LDN30i&;CEJCvp@w#m*CS-Z34FuVi~X zP_!=8kz}166@@AeFIr?1&m-)8mX^S3!)wA3pM1FP__hOX>FVr$0q^e71x0wXn=OL3 zzo>14?GN#9t%&Rl)Y&H?KGI6S%HH{&T%KE4GjA01iscW)u@{f;X1-t_DuEMv4jyq& zFT3tL5WVP zgk5q>mU_HBRutsaQ9j?chS+5Dis(+S(YA>~g^}5LO@)6F!TIYu+%CQV;3XGA_W4blF&VDR6N&D4zVZ2Ln zCW(Gh{5&mdw&z2($mi6#7ZHuL_q6bZ!|8~9PXr3ZgBL6ccni6q$D4q#7q)9QPtH(O z76o}6TEZJ=g?-@W;LUGOh$_)0N?|W3k&Cs}GcI;qT3pJMRbXccQ%pWz?6Vd2N}{)B zt#+v>@Wi?$*KQY}mdH@S3>zWnQwZN579W~I8bSt4^JdsS0UyD|(MjruA(l1qYU_U~ zu|Ymn(J@(uD3JuTvJp6$ImUQf9%2h+U1z%hyy$TZKk`F@tR$}bl?}Pf3&EFciM$8zktF7}^P_GHr}`#eD1A+mbA z@d}bt^)OVGeyp(nE(hIj-G$UGleX#5sz-o)DrBSn!2pl8mgxapR)92TEq%}!pbngH zfA=Rrc*?>;U-b68=MNzG=#ehCJsNQD(xQR_%!$N=H9m4sII0kkOdIQyjoUHR2&7MX zVWaO#4-Ob$V-Xei@P|aC9A4#2hR1&k((+^D%dFrkkK(EO;mtKx zCrBSeALH$sLXL_;;)84F+nGP~SR{ce`C6vTABy6AI-nq|mSUpOgfW_8n*`veeaXW1 zC9~U?ENx#p#ap=PzIW5tJ|o#<3sD 
z3GW>#Qgi~!;GQ88rA8+k<87v(%bn+@nYKSf)08o>vG&&#?7`0dZ-Qz<8WZei0)d12 zIyr*=*43R9tl8v@DY5U1LI&B!v0eg*tZj3#%~6;Epe-vSqR##(0*vIw3!}s0_K-Li zRgMos_y)8qzjzJl=2qrn5mfw~Hr7BG}Z7-hJRZS435Vrju3%y#5sh=|}sU z$X>e54>>!^t0Vj4%X#f-n?*$ae7v~(u+IdksQ}|-*I!H6`=VzNF9(JRyMXts#90(C zH&PVplx?9ebtR3D>!VGTa$!DYg`FcJQs29gQ_u?qs4%%=4G(xQEeWK;Ze64Dt5})zT=~3x=1i+ZkP@R?u~q>Iu)c|24?B!~A#;}-Z?qBDc|oJ%*)}(X zk5skViV!|HtN7DFBF7fX25D&FW^HUgJ%_E?A{waF}Zv6=xO%_sCV0_Jr@Ruu_Sr1 zTPBFa4y~`X7gHHW@lxzB0Y*E|4R-cTUXN7qMY11>02f$Ri&@VA@c))?y#&-Nn!%FR$a2Y9r-wo66xX{`?$20|)Ei-9SYCFoMzi|s4hB+QRx*n_q` zO^0B_-+{luG2*@pK;Cc9tTbv-1Elg+=tw01S;)7bp*}*zN{XkeHuU9yn zfyg0zeWcdbEU(L7xYctZ=l05(_6yP8>U92AM7+nV$2 z=#L2_KO#2SX9DVn=IwUoZG@wCZ{*O$z8@g3(|MLO2m`r6?*O; zIWv}}g|Ui3N`rdXrMDBLLRQ-4Rw)oVMIE9?9FuIes2Z8ldFDXFYC|4=q1E;;0T_dm zUg#j7e+Lkr6Kn$$3R6wLo>rqEsqkH;rD~m^%O%>4Pq+1=gr5UXi|rq&6#F$_qA7AG zDSRC8-Me21zBAieifofWG&$ja7WL72wo???;F{U~5Rw+lyWVjDA7QL#bs@Xn{w*5J z;rI(Xa`x4`fIh?IqY#7U5L5Y0B09(IWi#8$)>scwoa0QLxjrmRDSOwwU5?ghZYr8( z$ucWV;SmW|%D)o!j~B0xO5%^BxFwQT%uQ+J?_!xXi--_&vTc8=h+R!s27L5xqM~MR z40Cc-CuLJaS!%x(CE|)-dbzzIh}a@;$ZrOaEd{{;goN~CMr^cQ-kpR&PP%JtW;xzX zG;eWnY-HSWQfVTuZ=9XmV$rC8yCrNz3xsZ#ukjt$qeo{E!}Ln;m%=_#v3cf-bgl^G z-#3w&yivgO?~Z(i*tS%D&TLEUl%Ad+lkYLKesM)4La0)k*b4iKD2$uYiJc45&4iE# z&6IoHQ2}aS&za@3soZFi=m_zEC}D!xz&iDmUYG36SFYC9-XRV33+DKX7?r zLqn42asD#~`zc&3-9}u-PbWvljyxEem4D1;PiUAdSnr^ZR*H z`6jWu1^9KvxxX41BJP(p>n8=gaaVS``6_D@hSgsEF$n1e#ENj>eG;HXQD*1dLriGl zQ=DA2VjdET8nI$=a%Gi0dJ^*KSU1tu2~zvPs(IF`Fr6#ky~VagD2RVYSRegcw4ewl z6*vy3qj_+feXbaQnE9f;{jG9k+`Gys)A;x~9_h859y=H-|^u0`M^$U5zmc z!!a&;v<&@sfb`cw)@Wz;_t@y%rOJLH7+8|d8RJAJ_#C*-R*af9eZAG4%+%PBY^<=~ z3Nkr#R8H)u*w|6A0wXkb!2pQ9LFNmp5hqJl&32vWOfG!7h z%OBtz*|6xywpF%JMEb5511Xm~h7W{k61HL3JlKaJKbCs!oPk7zdpb^vBaL!7{@O(4 z+Th))`0yiap2I6h8zzc+)bGX~67hImZc zb8AYhOu&mNmiLo?3&Y-#`t3*RtL>uuI-~Pq9B^GOOpDRtm0?bv-vt6(VV{_t%15aj zbYHstSC|5I9>@Odj-j0erj@ev?j_7GbKnTOYnZ2G8(PWX+C2dtdaC-smW}p^APEQb zv|Cxp?Jf_b5!;q8o&An-`$=L`>qV1cPV`Ib5j~rUh5s 
zWv?q3rW$Sjx7Y`wFi1E>X2tTA5g`3Y<}Al(rDq#7$a*SBwvNiD_4BJ-W6z2LYuGfw zUC;xbt9|MgYgRZ+D&^Qf++-c1z${0*(!sg!K~Ip}QkmSmCb?yjO%;Vo)42m`QCI`H zaR)zo>@I1oyhB8IOe-zHy8p{UslmU&-Vq_3{zs1?m~|%FzeT0m0Vp}bW5Gi}pUpU) z(Cby6go;MRN9vZ?5`|r88F6O1Wx_#H&6-*4L`a>B=I6J5LM+Y4)I+(QBS1|I#r!r5 zUzihX{}h z?S#FmIF~i9j;pz9eDvo~-~#I?R;@7AMV6E&+OvY>Y+KJR^&RqA zMlxg_f@hD5V?;S80}AA;@HP=BH=7&pehMoeKYx~Wi2##z>t#}JZGe0Q<*5BFL6-)) zy!!Nyemf8gw zM8T1)%wZ|1ry$w(H|(pju>xLd^ts<`zY)dj#ko?IC51x+aKX)jyTv{gE%@tXz(Uj4 zbpzWLRupmKR9oV?MR{ZFya1EUP54r@?+JQoLr*SU?h(zqUV5FZXKY`{#jj=<*7m`Z_z z2j8+A0)bzz4h>rE77_fq)xAffoe?d!vO-F9hZ>@8UoL`|jR&zY_KYxt?7D7UZQK9E zN&Ehytj>CK=tyj+iGa*tzZ+*ZT?NT|VCyoQ7NU}Mi{*`Er(l>Y_1)6b!`-5J)u_g1 z+sDFMdFE%Ji+gUOGsjyqBXKRZM@4hV6t%5?ZmXqaO^k}8A+cUD)74+gCZ1Lt+1_eH6sG(%J)#>baB>=EdATiG zV5W)cttgIXmRe;B<2iP*HHISOeG1E%B!;i(ySmxpIY1vli9Ajgxln8oi4ma>1iv#} zzw)8jCZZ1{as<2K-ojb*q8G`6f_*qB?{7jXi}PC{RO?;n9R=9 zh3ForQKM}BmLNrzB+!2eQPHXHc>BMC&YWpHFv6Cm7kqH(69-!=?1li_r)-ct{P}_| z7=B=-d@M4Y8b`QrdE4ivMC3q6K+xlh{!0h-ZkxHX)lv;MW?0dP|m zvnz^7uOIpQ$t>i%JpiS4+GMjQZ5GRL&H1UT_hz!OB9kET7}dg+;+P~2re>s z(L7mUFPF(9gDG^1C}d1epw%-+Km!N|){MJ`5Qj-JuAT(^!;@@Tm~3gn6_!`RrBY?CA_OXFw1bt)msdR_EDd1ulSXykSmpk0Ei6ax1fr6CN3%hd631h6U2;@zS8dG2o2)Iyb zBV5+cP%k4e5RK-K=Ewr>U)EgxGP zk`#NbICBX3c-mEDGeSa{pGXAQ$mmIDbsm7&za91T;8f}c|aKj#RzU>i^%yQiN zJ<_n&eyT9VkhTCr1LIOjODGt#qC~?7H+KHm@QYPWAU#`0__#y_ALEhpphY%U6xvOa z7v|eKA==IR*z4@y0@TJ^mC^WqId77u)NiN6z9;0YBpcUSS3&=I6l>mmEESzyT57)# zbO~h?#S2Spu?YWjMx)c|a$6p9(IbmE)B7mEBRdux;Q7tDlgZ;fj{*l*nmKxFdZQAlX^@PvX5mFIAtnf24%nAcDI5q9CUwMtXkM*m(RqrI>5*z zdA>a_sIiB=Z~JqI<2htU#T5ISp!Z)fgO47)Vw#tgY^h7O%(hdW0Ma6c&T39ZYwThX zV5uilWn5uOW6QjV3@f3pDmP`oxx ze--r3Mx~MYiFIm7F|vqnN8HYvK??7@VOa%8e2@q(X0Ea@`H{kT`7Cf6gkg*gX|4TM zfa-Lz@M@KPAjr=kJywKii~&!AymF>#%q` znVuef2`P);FG#D0+-ol==yw!*pXPhK?(M0v=wmO54tv=4Y_QX30qC}a;}_Z)3X^wm zcRNo(38^Z&sLd6WbXT#X38CMqX1Q8v~9Dasf5EcDSUm7 zm%@&QceDaDgjNW{QIK zt~_=ahf|cy+&I4(`|4b;_`v4bI7_s%1EiV^cJSX%K_)8uksu;OZ`lo7-w_BT$A}7( zaFqz*CyB-<_Zr>ic^)Oh00F(Xe2G01QZk$2t&{nzQ3Rijc?HrQ 
z@?G-dORlw3=M&}i@VS;5^KXbk;-07<#;w1Q3nyBH{SO}U+yaaQE1r5|&sAHg!Y=#% zJIk#)K*l7zoOYW~5+Pj7Q98PK(=xwyhG7a<9ddpQDJju{Nu~iB= z&z$pd*J!Vc;-jv;a*g}nvo}Mg;u3Cr?IR(LaT%Q`w=))c85ld1^*x^EeK$~$?*MM7 z`1Rc^3T->mfMGs`>9L%69#%FOk+D>A-l{*tmDD+lNa7zK*{$VcDZ2)@i|Rl6KDn5F z<3wPPNU*{&T_$JVLj}D#Oal!p`(8}tVU|LlqR3triGC<$Ymm~gb3tpuUcl^s|mx+dkbb9tY2sk3zC*G*`gy-s}R9EEP8Lc z-d+|3hB+$Xxa}`OUKwv%a_rQn!Ia0y>elsE8!AXuIrRO53VTa0y}`0W%!xUxZEeVu zlbc;_>9%7~#9SrY*2_;|lUP4k{Gz=FvfTak43*c+rvPX_+ZpA{s@@?(ZYD9!@r` zv{ef8b&5Y`YZatMdacy{c`^-Wf!D4kBI3NBBQU!qKv>51ggqkYor76;_8X6jKshC) zIR(}r;PM$5XBW^GGRvp+ihA1rfKfEqc#!pGg$UH5cj*fi_L~c)49q?ib8(T+C-BsIBGfG#8#EW( zEa(lv%v6!h3eik`ytuP%LJG zuK+{WF*_j)lSi9sJxu=S>oZrcd{1vA7VgNOY}#m-2Z+MMoVS+S?Wx#IBsoNoPuNLY zu#N0BZK#O!Z*d|sKQhhD=|-z)F1y`mYq4_LvJOh1fxFW6S!G`qNryFrkyJ6SrHhX14`|y>k0ZG!kYA%Lh6d< z7G0QpSI~LF+$V-Bgq z&sFo1v%J{)Hg{HnPw=jGf53yX+P5??QXdtjdx}`rJeEp3M($YOPvIdk4wAmQ!P9oT z@p2oWFwAnx+_qYt8;U~Yb6aCJHiVNr@!b&;YG%JhUff?6!JEx@r__!K^YO*_+;|?% zti!tgmp4N4QPFQy%y24wvO6QU(=p3TQDnuNi?WDT8B*%fWKo?%Z_e%2D7?A zA8wJU_supVB%skf@Sk!@3kiJfm0UH+9WU7rMD*_N#i4q!brS(T5@T7+^b(-V2gW?N zXmlc8f=@(ITyEJq?f+OgS$NzDS#FiwIFO*Wi=c zUJO}_dBAQjg}9E2){e?5ds&bgJIbH1y$ZXuxT3(!pRkK(SRp<|?e?u+Zrl2d1RKx$ z1o_bp?`0zu^xg^%#L}}ZPefQ|XOn3Q1iWy(*WPS3Au@Vb)@>V+jAvO+6Uuy;i9>N&Z<0p1QU z5-dbrCT=eaQ5uH=^H9Kl;91X}tX~&RBI~R~6cTh)PUxsuZchr*vV5+I>;(bDBxM0U zrxR_T2(bR%mYH_LZwVu#A%kqPf{@2xxNEn<6u@oH8hcB?xs7J|YTFy4cTS6C+Yteu z5HGpJ{viY>$LCGZA<`Fq*O|A6>!_bEiuamcw8pKc2-KU+HO>Tu$(>wLEfXGdLX<9( zn-oSug^ApR2FmBc8gP1*dtsXcU8mb+0;I(Bc9G0)PY|Y1x@Tl=sZ9!Pe(+s9Dx~sO)yOu~)z1OdWec}kpAc@_nrBZd zOu715v5soBgCZc6SD$3J_qPBzfV|13JntnQT-JPWMwN94k`RMPCH6sxLqaEJZ?&)g zo;YgjUdVFWJJkQ(k8U^7AtCiuqTi6esEfB^|SG{iIPNu}`K85xT#~T66V!i4@KfVrF%3zZAt~5SbXY(jNM$%L8__o5y#g26b7sxNAQVxOXk3!0;es!CD?`6+* zuzsFB62d#zrCEl;h-&8~8?7J%Bf4GJt1q2L{6<7?Ct8#F2(dyG@B2P6p1`jb4t+1p z5Y+VSL($;z;Pz_!D3zXPa)oy4F5zh)u}Uja~Li zDpu>73Bpu!;;CnBdMb_&#!WUO#9jG=OYL=qy~>e^v9>QnWihjUsxae*wcUCi3A!VczR&KSa=>8mb*}f3X@Epow_K+ZL9Fv>NTyp-a 
zon^(i1N&cGtd~^Rr{a6*9^ejP=e}Fln{A9Bg++>8HdzRTL|s%y#{l|wgUu8f$t0T= z+8l)$%&3BHV zgpV(GNj5LQ16#`vtz2ur7j)jEmF7D8mnhB(6KJJMOj`DcmHBJO7 z7+PxQ?(m^r44p6Bj_Jy%br4sNS+(xPf^| zkU@fj+ah~Kz6G~$VM=>$Q9d^%r@aG$JB-e4u`dgFaTuM$u2Pm|hj?B}#H?6=s*@YG z+ZX;qn3jx0-G$T3uLhW`tCR7VD^J4u_1e8n6ZCeXv{Av;`AYkbXvp@tr|ervf-q6o zw!YY|5TF6Cl)o}jM`d=42riIl65C|AiQ-Z$q}z`O(jrFJu%Y&~b`VVF?wn;mRG5T} z<++%2|KYnp?=-HoS&3k|NiG9=iw-Me(rl!HL0U2~R3aP=^Qw;W3DTSSO`uR)G_P*5 zMWRp(8aUGZSuW^}jb1NsUu(^xd9k}^%Wg@?l5ClyCFt)3eXI^o=v@DTj?VNW5`3-8 zgriQ!501h}Yix{z`Z=kDyu(P?7Y=l093}hg7YXa97AeEG3%THu^|P$&!Op~J<22Z# z!s>?@x87v}-VD^H$MFLxvz?;BXQbt+hk#!^k|N)4=61OI9F{o37aa2$n`$QubyUx1ktfet34YMM8@jow^#B==CEg>PfFss z*;);Va&jp9PqRQIS~psDMJEb`U*7m=@4e1yLLR^P5@0UwBmmXymy zT9+eGobwF1p%SE*j?byKj#Sie6y`q(@Zj{uL#^BGy7x%wqCzR9!fq3Wx3_1$J=+Ef zP|T6)3`!GG=JL=SyM_+;2Z>RGa`qIR?U+O zoQFh#lSn;if^8J^vSN8@zCrl@acGR9=tWu;`}1o=pp^EqMeSwlP`3Nu2~o+WIWm@T+WSNWRs8YMk{sidQpx7mafae(7w7%Fk>JAh zneQo?mC92xQ|9uEG8y;Pv$*E*TQ* zehv%2tL>r>J^sjsafjwEut9=8e7P2xY`+$!*2A^+Hbr6YlF{rr3sAm%LL?t0nljn# zu1ryq^5|A;FNor8+B4Zs`x^*;cn2cW7Iz71a6V+c6m<83?Mo)NFWKI{WPaor5KAI0 zHd?VTvXfKLD?ajYHY@pr!jzX>RiXZF5v1*XWTvZ4AdF#oifpjI{ST$NPOldKAqwRd zpvrpY-wAu;8DpC)TR02}-Suvwr?-f0&Y}wy=Tc8AD8T&H%2eik6Bvu{3e&Patgb&M z=zK)0YmB6d;-bvpDZI{_-W16z?;R^fJ5Sif)?=uh^ND9*!k)?@Knwcn6Gie2!wT6@ z)Cq@aU6w8$gKTT-lB>&Yjwnd-V11Q`sVKUw?)MKOXo)tG`LCpT?S_ALmcn6T zaY4*RisG$c*Wx-zPm4yDq5_T=w+b-F$#Jjl&!hcn*Uum#?CuyCQIN!_DP-Hv1&}kB z(rY;{lHsq(BDqL-cV??oa9IEQZC`+qo#%;mAQgL@1E2x_A}=lB44)S{WkQq`&9mip zSkO6-&WYokIGyruXknC;>XqB6A}CJ_`oUZ68v*hZ?MPLlT`cHA#n0B!M&5LW0{B46 zVPuPyh8%QLVZkVSTo7phyBD!l=P3r}a8hZ8V(+F#kd8UFdbYiuit3N2xeh_(HeiUI z{vU#H78xMhV&4@Eqe=R)`fYa&NvNzC4VnT$AH+j>o}nX~1tQS^S;N)od5dibc@9<0 zI5>H!ec`_(@KJr`kL)W#aCYSny4ZMysh{J(8W!mIa7!w>SY~zZOkwObueAbUiE9E{4kf9`L9+Za zHiZYq*F>hM0#8oSt3q6Q0eWkb1-zkn^v*zatR_&>c{Yz?IY-s6q|)Fp z2p5L-YA7PDZB3E=-&eh^v>02z>jbEaopdf&uJ;Oh@rkw?y25@L5^#bKtF}(if5_o+ zp8ZZFc;EdLYx=hYT+aM9=h)lAtbUGf#24VoV>OG8S3GmPrp!(~9lDQey#c@~!Fans 
zG?)J1F?Oktq)@?PXq9yl^nu8WWVR*`ia_sVv6!8d%K2J@EetUOyAQHWA!cWJ2C814 z33>CP%Ub+_oB_qho@~1Y<9g?cN|W~N;5Owp0cVv}9%@-E-g=1U&Frb;?PJ0)OOFrb z3GWAjFdII>jds=7Ao%it3u=yVb;u<<53Crg6&Eh|TFa6xOOh?iqem@`)}kn{npw1v zPj{xhDjK6{S3L_4E^1leR@mM^r2fcD{S>a}?fkEk(wQ2-f% zU2~{uf!(ce5IIU3EF-6fr1+h^$_@*-AQ48neJJcak3#eRvTp#xW_>YID`!C?lM*+G z?gFlObu#Y09u|Qr(PXiS3I`3nEs>&4Vrhj` z+wCp^rik0`us#Y>dRL5w4OZAo$Us3Uf^dSu@nY5~PucT|hoF5yq_%d?w;=ei?myV7 zgvg$3UMl^A8bNA|=c6(DTLJ%Zafy*&?{5DT$;G$#>By_YhO?m{F};|O<;AVSq&Rsf z-)Or;frH|#gb$fyNW$n~Nh+fVHRpICQ2G?`-r-Bp2mWihj9y`Xnf zdKM=nHRkDkr@|yvFM=nCp@3?hx-QquD6 z9B59m#UfMaW2Jgbvm|5~)cXP3tgtttL`F#dD~u41H!VIsr^>E5zcYG>Ceoh^d*8^9 z$&|Gg4=Dz+K$Ur^Cj-WAb&>EJ!fukI5J6?FQu0@AsYXymFpW0&EL-^3T3VSys z*$qiL@g~FtYKCshmu%I^%@5(wlEeQz@uBbUUzSf=<^tlOpCX83UGoPd~^@ztx zQD!^sLa(8S*vSfek(|WK)NGw7v<9y$Om+e=T|7!w3SSX|9VW^~)Xm-xDWk9D=OHBz zq%%wG2jBCQjDenQPg`bySV=#!s@%p4hS^oev1pVuM2E5~Y@VXLyLWBprOT#}Gg&uB zh6oM|I=gB3J3~S8wC_WK!&06E<=fc-QSa_?cB_z&#Jo5MHTJLww2iNLi)9GV8hI4K zq+ZrZSl=kSkxvImNn_Y&)C+pWyk*w?Y=bCaVCt3B31JPEC5!A!7loo%PE4-YXtxK* z7qx=U9wSIfT<2Jo!gPF(!8S=j-#}xvSBgxVBQ<$klJG^^t=RBm=Jd^aUkrAJIMYx^ zXN#haTg$oCYXY8p-$d4|7hM8IOftrXc5*-idE}XEeS|zM7n1YrAz?3?n-VR+GetQm zNIkn+C<09=K!>GHVd{f#OkqO*`)!XlY|%xh z6)Db}i`rVu77N2}Q8M3HUEC|h^3n5k>*XFu>f4DW1w zR`59)DGK$s)s@=g3NtFNyw2tZ@*Q{F;bGdB$a`+R%}Z8C`y}ees8$mx_n-Na9jKB$-^f(ymgNY7#{`xmv(nFYJt* zn6p@hQG13IWFDA6N8tfs$tk}L`z@*DEM=r>ddiPHn$v`48zSKS98DFi(V^TbQT>~o z$1|mLKK<{DMC;nCYTGAqlY2D~E@bI*U6mZMo+XMBR4$T(*Bbr;ZC`R zBw<-EXF8Y{wces>Mk$h(Vjm$dM9LEgBrXb^^Fj|_@HSP@*()w&&)ab_dI9==v+bi) zbhOqvuZ!n~g#m05E)yg-4q)4Axxc>h`i3?PXir z%gW?JexB%lo|D$`!U; z6uJ;MbiB5WEUlc?wKLJ~jjuH@Q=Tu1H{1gE_4X&)N=tc@8o!PdMxqF0=Gj%UIUC92Iq9BZ+L#<3fuX>kMW>$u*n0RI% zvRlZ9WnaB2_I2i~qQYK2x9A1e5k>$Vi%ycc+=Osw(=rQW)@)SXG$%uCX+b+$*K9cj$n@(Vm z#@-T*da>p)&n~#h^P|x%fA`-LbZK*1$DOwoib7re23V1T&LdX_+1m_XGOHOxh z22(P=_10KIz>69d=MFr>o)E>8^HoMGL)gpH(N2M_4f*lzUTpgXsQ3@_?Ce`QGw#Mp zy5PAG7Pf;#Lqm7L~G!zeRxFIaD*>$`p3) zic9$(FAE8yZ`*qTV6dmwPWv$_y=Bog{sbE+in?+$XB9t5MGs>m!8#{BMbU1cYJ<%d 
zg~ko9YfUE-M0}>V`bU~TM#`2J$dg;8!QxND%{e)wjAkXhYY%4 z;0m_O?Idw2-y19B(C>B;X!Jnmm=g)-&LxFwLbbDDO z+Rj=}n^24(z8)wz$&8#lZ8W=sG%m?~QK2H_E@hDFf?<5uCrN3M4GXYA|$^+MdU%w0JmF;>88_8XTL+9w`sL5_mr5MRE zm<( zf25|uVhVeUvSjmjnlRjQN#TkxABy0^aPScOMo&ks!v8HH&zvXy=^qI**oO?ZA*rYl zdO{IldMh$Vjfb#W`&cQEdS2Vw68luZ*(_qaGfG#=XZM2U9h_0Hb1AM^dy9e;IAfW^ zHGH|CSHEvNmyb`Ra4{Y?Y+p!@a0hYq_pYFyBPDglwkX*;QR?ysMaQ_uNe?%?0M%7Q zWG#7Ru%diiX%NeqP$($^U^D&c2UBQTNgJnC-Tj{(0tg$P+J&f7mMJn?Z;Vy-5^Yb`T#KX+?a~S zVRphE2+^^;Le4ns=YsHac+LXb5u%XGtvBCd7x(jmMi%gZc(g4M^m-mh^hwxpVc3nf z9g&j7uzQ?Beq6H_kBwQjC_X&9bBs74m0&E&?98zfMWOlu98xS&m{BsU4|aQq;q+jLOEzstuAgX+3sM#;dsuG3jLjIE8j@$mSgA)qaxR;LE%Em9 zDK=ZtFv=-kHcDJpC+rQ4js!TNw2KEqgWJNwI2LsY`;ftW5e^L2h~OfX&LJzBZ6W!4 zT(p0E5V7!ncv1bqNz?2>LC>9meP_ITi$G&ITrRhfso0WO5u2}T1{3R2>@Us!yM%qx zN|%vFc#De^4sPY0ii7j$T~@Z}@GPacWePis98Kv3-(5pG3*t0ytWKeaiUcPmObnJI z;36Nye7#c8`Cx(0OF3cY;P$dDktFM-MY2mouO-=3FZaSfyswjjcx9yUR#5`Cx^2a- zq7DqLQgrI$dbyeYpBUi59cvD>&anA{F14OGrp>oyA~-*z65KyOBg}ZoDwWQDix5pe zGH#*$R$*^QU-U9g8R}678@Jn!6!uBC_dvTP#N==;G(h37P!)$VTUurhit3fNZAH1D zHiaYYr3PVbcCntV$qk1c6YXOW;pM>g2_EJ*`RYvjkI3Gl?y_BKU4}t$;c>N(%B4*g zMIzCi&1BmXAoD>Q`d#k@m|VRM)28;XRQCNSI@TADm_tRHYp#UNxE~UYD#cFDiwZmAyO&0C>M{Zf zZOg&)aiWCL(@g|MRv}wUD;01HAE>eI;G(pZtCiMWo2^nYlqwCpSqgi%^*}Fjo*dD(sjj-Wm65efgO%vc2|lyXgVXo51M-OZ~n9%7rfajmHJmOB>3Q&9#xAuv+W= zpl8vu8`Q@J1sECn!)UA^{NuT?!LkM5GLF%;ija;zL2uforC?ulKb8pUL*38bRuFDE z(I_oKB42q3O!E$|syMV{v0W?Zjmgc%%hb)n)R8|XVSN;K`O4Ais%NafC_Z+^a+Vge z*&dMDw(2(Qs)rrnah%)RK|(Gl2K0IxA?!6I8|&nvsacfh zNBB(iBc%NkZLf&*M6V+IAeDpL?n)MFABh6f2evLe%qxoz0J`B!fy*AO~!hOJt}b785hzzz65dzS*a%&$V+!3OeCy1ws|UVa0zNgB^FaQ zdgss+Gc6~C+gq1gmBQ+r)zgx-t#pyi6$MsNc#@saT0!S+fZiK@?dM>a$&>oP4+K0C z&7xN8B}^?Gb6Y=!5z?Xg4LII7yl~vX8S`v_h|U-4-te_k1d78+_#*pL0657iv5Aim z10Tod%{VrFoXrl0)%Vq-}1<|}w1qDTED6nQ& zx+rv0>1caOL1#vvxjg$s1l2Kn2PTdQ!#;98=>%qu4HD6Z!tfYIBueaMQOJ_PvUHci zk{oKZoGag$26QfE5ykEjR#Rv4>}{w3X`^A?YTE=UrUX6K-z$9bm%!F0a%*7srh5h+ zJng{Lz{i4Kc-LEge4|~O;nA?mA+f~;J(>~LXjQ`giz`HRuq1(tyF;Q;r|bcLJ?`l< 
z87y4>U}P8!uLIgv;7K_|Qg&f3JyRYci*r!4nlC^poYvUW3cGmtb*^V)U6Z2lTTJKq z-!V~WIP3n!oL<~H5=bBT861RuBAk?aZL(paR9znvkxFty6P zg#|*Vut;XQ`A8J%qOgp%^)@^K^lxw97$#}?GLB4`ol9wIFh&D+|Y2_eDfVueUd7^8|}u}pg@ zRZ3B846DH7AsW{Dbhn3cI?F5=Ig;BnMp#@lM4`Ew=e_#_B*W3BMmsJ@@wufsg#Jv3 zZsM>n!^YIWMMQ#6pA|9SSGcaWEBps`xkaR8fPM?+3( z9mHDZhk3;7`&KRC{YZ#bN1q5b4}3*$6p11GhXoh|SD0#cPUAptpa3jLWSpps6ZC#S zm9oKFgqc1c>1taPq=-^1o3w@Sp=A?oS4c8qh`p|$=N{Fzf%iJXu)XVGyp{A+g9ci*1%YBjBtJ!xrWLKUblUkA|{X z%In6n#kv)dE_8n_3WfuO!*8lQ`c+lfi>Wl+){L_L#a=c;(G>vMX`r4oKx71vfnp`9 z9YX^o)evGw3Tg`NcC#&47`escJUx6NL~ssjpM*TRzhw9EsUTzV;UCy{N(efig*;HV zuELCe4#N1=T_+^z>e6Z&e_M}K`p6&rAYqu}VP0v;SW6EO=DJ>eqvZ=CVh$bT<)q%~ zMS$CM-f27^BDtN$E)Uz@7es)qdq&zDAvkQX{Z~O6gA_?Fp3Rs-?lR_|>C5Gc8Qmj|H6dXbH<{L5hOzUK)Fw{-XF~9j)3qh*&NX?UVBq zyhZhV9O&I1efWk8BS`fF{n21EMDRxVQRAB}_N-_WJ2YO{wmFS?7Z=ka(XKt);QXs& z0Ny#h$HeWn0Nbk?+YwY@fO|Lbe!ap*36gVk9RGN~32|tT&0&oV6bJ2Z4Y3ytc?FHWW^{dTAlW#u_ScvuD|96`<7rRNeVIL zcC#pygb%PTHc;S-Grmu~K~qt$zRLN1xA6PsnUFshvf)Y>2~H_pn)zLXjX&fsx@gzlH$MEP*!e+Q2tv9Y+J#ibzcd7tgfJ-w<}TI9$!K$Ame! 
zifk7K3xQIl|CQ)2Q@)y8__K+F>sLi(c)c{iURMwv>4nX9K^Xx457#_-XmXj|E=XKn zK|#Xq5D<^@4XVc~BSLIx9tyRK1K9s0G0zK7Oyarf9Ojyv%7O5W5iBcEn5a>*m6C5& zA*$OAx7v1vqrP-kvbA<*1r%>&4xWmwYYGpFqgi!Bz-Xdo$^<}$Foh%4zRcb01Sv3j zNH$J4<@-cQO-$@FCffy-&}byT7(56G`1zk^uDxs2RuVOc?ipqNnCr1J+bkN*N{eUN z`=JoVJjOc&qK4FkCH5zxz!>`$Gy<$;oWta%1^NL$D;QRm9C@=UwJXMZ5wv6nT_yn zXSY^Cp&v&kSZ|FMfTf~Dn*CB?9~HehI({aklSNzmT}T(hXxr(NI@2XrRomAE>6M;Y zvC`t=Qn`-#rbv`}VjixALh9s-rBbo(ElBOJt>>Ln9|3RYNcKb-c7MotcxA0U93mrC zGStXyQi#mwZCuUCgbTLVZiRi!>A-_CgLM1xWG^{7x6zaQpNl}_m@HLRrl5<3w_6#^ zxjCZ17?#vZY=(e$B6q_jHdELc#kdk*jHC#(uqP{k|0<012Mn}prx0`z_RlJ{TLd+d z(7@)WFx3VILJ>BPFyn_C0ukWj#GLu;Mvm1?wdtb3N34i<$x8$%Z_jqj6jj@sAvK+w zY^~IF+(T160hG3JJU$69*;p;Lt)qh844y9)*!v+`mp44@K64t;RGw_EkIql)?c9@; zRg0rHo{iRB6xhj1j4iO<0<O1RXTR8Wr{?VZL2eFQYxAsQsl^OrbsY&Xm*2`S|SJw{IX_Qtw8t!avHIzGP!BC zZ4N1lOKHzh0hcKAdy{=AOl2Gh+c`4``&WC|12$7g&8gdfwsNwrRvNOu6Or1|+Bdh_ z(NuaK$4z*C-Oa4aY{EJs*5vB zl}bM#FJ72A&K8RjzLL?Y8jEq2qJ8J|Z$_3L)*aXyEIwfKA+k{RW~$UYJ#JW11ZJB| zCYIP)3qjtSY+tt6MZ$CnKg|BzP7Dx)<<9b_qb*#${XCTcb2T}(Dx`>PCbKVUwm*mx z47uryLCp0zg8XK$;ZE?jC4}jiWcB=L-D$A9M1ieLne>T9F9M zgu)mv*armEqJ|1tjQO{qjLB4O3~v^4sx$#LT04CyQO+iJ$=BHsVOlWaPP6ymnApa!SOlHA`?(ddV;ZNG9E(Qx|MDEqO3q@<+BAGf$r=xO~{WTYw)68F8+ zeygC@6B}280OKD;P@}k9#VPTT04WUj-}b(s2BLXYvUzi|d5V1^3KjQBv&)tfbXf)r zr({I7jktvwE^?G-L9)3e*<5N3qEIO(N9{^$Y=;Q`edrgwFP*x=b5XHg-HP)tTn&XW z#quDswgS%gz*s@SI6F@SEhu{Y(am1HjSp$`U5~E%UKPQA0Iy8sY~FyEsEKw=R3CI) zA?Dk0Vd}yCV~w4@(!s$w2YJF4h$a#Sh$h=Zf^;;7Q8Mt2Aw)A_xKExmM{H_HPTj0g zwnC6{kps!MwE|x0FufoBF{C9oUqEbL6HQI>49U&xXhD7pL+=@8vlaFW@W==vJKCos ziDTqg2vpcQC7>QGA*0tQK)38$wQuK3IM(A-bWEhsUWrqGC`?`W59U$BPF)4W*e@G8 z`D}IrLd{6eeNz#ngT(|7E8i6iU*T;N+;*qk6w>fR+>92~U_tm!woJ2~sqFXA;9m*) zyp&XOhHQ&nz8V^X$`NrA*P`33mk3^EI}Lu_-*P@N zvy!!S(my(D4MbQty0yxBC`{V^18uB=bP`fIJgN&Ybe-jkBjNvPc6K!s?<0)f#qA1V z|4m)i!|o8G%pTpZvq1`bH*}P*i%u6yZGwpK-7#TF$Ao&D71C!QyjG_$C5=iv#v*WB z$XkT6G*@}t)ZDoinolI@kaGzT6NTcE%@xV!N~;V|=7bRm_)5*m^ej%&ti4wh((~TW 
z8=0jG($=xD$FQ&@;6F8Y#2B|{MGD=zEE@8ib~NP4TToM}rj;a3XkL6^*h#hrIh2@vZq@Em3MnQ!^9Bt`UT>G?@h$8{(K=BakGZA4f}2 zlXZ#W<9N~rj2#KN;0b3WcKP~~l_(AGfhhtTHr}gqMW9@k2iYoPk$6HB@2(fiaRa|o zm^paw06T31L8ejrjyyX@VY)&Wx;8|B#zb-?J9&8Ty%Bo+X)lXNqljij)@5;2AvEo)<}zM;7SI>=i-h4LMt67d{n%d52vp zd`*5HQe)hx`qephMLm%;3uW1D_9Fq$IxaRIpL&Z$pcTUsmQ>Kk3$tNucGYH&I=I-X z6x86hlqQ?;gH>uRqQKJrsru;H4;vgZT?V2*@P8?|yFSl02AJG3A=yx8zZUc<7)j=| z*zQ#Ndl`wp2zswz!aG~4itmOLvf_b!{yzoj&zpK&XP+wUZOY;WI>XM}0)ZAw1KzF{ z@XChs8|5|cW>H`rUI!qTbazOdDeH1Cq~Zp1%sv!gv|-w6?+JQYNg0sYW#>1ZEK4V> zm>@Otg{`&{0eDPqs<71xle427k6r$)66OE<*svM=k0N{+hDg}r$6LX^sPr3PV}&TR zt9hzDsW1(RE~A#%Bthzk1Wb{KDz@1n@zIH!ZBaqb5@;-3h@>6kc6$bUjkDIDur zx!%6erp7on%esaj7*dozk`Y6sG zUQn0`_wd8kYa2mtDZ-!mc6~D#o?;Tl&J>_=1zOYcEl&_0xOUUk+cGN<;e;0GCN8~Y zw8f2z|085Q)I9f4Gwz03Na>>{qu=z)5K-VquFcjfOr_YgjoYRG+Bfl9_*(%uz-ZvH z$_hJUyJy1SvVBuwGO;uk*f0SvAF+;Aui_jjB*iPR{Z?V0ZO11+ZNEz;-nTi@!}-Dv z68QLD*3-TtM9F!XS#iq{@Q%l$4oWvqh~Pa|l+Cf}^N^Z5RV;9}5=qbYA8FGSq|!q> zwjEyCYU>4Q?~!dawq0Rl>e4$*7eY4F|_o8&M>+*@8jI zbi1~84D-zWPq73@Zt=H@b#@E*US`<&J3Y6*xmk{&ca*eLm>4ypXg1H9Vzmm>HiUx{ zT#;Fuuz!}A4|zornX{Kgq<;^c|rv z6(x#ZnQY#suTt|y``WWaF(xDKweKrP5mKd873fqFvcB?%dKtN_Z_U z$+Y5BN-j66LnK=!88rIhuf5b*B*|bu6m-V-ER~s3EvvjO}EfgU)pOG=Pf2WE#R@eN_){SQECkvIIDn5p{i^5=aZCZ4ALCc}$8AsdN zY@~>^n7!RbzU_GdMz)*E?3qwVq*1v|owYb5pxqhmjduBNvTJfIn_@pzn4(}xLf%sX zB;^BKV{R4n;Tw^K=Eh%yX>`~NbLo;4g?@>w_Opc8@j%dVNwQjY)*erRWYsJQCfFzu zywG9D(mRHQNFi>eZBaB37OumF#77}JkVOH(flpKMeQ?Ec&T}O91DKr0>QIK!*jq&P zI`%K-zIm)LZ0_5Fg|u1xr1pv6!`CmXq>wYqt-panz4>TWpi?$N&VyLE}k zM3pd|ef75vkWScULjz!M9B;9?vI|toma} zgN@M5Y(Xx2o;3a=d2FoRAp~=BqLHPrZxMLCD~xlqGE*e)3YH%}Yx6`1GT^d?$oUvS z$z9eF65>0nkpowm4gZ}NoQimu>anMU|qOP(6jTP6EEaF=(&)B zx9h1~gnf`I02S?g$66F*v#Bn*8Bc6`DZ_hRUBK_}EK$6YPw_TZWLAfXCUGEf5wHksqq$no_G4g+?c2VSaOn zW&m;$kCvdz|5Av~isk0me+0ZV*-)~x_ksE0-ZR*)P!KL6|8JGnQ&2yZQJBY8*mEJ- zS!7wl-fWqEV1;-ym4dhO9J_cwDc}h+-H1O1z?;fM`;kDnNR@PujO~_MhNxZxuTi+- z6!tFZi{i?LRJ0s%TdT1DAh_*kGVvohBAT`(@^K(_je5KJB}ieGVn|2>#IO!G@W|_L 
zv(7nR{38@+B04V93H*POohiEiHj41{UTAlT;Jqr(83=f%i4rE$;$}-1grV`A>5COk zc^{3I1tjhG49!*y&EQMTVJ_XSI{*%g8rNgE@=Ou#LUX6kN<^R<9$Au2>ur)?;G=pH zzAKzU>-WvHrHZ6AkvdIjw4}F4qvmtbeMHnsOfXEfvtRLYnTs7$r(2%@4^N+uwNQIi z5I!T9@o;g$t6p*>gR!NvY-49Rf9sv@CCigoLEG z^Mg|^yqO)w1 zO%LHd{rlJ)g{c-@_Vz0KU5II~Ew+~xCi~t7L~gqTe3|4bB9gX0^$ke=Lm865-rdT2^-i{a1p9jpmDv~F)(ZUoL-Q>EE!ZWs7MEYYGUycTaok5N{GT!@zKY14B z>SQERQRYJ~?MoukBb0krzFj5ct&B;+q+7^BEZ!RM_ab{g{Y9j!5WLN>UkUin_4&m` z_M#vy7<9XJgqW_SD-Shqx5I)yW7&9M!SA#`d(QAcbG0D+CabqakHjnN?vNmQt}C@a z1bAd=`jrUK12_{{MJdzC}igO zlAbLJGm7P7sdX3dAK!3Hg2>%yp-B3jH`ZE7NSmF7<=bZk=#}uM|AAmwkXx`QAamN^ z>4X;K|FtS=VT^7**F=GtcAFA7g9q2K(oVG7MS-0IO}zFzBR~~79L+fgE)b-#MF}q0 zY@-mrLMB3Hem1Wlk#cGV$hYLh&^ z?Xr=AFvBukWYYqO{C3U@0A-3|TPXlvyVs{7j`qBe4^csk)#?S|ljo?&R0122PEqM; zJx}Tt6w$S#sf;=8P2=oe!Y95n1ql=(yC5(}km5*Iw?@(=OZjWlMAr~)nQp5UKCyfz zxGoxFYeeD)*JqG5DoFcShNS9xAV9isJX^hg48-`L39aMapa9kW(hXMkOkHgvVlwyz%Zl*k-la-XmtH@8Vvk#YBp@iawv1JiYs zdRCYs57uE$@)_$8q<3+`z|Z{m$31cWSj>QOf3R1WhIQ3VwtWhdk*;PJZXJT&yKHIm z*lhNA_ap^z0@Zb$C}fd@6@mZ{1V{yCGR!Zv}sa zNYln+mZL~vaw9{?>%rqfnncCBm)gpZ46_kdtFU(^KhO=iT4x= zQ?vYil!tIveXEE4dxP|gj6<>Kw53YnjE_N;xWv{4!Y~XOCHV1Jy9mx18*dqWD6yQs zL!*JCJ9nT*G?W96s9*AOQ|NtA1$$cB2hb}LAjO; zi81YMT?$jqv8D;fnkL#&LE4j!(R#b>Umi!UK8S8k4iI(AI2z+C1=XS)EU(!bA@59e z%_|eEMih#|d|}KE1i<~=I_nhh`7tDxNxz@{Z%>M)wFj@LKzuD6qwbvY^;L6Y_2<9+zRKe0nl@c030&RyJM~xa7x>uqOn( z=kCdt342`Th(yQUaAP;ys<88oWiWOfdqa8&D(b$AJL_X2d4>IWU^w?PApBtjC|ZfD z?7N~+V9}m6Hbr3)k74EFQ+xu_-vu!wxI-+zW^9RAgCNXBk~dA!o$|IPXKAYz9HbV zb7UXu6!M1XM4qq@MFH$oX22gH=|`eVsd83icwzLSC@#ueDvr;uB}zD?!UQ(^j* zpPri+wA)0X?EEx7udM=-&PdtD-^rVqE?nOY>1bBHoq8(i)aY8^2K%BQosZNFFK&5u zeuz%iH_N#Gr6C%QbK+P8zxhOTb$zmWwe1#!1MbY?_MAYtAjI%hbXeSB*M8CKb1Un1 zn;=h_$xVwR!$J0bf}n30PndF#%z!Qson`mH7F(zwJ;Y|$ z{;V)_C9?9-zP5cUQ~z(G(cI*kGFdnKT+n}r!LXzK{7asKCT8I5>5eZ0>5wAM&L z5Z0DQPmBn=VC|~ZmWilepNJd?8|=@bz{lGY8tnZL#u$j%|3Fv$l3#4Fp9^~Z7<`ll zoRx{>(?2slHs4+n4$qnmEsprHHSAGS^uV-&v z&>!9+0`VO)>N}=xv^GKCP*F=HowwOO5K*;bzEzis^>))&p?C+3#P5M@dEX)u)$U$c 
zyl0Co7osEcu=%(~VX8>uW6b7qt)N#CxlG^^j+3c~-hC`T*V)ylgK3`5S@xg+_0vUq zgknQBM-(dajqO)|Na2d zxx%zR3HVPZy6{C7s?tvXx|g+Qa?HM^FfEr^D|vXDZ`X>VwqRMA)=alSAqk8j1E0xj z;1p4)uXpd>-K^9iGBZE&rT=vV#QTeUlBb1hVGZ!5J5U26OE@_D+oxxokA-Y zVtwzMZxa>t^&2 zga((~qRMdV06{Od80YPFuQ07EOz_Nde@Kgr&=`&y8G_8O!z*j)v|wr}Q$%lBar`mv zHAaU_^t0~Po)UB(3KAS=A{D{&q4}B-X=b^z$qPjFl16fEQDm2h;7yFwMkd$|qA+Ln zO|c9GVYDP4J2SHdycOJz*4Xlp?zVgEu!0QU?HpAfvyX&mJ?~|@R($XRk7QT|47GG2 z2J`U7HMTF{>>K*%vHA}*Uqrkkbv?> z@D6aPaU82z#Ff&n}xg}4l8(T5vDHgIO@dfDB^-vETcaO; z7wF>=eO?#ZouW{t?iN`brKs_+NSa-ZGpr+o||(lvSOmOflG*40F`LV{zr zu$KjCPsz6TG%(eA_JZP{zub{?}ouqvxz}U#$Po8})$1nP?} z=-EbnV@<0_AWVwR=gBgx;TZTO;g1;bgh>_vK$c0n%%Uu%k9^Jb1Xj`?d6J*4P z-fnFnCX)P~XRin{wuc+nSkVs%pWHp@imKc`6%CF?OI>j24?S@)Z;20uFq+j?s<3Co z224qG0In3x*^OZxGiLQ7Xj02PL#rJS3=_Xuwm+mq({Db<#y-~&M^kgSIPDw4od~9FP?$Wi z9Aqu)1t^^z7dsdGTreDV8f3Ganb~OPck`06$ClU>g~=^vB~~p!Zl-e@w_?wRIHuZG zdr^Q6i7t}lWb6$Qyw}q3*cU0M{8cn~e7m;BzVIW$VeU)YrqzBViWlFbXHVp6Syv;XbIor~@>@K8iRPR}vUt1fBT?W+n=TYr8||Ng@ZwC#&AvDZw-coBVdcs@ zZpFf0Mq#`l#>soDDC${WgzYZ^8uj`LEKb?mg7gzRVe5UZ=S)^@iVTZvvI0>^oD<)( zlaJ_Qf+Wsp-)>VvVn%2P#|jRjvqj;zG4L*XO+n|W4_*uGla zh~ALM%f(d76@@Yo1eA%jH38D6cQeEG3Q`{vrhUD=8X`xvZ+*jEsH5*wO?GZth6rJ)Z>FJ+(M!CCNuCR|~9C_V>==HW%L<%U$!$*0hB}4ko@+J1R!c1{4 z0eO|>IeChG_h*p8^pjanY=Vm7Z5e^Yu3g#(40i>5TyqQAqQmvxE$IDicyY4jhC&Y3vWF-+QApKVY1u9lq$|3nA`hZ)_8Q5w2rnWn^szuAhqy?=N$5MDlxY=cnb@Ok1v{fe14m)6{O&-1Y+v# zsk8_;Zm^F-B!^gx_zxf@?8I|R>=z-tdv1mmDIDfdb(6M*RiePp@%fA7ChYx?IJu$5 z{+UWFF*_#9jkl868#7MY=!=BaN9bK;Syd{wuzgL2C50Jhnu>1)K$?9pA6ve;szN_Rc_>J4WRu~=6H??noJX_&Z%Ekm3-Q~`;Uk1nqEm=F; zW(!hZJV%#I6++B6_81ZXmn8fPA~_3@;ns2XCs8z4ni}jIcX;;Pm@HUaCd6Fi{^kk= zy=NjDdyfm#GukI2Y+n%|Ea$aX+Y15iFV)qm4H!gXCd!!h>OzYNz+qPV8s1=b3sKJA z9g#zmQw9T_yU5eWGP_cg@RP|e-m_Ro_!MbK`?|*x>G@VD5`}7P$J$aMXO=ex>_ent z53F%StM+ZMzbOL7+Bas8v%d#`glL|9bx4R=%dp)n;N@`^&9r-jolALtZSfv~c`5>k{@J zV(#%CY+oM=1ztGnAdG|*V37#$5^eVC?O8!DKSzE1VTi^tB!ZvzsURc84tKMib+>2b zc1;>t3j`U^zW3NE!#r7{7+IE^1-$uOg+jPp1gexA4txCEka*7uZkXo;!11ubHVF82 
zVI+e5Ovmyi9m`uga7D0PQSe!?r*8D_S=sg*VfqjWJ1)}R6QWAqPTEj0UtoiVL-6U# zCIsu{c!d#3J<-T#N_K-E7r{F#vWZ-5i$fBI0)xN2ES@`pBs9N3YV+R~puKkuwTlBi zORm2!6`*-MZ5*EZwDlD9<`0V_8DbBIcuGCKYKA3J@mEG3uxF&=hhfgAMvyvRFXKqP zE5yAxQx+WUupqsFXgv1RVs=cJnbNgtxqbT{!oIhNEE{2TX0hdn2yYm*lm*>cg5E7W zHe>lP!`6#LFSt=rNlot#d5%u0Ju@!hdxr7a}{U>Cn zpSk0(YessN2WpT~Sr>f_uM^2TeqRGxIy>x!6a|@;xyYIJ6NQ$&J9Cn)2;sx^&Gs9G z8FxPX904x87es#`tAyir+*t~fE5QfvA%(q`NIbE~%0g@*ug)73)|{NgoN2Tzg5ELd zXm3jD^S?+#^9yj0UTD7$1&l-9=*;`bL7VY-&|(7x=p(&mzCMw0D;7b$T3a7!m2R`K zqQE#3eLQf^4e|QX%YWHITO|q%9ooKKw$OK_60if3y7ZnD1q)2b1=w?ew{9PeJ%B($?_j0zQ^f7(%;31pf~5G%fp*>6Rs$mZuoTW~@}m7l`b9 zhWFG5PU3@;SzxXehHuuBQkIv;1C$UxOW_*9aF&*VGRZuCCYm!bFjk0`!Z#m;K!tS1 z{WvWXqCovz8kgBhL9c&cJg2}`343lyl$2Oo$jz-P`?_`^A4pzd(rOHWOpbCbBTBwTalqc-k#`dGJ}E17R@<~ zu0f^kz>GV~D1~1Gpi~|GB3|b6NL&Qw1+S{i0~5 z%nx?B)}VS_C(~vMa0o!zN6BbUhVIno3c&u|cjKDFOCx(4%Hbaeh~$Hu#r^DIVqrBB zp^_!2z9M+%_uvHBD(Fq?r?c-z!gNMaJdG=+zYFneYp;*lkY5lE1ARj2ZK7zNo2iA4 zv#=&sNdj@XFFpc-<2bK}c1HlL5Ib#%0O_z_w=re~Axiq_HckNkaM!(!{q_@r{LG?D z^p$!soGA)C-Np8IR!B_SkImR%e-LzbZo@C6y%eH5rY@5p$l+9U7ADs1L*WyXVjPRj zry)Hj3eFWaGIlZ^ja5W@?-PNkhp{dmo?qL@GO$V{TEfjUAJaF5VE$NH&GDsA+qcqy zVHuR_RHNOHq9oTMA6IR&MB%)ajLgH!`QlpkuUx~4PBS@{Sc}5`?8Iq~rGoFQeBbCc zr`cmk*)eOb9SP+};y-1Q>$y0cybJ<21;uuwfd6)ep=YTT?-r4~&$vvAS&lHBg6zCY zfX)=b&wpI|MfT}v$b3;LHd{KW8wKE?U!OkK7{W3g+^VqF$f^yIRks^^CWRE9?`ME@)quV+Rz5*CJG53hmpO9+Qrf?;j`}xLS_g@f(D_ zrlR(Z7&Q8s2>K05`^WkU`BcSgYYZjbMX3Tf^<~>OVNIlT9ALjH;H4Ggd_2dl$b#T~ zn2|rmt`ekhc;xnGM2O`6P>!$?0q$85&*F}>T9Ec+%Ma+meb z6k+T*-K6zOcr{ER6bmbam=B$$3s0ybuCTQtQu>jpldVSK@Jbl0dqro%W_wmKtfvnQ zw>K1o1rDAG`z(Ohr{e$g>e0l|M!w#1Pj;=KcUiK2tJJ@Gr6|!MYo4Wv5>3$xpP$;Y zrHkm3$VGCI`ppViBcVOy_Dbz>QRu=)OE3d#PlbZA;*a%{&t4>LaS9}&cTjdLjnl%P zL|`$A47aR_L@c7$w=U6rGO$UE+qv0bAFbmHH(F0YjR2aX}c*c&^`zLJC|pGgNj-u4llE z!PAIJ_ZNf#Y*p3TtpOmTxy9}f2tS8NFOi*Pt4$WwdF1vJ!{fqqO61{f7ulwZpcVF{ z(@%*JRoPRb)4@!h;X~}oJP>`5gDc!Jg}q;gAlj3#$s&ZI)(P-w{DOF9776Zi9uV?I zVn7w)a9nbUL>5^>lJkzqHcJ$07?VHVmMP49>zlB>3Wi@!NxmHpsnQbT?41x5okPp) 
zy%44M*Ru=ArCB-_JEd0%P`#Xj=tIwzWk?{=xPsWY=oWICNYp+ba|4o?bDcl4mEari?>%xzf>zmh`%vY1Jd^rB*tdu1a^ zxY+6~3~wD1=Gn&zQw^;eoj=Y_EjgKVcO4(xF9~~e_ioqP8G`ifZ5Rqy5Qbi>dE;rj zC4g*P<=G$s*0|_77|}%Dg%{Zf5$PQsvG-tOTZm%iACgT|hM+Szyckn@ACa&!lkZ10?(g3(_)Hp9S`)0DSA3F4L9@`7p`Q{r*W#A$|nkM9w0iB(@fM?S55| zcD%iGn{_EnscGmr*uMnyb026wevBZMGYyN;W&eqwKSANv3+X%r)+xB3$W+6lDt(Y8 zL|L(7W^!35fDy!Pg*_i2jw$4T;D_T#>eCd(c$B1W5rL|2e!>QZxX7eN6P6OPQi*vv z;-aHk1R5Y)3ELH>Qe*~J<1)k}k{e4cGlWaH_<2lW?;xxg>F%d8#6EHLCAbJ77G`*W zXdp{1KjWp!IJi9@NsOJ}R-~1Z^Iw`u%tG71;?3li>W9lrs{FlTs zo|&VOj^@ef5n=s2^a0T;`QJsL8Q4#>8=fExhwn6$AFpVY-L^iW_;lbcEoM&$Q!_(2 z(G~=N=x5CK3;0ACiD=(ZK@BgWN@M=d!=!ZC4cb4%s({^ZE{3sn`?Md$*A3VPkOu(b9lQ#(7Wu?UN%<9r+N@EwFM&iCo-0S zz9&K>tjlXfrOG=s`*#RX<$Z}*K0Df@f;8&7>$=&Q6Fe%CB&o8y1%00~E+2b5@tjfC zCK}Ue$o+PEDM6oV8Sw=6RHutTHQl>Oc3Qw&_SVA5wk-wmCv2|;h(L@S>>U*Jmh?k% z8&9S7v0U74vc(|-i{b)I87&DJm~H!7 zU#hdUDV)7~bCEqK9PJV|MMrS#VC&><*OWtNb(2Hj=4nSaRd!8%#_kf4rt2l0Q`ka# zN(8v$S129oI>B&Oj?Y6BU(9g-RZ%{y!w1_<6TPhHzGs&` zA{dSwI2PobooAV%`7D#+SuSbDibCH#Yx0_-;R=t znLff4C-3d@Vz9xci9&m#&TF+90n&N*CPv$?kUux}8250w_PY?>)jFXIxz1yPJ~1AU zx{URme6qfMYdNu&2*ckXo_R}bd@442NSP(L`#j`HHr8R0 z)2^N7Ri^HZO9F*D`lRzfT4ax>C}`PW4ab&>Lf5BpD_9-ExttLmnGT)t%j11=jAaQi z^^h2g+nf-!XDR|D_In}Pm8@AP%fpw?@bZT9&A(b<%D}_}FNW6$@#}*dw0V~^^>ts; zW+kFiz_ID`j?LnwUC`$}E24J)b3~yt=(pvpn(OS0nNFaj0g>*|L=nQFPyS@M1k<69 zo6OaUVZKO8FA^A;ZLf$Fez=YLVeu&PwrKRiD2`ztD+~jXbE%E8XViNZ6k6ZDF(cn@ z6$p!#Y(qR0Fdy}7-YD#GVd>Hxc@t~iB|2TM!axwEGUX<$@HItf{lw5bmm& zVf708)RF{$nstVJNV`lz#Q2i{k;AXTtIv6}NlpjC-j0{%Mgw-WNc2Ef{#ad**nohk z7!?AH>k3Dk#ieW$qb=h~%Mg)1?1S-H`@H}YqlkOXxVQxhk@~N_Dezkl%*BskEPKzKctW!$E`bTfgola$Z&gJ zq!B2E4aU3fM7r8$Nd;~Zq!yewW!vus7}Dd*8jme&eP_!`d;A%X&X*j(Bmo~dx%JDl z7et^J6T6pj8Pp}@JQl>p+j}7^vavC{elBs|KUh4@v1P)fjg=H*x6;-NQcwm;a0%NY z6k11Sndf>jB+x|6wJYb5fZpvWUu6SBxNG}#yIW!BAe;C9@q)B1@(wW}00e#VtVO`t z(UqUQBOF$6ZGyC6Xt49@*0`sHP#>uZ&FmWRso8VQYzcSXG0ubQPD_UNE}sW(x599_MaTw zNV}njC{&V*$<3JEF68ac#BtgiDL9z-gQ1H&R-T^Nmk$$+ek5o^`87_ZJtCr3y7~OG 
ziW|LV5#Td&%k)~vLX!?JFMWH$3id1Zo`_Vcds`OM>lXvnXD6s`UJp3!crR{K!f=1^O-8LdxiaH&J$cTF)msGi8jh}Ld-@AolH;^M|<-= zkzgb}n;YLL0boaBqb(Hh?n2CRv@I3(4t;%RB-3=kO3%AzH6!-}fpCUF$%=RF1Q(4z z3`zD++ds$d7Vy5x7*}Lt1nH~8OLy91A>HnU>_0XNc+K2H7u%~T+y|9C`-iY5Ni2gc z&bgH!=L@ky31R+Hl(4gs!Ps)d^Xf(OGSgzC^X&_(z_go=oMJZ$coD4B$IDA?gedeB zulrlkPu8aNA(7xdJI>OL(w`tq@Mz7eNQMxCNmLr0?56!0!LnKQE_B}Xq zekOvC^lyWXlA6LJY%9qh#}WSE&-!=Ri7lwsnF&+tWF# z;-O*<1g{Y99_cnHg^{QpV^0c`pLdmJE_Qw=NdD-RsD_`<%OO5Zu1O2+pdcJ^pf0g9 zt35Bj)&Bj5*#JS>f-bKOR@kQu8XYoaP-DYI^FBhSMk+P-v`BDQ9JvOH+j3!wmx#5! zC_s5#OSgNNi2Ks=AG3}Saj0dpePgYcpaoIq`n!e0T-~BwE?>S8qJw%Ha4QW+}4HTG^Q^S)IS*J_W{5bXoO{E*2#y(s@uRIkL{r`vi_ zd@RO{lVOf$Mewn>Gln~{udF+X*~wKohO5Yy%|tu%zoPM)Yi9|=7U~6g78BqHaz7sH z6x7E`8guLdWVg*e5RuM1)Vj#NTuazlJHE2w-DjracEu@ruV`sJzNpGB6@~O~S5LN~ zAvSuw;oDnl`J(tzjrIW2PV5ToXvoqvyUcE0PXd}&n830PYGe-z^J~uG2{Y4P5%Qjp z1be>yO9X1$y*b;yy5VFb3ieFt?~6cWvcAc>DePS~n1@K~DGVc#(9I6ELHeSP6wxQv z$e@EfBb5(zr4pNy!i?c|+bc|89$0^%#azcY1+Wg`qFx! zcMCW3i)@E5xnsMx=G#sIt>c^R52m$WUmLliGI4|!xP|Htg_ zqIqkw^D|5EiUOt@>Ya=g2ef2pa$ga>C@e?o7B`Wiyu_LPCCsUb;&YzMN6s#5Meve~ zWwH2IAxpBRIa$*xZ;{qW3VH+^1YGP}t{|QTquY#D7MZPVjJGn&%DBqgZl`S}n#ScN z;-js%00YSFB*QsCh^j8R`jQ@(+;oX86!wM=kj$4%UM&*M`I9ZhxK)cldE7otwH*Sq zFuLoB{7(P0>16uC*m%~^5h9TO<#kNegn&0Lmd2rRp|JN5Yb3@}Ba_-Qi>UZNhwQniUFookMeMW=NKc z4U~$I3D;rhD%k6y=$9)~vQy}b>slbt{`B-j`xZp51$`(I z<1=``ULZm!5(PoqE(}}jUkmK~?VdWCva+Y}eG&ML-80gP6(l34tx++afes7%Z;Y?K zU93P;oh*Gmgb7^p$fUiKZ;=gZOblEakRQjeT{?gk*`0(mYwqIfAcogsO zNXxZC2#uDxf-*Zt6sp!`9dGT=3VM^$V&i#k4bcq35JaZF)kaM$#MDUuD)svIeQ!HB)Z)8VlndJwVtjE4mgRK>mxk!mcrxYvOY+Xpl za;pc5=ToH=B0%;#K{~bHAnQ;Nc5;w|`B-89)UcI|E+X@GLh_akl?$Pt3Q=h9p|;6~ z7uDID0n(Fc?K|wOUlB`(1p(riT`dC1kF9IOM1b8JAjy05y4JD<;r7HTRAVzk0=|_MvtCQ&paY-5f+}`X9ma{0o90`ZD9Ttm&z=g2 zU&Ht-N-nDfy$7+Rjp68DgY{uW$~oF!Z9&rcOcA|X$y6$hV^MtEi+F^~67*8WOCfBw zFujgG5l$p7qd z9`ws#7j?fN%)UPPV5v>;5j0hZj6cUiE%Y30zaGw zFR>>>98+xo*T#kRp$K%l+#TfD&A$QB2T7^s)Hw8NhRUsAUq$;*ZB(>tM^!~6Sj?iJGKJet{+ z&qJXTI?5MIVBi`-?}R~=W!;25!`MXhCVqn`^ud@|JJ)>zbawxHY;Y>&zRKu+=CCOA 
zc2S8OdL0v@A|lK7t+XqC=kWNlt;e~)?n^0ZG74xol+!i`o$rRebQ{U$-4!KlRZCsUciU6FEUCQ z31nnm5{+`ZrmnZ1zbEW0@Pt`m3E?ncno?72wI~#~yOl?~odSGZbPL7DMYjc*UuSdn zydr2e_mA8{+E4xf^kPR!3-5Me-@Hb4Ag1sHY7dI25t32K7Mmx?km#sG`OQ`%0v$c7 zy(x#+#}JK5{G3Sq<-L%dX*e8dsLurH>D?IY*y|-ldSy$o;P_J!sOiO0Za*g4^Z>~@ zj43`dK>PqnPI0!NcLCODbNS(}5ye~c;)EAxTD7n`uxggQpfJA=T>UjguCN>Jloug+ zpWiDvvKR*6&k@ZFP1bI}%ewtc6fYD*uM4bPm_iFt^{!HwK1b7Mv;AFwZoq&stNzq| zp6pQTbX%Y>O&;9Wb}Fc43Ed{$Wmvli^te7qa*Bs*yjk|Kh?F}f&Zg_k{Qz~p9Nacp z?*OA$vU2;epfk)(wmvD7MA6TySUas}guIby?Dp~4oNenwQ=Ls4Q%Y067LjV;s{vt| z_XX8=IQrWCl7|mt0Q*gaHQ4zmeB0*%p|;OY`K*_TgKf;w;d4)b@*?+PyR3&`=-8(1 z^4EQvC~(0(M^4dox4y%2Q_1A245{P%P!5ZD9lbnG5WaTLDzV2y;*N6sP(=3uOGNR( z;%;h#o%+X3i&g*#u}+G{ARRQ=GuJ%uoDSq%J`Kmo!u{@FF(;*k5?Ga zvTxbGMV2K%6T7Mw9G+Qbxgjx!1-$+695Gh}Z*xID^0RqXD+;w2aZk5jVdoWH`Kzoe zm71elP7d!vqA+g;4zaHvAn1&waR;1j=ZZktQa@NTBUx2z7mLEkjE^PQ5Z;{1kXa&+ zjBycyvWgy9r3;1yp8cpAhobnI^)EzWBhQ`|g@$Pt#G&U;0g|9Nzr;>^g;=WYEUoM; zD@UHg{#O*uor$llX5Vy&AbjteZgmR!$C57-NBRt_7YROxMZ$A03NZ%gc#pFq0+d5V zF-%^47*ZA?2_CbrzDgve9&EL5E2t&N4Y>2Y5yAVCNBSDOOPD$+Xm69v6r$IU*S6V$ z6XD86`$S=eoK*O%D$zS%k$v~IlSRo{s$G%7oYE53KZWIW!A1ledF$im^g&@-3~#cw zFEdTKBGIglsq3ZmF) zVehRw`P|a z3FqdX2R$Rcbqnps0U&)l%6=l?3k;7D9O2VStXwo7b-keKa;8copTM_8<|Te50xiK2 zk0n!)c<<#B{Ju9lGJkZ!9#EK^k&TtP$Pi=%0w17y`gn#-38_1#SKC5`{Wr(-)@{{u zqJ%Y}oOAfg!t?=`JuPpLrZNQGl2$s+Tt^USi7y;fCuhTo=ik*IH4i0m&P-4m*XMx8b_3c{ZeveE8wO zuf?7d1s>j=H`BiHCok=A#fGjnq^Q>0j{`++suyt>v zJtIg5OFGitOcj88+_-)DFCJNv&ynVT1jr~)5NH#17o@HDz_TSGT`Y^E>pui&6qiKN z(e~P;VJi*c&;pz11F6w945v&mKu7Wfjb^JYk<79m`Qw$6iY+MRp+*STR{}@I-Q6HTO@mzXgb8G!&-n)VIjt?Z`xpl%Mba zh~i_HnSWedo()OI$=hi$uO_PLHL$SK;+ zkb*;-ZmRzxsHLR6GRKY!Q1@M7xX5JXy-@@&CYHl-eTT3Ha>aIj zG4_(67?#5F2LbS+TyE!g5#xOsZNSRx+oF(F*7JCb`jMay4_>}><2uD@6c^nhU9SYp zg0Ybivb%(c!Ub@?JuJX5XYmPJr?6KPN%`)wA0LL|gBBl!_@E6GfxNG+ePf<27a(t} zWR&evSPMs7gZv#ixVSC}H> zSbfIc>*#9rP35nvjhi5d1*0LUOc zV_$fi81K1g&%rgZeN8m|NE&Jpvb1Xi=@ePaD2&V$-5^T1^V1!`W_~e^_OhrKo_=0* z?$Cu9w|P4Gu*`Es{SJ|EfIjmAyD$JY*bUZKK(%2WVA}+n6kyk+nzzeq?6r^-p&XpE 
z9wR9`j7PEDp&*U#nzhn?p)e&#=87e-Fr+%T#fla5P9G&Z7mI{Xd<%Ir(!F4#XfzMU z3hf)y^(kx;#k-H^HnKe{0+kINc9-oA$(j4f#;wRC*>6OldMG`v>t)aXSAyy#@__V< z<0li0W(^uENLLi^sm!;90yJsZefCmFwy!a|8SfMfyvK0WeA~NVe(~H)=%9gzQF~M* zGGErio>b6h?$G=k_NTT<6xc$k2=PDLEa;6I#A6!<8%0nL)@`>#3j1op3o>f& zT}LNOHJ$w)DJcI|1owII;3@9wqBwtja=31~L|ElbUv588m?jn`P`~IcK;g`X@@PxY zX19o>E}O83e{FZjs%O)86lN)jw3}uG!^)@r6(Y<>^1*TlakdDsgK3sX>UN3^7sZ#L z6N+0a?O_q&F^!#E9&&%NfElX6ah9k&VR%k9EV8#1hG$*H*!SKCcz2Fu=ZRAPGotVd zV{5@SZ-tQerr(mh8KPwx7Pn;|5Ka4J4~vibC_(SuJMyz|xtC|-MDk^R?{-WIwIFlb zvUj^J3_0NtZxeRG-$+KQlFjRr&G;q5X2BG@Pee7oxi->@V}^-BJ3337EK^}0mn(bR zc$F0j!z#a0{UCX87J-Id+&N*EO&6d7T|O|YwrOTrqlgrDbk{b!^h3hTu~!>ypn{Nf zSzfO&-2tEbC)$fb6qzae^5=d8pz)pM3$fFB5^{2hjUR8{RIK+Lo*Z*Z>}Mi)=fv}5 z${b-?k*M>ghit8a{)^2|$EUnLEboa%ZF?*B&asRB4hZAEd{M{pwvOeC?LHAHVBp>M zpn`D2y~5T5>upMa@cq`}H8v-}$f&?f`&1C-bR`*?DfrqyAgHUG>Nyg#owADpg#wDk z<)YSSKRJ=6VQaEsXR@K%?iYnRqs!A>_Mo6Q8UF`-_*SOMaL2XQ6>=QiwMbX<=X^|# zu)uESHhTyEwOT?{AK8614CHv>=tqDmG+NJ7Y=I~~Dn+@rD#XL;YmK^*w!!}-9>$>D z#k?FIV6ti}XH-iFdig^o+BhqP_aUBYUBZ;kGKn(E2O%?MWtF7ad7lu;pyak!V#PTC z7@uzA1(-H(Ev~f3L%0{l=Nc7uX6{8hd{=-+)|N{DASvjbHaL-2@|YbFf!cHOaY1BX z`xl5=!w(0kAZ$#P*$+i>_V49k(q2ko%*n;f4%Gj=vnPKkacSoonqVDint@RT>&O* z%aXN~_L?9~8eWoy)-Uo=H+%*Gj;_U@+f54lJU%JZYS|(>3w)ht*&8X0xeByTgz5Br z$vb!d+yR%9#kO6*J2#7UWEc%prUJt%^&j;tuWE<44myRt_w`zRzx)>b3TpR8@N&qeX(B{$V#4H)kecE%U_ zN9-4mMwH=l0Z+lE5``H)Gxo5E@b%CoH$8NT?*V&PL0@)w>$=-Eg}rGx?NwZ@?GnLRIyjY; zDue47B zg`w6R1qaQuPA83jd)S%G)bhG_mlcajZNvH9Ax`|UAU&Tb=9b`$uX!9R3l<&hGC}>q zHf=rW04Q<;G}UelDXDD?7nQdOQcAJ>N2Lq+6y?PQ$9ycHkBY_wzjKg{QIMwOb8_Uq z(Pjwy4~L8H;{05j9a2#ED4nD)JHvB;v$5M_*9g-Da9^(DdJ1|0y2|4Rep@O-j~lP= zZi9rw`cWtkRje>JTtr6m@brb2sW2rXJS8QvM+0QV(3@$spjUu{v-Jmd*r%c}%E9f- zG`sUmF946Rd+={KUC?tRYbuiKCOE|hFxmnfRto>{py(r$mysZ|eH03!ZAIuMf8i{W z1qG~4^OJSclbcqD4_jTS^;WDGDs$*IK$v0pQ4bUkmjuAIPKK=!aE4HZ%w<8Y4bk1Z z_q^UV3eX-tA-t?}N@x?oOGY~vVGZGE`M#Rv`>lXnv8dPCo|X$s86mZ4!i@D@18t^) z^lP$qp)@&`2*Tf0kN={Z)d*0ezp6HcD0Boy+lA*41v^~k$;uDbZUX^J&M2Dk!R;Xe 
zgV!-(j@_g%rLr96*&PAwTfT3Z%?;Tkjzu5i$27#=p(nWEUnd{T@#T~OvOGb$D2eIM zs-1F`Fjf>QIa0a7o>163gxj_fd*eiEhSz1c{w#{bOjfeS{r5}YzZIc|=HjT=zIrZ* zTHh{j(+2SjLFy<@q}k6x6!)dO_O_mcUDI0*V;3%pbq;dSj<)$q9p+e5Q*v8{EerW0 zUAGywDimGF%HOE)3HI3$esVJDCY^jQ`-U@xM6z^DuLv+Hml0+56G1ZPXNS0UJA9|<}Wkx<+OJimBN{B#DPiE@D;TpVcQwD7P%P=4dG zIx@S9{tQR&-HHi~sXe$8D^(H2*DoC&WH86xQY!u6`5LmwwAuH}OXm?M({2@@j1zXM zs;pELIs{%`s<)*=VH3hrg-*}5Q8WfVf!y~&g(;oXIqhpG@C)Dc$c+3<8?SKaG07*^ z2zz1UORzJl&C(vxXlpJGHSK@T2Y8G6C-5Xc&ay@E^06$OKidS0>`>=^+TfjfrTdqKQPFMf*%XJIhv5%imh zLZN$?psl~y9ueg8|KcPr%(rnk-)f6P3RR1QUzH%^bZ;V~ILo#Qc~_%QUumtvVcZ&6 z>t$5pH==o^xr`Oxwm*sj>zR4^HuHZ8!$8-J`A6pMuqHt-kcSSOtq9YLI3$iv8g?L+ zAYGDt#{+J!MOJ}FjQcM1^0G0uO8$f(g38rySG&~O$6G^4H;clDdgkOqjxW>4Uq4y465%CWEkI=Z*M3_b;+iscK&w= z(+S7s&pfuF`S`-8t(PdocI}*G(-ii(GPE$kF?qTOUdIEdNzV!pI~>~o^ti~Avq3@H zfb-uiBEX=0NinyDPJX-@W}R6(L}X+cM&z5bbeV9`cOm(}NPCIb)LTTMx43MG+3FC@ zG~qE&(j--)z*W2u3C2vj@_RtI#+pweQ{$Q@0`bZE70Jz5MiHbm+@x){)dD`SqxAUl zUJ4iFr;oR@FDBlZNmkV+tE&H-G8pZVk}-CX($KJIc4A|U`MJez7m-RazFnBew^af1 zB9D4}8OmE$b41^H8XF^@K1(@L;c>TZ_UJ!v*1^*xlGGNZa_GZpZ%UE_*|S zaCmB&7M-6elg%w$P1*gI66KTLA0+tJ6p6-&@FW0g6be5RKDv&5yk%N3W`~r3mL;2) zW9KQl>{^WD7KpGE?vYQq4Co^$g#uY)IzuGNjpep)L`cNagni_ZA57Mj>A*il#L%Lf(#-`)W#mPN-+-MyN zd+o!~a`;e?ZbLa{1IqzUBEIkO+|KJ)bD1D5EhK8YMF)=6B+niejoRF@!;!W*mI#@;W-sV0 zn{HzT!%vQvkwV^{>P2D}?OP$KbgjwkWy|@y#o81V#%|lh$XxI<_KJwkf1m#Ly(>XJ zjnGtz+2z6^cBE|+qCXE$+iA}!Oveo$C&eoJhoE!9NhLD(dgE0mNrU4!;9%_U6^R1& z%*nPag}vu{cDv3>1!*hFxzXuqsm&3Eu_!8Tuj1h)(^iWRmi|cWi&OBYqIsJ-ra#>= zZKXulZ~uXp#VIY>(5$msOcW|RymcG$Di#-nkGB@I+Bk)2Vh&PgW9)GuMvSvrsg()9 zQzT}BGW-&&J3-ktA=%WHY+7rFPf#R^^8dIx55TOebN}1ntk@oE$H96Edz7Y~Y(B3Yu){g(@ z`+H9EdjE=ezR%guIluFs@eI-|w7&)lb0AauOZ@VPLZO-Zy}0Ik0P5CN>UR`&cE_YQ zc9Sq&W&^hlYo$BwP7!DUWu?}w!cd_-B8qx4t?cyP^;RfI(TL5Bw3#6;RWUhLG2W`u znK9Y2fSdi7Me$W(a7;Q0$3^gAixpci@y&6J|Y%^p+XZPHTyv6Vcj4|IlTtik3u@S)VHe@3poe4p2pBa8guX) zZ)=4qN2xpYvkiipfmrT(OJQ1oMBIKZ_^a$a5okF=FbnKI?*Mqgxh2?oYa{FhW%3An zcZk-r)<_Pl_X(4ipSuJ06M^ud=z(~y?xwqnNOdUpuZ}+8ViA07ipC_Q2mF~fP_Dc5 
zn>3d5hP;_XQbxM?$9R3MeM|9r#5({<>bjBN=;)j|(|)8#pWQt3>iaLYuAGa;pvkUu+8nZXz*IVwVQM<|S!g zYKf6t?=S(n*S2E^`*{eDKyT(33VTnut}V*7#UWQtlDD^20y0z~H^N*mK9{Q96Ft@F z!qI*i$|*)h`h}1UX}E3nj(~R`MdwWWFhufFnPvYJP>+`KojhyTw*%6CPVm$1b^&I? z|2gLO64Cn{8JwuFh(ZyO3)Hz*q6DxIi5~7!*QAp(?0U1^EQ$}H^e)jQwrY`lyoT^? z?hh$+YZhIE*Vy|aQ799vOBee_dl2lyytaK`VRfK_xw+UL5%kWYIaF>7L(&dy@Y^1e zp5D9gbo~tbb%1iD#j)}I0I8xguNZ$6q()>bv3T)N2d{<5LEMB)6NF9Ci}`$kaHPN( z0tYIlzcq{Me3JKJ=JfmNGCJ|ieCwc%z3c4K`w4s1*geNu z$>kyt$=S@hE9@=p8;^W>JS>V&0xmjiYJic-&J>#`=!NCt9X4k3Lp+lNNqbwE&YeT< zcS3S{gr_ZbiKjw5gj#-wKlkI->Qgmar8;z_C{#b91m)i%>l7eUaujzpvjk!Esj_+B zKUFr@7KZ4^B-XQ56X4N(n2Z^3^&vHxWkSM!B@EAzdY@>&76?7(vL6Zes~_;fNglb( zdPtb2Z~)PIoE;$1k}@A72p3~xM%oAIoJUt6`SZn}5JyE=U|DJF0$@{x*F6Dpom)1? z_9`6inObgdWVYs@=)QhSl7UrCrU#x6ITK8t28DgT4VL@qMqwYb_bSWnZ|P{gblLTt zT9adGzt}3%m|Kr|_O`GODjObt!hKHKdb` zQkY&B>TYIv0NJuBvN?cwG0$pOb|D#!J-cb)=>u!*n}Sp>QJ+ZV=i4F#wqS#g?O8pK z0{g9l=oFtJo5KSHeC6j?c`D*Nn{AkA6f7}LWNIf1dc*r+q=yxyS`_vVEOs)DGKce6 z845TMDYiiG?H6?g@QDxTY?mtNJnf2(=}m&pBYBw^ZI7f0eb9?5v=Jh(gfk%AKNN)Q zGR3r`*5(Pqvvzm2w}l~`$*+2K3%z3-JFKvGJTm>o!d9m};IBpZ(ZMJw`|~+HmJ8mP%Pii*1*HS5Z{T zt^YA$O6n8OPmWI7wLKl4-F~2H9a295GS1`iuQ*ZATRbjSaHuNJ-Vg=e;^@J?)Qd2K zFrp-n3!-lbQ832GOW8D;pj*g^ULhtNo5n6gGJMtcO~p|NJ3wn6vNe{iXe_I?4uZ7( z&3SLmv91Dv-<8X>#f~V-$q(DgkQ667W%jCocau{_wf#+)*06xcui&r;TT5cAi&auv zFN!!iW8H;kXWG^PWkgo9SFoix7MWkB^Gm0m3V$UEKccc9!#V6Aw}t;LlIC5K0B_9TZoO?-fbOWoei|T7d9l7`YXr6N*4rs(FhsncvCKY!CTlXwZYGdr6NK62jJgmfS>IZV3aWS8;AE)u?LTps_6{flL$HkCs zaG*4nO+t5}v8>j{h~i&<0T+6ORw@F`EWthd^pKqDE=;U_Z|6jNNtD|zy#WfUJCQ=r z_Q--ARwS`zw>47fS;x(arP6t|0b>CFDGHrNOL&r9)!zZPlsR^dfOn)9x7c=18cU$s zdJ8k0(bi$F4G)mpGwFn_7IZ#E*NF{Pwo@dyIH;E$Q_#!iellTg27u|`*|jsQhr<3_ z#47>IYNW34WJr;P%uR*D6f|U@?NLzN+^`{%)Y@%FMe$kInb(@H3wZwrF2kZNCgjJE3@f<)GH2Lv)B4Aq>m1ATr@a-IEeD1k}!@WT(=O+$&J@aFZ~th2&=n|$uG{Q8C8zI+kGhr#AmF91)9M0u!A z71~?@T6Csnb@Q`xZHpiu&BzjYk+Hk9ul2a(nXu5Ftrbt5BtRe@Qw?rJXx=JOPJQAHgDELcvDC_GL9O=IROf3 z*S?MYTVVzvhy8gnp*!)1AkgXU<-?wfQuMmYopC!+d^Qy2BKH2H 
zG#)vIU$~zM(|yE>k~U5tEGSs>;$c?=7#Q87@Y1lyMYim8I$@7QA!omf9w8y^!0nAx zPcIJe%$x&yJ-#f!#u*jT?;0fN!-%^h&Rw=#6k5UE$`V^8Ku76BYIlv6b;Q#5wG=UR zd*ihI_DXG5lOaC>Lv!ry0OE&M;V9`*Z&+&cfz;+&>n=z|G9_jY27rb*w*LgY z?>WhQruirlyp_?1RczxzdW1-e?4STWrj^6+v~et*UA}lr>}x;u_(-yKhy64_lK0@k z>v=&9ukM3!lyty;{Ik~T#*E9wiLP}Kg}Kiy2K%92LM#F&cG)BanKw<#x7i$p=}@wW zHXRBPsjYQ(%VR`%^P_h)WG+Ra#q5bLRM%KxfV8M9=b5rJe1FbX`-dQPq;^lYe=4kx zFrhut)jtP0qq5n&B&?4JBt28V($^b%5Yd`;M!0<;dBX)MBAqP#7F ze*n29*gk6#rYi1eH=!r~uK=6o%sx{y%Wi+7HFZ`zUSQpXDUS1!Y??2y$3-AJQs)J% zm@5MG`vTi2NW&wmr7sJFPcP0VeNI?>+BMNWP?$N+wH*>b924p=PkT!YqVEs}_q2P3 zJbRuF$M*|6-_eYN?>$7IygRU|*IQwl#b7qBTxE|4(hin@ot6mcnmB;XQrHW{hZNS- zg`JydgyUJ){v0T$%4eP`AFmgrzlq{y%e!l@44}7TFhB6T)`>tnisNJLsKQ?UXjvfa znCWq&3v;Y6gqy0D+nNw==5=Me!jzw#mqc~fb_PgSE-Yo&BIv6@r@Z)B?pRotznkS* zQWXbN73=K#f}X}Na&$-6Go+!MVwSRj0Y&$nJvqO8G^Kni_jt~4{;cQdw&5o&IP@hefrzQ zIUpa0ws+rqmz4_BKIZ&V)PKtbqc1uiYxeV6A{TH|vcl#Ecybr>vdT6JdQq4%Ew%<> zIM*rL4k<``xZ=mnXaYbd-=DX_zM`)~EZu#KQkO@6&NS0zR8-3BfIA}9B+ zhAd2rPV6_{6Qm35ZMex6pbO{LPPQ9zTg%Et5+MgKLl8nUrF2y*l?{75P30S|;K5(Y zW?7=r|H$uCp5+9nD@U5SSddoXA}IR#;x(aCBnAX2Y7P=rA%V0pFaww1dE>261V$OF zSSrp1m|cQZ>X>~ZNG~4XANz-Z4;VKGQ-_snqtl z_PC%%6l>(RF95DO4q8J12n)vS>XEIvdU1EuQ_#m(X7SkkJ|c>@;jPIWuO3aOKo(6V zWgZtrj8BZnvXuff>*xl3QCuGXArKg)$Q!Yp62S}PH4gs@`CvNr(>ou-y%Ayhkg8lG ze>}4UJ!vGbCdX+rc^}TXW;iG0z?lrts#Py+{XH zpLF!_7~a3;35#JHVA#9q$lN5mz(EB>GO9TU#^u;hA?H#`_wX~UOcXki&)H^T2#>^% z#xjM&>p?wJZ6Exv63u&6n$68seH!1^zTNHiSK;WX4!IRBX#A-;dJn#K6j8J>D{mZD zr2>c)qwW`=NvS$q&@JSJ%pMIC7>_Ax$@2{tg-&o)!AzSby zw=ER(l{k_D&DBuXgfxh#V)?=TnobkhlE5&PS86v4!kL12mfffD^_N~1 zZG7Py8(+O*=&E1g3d>Leue4=e$MJsz(La3_4BiNf?P-zd$#9gU6ILe#S6Fw^H4*U7 zfFr+s5+K&0FwbE>ENYD{Dk!v^H0*=oOhS;!%cYWrYL|@_!3Ua~u%*0El!_88AnV;1 zpaN))r&EuKXCMw>(?wy09Tv08iV4y#UA3Ztba{Y$0vX6k-Y7`Ihq~J%3epI+ z-#g^_&=!l}12u>rOgN#C{wDb1AUX2_VN;`EZ?FS za?;2lk?qCXMeuPMg0M^nK^odO!4+ju5|fub(+T?Y>0(pTuqSeY%LO%e^Cf2Z8zEXf zB&WpQP*^j6-7>$@v`fZ#TDIH8rS>NwDo$-)mD*fwKN<^!(|7*Bx+@uSB`s!40+3f@J1D^XjP5D=59O|{pBy&-6A 
zX~nL6iUiDt=4msUSJa=GHuZGvjI;aqTX)g5PdpfXx_VS%$7oN8Oh2<4 z_s+7B0?f|Q4f}Z!elnnfJcMQJX(1+D@2)meL3+oXQ!P7=<$}y66dfbsrSZ~uToKYl z#)P)w`H3_veq^X-pDLyDcSQ1@qeD_`r-f;Wey?#26@V1!FM1k4eQaGPqg8QhfNXe{ zuv{$>Bp)(92kn<3?)VPO=9b$#f^buIkWcPr@A!v6;P|wZrLe+23>lb^v4UK?`4?nh zXi}9+tdqjNzbZ(|oprP#3>4MpLf2lMEH3E1$k1u-m@v#hx>Ft1W8NHlQ$(84DT@oq zOnyNwghKd{p2y3?XUd58pV#Qgd>>auKNpR9d|z+N1OvD9)A3hfeQTRg3z?OwsIyDU zq4?cWB#9l_tcC}di%yqHh7Y%n3j1%6j0m&wv92P82~{Dhv^GQ(7)tYZTS)+shlCmd z+JvCP4qoCv6!f{;HSz=TQHaML)l?(|ZW`~&o3?E@jrLtfK^oEnw+O!t;gN@siBgzG zy*ioSiSi@cxQox4;7LnRaC=f=XZUa~JYN^2b?gL+?0h;-rsF>j*+#GrxOXDyym2fL zT%Y_{1Tu2sSZkRAw54g{L7S~GGrQZPR;M6!$rWCCr9B_A99nvKpS>X9BOvp=c0`yW z3ej6StFR9s-y|-(*;RZXnll!k-3c@gE}7Jts^x~*+K5Jti2I-dVGjvXXkJ6LJ)yAo zU~nRDbj$`!_OwW|kEep^f?D?@e+`jkm-#$9*xRD3Z__r~-xT(pNU9FGqcz%6T{*=w zGUr(fq~jf#9=Cp?dxa>+GfK~h(7Nrbcvc4I(g#ZW2{0!iqWHE8js_iX7?=l(@{KP?}8yFl$zsqQUnICE&iC#h46*y)%I_NnZ4LrirZ(V1H_stloz?e7ECW6 zTHkGKNJ1NREju?r>>gnG&6W$&1Kxo+Ku3r=Wy-loMkq2x^CF7%)G#ZJb+2U0(pW#3_IKfMj&;MjdfLBqrdNYZVa&<| zXkqhn%WRXvbY7xb=)Y_YQTc3H-d7lI@?I^w1^ey8kQOc!BTe|#Ss`BLV6`ycE`qOz zqhf`JSD{8YMkMdBTf(w3QN;QLBAN39e6+L2@mw$H^9N)4NM(I?HUy^W(5`lwf;75u z>H_<^!p@VvvO(51Bzv&LW~HO}>M#Hja9$GQWw7r#VQOry+JAQc2HPv>Gm^36XsdgM z*Fu^zdncaRJH<{2daFhtWSei7%<(eto`M}*yH1dnox+~VXoZ;yd9iUkmK_Q)sme)q zL}A)2WxTQdhuhV29m7nHQp0V&u-AfPFs9{l5j0AR=U9cpFbzML3H!Bx=Ic%txykmr zpm%{o9}a}n&3}mKrJ=CPPUq@*5GXAV4=3#u_Q*^uXvFLu5qubA+`n^(=NvQs*bHo@ z*QDd+IQ@A+T2msm&|eDp2;lcejs#Gg#M*xZeB5x(7Pr?GsWGizYfYh? 
zmecJN`|&fxlDZ%fA9-kgp_K_!Z6p-d0wRaGHHz^z6lLZlaC-N=DD)`O#9d&A1ii;u z?8dp&4DoQkW80hU?R5OG$l-XC2(;`%?HW5D(w=C6FWcSwTTVbRG!SyEKq$;U-9yLj-!GqdA`}%c3n6$=P=o60yq#X+WlaTWbV-?-$u% z-Yd^K?CyUrI^}As%g^7l_MQlIE3z`erA)Pb6q5AIezCG~FS_^t5P^2(BKq)eg}pCF z>rtt{esL@2Ikv>Q3VWN{wY$>>reQ%z!hR~K$uZbU74+W;-YpjD$Ji!|<~(N)rC+6V zqAoFc?_nNPi$)DurHMmm#C&E6&>J*Bt{AQmrm@&GvkoELw0y0NQrKq$H<+AY5QY%x zQ`e;!!#&Dna>$h0!>)5FDZ(V+qB6^JM9|PxY>q57NEkI$vDNZK_ihixy6`+fCRRo) zF*a#?LS*!=Ki}#EeIoT5i6UB&T@Zz~A?3#rfdy-(eRElBqR6_$RLd2`s~CYdpc(y z-aS4Esks?T*aIull~a%2maDCoh_vx^Z5d|Z&Xv!%Ux?zBCmJ@|mNY!Gb*t@2!_@js zcG6R-Ga&O^p|%PaWEzzo>BF3g$_+ zACy1CQzF9ShX&gH`NmV_vm)D66YW+JnLxn+U((u$K#55VIP_82KLNdLy*dqh z$&ZUhfp`4ao={MOrgzgq0osW+n~Y3O5TqFVg4u!)MVp5s&$wLB+v!)0GVb?MNW5;+44xE zv(DOxLR&{3T7Gz`{anZkcr<|!YH1p?jbM`~4AUYB*A@153&epizBMmdODs%?WmEU# z0(7W(;d36QT1jl_i3pFw zU&7x=#__sPZcm1sxQIXoM1YnSm1Ig!`0s+$!DM-Ls(o>tM`EF-4Er?W1vM>lc}!X% z07n|J5b&JB-v6w4CZFufAtkQo$Jv_#RG~{8My*LuV-%CS?yJ^&G16YTNnszaJ|*0Y zJ}gW|6gW=r-EGeWNZY#OitFMH#5xDjsYqtVti32|5(eS=h46*RYwYJCe7c@9vci-p z>*GiEShisJYLV2~y;stA^F`DuzM(d{A6@TkwJSDyUJfNY@lSTOAft*~xmibQU!BCh zuuDk6X+jh9$pC3_Cv>AWqa+75Owyz{BzG#**k);wm6Ou%U3JBo=9|Yp4}x# z9L}wh_J{yww7csr%T*XomT=LRYg2`sA4gUrq*!h>pQ0ef0HrQXd7GyU*4lbe{6{X9 zk3DXDz_1beeMl$mSGQN*l+MZLvi6Nd_K_%>fO=GUa8qmLMLJ>>+H_HfW=*fdPZ+=_ z2p=B%fM-Qu7@8(cJ+okyZ3&6l`9R7BK zeObXUlIv>uptuybheY%x6yfYdCSnLU6oW?9=%j3X3x(}kHYg)9sbUl-r-Kq0t--{@Q$Dxi5* zc`%Z`K;)G`g*`1gV~lM$s}AAj8O!ZV2&cAI+69Hf;>hE!b=nH1SJZl_vdDfW=pAPb zDr4cPvbmyp<1v6%=(m#%Az$N+ZL&H0vY<~Q-A3@1{)#A6G(aX%xCj43Iz_4)FX{)J zLe(h0%D&6@wvmC>#}TbeC>$)3{~y1ZHd--c;v^>%*|mc3h3kc+{WZiPiKd;`Odv;}0ZLM8h>5vWc z1|DI)FX-GV${Cd?DCVWCjcD{YdeH45;EnH7f;G5-!rBw|v26x zW^O~Ss`9crbm-8=?iHXZXtYk{mEk@?pGn{<9D8Mm0uOn*v7+0O-Oa*ys-5Ms_&??H9kCJB1)xkZrc zwskgJH0nSeE@7AM1TZer9gi-eE*HVujD)phQ|}B(sG74r_XtNspa3pOitS`NHkOyk zq4RAKyvoQ~q0R(fu4ye)hM!N=*dS5pThr_rHcVm4ds}XW9uZm*X zjgssln;f#CHzEb#nIYR-2e>(WK>!AqA_$moABL1Co{NUX_7KT*h@4Vrc&`+J8Z-21 zd@Fz~q|U6TfLO*3VMCpjhdAtaAhzBTCw1dhwph?vM04c+^{^=ZV~B+;Bv@W=$)UIU 
zb@r|x)#AQA-_D0Pn9#W-%RXOAob#wiw~OBrRzvV?UNhcq7bGjIT++T5;$)!l%_)`; zWU9!;_^1%Yp;c1%DM5V*7-7~P)&3|#SY{)mHxg*5gGN<)zx;j3yqMNQ@Shi+>pa=UlXcwZ->MjL!?rl)cJRJuw~IvolZcQc6{hj|3BV!&CIxAG z^s-k&R3a;uZQt1I>9Bg8ur30w`2Ok?-ktXFfLJHfcD&8-tZM}!aY+{ULu&=RI}Bso zo)?CDav#QEyeCLw8duiZ|0wL;%D~<{4$by?B(Acpe+Z{)@VTWh2_ubkS=VR^1X|wi z?t3x4mbaf2UMpLZTq_cWzax0oP>=@5BH{7pY^NX%xUg%5?Fvy8(~oV*^MWu3pN0AM zx`3C63DjKsj{~h3pFWH2abenoahI(pYEK-nH6r+Y*MhQpue~M;RiUkHrxm7b9l7iS z0h)Ab!lqLbcG`#OoEVFd5Ax3)^wfC`YcPHjY_Q+m3hIVTI{(p8WB>64GJsRxcgr1Ou}+tU(6C+P(nAC*$E| z)>p`@>0E*+*9c+y<(_t<3!_R~l+H4En5`9}>Id1Go=}(`)1}@>PV{d8hJ&`gbD3+_ zP3SPu8AHuuPDQ({J`iGuS7EP+Q(BipUTD*E<<>uh8#Wi(q!7LXpYHP%rZ1_A?Ws*u zWt!uP!%m@k4AmeqZC@9K2C?4mM)Ccw05LH%g)Kt_og0Hn(5#4kn!?TdB+L~3SXtoa z=mft4Jq4KF-Md;M6xBSv?ChHPHeC=l7GvDz8{N})M{;y z(uASVsb`hFE{ZQ|K_<7@hP~LD3IW44yh%bNtLs?3AnvnQMWuOYHDIHq$c_uc2wjb!0TW^=_xLh) zCZX@iF{Nm^qbGU^yWryVvPRtWHU%9Gzr^__J^jiIWYYNzv{e z$CXH|xBU%qaIX}Xih~pmGiy(|?u#B4g*x7-w!)CG>EJFaQ8=(!(|baQJG*6(O;K1I zo(-E@WVRMY*AlyJTS$gKW!tVW3xs~fqvq&gWv7UtT&<2Lh5g5|b38FZk7?Kc77{Fj zN%%{COb8b7EQ)Hm{aldFj6<@oRAD-Es{GkV=3$3ZG{hCi-7U0fO5lZ$j&q7wz%u7Ejn$40P+ z`RG&el7#+e8W*v3cT7#v3bUdk6y~3{uh`6sr$p1-K5zG$g!5$%c9e7B&AS|u~dKvOX?Qxi?4cYWTK|l z?iKWv0ihKRtoMoFo2M4}P=2sP^d_;QO|^%G8K%*k9byX88GM@**gOHBfU(?S>1Nf6 zf@~M993U%nGHuOiqFH3Bktl5C95=}2L{BS6#=1Dr0pnw(r9%|aP>w#sQLiKV^m!** zVZRrJ(L`#U;ke?M7tPb&elE-8{SR{?BGSr|B97>z1-zB0R#v^W$qtL+Q%Rfo1RhJ? z6$#G7#}*)zN3KD~AvhCsK(ptBVM61qX?9RyKC_!|x8n+i4XVzrW!x`+^EFSJh3xX6 z5bi&8kZo0%hV!b7INi0cgS?+PJY!rZNF-tv2;V&>L~VKT9Lo<0PuH!Iebw` z(6SZs5yb;o7T05GLNa3nzt!_XcI};!W;iA2BOYl6W65lry(eNA@ySfezlS`GK_`q; z^*+&BNq77;7vlB}0?jLJ*lrUPRzJ#?^2cTihDB*TK4dr84pHECbd8UUCm;S7ei!m! z?7yJEz9>lF@jDLZhYERPu!e%RSsL@4I@%_M%#oDyWSc5Thf=jOB2z}P{b##E8amL6 z1J|DfX)n)j_P+`Z=wBn>**>4agahCiw#nO?P@-;LyM6#;f zY1;*0g9HQdRUbjd1(9QXg<-{IzvBH!!Tt!#ExNOoNdr5m80Q!QwlN-RKS@)h;DOW)eO0P{sa2#C#!4{7XxF~! 
z#n6OuyHx=GG_6@+4=7AWX!TA`=?@9Q-WE3?kyPBHA^q8H8*GNcH0^i0?UJ`V>zM@= zP4mm`iU9eI-iXDDu7cjyL$mUdqpd&$XFkHLsN;t83|dY@ja^8?EF5NjAV?ce*X^{w zE9^hlh}2;}vp^bE-*_7;oXI;>HOhV@KsUI;mnFuR1)a$wlbDN2Iz{tf62D}>Q#6Hl zXxG**{ykxD)<|t9+6&X}U=2FodIkdR9Ly6++#VD`v!sN%wnPAabnj_v71XDJ#=uKU z@!tldlJl$U?5pn(radyvgd{Xa|F)v&BgjmkqyDoHkLXu(*|Vq0C)?wqP*|#Rp)42_ z2zo0cv+`roxMW;*iTxsthq2z+RAJf}XX@I)bR-9+d3M`>dts8Ek(sb@FOfh{HAj{X4uyyU6>6d@|(Y z!=}*kMNs!5J#k^i%hO2-L+EpKcy+$zL%k zRZ(eg2dL8&SE3&TNV|&oz525tEs=Ux!_IvBSXisz_GOVZ^<{Dsa`U?oX+k8)&$fA; z-69GNMu3nrhd>xDe$8_Q!%d!E+S8WV?P*0Z8ds8n-mj%3wNU# zRmWw`hoVsjhCrqxaMk7%FwELG_cR@aoh4ng$+ku51V@*!1+Pvg$cdNmAs-V#ecHS+ zQe3XI_eEheAL(iTR?vI&D0|8gr-5EumlzJ+bA^3K_??&V@)S|Le(cDx2CWr=p5D>^ z4%?xymlor#f4?BS%SNr+E^lg$>x{7VU4oP|Ty|{ZXutLnfy8WH7h1l6*E<>qmc{ut zD`c0iVV?Jvh15kc?i}uJCbfn(h}4w{Q?_pCnGCg7B?28DkYoEouw<0|PQkG9SI@@z zG7i$@g86f2pnK)R5}6X;j~^puZS?gj4>WL_rlnSiuXKzvm%SQZK1R9Xj7sN&cpNiy2A zg1)2Vp_*Tum>m^KY7VHvA_ApzQ!O9Fv6iCbp{+8tYqI@Slop;H-eGsX=SAybkfNV# zfqN*Y%xVM~8@*wTvp)#=;HD~frK%=dlPGX#a?Cr{Gcc%Yncmk`YI& z(7q)CsnMFTYZazT87S29Y5q(w6un`E+`kML#T)ui3=47Ngcjs&%aL&XK$P*icI!M zmx|;=h)#vHt0voZqES!u+&JOa2WT?|`h}|XvT^N$Yll89zsJ3~<=H_SD+c80^y{p4LcGVvV zd&#}^GTTp>d_1JjvjGAvx6FD+VoBZ2e|TP-#6C_5@8+ zC+p9_EjCodFk+D-BdZX_s~>_6kG&{N$Jm6=;@>h$i6D;Etdk+PNB`vTydBCRcOdVd_nwZ==vX7_&UD~dN$ewgeRpGpz&-md?v=ZVhak)%sIQRujSjB}D4 z>pP?gy^|vmROu`NgLP;XvWW^aSdC?S8_O0&MvCgJkBH1)OuZKraGBAag{q%uG&MHj z(4Pq5E5-B2I8nUfPTbAr+Y%Ar-TQN<*`ZKI(_!!Yhfg~`kh z^%A>TfPxvUJyZphf`6mhIO0x zrT>NG9HZY^RwC>Zb8Nyk2!#^(3KF(Y1fSo%^0V+G6e*>@EUJGE(Z%&X-idZ0zjs1( zc!plGeWs6v6+JOmkseEyHkLa?y?@cSC$b{-hYebE9lJV!}(;a zAT>1a#QlfDKG=N`T*!%!v7I4$3_IBu(mC@EZLt%A-f{d&XWID?EfH|mp#S)oXj(Ly zmoaN6;FWe|*?vHf21ZNZG)dk(BZ?Q6EJoJ$rH~<2F$Kfjc0>@Sj$u^d_EA7NY2auxmmAEtbO8 z;Mb3oniS}E=i28!0Xdh3CyKdAFS3gRDbmg2MUngPn?>>;GHP`R#cra(l59OmcNg$> z2D=kIL~$mi);()|)37(c#My!jcE9d6T0zQ*B;v*kcysu`M@z76wn+4>2)U{~3iGw{ zQB`qIR>A-G*l`h^+ZfBZ)7}!MKrEmZ+Q$Ok@?1=su<hLPnJE}b#6rA1E$rMt zl*nEXq=`6t%Q2LhPrRA0_ig%IiAgbCSL-XQEXV=WM^McMNH|fa2 
z{YTMg-)PP;_Qi|3MHR4bkk#001OpE-8HP3ATS9{9;tm!DNP`gezq@IxUls*jw}4NGI* z=Mt7B3}g8*#rA>3OP&fTwic+ZYIK!asII1Q>9rDw8+jw}hNs`TTmE3#qAX9<^<@+b$-awjW-@wCO6KJxASy z$T9IOshJKCkrEXX{UYWKS=ES9u(ut_^Pl<)M0gd^l@yr zwjz2TdX(^zVqHa`ilQ8D8?$Ukx)4M}^6Z$f4?$5u!=}j3{qIDh-58V3w`Kt^5I5Hx zQQu1w29_k4s2_>oY-rcMoz43~Yl*tzj3iNhEt*!lp7Aykk9gY*l$cE7{QGM@|s-DtXf;}!e z{pf;dBZ^@{8tm0aCm-c)JiyfE6{#)dc3jY_%Fv6Gy(WTB=r^U+Uu9=SVMVy%0s9~w ziDJ8M<-U8Vm)TIJFbjBh*J;`PY)4=G$ zalWk%Q0pFAyDjjIxxd?kF!VnZ3y<06ZIih;@?UhB=f{7F-p0QrNbiRyix9BBR)_{Q zZQp4RgoKfF;BHF@(mtj=+whR<@cI~fLtBKL2}c*}&cpsCiZr5Y7fYZ|9+hxMi>Gho z`xe+mUn17Kl*4m^T`f$l7nbbuFg?#1Zr6$I#Uc<^#3FfPIwK|)WB9cs1-v+MfMv7V z1_qd_*}^X}ZYn)Cllh0-*$zQ^nv|5HBOgh8+^i|K=9bLpNMYao-YG zMq(E@^ybL4RNNj@JQbXk86Jh<$=fwnrXcB}GczJnRxW}wiOWN|0-F$W#*0d=R$=Ef zp6v_llCOgOn{%nZxP4nB*uq|sOWoUqC|?cc-x=0cgirzpO=~CY^<`n^3Jb4^bc!*u zc{54a>(DI|s$%x6NPNpK9SlQcv15yfa3EE=LdK3%*uFaX`$UF z;Ok#>6;+4Xf*~P630_m}@pJ;b!PoM$QER!PiB*xIT`DXWK|RG-#JfN&vNM zdxyO%sL#G~lMS@igq?fYLqbgW!9(pa1*sK>gxMSrRtP%#BjuqPb|6iW8~_*8heYug zoJbOfJ82YO*tTd=)4#o*fb3y*vA3#NV*@nw$9%qR;+AQ ztHo^T!y-~w!xj`4G6ZPE`Hkf^PGN667asPMV7O(I7(TMHk?0r?YTU-pu{Ku`?~^z_ z7VpC-&)GoHe3(l4HMXx^-HHck3wX7#b2)lpjN4pMXuxPx6Mm^MEyLS-roALUYu;GI zZfdE$9OCe*+15IJ%QK~FcUY0aUSu&_zuY1_DGJ4)kb#Yj0Ioc@#b~P=PZyR z?0KW3_C8!aLZMbU34C?7oeYpeXEnc0X9a0&PSIHV`n81pTa<&Bh5O#Ct+XGC$Y7nU zlC}+t$xsnmn8fBpc2;JJMjhIqaWm8q;K_QneO2~yD1s?e8kr_IDS|h;UB`Rc+h_h0 zO!v6n&bEsMTE2^MNxNRydsK*K8d{b2i{i8C?P}{5;#;0LDr`o&gd?@gnFGQUkMB{W z{{?(XWMMM*cz{|4*{r`S=#@pLzzgm2>pb)DaU7}d7x1?AU_rC-!X%IEm@mYjxy=m8 zv$>eHg90#@Q>0yXy(fhUI01%jcBLqxF#Q;OQ&`O`=8O4>0L^T!-g{>0cpH&UJG`M5 z>F%@_c$6ph)zB$%Yhpr;P26){{5&($xuzAD=*Dm^*5fv1Ak1Xv%EqwJm= z2;O|@byOSqJ;Gcz0UkhSMSm0CE+fNQQZ6YMro=p{SVO&sm!TgO0PWs080?14alchN0?{X_(x$nDy7uz`YNnmh`PO2<8v zleEJDMo;ex?BxI%FKG&V^(J!C{%mAb9#Ggnt)u1EMaX+oq)WaX!mv#@Wg|cB=Brv(v`iSD=&E=unplJ||is|JhG^c(g=wC%qG81P)3R-|z^7l;`0Tdvoo=!%p%f{Nu>>3z^wIJgHh%Np7mYSF&6{qQ z-TtX~*vUD-z9d>;@j?Akb3zcSciNW~LpOC#&!c%4L7K#&EM|{{xYW8WTx44!#O27h 
z^{5cX!d1m>a7loCP({ctj}_$eerS-*4>3hZGVrGQq983jyb3AL?|R*NY_*Xyx>PXC zfgk70lA!;;_g z$cAmjhnCxTAs>y{Xr4Zf2)FPR-qi6mM>LANxqbVa+uK#&Ck4GaRBvq*^zUJCiG;vL zir}RkUc`q}Z5u-w4-T*@1>tpm!=^kt96%1@9Ck#&OJ_CZdgz=8 zux3oMFfVIN0>#B{C}p+7IfCiBLzw}6(NDU^|C(+ z!s$bm{p=Hkshy>Jp>8eh^KCpbNxffHn0hexA<69<1?dBC)m?2yOB}LLcu{*kK;{H* zn)dh)Na}6SJw6XUnQftXH;XWIm1PMLXhkm9cQX~HWG+M}+lm0JQb z%R!bHapof;&`1QVk?FH{L%dEDyczNaD*^+_A?PdZ2vTK1;TXGIVfC$YheVg379>?a zq|y}hwxblX7rDrpqWEkXEZgm}V)M5&jYTXvecRXCla6jMp$_7&(TkCS&-iuPP5ej{ zA4YDBa_tFWnt;Qk1ZzuC$dKBwGJ5l0<`fdZ)OPLfwy_~S@UFYsI;1u%DdyXvkeN6{ zJ9NvkUlcl<+Es6#?LZjjYq8_NJ=2~D1d<~=2U{b8*s*<=YBKtvSrf8}EeT zjn6{>)NV^-q&nl)K8=xZYiAD#`)`I0=!rP@_af1vVk9M}D9p!3jAXBcXdj%^>8a8_ z5`n(y;Ao!+grYKY#>5IsEpZnlnkz3@)l+SSpr=6AGM;;Qja7@}J&2EG*O+gA5ygAJ zYeNDjk6?zp&x7_&1*t9@*4gy}K2y0Chc}Oj5EhFXnkW&M$3^pfQ0s=w@CI!I#IHQ>!;ElRh;)@ zR7s3W9AU3#BwsC??U*RkgFZ-XWU*xn(ji%AWiQ2^SOjlp@fcf@&d;WTD?QsPinFtS zNpY5>shdQCSv}FVzlR{hSb{00hZOd<4919t?GkoYp^~_RL(C5A)Uh>9sSHiME==p- z62}tzgCNBr`DEu6reX2yY_=Vj-`kq8k7RkS5~ic>a)av%yEUZ1wqZ#;k!L-Hy<aty7rN@ntzCW-kkcUmiKj-H0!Fbb{@I=o+qJ_Mw74L}+)6 zv@hHbrtawJ81XT-Uf&Q2#zwYesvFDpSZ1IwMx@}$*K$OmNjP$n5&Ri~G#Am_O*UJ= ztLT)6Oyky&R_AEGm0yNvf*1;4_w+8lFqq(}J{B>$Yx{_^-}piAdAfA!ON# z0FpU~tDZLaSz98`hut}JVs@0sXUwSnPW|&)6DjF{m}lUu+P)xs#Rxa%#pO# z7dk;?8oj;Cu2Rs)qqI;Kntmbz4P}btCNYmM$h<7Z&{&4WgjyyY9UmVO(|qiw+7!XC zqDFIsM$S!V;Vn1YmZz}#ceS&H>DVsF+hCw^pC~l=?8K_(9m}n6SBDHsd#*pn32InM&_J;IbYu=H zUeiKb`KtNB5@OpwAF z%ct5c3NtZJOt*U!^bz8yqg7sO{a$-0BS5lC+6%(8NK3608@#h3U!Y zaqM^o2~aEsA@RvER>%hf5z#6hjn|4o(M?n8B?9_OLH`De8+I3>-jQ!FiUwOp<=EQ_ zQcW>-Tfg4R!|&~>vu`U*DZ#C7H$f`JTnl$^j|%z39ytciWD7e7Gmn4SLY}yE&IPioHVgG zD_z%*r2LMw2<~sti}Xz*lKCjN6K!fp zK>En#$O2m#67a2a^s?Ux()esSUi(9cOUa_hDmx>no=Uo1;a1O@v(4V`gKbXcL z5SJzCFhRb7NBi5Y3Npln4K=n{VX8niXp6li5avr{J6T%wcF_Q+KF##@cpL?`K#7)u z4X4JBw_8P_vPev0h8+_OQ!JOCn?DB}ADPINm}!@R#8WGF!{XLMK%=lW5-EL9P;8T} zmBsYd`UVPo!hG6h?D%DiLJyj&R@;B2V|7o&PwtvPX_`Ch%)~XP$`{*OQJ5r1#j$+X 
z2E^O-Zk&w?d0RQatg+vI3a>;A;G#jqi_x{wPP-D9nh|Bgu}AB6v@*#Vz-`Pl&=0jKFT-ScS=(DU&MG1$@U->np-Rqbr#Cp%fAFP zg*NiSFy2ar0V$ZS6y+fjG*OsnULzLat73~Fy^9y-#PXzXQ7;0;vp=Yi5-B&7dqw*6 z232}0zZFqaGZ+!Xw~{FSYlHRnYz|A9b`7b-Jj}T>ZhsWTJKCube9=Q-+CEImkaq~s zxD=PTnBu)h&^djy9#!={HaR4qyljM%HwnTe8S%)sSB2;cKRE0TxB}_*a41eH78r&U zgo`O&?v^VYj2lEI|DzpCRkS~UNpUnOOKL|=0zQPD@{{@FMoQJ9-6IgGi3bZEfUE32 z5xk|1E6W;J&9Q_i-p@!YG*_6)Ga9zpGYV6|7;Ph87NE~u&*{eC(nlTA1=%L}hG1Yp z^|9$3wQd%Lx}!%qEq31%!E5S??;mRu62POPNaA)#noyt{HY*WM|J9uAY8;& z6L(ZYe(I6Yv)yt_2>Pf7NpJKkMRT5TNK5URkChCo3i%@Sglc<1&|8(8;A{VP8uP5# z!9Mph;+bc(vU`w9{<280sA=s4GlfIH=S7>nYen$_GaA+u+Av}GKmk}wW+v?vLBpmU z+OGpZH$UIr6QF}V(Dcc(&pigB_hcN7q|>#6)Oum+3LB`3#5Plz1Dt%p*$7bu|j;+!hd-CTH z;ZAhHiR)x+Th0?HaHNWlVXysG6n!nakgl}{9tYA`zI3*VKM|r^XKI$&g9>|(N5#h3 z!vUtY%t&osV|fAM2bDkeRDiEeWM?woW(iWx>H35AvchmU6ImHM8X``W@3rfmAfj~& zmfB;&G^K02h#$3bA$k}sc;#Fr%Yf@|MWifxILST{@CJ_M_MlS?=*4kiz1VsQt1pX} zTb9DgcaV#KPtpmI-a#8HgUBGUA@v-jm02Js@^dBaRaEKh3Z?L(J7aLv?iHr!rt%fG zJ{{Lrw}*R$o#TyDm&k8XLr8t@*`;W#eO${VmuGnUj^uNs(Nvxxw+VGi9-peFBZt7 zdymi8;m&%A?v=4_WLu`N`X9M8vfm3j_aYUijrN`>bm8zSeltHwN8@*AB%-GO7RAfU zWpiX3hlAme^y>IA+i6uI__EW79RhX+=vL2ch;*@E!+I-HI9#dfB6X)9LbX@1r6FCC zopFuAl*~Tdt-v1_!N)4I1XX6v3x5`g-e`g2o#$TxMw6<}Zp$IoJBRACOpbIBMM@j1 zb$`PiSn8ZREhcikNOn|Yr0>fkAhHr@nSajyLt94Mb`iWW1&KUt%nQSV=0%lk@pS6hbn%L2q_v zF-W+??J5CZbcQAKa*M_wJDYE}ha_*x;c+Jcel)uEwcZM9Igw_# zez~@b;2q7!$l@+R7|IO;AJZNo8j-+*`UeVA$ZZ|$GsOgbYG7A`!{8T0@CG7!7qhI8 zfLhqsa>G4V1h3^me)6UX!fzP>i?oNVUKC#>J7r;Yp5OO|kVki>TGrA<^h5sQ;*ypk zFcCis8-n%)QD|U~gqec0J3oxe)FsV6B$S9Eyl>0}Mg2_vDHLUGe1LWiDC8B5JVuGbRM+E1Y zu4Qau$Q2nAL-`tG1!eZGh)ft3*ep9G0N3bAKb@-M_JJsL`H{i)xiJLcaAMrJmzEaW zj|F{Ja6W6Xu=}{EG(tZ@(d&Dj2)>Lr)zzLoI9IO(6Gilv4Jbj`ZJsdP%tNfCQempW zXy`Qdsc#9kta-TZq}_E`%d=CW({6c*w~NODyu5f3c3#^G6FDZCgKnMxg*R>AZg~n* zcqBi($JT}@oGsWcg}py~XTzBvy2ok=8Q@57-Ynh}qa;6-c7@32qkG7*PiSBc}C;HIR@;f3DLrR~d%fi*kwWUzt0l)XCs#>-nEok2n zgQyDA!a;GI@Qmpte=0UABxjM_&JtNFNXxXTI9b70Qz3%SnIo$ad!1>27lk3`W`whG 
zBwF^IUH2qJhJ>5aefDDk&(pYae&ebM)}N#KWR=EMLjZ-@88~SY@M+DR7nT}@X*YfD&U*PTLDgG%AmzR)KL2!U zf&rWbuNCxfkA??w_azI0U27>(d^Nl{EiHl3u1ttyxKY;Bxp0 zY1qk5^^!7Z&e4JxgYgw%eGAw$=RWqH0P$aw%a8bwmRLPJ)Y(IVw0T4c2d)f(&_*;9 zxVn8(6fc`I#xC0slAqklFT&0M@((R8vOfq=D_0A$z4)OZvk6mN4B9u#LE_93IP+Zr zuLWtV6&Pz8CJKqMr^UB?*?5rmk*@%GL$>~35{(oHcJSjgSctmbJP^H*JuB#p?8JsT z-?oUr9QXm&ldBaDrwf-+}}M?LiTI-EmX2`8GPFVHS+w z?lNh+(gf}xY3J)AL^e7r+?{dsE4vk`Q~Q00B6DQ=38PKbk>oOR3X0N*$s})`{V71? zDKD(tY8Ou=7Umt=l3iq94xn-GjAGjnz~MC<#Tx`DMD{jWPLBsjiS_6%T@ZAp^5cs3 zB-_A0hwO~P60=F3oxS7MBh}VPkV=R2;$dfyfO89x#!OzGW1{fE4KA>a3c~DmciwHk zQkY@l9hF1#8$!-O78NE&w%s-v5+yZG*b(h#S6f?Am|>*_hj*|6wJySRLk0)fXblXI zM#;QPamXJ%hK|SKradhR)xW!N9^OqoT!Js?`3m!M61fFB&{#GRfsV#9HjJXv#F3bC zTW5QPeCia*{G2t4Km%LWJw*CNp@QaxHFjEI8pjxR;VkfBfaf=CmH6)$rx5Er9xhR4 zT-DpxL?icmdsf)@6sBs_PT6bTD-=F0tYg>%AvSjme$lN|h?-ES(*xSvkQm2ZJe3#Q zCSf0uRK-GUL)ngy;k}x&_jXRSlK~D&Wan^m@;5<-b665V&CgD4tr=OEOuHxG{1UvQ zz=KZ0zQnfpXRfeuBGUMV)pE%3tPth#yvfWHP+QqD@pb+tK$ujFZ?iuLdTGtm7oMp& z*t~eX-8>DVcU}VPcE2!1TzGEZg=d#ob~>6zft~i^r!eB7_O38oEo9gHgXx|%a^GCm z3Y#kD=_zNjVl|!{^bw{uOtF=sc-5U_{2~&En6hX;+`~J+<=-*91DRZ0k7sabd)t1{pLGLN2f$cUT z9eropn{9?*SQ($;T%*P3fb9{LD%gSTv3&s`GdIPKhwKu~<7(sgf-C`@AF{h=c^=GP zkG1;*;3p5ReDKc*k?QrGwlD+{j$fo8<2U3Gn_-2$m6>t|cUIUt(W!4&`yk}%-^X5= z?YT1W0d(aY&xLpZ`?@^4S(sd$9;!Gu+$u;f*ptq%2LxzC4=i*j=-)d($}n-Y2&_@j z$xcQ@_SxEyMgm{D_^~PxyoDoQT8hyWVOoEd&G)ZEVkSFch&%)zO(%X(_TA42(~?pw zQQKGN0^m(SQZg&|2~keFJMXfD!nA)#PQz*}P(;v&J~ljo*rXLH0p-Yihm95pBU{eU z%nG^rmR6%@zpMpH(SNgjcAnRBe$FJkqI$Rq+b}&87SCMJWJrkWaQ9udTw$+2RlP8^ zv)opM*b%InO(FJF`TXcjcZdC56gtN986vJ7p8pnV)(cSU`HeHr zkFT&h<^y3u|--2tS@QL zg2*a4B3_b0x8IA-3>f;T{X;?8Qe47g+P?*8w%pMa*=H7b)Em`Ef8HTLmLWr}qk^

733rhj-vSr^ism};jKr0ASN-b6DO0^neDw2SONj}wJ_h?roc=eQ7sGI0hW zz5crZo1dNAylAOCy2y*l($2L+z?0@T?6a|g-sv$(?r)wHhJytS)wWgP@J+AbrH*@j zo=U1h0jI|Ak_7fUg0vGuX5;OY0IlM_k(KESi$Pvh??K({azXe>Bl$I!yMVoHOZVBg zL~r5ynml_>`2R6=9)ML=XCJTq9Ict7JTjU;yWw3ppsM?{0qTtr(wRTvf!UQf0j3tU9cUOg>NVHjwwNTVdXQ;>9c z-$=GU3DBhou1BPynq%vIbVNS{?+C!u&~Q$CgzNM7h56y(nl13PxT?Ld%03p+HLz=5 zll|A{p@B%_qhqkpMekpY9mLIcjfV#|aWHJKTLfvQ%--zZM0~N2C;L#w{3#DPi5K$p zvQIzYE!(BDn^cf+(4c`Sdqw%MmRSd7)rNp@rf)CbQ zOAekoeP`o8HLqzRda(g;uf6|KleF#Daf{z3R0-w zw-n2V&XXQ?Hg4#cIo}otY68?W1!e)3S+ywYxcW-l;A8nrWayUR-Qu%zms`Y#{;D86 zCdgpa`vUHxYAv-dgxm}snaAyiom>gQb*;{l2v}r_Lhg)2uBE5==$zJy+=*P4q*$3K z?px}&YnlDcr{?EBV!y2-4lX;_Ot&i)b_FBxC;!eZqL7n~%xc!RzY4mcVWhI{C1E-( zFjTY1U%Pb1N@4Gv!5zoHMS(qRbWXD~R|BZ<(AKFoSYcP8Os(KwXsjr%hSA})e7jGW zu0Z33As8Qv#bLGJD%c4I4bs-dET$gqLf?9qpJXbbd-6ni=QS09=Q0zL1_R8we)QX3 zL%_8%BalRktcBnP1#8rktvH5LLkJ116F%qkpRu`#S^v|WO@ksFqXUQBXFp%pT@KsQ zT)R-1iVjpt8=N2Xv{(X+T4&orVV6H1gC1La`VhuEQe`dS3lU&DzO^#ZE?eJSIxf9d z+ttEk#3h{MfAWspES0wRN#L-t8l{Ti6wy zlZnX>doh*(i&_|V5_ZeakQ5tR*PR@3eO~;kMbLz5Yp%4d0#a;-@&xj-71kyQ8+?~d!85>Du|jTFA-U)(1-5VgcLOPDD{?0xJ6C{G zs45@1lzRnP(14@4<_n>V9JVYG-89YNMps?%azEW3@$hijo*;}H}jgbgHW19#&IU06bTePh2 z*&$JMK8C+4&P~JP)lFT~m)m)pq0n~;x&u95fHikSe;cNtrjvwJ3++ikSmHfNDoz{m zI0=D%#f1Ad{xVL2pj2Wo%d9j0@{mqwij==n>=J31~HKn1>J{r$0*Aba`SVz z)J^vYQ}_vY^eyd$3vHh-LB2MSpF5?AG@9UZ_%%LVz>RXWacgt~hTS&U86v_#ejZwv z8TOEe(V9){OzaV#KfQHDx|Iny1~Bc#R#cdV94W1_cNKP@gI;UGb~u)J4`z?;s87%> zu+wWRIbe-P!WMf#fTG%pEA0t|X|>!~<7G#PO0&F^s2RfW8_fS2ds@(M@Gzt*+upWQ zGztr)=h@I^mz^FZw#ZUq7#YntD-d>F-2Wb1;1SCn@|-`}=E~_@Hp$`@rds{%9Kg1e zpz8`Gax!eEuxow@vEZ=1;nP!Z*XA{LFqWQskC2_co%DQLL2-zFv7@_0BxYR(enT*2 zz_Dk5C=|+8O4c4{+hCuhtzd3j0e1eHY^0}9jdY&tCR=8VGFq#3wml|_>mnxRPGq7R zZMtYQhkV1>*e>>&I%=nOwCvDp_{vx@(VCKI4bF3X7TL5a*oQDv}e|-rujKYF?U8_qF#Vv(AajdgjM4=JL zDC)XvfFOOFD4mRr0hG@S?W1Wmlhgy|?;9pZ8*kSz~LZmPTA1c@YBrO!yyN8xbL-VvsoI<^;d zG|aUQLE6U&dy@U?Zve+4Ry<4j)PExisXMpM>zFa$F7^o|>+K$<`ia80>x52MmstN3 zNp#Cr6|F4AqbEt|5;z#l3Lr!urqZ9;J}oiK0qq?oi9&r)J}g{k(*@nElJyRr6k|C8 
z+w<5K6@~KJ3ZAwXz870RUv8}4_61-uH%J)SFJ=dJp(wNoe-_Jm z6u(N)?+)~8Ud?^N)jmm1ICmmT{Rm%1w6-N$yGG{69uS537pSt!vQo>w1-4W~O*&** zxi7Mpd?H2%u|0V*L_fU$%OrxI_Coez)$K)v_P8kUlO0Y*lEO+nJk-4UP}3rtB}h}R z>4gQrasfBMy8MmUN}pKvTK8|WRX%afL~N>8o&YI4&pa>S9}T1gfagEXHv2_&ofF7f z?zB^0fx-xM%vx(dRalc1Yi@xM#+g1cr*)lOr?B5K6ss^sxxvRDuG(hX6sA>jV#cy` z`W_(P=1q^n zi>>!Rx+_E2C}K&%bo(D~u`~s{d#Z?@DiDR1O+W)=iNbEm>6c1IPx=2M{DgMrk{mEK zel3b?&zL+em43S)OnW+NmmYqq-p&_v)ue@x)=sheM4_6&1BTgRh23zD;pG)=e-Y@v z#Mbp`w#9>8^N~^bM!P9goF^iHz0MKgkM|j_;grHLzjrP}oh3~DoMTWkWf!r~mq@4dzM+S`=i}M& zIg?RsKAydKC1$!02{MK$=(KezOgDv)w@tI39(0HeAkFxNkQ;rBI!iF{N>Lmq2b!5a zslt>bhZyS=hGp4jwO0jP7fCiD(cX#W92d$P5H~IEkkxS>B!4avtm%abzdRQTk{m-_nYLMgI=f1j z+YTSTDeeY)MPb*ATp97>X6L@+N(nYTJFSl(84qu&vRf7QLtVxp<3V9E>jzC*Fje+{ zqQDiuutoZ~5%kFLiD>-Za!%qqEGJe>d>(H(#lmzjmbQ>vT`NR|Bf0B;!{?3GRC^QB z9X_^g$7VbET~hi@Km(JhPYZ8hhCj;5;#qwCfOo4(1Jx7b+X_9*_>s)9Nx=z|ARHlF6}EBEZMd!l&$A zg~`aZ9d=IaBSF@iNJgMCl`9GNlizpAJDRuHj}<046{qAy>{o)W+i{jxrE%)((eThZ^x*u&n=JyUE|80b2OJJ7> zUZ~}-skwddA;oC|=pqJ-wwvsSQ7EvkwS)!n3_&-&f|Kh;yHXUoC9-?F%~crA zbX`HI@}By>NVE+pzkEEP0w`?X9Mx!J1iyDh5I7r=z>_5@M!$w7GmmWCdSvzFqbp}y zmWV2SDVq+I+qnZO5sAh*5jb`s%S51tL(Qd!H%_vxC*qrf&He&A^#d2r$I!I5!G;OK zd$!@%wBO1nI>Q*^+};WNmq^^1b*n`bxZ*B$6O**T z_K4sHJU7I<+`ojWwi6dLL)!`SGkfh-mZzYbCdW3F;oY=5*RA}=HVX$m-D00A?0(^J z*vsz-dy{}hEtrf5o`cUg)Lqn_!=M%0Y)veC99Dy# z^)OJgsUmi=kfnhmBDmtCRV(z8{E5#IctKig|I>_*P)bn(O3^Gf1UkZCnG%q8QW1?a*-V=c)i%Za3?b4be-q|WGu zDqDRbTv=>iD$G=32$9A}&*Nt=?l2_H&B?)Cn zK+4YYf+IU;TY{jg3B81Jd&H+jz&q0l1>6c9D0%EeO$bA8qSDJfd8cdf$>Swe^;ZGk ziCAahaC_>@0 zOVISsv&RJ7M-gm9SFvYUAR1l6^>Z07`j}R=H6l@F3c{g?Xb7>?1hU}soo_okD5BJa zb+*C8G;h3`<5zV-c9Zn-xHuam=w?wwt_WDYTSaml^5dmV%&$J|rbOk+YK5b?QWE(! 
zxK0UNhbJJeV|NPEq}J{D1sNyc(hjAC%Y9r5!)WU~;IgLF-WFgCdR*Jbe*KZl6zCD` zu-<~MQ_=Y4r>&2VXYwRr0eGAsS&x@3uw_1cquZw~{@(Pd;R?7+H2)LD$E8~ccWs1sRAQJnOpGQeiD`Rf`$beEFrc(`rtCi+ z^~qprWOjIBB#?Of#1o^n4Z+VW{}$R6|01vJ%bvaPo|h_2;}AndETCA3zQP<(wk;D- zHDe#?JY~mBW#GgWWAam0b3%#{kMbf^%EbFO$)6xV%So9Ecuf^aY0HSHEp`V>st zusDzW`GZ0(JEl6NgZGdql+H76lRYUwS?xtz^x|3~2!DDn)#S8UKAx@^J7kFc+e5zZ zV;h#+k3S0B!;2umui;r*dI~6SK3s~6g8%VD4dT|ToTZt%C!XcN7s<&hDd*(4` z-%9Zs>-TMUZl32N{B}Pm3b~W8oStcOgs2?@`3SNLFdX;aXNMGYzj}AIHt|D{63WMZ zpr~rF-ej;jGTqMo4hr29oC9*~VnGUL;6@I%D}_kjwV>j|IopplG}+CfP~E@~KP&Yik9%kMk}2 zN`vEkm#E@3JwB8WvXg#rM*ww&&nJGCAVnXUUS-$&D7jv*+KN+8qTCP!4^}uJ_mp5A zT-9DY$r43z!-~i9Ldz0{AL&Wh6ai}PS~EQ@TO67VrG&PZZ%#9J-`Q|Hcm z>{SKb>X9E#iJ(QJ&)+Kk>g_R+-HgzgSHCBnB2kHK!)L!9p(QCyzPcy^xLT`UvH4N4NmC~;}bKl zbW^t7KJxL|dhyS(uLRxG0UHp6Q?NNAJJ~<~u`33mZ2X_&VLw{Wh+HN}V-L09c-V)- z$rJ1`g&k`+x8;j37XjAtGZQCRrGOj9h~5@zePogpn6?VQH#2w~=a}a_j5d{H$KC$s zlj7z*)AkBbI~zCqMq$e2`BXA6cGBst7~My4We`A;_Uhfk{#Q}1bK8n#ti~{_e1f4} z^{uPzT%Q>py5`NcUV^lSj+E$Bjg9stMC*%WmjGWG**=LDfKWU?V~c$e!5IZ_3?35C z`>wq%NO#0*zx<|v>yA+&Uc>(QzhHQ6o3_A4DeQV9B`?n&jKO0K8!cXt885m097_|T zP>e(5Sh|2~Bu8c$;+QB3#b$7PZ&sLM56@Y~>#-g8Ipa|%w(oqpw!$fP=^3ub^e|m| zxd3b0U8Agzr>BG-e%R7IeRq@1Q4og4&>4d1(TI$jZ(smxxI+u-2G z=9oyXnG={TU;1LM>DeQ$mtFZ2BIVOmTaFFEMl$B*%lavnh6X0w(S4a_w~Iny>6pl8 z2Hh>_jhr>;X+#SB{vCRZPs*?i<*{PEhtjG*D&sjp#+{=8_Y<6+UJ!vU%t8dKOJRDu ze>h{J_4+BmnY+=Q6`O7C|FI3EJ1py?Sh97NZm~fM!`z{!fHDg08ixdCdH*pMg(2k(Z;>7QEkA<1iFD9Upu={P0`7 z2xLs=p-7I64hp;Z60MzPUkbugBDaS>JIjS5NjY|>!mc+*=Z2H`m3vz8hu5U zE=Uhaf;Gq1_(*sNJ)CE+d#KfgUy82;T`f6Cb@93U)Ep(c5uKKq{A<^XLV2M;)pL*# zl^w2{)v>MF3O&UBkEHeA^H56!Pez9XnO>MB;=T)m8#vSY^)I2pdA|YuY_P&K38U?) 
z_JDvJ+5L0vsF0s_@|5t6u$w(`z53b@&H=iKi)0Psfq=qwqEXtug4gS8qzAZE$+XH7 zAQd_M9RluGIXnDtUW6av<083k@Qd4sbD_Yc45U7!uw$ji4L8MEUJUmShw^NeFsxxf zPJY_H^^hKKTei|}`IU>MCJCGj5_F8yMcT2fwtGc$6~u?|fA@gT!l|#G=o~?~3-0!c zqBZkvi6}GydGZ}R)?qNnDnz39cDLyC`;IRG9!3rwZeIwxvxI+S!%&NTbE1@udI7{= zcCr2RJXaO@u$qbC2^$)t@Houdo{bfS8;~H&B-yNDLrrE}&&-V_)+Gox7aK*<+BVVt zuAn|fi4*CT;(}j8U?u6=T;8>`vTN?7WAz1HxW2kXbk`k8X_@0~pic)=G`^9uYJderj68|7^L4FhiOB!)aV(SBOCW z<=}=2$96p{H88A7XWFf^H%YOtG(Hd9m%I zm!_ZmhIkmFZVa_!bw*m~LqvojZq(#)837UzTrXVSv*#7|SPWyZE@V##Q%f#p3AhOG zXf#StVXGHXSIg%O!Zr9~bp(y8^k=P21o|#{_Xhhc7MayrvwN9+?IUy0;z;@}3Eg)$ z2C4Sx9v&>ve{Hs)W0G6f9u8A%3i^r{obT#n7qOJ9(`Al8WF(Yo|EI`cylRjNz-g+3 z9c42*=Wn)h4w5nSy>M691-+b<-tpD#wquUeYXxOkw z^&7KRmLswz!uLH4%@fhJZ!oHmANVvB)={~`8-}~&_pUtcOPRgFn;lO-*NaS6?S?46 zpNH+mOLea}Qjo5ZimB8~i-nmncudQ*7X{o0Ih;GyY>q#PIrh1zYFpL(=;|pP-0kKI zTp_T7Vm5Zsh26A`>vIz_gY_c70}>vrCAQT=ok96!`&%qKFMkR4fiPd=!%22jLD<<} zwSTSs)6)kIvdb=n?so(j5Yf59t`v>_k2Wp9aVlXqx$h2HypS9JK)o(33_H>4W%3I@ z&qLV1oY$>#L4T#eQvklRMn8U5WCYOZuwY!MhVd)-Rhz?RuoqwwlJsIdSQ6O z>xgYq*!7MiGP7-m2!5}EEkkTdY+f$~qyN1ay88nCeFC4QiAYIt$bp=vFzw(bCIeBm zUkkcQa{@I)VJhO5qyRIU8|$JAGVqZ9%l=83*C3y|B#Axx{4)T!&TLZ#>M6$h?dV^biBQ(>X2PD+rf- zfI3gs*c1_{<*fxw*Gd6Rwx$))rY-Upu-~VLCp=DZtN)1zZmi#$W&ag|Wo;8F{rpRT zz7JV+aD7dOKPe(r?%f=GCG|E%1Ty7>lk?E6?-YaszR7C)kHCr1!O90NvlyMH?K_|7 z?LxM2Kfa7auIt}ifk%-Gh2d#TAX9#o5PdT^bAsKhFjZlATc6G!1t~q)o^nltky%!Z zEjG|6qm^-euCeqOoXBtDbU_-x!qE_@7OoWmjcU4Gw%eZ-hOM!K>}dtvAn`rm@JzPh=ZNSz8an{* zra(>RK%9lo{jZ)7Tp#FNu0sU3V`A&uTEP?izhY$!XW9Jem85XY1!h|o*+rtb)0KZ! 
z=bCk!!6KVLB-_FT|DU zM2|-2>3d%&uDlMEIDS zf?}UHg4WeD3R7juINPV7e=2p)=D}4W>*IdK(EI4ZV1yC_nwb`NH6+)nQ5+p=1Ysvy zzuLAcOof3>(!iL*b3UZD=`y5=48ZRM>F^QU zVDpkHFP|oBVp;G*l3@)oi~;68wl{|N&SlTo zCQJv$V^;*nt^n78s4!v<$xgi%3jA;eOG>f_gy_Q2V=Y5Lk_JB(CD<0Q38K(XYyjkq za+x4Q#S4`r5}K^i$Ma3~L>BWULHaQsPrL6a?7C)vyK;`O!hYaOI8cWVVYwLZ^jQv1 zUuS1sM-~{v&;S=?{e>tDsnhN-0$DahbXphJhs9x>2X{!n_(=iBp0l?zRRjveJs#6_ zzYy~P-cRg6J{CzWUb>)d>5jIg3+*#e+|M`HlssfHWZ#J9W=krUb-j83-3SKr^{yDs z()<3<7~VS@<6EPJ-5L^DO`Ri$++#$f%=Y49iF7_7s3m8pJ))o+FsWnlDDtEzYL^}} zb?PjNB|%_!$G(|%KonO;Mr&1e3ftlg`!1F&T3?IH4ZA6hWOT?7`SQ5ln!6E=*wvd2knF-}ua(MVP|s<)XNtIZ{w!eLZBhX#aVmpj#3H50?#rqp11O zrcD@)vnLcw3nbIbkEs=T%-obCM_VhbUPRt2=*12Zm=Ag*WP3l0+@)UF2LcsNl>eiyI<|ZE(0=*}5!~?O|2e_}H(3;TA3q_-{;Dw5r)N%Je!d~(dPQbV zxk65}s88L|QfNmMc6IY^iQB#F!7#>0mSICZ*!L6?u#XDB6~C0zS@ECoahX!bTqEGE zHq}1WZLvfTx&Kj=`)UHiX2Gk(SvNq0M~q|gf)Xd>cw|o|rxly*@1l{dy>J=|ona+H8tBbU z=%3mW51Ao3;c>P`&@q-I)AI6o*6Nd_X6{~$t%1HbxdPli#6}6yeW6rNzxN8bfy>Gc z=^c9tA6iuZ12&D2e6spvt=*B+g>GHCg9TPvb#`5h?0TB9&6|6pO2 zoqIE3eHj~TgWIqg`#(_}-;(}iO>4AD(Sj+xQ-(ZW@sO5wHB7R93cB(oQ*8bI(2XU; zXLrZ&>kIK#HAa}y$UcBAbDp4^k;!UlqUFaZ67|PE(?p-;wP)a{$<_(dQ}{ZH*&Wlhr})Y8c}Fkl=qt#6sCq?4rv&XA-BJZ=tg6-d?masd}1QgjAl5I z>AAzA(+vk!1upc?=m(_S_QI#FkHXZ7PaQ5?6Zv`T>nTjAXmxoMyDRsI;;O;w#RmJ6 zFvT5en%Yr1*^*)zu$QmL_7qWIHc)iM&scCd5%Iau_L;^tWwwWbb&hg-Qjj_Zb`3yJlX*Q!1o}s3Lp$KX!4?L(*ASNtogeF~u*)`-m%j`f;|WgGfaNJk z1j^a7Y|kQ#2)N-3j%>}2;(nFr$n@4V7_LtDshoI1bn|K}_NCIEfw*{2u{9#_)#=T) zkG(ELcO02g(>Z;nedZ%cJ#wi1{7xeIl!o79=P2m9?Jj=5t`>CM1eO`Osu0CB2agRj ze4z-MCDS;CZH%R7Pbb4kZK5z@*b>Ao^`YjeN6VY+xQOsZ^#exQ8Fzu)L>ME3u|q;!hU~L#ztc!8^~3n zx#DF1k&pK+pX2ZxtVk+M(HueIIW4^{2qS5&>r?GN0w;z)BRSW;_KCUZ#P6LY40VZ< z(WA}f-Xv+EhINQHe1r1m8d z);iG`tfBYV&xR9pEQK;qL$-57fd8y8cZM1EJ3*3;7-;zlY7OQ=0(Gtt4kdEUg3Pp# zn#nJU-8vHB27_Zqr9Cd}`@g;DBLWp9 z$aC8f0armNg7xix#ZqH31RIhk-R(-_=UEQ#uJ*8_bn~Gd<#xRwJ)26_F+MzCOg~$v zFqP_!B~NNGZ6dgOM#+`M6{B1N=3HD~9S7n>fa@e|qi6>&NDp1d_tL`#3i;a;O!aDS z%@_w%(q!k!)=ibt&@~-fcOvd2yE<*6(BXd?U>_>zhR!QV=i2W_L!%#99M{<}4{|0> 
zNVf+Be04Gr5DEpdTEEhg}XTP!bkq;kVI@?aZ$EQJ` z^EV2Uo8oV{#(pQ{1|1EftTY=a0w2DA0i0jbHF<`O7tysdJB;F+jCZkT9qUR;Lz*Db zh6%ZWyH^5&rNXodGbsu9?_MtGIwjCKD6|)R_VLVSrKY68|Kq8p983CV~$ zNUr@}SCF$fCb6RnJcSlXR)pRERiaS$5Onu>Mi?PTll7{H?bujye()w8SY>IVxK6+{ z3`f%Oo)B0c*9-Y$u{^qElt6Z=BHA$B8bqN|*hoh;aI1$MQ)kL2*=|855i)0d zYCf|#<}Hu~ZjUIAh5YcYmQ;IF1ctSvuEoAl*p)0Z?caVc_tVJ9Y_yC1zn9t6s=$`NA3b z5t#&g!zYlJO1uPnl#{1Z{f9!-!g~i^w(JuR;b$DL5MO&3yxh#OlOG^49pT>|>+RPf z(909^m_L0yz)B4UECk$#F({8S$UVZeEGh3{OZ4H1^jDJ4Gy#+M_LPt4*RP*7E9?r` zJDbabXN7(DRc#Fx5!TxWU8y%A=`}=Qx4c3>?{*TmeBnxUm5ftajS}5zkx;9(OC< zi@IJ)R5izrNGNVQelb|2BUe5Jkb!>vvEcGaIrM!xO`cVLXiY{#Qq_0qPK7$Qx~n;?uagbN^6yU<}+YnV4D5lAy-?pdQ+hD&}gTM z;(8FZ>P$OBnB1tI<>D>y5J0$ zg&iZqI0k(z=nt3pkYX#jA-Mnl)F*?`!XxrGg8ndr12+i?UH=zXsFc`-DeU?yK9ct0 zLTv7yh((a^Ngs}`tFYM$GZzNmXK#3N@4naDA%$UnS0R)6MSsQ@jZ<9pp? zeT3jXuxwsve-@;)5d$phW4HvIv+pVE6!aq~>D~K5K{tl% zAQNy)>$2*E#}mwKpA}=OVOtVQySI$%pZA2_L?6S!nfv;1H-TpWb}2WC03!#BD6PnD z7o?ZQgi<(4goJ2YdqJs{Dh!LraL%`tKB6ZsI~x_I1a2EA<3jvz9@5W)xh;QJ(ER}K zuS58GXJBXfoFu4Jla4LB47*r}-oLxQU9KQC=1$16KPXI%ScS!Dgn;X?Xho%M7Gzk0 zSKe9jrv0o>Jr1w&_PhY?ma07GgcAEzG6c$v*3Yv`6m~OV_z)W<vLRTvJCaIIlq+hAY##P?)yj2xd~ zzfB@I_wNK<$!J1^?LuK%H)8nhcB8^>3&Z;qmu+t~ z+c2M9_eQMe8G^3wZcoI3OSWZ-!ivJ@uG53`HF&odk*2=55EVo#_Xt@dMEEZo*ImTm z%-nDq)?-D{*rEKVv%Remf#z};9cSBpEYmANFPL^xsw+U^zgs5h1O1^Wv{AMqm^nWZ zq`W|W38xkIV-frZl^qJ(S;CrUIqddsjYaAUIee*2#i zbW`h|kv2CL$q~U8dKj%QmxkPFX{3cOwS@bLu$?D@>rFo1ZTz79P87%KI=5NzFx4ZMRP)wHMTSUlU}Y+KLz2M<)tcidOMlJLh2+ zH4)cd7c1X)g)f92$jp&_)mO z$;nN)z3x+9gB0z@3QLr81Hzn}_B6A}-HIXP^|-bCLpD)#D=#9fZbOCqJP)kETcRk; zH3WZBaOn$Bhg9Qje6E!U($sW*tmY}~+A}Vb$yV-@23EAHt--@Uj=a(K3sP%hS|&@~ zk8@nHd0F@rPPFp{sj^Ss>#wz|d>m&esr~m6bkzj*X#2-79!&b!AYls53+1u^jSzD4 z4xs`$E~v3FqIs2r3id(B`Dv#b+%M$D;#l7By~o~74~SA@eIgbY2)f|)}u>S+FC*PJ@rcnC5F={*an|Mihf)qd>}}3r7mk7vE&?y(<8{- z9P#lSGZEDi@B`Ek+(UdPieIY&leRz2bxDqwtvNou^!W4&8z2fia3~C z(W7mYhkl)3E=YSit5?`6pIOUjIE~kau+@m<=W1h9v~j2H5ycG)q8`2NSPZ}UG_o8Y z3#-88P4=Ta!s?(&%i0SU*y(~qjb>BROJV&)q1zCMDQP125z+NvC?lWO&ctq#)_Kn6 
z(u5f3ONGsQva&2B7>I@jdeK{h->XSh=u<_jYxF4HCaB*tOhyMgwhQtd1C$a7x}Y7s z$VQ6j8aFJIF~Oz_Gsm!%z$t#F5Y5TKP6q;0m4ft*lR(t&=M_OzG?6 zXS!C9KJJNesJDESnz(U`{o)Z9g_&vVtuXbXPlNdj0g6DW6h%@SF6gFNzxYHf$xj!- zF+E&AP>Y1ALoVOf_;3~v$6I~)@;+DC3qE`k`tL6*td6Q$ADlMl+fOGz38pC{Du-?x z>@1O3tlw_3^A&Uq#P!yl-S0$zLB7LHysZ!QNe2$Mu#cgMIjPx}B}nK-jC=E_)QBTh?TR zN!vL~5zS2s3{M1>f+vgQW{y+>c@Ul<3T!3puCv9lLbyA_5;4k9FZp;(soARvJHB|+ z#IUolX1yHw+1KC8t&bIjDjV^bt1nu;)$8?-5rsSwnLxinh<;CvP~2kzbQsNyi@V;6 z1*r`!C(IltTeYx1pEWLGi8*1B6FU_?DLKQ59B*IuB}J>6qN|JSAA;1E`dkuAaVNKr!v$eKl91ZE!_tNH9Z&W*z?mYPxTp|H~Cy~*Y`FnAdUaQT!0i8KOr<$DyK#EOU1!#zTPh~ELYHRgY`j7 zi3-Dw4jcNBwHgsQEQ(qAK zMA>TjVKFJ`hC7GZ)e6GEp>6Z<$7_G`Ft7yPY~_Nk#;i=-yrkNDK0(LRGdc>k+sTt$ zcIM~=-cL{QkZa+o>>%R=UAGJmW0QQIFzm+lK-BVipB9OO!xDxN)Ji!J|&Fh?vFi`EE*QX`jn7g{)D8Ru{9_~mRUC0&5EE>NYm^O0&ej3E=Si9 zSqgjHC&0M$40}U>fs)0xcNBK)r?#%ivL8=&aX0qtVP`7r;_i}E+0TXP$An-?+65jm z7xUN_UE*^NlMyI|nO-R}PH1C=7>Y}7u(=AlA_wSUeN7Cf<*~sHu$xrz+_YxcekGvJ zx7GwZl=lT;eH@dt%SSOTyh7QjQ(Q`5*YwSHMhtdLsj~|N{Vkx@`@qWqpX3#>WL)N3 zFG0%hH?*HkQ`qsE6ygPVl`x&mRY9ILdw>&+4Ewu)`w+QP*(4EU)~;JhUHatKIQ@K-ba@dH93$vBPIuANn8YvAP!BogN`!ZmpsNrOUp%ckimu1K7v*v%$%B()Qo6Pwc$$ME zrDeo2^Bi7`c-d3F^tNf0ZPUuE%tPMgWY1(xEdB9y8;`G=Va+~4a9nP&GiEqmQ38{% z_?rZ^!b)1e?icd&J~KND@5L^0;ND`Zz3&sZR;6**6%}G&(;~Z9Sf_w%WKzWsWnx z*ODg76NT~70UouKVnJrvH9fDlFBJBBGt}Sfie&ug2T!>K*rvAA6{fUkb7{1>G1^>c zKM{q>^TzR`bB+LKl<&!&)uV29fyhiNwsnPpiu%1GuzFnA*H$X%_)Tf8%(kW&9v0$) z?$lW>J}Z=roGRXDgx!RFIK{3L5$~vXoHU<J3d4Vnt_<}&SRn$7wsru4AS0(bq~Mr8ig2yj zSMvHo+%<#YW&(nr7niy4I41Q~3e#)-Ls<8IK!8l#2c)KOkrH6{6{4EPs*s`tuFV66 zTB(qWX8X+|_LK;2i1|b@nq_sObQ@S@GtDomv#9Z`VrXU8^0{40X4)=6W|No9*YD2P zBKe=13in%V>9e81F1IABY@L8>BUdP4d(}tc!(xM-`E+;W0Nt^WgqR2@`4bkYP5>mjUG74;g-&B?@gwd2wO7^_=4v2yV=|#t?*Ie2^V(-r4!| z`p!8mHcCVo9x`IIJ#iv^MUyVjrV6rEk}Q~X!kF|e(TYa7=RM$4c9d+8SnIcf)QSMx zI{WEd2VN~fQMz1!J_s}#Hrcy^u1y$3$wiAh&5nrXdiu_AN~T>n&*dK%=I69bz(pc< zn_=sPVfR|Boi-@!3PY`(vanpSHI{zwD#rJ}!qneZx}a^^T(kK=7){Q@&d<3X@X#E# z9s+Kj56YH|%O6B=zxUDlsewd6m8_wR72VHR8Ix(m6vQ}RQlhkQvpsk?kDthV0aq*{ 
zqVah)PXrh`yl4{s5^bSiuX?Rn}Qo$U08oE0vsgrvX+*Ie`lAL%M}}=F!jlUtd$8c0ohOn zCNzTEoC_Aa%+YO=q8JUiNKhS8KNn4w6GmuSEKYR){ngBsVXU5_P+E|3dbGZY#Ma-J z#;w_0RH@Si-O|MBDOn(Yg=aO2p%LSe8Oq5;)ws?R=*En^bS@$f`=UZ92LVqiCxiPb z?4Oy*t5dRFB$7@DcUh8xw3BbW%5nw7S(9{t0f>t^A)DU?hVIdT+TKM z(zf=ZsmM#W7cR41qEOtclTnlWt4|GQ$$5$PiBApmw{mAKAvLVANXVOY5x1>pi$rZh zMh&vw3RBoy@-TX{fE(DoD|PQ7>?Y{oP+k^;6idK&Apg-MfL9$E(b`+>A+-O4F_u2 za}O1SiMFNBv@PA*wsexs6oqciNnyWRE2*tBVFxJv}u5J{5l{Re~`bBN_8Tar)Ci=PvZT_=KumFwoW z3+;f1ToRSCOFF&6Wgm@ji(FivB}@@1+l?HQ5s)!uS5Q$aieQ0>mT{yl!IcqCixyE@t?3nNLo?hEv&*;nthv z^a6wdSW6TV`$!Y)Q<$~#^4|8YPc$)o*J8vNRue(b1oi|g+6yPO7gn`n!T&nZ#XG;^ z_bRNR!h=RdwY3Sty6$s=LxS#w;;SJtTIk4!Ek}Ub#*Vc@Pe&zYt<4pn&=JG?S*60R zLuAF;UJ%y2YOD#I@zvOWPEeXxMw_=rx0TxGqR_syFm}={yvCJwP2V0iO<`E%Z8k)2 zzbohp54P8bgf)Z1d<{RV>5k;-KhrJ~b|cq69AR&lY|qA$#NF7tkDa-eBru+X5#~!2 z_A|AnUhZe+i{c81kK`s~vhr4n#5xmPrDIetBQnw2L~}zPZLE!MD2g^V+7H%2q^pv~ z*~tp}?+6jeu$?V}YXx$?d3LR^o5k8%UyK6*zDEdn`dzTlXsa*?JPth+V7fECt~a zhp{K5bFsx;X}5~*7T!_etUUavrP>(LD3|Fv8Ud$=Vo4-l9C%jTDH@D@Fn5hTr?Bt8 z&D*?HijQJM{ql3wg;@M)wXR^au-K#68G(d|?ubtfR6HulE=G)zZ z?n@dzz*Y$PWI>3SHBh&V4E)JHkW~jArH(_&qU&*OfhcaaG8l*z&@W7 z10&cAwEw9m-t~Rp?VfCQn`o}@?-(-3h70n6?2}pVhXmkh2v5xu6sGLpcUC3`PY_^I zcE;na!k2WkU~1>|HP$Z3#}FtItv=AmgXup-qB9Yt#xK-oLax&iBRN@_893Do+nEhe zVIYDzVCy45hCBF}F>5U3*bh_$5}3dxqB-`HIs2a8=yD@My1>r#fYAuqB?7c1lUKio zd_<~5aA*36!;7ZaDupRc+u@?=Rx3#L@rcu8*mfcJ6~=3keNz}tTi0{pchUv`#qV01 zZvU$=5f|Qc;pKLQ0NwCfot$38A4Fa0U-@H;X{0Nr9NDTp~!P4ZPE4_?SHv zd*<(1Wfg*a!~(7G6HT%T=P#86$?- zI-e4o26z+|pmqwm92XEjY67}CQd=t~*m=UPpeS;hl{4)3qPQ;%vvE`872O7j<~~zx zp@KWW0?QE8tJK-?N$e_xS9l->}M?OhhuU_`C!md;FY_EGp8@$bx zC&S*BAV5oVLgQLj<2BxJFfR^c(nw(dkyv*(P$)d@%AjcSI7+zZQ@b(Pa-hReBQIYMd15HaucX)?s9v` z7Z9JB%J=cO5Mwx=P1a%ZM!uA8_@kzizkfk^tp1 z8&>noHpxB{!EYoQ*JFhxFi>A%=RE5Q=~`NbwMDx~kXE)eZ?Zfe#Vxj0_ew#>z}^)B z<)BaS;=+U%7pB^o&vhqgD_GiAP;cjnqDG~$W?U`cdN(~Y(}sGO9!X?~?QTIvi-!S@ zG-02SGXOV=dr`oTG7pCl`@1JF#(Hd8XrGGU zS`o#2_l9ZqjVO+<(W3`i|L1{jvLnIUV0Q`=orvN2-IMG|!S0JTy}USStu0ol7)tJ# 
z(tK$9BF-~5Nfg?II3E}7B|`9qFb@v(cn_Z{f?MbGJdd!59`Uz{>gwN9zGt1iBKdIo0L?W9(RFdoU;cbX9C`VG9@&hz0T#WP%( zJXx*P@OYbITxFxe378qO;hxf7IG zv4;g1AwDx5rC$-GvNSHc|KZc|o)fmWJlMSu8T2oFstMe+4c<)-*P;XK`FQV(;gn1+ zu)@NQqd?fN$Vx<^%AJ<>64Io_)5qgXTowdMAS$61_`*~{7meT-W)y z!;71ates-p1YKb~L*ObVWFLv-YGk765dN9Zvu^>g(JuabcY;9N3iIHe5_sENjU5~t zDvG`^sq2psfW22|aD%kfo)mOrpN0OH&GgaHnu=&my_E?v3~7jNMQoptdRhnPwbE_; zP87x!3)x}BJCN9ask^*;B?6EnOqRe&EFNMDZJsDFa34CjoKv<5xj3BjepVboRE6ovsK^Zb)xGnOL>Zs)PMQ3N;oVXpPicu&PSIy5~; z>uZ&=j1x7UVZT%a+>je(FS|;Rt~yx6J@qXD)RNM=!3HVpr>Jw4V?#x9UB`D2v8)&# zhCG>d2~!q77`gV_Jpk%qQ>S;*KYJK$s)=r$VT%Mw%8>4~l>$`7uYF+OmS6nmP9)zj zGul{Z&wDrq>mz6pb_l{}*W3m6UxmA;e$Yz$^{ddRo1gmPy({c$L6UbgV6?zT(fs}# z+;RlnXUc||_m8JeWJMtzxuG&aHyv>oCxMc8Pb83vVUzt^(0xULlRr5`(S{>!FLdTn zD1qa-K?Rr>&r{tzdgw!6Yu=+X|Yjx9E-mHtt!hwl|AHkv%XfJyu~VZ!fNs z{o-5?nSHC?D6kq2$Ar`5Xysc$=C7v$5$RS*-h`8}L~a8_q}agJ;cOcx z$S~91ZDg8GFJ+13*iV#L+FGCEz|KR}C1GoO(N39D?i7W((7eE8 zIxn{$`6PiiBc2Cp?bLs`!Utn<$A02L!bBwVuNH7KK9Yd}0Q8=0h-gf=@nLD_%okGe z>-6@B>%wXgJo{X}*0e-x%I(=0F_`;oEfVcl`$_303NJ1a9JxajS~;gCFnAa#$IY_#BLNt)Aq)mxNdiev9m25i)n(AglL-Y94bPUh^CI9ko;6gzhR!2~FJ9kZnH_h3T1-)w)sceSazJH@&PhysFJ( zOM9`Xuypx_7unT59CvMB8?7*-z}Buf_(;-4a07RsTIP^2ob!Ze?hAO0i)owZGtmLu z;&H&;6-&S^Wu{$ufDH7IHmpQH?jfg993RhAF{erIfvJ-=L(;QJWx;%=8ZJ`k3Ipz*qrl4awxZ2(zOi60f!D-ea z2oq!3J-=`wN)xfc_6oXR70HvP@%#LmNQ@1ddD-?qZ@H4_+-;oouN9=E_QEChsKTz& z95ltO#V6Gl#9DDckYa*Mg^zq(u;VSX{|LIKNhbyaShv0ng~r@B#8MP={gRm=-KIhj zXbsXyO+Q4?8MWbDpz%Jd`{vGd4w#)?faBJ*FV78-%rlQib6&5Fet7T3acKe@LsCDseik zn@+^2%8n|QI#Z-Kn`6g3)DDKD%+>F?k~^1FTeiZkkNG)-zsE&@cbP-6DFT#psJY^3 z!6Iw%8IP`9Vz2qAj#)LG6j09&M&e5D(t3Je4s)Yu^#V3a?@o%CSEfp z32Rh&kY`lOJj9Ze++8gYq#L4j)2vBhx*-=wYVjEo34Y#F=v)k5Bp^X{L=>kkz)R3J z+_1?3{bc@llU1B%A{MIcYXMr$f+=rl$d5)LP*cb3m3D!`u#KMKLJs-5sl7}jH>Hs4 z?KY5U_lr(B!GbPhp9@9sa~i3dh61bbl#W@e+KbqEF0*GnC3vOe2wY{yd@-~{cXyB2 zmm;_|>5db!UT3#KqpL6}rSUL9X3JeEmg-|#D^hfmSs>{8_+c4~UMmcf*uTJe1e@(W zBAl2%=!Iap9LY4MP#~Z7(+`N{SBdX;5<5S%xL6^2ipZjq9^5-52*C>c?p_+f-MWLv 
zi#B#GT4U=4sV|s-yl&bHBG9?{XtKPbFvF12){HfHpD|D`sbe6gRo=%^0?l1 z!y%WwXOBL1m%^^+xKNpFcl$V&M5|U9#u1*}WNTvyhohlm2RzQnk042(Wd}W$E;=q* z?69!cW0B=;4(x$8i?pYXD9)=J#lB`XKlOHghvO$Xn?hOHelP4;9~EZHA;_ShVbA36 z>m#*C_#IUQ3L@6 zNsPI>ds!}9?z)8)V%b73g3^0iU;$wZuq-{vLf)7t#S&2yb5~gEl2?t!n&kia{?07d z_j&%}v(J3b^fU9DJ9FmDIjgKr*g2Y`C(Q|aH%-ZkQ&pCID2fU%$>x#smk)!SvEinC zhYb)7j$}2hDYS$D)pfe3pDj?B8Kk4@DSrrm7m@N3aXnRhN61@%B{}pXjGRboJa}lUIQmepT)l7RNoupl4zjvt!VcXWQA@VP_)sBehEPhDZ z(Ptg=awr%R9C||Z15rFh{}|?@uM&o161iQgFzm%Ba{}|LywxqTTScQwu`Eo5-w`sS zZ8B6hp52A{R9M=|xZ$W_sndhLCq#zHnIq&*a-0xMOI6R%i_?_>(xmnsJJ<_pD0?0M z6!a0!L6Lw*5rvk0+>2w5ApLkohz7Cm;BA=zWow0C-D?um zhm>O>6D_!BHzu&N2XWH(b~>dqBRe5N)RP9357wC+3E;WXCjB2=NA+y z{;is*YxFk-i5n@Kc1J^;UTZYu*)1oD^PjpHB9k1P3FnJMi*c}REecZ-9q-3&N2?%h z;8cm+>$HD(q_({^W9{35{%J&-6$tJ`@kXNmm$YnQD&totIHa^CB2nV*8t(sWrBK)r z*J~AJ|FhkiMD>FA$tuJ)dqor<^8>5j2^RgZ)#?;z|QzzAiX%cb+TQfFbs$l zVvOo(0evX-)sc!zwT%3bA%t9>}cGGWl$XODd>#9|U)vVG%SlJZFo z?rLW%=)L8tXO;~XrdY1Ma7Yf}NfFxgh)(&C4zB;KOtCa_SbQXgwFSI0Ioyan6XLMP znzT0s=-E3nSKASVefT1E4n$t(zX!>=g@Xb8jI|fV8-~G)61z*70$L_6uzM7yBE3GM zZ}$b*GH%}Cnb;%$Le8eOMYc(pHYT&tNp2C)JVE|&RADg{ufb9nFSmb-!oUucXvhk0vmA zs`cc|e|kxHj4ZaZ1e|NB4NL5b5XU3)gxxFPgPa#Dz4~KMgIV#o-7KJuLG_B%st!gL7rdViz5 zKb?TpW3~NFSR-4$(Z2HuVLGcf;V9QwfRuhe8pz$#Z~)8KNkK9`&clTBKPAjJ*(ocD zjny9rITQ0riV*VtL)_jGT4G;FTNeX&B1qptxW`$%&$1Sw|3VWkEMJ8GQA_A3h8&!vS^mhW3 zx+cGw5oAeCZI~0y@Hwds>!S(3D7A5neW1j&3>Wu9?99)+AbwV45aKRDDvER#@>wPO ziU4zlG_B9g!Rb7R<__#(nIYqwVgPNm#KfuUMMZLuYJ;y#>EXz?@hG?*BIK0x5H&mF0GTvSL+YBN9j>MCd zqimKa8Xmpw-eSKG2~r!DMJ!t66be8q2~)_sLUEiG_>lf8#E{{h#NG?x!d;{7^nahK zCKH)+9B0*{XhyCqvr85B5gi(5M1L(z4LD*ijoH+YvfZ`qY=OcINk-GweEWj{Ozzyv zQh}bU9-j3dPqcUPo}G5S0NshMr^;=uAWW7P7@Ov7+u4@5d{2$N6A~XSt2|gf&aV0| ziJduJ84)(KH;dve#jww2yDgo?>a)7EWjKgA=!WA=)mFqO8Ef14I=AduR5Ppm;Bv9b*Nk zK8YQ~H40Nm5klEKdsB$T+8t&`wwaqn^7i%B?cz~k1|iyWU@F<(O(%G>=0LsmKAi-V zezb12#TC}#xq59RDn?*jWeY{5rPv6_ZCY>J1Zf&xS!4F~FMAwTMq3&y?QB7oA?b*w 
zUkB|p6P-Gm$CY{5nUt!Ur=NfeLm>>*P!#4b3h=m?C+QqZ|@uNL09Aw5-~_mr3XBB6IeJLIZRfKp(Xq~a?S;6H;r z%|~w~-xi2O$9Xt6&Xx(#qZkH{K2(^-MB=aY*ctiKS0VUsY*$&k=kUyyLPW9;lN9VS z0kR)n*kE@m>}|z7Sc&yaV;=3Ybfqz-*9vTbFjIh*b{%GmglH&yKz9&Rd1%pyMBxXq z^Lr#jlek-cyf7P|eg74S=0fU%nogEocm{+prL`nnCQM5?F%{X30`vvRP6Gl9T%__T zb`L4gwJ5Oi0B~iKWwip%w|GIKNRQqlk}s5O>9b7&IZl#hx; zcO)D^{uAjmSdoZuv=j235jIN5`O}HV|1ldc0;R$*9U|w3)S1#iuodaZzTNJ(Jps02 z1iF5My(Y-A*=LabO~LRBgH~ZNj&1C)XmlMxOe7pzV*e1u$09$DF^bPa3hM48BR8e? zlQTUJ6lo*!%J~XWe7bQ3Co^>{%mYLe$7KFYrICrGVWbL3C}o)dqc?^xkWk zXpPw;BEXaa6a(`uONar>#!z8{!eN=y%lb0=ohVcm>9RD~17|sGSvLu@1QruiFZJz~ z={3mGK%ozu)z~grohaIKpzxp|Rb?5`HJcCPeblfQQ5H;D2c%FSf4-7SLW*85aEn&*lV#wgyjl1*E#m5W9bP#GA8+pK*7 z(hautmJ(EB`KbRJ!jB_ry6BtGeTH%a1dntz1*kv7xb2Lixx-X_^>G6(*Cj~o{zcZOwnjd0&`Vm=^W@@jzN6#yAXSL<9vHX zVZLxSE!*tOvmNk!Fwf2va1Os(_gaIs2-DvF#gW26uWxx;uAob7Q2^*6&awspzO_iL zZ}XnUJ=?5FB=TlAZOd=kX1hZ20n$-_S0H@BGS^YB?J6_rif?-!_|t)3)EfjDGe2Rm1ZadX^#rhrW5tkEfF#nm#}Rw z3F+v}z5I5Ug>+9maiz^y*he&;-L$G`r2Rz{zPG$YYMDCgVNbSiDgNjpOtMGrv_eJs#Os{Qv2g+7%1L*P69nlZmwGZI z_^izm0oL{IcK-ldC_pg}4YY#_db>NbtK&BAT~Wk2c}H1o9|`(b7C8g3JHC60fZ`pO z#WO`+AxBX-jmeFi}t9 zI|K?0=WCnDe7Y-5$;C$=UAf?AH#Zyhh~PExP-Id_izfF7TO+{d z9FF7(#Iv?mH2?L|`xU%qImyR2mV+#3kCOPH-h=Gn4MDFluVf_0#J`K+qowUA>(9q& z3f^JFj~^Oo{7Z~pC%F~2`F4-!URS&J_{1J4>~;0ynR7)NCRuNuOT!`rlXKFU`*JL| zzo(-QmOW$hzE4^3DVE9Ox4i;h@Pk-IJ|YM|?|AZhI~k&wmNQ;|&i*L~12GoP#P}#A zyspEocJq0jN&ZmpQkdDgXR7s7kb1HUSsn)qcs;55DfY(@7i}NEb-u?H#}Mvk*|~z` z%WGPmYnKZ+BZ>+*H+K(dqaE@_iwXKLWfsM9o0eqRQ|TNTO%3cQLJs-Y<_7ZF0GrG3 z?pb5o(gh%sXFBj4FQnicJ?@Pib~r$OMQSld|1Uua!t0}RV|39Yg-iQ1q-&nAwt4(U z`%i#KG^>%zefI*g!Mzyv53}ujA$paK@#PG*1Gfm%TPy&#i`a~?6dw-o$n>o(l~e3UNP1usS_9|)fTZE( zO1}kkX#aTMpxH%=(@01~Yp4M8TW*(GZq{09h>d48J#Ui*=wD7uS0RgqXle$NvqoXQ zE}kl4E7}{hO*9|Xeam^nwM&>;bN?Vas-RDlK`c4{OsC9?6_?n*g=q!aMs?Be-1mQY zD!ru8KNj@uSo6Xu%?lea;$t_9NOR;V9rxIPY#jUdl-VPlnP!;bIaJ=5FR%c4mOYxzfC&oq^_h@@?>WDi ztCcr};S#fdk-a70^Sl6q55;y!1Yd`TaEg`vunz+XAu3B!bnkH5C8VO+d&lm51|iTl 
z18iB>aCp`f>mbMkq!&Gg+N}XKtt#dRuZNI#XwPE4{O#696lvPdWkW+oe+kbFu>w(;REz+EDps;}%%VaO!}~E=lG20a9aa=F(;~bkmf^>^LKJ$y zy&R6MZFYc>c)!l71)Zz%v2A}9=EF-28DR$%hJW|pbAJ!}R3O~nNf!bQYgEN9x-?Cd zQ!V11z>x3eiymE;-}<9pwIxk%nj$4{R2__qu^SXjS{W| zPy5L!TBK#d%VwuhaGrcEeNzc&TJyr0cALWVDLRLj+rt4;LmqG6K5mPx-H24nzH}Lh zxrIRD5NTMTt-;c3-x1k4-|m*%JJ@%HsSM*Zal1^wCr_$+u6%3ci$e7yOB41%2zN!g zaQWq)jqA6B-5Efp9zXOFAjhsbMV6n`SE*iH&NtGf)a4+6?Z=n%JS ze(J>>YTR!96=o@XWwuo-NDEU{nmPn6jZpC+iOdDRW4S-(oxW3Xp$(*zUrf z$9W=ni|%6sc#R;bxssv#orFT+5_MEqPCEY(Ha#&b6oGFdItEv}OGJAsovMGa{9*r7 z1n*dkP z66pmzBpM9m_LARxbch3HM&y(fBrGH38Bmxg=#+(GA8N1T(`lpxLAc++nR{2`H`C3SH ztPn#Ow<#>H@SqEZ_7L=53@MD$QHG$8XmDz13^C4=Aw4bIy{N>-reQaPPs;^iQ#R)* zZeEl2f(SlqdndB8_)+~lOVwBufR1c`wq*f72kjtI zJ59w?$wfjG!7n|kj;Q@d1aAR*);gVc>=Gr4CS(3EJt@o<4m?GYbG^SU2nB$06L@F)Kv+z!XQwx){i)JYtLMr>+c;4I7e=v4$;kOc(I_yoJ7m*^ zU_>{pfFv^Q><&OCh+j8F@`QFGc^kEOGGzvfLLM4{Pc!>fIsw_Y2)RJMBh? zXvdA(v)0ZG@X*!?hi5If^95lGTG$86C+p$;)uLzyY{taL1h(O~gd}toy&*kz%@Kv+ z%1QA1@Gk*+f!7;OIN!esq=jg`?!+asx0pyIKt+b>JzB_H&f}d8_6K3gij2bCakB#+ zEzGpL1(>ZUQ7lJ+qVX>kBrg=vJ9jU??h&NyT%0f`ZL%VLt9Va-esZ|qZAVnPC@8053jF2JPKn`0m7Px(OirP z(wDx{G@dHJkQ}X9VAIm+@{%KMu3(rx zD*|(kdOLNu{}J*|;5II9FA4j^?21~#eV z*hYI^5k9%N!OiD~YImB_N&2>NJ0yz#o}Ci2+UFul#kMXw%x`5e8NCM=GIv0Rw={%B zm2auo28#rj7^)F5drZhHOmg#tPOiZBS3!rl7aBcWf9tzgI<|uNh6-F$4If5EJRrJMXl& z6;{a_u%&jxoj`q}aV*{y3iv$gd5=9V?5X=Jz7nVrn9BCR;ISBNo^f(NfBv5G(^znUlymX zTy%6R(*<bY{P$vXi3&#n{JM_@(|R z3;0MRc`_lv{nxuda^~hqtW|91h~hPLmj1?d!Ze7NnVDRKCat>&`U0grQIV>eC1VtQ zM1*OFXNZ>a|0CQ*^_k)A|lu8!}{OX4)S^ zco@I-Zz=3dA1Z^u$3onZ9oy{#h3RAS_&OQw{#5Y)w@g;qmwI|iyxAXKjzfw7S-clw z)aD66cp+`Ze48hvSyVZfhtdZt8f{TXt})%c#$FYq^DR~5?Trw{V+oN-| zevussX|s8Pf6hIgZ}*&pU9YgSzehj2Uns~xaR0;<7`04 zg{B}XIS>Yw+NW{*y55kjGcVD8CI*zbRbS{W+bQ)d8b2AsQ1|7-z6MdXZU64oVAl zzW_Zf9x=kQ6!uyek6c?6;@%lI*Q!G%Ed9)}-wF7~nas=Lp$NSE&b?mhzNP!7+QkBt zT7<`<&LMn5`wsSCNR-P0Z;uN2WYM00$I?kr=vK01cp?LA>Hy9<~0`Rw+rx96btUP;X42~t%Cva|jQ z^Cc10yl_JE!gHDyTDLK^Zi8H;+%6(Bf}O-V`-K1t&;t)zo`Ru4 
zku)h82-5DM6*V?i1p3r6d8R!R!d<#_v26-d$XgBeR|Uf`ua*r0`#=;eBf5FjV@Ni( zc6uL(^hc+n0~@%s9%EOEM1N>&Ho7c74-os;+7I2=22rHnG~J#Eu#n9GM~hX0UMmv& z=z=>?Qp-_div+@#^z|w!ADgqhD$cR%MdD*+Q^gMV4k0lVi!3~*xU&t`uSl&QYnue& zjw~-lE8*`&fQ{^M7T8_^#;XhFWu-uV$pc;%U#OfXT_H$j4#XAw1eRGZ5#ZR%OKhlu zFg3N|8OsgXqRWk)_Ozg$ZD4kQYdyKwULENbO}EvGqV|Mtj-D6rFO)Uc?Pc}o2aVp~ zdSVuD2Kos4;Nl*RAFu~Rpa2;cv|kIr2pI&7*{ed%O`ZbCeDeuWd^+|+fx+(X@0o{5 z0y#r~z6|PXFDj_9mPGLbUbpl&MFaskQS5 z0GS*dMLDdX_`TgO5J@v{?J~PsVNy3&&eRWcXF*yUy)|O*QDuEZp=$@0zFA}ALe35y zI#{*B{uOm|Yc}?VNEAj}<&LCP(7Vq0nL~hm^+70YyybqZ65D4NiRP&!vPCcSQc>tQ z%SD@umW_6|h|Y6442oNp2+l^T&$btYshss|HEIek3DRza>-csPpvSwH#ZQHMG8A8X z$P;0;ZH8Sb;2pw(L*%9UT9LeMI1i8ujt*%Gx>p#WwFg9@wOSTxN4ska%M{TY$1X5# zxL3q|(PlNPMFew7n=HOVOG(=KFMl73~?5QF1(Vg>JMlZKjg5GzG8zFqM z*Fu8SN^X2s*!zOcuc8FrM{$_)dC3(4M8Tr(55XI z<`MAfyP#QCXvanHvU^Cc?B<8Ta3-7g;DZ%bxvMJlN;VOop2^@0B1e#JV>6b+bYwR1 zNs+=-EaXI1B}`#isNbGQ$7P^SZ6}2(oVROnendY{N27W%)v%WveamzLCCM{|=)?Z` zEc4$LfVm^HZJ>faHsK~YR}_jRUtfN%pAK;Mj>K->V$T+&L4CUQwlja>;S)0^Sr>&_ zIYuTj5kPkp(vUP(L~b^g+IUgCW3dwKS#A<$uAf+4eRA}4*Fv!0hHO;*<|>2~b#^$N zjR(>l?UQth{o`aZ*iQc?1&PJ=vm~>>Mo@S22(~4s(Q{$e`Ym>wB49u=uQYDm0$@8@ zWxp2irtMpa_;8-EvjmTBT1wwZQ?LM*C$%O~sP*M#FHhqe`!qmAZq*Fze*_s=enzFQ zr2Fv;9ywKLD0%RVv?|6%~3hGyRj=is7n@=lAa3su^xdV$mN(|#GqXRZd zfE@P?vY85gLEiz}Xb53ei%Nsoa_AOoogjT_hoOc#g()a=*seLYH-NknZr9%ua1IT_ zG#In^eNo`Z?k&0szvaOB^jq66SiSNZT=<+d)Q zIa;~BrD~DYhBVx(J;Pc)iQn!ib}Xc!mToAYe?R6;7?><%O=u_N1A&=RJ*OIBy+xy0 z;g!h!f|i5U7ce_)_Tso+6%M~bh^8aebo)a(Q|8cQ!d?=l;|RU@KHd{@#`NJt z%PWL`i2_59Oq^w(g}iJ4aqy69U&=U@8u{lAq}q3gLJPGQfR?3 zA47BH;>dm;uU;hquwkNmXNDId5-ANO!5qw(+O#y33$+!3UU1*Y)ohM6iWEeG@(jO) z9f#WRg6Li_TY|W~A4-bU|9NndNwg1j63I`7pt`$;+f;=ag2;ajPOIg&dah_*TBdw@ zyeRAq>BmbG86Max5+$apYxuRW*8~{_h92L}tYPDLW&e?gVbkN(F;S!MqcrmrY{>*dXQ_T6U=f#lrl85?Pr3zGe{#@9C5 zl>$mG>x(c+_iUD>Z`&Ml$`0}RiFQ1|!{f_jr6?<#f}HF7 zHbr~Up`v&fJK?o#n_w6@bxUS0e@yn`-Mr|zjy@7 
z28BZ288)U`o=1uTpOR?7&Q+L7b933~y`D}QIr_q+x+q?>yt~*DVQ-SpwmLRRd?hT9YuR>%bKiO_EL00&OF!rio_^L5hqaWa1fozp0k}GY>d=VosS+*!%GOu&9Y@9GnDj8N{%R*{u zX|8;h)y=9!fmM4p@QXc(%2o%0CJ$f7L^~h?6-FZDvG%4QUjR>2q{`zyhfah{k&ud~ zf_7o9XQHAEZhS8baNoS>TK^6~AA7mx8#mb^iWZ`BtqjrWvECk1GVck3?-Cm;0%8!bC8!1;@g&75Y71SvkNumokHEg_Nd@)}6685@~^OBcuar-!pu^WnFVAAuqy%k(!_lQDM*)93) z^@D)`$8evD)b=3}7_{cfWzjDt6834Ddt(A0$6}EgtH>!&WTv-96lTl4g|=HkDy6OW z>2}OM5P?dXw@$=KdLe)oM7F`WC+{xo8;j^le`;#eD4CUbQgliQ+pQv-83-J|R;xF* zNCclS2bM;DVSb;c@XMtUdNX9=c(a39y~92hK@&aFN9HG}!7eO<1T%MU!#SnGUS?jh z7<(^HagMdWjT@ORO5r%VSnig_?9tZR;B*N=J2*oWMv^;*TduQF>3AuDFu|sY0((Ya zEYNBN!WV=(V2M0|)rSPTCZl)oo&d8mvW&OXK2E3CTFI099J`>{lOR(-|H3X8q<(*& z*(*fyTnw*zHZVXMl0#dH0%Ytmkm*hhS+Qx%7q&o1C#}YXTCC+7lDC0N?A1UglX&bB zJU)eYLT>1tAGVW1u(`NosJ$0rWXQAFJ_#{8@_eVnQ>8X7^e~KR+SatvexwNJJJzIe zg`39rppt5j3H!*&$uL6i5u#|>U%`*tdRre7MBZfO^vr%Q3XCZ!%+R%|P&fc-kI24? zQ$%wpBzjS@+Wt3WL7u`T_UA%BJnLMhQJadmw2yr2c-n9xy+Qp^$78kmfhgXEcDJDc zeW@@U$w37Ckix7)O!s7FLJHV?8$(2d0c`CPmMahq;|L^>$qg`oQ zYSv093r1D;en=3R!CCE)${3i`y74llxNe4hzm#0zLyTHYgp}pSY(5X<`wVb$F2yqIGVQ*S$!&)09NWXPSv1nBDLOe=`1lpXU zar{y`z06S@3hA+hlv+7fo(De^MI(|NYTp~-g~QSf7!BwcAZ+Nud!MHS>FVx^SaGR6 zC*(^Wf=pb&uV~9$fm~yW)eCyVUmJxRzWu_m0KMYX_O?JY4J8&k8FEt3m6(M4Af1z| zY?nQWe32%_5^X_XD>TfL9Zyml~}$Ni{KS? zLHfaKr_G}9>w#85VM!)$Xkzw9erar{V+!6Nmgg2XXK=+TF`J)77^*V+6KhY$8=ZEuK^T#9>&w?bS|4w@Qo3wYC0 ztDjG;DYI*zg2LyWe#kU?T(TZ7Yh3!qju%(gZ2x(G&DjHLRH`{jF(}ca? 
z$eh_y>m`cXo4`ci0|LH$*B77j0B z4Ou3@@HCf=;c;nmS%ZwgKQE%Uh&zFL+bkS@3*;10HoZ}^5tUYP97+te&jKpMy-Ajx zS56#tpjkM=ZV(`$ZmMuTCj|SBJiE~D4&kGdYwQVy+azur^rGhhHZJ5Xj-k3|&j>lY zk;LdnWSJ;*Hm|8#e%_k}ebtsqJpximVvFo+6{j*jEN#Y8YbOdBkpoQOhpmqwUDg}# z9IPl02pLk1iz89=D0@^Cx>TY^cljYXJnj`kVen~Tn2b)qdK}ViwzVSo#MxV>lNeVw zzYQhiO6T*%01zl;*sw}c!;=ozcd$l+N$lUhN6s8p|HQQ{r!W6u#9HX19 z0yOFMseft2pUv5$J?>V?s4f;@0v)W_ic5fCw*CeYDXEYjj*be`=(KsY+XDsW@xl@< zE$c6e219Ro%WZ@p6NUso`A&?x6UDqOIjPGus0_Uo0I&u{W?wIa%VM{u}_G?QP0;Q zmOLbf`zP6ak$v{{L10#B3(|yw+@>X%J`({xV2V<21ojB}N7teK?Kjz>H0B;-l^qYs z8JxUi+)kh1<+BAx5t)7Xw?v?$NG4)-UWip&b_ z{C@8f{y&DLcXIY*)R>8{DbDMQjtEl5iD+~oQukYJhm^#7^f=u=5n!~*YOsF^!j0~F z0{p{C0G6Iek9(0jIkG!MBtH{DuSZG+$$-Q55~~Ptsj4Y@+f^Cz_ik#)#;4HG$ZX$4 z5!nH?f5hg779B8qY70E3m5tLq8)!J{+|>i^igc z=~i}KeT94q_UA4Hk=j$C2x=QLH0hAyBfp8W73YJ1&B)odCZs*Qcx=nqF;*)`+M#h- z{Y$5Ph&M5I!851wX2gcYzKC&G$z`+R!v;;}*3tQ#X^)Et7ukNyL>=eOHn@#p| z08L9UDRSl%Pl%Ow?1kC)1nJ}ee0uZhQ;2NEywS3bA&UMXmgyRzuw0U~dV&9QJr`*k zGJM+=6Kzj$Q-gz=vgWeQc1T2@B3(EVeP^n}mbEL6%~@)f3sPynB>DhX3D6!?9@utY z7ouL?e?1t;vKm1u zLTZ38?)M?#uDSyI>a)c72h$6&Qo=42fl`iE)LVCj!v~0=Idq+Jt$&C=SiaE1o|!W? z^^8#@EjqGdO3OHe(1LX0l#CM-63_fjjp%*1-$`iS#fmgAL6w@v4 zwnh}1uxA~wr>uT3NDBYS20TdbX+4-w6gMn*qT_z+NR((7*PFHA|9Be{!t zB!GQ0_ARkl0$*5acI+Qz^F>1gD{T5V&Ocr|<3vs4iP88jQWQ1sTf}ciqwNr+=2#X! 
z*NP?2-zSQHf{{{i`Sg%M(v__?L6C-UqT=;Xh(qP5ny+I3L z2M@M46;uK8+# zj{QrA08_QKsk*TaPi?aiwM$pZdWZbScqSTGnD+hQIm=bhJAHp__y{W&hAl3hXsz66 zC4oZUB1t8;KBCZ+)>+$ZtHSyb)uDnOKVqpd_ zGVZ_$lf$bG7Y!z%8a={R3DD2z4MmatS{CkvL zJ&S0*SuW)%`*tDUL`2eS-8!NSWkW@$8hjja)nMZUX+ml9*5x);z$a*QZ&6{-i9(gf zmQ1(l3d6{O37Z$lM=K`T0fp&ksjP!>X1RVg*c;wyh$M?x*AaK&egHa&pyX9_rB?u%BT<6rFF>=f85!rt zpeR7ST0mw#N6=Y!uzVUf#`bo|oE)ylGy>iU6jsaaEAzm-R|zw(VSekvCA z8*j3)>Eu%VvvI;S66=esy0rln$K~U5e?VBgpgV61(Sb;|!q@bieJDzpC1uYxm#vlE zsNw|>eL=*cxcf4%5KV)(79#~!0zQoj&>>qc7*_rfvURgo1X`5fcO=_VLf+j>yn5MZ z!W6~(QVdWp1W;5WmdS3%ej-R*juZ*I33$e#D1O_og{en7yhtmmDr81{JMO?1I~n5N z7=!&;t%UE10N3yjWT!16%qMp5gVs(#v0=;H)Yi#(%eJ^Ew4!q&HzS7lSddomTqkCw zZK&Mq4gOjU`QWF?&IPP8^%oU_V_FvaMKxJlAd)^m_8+tmWltj7Dg#RSGk_Jl$qf zH8&kAvqkpX5X2*3TLcc8NDLZ$z zdqN~1TNQ`7K7u}%Gm|pl-#5f_Oyo)gqgOVjT%yza2KK!Yw(a7=bN>OA4 z9#>-W2IUtx9E0yGxJ=9F3|A=>qZyOQWB2&;0F zBVxHcKra)fC~2xhGNH~LdYK3`(Dmyc45(DtwO~!vNz5{7|g?_G&Mbg@H|Fmi=5fRL;GHgjl+jwQ|v&A^l5a)n`f|L{B+4 zzP8GyiU2FG@7U3nDeO(2i!0@P5cJ9frh3fc0$~;>Zl&A4R0H_BXb-e<=*FQ`PG* zAeO4Gw)59P@%jc}GtYh?%&z9dlsP{6(i`eK6%__b`FGlbqEK`Jt595F235~s3awH| zGZU96h|m8j$PdJxC3Z$lh(XDI{bV~wkeD7B)+xlGpgabtYF|OBi_GX&NK2zAB!Cyq z6RK=xfU)6N5GbI9uRGTVoWhzKOc(r?j?LeJq74MzlZEW1^v}N zFBN2Q4>cYMyY+6t1}m1nMAGj%%Mgs1ir3rKI>yWrCG(eSC)Fw( zjtLbyw-bLrG#ZDN2S4=Z*Mpo#TpShKmBN}C?Cv@$?A^ErQ-(s~ceOlup>eg^W~CEg z9(uMAMU9M%tj6tk!W5NiSZD7ltZ7v{Lvo9=*E`gY0~V473zG8i+*vkEVITfPQ7%@= zb42hTbo8pSN?776Px&)ZyhUBYMexr>Vpwm#(RzmT^ewU(QEq<}h0bG& zsfHV*=8)azANSC_Z3}r)D>g+Zk;Qh}M)JUt+@UeMM&U34SFY4!srI61RnB9W9un{l zN6vwt64rL`S;jj|DMa&6i%f~dxS1HOu$RbHfgaU98)A=cU24Az;S4Szjw?*tB_Bp? 
z{^N#I89B-8Db*%XRXNI5`n=+%!K_eHu&XsR-=<>b#L~*5-8p+ zk|s&xsOT+dy*(W$n&RA3=HPZDO_37lx?0;O3f08wTABD zuX8i$^_?_Kvx^jF#=ZQkU8W$#@RVM!e{L6q3Gqm~GBe~!HBPii3I_(r^jU??4O!z{ zAn^-XlTIMN_O$8!kbrLF@dNlDLCOa18)x0OfMAweRpQm|KoMvOl8oIucvLBz^gj_nq~3m?$m{wG9JRadNfV&Pl_3Xe8l+w7>ICSC@Ix&NjU zN6t*`(XCz_{T@ z^2<)&28oq7t!^@79KLDo$D-5zL@uVL?J^-v3HeXf+bw%o6o#*L*<#C7IIPV2C6YaW z0@3IZlDn7{3BcQ-JX_|#T5KgE(EZdpZpU`mq>%XKY5Z8s6rkje=qYbh*asPE`f>#CrC5}c>nn&w!vV`e91Ev>+8*RD>Fur|T>-C_V@uqw6V~^cJ%aV6UdZQH5f8>CuDSeokSKI0AKWzxGuQOyq`p4#gjugQ zj-5rP?Zo4eo&d8wBW4{gw=v-$UD^&=3HTWV&BbLqV?;+7hS?09<0^+#t z&RgZnc7fY=MNB1mZ@XMH_!+tXF0!8qhI{Ym5w-50 z(QVh8oqHAM#a-E<<5f0Hm{tww*58U0rtQ&jtVY0ZXL`kk$MC(AC@8iwc0={r@5$#T zq?l3urf6C@vWsEHWkNnyD4J#HG;xwN9_L zJjHs=Ow%?`8zV#wJ4OkF+8kg%A-YeVH`JVXr0| z3Fs<8MzCc}t-Y?WGoKS*G6VgvOaBKF6{H#`1QQR}i9-IuMD|b=(z=8M@kl76Z|hg0 zFb}wJpJT%Xe8#Itj4Ys4FkM7;X*k8j#74w3{@NCiX-bma`woS@l|7mo?BYK<#9W+K z;%fy-d9-efbq-O7pQ%1Nd8!o#sOgG)W3ix5p8i^5tUkoUAKtUuUO^g*?UH=^dx%5T zYBS1K-+F;K7PUcr>>LGY&R<4F@Aq#ObVlXHSsW40{Y)gL4bLkO3HB7C+xW6d#4{7| z-Ahq%dOWLD`FGjNP!9H4AHT8dgH1PYQ__ z8#Y?|KN0rcvg1cc&{2d_7sV%5H=bBo)yj48`o;DOMbV{}+9`);HQ1Aa`g-;3cZvX` zez0=J!HOlQ!#RaEVRE)G&cXG&fA$oxF*?^ww4aGW0emG(EGD3j8e3dNmMui)!u&*2 zHOpX*FjYtDE9cE^KrpHVZtsDCX4Ry(K@^~MC*A;tN>Fx!l$}5>WvB3Nfe(p+=WFCg?}j$ExWD# z&6cAuUBs|y!X^vQ2W>W3A|~205xn+(GR^%`8u!qf?~C_%ZnQub+D`@a+2Z$F?!YG7 z-J(#@EjL|m-4qTT!3(P0FB}%f$Q%xmCtRO6oeoOgjRItUC1X-YwtO(;^DO3kEG{B7 zaXQV7--jT@@U^C=}PS ztllnA*jqMSFLkaErsghuT!_IQ3Xq1Jm@w4_3i{MSagBTAj1Yfh>X;+b>nvB$>te4i zwaZdbsH^?W*V}ItZd>??l{(obD+?)()-ADx3P<1euF;vcR>&E{7!=9B#BPxotJYa- z|Ic3JZP97=;psaM*Kf7!_5oo)DZYp81_AG4``fTS+)tQF=u`gVmLnwI;xXnig}r;l z{MhDLg$Vu~V6~-8W>{^!XkHu_ZUr_yjXR-sY&l$g=MW)7T=O zxp4G)OpscGjHAd(MWFTg=*qJ3>C8O1!JK=Dd(B*YEauxeqG@L}r}6m4wc1t%{e(#;Dyr=U#gczO0-x>HS;!k!AcKxQ zgki=0g?yA#?16OjQ0zh4AYqzwuwv=K%2@|1SKAa(=n1Okja<-exAma}rsb79t9(b$ z8yH!+(4E6U(HJy6-ifZ&H`~8N@_wTDIUFIt>3=zuu`7a@$b`vRqIoI=V_f{3iQ?_Y zL?*5(gc-ZPjIpcJQ9~1KM(z-1Jc>*4m#gqaXPqBDzRK~f6X(0?RqM>g6BEZ-zjWsO 
zri~>wR2aTs6SD{*=>G`9M1C2oifx||Jc!JA%ZT?yuS1}!rusbVrmz+n zMSdL@S%oO%#)VWx`GyuW(qtw76}R)`-m%m3FW-;CF&~b|?@S$2=J$x8owH?W-G8 zYbMLL?&)uM9+Gs8u1v2L!N(`8GWUyeYM0+SW)gCV)(KT7=S*%LRcDVW)+a}#S0#PJ zV$o=K7ahYZ1YiK1oxo4b1VJBd>>kgymxW;n4JgEPuzrg%F3TlW$c91!Y6+GK+1N^R zvw{+m#Ocm*@eH51z$Z`RopigI>wNZ^8+QL42uqA~%rk1DiTLbNM4 ziMa~Szl%a_L9zhb>z@e0{G%II+NTQZbHQ?3q``gDTM*iQOk9kHRI5Z(jaS`5`=`S2 zfKKAZ(k}Tc(0OogtYnB?D@=(*mtdoDxaEuBOm(pqYEh-4`7(*ZZ&s3D&n2N8E;89s zCvC4V)!_7p$K&?0AT4-jdWE$p>}>XJ3_sd>di1{`3p|;}%>6{Ltt(6{J!Pq|FaHgy zCb*ui%ZcAtMeq)=pvLXnApspI<<}tFE*HT&T!8SnouFDWW2N01!iTn&TTEd-_3i_$ zAmotTVE-&D3NRbr=#)2HkQs%eC0zXpcJk0`o6?#}nt9 ziQ-xMXAUi~D}*%|tD<#pbjSdU^e_T5vSZSeu1EwPkK1ffXwczx6YWKXX=!28nvwQ~ z0M&BPe6TwqwkJeeWtuq$$lXARqyxkE} zMmNl3t*4;(p{smS4-V0?D~g2dfFO)URfH4i+aZ!3;c?4O1c<}kV+)qrCm}6XX`0sC zh0WxoHwf_7*(CyAOW#6V_4E~{+r@0r?XFTPxEhxgyfc{A700o)hp2 zF(|sk)(JD@*SGy=mmfTp;vr48Thq9xfHmbVVV^ZTTPq%EokXAksk%z*qOh-EBVv!s zp!R*Dc$2$5p2@BcnSP~c;zzMeIZqIxeXq~>%QgpIfbJ3aEN3fPTcxfxUqNcZx4Ugo z_zSGqz$3_)(zL#tzw3&%L-OSvfC!tQL~?9#)hQurpu zekuyRrBM%IRb7bm+OhHWz@815*hmX{maxyae$0-!f-q-jGB?i_rQ@*GY_)>v4{&x> z>$F;QGI)<|zgLv^k_~ynT~VEP1fn;U=f%VA$HHQKEU(xu6rkRdQ=Uhi#;y%%k$Ym4 zx!4{S4)eOIF|xKj+lGZiv^`Tlu1^RvJCN&HZ8|OoPT6n2SOBiL*vVGgA4G(m=qE;Z zgEe+Q6huc9soOif_0>=R_3y;pO|M1Q-JH3_Uw2{L>%+3&S?s0~eBft_nPc-U|eq&eI`5`@i zG>~x!P&u4pDlie+2ym-6mxjf5ZAbw@Wor%G6#su^czpCk#LPmCNHlMV&?R-$;O(s%}j2-qB7 z5>iE1$U9`*;(AeF_Q^@rcAvs@iv6P=InJ{oBETZig zvx3x4XQ`+J;rojr^0f;4n}VVBZVhCUr9|^>ZPXTKz)s9q+ee~PBbLkYz48wSor>6C zTqgj>B~N7{C{u>~O+zpIt|$JvqqIick9;%#NfMLSUth)hsN`-d9Q;lj}s z>urm|G(j#T?1PX$n(|mM{$Jv>$kgwQ?6{Su>Nna&B5EufHeuY)ZWE-ef$VL%hV;#4 zGhO$-xnjCKEF#}A?|XJ{vtNbWG^sNxrsaZ+!QRPxM@6!t(IPl&Q}wHHm}C)!`u0!e z7ks;I4GG%JeAe4eQRus=u04f}7*sc>vZmLw9%xlSsw9}1A1 z*Aub~-d>Q-aFJ}chbRMa(*Ouv62uzJad+fQy5+qq6LTUrcjt9EW$kem?MfaES@vcwuG$Bix=C=3j4qz z&M&do(n%BSE57niqG>prC9O1#cAE$^yc=@I+ZCp5=+aNMJ_2b018j^eXM_#TfRJ?G zf_)2Zl7Mqau9T%tWrsvlMKiHWBroLiK7bH@N+ffyzyOb}5|NIiR!&T=cd zj<){@(w_Vn$LMoEbfCxn_9FpW{mzUT_G5)9Ljo3_LFEfN12}2n!%P@n>Sr&*o)BVe 
ze)CJ4s-TZyQE40vk98tY9jBj!H40FJhUr)cAAWAF{ZnBIc<4d<_P+>vP52$fuk>$1y#oV!c%YED8mmq>4EC4&2%Xn9yhdWw4vP7iPmMM)^q41?={ZJFT zb~Sflde~RH7FVdWeWNW;Q+eDV zRFWxUHvb9*u&3Bz{|!LSUF?F7C=D(jSjUH6X9EPi@AvbQWP^mAnV92X{1fQ}?Qh0@ zM4>QE+f&5_48&-H@7NNu8blv8Rr#I^h+d%zh*ad`hj` zWItAzf^i8{ZNoql4nV3#Ws5&B;}~T(gq874{|Uc;+Pg3tV9C4-X0BU zaYbKl^=%oaF>>1lSmVC%SOJ}4iI(j%MR~t4H-z?tQTHC7kHtf%?#Jy} z5&Zi^Jhqi@zrps2MtAW{*iyC1UKRY}q8WsphyE{p*68EwN43r$Wf9$rpKA$Zt9)r^9 zie%NLMA$dhdDtt|!KE~v1U;gpJ)Oqfb#$~v!Zb&=MBJw8tD?ZdAqlB&UHlmchVUu$ zVxQhY&|7kH`lu62<{#hGcw%9-jS`(qBtjwqf0JD1i@(C6@d(G!FR)A3y}-JF#!V1LESHL&GrQG6=#v?tHT3e)bB zv*+6kg~cUl(`|-Jcm4NNV#b9hEW$MWwJlf-=r6zr&bN*G4MtFNwgqF4=!{fw_$z*^_}tcl9fUyC7sWKRvBy+t^ALrf2m|qaD#}#0QnY zS<;i|OUnht?#A-SXe@_1QTU0bSQ#He_hY?u@z`5#-m}Fn|B`1%J4SXat`MYXoC(Bi zSOA#iDYnT0aQ~8LH3Htl&Pl`}#JZ2-_$On<&=q%Rso#?aewH zD4>z7T^YHmtG0(lIkj&-v2s%DlDZS?H}Lq`ei?Fd%*DEY0Feok+yJnES!@LY3`M8= zJ6TB@c^9xck$ej8o)fnXg0uxiD-5LmStu&4oxQDadbc@h z{)x?_khTOKydP4g)~vBl71kKkV8w9}r(={j>@yLmnomHMp!g8%oUcL(qbqUA4~40_ zAcpMFE)t?)@f@t96GTc%EK8PALQkUcs<&#=vcS_9n7 zOS%171TPPV!Ew9lOt4RnP6#>s3BvaFx3;&53e#OKda&rSL5M*QBBoiRx$w02MD)`3 zj=@aehr*Pm9Rmx_4PSSd*u4Z3vesVE7lr8fyCKpLyj4`1$f`Md@2I_#>~;|to$e3X z-3o>kOIrE$62){Ckv1MJTW|L$>_z&mY@`?Wpr|xsPYuiQPJ1F`xR}o-8!N^z+`*~q!Mz_b~JolIP z+NVOE<_YtfCr-99LCQm`I&P~4m>o!sn=4mKcKp|nI#vJws5%e$s;je)cWAA)wXODP zwc0vbS6zUDVzremf?xqf9Bpp~NyxY{HwlDVOA_`FQNorPLI`_gu&n{BRU-}rSG{Qr zLr~k=rPlg>fB)x%+xMf!^Lx&I&iS8v#xoX3PWjhgCxLU1K5U_ta@0{Is`qqM^nz3> z0u0}~afp?La1LV23l#QF79ax59bv+1MboD>(Do|m-5G=s&VdlKx6uwN=sip5LG}${ zcqgm8MfR4EPik2*;g;!rQD{KdYp{a%4Z?7tt#WN!<>PIYD<$J~wTN^ZD;}5}4PbP9 zRHof4;DtrbOls}dDN6RJ=9;`h%Mc|D^;WcIrVG>XBU2j=Zz;DHLFY%GtTKdzUkU_W zm1z~XS440=BGjJ_BQU^YUp>={VPk=PI=fzwHs)~4Z#M~0CRE6fOFk9Ef|WLBO196$#TjB+Yn&;_PIwV&$TUrK5>S|^O!e? 
z64>eAJXuV%Y&Ic0ZK6@k*s-JNfqh?)u^fE2byZO70=5SRDD3&eZsxsFf45@gTv60NVvv2L zAT2}aN6H+he#^@bi;>HW`2O(git_SD>6+fvmkn*SzyCN@h`Q15Bs%{GpBW0 zEk<2ir>(I>$V8(sj-bngvXF_}Af99<3NeRe{baZL$4F zAbd3yudHS}{^hfvFlLusc9or~u*WiEo6t|bGQ?)Z<2FFys7btHjkCf4N0;r}nHXzh z1!-Zv4!z?;9GtoP&O!FHpmV#hgvb5YL-gU<+m1c4+TIHA$n1K#g?~rTPjr!uq=}J~ z@iNOf8#;aJf4kKvNTczoVef?SH8d2VMS>N$O?j2%%6jXoE zd|o-(ZWp8j3AP$GNgzztnl+rmY_cf+LE%Yc0%jA}*%r}!AQG9y!!QDYY>6ERd0Q9H zYhB!ktr2Sz#rrQC2=-TDYDNO+#1rG~9|6Lx*2Qb>Tq8D`i#UL2(&+aRg{E zVk)6JVm}Z;>ugRM7b7D(qIEi8p|va#0&9>~h& zo$)=QX+<8tIi|4BgJDH-sd%0U+D=Cb!zgTSv!8_Yz1fC!SC}r#iErG7b|hW^N47+Q zKKnXIa&3f$J3oMDpFcHX(r=y)a~Y8NT&9UsUB zBxaX~$mZ3#c9p_D4HG%M!`vcF#l6vz&bKllDsEr2q z&7KU|bzu^-eW~=!aXDCiUKDsv(_IQ6(+;N+mGX4UZP4FEp}~jNMD}Uc*f+lK1ac)} z+3a5Xh!9#WS=D~RI=vuO!zI4; zTahmURWXqyexDFzxo=%OsdaHf>*AR#5>9kp$(oGpcz%#7Cy_0~B$u6^Dj1&tal1nV znle_;beRJ5pzPWCwpd~32bwA+{A8>ag@vMDZ+k;Qnv+?S8{|)iT(d3Q`q{d|R3fnCr*V> zW{dqZm9sxW57;fah~#wX&^qoV>+D=X3i=a*hYG@tw(=+1$|pvKA?Dji5xvViC?Pb9 zV#@2HF|Z~1d1&?iMTl0j?ZH}X0NkV|?6Uw4Y&cMBU%8lM&dMHqOvC8?okej(fMkbzlZas~8dl;V8=q-v1Qxr z)E|4g{CJvOsc?8h&@C3qiT;AXDY2X4Xesi%Jo@X`aJnc|pPR!?Uj%$A4~%n%ZZ(Qc z-4aBKQQU~yI?)*l+&lLlWb1|KEi09->JJENdas{=B71WE0xoK-O+@cDr|fe3Pr$K~ zQSlNMrL!*a5@B|a{&s6jdZsuYlXxy;&mZmUGMiaWgG6YnagsNoOMT>4_$R2HB zn^+o`1_Gt`#-1p@7b2*3e)^|w^tvoFSq{&h(%8f_*wr;Jr;vjdPyk^kdp9< zY;y!DMHSRjI_Sri|%pvv~1>$ z^BPg%@~D!0Y_Z%gL^BW1nRH~$QkxZEJZ+RLU{(pjl$NTNm9|`fs*6iX@Fh7a*V;w! 
zd*b~|RVQkhctmD{&)bs6QV$Qeg$Sc7Epw!G>Z6gHiCX)QDAY`!_wCBG&qC?!|KgE= z;^!hzH`fF?cGcwoI8qufvFj8jf*-3ATO>fa$-4STML_C7c z>&GE)B9Uc3`iVzTXDR*%ZWr`VntL!QUf(MUeR)GJm?j8#$+5gxQNB$Rfo9}!q+AhF z-dV!u{d)m;i@YJNJ(kLsfy;sd`zWNvz%~ZCuOuza!)W9xc4!v}(w!_x598qDL19hx zdVZz0MCa5kmJl%<*)Rgjl8*MS{YKPaXBmlw$js>u>|``rxng`Qdtj8ICItu5oL2-< z-hd0Oi^u7lAzN#6S{Lu|O@)0D(sL~<9m&rGolURrM!9~IWnAUuN4xKdme&DmFrzrm zN(EtJksR7q3V7xi=MQ8(ejid(eKDH2Zv-fnN5q`}D(LHR1}CC|Xq$Kb)m|cZ+xd2- zfG^H1OS#bFLBuAA#sKUsm;cX+LUf<4wthux1btQNDQkgk!p_ZLUO;*yw_F32RKC$Ido2ali|IQ9mw>C+{@N=?>!P~;% zy(tc6y%5V=U3Qs3X+Z=b;Q1(7ow4|9A^GY0^YA-y4`+F zIGmLtAF9%{V2yesWI~Uu$l3(N8=1W;w-W*KwmX?;pU(uD^8DgS6=0lwv6B;M<}is2 z+36yXAOGc3@cn+dAbdEsV50q0VR5Nuy&KrD`$P#p@YuG?l1=dkLILa$YPnS^5u|_n zCT1dEJNaZ0I=Lomc-ai`8IgX$haofKnKI4s*8sU9lFs!%1pR|8i=O4b&=~@aEq0CD z(*=DlA{#LKV4ayLo}(X@ri$z$5%l@VYZB%`HpmL-Td`$^t-@dYgGZOLVcF0h6FlfSM8 zogw=qWMMQS7a`y5LKYf^;iiHz>n`N=MK(i9thXr4qvtnSKLue0(|a+dS8fk6!+&8J zA%;SGqTgC5=*=jMk7YBoUIfiKeE!D2f=^p@OHc<#iF^ z%F*%#cHwn|DPIyH{5aLyl_EF`2Q)W~b=)By5nXHRa+^nSwl+PKmV#Pm;GRoi+J&8& zV!Ky>?j5gNjp&j+6yjQ*XqjbCgt$n3W0Tc}xY6=Vx;n(sd3^5Lo)C9nEf2(h3UN4I z(k)1Ph>Il2PP?A^;50VHr%fxN!KoH%Dw?*9S``h)&(~AUm_rrU{0%MwUD< ztz!#Bqjr8}x018LmW4bBOR=Qd?**AA{qL{?3i@Cm6x2A2*Qm5 zY$Zj{8+9RGBIqmu{}pr=j7B5WJ{NWtM3Z`&ee(t{5H;)hFkygT_#M{rE^9{o$Lg6L z5^DCMy#Amd4eWBmHC7R#bpMk+)+Py3Ne%`^zVG057}TS}w>ZCq&YgaW9e zOz)3>74}Jt8!j}e&$?M`6vInuCjNd`L&p zJ+aU>D?oNxIfyMhE$D~$+u1Q-A;w-1$;Z1irzn>rV3vKqyO&s&ws)>wAvcZ3 zT@>%{iHS2TJ*2*-E4tt10x(mmdLwPBkh8ipUX*65Lu?F}!v*$|5amc#k^TBnLHcy; z@p8NPCc;|mHm^-?nIwr@A!}6g8cP>&h8D_* zw*bAc_vcrk=Vo^Yiq37?J>eQy6tZxk#a*tg7la8xWIx}whycTeX6LY7-W8&mDFaHd zMs7z!v35RePlf8lEZRt&76m)qECO8zJoTYIJ5s^rL&9NHm7yDvJG9 z9bn||ZoJhBQrO_ThS{MI#c!52Reu&_AoI}ourq${=~~OD+dUziT-9ic6{c@26N}mJ z*V%tXpgOib8@MAn=hl#K#v^uF2;)c91}p3hR!NyH6!y6@GDn_1 zo)N)W&(>xZqP?$*LKA!IP2~dtxccn;G)}Z%>;a-Qc1&Fh((6MF)@mLUk= zvgg?&3JgwUrrV!VvE0z}g6I^uB{?$=>F4%3nVl;)UJva`Q6do{>`VK-05+wWWmVQ+ zQOr?J*{0EP?6#dJ$;GUW8}4_kLB7 
zPl65XwnKH1kijNol}??Ik2=R)EP$l&9hn6s_KdJ|l>L#!Ar6T`W2G0yko{ATI#B?xuz#ge z%gNG)srHE|)RsG%71hq`-wS4q=%e9LG#L9;ZgkO?M&yNt>1sg1irb*LEx6PQ8n}DWd(`aCtETdkiow0x=wbdVqtLevaAxjTfiHV zHHz7u6N06-01u*0CXf) z*_UtkYH+d2FHo6XB&?5W!kdrUj|J!m1@%d@-a>Tba07Zv3e%BfV`Z{omDL2ua82d9 z_@JP-{SNsAd0*JSw_%Z-74T&d&wG3?-b@kjM)RiRLldc zK8P$qmYREq*+c~?GnSK;ZL0-nd2!_GbwkJ`%Srsc?G*GnM#}!&5n(zumaXZj0}1mb z@Y{rT0SX0dY|a(Un;*;Nm)ouufvVWhCG1uKswyd!^_F6xu=SMHoqy z0uyzgM$E1$U8-#<*nrt z?ClUP;j!}zcR+_r%{vSD*Uk}CvZ=T;Q`ncB%l3);Rj@My`KS-n;Y#S z(L)zyf{Rx`yI!=83#>?aK*C}DMGYfRGLqe7sR(qCMvmtES}CaN_bwb|^98*6JBzr5 zTO}OMFdB|Mq1YYvx?-Y%A7$@`BG?`^*k=ONr<)?0Rc|*9hTuOCXAdspv2;OZ;;%Z}dIh}!$O~m!gD|Z_N##byo!w|pE6V##wu}-+_XUv{wOAn(r2KiMw3k9@S)90EQP_Lm zvjoG$ZNjvPa}^hq{}J-(_a@$24%OIMzwq*?oeno}p})hjM5A>@sP#Rhu$R~AhMPKD zwJ;1}=cH}t##A(C)++mBh)!;%X_=$6$LJ;kJmW$o!-fYSb5Ax;z-M3rr^2TV1%}T`XQgR&u_(^EJ7anP%(YG; z!3s3?FoH2kh|i+Kaqsfvwo3Fk?P1YWP~ze0MMt(Sx7C7lpZyqSx;F^1&hUlF z1;Q4-%9*xDM6a}E5pqOtg)H*NTwtFB6wlS$UhFQ?I$LxYMAD)#P~=`DQm?czqQI6T zb0^5-;=_Vrb&>e^4&GxhZy_oS9~Q%ILG$VY`_um<(+d+T0KEFUsPHOYmatDkmPj`M z=?44q-9*y2Y&MTM_6;G780aF9`)28E$Y9F%ME2el=j4sFi-nn8v3)CY>{#Hz}$>2iESJ0PZq)7|-2KkEOA#E2nQzZ)fL>$a{?KdH7zuRr1f)t;IppU)!KgkeZ zd{>a#j+Z}S$3wcv25}ATVJc`|hLgK~0^ZQ%`ua#; zdWjW@D7GL7ldy+`!aUje%6zu0>=KJB&Y0oVXe-t z%ev9dz6S#A>UEc0s9>lf@;4ynbMU`Hbo$G~ja{p7v}Z0)ZkrM5;;&3LZIWxPL5lL0 z6yY?zO3?dMi1dF3Tl`Y{Ohj)U_DvAW9|497BXJ{Q7Yc-R04G4mfDK4d%9?i~tZ%uZ zcmbtl#idp%Or^+I$kD^Z0MWXXBK^7mwZt=y>jOM7cjEDB)%Lm|tIS9Y87Htf#gF0J zB84XCRdT%jHRQoIc@^q^=iKY{Mc?W+yHF6;9G(v9LR^K17Lr3$kU>9ztP5^^On zgDa7Rf)s+rZ;mY%phL-xTOyg;&DJ0a^|fzaZQpsob4I_)8|`{QiiI!tb++FMQ4Y39 zGi{}Sw|XR}Hd_~Rx-UK35YoQ9g0XEBU?$^k4oPKBooD_M0t~+QF1txV+KO#_$(Z#P z^ocEH1WgT1ZM#i$T2jD;FDir~6YUzr#(S2a4-&?8N^yDcM4%LBre`B$%`*C;C{%g- z?e>O(bhwz;kh6Z}sDRS95LH{x1 zLXwM;O(7nY+=-krpA;0=^|5fRP;Y+@30gKt3(QUgD8XSS_g?~Joy|bA{l~xd+PHG! 
z)M`Hy43ko?4cjb36uOjPF61ffBO|ZB_8VdUh3;3>+|ay)CtZ;kN%YPWwkf2~#JhDC zKR?aFbPfHQ!y9MW$AaE9P8ql*>>oQx(35wL-6sOgIsSO{iMfxyw|Tl{h(dE1Zkry$ zU2p7U3l*j*_?fLlcu~)fD@39;4*FZUU|%mt%Mh7HnM8m=Ph`hOCv0y>d1Cy|_s4Iv zzY0>8UaeSSwZNX27Sczr05=QyU?w7Ydx;y~E~2xJTYB!2Y`Q44_2|}hc1pTuiEf~3 zqmin$%S3br4CPHS+lGihjzY{T6e;Y?&Xfs&IiUn*rYsidj)fQ7kfF7FioK^W9n5Xs zm2ZC$po7JkB{*e2H3LN3*lR?-#vZrR1BKH1a&dw_ZGk9WA5xb&HdYw+ks>!X+GYvT zv^=a7RVnNr?~wksB$Y82+aR_^IQ)VHL*ncbCfU=X(?0C3mDqCvv?#fCeza4rvR6f+ zQ1lnFi|wEgwKH^cusYWkAdSHbvz?vkvD_AHW!8RIP+6O+5h=V?h?W+Y>0I z*48Cg^QOAd{wRv~y-y^Ob5I1IJjt2~sb}`LLrEBYN^YBOopVU$%_xY$vKxfG987_- zuH7sGQ=*JzE^faR($rhV=;aG|lM{u!&=y-<6m=QVMD`D(1?fTi%m&+~uov8y!AsbG z|0j1&U&QJy<;S-!*05d7X5--2Ox6{K^x%i>0KEYgKPPz1{guk7=A=&C~DYK2JN zlmmMXtmKhwvnbv%Ojo7bbEy>QFyXe>-V_B19hueHFe5UqgJqpf!MqZJcJHN%=*8+} z-_Iq3cOgHz^te<6uY;C6Zasv3c4x$kt#5$Gc8otdWwT`o!i`akZBdAl6bHgQ>jf!U z3uW|Mx;Ml}l5?xg*?7d8od>IJ2oTqSDX=IgqYoxQX)y@~)*%~W|EK~llqmYHD(^T07g0!S~Y-Y?l z3pjuBVufrke;TsqvdU)bn#}GF+1snfW5wErhwPEYMX5kI5^)^j2}s^r9u7J1q%QHh zIze9pqOV%}(uhVQxGyiVHGcID*;H|?Z~IXFzC-!Ywgl}!c>vpytTkC7l6yrOZiT_L6GT)p!+)g`g85rKctXD1+=(+ z9dcuhJZ_@|ya!ovuJ(A9x0Rw%RdMq!s|ksb*Q>HM0kAzvw`~GGy4d%#gMvCd$i!0Z z|8rZ?swnRo%E27>3r=G0!B$$CFeM>oiVna1ThNC&xv^fZ5Y6)9N)Om z9#S~mj%~zp9om>$nK>3uQ;f5W6R>Wi>aAWhXU3g4XWfy4?=PNbZwoT>W9c{yw-1FF zz3ftcF+LOU&T?&pNTQuqc#_gX=IV;=Y*A=d^JavV6sAj&x5ow~RxTC6`!KYaL*ca{ z0W*%*2}ZNC2;Q{Lc!az`kiw2F9B+LUrX?fgH9S+m3mPCdT_c5kH#Aa06p>cIViBFA zL-3=PXRnIjANAmVcIJa1uWKlpnr8`m=ePkc=Lz6EQAmzzEZ^S`gv5ywjzAX+gs&dq z2A(r?SyExwiB6S!A4iZ{uT=d+prQGAFPD7$U}5!W*P}Kp#L@eqabEq_h0NH~T**%c zR;WYb<4bGpd4*|ASu7i6wf93(vgV8}-TS!xR|F=~kU{pX4O zVZ0veV%A%TZXld9mK)q*0ZO-w8|eE}dFfWIr3*8g5fk5Gd8tT5GD>ZeaNrx6b#caH z-IkCgmW7w$y#eHxSDW+^myf6H&|JO#aR+52YLHG;m1;4&eJL9pB~6X;Cjxk6UEtrZFamN3Ty3? 
zc<=%Nel%&2+)c=D*e;Ri2S2Nt;kyHDD_`GMQDILBhT+uPiR5&4SW8I6Luxro^XG#8 z@gPCZSu!F}C%;*5?h9>iVOmtk)gI#T#hfC?iNvSSx@5U6 zQJB_^$js)!+@21Q_9wTlv1bLn;$j|Ki)??$n$JG)M`c8Y`H{neYC@RAI(_2p5mO){ z_z3nx>g20q9nz(h2iDp*1v@@cCb1;Qx{F4Ug)-L21f3&HOAqguW&a6rG%dGeRE~Y; zH^h0{GBTwu+F1mclBo;aZUW-E*8T~0Uw~{ps}P)jK+u~$Jf4lTtT1IATB8-^&>CwI z!H1nE0=*Hu7D~|g@MCgJP`ecD)cud`Qg+$)#fOOU-yce=K+FTPIP9LjDY{pRyN8%v zA{-bXDOIU4^b?I{M~nD$>o4eiMBCI<>S%5c9Fdz z3Js8FkSX={rvTw6(%<%BfHIxO7xV7`vx?#+sHlADVe&e!=~9zjBTR3bSC?}CeTxu{ zkKSid=5MrLi{fLZyY&r|f+>#z(P;xOkc}A79WBUsFsI};XSzKU;@MDnm)S}I8d=o5juCuYh^3Gt&W9lacW+E)667C;w15!~)Hsi-!nD!oAc(5)yP!@J?d<~2s9Nf(mZ=90Nykz?9%{vC@-;_%Sq-V z$I(L1Kzi=N43g+nmaLm2gAX$W8HpoP*V-|Ky*K(Hvr{U-8ib_>m)L0np&zjz-jw@XGmHiJ@1wf_wKf7?TT*~p;^KGRZl^9!D(EjkL-S=edQMWS1NjFoN+j70?DJ7oh6YF zR8?ead7aD@Ua2UmERY+CTLM5$X_oa7@Ya^3l@)U|DG^13EqR_AtWs2@lC&(4g(j;B zNm%ardUz{aB?4@h52-qZnL0%F8)fw&sazdIOVQ0g122J3W;T9kQ?I51LTJ?y?LG87a$ky6WE5v1*tTX5wQma z^x-$Jx1}N6UNzM=g*;S$V)+DX6omWS635t?*pneC_YT$?k|wJsCaWh~TZlS5y}~|F zI4rf{v0#qNu=~XVR8d$89QGTSg*YXhACE9-wr|~$jwXZ5D|Pz z^uZf~?G<*G_2tt3?G(J12dp+hhC;6B<95nq5NpxE0d|FgR2`|g??%MZhKWMKrI|eS zjViFmLJF9{DqhL2%5qVt3PbEo+|WN8AUn8SjNSeKNq>Axy}csn?MxK%3;m8TEy2sf z&DYwwQ@p4=j7af#dZVB-qE7)9Z4i7)Q4%G*8e(}v6b7vuf(olccIqmNkFgd(4XzY2 zUJ(dC+za@MbXjvyG&+7@lf9=PRUuHA!I#gk(V0`Bz;g<_jl*P-^$@{ZGZ5PeTpRWk z#hH8u76S$dQXiHqOKrJ;I>yPVn^gN9+(brPmB?k_X#=K>U8$k*OWVHy>Qq)xVbLR6P-*V|Zysp$4z z_LJ!Zy)(S9VYJJ-i{cEzsbyv^LSA-_1L&Y zCtYU6M~b1B#o7F(>gVX&Gd$bjmGySB!v0evrA@ihzDFdlzQ0Ne1(P_Fq$wG!#*h;RX{QN!&)_ddZ>tl9^{-_+&u^QNKrP1_OJihq<4Hw1R}U^b zSZ!xM0;X?hFWWa2_PFdAjt$Ndrr^k18Jc1%?FS)s5zd=0R@mpl@o8J_QbD?}eVkrL zZI%dL8oyt-^9y+|zZhp{GegOds-Z;Y2a7_NvdWrQr`a%}aNf|?*sY|m5Y=mJt*o_X z!7yLTWD4(15xmBhomgG|b4ZW*1-ftFd-P;0rB zPX;!&gp_nea_4#05>h5>r`X>V)~0#_f*)L%@+0xZ#~^vv2i<0;1;mXQ#(Ro|ys7f* z60=7G0b$x@2O3^0xA`JCU+?HK)V2!Js%yG*v5!MkZgaJ)@ICZ6QGA|#M_7e|v^Q6d zP_Ld1pbn<9geP~~O(KN(fGH;?x!ywihh*#-rKVJ8*&=w&gLyp}72@f3=j(3bOL;jJ zKPq#yL%R83WSQ?(Mbge;g9q7Bh2e>w*mabm37>|%@+8g2eM%K6Xm5s&F<%iNYaw?* 
z4=U_mBNkHgc)fc##I}{Mw($znLT&!oBTf-?FDE?;3o)Nd;T)WIg)(H}H&M8EVX3_o zD0D@J84j$pPef4@btK!w^J0bFFb5LN=zQIcHavunEUdO)DNLmZib&ZxOVB?P8c}J# z4bhZciZ$2Gf-oPZ&{2A~PXySMnKOos$|pi6&yC}GUE`dC4jw$9|Db+$wy@9t zr&gK?g~=CrO@-kNc9W>o%^cNz2%gNWw@5zy2EfR^g3jk$sZfp(rqmqlsiDp(L~q;5 zr?r)@Xe(c0_lp7t6D+-B6oyks3K!UK1t|B8)%rfIDnM-`ISVZfFgR)39w4JRh^wz1 z0pfIBm&yMU^fujzGf4YPm^xA2vaif1tmPh|c6a9b?LeU{x0!Eei4yuG7mVk2MK3q+x@(tP5?n`E8on5x-rV0jm@A=_JcE%;reB&l*EAsvg2O)FSBh zvQBNcHevWPy#KJ<>;nPncxj40Dt>xC5kZ`03TN4!3qU?BTCaW)AiEKrz=j8S_>ooZ zlN;>50J(x$C3|&({Q62%p{Ufq&V?aKTSdK$CO#pk+Bxz>CDsK3P49!L4JNh-+I2l* zPbujA;0is*jtbL#wi)>7i2l}S%kiP;I)+!y?|46Z%svs3jzl6vi|n&hS*(8x?F$P% zPf|YS)+D!0k`(D_B9iCC(kfk)pB^A>Yn{5@EI>}^dtRJjKM_<{CejryZS5)%*hr8( zxDhO?~KHJN$Ade84*`(E|GgS-urkqo={QuY@4}XZj5v2} zoj$`>gapa$DW%Ck6cjij>T8mYmERX}sB)A|M!`Up1`>_0;4qKpn# z+vkGt>c#1ND)>f5r0x8ro<1kRso1&;`IzB*+J*@Fs6+-hn!=m@aM9sJZ0~N{ps;f( z5y#*4R$;ALkxp)dkWU*rU(ecEZ%v|UcxrbiYbPaZC)%r`sPB@U-fn%Cc@cYO#1aMe zpb(LV=j=LKyUu~Rq|F#PmzXJ{*ZktRmTeewL`^i6m$MT4 zlys{T$(z4_9_OEJ!p`?-t(a`Di^94$Fwr!G(BUPtaTg+2EjTr-rF z*tbRTX7uKPmfP(N>n0i{Uf$LEE2sggp36g`l?nQfH1^RjWyw#$I4kc+RJT^wYx6?( zF(tNCK?b5XgSA%Su&Vs82+9W0X%lK%?Twp`EEsQp5XJla(s;H`r>p?`sP|>w-z*4+ zd6m)niG?%k8B#zUa0&tdtV8;P>0YZ?U~hHB<}dwHvLG5&%VpWS6ys(2!wH}uEK+#4GWa$K{oO{ zG(2QSf{wlJqEwENB?Y-0YBq@C{cauCXpMp}bMNDLE?s#tss~&7?*!N@rvy7)cA=mT z>TrZo66|{IXQI)4wnbEyE<{`T?zdYahCqS16^%-gzA$-jZH(H^>&fUD6oMIdlH;4kS4y|Row$}Cu(q?qIX7WZe$NnG! zZSZB78@)H$w|n2-6Nf2ZiO%{MBKjEW(qK&XnQ4Vgxe5l5i$I1Z(M*? 
zz79t_8}0O3qG-*L`bQ5hUSypGsR@gNe3ZWo=q|kpT8HN?Ri;jZ3awZ@U$C=5ZRRu-^;&B8cuq8V1em zMUi|KjpXr>-AEk-AH)5#I9ZLe?}_52aHlfex}{PuPbGgnP?Z07NRUKhQ7Rv=nz=SQ zg|qNOU<-xeRRRxWxwbaMrsw5w&6;W3gy~0YY~21E;#e_IrHt7(*APdeHRjQ-v%d)8 zvz8Z4j>n=Gp-bchIhHDt94QshQIho@aVD0Nmy}9HZ6`E)}eBN zr$)RXMmO|P2AVAu`~tg8*{Pdn>r6ah+8v?jV>=dGeh3fh-@_hOm`3nm{ch7btS{PD zQJ668wVU4E_GtV1c}I55vX4ZhU%CyqzlS_9DDuiS!@j$YXnGdyc2?MRf=bOH*M2VG zTqum^me>O!vhTpb!)=CurrerUHcw&qcNP8iBQp~25p??*of2%F(r6XnE8yOKj%^cx zUNxZU)S9pQrP=FvUy99Jtj;`aqG)1=4zoZ?`1Y( zTO>ZPRaEMOvzI@$#-0}RMvIl423``yzd7DQ5qEw)VWJD`3?S|AZ@+Ab0_Pnu(uUGlLE(LbF`I&H8*zTMbC^a@XrNLZnC6yhyf&g7Xw5PMPMDW)4#O|YAEldGv&CAlQy8wl2?MJezm!Pw|Wfr^kJvJ^Rkil&psoFz= z)T91CLISORTQ^#_dP<;0+@&o$ut8V3_A8O7LzW|9%*X&EsgH7dC_wUe@7&pz3hGmn zjDha|*NFgcj$(=UUm?sBy?3epTdPy1Uc>b7{6UD4=K?Ms%} z-3r6pEEJjTHv(R0Uu1^Z#*7n%-qD_kv}LuGr;>0TQ+IH#Efs~k+^ESbdEaKG3qe$l~sHd7?Hqk&qr#-0d}UzMp` zuq_qji{Kc`ony5S-PkuPEzN2JU^*4sQvtBI$g$@Hyn}cQgq8MzD4LK`a`kQ0KSk02 zHqNqBw|O*ekodlA0h}WeRiQ7^X!i(smo)$E*TUia%Vf~wW32d^{GfI3}fr| z?d=YCVJl6F}>{$UH)ZRH6_NHLi-C|2aCS+IH2O$j#Y;rSm z*$&Ub#F7lm1A>9YdL?n6#Yp#bB-?~s#spg_=p|>w(@Uf|o3NyaKK=;jA#G;uAs7AX z*=LBoCkRV$$%rF?zojCFAX@TCfLw(%vYYtgPSQH-b4&7y_pK?iFN@?~DguWX59j^w zBGKqES2}Yr;Cg9*RG5yoLe`ow8o&(Zf*VQ<1f9j}UPh$z%hDvSFC2xr_c0yXd^ zS8Uk=@Em)II9z^6$XPf{Dm(nJ+EUTH2JVwE*e&cG$i)l$JAz(eq==bgAB*C1B)Z1r z5m#ob>~qoS3|sQGD8cFn;yZi1ER@fn2t8KO~F0^w0EGnRG~Q7T=y+x&DH|<2M0M$OofU`h5U#4mDeRrr zgBDg0%0*!~BfTuNv#@|T&!&nP&CPYXH~piaw@vn66JzP|DNmm)De`(T#ZDK+nUWhh znf$g0jAVXu6ThpM3w3-@eju~1sY3QI=cjU~^%aFaoxnZS10nxNw9m%c>p~13@{}3& zu>cLkBVHq&_`x4M*VT9g>>0v`CpXy;g}u9xSE_~fpeWA5QaCzVP%}bKKpze{ues*u zRu^(6x7Gz4IO{~Ad%R24%O8TR4@nq}$OpwPdqETm=Mf1T4)$_@DCW*!o6*(*>t9;P z9C}yK2X}uJQn^2T2CVOD;}SPKAU9vPi|C0GvLQ4)B$7xX>z&I$&kDr0@-=PcQ#sM& zJ=D&A*4x4wEDxYp2RJkxOJRA|DZrk5p8Pfs7liA_mMyjs3Wr-dc4a58F1FjR6a%x; zWv4qmln`8%jS+-9dC$)0gXfc<9O97(rw&^j;$c>zAjanO4Jud}cI@&A=2F;@=r4K_-2 zpMm0uzG-_{G|C^UuYvPKxiB*(vIdI9xY@QLP^eKVYyA8)h!XBRmT6=C1dqssi 
zsI1_?N5~nIqDS%yuGQ~<_q|3YDJYAlaEqfHlqZ#kAeiZ|LBAt!Q7M=N*PEJ00OiFsn1a)Vw}kAyS@ zSv(46*$0B&wl3FU#pAESunY5ZtL>{V0O(~~WqDiW3cEAFNNTOZ?iJLRx9QMwZpk;; zFGZlG?9XkA!p<_jO57dU@{qf&vf=Qe$+k+6mUDH^Q^*=2jS`mHZC6N%9y3ld>^VU% zitj=OvHvLwKgzF7urIa{q|UsOG1gUKntNpHHrX8<5TZ^r)!V%aQ$8zTv5gMtm^#?~ zwkknqD%yHk32P9+dBTlio@1)}#ckG}%G&jsu6D|cUSnIuI>{WI7T}@n@TAr*67+%x z=U3fF$omxr+XdjuUH$DL1sToaxVAFWgnTtfR%0Ml#zSnLh+Y&DZMC*m*qKqXZ)aI? zF4|O^MWcaz(NRr{^T_^+2-GK!8g|A@9?Q55iOZ$Hc_PsKW8)Sa#R|-&g7EI>ikbF+ z!rp|o%13o@sX(khq(}Qta_*&qzBgg#h?!FyIEdsN*uP36WHUvf&?CF*Y@Wh&0WV>g zH_5T(!qmTi8UHM`+5lUptZ1D)!|DW`+b{(M$$AlBQWi&vg#A4geK&Vcnf8sBiB?|= zxgWeD01Pc>TW104iA0Ogqz* z(Y&A0Yka*^$OLE5*S6=BfUc&=>K3wNp9mA3tlOq-)S0gUVK1A5lnB5e5lO?mls&{> z2AFKvh`lcxCFtWnAP?(J)hUeCiURvXNPc+E{9}`5*=K_EDtfZr#Z~-|UxmPI8!_0r zDCnH!RMf=tg9y|t>-pF@$Pjcx{@j=~Vj_GQ77!!`Io=u-#~@(TD>KJ-q)N(gBoY5ta+ng3fv--YlRGO(tjU&jC@%oxSaZ zkc%w_4BX^e6YU~}!|Z>8U(n5#-;r2$h0Co{ zklOP(7-NDam7L2qZpG{oQ8Z{eW9m4!P=s(@yj5P~QP0?Bi$bzstTADABG6^NLf)5R zwoO<}m*dEN9ROi3`#^x^cE09DJNuyLqW3aaW|s&PH;6;RRSMIU$oltYxi-086z0#M zQPw>q%))$oEQFKQJ8e`*^2Ru;RM4jwDnz`!%o8E-CbBzeXB~n_Fa4=ca;IkqH)UtYlH(t#6&ndv!Pn112)lj#R#vN!yaFg(dBVR<+w z6}i8bIr9Awi6OYS-74Tr*H4T#RMKJ$N$@Ecw=w~rorCxc$_0Jq^voR13Q-cd-&~qX zHJsPUwSulz&Ot$sO1kAn5hSVwBS;}Rx6(S`?h=GQ?c?k0dv6f-&U2Z3y`3ja{XDbH z;dTEKL6RnLvU;1s^b?VZ)ix}EeamR{O9EjCw!gfA2jCAyp_s_pDAf;23T+pWSrz&$aOSzvub=HVEJvMK?pYAdg5tC+5h`-N{oi9Wn6 z_L_r)ytE9RNrwx2Y3MWZ);dB2Z);j?-x4Y;6~$Rzl2&351<0kt?w97-!c_X==GA#v zLt7Fk!MX6p-L_m5xHJ~&$v=c}b~Co!750)xW{$EhZv**R^OcjapJ~^LM2b9A>DWKr zBS>G+9pO$XCggp2Wi?OwV?yHAsgFx_vMxkpfd?DES+-V~@xiuxiR}=eq}b@pvFH*A zITw0Gq8$fB5F0DD$cS^rHv8V6pwJSeN+k(&b%3lM^3HIJAO(cgaE%QQ@y#poG?r!e z33^j8K-p;J9objw?$8zp(we+D6VR3lvE+7~zh<>nOtZBj()!%IoRRjb01R#Awe<~! 
zsXCs{gN3BM|0)74%|PiM>!5!Z^vyz6nM|VR+u(N~`55!Iqo3kmib5Up=1Sw~UDW`{(qVuRQ_EfWlPgR*A6J92Wj!d8jyKOm9C`3k9L)QRXV zkfqVN6KrjYa=dE#@wH91Q4~7ay-QbnMPb^9Jb+vh{zVXWrtvz+v!s0*k{(+#+fGS( z(UJK3GYfNa>}*lI`P>NRMt;C^t-q)gpKMra4~205K|O3-2w%e^)HH>C*s_awN1qwu zZX#}$!cq#{Sl8i38{1KwwGmxY6kU$!o0IakU2eNYqzjRar-^o9E6|6fhom!jm+m8y zQplXeMw=+;m1E?#&~^wr&p0B=b^ZIIXd0mN_CZK3%iKj=bY1yp;%Ea$oQb@7ak^)D z?YPVQ}!akldwQPW8ZCusG-_A-!4#?j9F5LxI(~x+F80Is|z+DB-po%4lWb$9`V&bYWswl z4`k*6>P;c|5m^H(N4fmm!w_i5;C|LgLE@wj%zt~ViwIuSoyZ3{?&#tMI%+p51x?G& z!L0f{0<@W1R{L!TcSZbSV<@I5lbsAJ_$FcMVHZa>)`8t3s9Zfm;JkaLy(S8*L%>4& zvHByP;P~`$?@z2hzI2;??I;v)S}$EyT`WvPn8q1)j{ptn)Tz5QC`?IA>G$U~*|QwQOmdo+dnW9`b`6!x0Y8$)07t&m?wDYgRt z36PfMBDMZ`NG=up=tT0R<0s3{$>8_?3Sn`(b9Wn|uzx4PO1Aw{q>~f2UhkeZCM44j z5OqpA=^>G5b&m{`D%fa0EKIj)#T~t@N{BkT-FS_yP?+u>p806|j7hdSMD3f!17?WI zEZ`?4%Pu_ODUlP|fL3-t!DxsVX_UE)iYU&iPTg)?e7Y!R5^piSltFY<9#SJF#4UGY+VG&*rjV%yH{c7{gB8M-IZgU#wyoXbo~) z;=V;hO+Y-aqcYztNJ}a3PNV|3K7A#Wg25}SofO$;!b}s1q#(dk-?G6z7YQ!)A3E4h z`;ail0d5o`HCp{ja>)3)$lew%64I@+u&QNqgFwU}L9bSZ>98jIuqX^?!6;U~iU6Xk zkVjH!sf0Ob+eLvb{N&U`Pliv6022ldw=evKAahZU665w2A*!W&ov-U`-xBmzp*)}G z*!i1Vf>Sg5r4rBz4)k;EcLH?t@UoTmcnEjC{zhA^uos%#y3HB{oz0Qmcl36q*ftTp zgUOn0tdzDhm1{IcEA|UgIO;6+b}BKB^h!7ny_cdS8yfVecgkN$Pg7a3rbKh2Yaq}y zbkjJ*j1j>*#~Ejll?l@qUN+|0bOHa^`{p2|&dU3okL0rcrnPb^FIR9t5|Na^;9@2s#=?|x)9YH z9qmmD>-*5h!I5dBJuiwcAp`R4+P{0&Xn-HHTLqn&eRJ~k41b#_G~uQjEi0sj57%AO z*~SZsuPbqss4yJpDIW@Z1-y{`v!s?{&xNF2*joNGgmVxVx$qxe#9_2Of2uH@khMmP zc8n0DnTXHu`(7&4v4Zl#kLcjvLf+A31$Np$y_}+wZ0oLY$2psyY_8jw$R(os2L69; zVUS4?z2i=Rwjva2-Twtm6F}0+x-Yp|1fPc4vAFdTq_ZuvT9(-m0iO%9Hf2LYW}U@3 zfwLZk%-ZJih&I0?K02dw(r66vhi7j%JaenPAV^P@H&lPlzahcsoY9%qSz#6d+>@2R zIE9sB(~DDEwp$mG;Z|&Ky`D;h7_&P|Y@mYP^gK3xX#s|kvjU9Vn^xL`sl+{z2`Urx zapVF>iYUJkg_dM+MKe)hxKP@>F5RXGI2Xpsh2`VIw4l#mYf{iT&w3SIuX7(d&UUA= zu*XErTG&?z#8Hu%dQ}9Po~v76WS9axQCa@}{MGg+L2q0?ewX+nPKZL|`rqBdK2X@( zqYGF&IOA?rC$ScV&;KCqJRM^OB%%oc-L?PLkvAp8!GOHD)f*Wq= zX1f)pz(`~XL1jDZ69}}nrMhJa-nefPBySmNm1k_h+m^Wr6`f2(4G9$b8!P@{U 
zC#*vHeSux41U|U^WV&cT3J*Z*)Yc2bq>MPSQm+PZXfrlx4xa?PAO1@~L%4%~cE7yE zzWf;k?@wB0tORwGuZZF`v7JRtQm7}6)V<3(wswFH>R%unO> zW4|CR=+wQlwJGfENyM}}w||D%_D$9H<1DGQ{Tbi#thS zT9~YO}v2~elrH&$Cq$lIaG`>6drB;d|e$FDDcp?^o`5lmZd-x7{K2@I8=rRGVE|(NEqGY-kgfXgf-8PcZoo& zhE$Y&B%05f%=EbZOVB4+RvvQQ9}AyUMv_Sh>1IgN=)x}%LnqoQr`r`F{QkV1C&MwG z<*P*3bf}opR$iz3g*~Fs;H(($ug?U4P-dki1-xbyx-#q|VQ<)2ytw`F%MN)q+InEQ zT_Z?27$;a^KNFxSGF0|!g;6VsbW>PH>LW*g%OkH&z-Sqnt28jVFf+kZ7e-n(ia=d* z8&$i+q4SB2Q1edtjO$3k+H?m$QM*Ewxm>!oDok4NhH{cgl;V@%Ra&-5GMS&P`aAC4(tK1`2h^WtN zge_5!((*Y}*fIg1uV|<+Y;7WhZPl{y0Q!+=-h=lSj^}37PWg(LAv;!fqku;zrGq`+ zDRie#NkYDha+JWQLcgAO+73ZyT+bLXr*8?q zz|e-}TzbjrVQxs1z-1p|h+_kk+&jAalLRS}&q{_7n(Pq~XwB7~udt^Sro7g1i!fKy zI<8830k4GuTBlFuY1<(!%+84+gxvM3UK~R|5Wm)W)=vb6Ym9bEIRY$Rg?T7~pw`Vt zm?x6=U?jghyF>Dp6)kh^DFK+#led3me=nrva(x>&c;}t&(bP3S25Bx4_M)2S23>%1}&p9496dLL4pZk3jcw!7v>bFKk`B+gehTWbOJS&cNPIWhvhK zxSbGWH1hfAKM7F?#Oo*hvNMPZQwdv+1zE^otQXB$&>K;s4Jnu?%eMwW>JP3C_zi4M zCFnDBnEgS}XJ0e7YG%(Ye?Ad0-muiFAa8s52E zciW?|7u!2V+|$B7jj4vs!XHI|_iW7`vxBMBctX#$W5V92LN36L2gvzW?@u3wtl5~; zw;z7}WYW$zcC~ClR2X5u>-THF1@|JgZ!1--w*vQgi2ptoa!b$)L9{0Y>FrQfL{0%h zKJwhMMb?Gh5Xrlhz;3A>7pBX{9$Ri-{03nf9r@}lu`>nfAX0T1c2)psZ1wDB0WwLy z?3NHwj7>XREI^+HNBMm!+BKSnZot>T%-V}w- zz=~hQ?LDEeN=hREWfHsQOlV#K?|wThSC}SaN-5h)1k{~PdD7E}AdT*FeOH?oqK;JW zZlAv0k|7(OCNW*tZr&(Ji&*%xY^VU`v@cv?_oOn8lqQgk50SatW-ktr?)njfW7Q&1OjWqgTva% z9kDP2JUSs)ROo-$(LkXMhqfRYw86gqZO`MXK)F^R?2`idrm?6i&k%)4!EBbf03rB- zcsE}41Q-jRO7^hauD5qXEdICS)*iqct2xem?<~*v?0k-0=LvWv52ELVn81ai&=VRq z5X)QLQwj1AI^`$3PsktBesi6G&jIcx`92~7Jx7Znt9f0_o=@cuV$dIp626RWhc?>- zXM^d-wVjc~d02qjW6kSj?{-3fS@=bzmoElL+j7RH+3SKd{_xzDhZn6yN!eOOp*alb z?uMkAev#hlm6IRsrPlK<$k1Ia8JE4x`;MTE7wgTy|hkT>y8xiqjL~6&196I;{)HZnJNE`TF z!t8op-E5f(dO>^+X;vf*&$&vLy`tX=!qv9&rETT2r1U*4P&8NZs>Cj2yD0jaw^!N@ zh5fge4}#Akx+!OqDLOR{=w)B~9zn0MFkV_@XAAp6pWIp-IYq6F1j6;#eyG^+V~&oJ zp0uKy_#cVxGqZE&o9r4v#+o%QKbByX8z};fy8il3mZh-I>vV*%`8CcB$>{_xxY)^* zi9m4@f#G^>y`Z;|CzO~q2{RP@;&D~5GbB!K-XcpnrO2){*DeM(1OmLueKS2bPqVRuT=?od6<38vkk&D 
zrDa*mEc>GX{b*f0!A?Dwuz&Adbg%*JAmH;QZSNY?n!cqJG#tz42-4dHf|QhMnGC5J7(-t>w3qAh)ONFade!lQb=c| zo;FiKTEjX`18ufoV76=vS(OMdlhb&ntreh@f!uoL+18M?FlX=l+yZ+tz<5S;Eoom2 zke*;x)}gaLS-m8(P_xkvgz}J%;nwdb=Xq1eF_LS^p9R=fxmfnG1`9eX5w(2uwMOnB zb3~)Hkx?eS2~82f8K#pRTi_=`DM;Csv)%YxDnaC{YPDT>K3SM*5Aw>@J%o{Tw3`%m zP85{j@7D&30Mnj&%mypy)yOadnp_znHAg1&*O2%aErL&P+!EhldxbU3dnfFjWX}ru zxJA}WxG}xp2aq%^>Kde~(PcY-JsIuutKF@9LN zG@c@ocOoD0t2~<~f^$*!Ye!3_dzNSnF_%eY_D2D4Hx8$A?FC`lJ(N!`+fM%>$R`)( z1!SiM>wk8?s1&e&Hbd}h0dMJvscVlf+hD6wl)^ZMeq**>6tCp=*x0f5d`Q7B2o8I6 z1N@B(yh!BGtE{^K9pezTo1K=YlNv`^VZZt|CZ316Q9t1zag9Q#eLGEX)F@ zjtqXl(~)0&QY1GNv4+jIK+v7BSS^lG%hkt9M0cwQLE;cMt{3BFaRHEBK06gJYY3_k z{}yDnqyZCrR(Bvg`JJmF(#@R6l>PPgh)4RGq}uaBt`fc;Qj~v36xU$)9>lY)NHn-( z!LH*ah4!f+{e`iVVBfn2zjN67W~~xq#~g zI~CSDe*vadfl^#+dw$?sN|yTY0S?dEmXEa)oNCyZ0& z4pDsj5J$_f|HKIxY2#FR>Gh8kh~dSzaf^K=1@EhC_F^#x_xN` zyP;%sQbl5v3A1h@Lat=2dM8+^Fh36o>(tJd#U$ixYD8o;j~X`Co>Q0#SgEpQ_m&_- zGkn`6~yxKeogM{sL{C=~k9 z`jz>%U4RPP^J{q)uxcNNfr zmKaYOy!Uf>^hxy`U_}CMN#GPMH_M7epp(HX=6Y)qWFTKEL37$#(T`GZ}3^5|Q#4)BPKA?BX~a0*@Ap1h70U9pT8G&H{Ms+9UhQ@(L+a#ozttkOze+^;9OiO z3N^MD6xm3HgX!hT2gBQfY#1Z&wq^xg%{^~GTz98WNvribaa_<z+ZGjoOAEx=N8+dF&HZkr$y3xMN;JSMNPkO+R@ zabS(Q%~DY)GJ;IF-qH5>6v3Ne@JRW#Pr-l;l}gveeY(q#R}bC;FBgUO_UhHgu8YTx zN2VRopqoW;jSl>hu8rfYTy7y7gc(7$rEK=Zau2C4GlVxHLC@u?!spnQzy$af`%HA# zlL>fw?C5AYV#$7(%DM;+I82{2+X7W71V;%?+Aytn;Y z$bYZuxc1`x2IfMd&+_Io>@Ag+D@cdo1=VPITY-=7Tt2&FJ9~&Aweva4wfR1-WBra3 zRoiW~&zl|M!JqGnZ4rU`+sb6dXPeJDXu`b{Y`XySb+ljw{ss_TusuF{S{S+6GjDd$ z%zE%ME3lu4LdEPw>2|dMqlNx;bMC=AOp}yHJoxK?Ki!p|C6R;A;7(b`&@D2RCqQyIm=0 zaNE+hRW`zx1WzM@H?)TZwXCr3{`LZj&ho{v9|Q)q*VufYgC8ymJ9fmw!_~p7$A=yQ z>d5}FpsRaon3u+H40Ls4F}u*N_8?halkO2=q4m3Rw+!6WPY>^Bcv;= z-cy(aSZgvf(kP0~dpnRIC=D1CrAU8Ri|B3wvO`l4q(3PFoyU<-g8fH;*_sIdJ(Oo( z#uE=l#}hS*TZpGksR`VjY>*Ibn!soOUWMI3A~}qT3U&!5`NX5q;=oRo5Jl6(scg58 z3Zhrlt-%-=AlZX-u9Ygvt4B8l;(IbI*eHLYR;D=Dd|VyD_8-AKa!27P!o0dh6p9xogc%qq=<-MP_C8ja4TcP{%0~Zfd^}zXay*+}ud)@s z6lB&bP|sQEA-N(jNOua-LgebB_L6`b4CFF7Q5}pEqSNJ-@LdrY9dhSPv407<{`bbe 
z-;~>c4A!ag)t03&jZHy`IR_u$bA_oVS2wT~0_xaK7ICe;=Akwv1eM+uZn6 zf-MlT&jhGD2`zvG`?ruAjf0qHwy)pbjmPQRbWt1+iNO5$nJ|r$FYuo$?Am>8?>=^e zpxYgXJ;t`It3=4|6V?69r$+K56&m$L^uD>D6)NniACbu=^BG}RI4|;Zi9IZBDnf=L1BMx^yxG4%4>fVp!YdQ4DeVlM0GIR^mh57 zhfK?marfG1f|`iB4R-#o2vd(<^JoXoenjy5UoE?zuC~wmMC4CI_b9=3i9q?mW78gc zQIM7oliS``1t|TlAw%p3cMx{#Vfr*1C`7J6&92^31>JI*z*RokvP7VmNH(vrPYO^> z|C{^Tn?56^{I;`g9TjvF!p90f@WHX}eNkP*#n{8B#T@%gG&c=A>qPC+L10%Uj%awr zyH*63APx6E_A6mGPnr7{TehGZbXjoZ>hV-OLn5DNn&o#j(O#uf1^?o3|_xq8Qo z?b3g+OMIk2JX3pIkgbwmEbnB)^E!JI{I5~(dRU9pFsv$KV$@Evq8iZ&;F0um6K4uzf@Tt}+ zMAuV7iP^HlBex_h20Ng%ozKj5F>#vz685bfPsqDJssSeifx$Bc-dvQSdLH1 z?=Qig@Day%t+lxd)1TnREuD#_B2Y!!7K9vZtC0H&=zJts<*+hDbRW;5NUdeVz&Ybi zNQ^?q>YW``tL#UDQh6%krK#r7s^IFh$8Hjt$~Z+&N4P#@LqwpTr#CNhVTw4`QruBo zYIph)1Bd0CZIqy!=(NcGQk&?DVq721%YhMezX+a(Ajh-2x#>z(W4Wa�={?%op? z#djbC?~qM9BVOK=glRnCZio|5rsKZ0O$2Q(nRy|*@h$+3rTWulGpxTL{m4YtXQ&U$ z{rlYtyH+Cwm2CG2Gq|C2ZjP23j~y*tgIwXSbfHx)ih_&aYl?AQKvEATw*5CwIlZdk z^vVMJ?(pt3GAqL~*%hKN+%$63R808URU%Nm%<^Yif)I_%fO(3rq|N?H0eZCFYimWJ zTgP@**>;5~j3cXd`dL;bg7d%7UbL-C?0a>4(OP>^QD50hH?o(0djvE_EFEL3TuXi| zNK>NW1bklqv@0?JiP22ESkUFyL5WXt$a;$AIx_}lZp3aC!Hvm*Jt*Dnv^-ymOxJN| zDHf#l@6)1EA07bvw8>`--n20mT4T?N!qnoj40e_WAspM==K}6l_vKoEu zNa${26LV?I&BCrR(KZLkHRxU~(##tsTH@Vp8wkk1!;)yK;vBbuE5=#FQ5-GP1M zua6;v8=Db1T)8e14pv<L0A3DvpH?-wuX52334CtqOjTw6I5;H zlJ&Akl*reM)pAOR&4y*392U}X-3HN~@MhM4@n3cCRthtHsoJ&b0x)pMac<8EU8 zLAqNN=KCU2LoWVcY`lP5?r4J220s2_(J1qHdBM>FjKli$32ilD%k^;?329MI>eB@^ z`PI+ZdLJHvhtB`{Jk$6I*_S>KrY0D8>pPA-?$?wB8x@|g2yRH;u6?`4MhVkEoZcqb zMgfg?nv|`!dz6zCMzg6!h}vb<<8KPP$(%4PoWp7Em?#t@KREXDdjS4x%;RdnKjLj3 zg`7gWMscqG;qsb!hp;+Tv&te0GZx2oVdP-3&50+)40nNT@zKoxeF%ru3A(O55Xt7X z@b_^7I}aDZKZ(E}Y$QjJE>h2sIP`N^R)*=)Z`6ly-~Jem!UCtyrM$H`N!ujuolr-9h_zw*;a8e z&XMb)2@u_a2;_}q!|}SW&}3)>TM5=fG@94GZl#WO69mO<9Bz3EGJKtdl{P1y6H6mJ zMsOE<*5_o*q_6v~Aj5!Ee6sy)qAQiSto_vvx-KTl#(Hl>Q_I_RcBg`5!@yCR{Z`-- ze41_Gqa}giBA$jM_#^M;QmCA*1gs*;z2+ucAjCBB<}e)%jgUR<3!5Ujqh|%^8EF&I 
z->UYBfzBGuJ3cWErXRMCJvdx*WS(I^FX(v>*tcgX>`KXqq*_lw*OSqCd~bUR(|pVn zCfH~JH~E>^+($Y5J|CSd>9pSnFra;(tFcEFcEt`EKFG2K-Dv{X)xkS_@qLiAXUc1- zW^cI&RKRyX_Fg?Ui|EYMou_JA%kgEaj)Csmtm_pmd3^LKqoK)Gudv(sxo*8ELL zNsj{C98W)4FH*l3*5vO+R8TJlpNODwD1!g+vj+iG2In};)$K|_*A~Wco(+xTgZkJG z_sLoLLx$gJNrJ4>z`#Oaxi#Vw#LCKbWP8!W_M&QzWOmfUzy`-gJ1yvHn<^2!A3Ox6 zWeE{}0appoGH(5QUoNtnJPc(ghTtqdB=HSl6k9EdD-7#JiMC$YEx>~%)+WTb z!x@kPUoO;kL?ou=Q67Qbj%S|IR+?>}__$N+V2A#ibTp)+b;|Ez?;~k<*~8rph)!>-M-g8H8XNenon?Il-7p3Al6PA_ zQE2|@x~-Pvb7R3kZ**as=?NTc0^5)Fnh5^1P&HFlI9OqaMWhaTG6AjmPC@ru<5#(i z2bWU&!Y3KQ@o(%fyYLZ`_$@{@&1?C$%PtnxHO|={wrfOjyx`WX<#`K{|G{aF%UXn6>ay3GQV$6};k;(&8~_zI@`NlhPyXZ6W(V zVU~(4w&d9Ne&dkVc%EG-~2ANjp0db>B9V6>Ek=+ES3kvmwdd;CAXC$QuCH4 z(87^;Wl6N-BDiV7LrZFA7=IXc-)~+0IT1cCi2|-0d~)?dpDv1)8@3hPVcO=47Z%z!4^WvcxBm(RTyp%BqE+39Jqjt@eJ26B<_X`!O!LnLsXDlp zVeNdG^%uqcRPU6A{9<7SU|KkuiWCeM`b1zZhYotc)+nqcuFvAG`z7Z+lDtcJK$z2X#HM$NztxZ2bUlfa8Q^pz^JiyMzX7el`x7A@;x5LDr;hu z8*QSX8!av?7`Eh{D=eCu5q(2)6!|5Cn6xv?J)B4W* zd^@7B+h`wo#4ZVeXnt%*QSg?!(e4n%70zu4TV|6)aLv+k53F!c>&mvd#;Md;VLaOy zG#bKIDFRc)jUm%s7oZ!uhRTdyz3iO4A)*_otO&>7m@utO<_FlWFtZW#FKS19R1B{1 zmkLwxz5+z~zcm@)hkwJtnFlx6Peq}BNz?NX2ks}NA8X^`odH8~fRE-Uhe+NXLRvD? 
zYp$~ig04mT*R!AH_-N*968go91ZiN$oL#m|VGUg6GPotN$|_swDeXl|oN06`5rvt* zXQH(zNJaSLodUmUpP&Y(ZD*E!AwV1X?r1}mnBjlY02XF{5pO0433; z)3q~gv!EM*jD+NDj&i4bg7*9p1jy_IpE*+kPoE07lDVOz*pE}Y3meRW94P3bC#G^i z`IWF=k#aW7>#GeCjm88DWkUoQkzW0JS&72#k`icK^DMtouW-jjcMEDzW(s=4XNMtp z&IAtn>MCOO#rj)UYNjYE8|dV1q->*e9;S;eOt=GaINqivhz zpYwh}w{0A*I9&Udd`wl?BcjsgNG>vvH9iNHeh_7~!yd-Un$WhepGGmt3uI(63)=F*1clrG2IP!TEU?g-;$NDFhh((}l z?epvL!fmGo8O@W=&aiva2vhMiG1Y(VK`0ppYKj1vkX^3EE6of+8pY{6Jv=4N*7#^C zDx!9(;p@kAFqsI%aLL@&?vc@1TmL%;1y6Ya-uokQKB{3AE`AO~)bSq6_(^UPA zC@#a)2!@s4^(maINbRCu#8aeT%+@YTCkJE17A^bO(${OXJ4AHFV1fnt5n+bmSWBr* zQxgO>sIhkj{S{*jo;>zA^VFZG>ZwkZUpkG zC5}R|37ME^NVik*1cBMgJ@zkOLRJLEbB2x0paj-$tgO;$TgaTh^ygQ&D7V&8xW{qw z<4Q~W`KqT|fdF&CfI5qCQClSvbqB8Pi*380`@uhn<) z^3!%kCTU!!Cdxl$rd=b7HkA@=ZE?!8UYY8oXV6icRjQYKU23wxO zTJ^OEua@|Tch{g1?ISqEPqmK(T%`wh99qZOA)sjQ4QIibvWTysp&YF+D;3mG=rOEX zzzszj>geJOeeB~=olCJV1YFte`75=L-I3)=gDD~dXhQ{QZWb5a$qG~L1b$RGmM+9{ z!rvi$@u*D`c2y-sa`B0mZHq;6rN`>ZVs(3Li%*0dD(%7R7_fGc7$1H{ILEM4f^L;Y z64}MZJlb7`%Vepd&UmMds=mD-3LTn~o59E7B_Y?Lk&=!)Da;DP*TD?^voGTX z;3z}}MsJRG7umV3{P>=yZKNoS0y-`=Hd#O`Th7b!WsV9vZmID+T%hF@CQlTav9A~b zSNoliznn+_T3Y3GHd9oV!|B=?w!()yYq5x^F!hC_uwmZxAVHBsEU9ffw{ffRCA?R^`}BroR_7rFPCzHM zS&&W)i$>WN-u00PYt3ep?hs@G2c_CcA0vOBY_X>V-G-8i$~2mZaRT=t7`WfcB?bLB zT2LmX*&lm|-?F`uvJupH*T1^OcKSG)G$=dCo)@G^=mzj?|E7<8h0i>f*KbXA#Y(q} z^KTa<>v*K;O|fJ}c!xHP@M(#Z+~ebM_yBv;p9I~y>4~XP`%su+?AWo)E|^A`HJ1QG zAw5t#zep4}@ClhyVdk<9FB6R%=@?uYqA(r81Ye~CT7n$X^|`m-DprG0`0UZ5t+vF& z_iC1%+C0}b3zA$C4;%<UNT3nxn$sVHLNmzFvqxWB;bE)=QAz~T1Q`%?Gxu#xo^BOBT9SJ->i0gH z%(B66e-Wm~yjP^!Px2hdkkrZ50<1Zi!hmMDtbX(UtaY{({yNc*WG zDcf@6Nn$N)14D%cwk}?h)cLCCTd63tfqPt0%%;1gsor5?m>1e_Cp`i96Ne*w0)zwHd zc)m}gn*-h2F6h6`fiZtLa_yqI)jbqeuwDANE0%Vjnz7O@6Qtc(&O&wn0U?)|vj;vJ zgc*Q?&$1hC5};E$6CG@LwZuO0sWaM|rpW5TuqRwud|&vir3tzBkVC~B6lRbQCriOS z=68ytt4H>5TbpC8f>hI1ilFNr0jfC=_<+Du`^TO@i&OJ-Fu40kQn0MVrxXimjy)s_ z1)wn3V!sh^ok+_LPqAEKH!6cNc=+c5(w>Z`86{Jxo8uT0V-bg$uTwQBe<+T|%4ZH| 
z^(H~;%EcI(wRw;p;o0GU0Arb%kc!^l_nrc|#UE>Ej5W@-%SEAQ7{|ot6;phN2o!bS z{!*j`JV=zakOu^)Q;aQG1(||cK4>@1_EE7G)JE#MA)}LnNW(TC3mnHMd#@m4AuVti zopyGo|4diYvD~~&^zX&uCIrap*TYyHS!s43bGahI`bh*_Y z@#%O`%&>@$J6*W@bX|!R3A%B3eLJp+%WRD(ewAU%-?qeYR%HJks}%N@S@eE@Om6U+ zCbiyH`>QX{De>x_-lM?P(B)&a)Ow2IYM7GC2jB@|`oSK&-DV0n=k7YMZbGjCZ&w^# zor2LODYRE94I=@Mj1xaUn;pJ{OzAkiCE$Lk!&$L^^DxjzEwhgW`B>haVgIL~>-%(c z@&maONyEMb1uM~jvmEo?bjR4m5_D@COI*p|XrMV)Qvf=#R7^V+#Z-9@Cy z+u}Fkco6%RT_sH0nPUWsxm4dG0u$F+_pD8hCk}i7&h&BpkaXUsu-kRtZm>Uj1P_xh z%3p*S!N6VkD+1b!cEG7)6$S$CuZgH%rEVI5#pa*PfI>G;&scM6-cq|*km3{46Y2P5 zA(D4_T&lKwQD`@_ET<_uY?DtyM*{1pnYK$5N=)HMJ~d*`3DT$t#xm`)nJ!LmLL4lv z7xYU&5=0LRyQ#qUdl>PRG*Re$;Z7EKq$^Y;nnn-%|4}Oz(qK0)@f_ntOqH>+ zg|V`&HbBsgPL4SB>0iN&twrK^Q2OKq+Z@NqQ?cdREUe-xx5c*R+lzv3aHTwo_sa9; zKw)Y|NBF%A5^&MMw>sa(_~!vIO1Oj1E}%(?#&l?9!%^ z&|7ei=2$dSaf}h?3OtN#@FjIDoF`+CjUKX^Mx=%8lpupMX!HmhJ(nwhRRzLZd9u=$lM;1*z7By3X!UAqFk zw=Hojkv!YuF+A$Po$kv%`{{LybjWxoo;;(iKH1(ArV|qpY_*G@0niDwu<&*lm@>Oe zB<-cTm>^SY*LeaX9D0nOh$V^O%|}VqkeA8XftVwKQri_z&h?ky(~p<93J3N?s`dr} zhKR!o4<93iT-OekQPqP!ma0;-Eh@wS<#9MFQJ66aUT!e^D5x7_*~YHpDuz$W?9!#s z=^SFEiFU03`7w2B*DLI5O4f6251$aeS+?CI;2Bf_JAuPiy-gIADi3cAZpeu~2i(RA zOYw0u^ghj)AY+CD+^potv`l+k1h*B4ZO)f?R-A&ws8m80$ICcXvBUNW`mtEUUGfEC zMhZJ@Xdb*Bk40l11BJrW$l680A68zw2=1#Ch?z^gKw7`YM0d;7TTr!2mO;{g{%uD0CGjKc?TUr=UJdMXQe%Rbz1IXc3M;L}WBO^4Hs~3Ny@yt6+uQFW~x`9l|+d zvL%Z`aVOVr=~&%rs|B@>>7rg2JRQ~9MiKp2^s9DMj>qY*HOfZs8-;-cZolQEcm3dX zd2(3%0Fh`#LUJ3IKmqF1Dn-J0lZPE;%R5##ScOlTg0lqMCg3MWHlgNN^;al+_3mPS z_zDHaClmNjQRx3B=;&YM1(OG&jv|SD0MVCS+q7=3!jRbhxdfVx9^EuweW#lCf8M1`bhDFNw-4@HV?9b-MdU*&oc8}p1V#_5)9~=4Hd>fI1i}@oE!#(velV`9Y=?00DMO3G z-VkCSGEiywQen3$q@+n7zP<{IfAy4#FdlL?1|r$(El*L@f*-EV4W%~M!)bYZJ#qLX z$k3#>HAQW=fPQTije$mHp|$yF1`Amq0sXAzl-WB9yOqEbe6pPo_G>;NNqUQR&T3y; z_58s6P_dmWihc#UhhONU>8_kZvTT3qqo;+r^^(+Emrbo zTuFDlJj3RTO55S~OtB>bfZhO_qG=VXtq3YXW5Z> z;$WO>>}zX@ca2MopaE%TiQvi`g2Xl7gs+R@hI;}!;&y&KMKaIWNjY}8C^U(&NlBP$ zR}1QQ!wY|3AJs9lxpU22yH(JCLhAJDF8+)?C@TF!PJXv#dXUk!F=F`wG@oh4O34Bt 
zF-X#Fk%H8b!@jXxVdg)u)WQ7mRJK+m*9*knDq-ZV_j&IhVND8BT`IRd``l-y?YPj9 z#T@(TI^yZhiM=b;=IaDWj+bO)GJPBj!&WSQ{Z5cQn=fIrSnN_Fv?qL;K)JZW)(g_q z)2o)*|M~De$VClU@5+5F%tky(fI7Ws+lnr@Wxh-nY!!4n2N5*bO*On-bvF&F5)RRhv}Yt89%RJrpy~o)ch2@L8-4?m91uz*x2y zRRp`mBD=H{ivKO7T7i%oYo3QrHRanXQ7HaoQENv_q16iN!^T}iHrDVLgt-f!CWajZ zZnMV(wVO9o28?a~&W|ADOx84?h`D8YB&+v$JP}6puoRVIPl&{#dTW`@kEhQ<{GXdiu_*rY+fWs2s0|ud zYkNg!#IO*F;JgQchQcucihHNhIuumD%c_Dl2FgSK7Ll=`O?Zs6b2b859H*9-+4%~) zZRfslHdj;xpoWV^aW_3|lNEI1%m*+R!_-wib7#pa`?ta@5uQSjC`h*}%D}FkX=3FG zGOGz`so`urN)Hi%9!$w)Kb$P!ijLK+it)U-Af9~`&tkPcLHoKD?dzJ_*DbfVMPaxP zmU3|4s~d;4`#iitp%`HQ^Lgkg-<0W?r0ubZJaqP0YrR$YFy1KaRfXNu@dSX5VAPI@ zY@-?DZRwn2_HxXoOYlNAS(POPyK?ZT(k|`1Uo6p95 zU!Fg{^e_;f+hyM>Cn<}8Dg%Zwe{1X#5#6{aI8&ULiQ+07j~z3+R+t*NrY^R=9vrSh z$8&^$o8VskdfUBmI0@!YSkNup(@kqmuiJ#**3+{VSi0y;L3{o@EB9Gsa(_;-{a(<0 z3YY@U4A1$r58eE*wfS(*9)0X3g_$lcnt@L-?uZ|V~ z0t3a_yw;lHv17F5*&ZJYBdpr~D4=g*cq)9uH-+5d8zgIJoLm1-q^@ZR>|=c<3hjQc zX1SeL>0*QZ5Lu`-_FYljR3svQ|ARQZlk@jAf>ifXNgf}oU-;Yst6SbFY=BSBfp5mk zb8VbY5G<4?ITg3fp0v(6rn5P}iAHuH3 zShUR6SN{W%=&>`)t1IG%qR``x9eeGOt%T_T<~&&zAtBeCUIY8~v`2kxa>Bmdc%T;Y zZ!qZ7yLUT%M}jZ?*S8VNNa5CzGohU)=!!y*Hp@l`(=uo31$INNC5Yta!hb4~;+e)^ zvc@t!M%X4rz9D7{vl24!6k)X6c;$ zeb#V@7p_ABt{Hql#oxSVI~2O2Cjj{W4+v5#qIp~G5dqgPnb@bWu zM0zg2q=y7(5OZPkd_*`JNwBr?a?*0)A~pNS{iQ6>7X{q!DaudgiE0N=m(P%#teT7ds+{TRM4NL)fP&6yhX5JhS z4z%j-96%08f|LmFHaS-(iWU)Q*Rj?`_J+bbMKr7m4j=q2u)mAw7EoYfq{KSnlnM9{ zVG*5Q>(T_W0!R!L+7CrxFtBfcAEOI}x)v$cg|7ColhIrc*=Ql!bXV>ar4cfP264DWxy7AokDFUiQmCu!?h?@M7b%L+{HZ4g1LLoRCS1z2xl90V$X z%dJx+dM+zr7t|5v48VhkTo<+kUJsxk+=1Yd|T>MC(Ci=MjxlE3DU98dB`UvR#s}S z3A&abnS#64IOffw*1pj|JVTs|eCj}jSqzvllb88rHd+K4*)en5$>sCyVL=vFVAjIs z3AvK*3nP=T%O_?ClJX|og^fhhqiN!L-YCGL2|bo235&o8%xyjazu1T+3Fwz7PoroE z2r{xqc3Q5FK?UjbnhiGHLmXR3S>;JV8j*w;4N9q2?xShMB*cws1ZhM#iZI6W0`Auo z{8;ksa6EN#daAVx1}FIy0Uxy{R<^_LXmUg`t`9BMCxl=2P!VY^)eIRl)P@T(FbJ-) z{frS}rFK7t#Wrt3KG1$Ex)zXs11q<5Q5gIXC-oeKU6JXzDYi&3C{nKs&vZrRFMBtC znQaktLy!6x+iF)dhKHty?N5RfJ8{qudtG5F&DdAFe|^rr+H7l&Cmsq9<-%qXQ19t= 
zb51XBu!}uJPeVJ>ims^8R!^GYh z!V|cl?yFMzK$NIh?jW7^$9q2I>D7z$xjH3Cb%Bmnt6KN99Z<9kd=_kDV)Z{+6a76_b1o!iUoZT>(=4XHTjS#qElE5@JJXU~-y7Wt1lk8*I-j zOmkq8wSFe{p4Jm;zjLX>o{^x#5UDnL`( z^LGUX3+ipRC~k5B*^bFF8~CbE#^l5*X2R95m?$(MkP@l4_XJ%N`0`}idAq^xTP0C| z;QIPQ(dbSZhkIX~M)D-P7zu2(-M3~Mn5R!>#v0lEk3Fx+`+8rJx zhNmN^bzeO5D`<$?HeqHA5ynib5@3FipDdRIr7T}sr--gE*buQN9$#9~4Q0W! zvcjfyC2z#6+dSSSpA^9lFP%V8=9nmMk;{i}WHcmDa?4J!pg85pPz}&W133^`PJbU=nWquM=E?$KM?c{LNJ>*sXo8&rbH5% z)v0!eDE@aSyC01VQiClNl_nn96WGLFD#+aO<=ZZoGFv^S1=Fy0X@`7rm^5>6BKyrh zxZIM+sjRV|3DS(CMJtXLZ8=(0Z~aBlDQ!0*8>@9;vNDnAAuqaVwpDakTwZh=JivwM=&`Tx=KZcbOUcSb4ei5hQ)!8$q{2EASK&uvWz+TO^7;qhbvg2dv@Q zo zu_&&-5xmBK63>DHt|ZPr6@?ZALjV8usqt?x16JslK8}~pTs!+k61vrw8DbL%Fpa~A z6c@1@6srTm<~^~^YuxeFo)FOul0-X_!qaSKoP2U53xTEkt+Ts^kOsGXbq13EBq-q$@;WI9{HK zX{%QZqzU`$fJPtBieltVu5hLy7W(^pi$eD!^u|3 zjj6woYnl@ew0lHx3k2?6rc^~Ii9~_1h823c$@3-X5Q$4$A$l4~3?tSEWR*2-vPQnR6 z*be$E{P?idWZyhMy#MYg^KM~5ov*zR0{XVWMS9AgU{-)ZjXz?h#ua- z0jAm(`ci^>60SvN%AIhVPeZ$h@*_JW=-LdYWD^22yX+gUKx0_;?_uE%70?)0ueHev zQ+}{TRauH4^FvXi@3m|nse&UpHb;;`riBn3Ug1*)il5dj9QYAEU574xm1AcJwVVC~U@S%F!#S(fODq(R$JFJrJW=7>mJ+t+Wjg$f5Og%$}v zZf*&*o~vz{=F)|oT9%Nhhy)aX#W;u8ls%5^!@$* zYhXs<@?ZS&H$BXRXu|s?%j{Bx-HJ^}MxB}aSkE|VaNF*@w%uvgMSg{Efms8rw+2lfS{_L?9=gb$}Bh?~k3!gt?*L`&YT!K~Z}0jdgAbSo^| z$Kj4G)fNlTf&Ha9XuXvQxoX9g<%GLM6m9SI1>yad#Q zKc%M>rELJ=7oAv;-#+6_NNzo(COm5277TuzPj}l`v>%B`tEXndXt+XvmZ4h3b@WCd zy5EU#<82DNiGi;fNyjWk)VsBT=ww3XS@g05+3ai&;V9OuYvVUw`y8|)@r2-=IH(8vi%_NIbvU|ud_ zPbfmV=Tp&K`N?_N_HRMP@zneU_RS-NU0Hz+Wxbsz3hhmcP}TJwOo?y}>nT7nj?ZO% z1(_VgVR$-yP{kaa@8PCg1k)EGQZ2=@g_V2``q|X9~Mbg0Ez-aQ_?6o*BUinSCh&eLJ+6 zOYOJc0=O2x*SP5P8g#Qnp}6sb#@GOb=^ejmo+bk1(%#7dDRa6FR6IrP-^m-y-2$#| zIs+>G_w1QNnEw%S8XQPlESeMpd=mz4Qn!i~bhJlk+&IwhOx%$;PB%vVAB5We0`rvi}LXhOoTxXy+)ZTwVxP*#&6Lt=%T5E+pHyu5t=r zsb_>ASwaM{#j!RtoG4vW0jg6EcYcIll$|8vj%omY1 zI)h{UxaV4hNUj^X91yHV*iV-B=z4oW6vikil9gwD-T_c7=J`72%<9;?^;pYX>nozm zGa@sZ#XV+-D73C)5k8m{rc!u}I|Ap2wf2rkOp|-QfZz7H2-JaaX``L}cYsz$phaTm zdzci?PLg1QV9#rWh*EaQvkLAw6um6aU+xN%8p307* 
zI+0@k7L{pE8*KkokSb$aiY2kPCExnTps3ra3bz&5M*d373R$aYw>2yVe3bsn%!R@fd9 zHIGZ?*q;@4V>m?Y{{zCb7n>QSm;`&vC(UKwJ+IwG#VRUd6`a)sT{~GFT&9M_38N!2 z!jmfkO_SLndqRLwy8kXKRggN4wN%P={JS2~p^=dk`}VsoHdeV=mhLa=f}7W)TWgnl zc%T}OMLTS$ApOIFQLS_akRqOKcZx{8Y{Rv7kANTI^}0pD>d5d39?eDBBUgad9DFty zv05L;HrHso1vn7VG-e#h$*KEyF+xw3a_8ZcAo&%g)?+0mh?cj zr)U85DmKri`wY|al9O$Y0Id&{43PBmaSZcp+bBRw`t<5&|MB5m)Hwg=^Bmi|_C&>Q zyQ0I@NewzCv26?#g*gp;ST^c+kF@2jBD!_T8IiArFh5)P(rxvY9gm$Bh9#9NOuKl` zl_1CCf;8o!-VfVch3W7#86w!=L5?^^fApCSu4S{BV;}oW>8NI%*-0iQbpI^7Q9+94 zXqk*u%3XrAOUDrIYL@Jyjh|Y2u-z2tM@58D@r?dFSP#(QbphE z?408SX)Z$8cD}+|9~;*nE!bh#ddO&{=Ox?qf-1fRS2#ljD4w{x#@PrTH8B?z(ccKT zb;P%1YGk^FL~-NEJuA}|2>X@JS*2l5@HSfjK-kMK>TQHQaWf>D=%LTs(`_i`NNi4en+_ZI_U?qJU9+xQ?fxYTNH~Fa)>V zcAK5_Io{1*_1reI_gwzTnA6O*ON8h%Di%0W#}kIa0+}+yp5J1BQYtqCQqMZ0QGlZ=CX8Wx$6CXUS7Yev)Q;^b_>meQ;xsus@4`UU3 zWm(`^L3;8EY-qk(HMXxSULw*3_NpMGjHNl0O;S(0dc=jb-wC*SrgOc0LeRA-ny_!Z zEfVG@HR*0!=d&e+QIULIz~2CMvhWt7?IY1C?r70ASwQIVzJr=d9tD#GwGevuwp1U* z^}F=VjW$QnP1V8mT!R~|QWPDJWcLXZbL{0!BGH<(Nw!--28&&OyB!vAofv~w`}aO@ z@YwEEC#tIK7lQ7a!8=EdM3Zf>XjI2Wh>08Q>(~%cXj^;!ESsb-zvRIEC9Vr`kL%w0 zTP)9KWF~nHlgiIuM4|G|>ZdzbY_gXH{THfP>TG;0-}6Z%?TlRDnIF2UIx1FL4~6MR z;28EE0sSVqWQS~m5N!->W>(4>LxxX`ue}_b<)hzUw(``L28=UUttf8t0{x~sVTz-z zSN64LA=;Ui%}$+d#|0(Nur6Pg*|koWx3AmPzHT-m<@S+c8I5DR%j~?52-C!{49#6B zKt$*ARvX|Wuo#tN;{{yhOzARvFpjV7-TxQ%8)2%)b4us(?e<$irt-o2EkQvhI{0?g zF!!zYlt@~zZJW@{TPVap94)}2>guC~OKg`Y?i7jh1}2dgux$I2sBRgIgo*fyAnk_F znrWX3@Dt(Z5xAAG8$X7iTD@s=UK8&SNgw#S6|uVdK=E&d7&l`?cjfl#+sA$%hofO} z9ToIqta>SS#=>O$BJf^e|4}?O>nO#qV5Xh(4+w6l%Y$F@LOU-`ky;2{xy)0J7L^8% zPdn{8Q5g9CeQvga3e&SJoxVp1Fz{^^c>m?)>sev9*TAGlX&0dp+bE)23X{+u5TXU` z`D@+EwpL#VOC|8LzTc++vl!FZ;{WbbNccj}2bX+884L>CQ}N@k_K*yN62mr8(DgB; ztx2MGb9@5bbGgl~@$kgP){Z48lnK(i8}Xs`p%3@(HNbxGPw4cNwNYp{3b;aXot(u7 zBUKc}mReD>=YI8s2($`&9PM*41LET)cOfYzn=b<8;ms_^mI!DqH>|d0J|iW`_F|P! 
zm#+7wY9DvBuoi<~_M)I`2H(ncZ?3jO@yru>V*j6qfo#JT`@yHAXY4ak@r>Hfg#4#~ zXG~dHsk6&Oq{v`BU_5%KT`mf3o4S9orTZ-H`AdWE5<*8UmMfzBy#=zJ^MvU+x=a#B zX^Thq8o=B00bx3h@m3zJ1SovO*t_lQ|8o&(av*Sp2a#lWQey;^YQhM+M`1V66QrtR zNg~iG_&1!Oqe5;-cx#T8!|@h{=E$%GZ(`4S7;D(3m#DIM`ZRb=oE)DM#dYVv8g3@Dfs&}^Uha#PT;`BGumWBSP9;(~v}fp)+CP1Y_Zk*UsO~>q85){3yZ;k(?ZNjU0wQPq8v+hT zU;&AX!2OZYXMeQw6v5aztJ{|g`8n!h_}1H%qB8Gyjj}!p()%tQ@MeVN?N$+4*$6bV z84A!hr_^H)_&6@6>+BI9he)}+|AqyrJCdX0@mwMO0ws4-7^^I|XMKDKNtuvc_#YR~ zpdqdq$x;*s6YA{j|3ak4ck6-a z#~#Snk6ka|mRBa0QHlh8?xru%eChOe946<73{KWzJx-{Qg{%K*bPD|S=y+L z*2_agJLQ`|_|=xIlMLYmiWzySNFpx^kD}^bW=Ll|pzZCk)2X3lhB*Wkgq=)Mh79HpNw&wsCegU#zoIw#+X71ppvq77Se4+$~H)T66XnEfZ}6X@AI%a#dI zDhaOTR#zs-xSXnMwRVLWFcdcv?3jRCVjOWPD@SM5_eIs3f}_qEt;YT*f@TJ3K~y`= zJPYWSi%t#=d+aPx+;1;xCO~bHe*3YH!174Q#`=7*%}v1z{3IIdN8qkulB<&>oB$9x+p68(k~Cr1lweRi4?*c}b9ABUQn_((K!axo1q0}73lsduE-aK4}@^~|E{;dPWt1-=Fw%;|Lb2T)FM*#i-fV@b z?$_7YVxN~w(X%Kzye#O3Z!(@}bM3Ejf;{46TkmhW$^sF$QXA%>mOXy7Qauc~KjpSi z&=nVj^BlD$B4{y4&IZfn_=OdPwp{bDZBme>kP^Y(pfw3GhM8^kQTu}cgB{zt#lG_` z!mhg+Z95$#s#m`O_A?))?>74MLj>Kg2G;D3fIT~0G&dWl6k(jzMvCGZi5wjV$9qMf zX84a$i+YgEQ@%YVz{fJ^JeKCRV-x9v`~EFn6FT$$V#P8W1G7sX*E%Vs#N$m`=Ev&binRj!g;*eXF< z9pV|o$^_U_;4Sl_!gf(>fAYzhDV~h&4MEz}dtfg+tgsssu{_}5{=+9m&Pxol&*FLc zG}zb9BRzf4uBxw_oh<@|9bdM|zN0Wbh;3ddLvTM8qLL^NtngqMMl zeIYj|C|&op)4~kbTZ<7sz2SU-D{au=A$FgKvCZ3J<@4=94|(tl!8QvCx)YO3yd!+W zD{MfdI9)s~O(|Smthv3GA?$k1?@!vhQBfGM{aZ6_mBO@>r&;u*w+d;AebuV}n?A|d z2;WLOB}8ix{Lydf13|4Q%qDOu*39xU?W9vy$e)q5xNtF!N_Xc zo^2TdZfa22oC-f1?>VAT{X4|DvZEn*aS4v%KPFX(_Rb0(tx-p- zwXAqzyz^k5%2viH!AtFITO|rz8IHv-B+R!6Qg_GGg;uSwYc&>h7TL=_?%2+`_K^=y z7&XZLsW81kz%JL${VssjkQ9+C=i7xSy`!wWW8HikB1l(z^y=Az!`?ufB7%;Dy5*rq z=sb7Lw!ip%@Go)wY2W)E$(ZWk=mJCI&|JGzBtOUr*^eTSH&GPoI=yVp>B1Td`vfw+ zvZ+W)+>eTajSBbHgOQTTK8_qbU8j6JR1(z7S0h z9D>x@NzlB0!-!mMFn>5 z4+y)dMENz{E)XVRl);xi+5rLzg5cbKh#aRKV>4fjN1`7oi3bsgxZ?FO{=>nxjPp zM+?inKh07R>E16n5Iw7~Yex=Vt=hI`Ta`$zYH6P8X2b`sT2%Mry)&>f{IM^kqiVMG zH6p1MWA$^8a2P2_R}t#uS!1-2|J@>=@Ku|CQxrv)IaA`s*n>k$F;LMW;Mz4PDJeIE 
z5bHsaTnom9@z81=B2eq72<*B{`^e|NS$u>`FC;Elc2d(EBt%L+si$y1c9)=Ac5)ca z1uk0@8nSOa?-@}6#^*6!T%S>xa-wOnvGI2wb81Poy{9l`c5d3peOPKjo; zZA!Ny(HI2mrB!ovo@sMMpc&~QOm>E?M9{5&Tpe*vkK+j#!?j()z7;yJW5nJLiALv9 z?#Z?f1l&;d#sbc{KL%2tEQ|4Wz0`V$z~JCp(0-+`|M|*#+zuNeikr3xXj`W9quUry z6cdx`87mXTPvrg*WcOM`aE%VQVu#{5+`eO@y)Vq~mgx&_kmBOUnpz#*btX7;?chQD zL#3rNZB1EOcJWWT3l46|KNqI)ovl@NtHN|NDcn{MQ*5pv`5uh0*)5Fci>9)}tQU3- zka|1pXoNBOi0l`gE%3-j`#?d8lZm1EjrN6yxlRJ~M?WQ&ie=EK7iO7@3mMw$)a5MZHv#Xa@z{=ncmSi=Bw$EEy9z|03eu za3g@oo6Ao`a8sOwV1HjhI+2Ha3I#Q}1qF7$!c>oE?49OQDb8XV%LW2(Sb? zHZ*iBnscbs4%S_8OeqhR3SvYFi>z5Z##U#UVR4IeuZfxXWDvAr9JN> z(G9Y1{>@KBnm4q`VbAaP8^pMX*JF>aYqzbxmosb*hEI(TWMr?El z|0W9sg5Swv+o-U&6ck*R1Fn;tc(>B_R>ivMNP+9~ClBQ-kFWayLDvvYdT6QVhV8FD z3GT{dR_Tn3NkYA85k}{m0@Rzu`RZp1yI~0S>;${UC+)Axa1Q~u(Ni=|uY)yxtEjF* zHpj`?lJp)b8cjU5bAIQtnRcfjb@b}j*Pc|EQDE8X1-{M}h~O&COo-xC)|UF*jN4FN ze=6b$s5TW1sdq$Cc{dNV58{dMgvI@tAd5SR*Fv0Y+r>Wz`|qL{)`4cQT`MXZZXjB@ zDR7Ck!)_4K{VsI>wi|`rI8P13Ll9)tjuddWyDJ_ySv;;d9F(474++w~@U*Z!?4vL& zf|!~W2zG5vY)a0xaIMV|-Hk#tlqAbY3q;}MfaZhzxvgY3<*zLinN~*jmtwFpYTJZ; z2gTM2w1w>tqVgNPcci_ppzDTyIUji#ctb{HMv$dE4jrwq3`An?Mc%x`iTA9VbRUy0*w??gNXaL}8|0rW5c7oo96- zxSCLgm~MaYd71b=J$l<)K0R|< zs~NE(K{vE4I(|bo&nIZxg7ALC76`g#WQN;nZJCdzUi^I7dO>w_=hoQH-GK?6VtY}zK zbHXS}MC@T7JD839k$CPyD``C%v^GODdOJ<8zr_N~ptz|o&xNB}WeY`eJsu-D`*d6B zbG-cY%P?foMV+f#iLSaKId{$(7g9&F0!xta7Wcv`+>r4q$Y&YC);(x zZX>)C4j6nDn=6v5TZj4;Gwo?#K)*gcZJm$B&ZRSWX_cb5<%uf4RSEL@KutVWQ-_nn z;O~4pE;=0*s`4~nmY1J?2n?*e0VXl z4!791u5`&6+Sg~>_k}1meF`dlKNN5S6I{4!?Ltv#04`i@un)hxY=zwwPl-^&qBmM> zfhhc(ADwDz6{JF*2s@S(+m?7*@gVu?)Ws=nyJ0;cnkfp+$QB2)p(}q``N@@)w!_Ee zaDBH<0UDHpFxr_{5q2AuJmJAI4xWqce9@T*I*fWF|If6GMe;vmt;m7GZc2y3Y5Zjz z4jMev?(^_iYw>Z8RjGnn%L%ydoh#s1X5jEf4mQ__NUPqfFSWHkeEH!8R<5wC7D0=2 zs}^QG=w};9LVPX)BOcqiCbn}2@40sF)!kX{4<|(I9$|`Z$0Ygv3cLBi5Hx~W_8UNq+qzA0T$MKeq^%EWt8bzT0r&s37AnOi6HKVCKuynJ^c8kKI z5^FqUZ~5d>=-?!C!!<-QAUGPzw4MT5Ue%m*w%H&--6r=2uOQkDq+eGivTHg5OJUAj z-?{(ozKUD~?p)2gktTJ02(~pt!j`>fAV3H!gh0ZsCbYG+2r6o|)f*uUfwo$$ 
zqpkhFKj)lq`+L3qzBHcCbN2Y2d(Ly7A>>DMxEz;`OqC#SBu}y2D?C120S-1+AdR=z9p--CRdkQ?+~?rb;O1$9ELj{ zAzBd`#rw=^0l)v@8&~c+Y_CWf@x}^w{-11z1BJf2m-KRbQxs~K z1{~@uZ>JJq*CijH@9(x3QjM!w-cON`_@S>B!`L(JJ^}K}I|v*8gMu_LWB=Y5o*4oX zgOdB9ke{BsXeO_o_GeKTY<%0Jms4VPqc4o+}i@ zeiUOEyL~R?9@h8XzP*RrzXbJr1(Q==Hu3el*8VMG`!~u{@3}XDHH4BbI$wak9I0qH zQohPV4WD{GU*;(;RsyE2ZDNyMsj#1$`>~LV@I}b2hiIlaO^}iiaE)450jljEMN9*| z)P5l?FGQ#}T9zQ4;asa}C_nV9jTMQix+8E?t}sQ(uY@fY@IAoFWXyI5`%%S2S`LLj zCkowQ6o;S?dRUN#Y7PqJ<+n9eLZXO8^?@)|b?$nbef?&iSBC4@(k>MAJc!nc`A#!w zZgB5>ouZf=z2`;khaul6uH1B}yPzAos$}(UxkGjok|%hAi`ugxIZdR|woZ@^j)-AO zs4?U{Qa;H!tAECtQmNBA-fp`E!|te+jmfI%b~qG)0_{S3CjeNf8FnTBjCtqVM*(EV z5Ffbd7GG^Hd|!(&Rs4u#b2tW#r$1^ql8{wI@aKtD@6QovcJq z2Ml>~M&QpYk1V&np(GjsFNH_!mqeiE{sl$(_!kJJ99yvWc=b*X{bIzKF9K;DNz5P~ z-MiGT3t>sI-L5c|J6)U2HcZfG%hT1nT$rxLqWhn*k3(8okk+M#eJ1FKZ7`=)&b8O4 zd5RPJSu0m5w*(4VlJz?zAKOI~eLe^P^ir77I$V9Yiuax^Hc$jw6Tzq*d%)oT%3Y1+ z!)luq5>Q_6o`Y?UAU}X3?zIL5{m75V(&xBS1iu*j@>7)$L%+~o716g@rhc|ewAVyo z6cNy8qxn3bo=&s*KdIDrV`hXqPlrxt^Ct=&tH)9N;n}@f!|L#<7&Ts8F%zjQwBy*8 z^MHj*Sw5HI!Vl(6T~WWTaN` zX-G!|-Yya#&xv`@*mWU-7sg#SR)G280c#7JD<10EFV=+AZIi2PT}WLR!`j+50Y9R+ zRz((dp9p?MX@*ztvljvdUO*u_*c^6V4JBlh;4a`_sS=n&VlVGX33U91j&_BD{`2D@ zd9Mu?_NS!7jk0{<-b9Cs>W8NnukMee5;?ZHY(7No(Ns~{%}wbR74}zCo$fjANH|FH zYo&^z2gx;a-OKoLoA}>(*A>`&VOluwVcV!6!#EC|m^~qUba$EkT4Bbzxrrau--d{x zef!u~y7`Dhb5LgZx`4j#bZ4mNB0&aIV&1H99OJbU^im`dpAI50Ai2EOalEjO0S}Gx zZp4Z1MKYKK#L@LC71tC73ZgQKnyn4 z1>M2E_S5wnJ@nBTaqbC&QqkF*BNhtMrH-9$vnqw@L}5`*+*S&NcZlHVXpvJnt+Ta? 
zqU)01OSgR?S2PE1?EwLbb21hoe0o=deW0*!R3SXo&jgtpI2h09bZwXPfI!uebZ!?N z1$^Rk6rH-IV1L+oIf4wQdm3qSf7e)fNF!65NYYFd^dC}hT&qN_K?FYv@Q@qrcf!;M zb7{4`5;BV!&C}C4w-cu&tH;NUI1sYSMDfKHOMl*ah`^e?n*)S`zOlXV0rYst%mx-| zUW@%VTNVsM?q3j= z#@KkK-6#V0E&2ji7n!a|0*ey%W08HO#m(DdXf0W;NK`{g_YOJ1ibSEBJXuWtxd3gt znaABU+Z<4q(?ZxPgjAOEt+ej&HuwMSOcKU;><=MnEL(Q;4hi|q9br<`%3+7FuiC4k zhwGl?>E+E~kG&c4jx3U&@IMJLO?YHzDNES90Uq5sQ3kHg{}HinJtA5@g}r2)2=vkM zOdFjh&?J%QL}nB=n@ty@e+QSdk*pM8S_;uWio@AkCro9mr(lq;`jMRaa;IMDeRK?H0XNjue5e;l`+=)d*-S8k68TZ=H~Dtu($CE&Z)y-PzvHNu+DXOuo=H5Ax0j3Vf_ z^&QF8xCN~IoYe?3 z@?)^;^IQnGPFZHpE3AR{HZtm1m38)45ozyKB5(o#{Doo>hdYxVfRgszO+M^2rcxTe8+J>F0|`w3KD@n)^*bzg&6Q za~oQ}kU|bid16=-AdTn)t9N;SpT8g0{opff5)8XD?@u~;@uPPjBqC4uThi=9L4C)C zg~%>_BIH{gkz(QJAr@Ih?q25)AXe*RJ4XjC!}~;_&y;m9vQZBD+(URIvN4MG_+I=~ z+PIKkV^*?%agIGD0)uk0db0gSVMe!4JQsmh0lLmvi-~ZD$Tvjtn}ah}Imb?l;@6x! zEa4;}V%H3W=6^Yn$H3E!Opw9L64|$8SRCsKeT1nDyUr7=SRnKQ3py2o^wK@4NYQetCSCB;}8xUxH z$};+NyGul>Lplo=4Xm7dLngkXa-JKa0b(o!aWRA#Kljk83^%ot}DAv7(BS{cjx8Mw`7z1!G!MbHPCW#soEph)u;u4V^? 
zSav<(TAm2u?p-_E$q+uidZV3Dm=3uwZ6N|B9R@-0y*M}#rnbX$KOei)mZnHbl+CS9 z3KPLes>*&Y;HPUyJO}TtHa$dR6${q^0=`gQ%rk7YFtyUVQP?>}GfxBx?v&QqK1n5Z zhAsaY;*y&x?W==HM|Y(P4I5s_ujr9T09+m-F_3_ARtF)9Y@IsWZVUvhB-Xm+J;6af;jY*G;-;rKfZcwL z6)6R4V#rTzOel(yosLbJr}#FXsa#=|O5nbg3io>9EacA-kzUHQDf-6uf&AF&#*6m$ z9isR-$+T$#zQD9DX|_U81#-+vkyZVa3npK;h&+5Y$OuCGlVi|(W_eyBmXZDK7?3voedZEvpp`}6vkvE?selfL5NQCO3!iICJTn2 z0KL+CD=>3JrBRqZTx)X$D7`Djs~3da#~1EwTUBq{LOLY2;`2IuAs72VVc)?7I^_1TuphzRNNsT<|I+~mmc89K zMW9Pg}`ep(is%ZvU7(4-JsVc*P|F(YgdRu0$w90zPZ!p2r{6fi}?PR1~3w3 zPpc7Nc8WQS#H?0G1?jT9O^fHe;ZXdD9bSF7-fjvQ?|d*OBX*B4B^StB)}sP}DcNpq z;|b9iuAUkARFSM#Tm&`Ed9<}j0VY>Zw`WrxB{ztyn6I7L4Lt#~!vX)F6o{ey>^t=@GwY1(G=4FU&{(a=j1wVZ9sn z!fzB4x+9;3GTZfA(KJF*apDmk-?D+iyh^YL(S%P$ajL}bNQ2kXVAvP2llF#V8(eya zmxw}*xHdadHrH+x3?~LO&^Q|45igYqf$nrhaHc5Xqp5Nao?H1{byPnpaK+gT3% zo}OR#-nI?4AVop%6jfVWD2ng$D40G=Q&5(4pBMCNePq0d&*N1Q0z1#!%0uT&FI3hR za&ipJD~Z`jLDm)aXz=-UT8LFLa$F0xryulD@~50*KMRP%5%;p62ZRI<%*07TzDqf= zbXiWEC5mruA?9c{2>R9e#|fO2wg~I+P_eYNVy1hTns1FFQaQ$tWA;)gV3f2*5_ULM zz@P|kO=pC|Z#}x^T6<`{G$iGXeW6`2!k3hrj+k+00mjC?AxLMWs|d91wP&0Mf-QpD z2aw)q4Pg|C?JptOndy_xR4lcR1ZmxY<=n?V74RL-h@#hq(BV%#CAp^FaY^UdMWRp* zXOngK9s5B$T!KE+BD-0T)%@yCo1mZ;It*dBLN-O1np(=($)|^GtlE1Rhh>6(b8;33 z=d@O=v=t(1t;;qn8Wd}6r6_(qAePF#v0enGnPPIr*lR+5c6uTG^^TyQ9h~lFnLXsA z`6`;QmU~5j?`_?Ex_*iMFu*sq@Z3M%?h^DvAzN1boQ??js(;_1mZLC(l%e&PCqRF2 znuw^Sl?eJ$T!Y35`MInlEAIl;;C`kWDX7S$sx&YJNQh^1n-!6{Xdm*vxi1VoU zQGn9_I;6j^ZMXi+Mq-?#1 zfdjKmdbj>uz%J7(BxJqyNB4G=AYDlyQec>@OFqZHo;CSecd9Ema(dRW+Bg=kuAjESEnpcZ^GDHL&-Rd?w zrB8kuCfG<3=~J@0QbH2>0aDQ*R3m=To<*9xP7a%^)=Uu@e15pFHX@+;?$O2GQP@xL z!6i^)KZ2zKgxdl&2e_}LWw)NBlki9%uAv&k12>#Cn!qKdLymysnheY%ve6Z@^40~Oe z!O7#~ee)A%Gq^T#4Mn_CM7l97n#hQ%+9qZg=s=p`KU<*fFKS2wgvnsqMXPt{9F;jWa?>=--Slol1hby4ZN#Ai@>Ht z2Nz2Af_wf6ku+mc%G)igt`Cod;+FtVC>fS6Oe38`fHI2<(kR4{V77)hHkvi)s<#L- z)Q*)Bgqg*<{k7=&#JY5|!wUPRJ6DEwB19gaH`D&2FhAt?J!Bs%=z9cf&qqOLMI+t?EU~xR82iLXP-p{>Azn2IogVwSSXTqF*yMd z#}l5j6^qECmZ^mwsnc2~t+u&>l$j_R1>2w7RMggm6z 
zxWQ~g**0I$|2+0ANWiSgu?CUcCp^{O3aL+TU!YUcUj!MgqC7USOEUpX4nkfi(%vXU zx$f4u&F&VY+L1-E3<Hht1`vB?I7;g#nXPUe=prCLP6Bnu}n+Z2I4rjvYcyWu01Z2ALId1JO*Y8YhFw1`PY6Pa`=wOJcc!h zL}ywnYTahK*nS)GIA#ru=m3-JmLzNT+Dn40A`AzJxxLg5g#u)hs%eL8AGV3Y5FeYm z**;X5HnPDY@t}>?z7X>@Cf6)-+tOnDnkY!=~>u6TdX=j>?O9%nChXQ7PK4^h|SnWrJw_aGP`jg zB-b-63b`i&OjhqpR!_G9**+;;(}dLuu=)m}`3Rpei^H7#N;E2$nEHXe_IiN&A%X{T zLeTdSBT_NDFvq7xeRIv9xlEB^t;^QQWF~8bXi5>|qqR~Y zUpkNOF`FVxzs9ij+I#^9+_}kh=9XU;g@tfp{&f3O2=~0Vue}?xF`XE)%Ow$;$m#8J zDSB!<&ZrwsO`dbAey;Ub6kVmY?=>#52L)+Memtur-NpojV;632s{+E;HmBEWAvFaP z3AKC0e%mXGW>&D=o(~BqnWz0C6vTfcir*avI}x4>@*vRod|Ytcq_A(ufXFDjMNq}d zPJpv!ac4*%OVa$T-WBq;&0cK-L-Fm%tT&NCZmfp*+VP7NGe@ zDkeDHrhPVB6pC%zv9)dK1gi=O7@ee8s1t0ZDE`wLlVvsk6|dKkIo6t@AfSTH3_B`{ zZ)tN=wtXib$Y{20TxgvXCW;3LaiSg#5D^BXeyvDQgIhN@Syu;l$tp$hs}8SJ`Bo?F zr>eJ3YSs|4&|Z~@VTYGQp}kyRN9S3akZ%jDKYK3)hj5SiK#->C?fM@Ivzy$F=+u)w zllGwX)9kze56(C!3w8e+Aj01BcmVGx=)ZY!zY39f8YGf$^$nf6b+E^T*}~n^C61*f zV`b@~7}Pi#QISsw(v<#vhug#uh4Aw#dJ#(AgR-BA8$hheFJhN&VRqK z-z{F=!pf8CucP^yVe>(HI9-XDO!z4HGH$KF##SoZOQrG=eqg7~5{+uPMZ z*sgKc{T241qA0~(vKy^W0!XV_PlqNrsr7N9FnR7dZlkjr&}csw@v8GK(`Uj;l}b{h z479gH6g=%pdsl!~a#FGnLqtJyO~S4!^aYd@A-r*; z0I8B2HrOo+Yca{tV*@uB8PpslqHi|lmbijju_!?-ZYDp$PR@@i%Xk!C5>n&-^7M+i z?7Oy36h?vmj4H{orU0=ejLw7Y6QnapoR!!=1$<}T+Gam4@(Ar7__Do)XxovpwJ7jP zpr&lP6^O{nIJnuZ|2k?WvI6%V#?Y>{8n#q9zEB_IwQI76TcI z99*Y#P?)CVmFT_yaUuU9MzgsIVdODKB=VkETYh}W6dNb#FV=8Eo(XKFic~&$I!zMB z-y~A|a6XF-D{D$o^x}gHqPIk$A-M=-o>Z6t$`J?m69E>RTN{Wv!y{*fRBKZHt*F4a zcCw1Fh!j&f2}LZpJRAbrO*x;{|Y!9~GbIY+n^a>)6>*s}?7j>M^Wgb2R9@;Ygc3WrhI>h4MP zY$zi|rMlAQDfiUL85C!K!?5PEUw6eJgFVrgpXclKAj2a+RT`vOtm&6JN z>zLh!kUd#9Ke?ssEM&f9P-=H54ROnu{Z%0F;dExxeQdWA5c-#5w5r9NlaT`?5BGA{;VWjQ|4@8=37~B3LwnjnAsThEqUPlM8rvY~f5Eg9@F7jKO`PAU3a!SXV(m%1+O( z(guq{%XtswXMLy;1JE{Oy*(VVBBL%5vRHt@Ja~~H9nkf_779>Eb5$aqZA~F1?akyE z)DE3})A@EdWGsw~!4&E*$Vl@}Y#)DoWu5gDq?|F4{V0s~6QT;33^98mfP(1$y;dba zGkXr}X?sGRK1F#9#y%mxQMeagYZgHZgw2oT@M3_+wv@|D&S61zLri+^KCl7#@<|+4 
z{vjf*D9&lF%CL^V1Zi!{5qc*9KfLH>7JE$6qTCov!~u#0C9-AS{4pVFMyrz_a?26) zLoan&v=|nJ1UO}sa^Vs|%I2{H^@kj*2^q9gOFyX8UKE92uK{=4+Y0(^>>deLqlCa_ zXHP>7=X7uKQ@_Pc_Fd5#uV_}bbyxVx^S<@}lk+`XW3MTi?y~jA?1ECC@67Z%yHa7g z0}BsBwboHE`~b4g!V<9zk;vLF21j6S$a=JCwJlTFcU_9phy!gDg~C`O{kaeC2~Zwy z>i1eQl^#zf*desj?Fq#4$Ms0r*#h0qL5^J`3U$K{M;K3lc|anX8$y2}za)kuQxLU# zMeviz?_Ja$3CXiip(?bk0W~*v!kfrGA>YtZyh6U5g2mi|Ula6$kj}L^8yhF@isWl6 zh)F>Cobt1^!O2)_|0B#?BruwBk;1e(O;Z2g6rd{Ske9qPrn;R`FKpu(o=JPv2MlG#l2m6P@zKKH$ z;{5y}WpUm!&?rA6%9nGQ07dcpDT_2+LfnDvIG?&JfI@z9ZLomv{0Ke&4ijeRbfw9& z>{Kk`jq_}}FvZfT){1JIFQ{HLO-?p7Iy(XzZE+}}C>!3Qy&BNqB20!`YkLM<`AMuz zpS7J5k%kwP;Bn+{0)9P}G;f!#s(mVw?`&Uw_TmTy+Qs92o_KD0-2PWUBh#(B6^DqC zaTr+30>F?=p4AG_vqHV0*}Fm->m^g#CentGGFLKarv=U)klMM`{|HGi-X6D41pKfy zSEDcYd5A51H*@@8bmM5SbFY|VGZpqd z$CLz)bcC-t?>jVFY#*Ao*Y6yN*-UkepnCh}B+~dD=bBsXs(?oGi-hoKKvB221_`;_ z$6WUbEY~McDPm|JoX42VR4x$BH)B{n)}H<)?D~$go_f1|^4TnUdzJ#8^%YGkZP{dd zOku_#w|RS}*!A}FK;S5Yv1}AeN<~mh#8~1**a;MtiRf$PIXd4eg#Gt87MoQsEG)2v zDH5Wdyy@pzONxZv3GDw8`-4a{x2<;8(LJ;6B|)0jty6dV(iFmeO&1pxXm4{Ae|y#% zc7y8tLU1wps!}uj$)@G*K)B6GYQX{L;xxxyoaWl4N)uX*XBE3%7?usD*0ziL!VOB6 z*axJN;ZrSUPYP=UBuTtCcvdV@vZy{mW979`s8s zOVsc}LBGPid7C9f_cP963;uXLypra4;#7g6gR1W0Dt9ksdv-?D(Q%s}oE>i@3 zrHL5lu6-#yJkHkfo-hLvi^3!NtAN`X*b{i0#XQy``%FY?%qYpT^JfyKa3t&}9GYg` z1^xDQaF!dH275Fl!Hdkv2~(|1kn+*0tFmVW{FrIqDzKW6xMe;TW#jgIfXTXT8j;_G zya<3JB9zMOY<=4&A~3)*!{Bx@>pu%h7(r3!L4!>ak>(E_+|!mRtg+d`N4v|`2+}sW zJ@2kbc|p-+x=3IlbKy(NmDKU}rUuy4-+lYs}f zwQu3nR?tTuT*r@Q$AI( zpL!rrj+B=lDJz#p=7&XbUljLZZ2MWJ2)?QVi+F3Cm?}iSNeI(diK1?9+AN#zZwRtd z^xBLinN)ITqhNdxDG#9FATv4Hl+JnYjH<*potRS;>Zd&TKT6;TFjiQ6d!4GQKd| zRto6zS+h5}rkY1KdtMY8)46+BI~XwjY5PYl% zT1$t8X=>59B8x8YNqZsi2P2?T&TJJgbUG)e#6ApJQTKSp9$iF)eph5rZbGtd zjrhTiNO^n~%+%9LLW>#xiR@#QAqO2uHjMZ74Q-}~epWEJDl{0O-!ZbE6 zZJ{#(|Kj$@<@i}Rw;k^G#NvOJq7r2Pe(0*p+P?GS<5M@3-4k(Jd`;~ey*<) zq_%@AFljY3#9>8iA1w9xf_~a?tB$JmA`$3rUNqfm6=wB4Fw~w4*=SN8_q`Vbd~ZwL zsWlty(xoJ%dHk@kAzmlsd)r5{0bPYz*RHX$Nzr7zMe^0M#UVC1LIiqn>Y3`(i{{uY 
zL0Zu|d5Tqs>>Sf1&mgw*Z$+X845VNbF2`OGrhd+1Y=CbF`5huzJxNZHu*vy$ND@W< zB5HpXqB^>By1v0a5@dw%j8$Z}Ec0==>6H}15J8%P?7!WwF!kic^c*un$oCVC%eXxy zOsB%v+h`L))}F%#+7gBROK${W1k8 n+m5&so!0dqoJ3eG+Tj&6x+`M)r_$`<>!w zH6tOTs;>&tvJ7_T_d^sN8o)WQ|8k-jax6t6+oCIqJuMP#PeZ%Bqs#d_-`iZ6L^ zb9I(=N#Td%+~~UrGb({|KbiMy++)~}Mb@v2goh;Rk;Ex7UNUBgPW8F+e2;*LpkGCq zaC0!{B!Zv$)@3VY*J{~#`$R;#8q=twodj_LI3qT!G(P` zMid4T6Pt;m^cZ5Hwm~G{r=hapx=C1z&%Kpz7xHZyR#?P=*AZwlDq`bl!0#2$q{YUK zvy%$@>h47Yn8o&Ch)wI!$1k(CS38Cefqu-WE>jC2O0;d!<}SWS>Fh4#Wa+bmN1=w!n(Vr%(rCf~>)vsuTw-dHFg?kloi~PNCcm}V>&1b#Spy4rLpZ!fyez^}}6-Df05g0!GHnDAa z=XgI@3-K)XZLfaGIp!xsWx>$4Uc>G8LX>o1vrY^AG#wIwq2e^Y3z=To^!-u|By|>R z>voXk%ji=--e^t)&sVFnO3M$stlwQLqhyWJY7 zWYUl_;L^|ODr)#hYUkr|$9juGT}}iZJqbYyZYyiBClscePM%-|N0oFD_%2^7x19EL zd*+@EpQvcEgy_D4WA$6xrp~uQsT^qpqug1%&)QL3yih4UiZ z?Eew+BJf;f{|o?82z&<%Fc?EPaDI7xD70pggk&!elr)7HA9^B`K&hS<8toEAk*hN{ z{H|A+cD+=IkGq=#C_=f>o)GYpIxvB0ADrbBili@e&1$dZWxo)`H~qjY+#Xg6hfh@Y zF>rwbi)g8+^cSI!Jb3TVr842gWxTyEjFu)Ay$VXO+RXEPynySIy{QQ1*BP{xTg#7A zIoa?E?7JI$!S3OCr*#w5TCJ@+`{Qf(v=f`ky+P+>(^`8pWMt*ZaDRy)qrhznlZOI+ zOLHt?-GOI`MoYZd7Y{z`Lyi)0kkd92t8v-B**(B*wjZCR@Fa)jptaUh6vlw%j@d{X zDCqeIxFguX+HXLRN>X-@D~771?9L1N2FnU%k<(e!oMlCUf@_|WYZjiEJIS_(Or$~8 zj-9MEmFaLD4}AOVb5ZCcMtdW6!6x4tjPhk;*8FloKaHFsbM0zjR!DMFlijZ{`8roF8j)6 z!v1^A!15>BkzW@@V_nTg%gERjBFJ)s7g-d$#j)z`+EiW?gLJCwBMQ4@6o-O2_J9!e z3`%5L&VQxlKniapUlclaU@si-jRJw6!*xvub$;JuykeVTC{i**9Ed_D#9|Y+S3ut} zRsxVvJs{}25)@h933<6xHrLu2A-}LBK_4kVfd0ZdXxgzFtfPXohgC4aek9<_>WTGj z&Wm@6LS?6FYtD?HW0`_}Ou3p&vtJ0)%pqCgTCNC*nU%ELFy|#0wm9tvD>40QTT>;V z2*p`N*fVn7i57YFH)=S~zAQSU*!8xX?6xgFEX}do74|DSA06#@fbx#b+2LtHsyw=D zRom{ll1rby*~&xq7JQFw3*olXYHL*3_f~GBcw5>b3WYoI;RUu^P?HeLhsAPf{n_O1 zjoMyw4>Dj$pByYpV%l2dg{?DLT1 zz$SF`;Ez~`t-c7#yqmMn1A;z>Gw8I%R)hq2(B92{vpGO_kKAj!1?evgq%3O@@Jr#~ zN+gaB2x}W`e8#Q%HbdRKHWNFl_lLDuB)MV0n>h(fzy8^0#s-$oR(k5W6_t{a5( zk;|U`00HV`jZNk{@NkGLfWczvAuiFfC6DhiN03sOpL+&bVMv)SW&Q~QVYn+f*F7st zTTV~fVADfF1g^4eP5?Y_71}ZZKJQLadXSAGne`H&BJSm7FHNu!g1#MTJ-T+WheKLk3})D}kQT?| 
zG7VUhO6#0o?iLQRcI@5SZe7#LY@cEn(V+u-+UpAYb(K} z$~8#cGpFZ=*pXQ+3++k)-@1FH4tawx<@WF2(|RbZNzj>Stqp63+8*axc{{}ZQsW$} zZ-=s0UTQl6ERd9Bj=dNl$Ix27{+9)r>HH_HRYBT{Qg4QRZHKS%^n&VBlNZ=^f+TyZ z)P^X?RAjl+;ZKF=6Nd7=!Q6*M@Ly&&8mp{@$3&s=bb1HwI3t3-pTi`ImJ>=~@#ZvF zBdwVi(sRm#HESj9P;Mx8*aSh!Z-e(n7e~ZMRvE5FR+Ruk{N#5}3BY-HDK2 z%_CTI&j#phuursq3L^8K+*E@d(c~ul%udwHLkLf^3wM!#QIBNsuG~X_vW`7lb7J3O z>m^8Ix^?MncZVpc)-rGRhNz=UW}**FvX=t+=sx5NLpTA)>zKmrTY#*_vhQ&+wf9A*CwUn>xm~my zK-1f>d33SD%y@oIBvJ_P`AXriM0g*<;7x}V#eW<&SQLLSEAf{6>@ku2$K}LQY^n%U zh`{3ndp}h~Z<&e9u@6(}6ZrFv+D9U2+h6Jq``#t%$09Q1ZF?uSRWGvt2vX+AaV5ES z)1DBwV=JzXJ@oD7kh+s6&|5&~@Q2ULLPgG_5pWj?ro@|!$` z+A+eg**;ga-#A9eQ9|iHFoU^&W}(ec*vIvkH{u3izg7|$pNra#RD!%H0+25X`}H+g zEM`P*{}7Vo$H&B^5%7*M<@e~`(LPd`hTxcUt$ixMKryMDMOkce-1pCrg5Pm81Aj#d zIt|NV0>APz1%2yAOX+*IFeTsI^=6x&io=vtq16e~@=RRKz+-1rMDUa5y-^+9%2LoL zx0~bBCC?Gdc(U5~W$i9RuMjH7wgAHEG3zN3Gm;cLX@0W4&Jv=~xQu8qvTZg-kP7{F z%W1D75a?b;JO^(3vm(&k2iYz6Dy-tX;NR*Ab}YocJ9nx5&-29kj{fl(7R=X#m3jxt z5lH&rsxnY092@*2vC1$&6q+*>5r0Pg9zj2I16VQzmJorV$2&)k6$TKIC#heilEN! 
zMAJ@iyP2l2ZW^*m$nOJorLYGJ;{|EXeYRAweu+E3C;Q}rd}YXfykVLhR@hgbA(1#{ z?p=`>g#u1K_KASLfLbJ_H`|x@18GMw$1?a!LY(k&BhvfO7ZkNSMe{qyfmQAqWuPb& z!5!^5C(ogRG(HUz2sxnuZL5rIQH3cLVo1Aq;LO2t* zf{%yr7(Rjwh5hjKEh=cQW@aHL}Hk&0UJvw!$IUn9~fQ|$=> zdQptDU#7x=K`%$rtDF@FjJj9Z7Df5Nb1aFCxI_JoXnw%>VTjtXP}C62f_(Y+XXAPm z@L*xWe%VA3{=ZMqe_{Gc3fRR)i$ufyiwd{7?2tx33H&6>o5xckweL`R^&n=VRfwP`mstBQ4DM5Dv`c_-sSXCr!&W~mxC{{d8wSk z#ScRn`h0qrIvyt#!@; zIGg-QnD!;t%$G3E+wDo$?6UVml#cPQFSkzw=~v4ZF7x)EfG|zT=k@pj;{2e(E8)6r z<)YBYL*rSOiv;}qL`yhz)C)6?^beV4_3_Yn+b$wQbCy_VpNd4oZ|>O5&UuM24ZXQz zXN>9FmjqeE@`qB)$0GYe1)}+CoHkvdFe{|*{kB>`zu2_oF^^rz>*;P$X*R616Wb@l zim`*D&`Ctear^gah}Vv%Tj+c{_aO0pY}1k2h}pd&(8O-=1BZri4Eum174}PTOvz{~ z4Ec~z6&DomAR_p-vI?U1LdeUNzz(MJ4r{KqzlNxj)27(J6=qoX@8wzY3x|9i;lOQz z{-@BnEFa%my2Gv(kui9fhdu@Us`iqOPIGmi4HBJEXqW!kY+FTP6bcK*A@Z?bNFTFa zlQ2bKuZTe3kPKXjZpxnp{dPhEIUz5#q-fzc4%6Aq>#%)QXvakLE%cJuVjq0%Wr%(x zGEl*=3}HXSy(78#amyEh>ZA?N>9-`6UOadkCro1#`O2$8Bl-Pp#Q#H!4hWsu&HoP)3BF z6v?-W!wdZD3K7B&ihfSqPl_!Om9~=e)Yej4A?Pc)p;Pw`c34>Z6w0girvUoJ*wa1` z&}UIsE>UUBRb2WS6m44))86&+Y;Di>T@f`FvT9Lo-2|ly(8$v-exH!F*@lMWog&kh zf{bM>il5OwWmb)Bg#im7{~sp%@@s&h%-0O zm$6DDn!@QsGJds!ep5yV7AYEQ6vcPmag=@~OnpbnRvjtxZc-d8`ngb0UId3zW9*{< zbK=<%oEU!Xb#l>{<7*e#)e2K3*O8s<3^xlhnwUP$wL1bRj`J(|pn$JhS8yv1k^6VI zjJIC~5XndEvs}P8`|xJ5FKmS|C6FpbXZ(s;=DJ{mMZfY{7_vi3T-&rG@)&Lh21FV=P|7Zzj;GKG@sWn#FC#Bq>1vN zU&b1U z$%ZEVAbuVavv;F0X6L-^3&LPWj-4k!0}m}=(Mv(Tg9!dmlx(P#+)Kkg>lm`c^02kj zU%)r4zr^sD3b%hq2$A5dZnH@AqjmC1dr@IVgwmxsaWX($WlA3GoA3CPojcuPmn!Th ztvAd_coR2@LSAh5VT0Z}3i@yD<;55S(#iGCkSW>lZ0PQ=R4RT2D&zytibP}ZGNj^M zB0#AOUeAIIn-NMlQnv0$IjW$w_F_nlL4jQRUXs*|P<#|#S9C(WSp>4XXOK)ACP-g% znm5^k5FXxhm^CWwdxJnfpO<}-qV$TebPDZjt-fpyW~^~^IPoFh z6u&+=m}KC?CT{hjF<3b|gS;%j)U{69+B#*jwF&y8&xpSEHz9ZaohmIE9YU7d^+$Ym zR?=kaAwYviM7iR@HoQ-m;@}P^w@$Dz0WuXi9Er+8>XvDEghha8cECiBmeeWH3xpXi3z1ydq;>W^K}O3Bgu%G5T^^bKF zflq>$|5|i^<+rnYd-fHtSHb!FkziPrayE@BPgQcwZW}GS@7j+#TXD#Aq}*|;rU?3R z=$#cEW%Grpmy>#h?f~{#$So5Q3H!qFv&n~`Rc1E`Go~0+j@l3bHE|WD!?AG2@A6QQ 
zsJpmjF+aUoA$P2>r9y_ibI)QKh|IG*VFsXWMuSx-%ra~(-PKw;x3zS#?#-J;qy|}z z<@a0P=DQ;K#V%%*4rTVu6VT)e0iKro+~o{m9(%&2itwYFj$$r4OCtDb9mgiOOwjK; zJ^5WoXFgYn#8PN2D-9-JYDEb((gJGSV;e;C`-bOaxZbylO7EMu${*dk0p-Mz+5`cr z#9a~J^}J95DdpOHy`XRM>GBPymd&)5kRVyRT}lXl3Atr$n+K3nzF<7UjInzKw9?8} z`!EgFGoCEkKCZYRtD)s1wjbiU>>zoHp+=|Mw=Y>!ZaCvB+0RKxXmWV~4Z#UByHm&+l$(BdI=4mZ(6 z2E*QeGovm!}m4%!g$}Cua$=aK@ zdhrbVT`F~+jsypVnUNFwYVEMXRLVBRukf{h^W_}e?T%H$1bww=-cGjs6qXFH9Zg|O z-salhglY4^s)Nhz^7nn_kqA`i7ydiK{u73ki$QeTC<>!m5OtoNGwdx9Xjod;PS&b0 z4RhB(`2Ume{rcTPH4%Y42AA zsiApup&eG3)}o!4Yi|np*5wu@^sx7*5Y2UHrG4e^L~93p$SzcnB2G_&^`tP(;wK)h z;|v=W;?nRTG)ZA*p4M^uMyL78tW=bo`}-_FBl;R<+FDP?#Rz<7Lm` z^>!veryaA${wYYSb0mrPxd4NNT{s>Wzx<)km~0#`$;ht+$eMfzn@|>@45@G{>(LHz zS%HVrHv&9T-gKm5mAx(K7yR&;)at(Wk*^8oYMcV^6rzcEw2>L^z5!Abk^(kFP+LUZ zzGPjEm^33qp}4cZ+K+@B$L7|xZJcj8f^-V25HVZao_%w9a#Njln1iCwO=RceW9*!N z_{811cfZBX7x42I9UDQ;+Ab1>`5DbebeY1`>)(D*U-^y*l!WdySLQndD&!0rv0NcK z1q*#AlBw$i{o==w;4<5s!kK#H*%^}4=|22czbHr-ke|jLM*xK})B+sv1s%lVynP%Z zkz^^bD?TQhZ~wcc3;5UW5Jl@|1wXCMA`X1f2SoJekRd3UV2(4-BBD{_m?(#}d;vdU z&iAv85rN(y7&jhknqxy^E~~{hFT~OGcj0g^5Tp(GhdNTWSi15r2Z~&DZQ3iI&B;I^ z3+LCFe7&EF53T|si^XRq)Gc!lg^npr;a)izoq{Dbktg@#B{J^o#>S|IZiXVc$&L_|OkiJ-& z0adB=I!nUs{*5S`{=63Gh-QHH1p6IU5h#wVXvl@umfzNy@E|<1gVX`B(n`egw$Z}%m zdRgoFD5S#(gZ)Qge>51A$Zy`dzuYeUCnQEd9`+KkarSLtifWn03HFBqELi*pZbVCL z+kqW?XnjR&KR(QG$A@QheCQWSVoFI}W-(y~L5esL%TC1(D&nTrBFq@z$duny9a3Hq zg_>Vj$j{#O|MDO!CU02+j4x{Bh`@1X9C0=lDgGP*8cDs$>iNzfe4QuyDH({RmK+Fi|vKP3-auADmsob7~l3^L}6eMAH_!iDow(^MAklbE-(By1p0@7Jx|}? 
z3JAXui1=M5#1uwK*g_R%KHg|Tgd2laA*$!#K6anNLBrJP?@1EVouKROVa0_-QnN$) zQDvg|7RZ5Co|TIbdM@L0XoA(*MiKoST4?n!!3#(P`?7UK{VQ_wm?>~EUKXand!pQQjtNj1)3<*)vJry5PiIX*+6DhPo2FyO+wC$zM)%gVu6DD+ zl$BeOz+rqUoje~c7Njh89sENcs^tGown0QcilZ>Loo$UGFpncyr+9)C3d0V^aNA`2 zohTYB*^KzJ0IeLF;B8nX9_K_6X@ryIMX$8lHi^OzdRc!|#)W+^V!Y!f>@^W+*b56s$*|X3 zf;2j(h(>?q3jp6Im{-eQUt?bv#qaQXL=`teTsvG`U91>C9Yf$#T_eb9Vq0y0l)EmJ zmXt=Y42@XFK%q;`yV*x#c2_9H*$JLzPY5ys@jRKZ937C3+nKgh$ko8lgjW*Y6>|>FAC-26j2*-u%&V@o(qIipeQ9449A*7g|yb;^u z!mOfV^x`uW_QP3Jkc-)0N6`2+H>F!yC@5JwFS#D;=>a;v+9vzb7s*RfobWBRuL}6( z>kOqMo4&-Z6495=<2MSw*91a74k~b6SYl}+_-nK*c*`%lv!XCebh^O#cRF9za6zi# zXKuSiLLB{p$6*B_4rz^SdrCkHNd9|jm?nEp6o#3%v|Mar6mzh6F%;mGfM(mLf_{8Z z&cv|r|A2j~v0A`G(X}G@)$MN568YX`*Nf`2@f*LxqQZXv!7LyyhobgmswC&Gz!D-b z^d~mfS-HZj4LE81cufvT4{Sh@a2HlgdVh&%x{Sr9m<u7-tTL4q2Ne7F#;i&d>dMxyaC6Anx~IWjQJ6AO z-iOa~G$iYXw|V>imwll}%4eNeGsSue`Z3CvTAOEd_7XMhlI~#w4$1wZF}y5t*%q(0 zyigKzUCNE@sQ`JU-OMZ0iU3cPP8B0{zo1`d$@;mNnMF(7UKi0%MIMi?c1BpA7Q8KX zqpjRhzWfzPi~%*u^y!6yoG0GeY8^w&nT2ysKeNRK3;Go)A$Pd*Tlu@&vPD;!+c_9| zoyM^ug|-A^pv!HMh)h-Mq^8y>)8z1JyC_sGo(ej8y9F8SG163iyFIm(Iaah*%(tW{ z8lKrSbry&8_e9WV58I!eVlTYFXE}2JU1Z;(8n~xl{VFi5QJpvC>wd3YC@OX3vySaj z0h%kJ0$G?xw4loF`9Jw&mrbt<4~gpc6fZhz8KSTfkXc1)@fZIUuh*$cD-H3i$9sm_ ztblUk>AaZyT8LJ_1Bl!EAqtKFr-%RfnvW{VlWnE|(&K!|mkY3fyRq|iP?#~{{jI{i z#P$%uPk8nizUaQfv@RdRjo5ezSsxhG(=rsM?u;>Pzz$I6sTCM=3lRvJ<1;nn=+Wak zYY5@tefwC8!hZec#F3=OOoRPiH1{nc=r;w?Vf%BS(07)LJV~|&imo4ejGAiy7R4_s zNi*>a*`(iPyW~9ROp1F~-P~G=^Y$jYR74UVsaSTTV(XFe#v>JL3D^~)(;43SWA+mP z)&+v9Fj8S&Y(-&jtKBIwLo*OrZ|MT`5~k0wak)OMD}RB-L=L~${&dNju`!}ju@j39 zI_OfTnJKzo565=QlP3P#qR^Ca`YHR10Q18G?^z&FEFCIA)>n92`94jh%vd{(y(Wy?jS9>I6d}9M# zq<8=*#Z=g@1gIjXd3Vcl-dRqET%Eh$W*;i-K1yCAYVFdm1NC)#uQ=P-&j*O4d6qNV zwaKQHHe3|?z&v9aoUwgG1PVbLTI{@oe`!Br*|x| zTfRY<&hZXCI%2(qC^Z{jP_RHU1SvH)qBBs$3Wfb#42{C@P1uBxf_C+ekz%7TooQY- z3Z?O=eu;i95>0mpQ!cgkLLiXGotEuO7x)wqoFN74Ap*@{!&8PzLEpKAc9U5t4A*z1 zZ58&-IyhUql${ZU9^Q&(?|(u#3a{>p3!yVC=_o+hl>%Yk((kIjqVy4!x-$!!w>K}h 
zp&`c~_o7u%EWlvV(1AnkmjU5Bi)XH9Lq3fJO#0VDKJMiic1)msw?-?BpV4i0DiGbS zu3b~2u@_(DYj+#=cn?F*B>lJBz8MlwO*VEOt`ww09BK;fW`U4ej%OxV&k*fwHVaTb>;AGDaB?kcLUWW*Sr69ic>S&_tp+Lu|&kBP)i;CSVg zsmpF|$mz^_POt@nK_q5ZX>#YXA$6GcWVr}4m7FRMNFxDdlxSd?F6V?z|fabTS-64Fl?Pqp>?oX^oUwq8X4St4zKywzS2 zD4w@Z9_+D87s|jgd~u1L^G%sIqMauKgMm$KX*8ZMNV~^jF#4j9mfB&U@-cod zM0W0SoqbOnM-PbkIej^H%9@|-K1(y=` zlT*SE;V%PpEYLYNK0p?tWMY>IGFbU?QePM1dWxGDw_gcTXzR{u`<=oxR`xd5Ps7ZW zy%Q2U+u2j>bbA6xc_QMu*1lk5@gGb-q$6XFFM7U+G%i_Nm0Ul;t_qL=x}j57yG77< zRE~jdl(1@Dv&LPSYHUi#!0`m8n7txM9SnU|bfnuxb;A6c!80vOK`Pvd$h1N$3GA zwmi1esiIJQTIV#Ir7(4(VuQ&!+bPIszd4VC+&%%{*+KA1?5J?NcoAKmD zg<`m&X1{<|{_ponc26<~zih7G~_v2+{XL z3l1$|s&2XLY#oJ>u`!QnBSI7y3|1rnWr18I;Iqg5flzDvecDw0v!c;aof}P#20<=;bvO~+PTZntU z>iL!Sv_SjLfTom{iNHAZiH?fM)8Ze5DK(BP*=dE{T3sI8^nE6ZI+e+hci0s^XaFq%oZt3Ra#nd0|-AXB!-gKt*dQGL3Uzm7Sv+nL>RHluJpN+P4nIR(FD6z6dlQQ z*>Zvkc9TfHaferN%)VEc#!*2D;txwxa0HS&>jkNRA>)(yl~6cnY|)7Z9g&^Z8nVP= z84-Sn-W6mh_^pjuTgZ<;T%yhh`MOcsm}UPG_G{~*2<8s#i{FEw722@Uu2Pt)o7afX zm?=b0Xr|0{P@Mf-B>zjH!P>N(ub96!OLTg4a>6tT>dzIV5=`RpTMw6NwFoqj$K4rr z=T!iJEimb9vqbULKN!o&ixnkoLn_h18fCZbqEIm}XOjE5 z==(tG!TuQ>cL6P;Efd}2*ItW@5>%;iQ?{kjSwqFIo%4=W6f!r@Yo1}NLuNYPQ1P5Za79gP(Ja zFUlE$N5;dxF9OvfUSt0oBFTuZB!>2HP7!h=x%?#GCPMgGl7DDf|_Z0}+@-i6QI-ED!qoVp%kEIZVa-K+) zf;^pkK8y~fL^*8Q%#fZB*z;i)1p>UEl}Ohvu*D+K+GO=YI~XDpJjh~HJlDQ@?b+&v zMJ1SfjR^Dst7Caw=p8V+-JUL=AAjS zyz|bSIde|v(PVA2gr5;b`vs|7E3B&7)U58l%~s?_uoJE@J<%+jdt+` zq@unT$u9F{0_5FQ)gIjwt+s1Kfe|T5j#d~R^#U<=N(5<7ZX9%MV0BQIiq6D@S(QhV zI#$9eak+44OU}f2(O*p^xUgT^MfR?+>O69&{oqH0LnUP(nmK&5C=@5g>`059K`;~BuuP;6H=rIOQHTkUrq%~ej!E7^Y>M%Ups+f^5m zIn3uGdD4lQ-6=vS4eeFjvIxU`?|yx4vcj+$S(;sBOC3i3RBLQ!>dBGBQ^C;lc`T1g>~aw_1tnq=ElWU;OUjG!hP}4M(?_BZq#@Si z=@II#u~q>}C-ZXj?f&bJ13BwKG-bE^#52?lDE2r@El(7xt6gz{RSA&+BB8Aag?0&s z+Ly>H-xI>r9{ar3k%~ifiGal)J@(-I?wLF7kEv)@3w74%(Htn_3eg-iPSPCu}S#q`5ha%BC*TwVm6R6GG@Jk@T1P3Qe(O1Y6 zq_avoHWpi!$KiYP-G{0JB#yf@STa^z;8si8{ukM+qEOrP^vU*`!eJ03_SYaQd_x31 zt2^VL>lHwOJ}2K2AZg^$Oe?_eh!7gGJagi{rR*?9iUgAx+30HI+Z~=Fy1TI+k 
z5-Id+MkzKcCJV#NIFtxyC`{RhX8vKRt?`IVG>bPYOtxWLZ`(a8&hxtz4*8AacAGur zN%3kimo?kdJx?}C`esK3sROnQgI>Y=J@IHu5`TK{5YMn3cheXaGiQiE$+*eDKC_)E z2!}b^@qbHz`bQId3+L=J0*U-NPRGeILf!K{L%u4~ByYFDBKY@bHQ$(EC3m=}p%(eF zZg8`3=rle(bI@JM5CulyEDyJN69i!-jr*@wlS-}A@PkcO>y*eN!@Oi|gB|qeWx9o1 z`N`BH9v5lVnf zB6(Bby8z+v)1F1-g6UNO)_9Rj5l_6>6REN{ofx_5;2L;z2eL~@$5Kh9NVyG-kI5k}128xZNj!Xs7r0?S2K}ljqgoT1t}|CZWnB# z5czO%YuSM%cArBSxt;yUR6#nvWBa~U5~L^W!Bm2qc{f)Hl35W`Pq}@mKPWJciM{^z zWnp5Av4n{aJ)tm)Vwj|2+5at3LM3F^<+~{;h1`Lu^sJVba%wqP6siSx;|kK*)L!RL zpf1ig)*ck3gS!{(w#6Pr+ANHD?G_BI7I}igrZ5+w5WysY;}U(Z;>N zz9PW;$@MF&P~pJr%gw|s74DI&g^QOj* zCblB`iU{Ei&)Q$rv2}93T_6%AB)2>eN!2hCt9zZ=OAJ&LWA?r~x#It@KuZKbje^_=}|i6hUaw- z8Gsp_b-d&-6B|hEi9RueB0?iEhZfjr!XJCb5qQ`YqEV%Z_#Cs5sRxQV<+V~_y;}*{ zsea56cWb#F)ha~wizjnAF>X%?hQ=Bfn;>25{hol~p#A?df?+`9;Ax>_BOV@7&jfva zdq)(wDdk+w{_PoX2s0Z_?NZ`2n(L#be4AY%0*Ue7oN1#xR!d|_vI2ov8I!O(6$`WR z{Pa?6m2VcL?T_zXY=2XjI%Y7A;PQpMt|cAD)m?tcXBPXn0PbQ@`c$ zyojxA9b2rw2oxL7&ndHE0+iFSs>J3g%!6H3o7ghga!;3+U@yEzAavB>hQqD)nx~cX z9rT5gf-r$RUv63}?6k{5zRxV3WTz`kfj{QYz9c|Kc*KkDDvlARP$mU#H`!Q^X1Ek^ z;n8mM=wlCU>27bb8bP{i5N&ht z)Fm3F>YjeL!oJ7~o>kO37yS%sm@pzgYxqUmVuM7a5H>xTHdKH!Is~qMn)6 zRW;W}3i25n+}nPsAkCSBSq6-~*ivCvsf;#b(XwtEj=|ckR&f-72pV7O0)cUs6Vqw_ z-FDbBqwz6#l3~YEIKKk}(0>!A^<`Tqk%>WVl+3PxL``(PxCO6V`nAhMgx#Q;V^;}M zZniEM&-b{aduvYA&$6_i6GuHM^6)I{D-^~uM|SMu2Z#`c-Gy&ftgsScb%gF*PEJK) z)Hz|l5)M~Y(0St=iJR7u#BiP>;D@F@r6Jl`>tSQnxq#c4`NXU)Pft_eEw~D6vH*SiIm8`eF zibAdUV3gSV0_uJVnz#YP0)K~{eOj5G>R>bOFU2eEp-C90;%wmB7~td zE>>XA2&(>gY}~OB@r;hLxegS zMaPyoJ{mqR0>x&sU@f+Dg#uGAH@vdPE)))JBv;(M1<6TzIl~oJPHoXEGS^0lK*OVd zHQVz1K|b(1FoM3cCvMS!8e1t43K?FC@OYOnIngv_@-ATQPQV|{)JQ+xMxTf3$%ltIA* zqQGN39o3;CdtQ)X&c}(X&Cv@YdDFiIe++w}_$LtN0@l`rk!6@M_yg9UB1PhRRRjf%U8l) zalmKz0lR2KD23_`=#^&I35MmvxIErolxk`DB2lSufBcBw?+?5)w*r})RSEKO7(K#j zJ%*zsb^fBY9{dl&mJOz(yll zH0BCfb>YNto6aZgDiLAs_?``Rt->@CYc94pqlIX}Wb-CGI$7K!F;kseY*j*`Upb7x z&3DY^^rUaA;xHJ?kfaCv`c}d+Q^eqokzY%QNF(%mvtps$o607$fVKC1!r_f&cI4CcYY|v-L=pzs-_!MP 
zY@^szMTF{<#AEnBEwWW2X=q9r1eFbLpG9{7n?(;T8|j4dCRn>@aKT<6VK4MN*g8Ae z+7f9YZjSDJDhe3q#Pc%bNK;Z1zpXfVau+8$Y zuDd+0Fhw8Pji~%Nfl!Uxxke;Ar00!-L>uVBD&K};`=JQx@CO>~3We!n+Buys))-G4 ztTi8c$nF(^%n!Ca|Cp^7(8sfNPO^1-(nxOQer> zy?you2$asCu^%cNdU-4>z;L2znr4x+_Mhn-V5Vw=FW3HldYwx#PLS7mkfn&@!V2W_YeN1w`YBHO@=1ZjdS9WnM3 zfVp02X}u7M53#`>`+Nh7!0QFPEigs^WsE3ctVk7bLIF4Dt5T_woWeJvrz;9o$U)#o zProGyGei3IxA#0MgWf}j=&~^(CH*y$1>p^XULEWUwas!RjGIIc#d3Xm_rZ2FrZPmM z9vwT;?auLhBBvp{ti&NUh;>zbgCGnfo2T0rg{d)bjLZ?VTf3))g|ti4>=lQaq=`M} zMv`mJsBcTw?+sAzP+LW$kjvY!Ms^6#RWfoV6D`wXcDo1^**C4fWhxA((r2<70eWA? zMldqDh-su=B<9RpZnXmnhR&aa?D@FEWE*a1R@tY-UKOE8XkbyDI0|9~>6NG4Bw! z7yQxVDA@c-VP@I%cs9OLPag-Owh}AZSptCvW^IM?U7RalU;&mF#%!gWbD! z+TAG%{=L$XpJj_ezL5gZG|z?hmRAMYL0@F^oXFC21r58J|9X2=MCy3r;YV$?!s`1- zml#pwdJ(82>l-=A*d<8sc2?9xyMgw|GQ&=LUGy;gf>C!qXQz*crs21N*XV2kS45GM zhpHZ?*6G{-RmEzaan}&?t!9^t$T&bKg0(Ek)$Nxe(T=G1vnH4;6bj`YZD9=W0rr4s z@XNc%Ho{i<18j0wnl^Z3V*e^R2Yp(QQX`LhGJ2-j>N$U+G#5)v_L3)IN#eF@+39>J zA{84nXh<*nC-u8gq>)}oekUZdt5>x1tg&rZ*tw%kgxP0GI>k#lyY!mZAw2 z&9*6-js4YAqcw<#^nXII@>1 zy(mEWug-m?njLIs>cPB%{d+jHIW9^lC~e@S{jKk<;ps?#JV+o!4nyVJ1`CG{h!q!e zHZfcTvX30);r7}g>pgAsCOV8{YQ>i0PnS#rBv0H8{M$)+?yn z@!A$|N8~Y;&9RPXBp7YCMt=;KR$R0Z2rQy#nQupg;fM)bYrrl+`eMMqK6dnDNoC03 zBacHUZ>QZB9!uo!Tf&jm7aT@61b5h11z{9X?0oy0$8lv`3+GD(Ltp4Pk-KSSow5EkCry3Se@hTDvIVRZENhA`uyqfVaox`b3rGZzB>DZoh9H%m4{dD+gsJ;YzH zn+oR$c)8p77&co!Q9@G`6=dhwb;6;yMkZt~{RR{ghoT%Dhr{6(1tUz>s1o@AyNo}*w zi%zas8!bW0^b&{3oy(=8G(?arqfvx-$Hw=>%dx{=o7Mw$#Td5_g4FgJGz)TUlSk4I znC-PTkHmHY2YrtVg(k;9biN%BhS%iAS$0fesunFq9?=#hqQE6ty}P8^{-y+BJvt#= zhMa&tia*s=IVaM&pKU++1<82oOx&pr*?4< zahNFKD@b`Ls4)!h7EMgBfMZqtpxx__!SS^^Ny@dEqJ)OH5~1%}e}WCML>oKoSrPR0 zWv`^yo{#A(B88Hko5@YKy$r0c+yxDOI;U=qEP<`IZ#jjcy64Y2wsJ|JP^n{;b8Ng~ z;f(o&H;i#`iwHzs%eLrN50~SGEW;D>S=Kh9!5$GIFr0SrKzmX!jM{N!nfdWDJ1R;T z2#2fjR(sknL(X~R_F08_7(s@v&wWFX;l)h}L`y$#lwcp7Z$mvP{7=Ly*PVhr1DE}y z<%kf9VYP;JT48#>95)bF?GJNjMYHd0!NBBDITjZ6#avam?~-w{U1biR)bhr((lTisC`XktnF6|!X9V*ylG8BA?xz|7^e{S 
z?-b?Oqw9|CoNu4V2$@|ycAR}xVaj=VJ0r5ruJx$nTe$SDFc}Xk$m9!hlP8Sj;`6ZF z$_1$+;_3-88hW-r$cd*tpm5+JGJCMX9u_53FSAIdU0eKdM!~^t&o|gBDSV4Wj^D@( z*%LpxfEJA~%`CSgI_;735&Mx6!61Vj=M8;@7_h9JCRkj6Y7OXfaUZmUJ{b>j#U+yq z?9&2)nPN_Bi|n%^keI?oA&5THVJ=5BD35(pFnldtI&T+WrjnIsU}CohqiiO}?Zk48NmV@(16%&-DOYZ^ts(}_`}7)UuL@E#CIcOZw%bP@-#u@c9PpksAtZlgLu8!ohk{|4j*;oO zYdo5E7>y^u@g6-&VmyY{#8h%!gtjTdr@DCoIskK16fXa9mB!|Y0)vwZGIfx!#36oS zuu))Z1>sjWRgTPL5n1a5ol-~B=sqXF(zue?gbDUr&*7a-tz4M5LxK!}OsqE-Sf@~! z$r<|SN%JE8-5&@R(d;9^P-iYD)!8Sqz@Y^v&;*|pq-QTl>t*LCO#PCz%dkW)dmCd# z2_q*1Rf@@Wwa_ls*fWA*U}eX1%j8Sq zw<1vhn><~LZnS4b2<##?DYF-a;jbSe+HRK2rg1_jvvqfg5~_C+ z`OH0naCdDfez+!k+$#;y-e{3vVCUvUQHh))FLBa1Ot9o8A|c#zCq?UkMx7bj<06GA zu(U_YXTKAXf`czR`@O?s3-`QLQE9IV(rwwXbgnV~Qz$g+m8E!PvJX79oY(A~YG)@- z#SbsY;QGjUB9OU0M|>8}7Yq%7rXDY?eLwXCSJsf|UMfOpM>a&eY?S9fzhHXHMtcsC z9vt%fX1hrgH^qg!ZC1f;;X z1`TCycnXH5*1dmLN=HPI=8^u)@J*34pCuA9mzm-=}8_h&k~&?qBBds zSeuK#SL+fX6meo(OZSeIcFdDd#4wBo<=8(&pwZD@v9IM3_F=w-*XR;q-s8!54$H7B zJhr1%*Jt83TsVBl*xp4Z3+UhLMAc|3=1q?Ygz~dvCGswOUy8zxNuT+LLgI5je>_UUliD5LeAE12JJCnnu#0J#ps=WAs!jvA%W`{pmKwl5c$j9ub zo(yAuHd5ix(ARJ#bw>)0<%~a5kn+Oyt};sqlPN=qTN48G`*U;Ibl^hwz+U@}NR&gH zUU#(}5(;&W4q~e8c~M~S#QIu$NnuJZ%45~eB|uLci6t@m)@JXD;+0*?6fmFwOyTKT zHVzR8dE=8qEAyYFC}_oQW+gFP6ph@6A{#F&>=z<%mK43wZO_fb?REoKvfAt}MFi<4 zOYJ7q!#{RlYH`JCy)5w*lO_%fk;XRIMA50u$(kms@bHP|9agPy7_CEb7gAymi9l6{ z=0<92iv$Dj2y*AYxWj5Z0qr)4RnjKUfcHt3?s5I%Pjo-naBSLgU5;rLC49stqg=kBUm1gQp=TrB9sA;4NcD8^|FSU!K8^z6bfrz9Z zhk8uDT`B}`s0mdf6CWk$18du%d3Li$?`z7_ivOq}xznim_Le~C%S8S}#C2yEh1mTy z`4jCtfsm8zY}q%3sYsCJp=)A;MBxM;bww$1Yzr$uyHjMc9am7QzRR}^C(t}s@)^ys zxColqn`X;`P-I8Q9ubj>V8{C1MO;)8qz$oVW3MX=*ArmGzEcbcd~r}vXIUS^t8p%n zEzIaa^K~nhez>e=36aPeU8#=x5?drlb|cX+vL*g_RuMjH`6SdhhS_Wm*sVdB+Gs_< zLF8ilKm;-)!8k-bRsxKgrAh7}cU0K9qL6?Uw=654?+LO>OPK!Se5U$&0`42FW#nBa zs1jJ9upVf!TSN#oOpo!EyeEY@*)F$f{(N~BCxY2lD@b*C9kXnw1FW_c*@predzFu( z_{YM;&MbujZ{5>3b>npFT*2_pU~stp_&~#i_zn7aAfAoo9sRTUWn|DT3Vpqqj)

mk5kU@y`l^;L4-ED{CD zA`ly(_XH^jM)2P$>#Bbjg$C%@USelW4vCNMo-Nk}Ulydsx)h^-`?|;T^z{f9e;^2t zg$X`rmpG8k3%go?ndpYwZIptcl{&Wbh0PF#A@bNaugI1-j3h6ZE)(Qkp>NO(X9bTF zIIH(GXyeQ{u*E(#g*2fXxz32oLe?D#(NxXKBl4d-!;|1|os3w9kjos=wCclpqW0t1 zP~4l`)+|e0HHr#TFnUwCr^PzHNHohk9K}W9!17gzGV4$j)y~Le|MI*54AZ)D+t$h? z-()YP@{?mb9WK7%4`t&`#lBd^LuA-lv8=OVrt~LVH%p=xTc9 z1~f3xfDyqP`60Y4WCdp$30lVkQ>L23~xOd(r9 zZK~QX@-#1Nd$`{EI3$l@7=OJ^FjPs4G8AxT*e^WEiMq`v>ZGVs|UB299vdhv;h z>GqC8SUkDD+J5pYVndU1pSu_xj$WdWo-V-rg5Bbf^qo`JS-K!?1{^=kW;)uxKrVJ# zg=poB74Ih&PZ_3^|r@h*0TY@*WU$0 zBaCCsVE-15x?-0E;Z^M7+w6z;L8R5L9(lF>SYeeW6QEmkoRa30uF8!r*B>BC$dol- zv)z@#jP*h*6%L&j$CnXZGs((Dqm1q?kA*PpqzYnX<9Itm_16 zG~HWZQ|6fr9Gqs)C>)q5#Kfe%APnPORU0D_f3^Kl6zZDXRu@eo`n6>0vEp1r+;0j~ zcqV%0_PP53q3|(Wbo_!~=;7z8vB^EthKa)Dqn&1?fZlF*rV{BSchE)lD^ElfisL!O zl63r9q|hS63UW&surZq@8YMqB?YVjOz5u-P)=QBu=mvi%BJ4;qf&}stzXpc)o;xLM zbv`FTG%{Eoqa3=*&JqRwIE&aRGvds^7l;(jPvyA(~Tg0 zv2OCQ+d+@{(>oFV_y>tjXMrQ{5<}1Q5Yiq`}DFq6b=nWOCzphHyHB_n6_(~4sslFBBJxSBEir) zm`7%*91*mCsBej!gV!a)9qsi>7_}O^Vj8Z&Hj1Kw(%2G7<{OhumA1>X*V6ITh4z9& zttVgEX0$e76#(uzgN)o9*mp%BHoCwyN&rq};x-C1TzG}i>%PNdbK?H_^ zOXte%ovUVauAF9H5j_-#FU8}tz==FxTVtX@1h>T=fw?Du)LvPF((*sA*YOKM&C&&;) zcRps9c-+aFxw;i`x5puEDX>fj(J^+GK&WVRHLb~VMF}$pE<3Rp%{A(^qJ_#|!!6D& zsfVtOR-M)&ie4?@^z5?r&O2|Q8alqaOa?l{5$aI_Fk7pzjHd$dG-I#{Q zRK}LQ_JD{CQI^j*gJvU^ZI6gVF@+s9`Sz%Q<_MYk3_k6Vq0Y2=(HTowY|o1Auuy2E zWaE50E=Wno7q)b7o`0;h^;q?Tfk@3GNvkd%|5>S=<2@`|gVom$MP*BI+ckE{%u@;C zQ=)Z})@gl2rR40$-9|s5(1|h~KB>UQisFNbgZG5pEdov0-TJ6CD(r1gUoVS)C{Vs8 zA{EBZc^~_mKQ2d4%+K}qcM&KdI^CMX0hfKE8cOIR*@U#u2vhqbGhS&pGSmJ~FfhP* zBFp6;i}0N@&gK2lPMD31?v+c^b895X9-exj&)`epVT1@2DUCgSW*TjrC}Ct?i2x#! 
zJ{OBdLnL%$Y|DiLQ(P0vw#}(05N6AMXOp#w24}@(C?w>uc0AUTMRe`(eIe@c{BkKE z*pFrbL({T{#9qD)6omwIp}b#L3DRy8-EB++-R+|)BY(61z|_N#6C3}ASR~jB+)Jsq<8xdNV8!IlMd`IbJaO3z2gyC)+X= z4prw{vzpKCTv2?$M$S(00d5bANKuDdU#vP@Z)+XW$GS+_AxMUAH!iTF{&-|Np3mH3 zY5trs_f#n*1?X6n*cU{JnAK4Fy2p1;UEewNQB0-T4@D9C9D~?}0-^C3C)GAwSYJ7* zt5tFXb*|lzN`lwBd@B`(;mrM2rTcf=EI~Nz+pC`~RG6YmF%^?<^`20Q-JIVw3zD@C zTy^S$viY+jQ96#M*umu5Nyjw1HplkKdBjr`QmuJ3#uo(10aG(;aYMC~CjGvr=&IVw ze$s{uh8FIex;#>;S!_3X8gk7oNZ8GSpmFJ9Q>Mn!h7Us7xslCm-o z&nmTdM53mM1lJ=Ac~3BW&m#{qv%`|y{_g?kyl=9;9a-T1u?Vo9)3H8gBLsScS&MqC z?O}FruU)UGzvf5?+kmV09~L3V>+nDUBCS;|?32ym5< zCD=+&oEwj2aj5t}>RB#S*g}V0l}o!S=i4HOk;%$NTO|m~T~lXuO^t4-H;4jPG>$bY zOfmawQ0e=?pA46)?IVAZV^-u&>fbLd2&wZrwqk$7fjryU4+Ns=fvb;pyHKdyC}5$( zV=Fevw@``0=+JeY-75$S>2Ve_l>(u`^I{Wmt|07_o6Htt%W8$K6EVDNENt@mem08I zGxmc=hwUPT7A?q3=kRQ&2vIpK(%Mt8lVX|d)DMXe3g&3B*xnEhRd}bd$=>#dnNbq< zK`JdC!?7duP)J*nkf7WCPmuP-G`Rg-VZ9L<>~RvZmo`)s*g$umTag0cqaBUS3gki& zEij(b(SlKfkxCR;>73eNV+Cnyw(U%Zw|REV>@9X@Dm$H3N9y@iqN)F7L}y7?Wvw-e z5~_;EL7qMBDHw{A;``?D0gfo7&_S2_{h(670ixwcIC*&2A$q-2**X6^mESl{PwjgT zlR9(;l7txhW&5E>p~r?ww>~evy%zz! 
z8Z%gr$qHq&-7G4@^D1^x&9+NKnBhRC0BOzRf)WNv)na|JZDz7{S#&{&TWrxzK1x7( zGRfcw^RUtf9Trxba^HbR;;5Vdsu-9q+1FHL{}c$kj6!V1)dY`?;kMB}x0u*4)TJhD zUl#Uv3#K@vN+rNDNy7R#4lY>A?3cofNauZ6d zR1o$iaXrUYI@n)>LG*P3WcJ)tGTMt4UqUZ?dO^dEqfm zgW;#7P#9e4T%cjQbL?a3m%-0l!l5DGtXO1UT@qkf5Gb54NcWGAW!bF?hc|Tu26eL0 zaE(ae;aF*o-Qf?jIMSW3e8Et7cV+wWSsUzEo&etE&$3dG%IC;u?-BtRFO8&>8-%D9 zw}PaNWm`Qq(Yb48p*=1HTgk>9_JqQrlxV&6m^~>fNTj2ujCl0-o^-_ctL-I) zshbpRnO#nJ6xNaKw53E*@Q}X!tW;r=MO%nXHdQc;mBNB@oXk$L4W2r)W0Uu0wpES`5W@&D2z`u0z=gu;QD{Mf#EHaUeyah=IFr!c1r>DDGJ zW|~(;2F~W$0jI$2+l}*4WU$vofr&w^ZU3H%jbudI?CeKF4y4a4z~3=0G({nO{~`VD zn+nSgVS5l0Mh#hImy68n9o?GB#`rYT>uAwvYcfjis!Z-&g!>E|DSdxs37nEg#&Gk zf^ZOBxQYA{v4q>=$x-Ljy@00#LjxDZ_t)5K9?c0#JEDf8f)vHYDdv{3MRDa* z7AC<)qI%N4wThT9g^Y`{Am-ckU6J5RZrr%Q_fx@8kytwN*mN70dL}bm!nZ1s=qp~% z2ENI7*YPyboF9BB&Gj@_;=7q8R*;ULA3Web|3ab+KnsDIihEf*%c0vRgxpavAS3w%<#L|uSnKgt}f^;U<+vS4( z1=r1A30KF67KQ}=J#;T1LzGY$DuG*VqA+=nnrQbc2v-=N?`X7{sf3av+6G}NaCkG{ z*aiX5w~6lGX?sM0nMhxLmHkUlHOyo8YoB~9L>8Cqn_h@JI6+!VM&n}{iH@!L_H~iG z4KVU$-x1aa$SpiIa^h{dp3TwLc_;SHw0i_;Cl;r5yijenMg$Gh%6bgeaLde|6a_ZY zOLO>GKP?mrDVBA-mpwKcmDmz{Rfrm}P?p&3eZlZe=BuMiqOuk@*ZwU!J^1?Ikcu}i)iQ?% zw?;1H8+&4_dFkfZA%Bt^qcQu7Kqx!8V;cH)_Vsm8;Pl#|H`uoorXd)xQ>Iv7AzBWT zTeuBo7QI&lc!&%W%@TlzX#2IqmI{(hys+5*;OUB`3HT>Z7uo*LwD$yk5!2k3a+35f z(J2ghzpTK|w9{%s{z<$NU|jrkhbXeY-B@Ag3sT%Y=!z}y@CeMtE>qZt99r<$@>uB) zqCU)~)tUrD?_Mcs)P7;g)u}zw&MNCpCCG`F!>}+V9c^1;XRQz6@mCMCa}=hnP(w%9 z@lBy%7%P#FIaO?QX%hQ^$a1?~tBhhkd z;ticA7^-z>8Vl1M_JAlp@O<~IqYPW^+2rEkxX7MLrIHDd=9=Wz8Oi2Wd*);LRpDxA zhaC__uSuOIQ3Pu*ivYK>w#7Q`Umky50h&6J(|wT2BGO{sY9ERMOVU4(B^fr*Qb({e z>Y(c_)wZ-UY)kD-5ko)VXCu%03x^@km8-b)Xoon74_l8;-1Q>SNZ-4(AI45^5`t%j zD<6IQgx)0reO27iY6}z&>lUPN1uVqbDl8Y#hX;4NIh*V7M+Rd|>!8B$K%=*CgK0MM z+;>Hy1Gy`aZl`SkP&)man``F^(Lj_w9E+PjO2Hct-~USRduM!;Shp*?NBtgMY3Mnd ztxy`cDTX^-S@YBbp2>+hP2J1eZLJ{n$wY>_-lHOE$xeGh5MD6y#QOMvP-smCd8xfB z9ERg5!#bQWoD?}oD~6)yC38cprvE1@bxe;TG&-yPR8GZ(Y#V+gtj^Q9BS-i+8mzI4 zM5OAxs?7|Qp@P)olmSZ{k;(-ZL~+X$)?0=ghqr9875F1GPqMa#1-H!K#NvFw!3POw2Fsuss4MTX3v{$2!n 
zCNGBSkp01*KpY|SZkt(4b$SB0ykV^UPe^Z#-j0>Y)@@NU*Vs9mpsVD%R-7Z5IXv0C z`Q&4}ZIB>^kO%tIqk7V6K+j2ol87B25scklk zKq=i@R@jRQ>urt}Is7)*dmi1V-vIlw!Xc~cJJts1QleUE^iW!_EhJ&=OX^?6op>Cj zqL445FQB$WU#mL%g=p|GGS2K+M2+Gikhx?C>_u}0X_^V8h&9Uu;Bnuh1y-Rj-B^N* z9q(U`a`R>9{b`}VU@>B%cLZT3GLOS~-~zc$`cOoAr*q}%&Xs#QS1z@0G(e$kF{8P? zqp{R}C`{3p@Ml*DP+Z6MGTBrTqJU&=D>w9QtRQ*Ow#Z0o4!2vTQS%et)(KJ-dSwH9mvtg&`q;T7a=X}^+_}_tcrKKbHlV+?Ids?B#|2&N zs~Ho4j;wL8MXj*!HJ;)z*(fVBHd>T0mLmDb&BD}|Lxe1wB0y93GlAAht?3Stq3 zq*&`%Yp;6JV^ddKr@}PMI2@X3esl=Cy!7_PrVtCf9z!tS5~K{&0qlH*RomJn$=a2X zI@b#8Eh5EWVrIS#^oQSWt2oIq%asm~&uZ(gUT4LEdfD6_P_^ABidtAQjv1`x9uk3> zolm0l4Am?$bWkI*=0;or>B2Y*pY0-1i}H?|Lc|gK9K!ZU#`~Gg#KLwzUWK1397ft0 zWQxNCHA*&MW5cfXgx!zKvMCDtqJz~{^qTN;+8oit+|A`E9D?LZ{cWD8WJ|(!=IRFp zX&e~9A53m(bhU7~h&0Y5y&3BSsD89BlSZ!9iUOCpY!RCjhOI&(3T%%MnPVeUS1a}l zhGB4F-ys*-YmWC<73^QWZ>GH~90qrEJF>#g*m{aWQF2iJL#LqY8_7gh*uYepBTPkb6^uiJvsyy7 zD40f92WHrXqL3}VPr0rtpoVk{n#*NH>qZe7B%P}^*xerPp1#X66ej1A80U{RK`6}I zSY^z_kyG3XL<<97(y9_cib9`#o82c6hHK~4hnZ|;Rd0@nbVH;;#NmW(79rGTQe4ZY z?cmTX*&Q`~-G5|Ki?n@;|8+eH=7NlJ>WWwh$ zkCW&IwcH58vMk=@TdN{9-37g@M*B@Fr?I8k)+HF&OxLS+JcYSqyVm|D?5xx+PS)+V z&$ga=;&urN&l0A*>=>4a>>Nj#m^Uc1^BhsH@mJapgs3z_em6aRp~LREyWU#5#$tjr z=QZPRwQPk0SFv>DLlGu!7$?6ED2^IowLHi$mWaRr2tKj)Rc0h6+BVT>pz>IDuI&)u z>2CBU|ERFnw_|gzoz->xMaB zAz#9|=%j%oMSIR4K|}fxJ19W?5sr6O?6#LZE>{O|NrBML#T>BgNe>j}!N@KF7NuD} zz2%8gpThS<3Ezo>wM-|Z;-mg|QB{yES1z;!6{KJtK_@8OKHa=Sobk{#H}N_wyL z77QuiW8>6|;SvwEXln@Vym-gOgP(Im8cBU)(l ztK}gvUuoz;1j*ravRi~u`AFuo!~P-)O^tkMr+p;AZP|l0s+~T5`gF4j{PZp&s6b>y zV6T1GA@ZVmh}?c67>bptBD+pF%s<1JuyPA@@;X*TxJOT$$?_(l&;+c2v8}+3&zNWw zf})j-kmU)&na-)~Q3wAggZ0k|P>bO5F}Y)zU9mf)jhC>C86!Y19Bp52vlJ#z-U}Rt zSdCyP7C+y0k*Sb%qJ=ieNW^ncFMrA(I@(%seEBx}QacG?Bt3?v>Qw@W6Z`$tDitJe zjLGSMah+gj1+E{juomHvZ(hf`eCF}rhyr7y#tpM)6!xW})NOI%$ZANQh^p*};%KS~ z*p22$*AW6-?0~!@q(;)s{8jch!B7awjTk($GxtEDkbyYUy-MLw`?2waT`x#Qki6}+ zQJ$`(1LISvbX=aaY{8Hz4myx4Tb?M?4M|vWE=R-p{($cJaMU)iN;wAVuncbimd+VkK?X97wJ6eP5%>xO7QaReQtVy;_#+#_Dz!TpUI0bUG-#i&S!v`&Q3Qt_;E 
z+bigA`!e1Gtr*(_HH_=-*{hGu+1tH#y>%#Q=yood@3P+r>-wN>vuusjG1y~9(xgv6 zPE;thAbldApD&3(smE4S9Btofqf_x2e92;#DMDDG;op)g|1m4^G}sf*?`UVQ)gXcf zOUG^`JKKe*er6e~rq=`lYr{tjA8N-uk}GK?_LhU9(scF-0<>d~nTyT#y(a=CsEtYO zrLQPjz_4=;)|T;E(@#X7j%6SolM-B%-(pvZ9;(XiCeChd6M?SioHfnvQ8@HkG~z3) zRFv>uav~HgqTMH2Xze88R$04fwEu}ni*W%%8_lzyOr_b@lHAs)YgXS7k+K3FjDJrz5G+~)vnr)kCRGin2m(5=B z>}17(`(;)3FA=Co_aY2H?+a@tO^EGdWKQevz_X3d(H7FvcNn+J_o>BsMgM(wKT zSB7kLF{{BJ>E5#P*y473MbLX+vw@V`_S&nWhAvDS(tofeh2bYJ##zGWpE?!Ev3=am zNnx%R7Tec^sc=Clf<10S$L(BE^oq4^?9ob?X+uRM&yoar{#qams~pt0itIrVsA#fo zk7RAT1bH8InS|T7JMf7d9lQR{lhGcWvfEdF6OsX=?EyQcHK=Oa#iG-M9aBf!B?{9! z$V)KK(oZP#PJD_CaoR{x;OpS(Lyy=5fiP2XuwI1g=RkrpwwAKqHqo=o%cZ_nBt%Vh zD+3oy!LDbCCz*_Nl3Tz+@G=;O7xNV+FWIEPBF+nfw1O;oAer1^Z-_wgoLyjIG7QtF zA@Sy3^kch4L7qO^`f&G_?KVvi#&NZlZ?ha+o5;$xhXq1+X2}KQvJ}3mVBacxQ8+XL zhWWAUWv`18_1LMqElVw#dJI=X=$Sp!Qy`M~O9i41LpWM*w+WJEkK$6D-RlXW`y#uo zMUZAl6vnJuVd{-3Y-#Rqv#Q@h@Oe|Jh)t~3R*9w=O`Aun6$(|lp_C8A3&QHP`Fw0Y za4;#xxAe4U!*iLD_~I-fial1h!TNf5$e@1qQ-xul<&%`gZxjsKqXM?X_6djaIaC?h z0TJjKmY_^C%&RSS#_vw0!9FglvagE}YB7$TbfKgoKNgL0n1?HwhX*=5yha)rcBLS- zcxf@dX>%MDOkkP0NFaQWkn!wcmJWv{wW7nAd8ljr{5Lu z_nkeIR>J7!f93a3DR&|pWc!|j1IzJlc(DM(Eq#jhS1{xp8FOi}m?$*niJEnmuP_zk z)qA_O)n*EYIeyr1+v*9qn`f^n>@{I+Ncme4Bv;r;&w!0z0`1#>2!jg-(D0Oyi+@IaLHI#c;{DdH#3?r?N@mzQa2Z3yCz9AIa`oMa1w^Sf) zu&8=7nyaEKr7kZnMG&vTxhJ+a_NgA zl7_iYH)Osh$oROX$S(94Rwue{Gfa>s!zEn8MmfN(PmbLr5O_d3$92zNh@j4xS`$s! 
zkJz0~p-GbUtvctq%PCz`D{&9gg-kdhN*E#7;Td4-ghRGiu41OH7eUQdvDl8M5+7WF z*4zo<(8B$D^|kLG3Q30y9AZE7aAH!a-KuaH^Cw$YyuEd`-6=|VsZkXuiL`w4MPsaB z=|YPCLS+5!vbiUh&$lu`+F`PUMGFL?VaE<$cUWwVD8ABcd?49~9DE+X_JZgzpT#}= zBMOIJP1bITG>u#BxQLJjI0E zg2{Hq;ZPQL-Q`ZLuORJ{6K%0>5(>?HeJQ(+bYb}4x12jDS)Q~2Ezu&ICj?(oTP%{7 zuQg7f^0+%d()F|mv=WyY?RN?@DZH@T4thF<3Mvdb@DqVTWoV;gL)Ue|vp6pkYqLEN}U!o8Sv!Y=Erm{6y2lbE0Sr|>nU zXh#hY4x^Vn6mmpPzAqDvLdzL?cPLEfpy3t6+ETF~^`@F!7GgE=E00&FGyNVG{MgHY zZ7k~)O|wA(4w$SvmBTIBagJ1ud33RSG}#Fu$~(Du!O5ydJN+eK7>;s}f?VJmqJ#=f zV8v=T2zv$gL{(_FTT^K!b!^VCJYg!-FD=dPRhR%y`ywy z?0kDph)g=RXW3U@4w<}DyXl>Ey8{fZ!m^Z0*B2-rCMW9V28X^EJ0{;NRm(mp%wB4z zjraUcuCI9K!D%){ke){Lv%>Coz{YC5P4k3U7DMmfW(r1I!v^0Ph7BUS<2psbE?Y46p9;L zP|D|;h2(w_$Pt5iynprwhqTT(oKv3^45N)>qL}^bW4NGvvVHm0@N}}aO&TJXJLKt{ zqDgkEV5r+jPNU;C!Q*vG#jKwYq;<<;nMf?=IO4Po;oMxIo^QsAV5!wsix?(s)HCyC z8M0XY~N(^ULjz4&ermp7xp`rJ>e|aX?gkQ2&AUv|_x&;*-(CepZCQ9S$ps z^Y?Acu-}VD*`?Z-9~N*$kl?E_l5iq;dDW9~KwoCZ1mHh2j{n^BB70BRI}*{kc0vPQ zg9IO)D`(lI9zMQ#v5it#%_&XFHrpWxi)>7{Ad5lwT7)phweH7+TSOtJWLu@3{(5-& z4@=R1SZrSu4DE=$KfIr(@TeG;?8kX>hRneJeXP_WVry*G+iXFa@q3r{MYnB<5S;L8 zMv@X+DuOztkULecdfMW0xs5$46w1A>fXVc)!hz|`L}4z6ke_htY$Nz#Y&+uZ zjIfnLu<*(UnClzp8m+p zNIl^HAC_{36YUU&@DUMdHd)`Z*8~E$!(?>zPr|-uN1IvNCA@C~--H@!d3@m` zC#t5}a8cko!L9l63Tv=p$w|U!D@i4Ht2)y~3B^p*5ymQiysL7zH7g81haWq<(RK^a z!|5n?6x-`UG}Y+zi5e$w2&y2>2J&Y3#V!avb2e6uGRy5JB9VsaN;a@tY_JGqKRJ%U z%R2<%<7%!Zs=9H<8n`&BwjyDwg~eIBUtui(8fzl6QgtXC$*3r5M@@N=s=ylB}8u_l3c_8qxOME^SMH6>D$LbwAARBCtK}1B7_cSmxUum z>^F)Os)5)t*RFO8?i0N zws&ob^%gMWd5aZ0k#FD*>NA_xZnya&I!BU%udwZ&l@74i&9kQjqh95G ze%a$_kvA9bwl@R=Q}pU;J0=`5(?w`LweO`;1R^c>--@~<-3N&af}o25XR`>#Hd7sAn}mDZAtSYrGrY*)mvCPwoMMo zOS!9+6zE|!5*EJil&-2Bku<2<{v!&!ai@YSfM>l0qQzlBQ}%ZS_3=DqhY#)8zMdv> zbX94W3z7kXfkc7bB}Cn+1}ADvd-Fta9=1uh(q0pV24$>9Ry6+fu@n!n9p%gP_Niwy z2XnCUX;G*nn$+{`3j(~&L)fN$$-~@Jus#Zhp1MjV6t5SCGd3tl6~_xPCygnxUnr>V z?%14X=?-E=d?E`4s6Xc56SU?eht%fw0(4vjDfpcSciRkw)ikHf!nRb}dQZ^3=s|nh zpXIBet8a${DKpw{bO~r;=MHx?vA^fBoR62;Uj@Qjg>zAy{iGw5dEXy_6^wT0W!vfR 
zgj8LXxZIl?8EjZ;SBOX@4^Fka6%2~=95?a8M7Mh_Uu5{eCjLe%5zxHYw8W+>Ox4wM zNI=#&j2wE@S*@U|>psqEleLYuSw#BQEhksbwZo#2JBBu6_M!l_?AVRBM(L_YgwR1b znyJn@8B)hG;&D4$0A9M+RogiVhlC}Q*u0!4Oi!juP-fTqW2n&~;}w80gq7&_L=xDY zmhHsIaE)#jV~ECbdWbrhZg%nBY|jgqHVJz~fcoH~_C)2va~@m%y74mn_PUtrG* zgo@{t;%fVdN3yAJwpSdW?vrhQ6`*Kt`6J!?NQlys>+luYY@hrSkcwamBU#^^+&Is^ z?38qdc^r3hF}qk49zhVfiSc=jLu$&Z#wrErjWJh^x7i*ZIJmDZ^YDH5^|E$C>1w8(FCdmBkXxip^7lPDHU)*Y zAA2+ppIox(Gi8E+I>Ws2Yn1Brmq!VRW`zY4w7%G4}5fJQv&J z!cn)O_4v9#^jfcqBqjQ_zxosTnWd3ovorooA`QoAojS(R)uV=v7-_dVg3%Y+EnSFa z%#Ekp;~q0qfA*9R^(Ebr=}7pR?FFYqny*|sonzhpbm#0E`@6!?3rPF8pF#Vdh%`Bu z<+$a*PUh0TK%nMbRqa}jR|vvremtwF)Rqgu47*#a@$lPwTJ2x{5Q{KL!Tu*0)+OZ$ zER-V+{M27VwzA!hHNtNRlF@)ZX?B^1$MAmNq%bXcxPjGgwg6>kurA5Bgh!zYwaBLW zGo4k{Hcw$+q|3%2j$#o8JR)jXm~oD{C{h4hDw+nYjCXFZr&CFyj|4hH_MB*xnT20d z?&u3?(JLPC4%q83UGRgZUa+amZ&t<{@pC|W(b7x^6?pM z4>``l0GTcxt`??Ruy@la+biU*Dx}Q8UAag(Z#P9Y+KE(N!(!-L{7INE!jaW>+TRJ% zZM}vJvL7f+mPG|pLBByrO)DdX+bq`;p4hU}iWR1Y$JRg6RWY4~mX&%e8V**WaNztJ z)cS0ir-sw$jAW6m7lnc_$BOWxK9!zp!?|`y7?$2{thPURN}j!Pj2&@=`OZYTeeQkI z!JuxJ(6eVcjMTg5+t&qoH+V9+o_C`*xqiJ}=tT9qu6to-a;c?>NO|4W%WR0k-#z0z zJ*D*xzuS^q>UxY1SKA0BfOP~B91UG31Pdh3z*f_3f~>~E^_9-4s7#=CzQ|&pOrksF z>XQYXHMG9ulO9)@B8FDGmQ}Mo;?L-|9)j?df{YDT4m)j|K$s^`zSFJVCq(JFRl*8V zmh9d9qkZxpAvJ?618=!(JH99iY@^-S##{3vLGm6UjiGS@@G%q*tMVUmw?9o4+5gJ? 
z$XZc&4}(?Z5+;Kps~3$CPO72lbcc0_KoNx$$|mK_R4#|t@}}&yW1b67u+5_b5p93* zpFBiQyj!`=zN&C&+Hs|Y_8mcZEXYNI*U#fHFqw|U)$0XAb5J7=M?OxGBULuLxk^L7 zrN`9(zZQxn&ibCq)jRBA5ve&(l;-gbI4sOm5sY4SRpP93wH8*~{O|PK5|L8C+d{CK z-1e9p_WarNp=N1A1`f23gjHDN#5l5qU1#U~3nKh*b@QficBv3KjEzl*@$MHoB!`Zf zahv0iFB2OjTO}BV4;qj~!KUl8sYga(1EAl(NuwDjk|S_3_6reczoAlzy4OKY9NG5+ z0->RQaN+kp4(~x9b(vyA+eYS!*IJEJg5q>%MYFvn3WZ@1h5KPF_&*XMd@T?}$ViNx z`2i%#*J+}q2~gybs^}K#5J5H2#|%5!Wg>?6;guQ4`ez7}r)2k%8FDIK?b%>d44=f- z>M1nyGf=m2o#=6qG|{lj%(pI~kR{^!a{J1EPT_+K$ZwQz7{v0P9kZK62wxl=mS}Tb zY`2IOn(K~!9AXMmud-4Wx48mgd5xg4ARoQhBKCfhMP^v{LpP>O;b@9j9cfi@A%)u) z3++D9HLv94U@M?Th(h+)#PV6st`Q79p4`4Gs5IMeL<~KnEg!d1djA&+_2Z6gbUk~J zaA0)|Z}v|GH73wfp@ph!w8yiYUxdcgjSf+2Y(Ysi*P-k)@V&`*sI3Xl)(BGXS1Zw! z*d!1d7Cr4OJCedr$m{InpQ z-KXyW>!omb54bE5=`0NviRZg2=XS4Mrn%%&f1YZ|{_Qw_Ci;9gN&DIE5s9|Wm00b5 z0UB!9h>nMEu$hi%+0iAo)t?1j8QZ@~>I0991Ur}X8Df7?IE-&j5|L{BPXsbfm-+UO zJZ0ZLgX~8i5#f!fgOMs5A_}dBc6VGlabtzacSu?vyIo?h)P+bt{y+s)(WtW zx$$b-tYGNXgNqp(TZPFl+(ThAX*)#;opB}K8;#Q!MWPbv9lPv^KS0~z0<7u4ZtD_- zmhO$o=7IM0PmJuD3!*8ArJSXS7GAL7ydswg%I&u%j(es~gyVGVGbeC2onyO2 zqWf8V(d_>SC|fYwb5QoCCkrxs?m-L!x%RZitX%A0w^FqjyTcI z5h5yEeew+fN_%I0r1A3;hsTy|;0tWG2!?K+q`mPpf0V<7B3s}58MZ$fBr8awQ?o?LxZi$Rra}0K@{IQel!8<+B>bkjZ>Hc(BIp} zL18uDs7#T<_d-kkND>#(LZKO0=RtUSk5akJPS!5;LBmj$6^TyG$Ha={_;Gle9I9wrSh#Jdr~4b5Aw8t%_lt?RJ6at_M>kO)6&Kuv@!S zF*HK5Z7L=wZGs>SFyT+L0s*}$1z9ZI?h^{_lhd*G|50@&@KILR-`8sY)>^IGY1OW7 zty)16cI(ox2?|IFini5ul1#{uWnv~146QW@TL2M|ecu9vu!WF>5FoS$v{s9tQgCUV z2niryZEbC}t$n}0=iV^Bt>klm=kCjM=iYPAQee*{@rXF)hvy{mun>kUmJ8E7o^nv7 z5BQv0MRG&hUO8F+;koz?5ov*B#9O!7TY|J8G#<+wpLvkd6|3$){Xb%8jy&%a*e`|1 z#-%|Hik=o^0hL5cZLY$dqtFE7E*OYSw%Vu3K=h+VVfP6Q;w=I554O=qpTZo)ZiOkG z*D7S)twoS;|Gr0TuY#-q&LQx&WbE|(iyTFp21goQ*F+&VlmR?)UnZ!%Q&t!-BJR8? 
zt_wJFwf0rTx|S5m&<&PtQhk1|ItyVSKPu=(KAMl1XQq#)_^f>kY^k8W9hm4V6z<#x z;F4^&waLsQ5D&Kv$rQn(=Q{hXD8WbuqM`2yYTt;Kw63ytAqszR_|qgH!;!q~ZjTF7lN42OLz6AYRLdHNO%QOyfTkBhY;kK5 ziO!t6T-X^;p#?HMc%a#OUrZX>m&J;UC`@&j9e!yM`Y02GHHo+h;NuG*Nl$XrsZ|tJ zBL=?Q_WCmVaEASZ4`T()4lC>iGyEjy#4I}|3S*Pq+K7m;{c7><%GXhofIaVtq=^ zvFlI^d{mHL^z7Bu@_e{;YPv-gc8z7%MfJx@MNyNsEDG-UGVNm(BGL>-Fc#0W+2_)1 z2-5Jk+%}0w_3ZuAY=_T)-NG{qCfPwjC*KlibxoGsi`1dE+WU&4Woc|||M0m+;cqFg z$o?m&rbzU#lbQF8OCV}QX3jn_cZJ>HVWOrYQG-)54~w~cAf<4xpav)=W#b?D$TRaQ zPtROr0|lu$rRS~o3x&04V5+WL!MWI`_}J9czP7<Eeg8zp37@!#9mJ3&Xg9}5ubo-dPVSre^dnjo5^ukr*Qj^Xl|Y|B6(S` zp1XdkIB%&+=6oNp$F65_uTv`#Htt+o2j+KL%Fc#xdk6?ZAs8cP( z)bPP3woLLqdA5t_e*aIEm3PcvbYkKv{@PyAsieKK_IUGT+b8I{hDS(F(e|!Skf>dU ze4U*Ybe~wPb;DR0I=uKQNHjXWA6C!h0#qI-BW*y)T<#ek^C@J{*w%Xx%i&39j{w!n z`2i=E!#?ity2GpOsE@-3MBDUA>+o?Y@@Dg?kE4(8*OfWQ0N>K+*(8pRv`T0z&4;xMnb)xz%c6EivAHu?12CMx*Entl4Ba0XBBQTwxx4%mM7Y~W&YsTC!2j0j^pz&0fbl9~=%tIvS` z6W@xxCdk*(xjmjc*IwQ1QH9+ocGPWe zpRvGVqA&`0@CjL5fW7yTfwnT4QZ8jVST%`47XWpfJ6;r|*b&GHA5@su=`9{dctUQ| zL8g^GCO_Xk5RDXV^?Z8kN)9Y+HO3=R)jL*&#vqzI334VNsSCIbPbM3QKm*=UM4<6#vUBGRSxlG~ zjpf)rPGMU9)KkchP8ZOM*ja(;J_l*QfhO8C19`|Lif2qI2`|XFU@Zmgq=4eu z1OIlug_VNZxv*@*q-~C%>rPrkN|q%exb!?cM{Tt*rKk2xwJ&{xFpVe(t~2)tv5p_g zvmYr)d-6ie;r#+Zk2eJ$;(8m}87(GnOu+pTE0u_?BB@O^nAvTxy*_znYc)!r z0g4u^8~DpsD0%-Bys)1=*v92QkYh@ zu4}E0*zH2pmLEYMX}tg|5VvNM4_L%a@SsSp0qC6D89_SLR<a*9+1+Y#rp<%>s;cC)3>S5J9_|3>;!}ZLI`u2Ko{b)sqs{ zTO8CZ;?^vf$?52Jx% z8Jt%NX-!~4m1lJrWLrg{*`YkN1ZsWaXl5qQxUE9&cZ6q!DFKVEF?grp^}`PMqG(t= zj}uYI{w@k%RUe)iF1wzvn@f2qxYEO5wx6>*1l^Rx@k-izHFC1d%Gc{RSwE0*yrnwzrT*0pFZErX?C1 zZIUPycIvqmHeX>zqyWE)w$VqlPK{ZUkKm1ev%Mogm4$IMS)z7KP({{OC2H$r(lO;m zD6WqKu?Jy~3)6&R$ywUeWNdJK`Hir?4Lz;@L4d-J@0!-WWcji3t@fTMwD0a4eqbN^ zSlO{`#)#z<`?n~xgw(!$>@J0A3f{t5t3MKQ zyFvyge8<>*BDmqpg(=MMZJ{Xi1*u9zT2KZQ)>ol3gzUQ{zlv%_shgZkGwod=zH-CgDZJjeadGagGS~gg&CSe-5Lyh+YTSqZzPUU6n6a%7O;Kl(=u)kM916M z?-5ViVhl^WYfPK|w(KPrI<{z7|!Cn!hdETSK(q{W(G7~49EDXDs z+Ud{H7;|(7Lw4Es$w{y8=+VutRhZ%6)*G=w0@Q%LpcqVqd_meh5Q#?y8LJi|FyzRL 
z#K+l2A5AZY@ye8E+eL5#218#$=LdX}U?XD+tz87F?u|3ce<)1V2R7kR;48N{@Y;gc z7uoj(R1fU)0!#N%a59Q4%Y&oa-r8#U0<@PFr1a`-g@TNuEI)7tm2T%ekthi_JkRg4 zW}i6={ab4lpegxzZ0!nbXe*kIRjjxEx4Nj-^5J%$!bI)gUCLv>Jt*krGC!;pS}ua? zG;Y&%NSQ7QKhJ5|_PT=Z17Ue{3DNO2AtKGB)mU%U?Zetc@{d2|dh*jDgm=wjqSMBn z$YK3OVH$!jI2qFV!fh@pCTma%>r>!>TxeZ|NciRyM6|Y9KS8&YIB`npvDU`=B=FjF zEX?!pj}s3!9d5Cif)s}hEw1(ph3H_weTN4>&zAY*ZO^Q2tK4iWJZ#^(`($N}ec=bB zXIdX*(bxq7u5y^0oG`gxT;~(8)UiJ*;6AAVoJj57B<6Xx$OZ^An9t9(K?>5ZQx$8D z^Ku$MH}ILU5Dx(MkWY}PtHSWRRer9h@$CJr!s-aWj1nE)WN(T>Q~5m$+ZS(l(M(JG z)EVtn8|)jRF#2gj(yg1qR2~@bud?ofuG!g0^jbe*n)%Xf8`_zY(;Wh~PYbeOf}N}O z&~lqBf@^oMg2MKk&k@SQR6~Ky5v1Cm_I=EbDD3yJD6dIHp==i6pL~iQeR|qw3Nu+) zJ7oy2>;iJX?2?9=4994uT`MAOZ>`GAVa49!Asrox@u)6>RF~SdhmBS^c&lC~ETsIJ}s%N1E6sn=z)ZTsVmrCG<2nWw-==-vX*7-(TLNF%I z`SKpcc5=tJbb13_A*TA9ilW^2x7D6mxzrMZY6Wk`c41e5t0a!oGW&|K`?h#SLB6NR z9ubN3@R*t{%g1YTkT5Y09>PApc>kh^<$I8!N9SJ(Ff!x!&$c%drV-Mc;YaSchr!)@ zr?m^ZW<1c^SZJRLyImrJ+sFdD?hXiUw9YJ@cxLB9yIvGlO#1zHw}P(GPsI6IX4wxB z$rVr}_cS(21nLi1V3jsbkWRfigLCG5A9rXbzkSUP64$jmemIu;gz#T+o$V8H44%)4 z?PWn0i?j3;{9R-hcY~-UfWsfDe)D&7qTY)*+hvMnNKQ=LZQoVc_37YqOliH{BMLQV zwAPHZu^!;QF2hO%XntE|W!toQHrvOIAJ5Om0Re3{8T>}uUwz6%lNO(SEa(P;-In|2 zCnC6^4%8vPngT{PLM%kNnLXSLJ8`ewE;7krFmX&qKVFyyl%VU2zFdWm);AzV{d!?q zJ{DKx)|iYOfO8W2ld#vwMdpM-mMVtaDSk1P7>1y&Y)QaqX|{_zg@T6^X-lGRM`HU7oM_n+Q5ZXR`BlNI$|e!0d&tm%_8Xtv%Q03|*zZL-_aj6Xb)`jO z{+Q^l+N>Pj>jWtkkD7s%a=WU#E46(`we?omrA_J8!}`|l+Zbqx@TZ$s@zKvt! 
z`EmQUpj#3$prRA&O(N0e7>CDDam-SMU8nlx$&b`1pMX(Ce?M-I3TwdDZ3(2?s%@Ol z^6C^stMyl)oJPg9GekXbt|Ty1X?(alFpTkhv@_E`4WE{gYaQqMlL zF4&W{+P$K?c}`8a(<%gAcLLdxFj5thMRQF?S`Zs(b41Xv)GrE3S!8Q{DQA|?lJ>`o zf^@il|50|thexM9YJc|OMDu)W_gUH}ZSUA#ZU5JcES>GcnBtV+fquK0=nM}W;9?so zz??EFT;~~?276Q_*YEp^*ncvF)ixOw3>c9)qR>P38kxXfE$Av@!Xz9N*aol-K3P9x z>d|?>s5i-|H!F&Kvc9HJ)zY+5ML@salMQ3z;6tk_5Kd%!Tph3F)@bMiG2P{KDzsf1bGKICu#T43!D5O0l3QMZJWtPqJvGfz(kIYU~i^7mT3WFraHVJ8Q zNMgR)>ID53re9?~F5CaPBDk`{)w<+|u7ZxT8Z(7m4utv9G~`+GdePj%&xnjKj-uDZ zVL?RK?)28(S=L?HO;Ou~^)h!kFj>?g7(MzWw6JI_nSg~XfrV)zFoLmSzP=d(^o1Rq zJJ2a1ElV`(Wsiiv<%!w`J0m)MJUO+wW9e4=)Wgg{`M)&C!WRevxmV) zQD`rC*s)-3$F#L}*26^eTKGeD+1=*~?$!+}FQvk?ASWEMxeC*J1iiWRwFoijf#JCs ziJG;EnihLUM0$`P&B6!;{PN=>xb+yr8T9)}C=19R2~v6*43;n6L)di*TgF*-y|DkJ zWVLaxeP0w(r}XJ%A%$rR@)VpNGK8p>E*1nV?lKYFDjC5O2{M)iR-U8x7)?}C73&m9`nO~iT z>_*XCudp%952&y=fQ|Jco|9!mrA#l)fu!awil<+>1tGWqKM)Ge@trn)8_E;))xj@c zwGCFRzC>>czsZJ*!U)}+($gMOSk0}eWr{3YkX{XyQSA}|*HR6|(KWCrH;d-SdpXbd zQ*FP`!js?@dq;rc-o!iy4<{G=$iau%JPDrd zd362TG6VKwK{|{Cg0umIn8+hbj?CeLQ*FYd$er_aqy_rvMw5kYDRWx-X= zNdvgG7S4~@d_mVHnq0wbFZYzfXe`pYzI6>xWumy^xfJHwI$`G$B!i)!-@i8&TZyOOIy59eP~^H@^5z#Qq0gLYruefecMw)bCsdM1FZo zzg7fB0hS+%0NsVC5JO+fv8vkF!-R7T@qi$0<~6OgiY-6gW7;z|lwDvs!tV4L%FD#M z8lIa(3RcP{ClzZ8MA7u%WZu>b(0(0Bq@!+IM4&lY@qH`ew%g~X0-kcTrymi;uk=Zh zvpm>ne-%wrAy?Gh`?~^)BVzvjScs_CH^9+dCg6sAw3tUHlQ=!jW`5QuhSi^MR}OHA z)3|BD;IXTP)sdQ(L`_3tW0`%+r=W48WZ=7-Fir1yy95q;d4zdy#<%+NXt7NA7RoEJjl%S&>rZ<;W;+GkSWu)i{L5^YXztg0 z3cn=N`5yi&l3ScQ;aoV-!P6_(o>?~0E*GRktlV$1D+QR2K*qD4Z||ETuyp#5>Tg|q zxNYh@8}1X*cU{dQ_J{~n0YcY zi|%u!kRVaJAo#_vwJSt)pB&y@;Qf@^4I=4tzygHz7N8a!j+ro{#owMoK$bY-Y+vMu&0s9w_6)-23u#d$W{pG>WLKVnKBBq;~y1WWxN zK7CL8LtHhOyfiT?Uy9cXxT-iy=W(XJUKBSmJgJr1_k{gh`CPsnF&E10hsjj15SuMs zmyJ|oGzW>G?zEMLUC~&p;*_$SOLE#Glbm- zkW`v)^d;hs&dTG;17p54;spsA|S_Cz_X_|jW&GkuW zc3wy(FSd%n*3?!pv#o4vTiIrB_x7;pes|GxJ^n=Pm}pXKlU-<(=z|NXE`VQrfC$D9 zKV$4;R|~lrJzcx>3p=5^;{g^7CG4xEAaz~-aT+F7?-RFD)I+v(wAwoQ>4yW7YhN7(ruM9y){O;rn@73G3>DkcLT)e*?vVWjD-(t4 
zkMCOBv23lC3sP8Sl+Bl)`^i2#R+@Ohnt0Mw_1?iKAOhO zyw1nbnNggXcKGbTBH8_x%>28e(IVtKyfv7B;-(`V2l2Yx{!@uqime-vl~u0p!(A;0 z7am+?KN6tBG8b25{e>8s8}3Z8pDOIW^(|5|QW)*mi_t)(XNGLzV3jMI+tZ zd}{t2+wW74LI1=Kbb@^oVs^-b9LZoF@wo!;my_+%5fq}`zf%{KbGLm>MBT;Y{cB31 zwlT1lR;vg3Zz_tu-pi9?cb}KxZ=1Fj-gn#d?e;^T;K2cDcAvs-B}s(WGKA?+*FLB| zWC^&j)Z@KGBG@-8L{+Eu%sH`rmn{}lOYqzv_ttXTkk~On-i0ZmPme21ulR95BvOD)h%FsH6h|p>#pGXal_K0%2cuPA-DYd{rS$57=Qo9^ zprdlu2@IB>6l8_+^H9O=w%1OJKrOrj?6D7h#*XGP`U#CCPDWD zAm7-L;Gprgi0&8}D1l4~jGxZ1zxa~IS z$~H-;vr%-${tS%wW*_d`t-G}-%rJ07SZ5y!(5u9TvP5;Y^%w=D%W{s0%B_$h#qX|k z(De_|_9iP)GzG=JVA3G=qbGHleezIG{XS_{Xk$9 zM*c-`D>=>H5>b05*PNLBO~}=a4`M|9d=?5!Y+3AdW$b?EYC(?)s4s%|NUZ z-Pea*QLVejv(T;*azzDFFBNu8k`gHUFS8p&ab1Zm5 zrrRP>sJA=%k=<;Gkk%(ER_pc#u78(0qMuK_q*Pn2FNVfSPK{rI1ERQ9TNuR(*K5Lb zK+IXV27mMrVP{=t-xGA5ePoE8_b7<*Pw7JoF7#luERf&rL2)TFGg^RV1R_1K12DQE zirZNNSyCRV>TJ2s)3$R>#Hs|?yO4a|ffdee{7`m?u8A@`}bMPPgiQPj47co5IuH_2}QsjIA$e^@2{VGj{W zo+**(;XVc3OH?74%&SVBJtd;Qg|zT{8r_q|AYNBqV zy`!*ynvtwL4>Pa;`Nikg{)WfS6*CrhhioG^isHsTA3NXHTbLg3Fj@H)ep`~!50=IY z>_K5#9SKZZ|5V5g!iXZoaWaFm-P67Zi6nFUotG?v&3TM16xP@l$MbE4z_~589KORg zdtNj@wBm;A7row5)v)q6Cor_HB}>JkTHMYGyWz|!w7&^aM~3u(zw|hOoUc9u2XUK& z3^iZ9l%OWtARo{Cwoc-QS&_LTHM1>66M%yDm zg&m8UPfpllFA6d_{qSDF*Yi@9!QI?cZ2v zDZe0}o2bJzNT1aO5@@NSy0cdy?4>N!sUH-H3c~vuseE?^{xAYkU^2;t^ZxBYJo5Z5w^K58?`);r87-?G4f0_@I-7GfiQ}=ghJuIWhl2 z2*}uafs;LUod|By67@U-udr_=DTPI?P4LdI7lo$cr-uFYRv~(HV%KCFsW2UGZN#Mp z25yl!iizY`iq8GA>KxF>a%%B38|Mov(3Ra53%R*QL=0mJE0Wm<@IDdfN+0rB7&fG+ z-w>j()YRT~(uW6d&$~JUx?|nR0~-g|OkMx36WOm)E`-)IiL)h&6-{E)<;Pf&FawgP z-5xw~ZMUhSxHfW`B}=_e!b0I{Q-{L-E21#7;d~f8N0QOr#FpKkgxq;syL(*b_pOl^ZRI+GGz7H*g(UE1@Enx%X00{)So)vQ zy4qe7bn|j>uD%9)M-+P1jT79>_BSEwMGK}N&JW0cgc*P*;=HNcl@0I*rrN2A+KNQ& z3L7UP9ZFPBPE=RgQ-Yl~{;&eu;%;WaEe*S{RW@4*+@2JyUU;+hB2h_xxReP=#+Q_4 z1fI6v6v+*S%x>G&W586&ekPt_iXdatnW%@%Z9MZh=|e`367v@I|MSd%Ovg!2K+FaFk-!0--B>u+ryrssvjY*eQ_ z#L&p`=8g8Tpz8-dkBaRRAB{xj>57H+r5slk-pwc59Rj3HgO?>cGpQc$o6)*ChFC#{ 
z$5fmHzuTq@GYEZq^|WVvT+EqOvt`M&XzUeRl`vC|u^7w%Zxf=2h)~Du1p$gFhGX`+ z!aB`2&67RHoip^?_>PEf5V3zF?Sqq|P+GJ=p2I&CVo0$>#S*uF3%akQB$CNh`O6Uq zRM4?$rQNHr8;^9DboQ9AmJ-ZEbU?-kQgBH$6Ss{KA-|71QwEJzAet*ZLz-DN!}SrO?)ZWLEvU2*}mq{endRGJ;Ke~Uo%81gByE299{w!$bfyPX(mbY7u~?Ha|CJJ6Bd$<^i#5!}`9 za4o0F23)kR(rpoEr7^`(GOUJOHc7xQ$VM($GV8y?o)wk;bnn&2W-H9TlgqDu$mR>V zMXra0aHjmgF7(NZq8YYcVMfY1Sha0}S~nPBmYJyS!v2ekNZ+sCrw=>=Y_~sn7^pNw z?e9K2vwv_g_ND(LOk-kl5MbXa1Zhi_?OZ%Q1zyd++ez#cFPELvyF_#Y*k8+NurSTw ztCEoLOe+w99w#=|*spvfm2y!+4`X>UceGS*oWh#on$5wx{{nka6vmXxa=HCpfU?qt zr`xMOB9sB&`)vVg9S3g=?qtl~5kUnx0nN2#cK#DkTtl(i6|-*$lY+MJlwXTwlJAP5 zwYF!LbxFp;tz%=hfub-#;v3~#nMc@;Vih$DF*R+KQ)RJczlRikb^;m@f(&LLZoELu zJ*2nKh)9dOrlfXF?S_i~SrMq6vxxmmVSOPh56rU*id;%c+rNs(9=k*YYJ)H|jB9j3 z_o>Uk2^P~D?F!LcXGe$`d|eX5E}vlE^hE^QP>p@p!?75<=EwBmWa|AZsA04)&DmeE zZ;?d=v@v+aSGLYy`Ro|S(re^z1?ie(4G|I!+aElEM<1~o_S!)aC=i!|l?R_}9OW$& z#SIuNI$J5I`KZ9X$f{!%Gmcfvu{}NwcGBYpop0R)$clY$P6~a5Tx%jR_=;I}uaA%A0p{;VpImp}&OMAo? zD3*SGuxN#?_bFk~A^>j9f)pGdzkl++Ir&ulu_)*C9>he!8$7q9di!GB71O?~!oI4o zYfuRNHe@ZnE{YzRr6$2fB}H*=PCM;eig2rFG)AKAyTVK?jZ0^PMDQ|6kXT)j^%8;h zwpT2&zCJgOQ#i0?*&tym4lHUfWaZf?QRq9m>N-#S%qPeQV}PVURz)5Yg`teVPKI;f zlLt@XyKJXV9z0L*PQmsdikqg)ND=cGvOoDmG#PFDIlLGI6m4yi>UY+|S6f2KXm@WE zp*~iaRt2WREA3N3HwXw8ud?$>TyzWr`Us1EQIP0-IZR?*qzKbw{rDk2)X82BJa;Tko}Aiehb#X{{=<4IX4g z&}nTDaQ#WtHzXP=tyvV;q*1a1-{O-;TDL{*B>@IQ=4N7c#AkLFy2Q2$yWmOUnJ~sc z*7qB6E^xU>bRjsx$wI@eBGBeuShVf#!^d|%V|OV`Bm2vnT#=7>Z^1MxRhUW(#^i?L zwqA%4!0W=X^4YdQ&~+fwnJ{ed30hZWv~JI{-GWrtr+Y8k=fiE!RM{H}Q(Z{XRfh#! 
zDR>G(DBrFxg+eu%c*#U>Vb8-~nyF6{YV zxoG&PlX(NWUC_;~&bXLS$`QqVRr_1y&G!irXoYN4{%%oJ#zv<3LXdort)gl{ey)Dp z-)enK7T0U`hl7H2n`4^`(=b|_gXhuzDvE~jzB9gc)fhYfDKOE&dAH2&67-8siU&I# z#Wk!TjCiNruOyUE66Podduy04VSKryrHbTsDIUC#bwL`pw`kf~#Oq!iXby0z$Li`L>!T>T zbZo*bJs3XhAp@}Q8Tj2E^3b~;RlpVln^K79YBiHZ2k+xJ}e>l z{|lnKxy^ggioPK1a}j7eFHzS&4)00V7o<_!#YG3lOZQAm-se9~7L6`mTCiczSk{CPEVf%rw zn`>lo*t6~uf&S#_hvVl0jAh5vg|6R@D?KxJy$qE&A zJ%cUL*VYQt2)N*UXfOMeeY&UGUwsUhZ7$9iR*;TLkS6B(WLJ8qovUh#-6TkUPA-%{Ac^zC8;gxu%EuI%icJRq9aBakAQj8I{J zEVxi|IJU6j*=r8hTDF4B!r&qI4Yn!)#-n4}yc5-XZMz^9jljH8*!KF!F?stY!G28$ zx*0j$IPuKHW!5H&n+2rtyW5E*#==F&{wd5(d!X^a7JLotKs$Sfoj1|dQ4qrCfqg-U z){K>;!z z`Vz^K!}NZWK(6uqU{UfdM@1oCE>go5_wk)-ICE{8C^RM;*M#d7M$e6@Ni?nw#IbON zq#uq%V|}7=vwlIEm4-H9fEkriYY}8@9?UA_`iCU3Fx8=Kz^CwmkXkJ1E3eF9U!Ckq z&;sDG79ZjDnk|iXnW^w?uXGLLWr+Ino9ukjQEfR(D+n`HC zgY8J>LM@2fjXm!v$0p1^Hi4&+iT0V#L^-3PQRGann&Mls4t0cbyF-wAFt@%*hLn!( zwC*A?6umHE)}wOhCpce~Yo=<`$HfSiyG@(^W-4Lqz4_IcAr<0)KW#_F7ttz#8a zElWh#^H@B)$g+Kg^t4e{;xjm-zMjXiT0};UK1eIbyuRRrqq-8a=5 z6?V^v!w|ye93vi|9rj61w$#}Vr@G|Kj6JBZTQP;`rm!MxtVmj|Tk1Obr8BHdMC#v%%RP;cv(wA0N?}(vh9TtJs#X-Yl+y7Vztbn~*uKhMRG6;jhT-hK zE#Q8x;1kz!Ol+{fil$W{>Ft_C(_C%Xsn0<4t^y<`7@dnp1lUSL zxc0ymkC2<6YCiWepAvJ``SzR#BSIW>7yE1(QK|o}5TbKqaEP*6VTwpptwhGyw)v=h zG9B?@R5k4U87|q;YJ7T5w0i{AOYsl}3%K#&Xf_w+;#od53u7$b$ZDTDc&gs$Q_C8G zY!Ym>CQ;}U3iABmedZI#i?ZVO#b=44r3EzS8ilDdUl-c00;~+UF@Ys!M<8YU9;~D( z(oN5>q3M?D;h`CNO|+pNqDUb@&0;}i+}&dHlNnQb-D#^lOsrchak=$^^s{Zkg0=~3 zQS`RWqPXQ$ga!ek_SrhNHMehBZ0`!vZp7Ah*(Uk|FJg0j>x|=U-cIs)dvHEm(C&?lSm==hqK6cl?YnhD7Zzfn-J}h z7g?-24G?r4K2zI-FH9RM3f1)N)z^mmSf^wtPScM?3AS6f^Lp}T)3slS=&I~LKFotg zc9JkQEWUS%2()2;i{7-Wgj}U!=+xKR?~;@#tO_I$j)>y6MO>kEw~vHrH>Raeu3uvp z&IWRRp_hren!uJxZD7)AuU)4o*P3A=?qK#U5y;xU8xL$VY`CD`+N(JANN%Rg9{*f{ zvMPh}`L@6h&lHO*$~EQSEY!tIg;`C}eKW#VECUA_F=V=rj z2m``;wL;ODy^gYSo9)9ru>8Kjhh;0hN@3ccyKh0v)(U7n)Nslyx90_EE~f!rzYhon z>^dCWilgK<7m1#H;`3$j9LHJy{5h@|gft{^9Y6#n0_!MyJ8hV5b??L4p6*t9+SwPm zqQO2qK03xmD(nU*k{`Ih)T(>TmrhSpx^%Pgf}U5zVYj|MIQ1?M)QXndYQ-{L^2`(C 
zFuzBb!E{!+q`DVSG-vq1=9_$531~}JX}*2xGslWcGshzX6kx`xwUKAWFVA%)(~R_7 zPOH}nQx3KSXRuW~Cg^^cxKyB*z@GkuXxc6uQv>y?L(@@G9VfDuP%e!AasjtQh77Z5 zLKMn-w=6%*5affSPI>Jp4xT?}i|Ecl_+rJ!XUJChY+btb#3GJu71V=n(~`gn0OAdJ z&Zx6`(cM~RJIJyZlO^4UFW=V9A$!Z06vd<|1t0H(lk* zZ-WGM{6n}TIPEpqk417{BSsjpHakuP`bUTIqhoE7pj+zle~zHav!XB@CcUM$(dtyu&H(7k*IhDB=ZKO)jU8F}D2KJG+AgDp_l57rJ2m6)PVQnKZ$Dr}oY zapT)RE#0;YYD2;UXP|ri)ddi!E3vE0u2I;nytHw4qmWxwQbp&;Z?}r%3QM`Yk984r zi*cm0dVu}Lo+46UAGBtB`SgJs)46t!hsP>vxU1QNg05>=*eb9INt{;1&&Dia*S%LK zVuWa`ElW~RY>V+A{j#S7dMXY(%I^>|z1eg>GGYJLtxK zXgJ2n{c<0lXk41uF~`0wNN?^={hkd}SUX43B!Vbzh(=?+LDq;ee1_B>y)33M6{qy= z+t(@u7+g5DFp*aXX_4*VhoQ_GJjAhyB;^kaGBW*9l_}!X@V+oLOCpR1Jf`)GPr+xC zmdEe-)r(vTa4rXvf>Qf|C~kBg(DUmMVb}2ikwVMxkY6R8Yo7F}jjbMFIJwS)*;}QA2#nIC3Ys6 zL0;~4SigNKY20U!XxxE1l=TyZrbe=}?S3CFis#xx3cK3Iv^ExD*ThDP@-RwhQw<+?IDIRmk{vjG2 z!x$8bAOfAMt;8-^2K4=mM$zlITm+iR7Axs|>=@YhL~`v3?OP!;i~)sOVFm@ia|4vZ zFQIj~VMbJ3-p1_bqElI5(|x+d1j#mn2Mn$*x5TK(7!w|2E#1elq$5ko9hVa77xAE5Q&2(F=L z7NWE{*TOz;=J?VA8z-O-wNP958X?AttBRge4*8Uv@5b9}0(6LH+Q4q~QDIGzID&z( zJv%89l@DQUBAgVp4?F?>D;~mLea1c(K}U`Ht$||L_TU`0-M+TMdEpYdoN2kaVa^O0k4JQ}kOA4yB;gncaN)-J=s8{@ZDLU4;8*JLQ* z!c6Ten%ih_?iyGp+hTW%sFBZ&pcXV$h{nFZed6ggtE^g3T)b-k-GkQM}897`N*k~V) zqST!0^U8ZX%eq*QS$}Q&p$V@o(LejGNLq5pvWqcnCq!Ty*}vFsSnW!cALQE=hCIB~ z?oiOpE@~u2cCRp{qBy;80Jh!=HZi_7uq_sKYY^j2- zsw^~NkQAvEg~lD-DEDHVY@0lRDsuD0cxn;B^^m(#g`E+?=ZI>uX5KyqsCQ&8}6LB_q-2e7i}=H7l?t(O`o`af@+8SYkmx6T!8j zK+NAFUjW-OTi+5PicZw8wG|4}K01(&6QMT+nUGjvYg17yzjbY%y(5w~B5=u7NQp`lWR)a~3|A?NR|PVLAMp z=bQ>!PZ6p4%)+WO6}1k!hK-SU%Lql&k{BKmeylLdd*8G6h|kqlxkT2trz3n3wr4zMsbKPSsl#M|HR7x&Rx3g9c_HDAuT~5v)Y%phY1_$)RVUU~ z+g?F3AtAB;?PuVSzvv0h6a#XsFLh?9+1oL%{v4#%C((F$L$PkS#2M%REA4Hc_i4hopKcp|LcP-$WwgIkbja?%GE!0mx>L53GNcMinsoW+= zX^45s3-4r}|9>D7tc8S5tnjx`@{oP$#Fi~UOHnLyox$6$4fBAHFwcG>Kr8ZL5X<_;8z6QSDA*s4rkR1Hb7zh zQpAHLcA*f-xoICO-(vqKNFU#uS8Wgba38ee#wr{%lrz=@eAbowd>v)Gj_+Dy69lyr zNCRx1fNNyi@ntAKBOn=SLn zLnsn+$!ip(*m04u){;!$Y5sJf9rEdE1m|e`mxn>$w%8XplAV^wxu$(bfcjAF#?cm^ 
zAg;IUW)WTQOG8iE_XHWoAH&2$AKK>#%&wv)fUed;dqhOag z3G1)pvN?hAiJi7ZQOp=Z%vcJGS);Jlh>kPx!0a;-+?Sjl&lrm?s(oVe*4&YmSz{3K0)+)Bg1l=S^9H$_{ zW!55|sFx!NXJR*VkhaonSwTfln_(A6yaa71%Q}_ZnsA%pnrQRXB zYfBL-TBiivDlUlT#_hb#E}9QqcB{T9$Pgiw*bp3tz9hnTzkDq#QNPDrs)GkQUx8f1 zUE)g!KKIFXwTJEV8#~r+w%Y|ee|{MDy)^tT(Oes{CH|T&%!=z+U2czlF0OSQ3e&>w zOBft3;Eq=+g6mHT&y2tH@KpKq_UZGiEE(I6?Q5Bb{MxVP7iEH46Mqr{9^xs9^;2-yEvK~=ofP@}#MED{%@Kw6bxY}KU)k!)-&Z?c0YI4{6r$nUD+801woDZIJG^zfZS!GF9@%zCNVEl3Jp24tlz{E>m%p?d6(nmQ z0a0f+c}Uj#<72Fcpc~uKID@oDd~_yOm${?$9C-4VdE6;TyiWm`RynROuPqt1e@h^(+=&^JUlKHMe~a_#kv+d zm`otupRH5um?#Vp+{q#i0i|}|b_g_3?tsV03U9GRM50lH`1!(~A?M+GPZ6VN-5Nf4 zM{!#|+*Q;cict$aqh!=RCKNdFxVawaH+y;643iFLcRFRk^( z^g__^w+b`e==#E(dMR1P`}JkAME|xZ!9J-S((!5Sgov*0@RM9+?4AY)nnR2l->a}| zhZt~(-e!qHa%_>vA7{OVMPOi>Jb3P5nQS|-(-I;wHabGv zM?Ukb&nni=Yjkm}g^Bh>0oSlx%-2NhG7;Ro1j^kl*h{i2MWp&eQ(*y3vhNGB9}FCA zJrty(tO7C({)21S1bcXl3DrEQI`k2yW8rL`8}B(+$b!ITH>VLqSsJXL=x$mbgp)SR zm%%J$g|VXcV`0C0h)1Fa?SSSsgp546KDf%{g!!1y8O_dy;}^DH2-9;qB7;p0mgNZ) z%`KcCnru%9JQJ7m1xyx!2IEO)r#&O!MkBFqDb|c}nIy4}xuUzJcH}v(V%4@TNokuf zNA}Toh(f1=L;DQdE9g4$rZeoe)AotR>g|nU#9^O2l3l8O_(LDnBekdfO<`ACc2qBN z_T?rh>X&m|(O?5bprmnJl!hzJ?CzUyqZFhK$D7JdHtev+Jf!y_BnR?61J#=r8Pc;?efpD&_MDj5Xs-z} ztgTx}e#C=JcYR8g#8z;zFBT&u8awt+?6FKC|CixVfj{#$g zc){lbjpXp~sfQ&Jl`gV>3+k(N8e-cy(tPGqa06b5CF(1hNx>xbmukD=5Ti0jA%61dio@6%`Hg4rU`2Q z!2q9vt}{|Ku=|9?xzLiI9C*@z3xz+8N*k#tI>pf%`KZT*SVfsR94E&LFrTMZHP{q| zU5O9CXtx!@ZdnWtXV~gwG_nslwpZ9qe=fg#A$v&#%Iem$tFS;(=#X9U=JDDXkoF9 z71St(c*Xgpk8{$8HbGG3ck;Pxq9}Z6==^9;+zFGQ(&q_|5_Pu7Ly8|7kJ#@7gH<(- zX?Wd7cI;YwV%7vZ<{`{cB-QNv-;$PDLnT#mFw5-wBG5K|;fm}50UATWiTWx;vMgH^ z)_HoaRVe6IWxhN%OcG{XgJX6hr#N{^s}hm&xuXTvKCD^WHj2Us73Dn;F0~JQJY^04%>jOBxg(xmA$^mt`VT{ zMDyH4bAx@u$3?QjA^VAd8xq*vv+Z$VH-6eB5MBCZlEMqqT=>~jM4>z$7?Jz3r5?UH z@$F~$5nnCnzGV~wk?vjZ<7w;wxc-|2U5)+ZGJLBrO&t^Eg3;(R>t&m}W4*oNV|g;& zV*ly{>)%>v7w>cV_{wwb%L1;_oNyj*HkXUw`XTe>2=(<6g;t-gEju%Rw)OVevk`2u z=^h}h5w_U^{-hIl_knAVgssgN-F-EIWZh0!#A`(3J8PS<*0w541;y~iS;a4UNVi9@ 
zY~J!I_iZe;4uzS_5JnM0cG@RQ)XkGz;C}@D46K0RR%W;DhZ0OCzhYelG?_IE#4+mI z37aY;S@&~6KMpXRA~x2?w=Y@QvAN1B1ZnT7`KxVxW8-~#yd&DP3ezMUig8SyE9Aa2IU~V1 zf-Mt?R&f-TPSI{b|M^SC{&^oAxE{wegCO0-V$4+g%*Vaouu0e43tn_3=}i{C!9{|8 zj*w?yapa0ZTbS)(k6Mus)1Y<2HKNXDh@fMI7}K0w1IO(FkyOF3h$lA77cxMeH0?Pd zS4pC7srVCfM4Gjb5c~xd4^4Z(V0C3Nz~1-`IF~7gvgM zd)Y_d(XE$tC`>7s1LdjdBOjw*#q(YwMk}Ur!-*x+>_R~j7UmV~TVfXpxyDLpBi}9; zf%@b6V6N~WCzM@mS9iwb@;H98fEzMwy1^E)TSS4W639mImVl6iY!3yDirJQ?Shr^S zVU$Jo_J)c?yBL-f3>~ElGUS0D8szqW`nl-ad)O}oX(+rfz47K-mI(A?2){;oRwzWX zBeL67BES#PqfvWSK}N7|?`}3rVJZytgy-5GK?>k$78dO*K9bgiIpzI9(676_QjoBJ zi{jSwfw>17?7ZKf!)da&bxRT#MzPP?MVQ(!Wv%;bH$fV52POu3DeMLvhTDhQB3+r*e54T~8^F@U{+XJiqj{Cl?%#Qf%DlRg{-Vt<5WEg53c1Bq3 z>9mn=ABw_xKloW&e{>UK_>*8L?qnPI>MJfWWo5HqSW!2M!ss2XdAr{y<>RurR`c`zXF=CwE^AfR;iH)h*_QrH(CceS+js{z z$UqO2AL4rtoXd)dFE|v!t}8hRx0WWO4{nBKJ4u+`g&Q0?L=jcqAd*&+oTNC_jzx~h zgao@2=3!RZE+vD%8~lEtkMZU-nB^PnMUm-NEXrg50RcDEI`ik*(ay5hS0~mZr?@_` zeyjacL{=&DH!Q<0d({X=12dW;c2?k&u5gFxupxxZNx<7 zeKlK9%d=Zgo2ancP$e26$EeGFYTQY*P2X&*eF_QFM#8NtsC0u!ZYRJ_L>4cq&-*;I z`jLl+*lU8puP1H=b_K8Go9tbm=FGf_l1Vus=(y)WnnmtMwyli7Ci_^iZqf_mC{Ue$ z5X|5ehw%ZWjqn-~7}`-%#e2d7zJN*gvw6|E&Cd zT=N>y%tmjtt8Y}8X)6lzKx4gy$e1U^)(1Q&&g0i*sDNu`KOJ1th3N%$8s!*1Cg?x8 z$QbUet9|C75uWH81>D-_xPx)DnC(iEa$v*AY1}I!xhk+vmS=}OCVgNl>P=xcB`J4w zw@Y3>m;5JTl%MYsc7ue`A)S#&iQ*bH5M33nbe^CyuTH)e5<#1e#61usf|r9w2oW_w zTXsmOC?@Edg{E7Ijq`ch%H}6^Y@Zd8VH|@m=a}vDB}BQiN83Z7j!(@Mp!EXt0|V!E6;HCSkYwOlQ_l zJUg%6di%IcS)Lgr;MP>IZ^~?5rcc1{S(A+sU>w+0`o%M4Jq?!7R1xVDqWwr=%o1{Q z$SEga7|-zu@)1+R$deGQ3B@v__NIWFjNn`|(b`3EvzInxr2SXW)yU6FRvg(YmZ~q} zz>)*A?SBG(!)*v8dw68$LUqOAb43T^&}iQm#m!J?Jp1Zx!hXY(XJURhoPT0YxeIKV zVj2D5Zo&Q4o)LkDXW@yH?|qH+&Z6m($DkH(Tr@QRp<%lu^6v2*7~e4NmEjYJX7JEzScg<=Xt!33gUATE|P-Li^Bz3~eQU6`*G9$FgPoQ^+l<`}ry9 zgozEfcB!_{eC(SObV{)Qc^Ek2joQUWT^*QJFS08H+z6z@=PkBteJn?lkliRibAQ$4 zSJuUcM~zIg2Nb6IGt;WhVx(r6&x7V3E|h&7hVzh0vxj_Kq$mSUz6VfI8oAsufH1ULOa8DrDIL=XQq7aF+NLMIyR2 z6&+s?+Bd_hJtfefL>su)_9fHsh%YYY%c9V@QUt{9H37F)GEh_tpq3z8`kOu~@UK*5 
z{}puKDl8n?r>tea-D6*R>s%w=uiJBGK7lfkZR z-`aAle9^Ir>DEO=+LNeSA}6v#1!*fRhN(78z%7uHJk)(26n2lrD6>uolqc)#5uc@N z*RI#wG69N~-fUrCmLBN$xL`n2r-)9fz-jAr2CWhTZ)?Zag9+&DG}J`<+2F5Ob4 zuKIO5FjovN`)q+AYUX8M_4bpOUSjnpX5o0_{6Nl6by>o?;!_Vt~^#D z=oTYykZ?q|CX)}3Myx@QdW+&bpzZN-!5i0fdsooQwETb1S^S0~kZbl1@w5xgZ{A}D z3ejlVfw4V%*hhwnrCF3KKM|%q?K5jS7H+i*{{(a`;NHviaEx6k zl526dzFqAG4->VkFqUWC1pV<-x)=zT?Uh4PMDQO_6xH_bk!nBjx$@CXz#vtnpqrfs zaA#)Ig=udIN3j1#*L%QMU7hj&wY9dj!}-*(r@j9Ofhs4&a}v4spb*9Lygc7x0={*VBl&TgBh1%zqP+e@ zMTL7;-D~qxiO>zowiPKnNL~D?aQJv7mnj1Rax4G5=)QB_SPJvrDjNOaqnHZs^0Gex zX{$S8>G`{t2*f*i`fWBy&~HLV=Tj(h#vuo*ic<_0)<%dxe~xZAy1{-FvJ=~-b8nk3 z=;v6zJPJA3Zw@KogzmzI)C&P7H&;tb|CfS#Y^*(ytlgQct#vZvwX$jVMktHjUS~m` za_zgx3$065O_LJ;AW{4ZO14znNWp6_y;}R3gjHF$7sL+j{*YGkj4zd0ekjZtXTuTp zbbC@1dX~ju&z=q-k6&|}AF`rnvxIk&MS@^6U?4G@MIfl+qUZ;Rp=1`OLg>muxbB1TCH2BAVs6Nsp#R81o?1z2UB z^v`mII1pkx-?5rtX3cd*53rfgU~$DVOhaYj3SE+rl{Xw@HWmx{s@yBYG^W9JiWrWj zk_MDm{R~(y&7y}F_W6vYGbqGQ$R2woRVasxq!2aoX0!msg1-pTaw&D&KLi+-DE>h{ z2obE7e7o{ZpGo??T);XB(nA~}fVvAYX70hCU&HD4h$xhUb5DCxVcP2iM5a2?5Uly{ zwB;d9p5ze}*2h+M;6&M4=W4Fnc8SQOvAs!5QqtA? 
z`L%pLBn=GZLjm$JU_jG8c7mK?sQ!bZ2)fpv67=ISTB2-DWb_LmP10HL zv};pY?q_Dd8sO=fTTbq2u-gRvo)S;w#f$9@5dw=++|d8IFI#rVD@Q{sSJ`+$KVfbw zY_vt9FqAxZ#O$a5MLRctnwoJj&hgcX2T|JKQPvnzJ2TG< zV3OIMP@LoAueKurzO-EDW@`(%vkKv~+esmoyK_rh7!!Z&9g!I25eP-Rt1!KY!kD)Y z1pHVH#>WGO)I^{mSYGGy@lnXv8$;J$c#9Z1j9QHnUM5H}xiO54T_M2N`|vQkPC*t~ zcwcC7#$v7y@ehu%2NVn&H2&oIac{IoM5B2{`T3TwaQI#8xD-0mc2*ctrQO`aHiqz< zh+Aw^SWS)>q3ZggkS`x&h+>sS?0wOgoPyTfhxT$N|FoW6Oq)49SyDv$+zUmcH$%94x88PIVR}ilj2_TuEGisZ&dXMO>r&Z@=M zvybCbh>pj~v7mm>s_UJS+vX(x(1(dgs}qxpP-b{c$XA3lx*0ZI*ni(Be4>*+GbG0t zS4pug7NWgp7o*RrFb#2x?9J8~qI&h|Z7+sQt$TSPvp$!j@3<=I!5J`*nJB7)pAcniljD56l#SV&aKpPL*mY8?M(>bQ%f3cj>3K)(iRuR zJkqK_>i7!Tc&bF=1u!WcwhbNP%{l8p)Gr?I&_@J)i$>ybi!JDj zfAtA+^`&K6njjM*yOs7(028B_195A4coaWrabtJsFO5_?)n zm#~QRh7FbqfKbP4rS}6 zu3un#O$6E&%fZRI!hY<~c;omPvky}wno8HA_SD~ej&rq}ZI8mfDO^-b?FC_4db}1> z0N7IrFj>1ztcB~}BbFgVQzMZxo|idG7lrhED{=*~T9C$!W^_MN7#2lv%3U@K%^vCZ zl&!F<{_b<7M@x9Rwrd0fdq@vhyOA7hv~P*3&vth?mcbH3Y+pLX-D%MbveEue~TLTZ@j2~ zRbhX|beD`e{%TWobiYiKb?YUIo2Hads22{4cB~=u)-nLRi+IgxOzltF}hvUcu zY(@$CZp(Ohw;QeTLJ12`?t|4)DfaiNkV`*iScUwnAO-g6+TA|>FTynO0cVKKt_~1! 
z*dm^>uM_m2R@Xi~dfIIv`lab;Yqi)2LBEf%bMv0~!NFETM5YP!HSMU7eg=p=G ziWzvqI8m|97Kq{pD4U0zUkFlNtaVdsJ=c#@spwJY1zrBW&+kO~B+q9zi$Z=*MGeR& z4i%(h{Yd;|N8*{4Z8MwXDe}jnXq8EF=sEN>N<$LIfJ6j$nk^TF_72CXSKQVM`S}_h z3<UTP#Sk zdf|+CYlwm^3)jr*1u0BSG`A|A7l9SPPg1u1JtU@yC!1#5M}m}dc5$g){vRK{6>ZZi z6!xQneL*fTSBgL(*pi6ZcLit+Lo^goh<<{8NV)m#vqw_6pDcYnp2FD?EFEME`(?`R znhtxTz=}nqA?*j&Sy?JQqJztAPKd)vwyg@`9-VvGc7YIMI?6dqgl}gqfbo*p;yZpbTazm=89xgW5i$H55 znI*PeVJ$HUBw`C^uGNRwo_)GnbI8h2<;jYX{VF7f0IMLIEA>fHXy&kigYLC|hUgRJ z3(suCUDQVF@*&aT>E(jK_p0rZe9Tl7Io-`-VgM7Ptf&$JTI@YCp_?&71iqaD9Gh`6 z;f2W#h{QtAjOgHUFl5g!#5C7&0j;*?<>xBr*gGNa*e0yTT>BAme#nraDzMJNG-9+Q zgYFSvV3=ncrZ81HUM^1ttc-g_;){5`)b3Z%ub~WFU_2y9W6}!_ZY{Qn0m)29ft3i+ z86?#rD5~b!(;+X@qvz-H0NL~Eq>(B}0|!N6gCdvn(-1uXDP<1-t%8)s1dNVyU9sPV zcnDoPb+t38_`#e>qIOOM|1l;vug9N--TblP9YcV*;}Yu|a4r_M^-KPUu!e8SuJfzw ztRg_kMDp%4UC?i1*?b;4$5x8Ou$`G(XL}uN7vL#zhdcvEK=W zhRM=U3G1 z?>c^`k_e0n zE|mDd-c4meMF0*UL;hFM=q1A8d3Mpqee!|u(o3wfkRL?uwmSXx7lrzZQ5GDiFkNb` z`1!)tX*MQ6ox(YEKO{&?UfVAtS1*D?b{@)z4|RuobmnijL^ z3e!3^pqMQUAXArsH3C#{e21RwI6OCszz6j5d^@Hf&Fnk6uboiXcVZaYCj3-){e-8a z3=8%Y#Sim98Fw2bO#VTm2iXXP>E@}WHSLSa>^?yn1dlsznIYY|#anES!i+Q`rC2vw z5R%1<;S%i^@WtPq8=YhaL$>tRYCEhj#YZxc>d3P5siebsPO`s+q&yPXr5BUb_arlx zzzmXoRunSB{gqho7X*Dh{iK7?BV=%LR|}=Wb&n`~U$D!TYD(F!$B5*Y(edin_rJ05 zjl~uV8G~F?!*-h@3N@Wxyyxud=d4VB5O{o|ah-U*41?#G_@ zp9PpUnOtq%%rD^ILhLXEH0<93zKfB3m^?Z5$xlKFvIYN_!3my4KCO74gEzeOyo8Mq zg{JatjmH4W*Z{Fsh`SB@Fu-wK&)A_ax`f2O?Z+yQZL`k8%zPTU^0z6>j8f3>p*A9S?$-tR0*4| zl0(ntT9%M+Hsx=#ogp?>h|f!F6rzw*+qc*o3SanSHeqq%Gne|3#zo}w_M1Y!q{0Ym zcc?IZcrwC)_Hh9U>H>$kOyRId#Kv5eY}jYBLxOV~)>*y6z9iVKJ*+|4&(mmR4}T`; zrl~l@@LNTpK3}+4JJEeU%;J z6Ug?;7#0R&MG0?`h*r20tiKDkSVPF0n9R?;y(vW1c-Y9bQvsmLyVO4SIb!GnJ1$IT z`?4Ti;6^D4j@@>>2y{93;MPpJCcHN!V9h!1rlpI(&(y>HEmy(RgOTIw>Si*>ib5&m z8YL5TPlrsyC);ub{TSZRt&cq$;HkxXPA}PD2U8h`>a%=bm}5%&fvqQ-X14DwZQro; z^qxhh)~vEGf1aqotZ?Rgbf&j&iOLxAqJ^ya5Fsi{Zf=&*nK1#5h!;$RXAl+iKOK1S z;QfUYf}_!Y*|pM-`8cKVa||PZBWFn{!*Tf!*wO&W&U=|16=YI_^(lT4&xt~VFkQ>R 
z{B0qYUm?51Engt)ceG52vh!HhOC+Ml^OSUZh;~BF+|n!KHb5jT8Lid|bOdaWD3m^i z^%1e0kUeK?wmqdVU7zsF6(u+!+!eAiBS)9I1N(kaSR=fM6xvS%I9SH53e$~9Z%YTJ-J|v#Zjo}{(`&#<)s>AuI(-lytOZU#UFBFih-I&~sw~PjB4~g5KTXS~F0sHvX zJ~5YRyI5hy6pd3hi!Te&tqJ34<~IfW&~U$z#hmK`g--@K?-M8*4-kcV$8mMt9>U!) zi_xgCUlr*kg?32LkAio|$$|DI(WvW0X<1kMr+}|ZzM)Z@__p~Hl2vPYFSgqPtO!=llK!`rE*p2zn8h(INXJypVb@AD;~?_j3P)Q=S=I=Nw{ zOjiuD7QY zrUoqRNxP;Zz~OO3sV3S|LE1YJ(a<6kByB~AKicA?i0T8xP);N>Zod#@(j3Q}C#_|O z?Aqy22D3gff+IWo{8xOHjAYxI=j_{pzI9SQv71sD3lux8Q%HV%qq`?}7i1kF_80KtwNZQi04SvewGluJ-=?s`DOEMUdR~3ME|mmsD>@)EBD&2 z5Y@(y-!DQ`RuM*taA+E$bW!+|!Ynh68XQh#km+QI?Spz@dkCN2KHENaEh+u_#||hS z%(CnYB55a<5B8n`V32O9Jr+O$?Lf;AU=n!zlHKslHcJG*+8)T|oUIqZ`J&M?4eH)f zTP+w|6S%iGPPlT4YOg55k9I$K$~i5}*kwfGb|%E(&;~|$fqnX`#8I(6NcT}bErQRS zCvO`YgsJq6vNz_u-eTJXDNFBbjS6ci=uLvxYv-i+brI=sKBidguOan7co6S}aND}| zcJtRr+`$f>z+a(M;EgP?y)b4;)~7Uf*-Z6 zBG5y^6KsG0Z8)|MrMImX7xW#=Ze2gw3WYUXEtNJ^VZU98OUPC*KV*Js3f#$3YY_A` zNoJ(bo)5Wuz}5e~!eKmQL2iGtuF~EQ1+ddJBTn!^NdH!So&87Qz{8hCL`2-nlN)OZ z*hSwU%C93{4DHbr>4WIklL|9f_)s~&WS>c8S9J-WnH%i{fWR zp}RUSaAdhdkj~^D+=cAVkPw-y+91;zxq^OVM>-LSNh0{kkdPSM*Qr(_8Z$dsW)XfA z3Xrf4W4j?hR-s<>n*@D%_Z0CwZHGkgYl}}(=0biWitqW+6|BC$3e(AMX}#^DZ~1WH zq=;RuFtx^|@BaeV9Vc!68(QzGK8)9Q6 zMRrbM1}8tl+q8Y-+aN{`<}4Ov?M6Y0!7`@Zt}qGjiD1RdA|YYo$s!rvoG<8G2XkPb zEfMxdIc`7R>(YLEO;q1Z2@-J<{|*E{*6gl207dP)qIGQButiu-Jp(BUzrFxY4*Y~4 zh~z6AE-O1jgtZ97{5T-~$0$)~FMcd&V1`h~&R^|pvuw7fMD(A_n$WXR4M--0dp_d}`V;X5z7Ji!6YEFGMogxz;9v?|PS>x2D-yVc#&_ka;G$ z^m{%(OCe!j7SQVI(%bG(IN09j0gJuKpY+*~CE~>K*`;8dPLyw#@>oULi3&U%hVowB z`084FB!EJh=*<+M6S&Npa=6i!3Nq#sQ3iFptgn7NQ=ZVQp=^SSrm$HU7w5C+fg9~g>O*Y*cv-70;>iWXK2y5HF#1aKf41n zGVGm@g-vWY@4a#RhX{1_f&~gqS@#ts1SkP1IkDS*7$7Cw!z=xC!7%Q6 zo~yQ%A;FVTI9xH?nM!bUncEy{Ljt+LqQEn$1TU4lhs{fGq)hsyUk#X)E%p@=Xiw`F zdfiiiPO{Z>7>FJq5{*c1T;}9I;roR|v=Y1RNU$a>B2xI-mJ6O)7g+vQrYH@9F3z}C z;cuBO6p^ux;SbRbbQ4h1W|`?MFhhSeH~l@ z6o_oDXv}~_0Ca-k2WPe|2>Axd)N6q~8{m<7Ziv?fNR2}har>E|Z_$uMqKLD(2w{il zacdX*TS$L!Y9SsT?CMTF{ozI@HCk>a0&@^`I?!JWh|&i=U^fXdSjo*x@Pcez1iy3X 
z*OSed%U$mri!5;trMII!muyVG2A@s@4zEGsv zr`u-(;Bd9uz9!(u3&j%nzFkD{t1dqmk%R*4D+<3~@qDpd5vePT@f|%Vs%J*xDUEHL zXMQXs{6u8BhIcIdDdOBWRUAI;GC2$`5QPrH?$}{f0&p*ejkg8`8HDJ#xP2JHY52AN z^exc+CpRE6(Z&dpJzJ{cPYLLYZDPn9-3+X-IU+KHC34Ya3x&cFX1!zO?6ykL!mvs{ zR*rV-@k?AAa^lUEhx&s;ek6`eDY16uw6ebzFl%a51Zmvsg!MUM(l1S6>f{8##7UfdScvV@tD6-nOn04` zy_N2De7+Tn$bcMerUy?6Q1xq5ow?A8ka>K3f>kNZut-Iw&^8LOVp2>es~5>H1vER@ zZuW-U{rmQ}0}9hHC)(u}z#jsIZX|2gCU=$EA4SpUQPr%|ncJV(1}Hrn~GX0$j?8(@|(hM}cD3i>6WgAmNb zHc=QPj>FDZkbNkE|CV_#&$5qo12eF=W7_4Oit6nVktoGm!Ivm)PM|ocnk~qMS&=BT z{_Q2J-m2eja|Nje7e<)(;C!_)#J8)q(P%EqYVS({o?cSj{!F9& zR#4lj&NghR`bD+#A~N=w2+VvvjW8n&7dq2!67ZV|EKRgBqt;0zEq1WX)ZHiS-gNvE zdwt6E(F2O2E@l|NoZcj}<%<~hdRgt9iuIr$iN<=QNRBkm2~nwTt&FTR<9CY0SDLKZ z;oRO-c_V!Gis-)Y$T>&sn6O{BxI;y*;L`38=*@_}!v|WL0M*c*0VAx35X0P7x<*Q6 zqXm6Yg+)j)&I>uUd)dyAmV2LssoxM}yw2>{Zzn^#W0iPd`HO(>Tnt`(pzDM}2v?&FD8BHnU1W8Bl?arE>6EzD z3((7f!}{9s5blb)*{GhL-m6b{dnkna;2G|b5YC46<-)!$*4ZrXgAv727uJul7Kw~Y zfe7r9X+N~d3ThzsZah7In#~O{uPwGUA(y<*>L6v$g`~{kMD)Ej3DV$n>^a&`1sK3& zZB?>%vpctKwwFSR%xoO9U($;dfz`pfVoQ;wr6#1*KBX8x{l$^V*>CBcT3Zv5#g9A^8#CoT}E_qsSqWo6_OBg-P9P2F-#kFtW(!PDZ-4-CFkK>yg zBN)eB+Ukq?W zG)vOi-wQCgqgD%c_54G-|(uF3Ex zY@{F!ZQYwU&YlwR&B}~%sCC#c;{?9yCfn4I`K6Wada*D-icVHfx5`xZ9E!K2!i;{n z?0Hev4J+*%f<6fh5xuU} zS&?XdEJsF=+5p?z(!!hS`btDR!21C%G{ShihqJFzTy3;?!n*RzwE zDE`yLEM2sKQ`MJ6(zaXPe4_k-)0Eh1-wQ;>A=$Llo#bn6u!w%04TxYr!A6MSTN=&h zm-#Va|AFcILcT0JP@J_7?vYDuQlLZ&FxtuYoM$DX_+s-B-j3KR5qzh*_UdFUDL8n@ z7<*MvzlAWEcpP^e`R+rv~WkQf{-Y zso1`VQ`QL52Q270?GkTLZ?o+c-M23@Qk0b~@$h4!`HK2x!JqhpFiQ{%{&9OV0Gd>1 z{}rGKC-*Epy>g0O`~x5zp#5wTUl$@$&!>pnbrSRo0YOw}P+~-iR1!gh6{hG!kysnf zqK6b#_3$|(lq7Ce;|iM}vSG7DKFN8=wO@$jn>-xPPj*h&PYv&ayN;CEKSW{B5A7|o z_Z6mPyjfPGRr66Qv2^m`a)0$s642ub+WfBLI`d>`t3;q)9%>`DPCz}81*KZ64{%Tc zdk}Zcke7}XW7Q+yelG%3bY}B*`+EqtH?FtK20#xJAInX*uLx7DWC3`4zbh3z3QY_f zn8Juq<=W#Rxzm`gu?2$uV?wZ+y~lo=%8(vsr)(3}Zcy{m0jFfVAyhyFM!j`cUmc;uOc=YgUku5Nm0(%?8m6H)rvwrISEv2 ztwD(XIJLcvJa+t21lokro;~(PNQ*POqH*>|A=-qh*_nO&uoZl!a;m)<(jQ)Oc(t7u 
zUGw<9Cr?vl$;Oq*#&Y|&=)S2k^u;!M_1#dy;h=(j8#`mxLo`3! z*pcNvKSl(NQjYxn6bSi3+GcI$JZ?{m!VpE$Gi<)XOilZ`UF{3-87S!2PnLc!&xL5m z)7xR68st;IRkz?=HFk6bDcU`cW4NN$t`&u%Gm z+5{LQeI|GuYZs&x-cp=0+E<2x-EiJ-6BTx!7J|VGb;NF*B6;@dqSIS(jMg-_ttqv+ zqEM(bwfNHZ1xQOX_@?%U^uWoyW#CokP)Lxht4-EVvzJoo3$i(d3DT4F80+D4!#v<8 zbAo+AfYugrW!CZFGT{yti_P9IdU9u(vs|^sI)|(oaNO;70k#epKs+Jq3BaBZiB_Mj z*xbHr_63-%UMo&oiDFs)&o8lA3i@pTFAGxLS{5Sv=G!&}{bC7XMiDzAn&v_V0bdtj zpQB0QuIPgl>jY{Wm z$&c~O6y>%3ETc_UDGKdC$1B6$7NEQG8qG%hGxOy3U+76#IM;58zw-3{u*Qt1qeSIA4T?rpzmo8cI0hpz{Q$Z zs+=xNyAQ2Dw85SiU>MMb;=KL~A-`c@YZsd!uZzHn9mr6B?p_~R3~Lkb>Unm(2!4=; zN4TEl+6|)ks$bv9v%>d<$=<$Wy>$)Q>1jM&SEt)UcImuw-{b4OPZS!{*SYt5Oo%ly zWV{_$kS1ecqs~4E`6y@zcCV@Sl95C+9WnhrF9mh6UmmcWXrB(%L=`0=2F7CK(MJmBdQ@OLR6Q<82CfjZW{pfn%P4&PH5hrW3HzmXH+};O%tY{Gl`mDq&cc+=G`ziJyY&`izXZ~FmB#}#t6^X& znO<_o3i@Ib+|Z+zA%dIVDan>Sn%`nkXvK-LW}6&h+e$HxxkG+aYeb=1=j48|ebkX% zygqnkbM0HBF4W+JB0a%3e5js-yD34(zBxU)qsDETy8d8A-tJ0e=iW;45h5_QbEm-O>wlyd=tRbxX+103ig#_u6^1PXYmk_f}Zm z_ln|McC^7sOLQ<+QBJb^6ia2Vm19=riB#s_9yqoh&R3Qw93Y)2=4O7`SHT!A64B4{ z@#lhK?7URgB+Bs{%59}6e(sWc_{`QP_sq~tG>NY6%|H6A{X&4{!~K&D_*bby5Ezl~ z*0Z88F?tE%aCt638J_1s^s5gL%cQoi-(p`^n3SjYJacCL2J0)xy7p%#v@$nYOe9tb zB$Rn!#Xc3XXBVa0`jFl6n`>-SfD9vc!0g2U@#-e44X<=0Zjq#wy)CHK=BxtX_{hFA z1`6FukMhc3LxiYUU!Tixp9r-3%z>@;Xb2DG0FoENnb^WEQaC)Z@#4wTnZ*7uq+|0H zL*caThHu*vAy!hqY+I@z?cf@&pA;<0E*FKdK3E3#uQC9%ALF)1fZ>8=f>xQmCKx6Q zQ=GfYlC_94Z?q3Xo)Jax9xoc}OFO#yXr)~x(9tQqQS}J1k_)w70yKp-y4RAmwpbMZ z!AP>sHii78@B$OR7s0QpyD{;L$?!ZoBO0xAckEjG`h!4LV{+qm$5X?pe3jiKB55cn zA76N#1bxl?^y(jeE6-5o~|y0ftFi_FGjabi-)jXOcic?VD^md0{{umRQWa3ZnNSH!z=vTldfDAt#J zxE3+|+K_d)9Nx7E_-gnu#mJQPei#akP1s671?25nP4;ia`Wnt{YkGUdV!PxKD14tV0;a+aXoMZ`Es+>Q zB=~fix=E0Q{oq);Q$gSQ>^Q#?>B0=}goBH$NMV|K9m4UpEdchG)pkNa1q`=;DfmBv zL~PIV> zvSyWS5K%ioZWLzgMj_t;4wi^32-Ev9yjSGNGGZK-5)X*H#@hYsJr&htRWj8xRV72ywXe@lX z0YjLd(U($**lHU2t*y7$Lpkazj*-p?GR?`2OWZe!gDp7R_}JoajeX`ZVrl0?_uFL( z`Wf$sbPmcYSEne+s;$YYy*5r1-_0&*XzrE^(;#*_&c8?#O$`J-WVx_gYfDo(GI6b8 
z)xvCB&J)&FXYYEYgm$)z?rSHXT(vf)a^53W-rf`*C+|Z?0_J_nc^`URnE7j6pPn8w zhy3sbPII%JQwk38M=RmIiRtd$SN~09KdQwsRD$gjk3(R_aHyrz_T_>;J{$8698FV% z@sY_TId+W*{&tX3bHY%q$e|5Qe3+89$P&%}6pS2XiGU(9pldUPSWrFT6z@^kkB2i= zwcGZGwDAmS`@bpV8^xiXXP9?Hpt6wz@4x%r`-k@(fmO1PKLL%(5CPyv?Q=qO>z6A~ zUPaGTkj3Y8DZeJ*CpI%$jNb=gUp-tVB$I4u$n8ud9k62|cO)b7q7(xbcmR$&T) zKcOSd>j6rr^>~Z@TafRwFI*>-YNSv1v2;kjPbJ!eJ}pcV3YVQwsgvECN;3Ff#2Fq) z;eG|Ug0{(ckyHWiZHWvxH;^c zZ;C+2a(GhvP+=;8`+!4a>y-gygfj4nlCVBPMAFG>&U+IA#K56kCCw4kih)(Nq$8@7 zm*Hkx79hT+q%5>kkj^_sJEAj)&>j%UFCl&=5%fM3(j03zwgDf;O~>}zQIY&QNLFu5 zZY{I(qR=2lF`cW^XETBFSs-rfRCiu-+7^6UO?IUs=}I1k31W7O5IJ!yfxj2KEkGWM zN;1aVQvtGv@vT@}fG3;wpIJZ0-VE^2URX3yYZqi9xH^urzXb7qM2s;i4aWZSF_lA~B7NU=R`>0w(HW)Ncq zY;vsuYtc|hlU*B zJkRpeQEiWO#EJ)>7!-KerhzXy%Ocu@vVkt;=_ zXE3X_+v<>&Jzw0pt%8g+43#~vY2nd#u`9=uAY4zRa=audj~WYsQ}4B`I2YQ{6y5P2w!0Hcvz<+4zaL$) z&*zX`Q-Q&yh+QV6L6>MuL4T?p>Ah@omgy^^U;m>pWNnX!Jp9^@gCjgg zkm=}(5A%hoq~nTmGwpZ64BE+-s#Eo4r}tOc??s_@coIwPTmTs4oNn(3&@cRjy;U>C z{v}AU1+A6#VMvEQ;(EIz*GHULzsNqNFr{S`U_bBE0*pjHPafELDYUCZ(2yo%zy3=? 
zl+P!%hEKekpboWlE8LA~MY3+U^9c+qeXfmAY&hOGOm&Z96_zU^O=@r2VFe2N*=}32 z$SJ0kwym9Q%S315PLyqS{#B-(C|l&ho2*_@G&VyHb$%{D_0B_U3rwYdJQ{y zd$5Z|XFkXWuelL6KFK4J<+YN&HAfzmBHwlGJ8PRN(m!3lhiW;_dJ$&64^5nhn zEVJ;4kgp<%`o@k*dt8)`d)P{+&>=oO+$t9)KYv;V=|IJ{us%mR($hza~gOCzi+n?e#*; zwznxI%?-GJjz|FokwfcY0H=ilWImGBlqE=AMe@fO6YzcJ-T}YNCWh3>rZpHYwN(Ko zTB{1-hp!Q&_6b-4f{hR&&#zqk7K)!Wf|M+6NP9tm%^s6wIQfgU?#4MnNX`Xh7E9q@ zf|LcD9{&^<$32q1bi!^IqNwcFmW16Iz^hAs*KF$rSW!|8;L&rZRf<4G_h#nDC&r@E zVBa6oJRbhpuSB3K*sZ8O{6>h8e{=DC`>(=`6}orjP@F%aFpVf|-G~vl%bx`K?{r8W z2H2%SWZx2zvR;~nbmVsgXj-y*Wpe9W>nTXnPE@RN7BXhq9f6W)-H7mcE{x>IMe)nt ziHJ{Q$?UNrQTe7$R4l`4?}>8EYwX5|ul+ORr?E#$kCYw3HLoZg3%=Str*$lND`oQwL-*iT>2$i)wX6WTC=Xum2RV2tc|KN3KckXH*3U+KDJnqf1PZj#wNYJU%59|s-a%b?+J<_fUk*JD_F?h8g_h<|P zNzJW8Z|O#n>B+zWqb)6jyJJ_okHRX8mQAwTg_s?FoN)EU8M3bk^b-Sv&jO$ZE9e(Pf36*Nld!LA0Ln#C+a&@m87F>XL&zMRFf1=8 z)Ak6mj0aBQw>&XAT2L||R@^_+o)38<<2k**AmD4@V3==jrZA>7i|mXr!+HAIS$0n0 z@Kd>iw~uM^2K!eLX%Z&B(Tl^yK<8oz{?zJTKiPHf5{Z7cm9BEXe9iWRD85>(M?@?x zOf`{1Q*67!jKQl*`2k;RHG;m+6L>>P*e^t2d>_b@a@B7FIyg6m(Y05Fw7*v`VGl@F zS0$?t*n1*|uhjWF!JVFyBEeJRM4}i!*Z@4P2+)Ypqeok=!Zd=Fh%rPf3{e@a&)GzU zecKbcNO_kF`(-><_M7r-jwqCRXg{Z>g#t8}_Z#Y1D#YlxyNn(T>aAWRei|d0GC$ffM!vVOoA_M~gkAupj^QST20Dm^~JvyY}vG zYZPWUky+!{fcM-@B2ZOfQGu+Z{jLM@JA^&Z-wSFnXW?JQ{tzOQn>HmkRolM=eb3!H z_!dv0F!?iEHP`+dax;o!hT9iPd`;S7*8qp}Y|XaTLy$3ieGAe9 zSpphEXEAY6K$JRgpgk+($7Z~&3$97wV!b)8750skWf6vBTPj7jZfGhu3R6{XHW%by z3ow+oAn|=FgtOq8e|oZ~_r$8_l_A`<`;B&02#+2-(7vItugZy~Y_c0f(P&Glawj~Q zy8;2D+1M6B;`4qHs2m%M5gR4I@a2`Xw%}!AOo-z!HqV|AU<~N>_j_4fh-T`s=c$lv z64FUlE8stu{CMVsn4J`XMkQ2&xYT4EJQOlLkf%R%>dtCvi$t)RraOiszFO##ARh#S@(dm2~ z739$hdsZ%2B&+~rIJYx1Ly{8}`)sbleo4p!6i>JtMWHnraU|IPm5M&Ph~1#hKKo-| zeoyo*2Z!+4m5c2Wg?*QK->kPsg(+t6fYD=Yihv(P9mOTkvkL)Tnm4!2QsZmwC)zhU^S$TK6t?K0uU#B;WZDL0T|Dx(A(q;%g#le{@mq z3a}Up{D;B(-WgI@sR1tjNkU=f7`5}Rxvr&=TXFp5jkR%hnUPYi| z$2Y$=4F=mU1+~Vdd|D|_GQSNu^gxU@>=gT(2>b{nH#Q|V?r<*})$WyXqkZ;iqUcCO 
z46-i?_;oABKkS6v_HEI8`w|!-w|hbc7IC^|2+%iPWl^bJ^FP_W6>gl>issh1Wu(A~IYjTUt)d#K@^269Ds6wePlq5Kk?GxUv=t`UW3ez^Q(Z z{UqcV$B`zH%@E8?<#?E{cyS7L?UZIK0`$BQs}f}Vux5dQ-iTET(+Rw{@ddUDG5o0B zBdrv2!r7E}!50PnQb`oH?k=#`MbPJ3)#R8&8{7|hwOvsLz2kJv{hV_(N8mj&B6EwP zd~fo3H$SxBZWo2&NLFo2ZkS<%1!>NG{KRqJ6XFAJ-*mw8Q(1Av#V@%1OcXH_uV zHi$3scWJVz8D^QaE0)%^l}H6u)ps;f>e& zg%s{)gR*P0J(5Z>9M+sYAq>m&``s)DjYs2Z(BwWQ2M>SRxy+?I)*IY$dF}F-_bTUL}c?x+7kl4XuTq3 z3o@eoQ1VtDW_;DH<=Pmw}aQK|$a%ng0>bFwGJQPRZ z&i*3;m1bal8;ea}o#r8%$lD9&+8{w{)td+>CUo0H@O6wsy2^5egQ@B=>6!CuR*u~Y zxpOX79JB6z0i1JS_ZlZR=_?h*fE-sqE0!9Au1!hFGe`zj4fl z3sXGK8W8%n`vhrAdrkS-rKRmH%WSkLei{-y17U0>RYGw*H_tMKY3yW{*F=R`sFFKl zRA%D@by6z%SQ#{~2+2Ej=hmb!m7^K)45qTy21wibVc=k$AVb!_2)0#a0J`tluLWr3 zxw$oxm6=jw-_fX;N>HhU+)wz+fLX4cykGfB)~yg74*U88VlQ{76>8@C%o^vfwx zgmu%0g(;nluO3D4NXTCd%XM5TX_kJZvxL3Fg~R)m`tX6nbqL=~CoCVN3B~)F%HztG zX+Y@1=$Bg>S~rhg7Ktn7@cZpHOtM4?1EuS0}g3 zmF2CgX8B6o(PFoKMUYC8P1Wuj52$m83%@@=*k1Vg)dmU4%!Uy9}j_SoEG>+QHOtEV`E7|mY={8TWh zPClW~J~bPnhG)~xQ)|jBO^_N3<8fZF2Zc!S?Ab-`4KOR}X6dGA6fLV7D%fj371Y96vfd7d#7>EHU0X%9y&{SXb-EV> z=I$Aen6hPfLJ{<`q=--JeF1-Q!dMYzEx4Am0bDl+GIh&+Cn|B+J-JyFnx9cP67}h9 zyHD61|G1T6fD--P;07p zCyI1M_+@wOIV3dJAKPhVqA^73ZcU}lNfmcDzon5xi9I6{wcq&Tn{9pwzddicEml}x zO5H~HfVRn&i9(;)i>KKJ0pHL*xAI%}tB{-nLIKio91UL-g{pZ&5})mrkUN$ekJuRj z7PEMVxGxQPnFPEu9o&^`- zDWW2-{`}~@n@Y$}4h#67B2eX_{pf>cKI4nw=cbZ-ZKF*HgeW>sNL>}!N)hN4Ujv-Z zDj}A!^T%0zqHNBI@|pHhNJ9DSH*G@9S#sMFJEgE+T*-!2_|mZN&4)tmL*jHXO@O4Q zdDOgBVV_iN3Fhb?QT*~6j267OjF}?&G3h^|zZHf2xyaKsgm7o90)KLWPv)MX%WX`6 zjO3`I40}{in^h#6_nypD-rbn_LmXFHtz>uC)y!((v=E_xUhAikB2* z*?vVJPsEQ4CSfrYbO70%dQO;FE3t!$^sVpSIn90%ptJK+W-ki*MJv9I{aTm-aj&#a z$nc7V7g7|*iX(QVFs-;}4c`6$Dw{(w* zK3L%fd7Yxe6O@|2+nHI}CMp%6mp8%cLV@soP*Jzv3;IE0OUGu}#fyA}Yy-1!@O7u4 zU-=_>OT`~f!tN5y*MjLLFVMKp28&9U+V|Gmuuur^A*g# zg&ldDn{183zI3E_YIJjlDf67D{tF$FZ&gBc+r6;Qw^~8J-1vYpqWL^uh!mD@)igM6 zcIjfDClf~cq1|@1AZ-%w)V?l2=^WKE6ZSnJ-$EGMT&mz5q=nq)7FE5qd$08nqO1FMfoC;1D$O1QW&Ptc-t!+RzCbG*_3FqpNptA%bdd;O;hmW2GM(mBh! 
zwk8mCB0Msa`K%OyQH|!7(DVi&n$tdYTl=bowokC*M_{4X{?p^rYayAFQ7*ObEG3z5 z^;lR-c9U?(RyQwMx7q!CRoO7n>G&EzW3@P0i$e{NWKZVacmUt5@z5SR2CLzoI@f|_o%+3><L&Av z`H5`;kcTAs1okhQIJmt(wQ@`|gYh}70TZMP3o%$fSeQ>!=Fdy0Zbc4W4_uVDB9 zq>rV`(Hs~hS3VnZubN?3E6nIb$6-v5M~JTo`=34cF-?_>jeSp4x{HG=oP=U*r+X-( zThBDhSC|GQtCvc!usEdUo70_kzs(i_KHs_O&2LX#e7<4T`86BPZJHrfn@y$XE9$LD z3EYuiqeHcFKO|0ht0MH_IG^NvU*!lG{6a*kbIZ8JUJ+zbjU7A$Imdqr`N=+9b$G2^ zvyS+vPzos~s%x1^?ZOHnhtSKH7pi1f(jXjN%Av^GHu>kF?2+Zya zE24J!>I-Ry!c@3Qkf=Ubh`TdHonBmjdg~g?5~QmS6l4}zjsRUvHm!4vfGy&E&I}pc z5BMf~CS>4+FK$f&zRAaHokH%Cc;@sN5MGRj> z>lQ>HKD7n{J?-W>hMx}53Fz#y&kOqGXqa;1`9_E@G#{VPx;L)53D{lr_8=9NWQZx>g~o5MHSs)wf`W%vs33{{L}6Y&{DBnzwU{8)bnDs8#w*Oor$;k6i4=sGBo0doLnf3Z50)kD z$AW%GOvib&$IW0sC)wE{4QEY z*p1C9{|aI^h(g9(iI?{jV085B3a4hEAURJ}%(6#AI(MgwhIELiQ@uSE(xr9pWh+9| zx!rYdFI#R~1pP`()=b0YgVl!w4D(0~p1drm`r}zxR1F~gmn#bGH34cLG-Q;WP?+(R zj-w~#vL-Mncma-28eJyNg805FltcT(U@7|)q+&HRZmx)A8eMb!FYuyD{t+MvP zkA0K%5<#Y7rRH6a(1FDH(Z00NieNB{-Ss~z@9(Ds`INNZ!eWyQ z%NCo_Q3fJW5nCzb7t0tP*sM~RhQ)M-yXZN9<}Qvc1pAzjK1|$BVyQ#UC!6dF5vdWj z=`4iozbxpBL+2JNi^4Rqtzw6>`n%qq2pPm$hgDfDNPXPTvF{z?k{j1?q_^#Y{wqcZ zn{#%(2>#x3Z+NY>L!wcGJPbB$moV?2L~*Sa`{cb;AsGb`&Y16uKodEY$ezDlQVC{i zyW{ZW9)-2$8ugQAV+DP0P>fn?Ss|L`l5=RTUio`q;jZwojiP_IxPBagkpT2q$!jzO#mFCHnqu zC_zSgI0Q_$e~Ca1zg&@*iFdS5Q~_ClEHHjH^^{|sMDlHS%*|c)s3^3(C=*`Fj{=x1 z8MRpgei%k@B(cT9^ra|2Vb3Y7MX|Bbv69wm;u=Ks6NktU$M6Fo6$6*7t#=NhYV4$l zzSsiEPn;E|*gp6;cvoTOkPLT^@`&!>9cc+d#D7+#FCG(n(R}+*1V1kNOgIwqYQuTl z(fx;)6Dt^xn=1SG29gE_!a8>dZsri&OH@A^=ciQ3eo}8y=$Be7xo7?^v3??QVRgiM zXVWcMS+V|#(w7MzI%30x7|Zk+uU8KRkjOz?`f&leLJ^}|x7ZVcbcHXIV+jv6Po?6= zweGf`1c*ffWTNbsg1$rJq7o5?TPPBvI3aduZ6VCJ6T*yQx3sSIQONC>zvXu8#!wle zRa94P!$r_Slyyx!iV4!{@iBg2%LV+m#;*!W>in`S5{#58&9M4-YhX=zrcFcr4%pL3#YtGyo}Tf-*2>3n_*u@s#X=WuS<2r+yd+X|wn zktetT-6fhYJdOlPmh}~Z!gJvD^;1|&MvlcE4-scqO}A`?{SX{pcX)yQSXdvkyK}mA zUK*0;aC9kC*jIujK)%*#BG76!hG{IYC8>DW*xWBRiQqrAf&Hyoh#@&WujcIZ1*i8d zuwRD!C-<$BoyuQ_r}F7_XV zsTY|h#`&X=H`%mF2g+-!i460=%^+&u5y7u~FQ}7Y-xZ00aSUs|gj{9lLI; 
zPn3D6oE6bYhywW$$4LoqWp{|cP!7b~>j;JY5`i&OY7Yq0nDn9&{lYyi$RM!7%OoHs zhPsUxk%@Ls(<^YF;wiLhVzykv6^Al-3rucaWy=KpnBgmzWBu|F-&VRDEgf4UNOMlt zO|{Ap#c;+a#;rL-o!m3m-V0IA-xeMnwvmg5MsS(UMf@5;-w$UkubZ$R=Nyc?$(3xb zNHqII*=h`qGoiLTP^b+PO7iaZsqLhpBg63>n`@T|(XuYRy4zI>Qy*!E$1y#Xg4r0h zzP}?SHiX6~tl`;sKqiYf9E)m} zcf!Ahw7iXG#0pF7JzYS$^Og}2+CW3hV6 zk8(c|!)J=C1PnFSc^a!r+_abG@zgU;i5P}6wd>oPDr}M1iC71x*+v)dal=LlP&WZTOuN>BR7ifw5<|i2oX+{e@d}b)``RjWyt># z7t$TVv_)r+SC`q(1*txY4Wic-rd{~hDYQQcP#bOOi*S0L-V*FX5h=`>zTi4^WewP$ zKQ4F~D7I@vrJNFpjo&EX7f%{qaqkL{mo*tay<51<2DSp*v|zSCVhy-_JR;IPv4HN zhmW+^j*3D#mZ(cG&f_ zc4+J9@LQ#XjkPKuONI!DtF2y=OA<28024v+^&MthW` z2l%XJIE1k^3)a?TPl>XWP4V#Pp?BJBh4p)tDm|(Un{9z8R9Fy`Al!CGbYRS2+v7Pq zW-hQ76s9SO4XY$U@=L)mzWA)lEwwWqPtE}d5uO#)Zw|(1#E9Bw{}zd%$ZcJbW1p)H z;N|J;`rQS>T8|z+5%RKrqDA%XN3+5P3;M`D!!oV2RHt+bP0q3sQD_r~+hCt|S|>rO z$hxVl6@kvu$o|>ZB*bvGtzKupR#??FmC3K*Awk;u^3+3n?JbW(BnB3(eWZ@K&=gJ% z@W1V1Q78!0n)XSBS;?GdCh(+g-w^iSvGmgG_-$Y5B}5N082p}Y*NGC=ds z15s&Ih+p7<%V zs_bsT@R7>bvouNsr6PraL=j28D;^bvfz0Q7zsNJ7hIb%h*_Yc2k;2x2W>CcHE45W3 zGAgl&*mz*8Iwl-S<(h|w%RmV|D~yk~zj_8bT9m=%;$2}Dh4es*?8aRoyQG?{m&k=j zZ&4VfH!HbX-0ab0ZmZnbR=M786NS>@?CPf8&!H?oB8E9aFic7&#<^pbh&M?x?&>_tIZFWV!$qw_dtr@XwZmc2aue<;KtAyiTigP_4~*bO24 zPKOQ*D9Bmz5MM?ylKE;v&t;I((e0RgB0nI&LSSlArN5Esh2X*+S5glPQO78q$g`2E zs1SkP-iVFCSqjtJ$az4fJt^49KnTuhkmi|dJ46p_Sl%SeSNEQz(kxn@%@<1gREgyHzP1sx4~Skbx4T6ZFi%CcMku6G7HlDx;MWt&iwP zCwYKR?IDsS*j~Q2y?jA?`4&qNg?4s6K=l=7Az}9IJ|Xp)=(7+}n30He$D0nn zxLhToedBlsnC``?U-@j2KNn_Hz;Y4!vY=kpbr;BMt%6}Sr14`9 zIP3_KT8YeM*)S!7)rpX6d8AZ~#TPbm z6hk}E#Kr-W!GmW6m*wJQWEVYGw*v2ObvVVVSY zGsF6N&WsZLU)ey<3A?Etccp_p=j1%BPGewlxMMmT&p>_SA>n9b*`KxSB{YdRw8&IY zM3e1Nf$&QihMV|8d(sJk!OQn@g$RsFr|*oVvIXBPVpts5-$3+JIDFg&=!qt7?});3 z@0QZbez>17J?2M058d_K1w-PK2g<*z&ct$aTQw72l)vHTSo9p48 zy}R0c51*Sg!xk${r?~u<+HnCo#jOCj6%=^>>nZyqDE-j`L{e8~j>L$+AVkw}wz~{2 z)K!9^YRtHGwX20W{|uFN=1xgX8zCx{VUu*7WeHFoOd`zqmwFWPS&g>OqjF+04*Y@; z-8j9o*?#XaFD@&E!TY{vq7kdD%X1{7MG;HB$+`+s{CE^X3N1xQKLOtsIDk5ZNk^$p 
z^E=E7fYJkZPWK4X*p{t4jXx&9_L11LHL<5whD`PEqClq>4A-(k7j~vX>QoX1&qn`l zwvD2P(do}xwXI$>?qxD#dimE?Vut&gwi-j)z}%OptAPL$kflPWhS&GPpPm1DW1zC--OnHui4k3B zTa}Q$6-OelhlE1kaAhaAm+f9?4_LZ?Q<#=_G}WKk_l&*ki4#o~(N$!Mz2^yzm+j%N zy)Q`FxxC00+ZS6v^y`N9viWw608vQwBX$*`uod&Qim}(&ZAzdguAr=?2l&~#%UH1@ z=y7gKvz01LgBYD|J$l+gFBCzY^kQ3{jBIHrvX!2%FBbkbC>*+ZZtaw_t9RKePU$pn zu`Nj{uWm1&Ew%B4($Gp+Y+GTn@m^+s7KsL&TDC{;t`Q7V-A_-HUvDLvwx(j(pu55( zg-5U$OEdiiHSO}Jiy=+RaDwhhQAhJd@Xw(huI7seH(NwX?8>IML}5xiT924ujqUWP zj_T?5vPYq*imy#OEJ#uMtjhT020g-Deh^}~pCN?}BLhz0mqiN$n$j!9z9$%TUgX+K zg3mgoV(hrwZt|p&U4gZBYcl5uZcVo*p-fpnE*J*9h|L?l@@i51m?1%h-H}R7h1H54 zhA;z@G1e%|I5EY=EZI!^g9t1m9AFjN+XA%r?YdpEw0FrbLS}6NNZ#3Jg{d~NXK|u= zE+&-&C3KpT6TUasI=zUt>CfNzgWUDL*iqTQ~Z1H(p7&S4&r zp8`9Y%&ykje}n^LFR1v)K?o0#yXVP~!6h#cOY3vfTDGx5t`Oum!4aOeUoFIHb5k5U zc#ip=NPeS^ED`fgcY|o*ht6&!v6ni%1B*(RTGp0WhQc&Br)5vJ6?nv%icL02VL$9& zI6>8}x8@AhX!57PwawaCsVF`FLV_k`&SDA3gwIEcf=uoVKKt>V|k zY>Nn?bdDU6w~SiP5Sby~Ef9tcyPz#XTKc=n6ZLE4<>HVh<|97eekVXrBFkzuc3P0K zIU)s6LjXFm-94VZYg7J4EoW83sQQ@DvB(MNuhw zGKU}gwSfQ9FnT8mD})t;PRj;+%aab5#PM+fb)`I#Qmwc1$-L19w<)@e?~E*Q?y=9m zL~i3l5_4-~9pIM@ z^UMMiDYgmEZxaQ%K0ok)J)s~Kw{GUu&cns5nI{#d;MV;pi!B!jOZHgJu_oIf%qqYf z13$yJgeaU}R>8q-cE)q!m28Xs$>Sn3GCS;|Uz3y%?y+k9;W2Oq-#N*Q{6G;}NwrPk zmSsJh5WWAgU3TN_X)oer`BS!1VJ3n)2gD}8+S({W7?vSA z8b2o-b)qi1B-Yv6$t*>Am<_eFA~3AEa>Dr+f$;HdtE`ITsMg#2BGSu3Y>0m4Hz9R# z7QRORPk`pK_e$*WZigqDH+EnjAj=_lNEw+ec1T-`TQ=KdL7FZXqS( z-3vBJ%S5DcDDh<3MuE^d45K0R=Q#okH_=!xJ?=UY!|!uA5?RrHQSS+(VUiKGH12%+ zM5F;aL?HdR(|+Droc0ZDqVsRLy((g;bEITG63L?6FamS@PZ1fQUfc?6bvf_ zt2^fG=~%PI4hV+2hvnr@u;)Gc#;$#^QEFd598%Mi;TYZcuAt9RQ46*MlDL>(g$)#D z4AgR-gzFF3M5jbAm)JhsY`LN^oGnc-?G&ala5Od_-DI-_`OTmJ8XX@I`CsArDUtIj z3&;N8iJY}pe&rcq5$cKaWsmCCy|2BdFybST+C6q8Xm3@b zu_1zy80tSgFSdiGGd`ETLVDVs*i?aucDqiHT24=Cv^zbDp*y%g&&GD96pyLQe(Deh zuK1j{4A0mXk>qTJS%2}C8tiZh=-iE6Q(3x4Rj|T#i5QMABVxJ56WH;12rRG$(P_=; z`l>V2=hv%qs2Tr6eAxojYq@WZSdd zi)E+HvcGx>rxw+PFxA7V7vueJiVCgdJ1J+OdRuC?i(U<_Jhi#Jy`sWCB^c)I@cbiF 
z?9;-usblt2_63E*j(__k>np@!;=(22>m}R&KjPRADUJUq80p&mHeM9gjSR8l!gr26 zkxY^b?}hvL8c{+MI4s~(TUdJu+&`-lpjYAr!@1b%aV@i2rrUD@VN1Y)G|w^-LF-{7 z4)Gf8*Z&hAaV<`ZKqdF-kEvn)el zTAf}HFSbVn!bszR63rxGFFbh0#Hzo|7gdX1)`R#JK9gu{w9O*YH$JpQ^kJ(Y#kMSL zL3z#NBGH~*_O;iE3yXG)GXuUWQuvl(ADYkP!R>Z)l7?*saS7_}N1`!g;^_@gm>L-h z8EVTHr0ocG!p@J|5@A{Z^A00t0@^ra4VnA#P6hm~yis@MwVoP5te9;SpyWubt=w7! z!>Alwi;&8BVH%N-=Jb0CGZr-B?B+SB&YgR@-aZg9tUL6W;4jU60|Es`W(t?vc0uBK ztqhLh_J|O=Usx>1V7aOGrf9Uke~kCeV*(6Wa0UXCZ&e_19+m^NN=$Ap(7#eNSR`G8 zc4eTAJ$m)APaGvob$k+b>+EmCL{tK}d+-3u#~kJSYZ@-txpEfEN#*gvnN(CUR%Q~mx#{Z4%! zf9=Uz8hMC&S%41oN$GB<6~6MaFS9@4)LWvteu;^mV(cA7`^{ieLnN<*I}T=fw`TT3 z#fIfSjE^8MIhVW%MdJ}Mw7H&s#R>XRWyf0QT80SB7d9x))vmN0LE2J)Qe~cp`@lUZ z_I%-Mg9_&q&qqP&6N_!8!>CC!ZLKH26Q@@hcu}-hJ^pOP(zC1f+8+c%OS4+G=uzi* zlES$2&SWP<2~#&>u$>grD^LC6#I9M1`t_a1xw&>)QFK8%Hni-0LH%YsZ%mkHvroJQ zk$&a0th4S4Q#n5u*Ui3Fl(33hHp_3tZ6dG;wb8mb-#C$2JICf6TV(eL=x2jFR8Cj- zC-aZaD`st_i$L33rngkyX&FMHw|5omM{9+m(43By>#fMsMn_L85u)VS_>36Op^rM0 z^mZ|Z2?Rq)k!AT}Tj;s5Z_UbE?8%vxZa1dbZjYRRbWf4(_soeMnCFbl8z1-N=axKs z4u`D&aM-qbr7k7c9t%xDW#jbr-S#6v24EB($?Q%6ZTfoE7mrWUPWOoze%s^YrTOvJ zxw%&CxzaIxWRD3jEaQvi6=SmyU5)N%v+Q|?GDTRu-(L2NCGrGv!s8~jY)R)v^p3}I zFv+v8{f=TZSCJe$GuB{Ni^P0JFV$j5E)>CccfE3>JV>&Ls#)4o$gYE&EaE$O7R%~;hocO8E%;aSeCn0BIZyIuTyvV`59ds=o0yV+$Tg{@$O6z6*j zhf-xGilgo$qNrMg+^ehY2|=398{u4z!!=eT0>h5_cayy-5Js5p*77^qWAAwZ++;8- zYyWn5s%poXb*t@Te;`L_lf0=*Ed1jlQ6ryE8B6)1U}$2*6#SNOXrivkb+fIDr;yo( z7@EXo!mQp2Xj#^J_{`dAw%60K`o5Rax3|42%=qHcjbHfhguL8_W+^Y+@y81klyW)K z+XzuKBkK;LwKY;G{4DWx$`oaZz*xM+jowN;7Gd!nx=C262=t5UhunU<9T23x#HQ_$ zh)%V=m`pGV7wfNj0Y_0#g=4YD{^IcxKZlq4kwhqf^7<8EXDDW$b4sFdr5nP!xs$T7 zJiKSyh)&9m;6%07(nSf~7*c}Lvt@`tH*&<+T`Iua`m4({8_cL&*;nvKehR7;$w(ta zV66&=?v2SSv3CW-#GojvQ+hLZQJlk?m@k2sDG8*Pc$6sjx8lUMfcB2dNc@xn~G=@1MHXL#nrIuG3^3Vk1q z$-L}h-hMLdry@~gMrJlD?GJhh^rsjDQQ3lw0k4FWRvfHDj^v=d?2&1De#*2zI_#J+jlXun zAx%nbT4jF|q)F@o&)TK!p^7d&yV_?I4%3{)IrRoX(!pyhu-gUvCse;xtc-D@=wsK& zJ+>xVl+*1ep6Bp3L{Z`na$+!Oo)8E_jk7E6rmY^^GNom!{Xsxu*s_k-ig$(R_vxKW 
z>;r{q6lzp0>+|g56Tom#ie$xky^Or>UMKpuFS|;8lx%zb|J=4YJy%D*L*`gdCG*Em z+1%vD$`KW_2Ak|hilUbt=(*hemc6~&VDmpTa&*(=x zh@5bMXNH-)igj(Xs3uWOk+%YZVT3BrT6-jqamhl3pFcFh5>tiDX zLJgx)G`u@m2)k^R{Zu%tK|P@4TB-=#`6fSN6BVS)0u1d;QJBRZ`Adjcen*^waHTwv zA?Nq5Qxet7BU@j~?c!79q9y#45J(W9xR$;7)?Hz$#ULI^0Y|3VAQ2eK=m;a>H{BO% zM5OI7=AU|Fk?j) zgD-GuCT<*H;&vBR=h?<}(an+{Ob9$2Dokx9lVh#ZZLXtCM3xF{g;3bw@wHlg zWT~wc#ZRs^^54X{V6UwgHPkUa8; z$j)Q$1czF@oN96fLod&*o{y1tTk4c(r$l>pxvdk0QDVv8GsM<=0VfXZI5mHo)e460 z!~4s&7LWGN_Zto))s0&FP*A_8=x~x~*lZV_gTnjJg(t!w=-6bRQ3Surz|VudhsV;h zwKL8yMfq1z^ia+f7``5Yp$Ts+SoIkjS5GU#By_xiu{>p$iYIMtrcK-pIJ7=HY!X* zISIvhj@aW-qtMnr=uu%R?X1JL$}RA??L)!Pr4n3;+C}HR&{bPPSYs&b;h2@(YM)jt z)eLE=7mxL84(adM(fuq%kn|W1<_IYrsve?*^%mz2S6EueUCxGz8tO!6vDngu8HS@W zB$X{duk!LUEl1(7W)d5g!_~ARPg*=NKhI_fFqp-~@~!ivqu3-Q6K$a>;4 zP9@h?`=uaN#CRcpRpHQN7!HhT=qnmg7&*=}h1RI((3Lxid8K_tm`JoDr`R2T22kmdGQI$NY=j^~ zhda>t1pBd&e(k#}6T4?68g@v&V|SUAh#vM29NS`R=TQ;D8jU}~qq%+*OFc6~kl3&( zVm&|29&9UDJ{DV_AgIH z6%(+H6+s%$Ho%YQzaIHo!Y^pdac1O z`YS{xH_@=lKB2HW+r0Ty^LFbh7^bT*iwD`B!dfYCP1%;w7qDAJa?W4!!&DVnWZh*C zdwH=OCIYvs4w2`~%Zl3!K^9A5!_3J3_H^4M3SB$6u61{&Z4v5n*>@A0wnofEn4z1a z^LUl);cp7AemP9QXRMZ&hu!>q(J3NunX+T*@bAao7!u)X## zVS3(H`3x`7cJtqWjFeW)rrM~|CH4~$DWJHpc!K3RU@UiAu|Ozogv@Pw!NOcwff*{`;`C<$G0D*P}b>n{*Xvv8Bdb(6tY%!q_dE&J%f>g*F^}w zw!<6s@?ifKMTbXN^=!~}I=M1;^58O%fUYNaTQeOF{9t35a4nTijeJ%Y@0 z4#qhO?L{FUE!dP9_Aig-jt_6@-$Im=(zlm=sIVH^uw2HRzw}Nhr$3j*JnJq*B#gUh zZ%((H98%{UycS0=%n?c(h{7wH2DevKwO8V^<{5iL6#Wu*Ra#|d7&Fb&6b|hh*3y7k z=6NE}e4a;<&|V>=W!%L3?K*~bmy@- zZ*92%&rPX0TTx|Kz8ezLi&1>Qx(d?9ZfF#C_o(*DW$l&Q>=wb$%#p}+6j;8e%|fTN zRAHW9w4f2_+KDKAFvs4xfi3gGP!yDLyUoeMhDKMKEl%Kvh5rRu_Vpr!@jSZzXnnw% z(^z(Ejs4b>%eK6J?)H`lbeaZ~rcbm#CUd7rrShyW{pyDu^M82K66D`o8ti>R3f7}c z+&=Lhh#w(>OO^H+0WAlyNY>l`IqaCe2Lq(mLoizR60+fn*VFT2ZobOy5{ORs>#*M; zWoWp&_j(puIh^;%0!#BOWArMOEkJdrcFj47H@rfJD*df(R^v&tW0MQ5S%CiZ#QDxX zPj_xy{|3L&jPLqedFAC5Ej_gvM zy;?BLc}c8*asRFee)mIond3w=%Dm!&Kdu;l)MB^h!*4Z~G9(N;TZ*_+d{*VYQ@XHmT-x&yDV9iDX( 
zE5iOM!15{LD@$|i-;Oi!Ho~KUeIOiqHnz9`J>O6KGY~Lf6@;d~ECMYllpDTp2!!p2 z9k~JF*&94PZIfD3+Y zJ`|sNW1%f~7@e0IZH1twD279d=LLc;41>zSLKC{nj)@$ql<{5r$iKjB`m}E>LP^qN z_^e1&!pUwEAO7zP>bKs#hxPWT=|hOivw)5DcRc-bAh1 z6Zh)b!`c-NEg7bv{>Z;C;4#?eu`eVs4qEU(AxyJq>9{+G+tq?Jt1r6W13ikLfG$)+ zJ!ep0E3xsy(G=l(=rMsHw1&uZ9ztu8ZqCzQM0DxhYfBxTKCrK2)>?Z`kQJs^>O%X2 zN79mPmR|%_d=dBSHc!cW1MKTRCsXp~XqUZDF<~`jpeUXb2=ob!KcvI$8;aNF1&6fA zQiSN?n1N&LArJS&{a?PqWJ=4+(s5?0U>Ie|*rGM}lqc@dt*<@ji6ibowLR~!Wl!r| zzIp%fl)ZV6_`eTAvG`oXgPDzYsD)cH-tru_*5tOuTB_4M4oHOP7psB+<(d8G zB7`<&=E(Sny_BRR#GIXNuZY61m0+64-WLdyJBcCv+<$|jN#S58qdSXjtWy~Aw#wC9_AMrg3YNB&3`9&7fu4^YI%1$z3uqm*uE^&3r(Tfd z(or+d_IXstd{hn>*lz^G5O5ofBc%cZUbHZ7xhz(6W<=29!@*aceJDg}#}_|iSAR&D zirDesSlBp0qVnNpKCG}>7-^%kPfzj4wl$kpg`jjMu$FyY?d^I~Q6??`}3)kS0#zO>Ms|a1_gs^}b&ulcwQSD+9Jb9^(Kdt6sUYLU*`~sl2!vi|#}Op8)gpv;;e#S(8$2^ksn5<@WG@L) z>j?Dg-&8oXDYDUvZ);IPX?dxsSb=WyMLE+#jS zAtHoz_3LRC6Vhd4ZFJC(}enC)~y>$ z{kwvp+YA-g8ctVTlC+4e(*y^8uxI18rnU5-!$|$9!U_Z#Ut9;F@Hkm0%woPc^to2$ z$$1(^Im(_D)P-04eaR12+bfZnBYr zfejvbAslWb$u`!@=z9YXM5O|uO+4pH-tRF{*i2rZ!ojPSrK|ssbeL?nqL`Rdbx&Kn zm-5>7*S6VT1?XoXdN@V)kIrOU=Gwm$rhK{6(yx1yU3^KP@H37SxwhNqMWNeo@w$$) z%_fdYUlob2AR@b!_mQs&vNFjNvF)cPsvE4Eh@t!JW@*+-I4tH#1$K)NtAphS+nIjd zn@rv<<)$8%B}``$4GV0J!p@{V!06-tk-1$eDvdxpC(eem+6&0$X<6?OzYq>f1VaTi z_?c<4W1>;$jabS&uP}4P*>AIbAP|0f*h%9D@}Z~1bfpdY1d&t}iT_QvI|S)SY`la4 z?(@hqWi@A>UTB#P;}arFuoDF-cO(Zt=_*^62yC7Y_O#gwGC+kf4(P4h>?vW*h1B6| z^<1%8l+f?ANL4F3!Bu$?Kk8wP3eul$-Fw^56{adwiss6sf_?SUK*&cB6yriXFsjk$p9ZM-9JwjD1U(#dpEJ_yOxB5;dP} z+;XyRrrjs#J75s$X|{QyQ9~BOi(3^A4S!|lkr{T%v$I&owCu67f~?)Pvel8=h@A4- zKRy4+rz-8EmysYWjr`(lxJ{oHA@qDWKl8zY)YZ37PrFm$P#1d6^>&ZPU32~QmX^$R zeA+_GNG3hB;ZTEBcy!BxUp$2b=}L#E>(`%X#%jDE!@wm`esi?*ZxD%gWwkVva>FdL zY7u-!=SNAJL}9UwzRM0O7+N;5WkZe~5~e^#LBc%Jl#EEBU-^iDA5jXHIUj5UJU;#@ z5EGc$f)My81W4IdhI^YGcDW#B^BM>@0yE2=R_<+Vya2^W88dFVLZOX?2&*79|F9^O zg@N%6$cW4qq*vM##8b1{WR8f**lew$_=K<2P5}4%1xoLDn!XyKS$+w70dK*?Z2@G03t=oals$#Fw&Df?P4x@gp;J`DWSG+NGa`%#>$M 
zYF$x8G$;)I5XAbwDyRw8M^yUW*NMPNWG0`akVf9*zavr@)1%wO?y;U;$bh_J*1G*P znSfX230%7#6onq7j!(0x3ey8Dh)%JYo-}%7jvj$GibC`H@?RHg@x*NVS@wEoVmC^% z+{v>+XC3yX1f9!QR1iIa_j%T`~`U>T8dg43Xq)4EwM34q6BFrk6df_iO8t&b#1VT0<<1> z#`*HSR^ssF)6-8hH`o(`>VW$=#pRi2=AkduRtcz?RhuF=ozoIk6}DEy(6Bcvxm?u= zQv=sMyiD6=p9eBN7}~NFh1s_5xW{f&kj`bMXY;MC6e3T=f2_6Ho~LDRnr%>+)?fel z^*^-D9x?L1e)fXGq24k2vHwyytSvtDVtD^b6z#3J&*d!a}eQ4dGE%wnb0K$O750FhfR>xH$Qfd4Ve%I`Sh4yVxLbDF9 zhm~==r(gxhq&D>o7KIV%*tgflcsQlk_4Z?h!_FP?eNa-J(3y1q6nj)*ess};)0WqE zMW62~ds1ZDh{IpF00N97l@0Ab!j^g*Mz@Nr$^qNe65A&b1}o}Gwml~b-HSL3JM_SD zw9bx*sCn7AIk9o4{k9XzkYt0M7Ysj*{7k7RU;af;E>`7i`;H(@8(loU#BLQ}!V9yD zIn(v`lr%KwV2us%lrdQ_9qMtIT*F+YjqtcEWP)s>K&UboQ?&6w2pxf?xCJA7m7cw0 z#)gwmZ?|=VVcH^b*I6jvS+j^t!KsQT?T;SrSW#{7C>%C{#EvHuJJv_9lI!jBUxH3g zU|~+QuR7?{hx5=k1z6JG?`^jz7-pVFzT$wXB(b&91}ZLm+M@ZZeU0CxmlG?_F0ztj z=E6Kg8fOb@1;tR$yNtwdiGaH6Yj>F4vOSmm>CX;3Hf%mMbGm)>%OP>1wjxox*sc%^YbQH14!`Js z2<#U;q<-B~AefpO!zjCT6M?q!n>k=l2!y4U*t%8L&MQQrHE*K8$p`!?K_+Q(>oR-S zbLPvK%O}4=gbypM!}f<5lr^zWO}Y_+^_8+ zPmtBJPA?e~93s``1jCHC06Jy$^Pan%j!t{wHmM0h%cY0o0sZ}MTyWP;$wg~zn-P#x# z=-g#HMATVlzXYv#HgB?Bo^1ls1w0OXDCmb0Znvn8b7|LghJoO z$_U63Vd{vS=IpT=LFQm!L0Vqx8cCgq5XSD-=s#-_rSn9CCt#UdjwDHH?5~PqWYXhB z{Sb>m-iM|2;MXA17@i&A;t9}7NhM5Fn5qx2J+k2NT6xmwp{2u6rFi)*B-2`3=DkK%#D%eNnNYz+Abg8ZAr< z_|!|_XqF&FaogcIv8Xe$cD?dGD@eZPFkPC+E@Fp0*92GsU44%Efp z08>RQjqh{1T_#A*th~u3)bbpVH$uz#*DWs+bfzi7?l4o$Pr*d<)th-ls_&4)!;89QrX!kOut6PCfV}}Q%PhUbXS0( zMJWHK*-=k=YUNt{y@z}CNU=XC9MTQf%kpty|Glx<9ImksJaxR47o;mgg+1`i^C^WH zftF3#2r*qPM8gYWu&#$WK;$yh?iC0#A6@0k+RB>jNfBu+KjQv*lWkKH=43u>+dTh~ z)mVVp<8hpArgL*RMomLI;R=uAE7QU2 z^*22Zz9JkYfzbCeo0gqy+-c)IwRT;E6mkR^EI84T@!(=%CP+S=;QUXsa!*gUFtuZA z1w(dUb`7_p%p5AH{@wPcUq*NHlP5`N}sTXeG-3 z;#C3+<>{S^Pd&NCx(jlq!9>T;JlO=jZ_adpVnLS87hvv?O~aZ(t49?e`DecoU`4pr>_GGI_)AZqC4}a3 zhhP|_;!SB%>2>0@LkV0zvZMmFITTJV_wzvF|I6 zE-^?gTlmQfcK%RwMX^Ci3f`F}wlrFnQ&5x7l*D$P6?q{vx*sw=WrB)#V7Mh#l4G9~qHPS;YW$Mi?eNU>>8BP|*@J?V zl)+WgG6X2;==L`&j?KaLVt!}d)pD+3g&t1}3uDDxOQwlH8+G`ct1u-Ez;cu=6<`F~ 
zDmO&@j&=5-C^RG3l@;m&E@6h!P2^^Ml*rO6jNI;C%VmLA=^S)0P_K2eC%4>~P`2d2 zwlAzg5#noJh?c2~b!+I01x2pw1!xPn8xpLSKJ;BQ`_g}{Cg}FBIIAluMdYmxrDR~j!ZJnv5>xGc%etXzs+g7i&NeWYM>s)SHWdd3R zIy^43=N+OkD+kVpFJzlY@EV0o61@-e3Q&JN!CNtgekRc#*DTZJWXMC9EP8@3c6C?6u#Hmrlr1tgD!hXakS%mw_b0*RhV`ZVjVQs+Jx8NxsLbTxregjAwZygzRe*KaN-aFaK<;knI+a=Y?Ih_qm2CVSnj0zUZrS>_dXwZU{uqW^+49BuNQcH9Bhd9v;A0!(`>D-}+~wOv58raQjNu2Yyig)N&3?ArpN zFNpA<9@o`VcI<4lG=(W6H;(@oOLxRT_Rp}Vg=h#U9k1j&Juru(zVd@m@)#Wt>;@rP#N8yG+?-5 z=~(YR-R-BHDRI`qG92$v=VMN}=?;-A75sVDnVEVoTv0wV!K2NqmQ)=`-NawVEv1;)3tu= zh-mc6RWJJ3KRr+MShC6fxFF82(Q*Xo z!B~{Z(35f$!*zI?&Gn=R@6~g&UFcAfIdJ@H1sU5(t=kIh=N@-@-5j|y`L$r^DA&16 zUis~hB6V(081$~<`Or>#wqS?T&IyL5r}V|m#=nK>`2e0+(lR+(U)C)UIHB&wC)zH% zO$6q23`XDjD;&m-1vVv8J(?n#Zvn_#@^jsrq$H}AB({`Wt0-X#QhIl{Ho?${(mdVI zx?dkQ`hoLffWjfb;k>Bq&6IVI?y8K15#}zilyL=MLTSY!ZZQH z|2yI4%oYr-9mOHfmI^zAQW_cXw@+XDT2CUy@kUTAQVrT2@mTtc92(L(6IHtb!eK)1;o0a;hlw3{ z+bpyD1T|D^)>*oTyRqqHD;(A=E)i0tI`^38)77(Egh*9cJt*(kC`=CAPrMEy;Fs}mO8u7 zVbp^~mM6%dO1_9iz29ayL8~13kJ&a;1X_zD%`@wEotinrR(TN!uyFmct%6!P&+vKN z?r|NB6(_3~ShHYg`{C8F!hYk?u=b?O^Oj)P90qf_ff;?q6QC}mGvK>|VdV|E%f8eX z6uL4NX7M)!!=X1)tDDL}aF<;vDp`1jV9y>XME@eCmnpP%vItZeDJ@}`;DBxQB;g4M zX@T=1ghq24Y_QMY1ZFBTCl}kd6sEk~oOpJcWbVH$LTJ+5leex6FHHVz>>RC z1ll`pa6cQaFx@@4KYFbZ^36p&t=agtwx4?b=$#aKDtpWc`o*bRHbaEa;?X(~HhO{# zRzOQF{M^??;2YU#bB0emuE@A|ERjitKPI!6!g{hkw~#vGhGKbSSE3QEO8KKjx6*D? 
zR2V+Fgqaw(VWQ9hOaoK@6^(Y-36V_P=A*mqx1JmyetC9M zfOel-GXLC^m1pNou~VL&mW-CwSo^C8zSL^;=)zX^o@maBlsa$ZIviDxeRNK8&Y&J&j&j$ZY2&fWxKr8!GsdT&L0=hirq54Ws6OBl$w{u z3HBKw{gRPg3iJa3>h9NE*lM?`+ljC|nI zvszIV4P1Jgr`V{2GP(Sm7uK?8qP?hasF&Z#EZ%wl?6GZo*4kGF5E~X|0lQiRY0_lf zrEXB1es9>B;iEp!ek4LzCu1>$U;~B2=nO$d6=f^2+edhjhnKT6?y)<)l*lPbJ!kuS zMeyECv35DM0=VjPu1-X__c-Z=SOk@zY)&>fO zWj`XWJ8+f=)N||cvXz!AK&x774z59C5W7C>JUgzB$~j$B2}U+%WU0|bWc9T2D^Hi5 z#~zLoVnJHR=(0P#=aE4z&^|FR)WQPbCo!I*%vVIw4}EKs>}G#okb%|tPR1Ez@Yik= zF>G-|ctpTMRIUvYEo?B@yvL$tc*h!1V6j+wK=+?R4$g)8Zbbn-^cvS%P62 z^YHyzWD7;0%dt`o_(n&VNTtWyg+g<2Px-9#qfN`Cu@!lb?}?wItaZDY2fS`J=GxKThRmq$go*lYJD& zSe)Fry<_$=J+=-Ig&q{e&~CFjp-}$qa0hL-rw=xa-mYo17Eu^GNe~>}Wp8_SME1Ep z{mCO)57NVb-ys7#6lU%Rf)vh)JF>y>u_0gv`lfE(?CT0s>>b5ifeWpRVEBked<~Sx zk2Y|Q&h;$Er!BCbDa_Ep0osRggQu<1%Rn-70aw>Wf??jzRyCcURb~yMFt%vI#_YGA zn#QHzZ!UsiA{a+_xhE9w=L|SEvi`Lg27&!4nQv@Ho?SVVWOOPg4^b(;21A6S@kD0V z#tBg)+hL|n7ob7W>@SoO+)U4m6&$%vw2i_v;^;21!r_hAI3Xt1qpUXmSp>!jX?%X8 z_MSud;nOff@x?nrbuC-rCVxX9^e8Ka=V2Qx0^|L5U6l<@My8=&8n?Sd2%|Ls78<|u z2SvGXkO^L9rN0}|>0u!j;7TkB4C}P-?v^wC_ZMF|ZV_;s{d3fKe zTkKQAfT8mUOY^XCr3iGsWoNG4pfG8f`nUOZboHoyV+UAIg+q6EibX6x+iv#MSXDu1 z$p$4ekEXQ?1!;QgHZJ3*ZVB zE|H%aPIAWoRKxQ2^11C5Po3Vny}hE$J}o+5nc*d73Tns3*j>%sAU|t;MWjU>Pz!CO zK={N)5=gjVfnPdP)Gz=ky;E$JAagGZJ=*nm*jiDT9t@tM!@apPzF|RP_Y}RMHi#02 zw=hcwq{AYF633Pl*c*b3cJFR|?YP3xZvat+x0C56Gq#tGAdWs_`2)T0_VRh{6?57v zX4~bWQ{3UTho>Iq;a89!S$^KZy(Ouo_JA-&mSX4Hek!2W%LfIbc)RRDQD_~XE9B%@ zq|b>!cVv_=ZZ8N?I40)xW%`ZhXEorZQmWR6Me$SHN;Vg1^z664i%O*j_qQ&wkB$V; z!92ErZzvoFBbJIyS?ehR6Rq2qw%#L8T0fCAQ*NkmhqjWjUPza&-EEx0G?C4RqtjBM z@M~l>l=5p_B??1OjQ$Ag(zXbPN@eaRW=BN`jV>(C$B0-Y5%5P*>1!lRw9~$RCs5y@ zviWGJwUupet5{?=h)Bm$`u6Q%w+V!DMc^Mx3H_%)=y+t^WRHE2r1a;z zf`s!AMuiNhQeZnzAY_P)qs0P+h753Vi@r*sK38&uT@*QBdB*M%k>{u5@J^KJQPJCBlT8-nhj`~eo30=;4vW4KuZ39Kwml+IZF|L( z_VV@ZVwa$>+d2=zaL2H`V$){zz-pM681)N+qPk%GacyDaA)NRa-Rrc z)bHmu{4>EY>IKD^$e7~MGMTnvhb9-2FflTL#(gH;(`hGS>e!IDV}1JM+ADp>ujcdS%98ktVjFZ_XPdRC%1lkxwTQS 
z-7LEQ#IQTKy(ls}IKoRhwD`~pD-{U!w7JJ})Lx*2jYKPJ3CDuueFR+c80= zAsx=nA3Z91^PXY}hvF^r3q0j1d-UjKDPuz+Y)N7f4;2iZOU+D$MKW51(1?4-*jOPZ z@a0*SreGNU?{)8Sz2ym0{lRTnmalMVSqhp_HAy%K`KA*NPgl)2wW!(73o`L6+F8h3 zN!sf2yF>oSz}r^)oFFL}2)sVS*DtoOh!h4j(TEw8YU?iwKSpVZ@jnz3SX6h57zQ>o zcS2qPB1WnBZWfiMqnTG$sv4g=Jqdl$P(Fbwj9@zztw7nn3htd|I3 zs9}q2v7y5LvyH4HB1SaYGfD1+_Zd|2M4$>bb;(gYA;|DZfN)D$qAJq0n<-)_I43qC z)8+}&4h$h73cAKifa$vn_QSXDxsW&;(J31#jP>revgWq3rz0y6O>Je1+sd{@pPuDN zQnZ!Nwk##1VkXaxjxj>pL=4l%rA2R#uZj|Wkcs+viF$14;(x8i-WHu@mCBRX-xUr6 z!-osT(>o%B_QMv=v(McN*6veQqn4B}x2r|bs`%fX<2xdT(eK9+zs+IChL!xaA%Yr& zNF4DlA*x8rgX2ic(>)e*T__-|7Gf2|(m3MQ34~(bZkS`g71W4JXcXDRnndGr`?H8F zW$}FIH^w3C6Zb(1ohU$3*{)3Dex*DycN3=9ViO!{LW1$9BB%`MeKc#Iv1!RXg~h3< zwp*B{z(=dX+{TL@ofVIx-=A*n!VGFlW48TS;V`JkLsH+T?uS61cx#?!Ul*VtsmS7= zZn$7bJTNB9y>X8}zkH7DjZgG=G<2Wj2RO$zdVFELG?%mfmmdhlwar@FHglt0D;T;C zYbeLMc(e`}$bsD=NK5$O?zVv*2WPAC2rtnNQ}HCvo$Mip=POs9t*)_D!O+f(O!-il zECMSd+AN}(C=n&B%YL%&h<0wOHF+_}f9pK>qQmyeG70GZQ84T##krFB#za|(y)P#oS@gHh#@*s1lk}wp4c6~UocFEUbN-m9FupRX>=Tot;uk|R*4YW zg<14m+bm2gSk&-yYlJAWACju5yqs|;MMd5#-xmzKX0*$*;_P!j4LSJoG~pLC-61dI zEBV?Yg~ouOQ{aZC!0k9U#!3Z|eo>I9!9AS@H)fczr`xG}A7Pg^JoM*R)s8w!RMaaL$qKCc)0n6E3CrHt}z{ z-PV~)x7#Te_hgC2)zOCkR3s=>YuTO*rs!H0)?~r3B}9vTxlQu~CmPGnR94xlWP)?6 z7hrPKsyzWq2qW@#K#)1-#;-3)v%Mh#^MPJi#EgC06Og`JS6NzlSAC?Tt#RQnGll5CI-6LWgj%=a;6TmAeZ?joK;m3eI z_+l&cY`rlqR^s74y}H;G&sCU(8j@{x)Z+%pVDAy45$JDc+CG7>{DKK5P4f{E{dcgJ z4?;OZ@Rn#kM~zz(jZKl4ll68ySpfELqvymuc2Y#D7@r9*{=WirEsIO-$5IK0_G1nQ z*W|*)ou5~4S1L@V;?!8aea9mPj_qfu9uY5SS!*={p#&UH6x*vFnGxp-_J#n173ufe zn?e*mY-m4w%VU_aQ6=^lq0sBLvUP1`$UE<}i_;)d|65y*ZnZ7~J_O=j>&f?rqEX%$ zPAWGm%rB?3bz4sBLYp8MTGd}~aOp|R8ZWUYgjpXr+=fg%04J>YSD`leW(!i)f{yKJ@yv@54&Ly;}>#2oDO=DtagKJjB>5mz~iO_a#xPN6WAWA)Ll^Q^I= zJXtXQd1qqI6ft{IC`=Pxs_Tw59ouCec#6p-<5|l+CxjH#oR4a6WV>mAXiUF9=GUWV zyx#5*k#hreccy&9h93XJ%#FJDv>v(QNxnI(cYQ zZcN@??Mgw-u|C5{Zr&_RFKAaG22ik>XCp)kdy{M!b0{9?nPA;;@{Zf1$rNnq(ua;H znmkk=ZHgzS%q%IVEfl5+2iIpxqjh&OIzNwdW|J^^;SAuEM1aoYey(Mo9d|?w>+b_0 
z%3=bL&sT6;jnITv%;BRN;3HX+0DwfO`?Rl#z@0;muGLS z9&YD6bz;Lx_c?Y|=7j=>=H}AP?~CB;p=IBA8zoHh5?iYhTMt;NAS0BU)lxUv9`i_q z2UpV42!__hQ2dM8Q=VF)0h4Tr5Ya^kD~fYsFm~5?_H&ix9M-K`kf|uhY;z=PnjyM2%c{+Zs{P!l ztx%LOgaaSJ0_$0Vj-*#Tijj13Qy z13Z)d+|zeVeYRt2jlCe~i-k8#!h}-GrAk}SOQY;Ncn^bR;d4geTQ!$O4X)}_9!MvTsttm+um0617?lBR2M3FYmBtPMPINp z#Mb-bNQRhy%k4F#2@O2IXv*1jEA8@JDD;Q*&F991_*F#585jwOTrINhB8FXbEXG;& z(sYYRp`4&wW%rB1_@-d%=s|@;%kuNEZ7@riI${NrVDv5(($7%yzD*z$#1#g$8LJV6 zmGI`YW97(>?y-HIq^)up{zW7%wqF$14(E{luhiM|4l|I#ZCzn+35FjMUN+%*Vtufj zqr+Lz;g2y95}nZ_zr;xXCNe+R!xi>V1!-qdsjR@=k_V!lTuctwZ31poJvbJXITEHc z#IVeR{Z2hS@#s{(zrB1#d&NTQuY6iA>oR#3%@tyh?i@YT9#fc3=qm&#zDf|0w{uIY za6@};RpYs)dOUcao!V#z6~!1~R*WAl4ioH8qA*6B4P$U6O6(mGC|Gam7v&QU)0LN& zo;Qil{-;G^LMGy|G&f#ipA`!bQZnv1J=cpvmQC@O|h=RREG)EVBxi= zr@n{df!!uR7c(PAfP<2Gar2XD>7F;RTd|;0noKZ&3!ptNOw)V!>19tUOe-d27e8W4 zJ)#5&$Sn#}$D|V35qMcB^l(CKe1Vwk5sA^ntdNxVjwuC`I`S^-@M6xC&$6o@hD@r1 zQwocaI{%>{)#b@e<1m5mUv^z&C3<6`X?kQ@6W@hcjOOpM$hQMO2?BF$gc36%NULBB zAsw+pC!nDQlMETxu_AP?Q4~xC>1D5!6iZbYEf*WN(@^|0c@Z1bf%z)#_% zD70oWAECb_V-a(d@v(?P>`3%~-u1F+Yi{geyS5-yH9C?-w;KhiidZZ@z@7e)2%+V} zB$G8fnE*2YT)rka0R}bZHMmA>5g{~=n|m|L<6Aw6{LMs;dh0}Bn%>-xyjF(*olR8l zN>o?a89~39XrFB6{{3Ik7@aI^(`4G!g#o4M$7B74=v&&soi;#W$}KF04{*CcX!(Tf zl5`s)Og-P@O568(%MheY))$|iTp_P@xo$&8%B|2d;KvSju1yuBq7H;I_j*)eCfWk& z)+!jfjLu}DVT&CUh0584W9cyCe(jmhEn8_HDGJlQ8A$YlDZ2OGp?0HE1UROp&VJw!UO>m5O*SxDV1E>1bL~zMd_JY8 z5#f-196pYI;@SHlqr@0idg^pGDcCo*DVdw=dcN%w4#UOM}kmk0&6zMO0?1trG8V=+-W`yG032<`#pwv~eQ%a$pTd z%VDZ$%>U>yV{L|qPnIpV{R&efwddL)fiS#nnJw7ilvI`p*i2~3XjHKfVglSTw z5m6gSuhhz!%Vg0hm~S7CYs6Fxy{Ze-hCAY~6&~guN#i`dB2L(GNX35|XXA9*+>sc%R;~*zQo6 zMh(8-Mk=T&Vy7>)Q9=~SQ`}-ynjR7iMUKftPaxf+8QcDNxX2Pz4`iOM&_VPruritP z4q3B&#WO|@%`5CLg6i1L8R4{OAA0V?iw>``8%i!ziyR)m2x00$f3V4J^T^xd^8J@( zBZcWe8a$O^8!bdH`yd=$;o-Aer`t@0Rm5T};%^Y3m-4u{eva*OC^4mSYA+43d|7w% z%zC>ZGsEVATP5a|*LoW$BE=QNFtnE@5Ec`>d=}GW5!Ag476BKCLX8+{M-I+52-3

(ZCDZLKg{dUsA=L2ba7+Z6(7NK_I_nS!g$&DNP`W<~ z4jY6Vd=+H!4Y^%3Etl#i^&uN1sMG2HW9vKsv#!ql-`c8eZEYP-?Y8cTCWIAjtxQFR z6mZnm+rAkjA=4}jR-3`zf^1|NR)CNM0s#WF4Oq1m0i~k1+Bb$U1Us#^RqOxr{hbG1 z@4f%l==nZpJ!ibeiQI>`}BG6|g2>4vDFdVQojoA$X@XPWzkt5KEeeN8G%ANJp4}+|Egj8g zFwOdS(&OVc*)J4!t<8pd>^?G_?kRSnqO^?29ObsiD&%xqC!%Vd#EEB}Z5E_pZYB86lP2!)<+A3#hyFf;>_Gq zo8s{!WfF9z00R@9zOhXzfWkO#19ot!R%fqy zNhBQAR*UH0drrvb>WS40872{EOh0z{6?SDMD7w#Jw;nkRKXaL(Yl0gTtEN_Owc9;B zVg$ay6^36P%g`sohHjw<(rpSybA2cq^+n-yUaTk$+m03S9gXoF3vG%hbj_>tc#v8s zK-ZtzTJ_G{CALhE8c%FlX)6?lQ=Eiw@4ZQgX&0%T*GJ;Nb=Ks>1Iu)5W`W@VYuiPn z$fLXFoUEH>tq$puvXV6WK#)?~=8fgz@8T*@_(=>xt-in>79mW95gaNf3&Jbk_!3(s zKr6YEmj+(-l;}vz!__idr>HPMC52;icpcj01)W^K;AG`8+wAbb@`FzuSZ=!n8Q^@J zd8V^Xd(JUT@;S=1-wM+Y@r{-CN+K?)`)%C=)~R~ylG969omw+VYtfp@(>2ZZs*=Jk zD|oeCT@COBPcAQH1iXuAe&i1h*(44o0g@!ez>yxIq^>qnIJD@UnUmjcsI($c=$d1D zDm&{Z+j!3a%gItjUmy(2#h6?DwI?O+UJkO)CEy^ofX@rUB~KK&_JV`h+e@d}ivkQz zPC+(9en=?%EcE8xVkbm^1F7+&F#fJXY8ixF)HyZ8h9Aus8Fs!xnA9{+AK4y)(NyRB z+lW6yW_Wx%r`5UK-q}5hWqRG`f(aSAo*O2jMq$Hz%TXAn`{)B&Bfzd2_2tOypET(* zZKkN9%MQ#!E~mv_5hcu~eXYF9dV-FsIUQANJF3>$A4Q>tRQ#8n@bIx^jrMm>Nz+bk zt(30!2cl3v_o{R3%i{vTksu={Kp81XU9FqKp~BRz*l2SY$MnmpY8xRK)}gW@o}p>t zk3@Cst^eIl#kip>+|UWPatC z@8d*0SOhrKlfB#s6$`_m^r~{QY=R&&shk_mn5_~DKZQuqik-7<_6&6Kg98WJLC?S% z$qUCTLX?i0VL{uPn7t|-)`?hI1_BzVM4&6_GxeKRP(lRfC6hm+?S73D_FkANTMjU@!bO$L5KO02W3`FHOsCme^u1wsY2Y+v3HNr>L;Byx4Y$0Bc#9 zc{qxBK`8Wa=4keMFA9elR?d0nupm89T*m$t@RpWKQCNwxhbXqh>+}vf6nfhRJ~flHOjX zisC=4ybL^jK9xvv`svnF4GV0sC=|>$UkX2~1Yx#dEIug}_7??to#pJrHRUUwfu0%O zzn}eH5Kf|-4({4f&|YA15#c15oWVh`{nK;BMyD0#jka%2ApyNfA@}yRoq&KUxiZxw5E1jrKj(X zjM{jGLyK^Y#odt}t8KDp3y=5+q*h~rx>ZDqj@LEmY`#a3!NEL1mR&Fv5Ll99NV`Fp z)@W;zX(>XX(}xz)1iRe{T3h*nY`5==5C$sptBc*oM4pF`)-1O)5$KYxy?R=;!r{2k zzO*WmT%X*&bZ7gr309*h*g|#U7MtUF+Nws_a)m<&-#5~>2+^3d%#yYioZ<+=e{Aen zWP*`-_Gm_96xTc-3kLRB44V-Fzib+m&?JuRs6bpT0_-Jo(Rs5+(meU}*=^%Rpte{B z$|%J)QIO)!C>=g!Gd;nHNp;89?XVSsVF}AA;GIe$G>1hCi*BS!&>+)A-<}SUTF11l zweKiQB~nyFrSfLMs1g~olQ-^LM4@YB1J05Jd@8SEH;YUTd${j;5-d_=*iZ)u%4gbr 
z0+il4v-)Vu3@Z_Y8{|fxlH_Rg3{>tJig@L=M;$~Ll23c?6KkG1*0{sYz~gIYoY>9s z=!MbSk~E#Vv3)9++MWgCAQXo7CEysmy+1EV|Kk}xm#wM5p1>g{hL(i}?f6HB)bJ@@e?)9mvz&PEMp-t`xxu(r)8rr2FVKJFM6!AFM( zVQO%~8O^t%%=33tZ|taf#@0JzSfnr6C>VYzJor9hTZQ5K1b5tjOvLfjv)YaeJLmHG zr(=%2Eh3|MWHai87tREPW)x#U@>0PtZ=%n3BS-x2i^i7dp#k<&1!>AC?zKy8fl%N) zUb{$UmX;^7V{ZZBP&OW0M56{S3CFN8d)JFlzuj$@%nFeR0Bq;Y>qbG9AH5$X3sLc# ztF5P}BNy+Nwq7vQ%<-z8YlG)K0Y>TbxYjuA@YP!U&@Hsr1SwZ8v2yGqkIY6ufjz=G zPZCLgum>uzD;z}nqCEw~PBG7FmgZ4uvT-#>AdJEwS&3UH95~9Th=f8Hi4qjN(O(HG zy*6IfqKGg9hO-wewl)z$r@M2~LVGKbNH&U!?IRJWAdoj+J9)?yhbY zq!~Q7jqUZZE(ejj z{hm;0Kc?U|TYq6{J%Zf?WJT^3WP?k7IsIWH9f_00M5ZO&wM^v3rO^sRpwNP>EL6-Y zh3G_VU1S!N7v`0_ZSX45X>ZoP^|nIcP%#f;G5f7B^9zgmF?-no$J$Burhr&tdEzeZ zEzc;iY5wfuxkQJa=Ub$^;7dhOKX3=4O(qA=t|CzeS0dUv-|3KMm9s4M7lf~Zy_;;5 z!tjw@hE1a!Psa<%WIn1Df*hI27O$;~R_cammS6-9S4WBsBUH>qP*x5{_I09#Ab}kC0Sc`~(C$7NoAo5!g=p4iZByN6Kkg=qq zt8TL+jta+=Fk{~of}y;eGW+!WkWIYezx6YMIsv*0E4|)JBvQ9;Uum}~9EJ%wucfFR z{UDKF0&jf;16zZ+Qz)?kBIpdH1D@(`~F5;)b^$7o^7Cz3;Lo6sAD7 zno@*B+g}8RR)#ihya27t#kzQw&G59e1qn=S3@;MFw;$^5iS%f-t@IMOs?M}m1?V=c zv+~esM;%6&8FTEUAe@|@*J__yK$v0JU+*eK{jkeuC71fJwkyko7w zn!-B{k+VU6!afoV<@UwGhy6#`=c}F@s?lWm;zDTjNk{eSj_NfWmFxyl=mCUTaoFx) z4AUSR2?-;Vc!zH_{vI^trZde z5VzwCTw=RCLrQWl`?ZIUjoV}U6sFt*((`V%!w#uNensCDq_0sk=ViV`ekuOuDX^xl ztIQ7^%IU+aYwYGlWC-0LHIhtBVOkH-n44T%U>W*uAsQO}h&Lb}agPY0CyNTp$J&pC zY1&vUDU4Pa?l6il@lwvE%^ySw zb&cSz-Z~QamFC}9+HqlZdQ)VA<IaTzzY|j&79u0F3T<#4s9(e%)oEpzE-<3 zk!F}25nV4FScnW*jJ2CYp?ST!cC%X)rhM6r$h59PVTjw-6;b32s*qk5=Ak9ho{-U9p$&lyW%xVLczbU;-PFS29mE8fwFpX4RwP@k`V_& zlys9vzB6UJB`F-ng{-S1VYr^6(JyFUmD&hTJRE~fMIJ7XjkU)W_Sq4uK+a2;0g(*| z*4S!6$~e&a@vT$ z^AT>z6x7cS4S9W<>|+r^wX(j;%-C($E`=7lc2q3A%AFp*&eBG6WrAsN=G@x=IG%C`vuq3qI}qB2|RnYoXf#-wbqzj|hx7++IwcP%45 zW0ibOFB{}x?N&+@W;XIt#F=HT5H(`*t#kY;`;DO3+rkOykU-#`!+#??%a=u=%yJF@ zogQ13md2qDwFmo)C}D+;H*7@8Bi_(pLzjmvr>~&1@=5fC6~s^@ zJ@nR+r|e6Dp+??5Gb|l|oBnk6ML?(@{i!JdwLFgD>g? 
zVfcJ*v1E}pIKnW!%pG~YTQJpeSYf8uElE9ef!xzNMWFr2fa%@v&w`$%Kdg5ih8xtV8#-;3Zs%q1*XRrZZqD0F`By9e5}3WwN8VWQq{_t+tr zIQf~vbbmev^$aT&3Pq2=r9+X`IDw!0c9c|h*>=x+dcrLGmBOLq0-n9FW&eUGbSZ>zK=4G8I6os)o z$vCffDCthIB}g;MC0uHM@koZcUl~7T!N4cC6!Um(_}CM0FIU5i{?uv`c#oDLJ#OcV z5C$ndo7c8dZVBvi5#hM3bZ?E_DnL_CR!y`Ng+uw80!Y55iUK=D#ci#lte;T$!Eo?c zV)qE^ZC<8}YUHUpGFvC5;U6iAR<=)SXrDCI$^@yUSC1YxMqxfbG-`xv8U042z<2NL ze8%s2&n_7i?3f&MNR|PdwEy8SUN@H;YWt@kXXK9R7H;7>svA3MuyQ-ezO*LL$#r-m zK#74ShJcuiyJ&sUg3`;k?+Z9}o>$ZCu*jM0#o7eMnh+Who;NZLx6}UWkfQEJ0pk4xOyVT>?X?uf zr|yTttVTgi!Ipy)IDyR+gflEkG@Uso@{618sZLIOva@liEp)iIrWhwUc1TdOG4i_@ zZ<=rKdx9fd&|GY^|2X7GJe9BM1?xypS0wkk!7fvnMyAV$b}x^}DJiqt6o#>rODc~w zR@*>9?Jkx^HiWobvLAU0+Bm#0-D(6WNrp6Sr6*@XE#xeV12=0DiCX*KHPQ|#tltB& zAm}yXL@t47l>NQa)8X5f@>k!bS>zQgFD6BD`%T2vbauFfq#YbFz z5t7e!B%yFDmLK2vl&x~e13Wh|woWi?ETYrHS}fL{vp(d=)RTUR4RVOUS{ApJ!v*0b zGC5f!Ky}AvG##(qaqQ_mHbE5iEmjX_DoiWUh^r{H1)lN9=GS&xtw-gtuAx;g1dsHU zw3Rn`Bp;Hb?zYYI!A`cd3Q+vsMqFnq%qkFh-a)4(y0|$aBJ&h6)5y5yo1UQzLAOrN z5Z#5!Y$~=c!u=hj>rKZL3&T`>KFvHRt z^NQKlMM$jn=w(SBv$qNn0{ejwHPZaSY~A|_eqyS~Zt71xgN}mPHri1Ul4K=T;fU+% zic*^<#83WBE{f|zxd$g6e3C1SYC+${Y-6{J=p)o|qUzT)h9CG8&k?rvHr3(r8RL#E z!8wp1Ef~Y0VUb611CHRgt#l~qD&!{`1YzRpuGXp`nDg7}vfa@OS^%bH}-)k!^RY5xQ(BkMj z-A9n>Bl{gc@{IByC#k_jmLm#7MAzT@sLdAgJqeaew^hk<>3UBxGP5|p#5M?pv3iI@ z`0IkY``^KieMVTZSYc;cry}U9Av{)}@Dg~F%`LHi358jbr?c{ZgefudaXiV+Z2-bi z=A&#M?ff$cn=~bMnNXN=n5<;aYFCO93M=;q}&x%Bf zS65T|O8bFeC<1%DQpvef6b3mR*VZ=BQ>$X`tOp6wpE59)YePH|-iKjq`5{5NkGpE= zD_cxBG+xdH3h?ujD-yrPD{s3J0|rS+SE310BU<2~L?Sgg_M{i2a*GR#Y@VkGRwCjp zD{bW`QmCp3Qx&sR?&8n`w>3&aJ1OL<4!V&YEr) zHWA4vADh3@zUkqCLwegaXXsNJLzuzl(_F)5Pihn6Eky~av9OdUnS6WD(|1ehY2_Xj zU5?GQ$qw5qC$v}A+g!nL|4vaYy0VhMzxI)cIc!vHm}#+sJow$@xg&E)&Gs9IECD(s zz9|^G6T^L{W-hc7qJ*C%7UkX*qIt)gtL-12l|H%)&4CXD89=OimY}9-7i|X9XF1O; z&at}$V8dl?dfFfn!mnjGSE2S(;n3m9Hz(U;4&(J(kj=Llf>itcA3tn!6=s2XB+V8o z7z#;F#-znkVYts6Q?Pd-{a9eDMPX>!DCjy5dr5Z069sQtTC)Bi3Ux;|@wV9;4#yVq zvn{Yc3uuk@!qmYkaEo;pQ0rM|3+?-!k79B$T8f1$ 
z5kfshn2xgtJa17BMwQYXjOBx7MTwNXC73ut7>1)g^fsI8X;Egww73A=oSwhHe&Z4C zI~OvWJQMYw-dSs%f^=rQX%ok4ZPZVD&fZul`r_6Q8@Zvwmhmhr5`{X+K9-K1AxJg7 zd-t`C3WsM{ectrMB^eOg>4lI*Z%FJWpA|*(rCw`2_NC9?76>HCYpcb0!B<3}n$rs- zQ@}R}hS_>#13U5wHYky!Z>$(`nx8s>ZwOxxHbP}0(688K<7ohUu)=}!!K8mtFciuC z6<)9HxG4N`$3AK&5^39)J!5|r4EwA|qhZhCEp|f-G+N#RvEi-?lk?#lte1k+;;sw( z3NrS?Vlv7!P>5;4Yjn)+7YGbs@`JCFJ?MGqh3?qh$ag4bCfHP%DHy&u=vI|hup?S4 zQm_GzEzT8(xA3>=@CweJ&)9Co!CwZ-052&_y_kh8vO|e-uwW*Gi1vy|Fdc7rM#@>o z1;cL;qx-VV&gSMrPlIS22G#8&!O$h)*<@_KU9deA1OG*3Y4#;SMj6RH>_~EJ_B9d0 zOdrBW)9x3h2Rdq&`a${$Q9>KhH$z;n)Fz7r?|J%o&JuvplT|IprYyB*9kKz}x324s zkhFa7oMIa!KvI~IA+QGp8HkZchF926g|Tcv$5l36ke(S8+3=bnM9=6_hWGQ$wpfHv`2AX1S9t>X7?WAdHN+Yb=p4z| zNt$SrZ4rg~N-DUd&$4DOp|m0kLjXDUYhhR~uE@263R8JbJ|Zplx@V;92QXK2N-+FF zv~|iXvp;(R&UCHF27MqHc*~D*C;p)@CCkmh1+9d`__Ld~ZwfM=hj$-dW!(kzMMN#A zVNIl!hzkS}Lv4&j4P!Y;6pB0Ay37_UOmTWYMErG`MlLOAm=cg(8Bf?gX>t2x zv>625!7EP$x=~>`!)G+EII$sn)~h35X=|)qM25a!4$4*h^4ZlM7YRNdK+Ny7hxb;M zT9;jcKBgkidMZr&9?C}Q{4N2Q#X~V~iZ<9GA_fvVcv4W+vYFquFzn}Tj)>H8Y}*QZ zN@30YwpAQs1eg%)JEyRL+iFdo5+5ekqHyT55m<>&>+^Os%VAU zY9~Yry9zDV*xhSHxGw{Z8tSdz|29b2!Z8z?Sy#kMEk8M)LU9!xNo5y@C?+OpCg&WP7%T+ zyH6Lbr#v~b)1cErVVd1>#~t>5A~#nP*em^51U?N&M}BrsNGfS8Q~)m&_YC)txJj5f%Vq%x+ zl=;a5#>cnR#<%RT3Ma<5JQG>r<4R?^&G3x$%n+{re&;aqps?0n5`?K-)}IcKYM+K% zvGw*J!O%?zP))G&ewDy$|9h@oFB~0D*;Z6DizYH#*VzpshmOKGJ^R0YBKY=4id@@R zxb;x@!$f&3s=MvS!Ze?Y@10m4suHB{N=nPiY?g!Gqj>|W7htBIe0nzjoR~h@8bzV2 z?m_#yz_tld(9vCcWOwke!-9%3u72!w!O$!$dX)qm`0W$ZHvBD5A8o-l+UI{wdg3EN z5FN`e5FwgNd;z~Kz(BI8lcDV3z3*!x`cJ8Tyu4vvBTD#wM-SlKMBFM8Tz1sV(TDR6 zK?XvC#?w8DABu$U=Q(5q@8Q5!?3AJ%pIJhhh(mY-VuSGXj@T=A=JMJ!74I# zg?4UR0Ns0Wt^E}Ns_Q>=gk7#MoKSuRH@H^_h7KBmmaJt9haC+<6IhAnd47y%k?=E8 zSQUblc6#Rqo2D?0=VDUcFEZ0@fheKC-g#`-mw4vW%a_QY&sITL&drO_qBf6oD?QJ9 zBvxZE#{Z@e?J1VAyuS#9Vo|l;Z0GF-(-(Zz@-jFfnK^-WaiZYD;C2y0L$i?>c}UQQ zscCzB^AxL0KYJ9>spWyp%v>&N ze=i8nS#qlO>3sm^0eUDr1eNN%eThiKXJ_V@*);-zB|MsJLh|QYkM5>X`=J0`#c7Wh z&Ipk>S|#hXL_VzeFwKJ6cBy5H#L&bZ&9u>;{nk5fx#bpnTuAHe&dT_XrSToxBKPPU 
zty=W(gFpq2&E*Uc^j&PO3JXnq%O+bOV(9(-3vqh3Oqf<5-Tm~*<*oLX!_Mt{I=3yg zcLhVcFb9;IIo942#jjFZBZ(=rwgZ(?Ik0kitgsuOB^3?gjis3_!USgcog#&bOA66+ zTJK31<$)X)kh-vci4p@RvCL*F>>S82 z42B*=Q3vkWa1y`m+XTZXj2hjxHP7~n5V+tiY`eYS>EXB^cXqD|hF@tK>Pr~l;GxUQ z;O8G*7}&Ybg*tLEMaUbU3Bq2m8*-x%jGvk`&c5&A&Zin}kizr@VvBohhyYw*?g2Hk zavSFfyCmOiixmz(h>n_h9X0DZYBqP&Otu!$sYWIr3v9QLmaO24vBma_;DV^Kwua@4$O)w&#-Bn)*fdlwfGo0nG7Oizgk#gUwD4-_kS5S{2qbL`}@z z^N18g$39e8pPjRJPe|FE^8&=cOQhE`Ay_n#9>cd3ML~z3LCo!P0lI`m2;+-(wIF5D zL-8$5@#d+R@3k964DU&8OU~HyM*q&bDT3icTR@J?mRQV7(x^ zPzCR~d3Iw0=N58v{6GS8?r*YD!jvv^!n~|hJ3Kb6>iEu$wn#8k7JbAUtkn}_qKKB^gv1KCWXUvymx4S z>nBK4q@FHgpD%h+1n2D~kE32~ne7ANGsBA)=w*=-{&fCr=e|f(SS;AX(Cc3mAygCJ zFxM^;^da6lHE8wPC7wp_m`F9H3I@*ek$}p<_OtaBjlM~^^?pDUO(Yzg*v|yQ4?f;7 zK3I$3->yjj3n1)Nh!EHrirvd!2!<{{uu|9F_M#{hi;CSWJ0uXscYyShU-8_d z%2;qS?RAIrOBROk-%q4QRXxQn+#ib4x4nrVcQQ(_n^{$tu*1#O_7=s3(o6MBGRU(=av{4cCP*}z zJ&}jTERUuUNw=h0o*?b#y=Ah^7x3%Ow*6AHFa?-( zJn_AdC}mio)Qjw8QK+P}tu@#F=m2L*Xk))b44+wUJ|Zu!>|$;fDGZrRGd<)mvPjTi zQw1q%RE(o{tAIM07vuvTk(1A-`H+BCSBw(L&pcXmm4O)j8oJvbm4Hf4Y_98^JJZ?) 
zX+!UPJ}(;Tw>@66HV5b1mk)$^_f*`|E){{Y@c$RW`pQ(4@Z(8R3{`iwR(7tM*EwTC z=gcPiz9;XBpQAezrUg7R$Lyy9n&cB=*t3yiu?BN zXPeK))(72xd)_m%SH^Ud0DNgbTx{X21K`CpwHAZP{^jXXVlDlkAWv^bIyQ><=EUC>~=+J>08% zS39XNz02>YZy!75Xmptla_-HA2h>>3Z3aq~Y=VM@>?j|~;zdnXB@gERGx z#Z;bY!_VYxUTP01OtA+RM|W$*o;_WDw95qo%ew8_c-lTpdihdVZ4=Ay{iTJ*9d{}JHilSq{_R8p~bz5XTW1}4r9lp?&JhoxCz2U|5 zt|&&4`-Df*?ZfbQ_fJ9ET-vs=+&&WUkIm`{I8lwSZVg0A<7nT^<>_Z%Aq|~(eAPVr zyuyqD&ljjVa1-?f5#X5i(4rI}az^Iqmsqi&|48KTdqQNnh^^vktMh_%K4W{a$99Uq zvX;inCXLto|C=IF@rXeK?YP2ljX4tRcMF6cW4n>t=^w7Q~0LOGrKJQkS3zG8|JPkzRrdNp&s!B?|t()B_9DWfJhGS1Bt)ng0 zLv;N#>z2gpnss;km=}d0x$NuivbiGARrseuG(=*tWxcLyx`FNg%IQR(Pi*?tlD=?{L$-cpdl_%YZCPuBg?523iQAWNvx^j_n{Q3(ajShxfEu`O=MYP0T`d9|&FH>!eRSuto*TUtcC8{p zk$kZ5V=GLNo%Q3bmzN@Y3Nln}eMQhDk(1IU8|G!n#c>EbDCMye!KRpn<8WNj6sDz^9DJm*m?wk3gZG68VI`PQZU+4XdMj z`esWKQPZV%S@gACX5E~EdphhGbyQc`gQ8GB2L5ueewrs3_|4UaZk;gG38@h#b+adq zH!b5P-hM3zfA1`)v^NwE69w4?<2_k?)6{rypZl)dRnc* zp)JFaySGikzIM7pDQilkIWUj4bie1{J15=#k%&t#WCLff2d4NHBW=AFovZ<(L`H@% zEQFCVLtJo6uSCOg=suBFxYCRp5bj6Lx z8bNg{LJuqgyKSS#M|CvY?;LU!GL_HB0YQeVckg>gjIqCX=4h%-;llQ>PS83OE9K(i z?;?a*#nC$6h)&*rL!r|!GJ;Wm6qbZxpb(i8*7*Xv!4t3@V`MWd)iGrj;x5Yu3bTlS zF=N0Mr__dtMBB?obA+g{p9zLaG0vzjQkE!ZKE(M&NVjBKc_NiAh*)38Bq->J@M>;j zMS(f$%FE}q1EVL&?r-tKmR@-ESLyw^EfqsJZeFF;9{Xi{yfNJf+S!M1Mk+Hcq(!NW#QsL0m zeGyue`g64>q4GY(Wmz`QlVoD~0tHwG%~p#8+v~9?fXM~{ zLL~c~(q1z+vY4^NUJykMZmNzq)!IQ%K%JPRm5lr=qR>BFR`0_Rm=MMHNa=2W^R)48 zbK+ZD^jdz?o1rjwcHc#iAw?f)A^%@s14Qs^!4-NaV9!%+cY7`*skDV0=CE_g+Rh#8 zE#C`)qhW#1F1$sUX~*MMtKE7gGCj5z zXZJQhn9hZ{LD)1LE=akY>Fh_IlqhVHjJC&xDW|M}Q|~O#hgl2E^x0Dm0ogQZu3-43 zjO9aAY`+tMHsZ~tZ5dC`FFD51HLpoo_PQ`-k1fPY(FuVtKO@)kbL=0Yz*Z7gQU9rM z*sYxT(r~Vu80em>c6=P~tLMf^8QNA_Hr9Fxp2>*24IcG4+tk|+Jeiz$BW*ZZ(96LS zK>Z#o5e4QVZH;9DG)*oBbpdM!MPRV^?T{ec5h3d2!WggU_6JW*ANDWE<+JmK2(;ww zT{ZT;!l5R19NN=g+zy4B3JVbByHr5mAXzwC!5a7X&lcLvqKBcAho>~_B?5Is9_JhF zcEP~(>6PP8ty*CDqEPDb?HFYA#B30?pPT29=I3&Qv`$bvKGtDQYuey#5s60X?KN^e zu!l+dyom51xkNo|*b5@iYRb9odV5VMu#kpl5>A4D6Gi<{yPjjK{YwyzdUDRb>@C8S 
zR^|AL$%>#1myA0G!S` zlx|Oo5`M-w*C`DcCWbOA7AY>&i5>W}Fp}O{Zfg=bv)eXeYfG3uX|F-DYj$v&Y`aAa zjn2p!H435e{i0|ppdVap?789#=Y%%BC)kf`?KdMk;u{e0`zwg`jXty+K0k) z2Yy7@J$>mYC=6LiPF8{3E?g)Q+#Z^P!0tBdBFMx*5(YOrJiDigKmn}TT<1I{q+g%* zgfhd~Q8mpbcoDo8bH=lUUId?oAThmG1fQ9xfM7ni-gCq-oX2AEE5R^H@NUbsf=5R? zyxI1#AUq+U!@U@LnM*nYfpKb%elORH5cV!T*sgkdS>M+_Een{#>=nWV-C4S-@qw&fgl4Uzrujd`}tId4F@iLegR>_U??Si2RX~lcj+dyFkk9&PYQXlq=$9RDrr!b?w z_i4WP^8{!qRdh>9viWCnVKXP+)(H8c(6luYl0pHr(sqj;x^!SZs=jzP^u{_{~(=dmi`dY_?x>>_38`QFzYbt-j}pkeC~kLQ56Uv{vV|SU-;|8OveuWq~up z8@ZxGF5uFWA@|`)hi6+KfiPt_`(ObHnW=%I(V=Cd3#~w5IwgkFmr|Q5!~%kZLF7=3 z$?~K~p*#AF>}!hz1FuD8h1~gW6oJA{E^M(a3e(7p0-kJs;}OSq?y+MEYxW?>hQh!{ zf}ywKO>^U$C)pR?_5{-;2WVe($i{OqXTVDYsg&~>D!T`(>}HQoD`P2$S(0E__i$5? zmx(d8UY_FUt|gciw!VTi69?j~llKY1a~6iUCV0%g^}29c;W0gXbhXV2N3|Zp^)6k1 zNCd5p)!RC1Rz-W09d=4Y+RV{?Nngn8J))F$zIPr>YB$;G~h z-7E@y6D=_-tc##O%(~mzWt_UTwZZmtU&X?CE(gmY3d5gt0vWElK%n-Hs<|E2*u&jq z!;}P$^>g9kk*iB?>;;0{7^zT8VBIUgAA=bjRQ#&P|TXNv@AN~{R^ z1Rk>Mh2aGSd23c!13aG*+G1M;>2A&fC4!jtRk>=lpUt&3Z_Bc4six^4694Z*-} ze9MfuWVEK++aiV;99>s!V!L|Y>5vdHz-ULMqq}`cMEDtgS0B4r;V@0nrf2>7h6r$^ zT}!52B}BW>vt41=d(^Gm6ZWzn3x!pqgkQ3yds6)U=2?LN&6E{c4gfVC*I)K)XA4mM z(Ut|az|(QM<4a&m93ES+sdLt9`;EgBtEb92$-9Cuvv)n;jH}-Pgr5f1JT@YTcuj&5 zol&=0S5ZRahUH+E#eN_H{T5$S8DG6RzGfm%D%M{__(|!})9&(c*Iubsps@QDs-GCI zUmMM?V0&*@??%4q z*vi@!PRQg=3Z2pdOY-EewKA69cW`$e?<4~qaP}&>Qd^W*a>z zTaHru)SpR8W4oegeUXPx&X{S}C>$8i$|-qgixmZAGHe zRd|ao&5<>X5+@WEVaYU|E#L+xu!7G=V&O^KECOH7`05Su)oUY-`8Cp*?-V`y**tB3 zQW*Ae!c=De5eOZbPLcMpu>Ue9MF+_OUbBBp%dcPYJ@D#JT5r5tu%&i4=q@QAb}SB2y%K_Om5I zfkg>(H%l7OgSC?m>gS;u&wa;W2wxVTLf4&fE8U0<=+T z9kR*tg};O0zZb$@Ur{&=?-0J?_H|)B+U?;BFS$-VYRuQks;6s8;ad6rnWcLU%jQ(>tB z`VH_A;0w^t;r>>ndpFoKXJRGkHQ&~GPE04}*;bFEdj@ltx69)Y^+0w^0A5ZopWjHN z8_|1+bqdlFj3e<0EVN^eDJDZp>UsYN@!*0A4$EH=hQZ^_vyQEqdKMznGaaiQUsWs( z%a$m|6`mBw(bz$>WI=u&dZ$A=V{5ccjEEFR@dG%2^-vlZy{2mqEAZ^BJ!_F}nIOn` zl$51u`_FP-=M?h3-MsPCrdE5_DUlw=T6{#=VNt@sU@;seA%r7Dg2$I8yfoeHy#VqH zc@zGY00Z17&#qPwMpAmE*mVlSJxM{KsVcLo?QK6Lx!0p?co8N92XA=Yi 
zFIWYo!kM1l@WTX{-p$Kr%UNfKjE4##Dhn#pWv#$%na6u*plDeM(IVE}4!xNsGT6=2U7E2YR zrQ9Fn^Y~*w5cWmCZoI7EuCKC(MWl(CaH>1ff_0K|5g1G4M+?W;(~j6kabT$xf-$Y- zhZfsz!O;B}XGMNhICOtBvd`G@1V#6w{3!n-ii*$Nx1!AcAw)MMv28r>Um-kt-+H@6 zVJf1-2B8i0JwcefB^fENq~vb)6OZT1yaqwpbV0i0mLGS0*s2nFQ@Y}PSTHczC$ls= zU>LwBBBzBliiKsYW}H}Csl{}sDD)C1nn>1XMMw2ydsak#o3h_>WX$IyQI9 z@mUk>9*0N)vRkuE!7xelF`XMY)1vSjy2vSnyXJwrHtV9%g8p+2DJF4hVb2>IR_yl|Q{;W4VuYdIe z;z^ud#(H^!0FBBVC4&X!)=e0miptPJwBABtz>`yNvzX_^!Xh>z1)|Qv@Z}O|;^vCt zQhK{36%~DVBVUVFsyNCW9?L7rwy~a30-g5o_Qek&Y0J5#C6Y644F3*v_`x{>ug141747WY zwC>oloz`EFzCVq_&6r2QDoQR^B}l7*cvRJe++0t zu_ev!9d9J066M;%jd{}eqmkmX5&PT%;?Sqh?CW1;jC z1=c&NQ9a#k4+>I|JkDaVS|~IGp}C&+*x6Y2H8xF1?Lcu#S`F2<%+sg0tw5p$qcy^T zG0rrS_0>*rtHnj9BU8Kgu#*bI06PJ7&M86q6>a}2`;(``=a9YY=}yj@+F8B9F8r8u zFdDWt%us&q9 z;pI*@-Jkx{y-{0Aa5aG$aUro#vs%BzYXzx}9e5eSikbFb9?!HLjw6zubZeH8gEZgqU6g-R<%zX}zH0e>%jtGXy6tA7B zH^#aztd0^ns5%vp#(AZa6 ztVSs8D(@}kN(AM|St5nsuO8acZH)+=h7i_7<~u0TCv6T-v#-u^1-g>J|1HkY&PB(F|g8y9?P*yJ_8}_ zFC)j`NO<%1kuNG;lRkD|bsY6$L3%3QQg71~h6#?&(`=!qqYcSD zyV(*!`ldWm4nE|lghMgHU_$f)D!FTQCCwX#Y$^O(6(!9rDQbrL*m2g#*JGvK;+( z0&`zkXs3m#ihR7;edk<||FTxk*ComgqQGX6j+3bZw63(Q0Fhi9kx0#h+bq246o?Ww zLeaG5u^9{Gnn99ooabc~N{3*pm&ITW;dN=3!x8Yoa@{5vX7WIZA@OP1CsJr;T3H#Z z)3YM*6ML|qy`mu9(X~giy`iv%K=Lh%?4+Q7k)*l5+hxt(6P<>kF0t6oJul>mM1rt4 zCg@W=wOd!aN0?4xjpj`tO9)1g+LHIz2AhybjuquHn=Bl9F)0;4TgkRmlxPbPJw+)~ zlaqA2CizT0IISXufgZsEo2CbdwnSM6mXmV3{X-N@oVHb2_D=yCOD`fqWFHH{T68xi z<<{=!hs4-qFR6=3QUI*J%u!CDAM#*IE?NHZd&r} zXCnAil=*_~mgAXe>%hKN*@fj+gwH{{C@_6r@Ba3o!eN&Bj?%9Be26~U+G@8e z3{&Zam=NeE5W0I9htGUZS1>Bi9`~sD+F9|nE%s}N*~|c4?>L0-iCk(QaLE1BbdC!j z3o@pAt8#g5J?9G`pLr?CNi2fj7DdA<>vp>K>h(X8^z4>u-xCGS5c#j+S32Mm2~v~0 z^+>TFiW2(s@XE;SN+W7#u}`E+>6X;3mt}mClAMf%j-P*$(yb@_JRu5|vB#3Fy^Vr& zQI}*sI`yASjfIcy*5nkq@RZQ&`m+?BxAi!`*0y^QC^P5S3j*QS-oA8RfE)?4vWH&rqCI0Dhz)tXv^^v@V+438_Uh(3;%&?%g$s4&PJh%wO z$3l_9gv3aBiIob66(Ld#!s?SP5-m)mA*ib?7G%idTh~Os9oN_&*tQj%X@28b zj_#a-El&IN7tfZ;w2 z^*yrg{P-Okk`~M-mU*kgXM!vE>38 
z5xOHnp<5q7j_Hqr>Q?jsHP0ICZBfDs#jX`A_3wHPNsI9=aPgOjho9rmRM~eG4*ay& zY|!5M3Q>X~9X4?sA|<8Y%D(R=MNn_&Q=9afpW>yQp4wsqJv;b8%N=MsBDG!3=JkPnenix9fL zFu%BM75jpS1e2&z@e*hgJqu$urlg1s`WzAHN{(@yTowz_6FqM3ZuN$vlXTg)M5H(j z8nyBRx=N6$Q*rr`qHq|n+`??GtnT*$5Iu^-kfwK3Pqj=DnWuYeN(<$4Ax}6AU3|+1 zDOUVK6dIRPQGwz7agJha1#)eQ5bVM5Aju zY8E2Ptg)ZxnfSg8*IpG-KZAMv5H{0)pS=hI!_Lr-D7ViGktg1=AfY4h z6%px$$W~;`Mmm&GBlj8i3x?lOQVL%S+~1^&q?xdGNqlW9{bISIFet+ZjeVB)XV>KaK}zo0J%vZ>7eA3Ixoc`__wIJU zOO}LqrX3Ory5XGc#d}6DFYtRsFb~JX@^bAR0oqD;3_|DkT|s9jxobDfCEMpNh7_hf z$FWMgKse%G2aPXzELB8W)Za;@9@=|EZ@XHU^4Wtlvn+n!GZ*0z7sX|dGlq!{u0CPY+kU%Vd$dWYVa=lOL*(qmJdv4B#0$20 zGFXpq9gob9{aO^-+CI6aebRJ$&LIi<@zU{DB0U?uI!x&PMU>DPdI+@l6Dg3{)eGaL zmxdB0;DNB})q-b#kI5;x%bw^VuvEk_Gh&%p zHr_L^Z!5ML9+xZsW?0lsBrU;>$5O$tirtjbD=9TOsasF0``^T#J-hy2Vk)nPQA&mg z@88y>T%qx~F$5$c92&xGl+(NIo}w?Z$yn94-wA6HPMl~jDohiyP&vbh=}|!jl}iq( zij~^A2H_Koc9oFAB_fiF0YUzWRNoSTQgkDTMIt_SXS#P}W_or3GEp&4fzXZ&fR8$x zfvd<8%MlDbm*gTao@skU302@QiuwGU=jeQT(XnY&_JSZkm4}Ad%bqz1<4Udls3-}h zyQie~ND9QzPeZUQZ+};!fX86?(moKTSK{^Ct;<(K0s6vh+3dH{cRP_*Ys|W*bVp}= zh=^ed6tfe~6ATmUj7iTjkwV)JWB2lKvrYV;1*IgX^yqF|y`WcT{eIi4YwSOQaFf4p zlYQp05YEJ;%H;~f^*ua0>$d4?VJ0b8Plc8u;N4ip({!2*@Mud+A5o*V4`M01VJJq8KVRAdiy|W7-qI^J0)_!`h4ftAaQ#Z zX{c=OsNNH8ho`&E(B6s-T;DB)h(W@PROBV3D!#hO9uOr=q@v6*mg5PSY#iaHv#1q& z0tBl3}ZaL)%#cgWOkLA`KIu#GVzVFS>WZ9`|bw5HZNHe+h(dVS8m` zd*#md%DM8@dj8j;GX=VFRe7Dlp#}_@hGSidXmIVeV`73-kfF_Zp+M+e$!u=fBfEOV zBKp$7vv{G6^%C&1%jVRo1*!1l_B}Q`krrDre2M3Z5axG$iwy1Af=^H+7Q5S)Bq)7l zj-%LChystJ+G_Li&_WOl9ap47%x{IoGuN+n!qZ9!f@9&;-yjZddL$)Vs={ud#88 zhL_^DI@}Zsgs#G6Co<>1^2`U8AA0h@a{Fx}zC5QGt&dkl2-S1bEHBCbEegE-5W~^m z48ezn+Ib36%h;UJF-sMoS<-5cyhS2w5U;DYVWJ1tl2UH#YWc$2Ph8V0=^C3Z6skJV zc&L%pV`UdF(GwV+d%L01{^YQuW|IaWpKn32 zUG=R{cWG=iN}tyV1(RW>5;GE79a zHu08?@s`F&FX*W#u*mLH6uceXG4)u@c>Ad!v!J-}5gYAM9|qTVPozXerwcNmT#2J`Pan+_!Ou61I=gUk*(n;86>$&pD-WNZ`;F3AXz4Py~{6RAc{z zTg7_2QIt@~5Y9e!dm_sao~z64P7&ba*g|EoA2~#jy79Y_shYDG$JjhuCCq8;)deUhESE_x{?O*IUn+uTaMQsR 
zJ7?$5T^~-0F!SwM<<;WLHfNMQ(ZPdfU%FaEQ*+*?uqsz&j6!%7PW^2 zeM`&Eq-(eCRv=R7&4C5wTI)7?p2&P!u{8-YD{e*lX0zvqo#tS)!`|@J?C`5GCVEoFP8@-H=w>!8F98t*ehV3i5=Nt}XKRF6`}#g$hZLl5 z_4dw>_B&SqsS73fQjSx0t1x|yb%aby79g#ZMEQsf_PCr39x8@7cm!i)He4WZT#~~x z_yeAi@fbqdRw!Nyx9`(3fzIrxEAT*Hk*;2$T zCyPQ?4U1JoO`GZXuC`Xp-r#LT82(dns=7hpuqKjMj-3|TbDo~sathM(?C-*GhKRX@ zFF)|?Ie4@DNa0X(Q?K`miz>_vq zAap)k2&TooB7}0eN|2VvNKX=79TsqupvV&t7;G>u`Z&(hURc#u4M`aHn3vJAuQVo5}uWs zr{#-4o4a*Su_qFl`{JN*t04UuE8-GlhXDP7m&`GvvG~yHX|vl_>5}z1kEUMUkW1|M z!f@tBNUHJGo=8z3WzkPv6H4bO6SH##;G(TH%`R3Lo_V^~J^e1bR0P^|WbJF?czO7` zVEB&SRaRPp^>Kc?*NMnlk@bjmQ&61}>ATBb;{Y!JQSUMqvyp=Qg!b=zX`0M%qF-F; zd0<}~;r%T(`%EcPYvA&;!PbZpW(41aTH7oPTe_}<%WZ;`$_D~n3d~bYwqJURvf`XV zuKJ(z0`|_qf;D^`5N1l{M~)+27oyqd4d_IkX@3^sj6pDNMfJcVFF95Au4gQcWub}o zPa*m)C8?MF%VTmf^t1ZHwZy<7*P%R_mixYqB9Kl{Lt?9_TkM1&KN8lg zg?JWSz$nSS#bwt)rgDzRs15K+ks<;WNacnnJ=EcPh@vj!IUrSG-_q?kAn&TpHb68g z>Zq#isG4r~2(s}E8xHK~M&dz{&zd2M3VqBL3SD*^=6N~Oq4{7rf^eg)A_vc1mL~#z zJXSXJ3kAa5?vsh*14KQttlYDp+sz@6_n_&XB_}hh&}KS_+;24rQ2kjsSlcZMO*_4^ z(%$uOeEkOdhr%>Trxv+JxjsNlhs=3(sa+sQu5PKR_8pIsI5omv-2~xx-==-L?H-TQ z1t$g~9u{Oq=j5mH*7Ud#h4ex>ty1Ce<4I0U>0wJeB^Ech_E_$ST@CYeg^(sdLv3VO zXS&si0<-))@4~nA_A?2ZTB3{QT6<0un9611+aZBa*Wj{rjOPo(RdHr621WiV6y_pb z#>)1o8z4~lLxpLv^s#o4LzW)>)~*$#WEP(ZcAbN~931%F;0a;soh4hWGy(T5FSIg2 zD#bJ&R>mK5l#6rvbps1|dM6;>@A=D=_SJ!d7N`{1C_77A+yU^LEZ z1!w@@2E==xO(ZRq%ktMfX-+wl?WBWp+(p~JJzw8qb?7}ed69_k*aHGo|I)Bj__*9Dn8+s0UXY=S8MaHkQLB3`rAiYk*i zwRO=;EBX#Rfwx3rqL}qSWOS|7d09-p$h3K*H6|$W)l2wh*=|u_Ilc{ddOPh^!O+8~ z8sY-&awhm%D6wlK^5efL_ng7iBFr4?4{ zd3qzdT&}Qwb&(^-$2`ZIQ|;#p!bkM%fWHizDMBbcOTwG)35Stx+nQmY?E<8Rdq&(l z)Xo(k4l@ZzquS>LVL0Bh+`i%A5%&$XZVFR*X<2Dmk@XXz1dLPV*bq;4dg?|iQJB)` zcG-xU?T}6#PPRD-h>2Tk5)5pP&{pcOa2WDP`$(Hg`?u$b%s8yIj}v(~s&2F|-a;Pu zO-}7;-&Po|isWCryFmDE6crZHXLg%MM|YtACBV;}4Zv>RnZi*^S*8^W(LSD__Ij~>V^TY9PO6NDj&P1|b@kaw)Ie+vZeBGbf8 z_NiM#+P3u^`Y#ip+@iv=eY<&#$`aNzDoC@kGjTWr!H23%7H0I>e%3~x=fxs~Zshe- z7cJDjQbbrOh_S(0D-dc&3Tpy0X16G`3yDC!h_>J1v1O}HY+7k29OBRM)D(n>1nCkM 
zm9_>;N&>-K22ziADNHf%%-nFgX^B1T5YbXSi99L@4`s!eHy$q#b_GbHmE@$M0=3c8 zW4c>A=@%XD--ztdHv7;alW9o*q4tqr=zF$id3HfEIQ$wV{F!4tM1ajqZf5X2Ba~Q| zC4#rxW{C(#W4XPtdI2AkEsNq?cH3rAXbv?FLdNa4pGdHFLwp@iMWRrr`kv=bsw zC#K7hO}?WmD6oh(jF=4&raVq@bv$U?C8*r9*+t2{z<-GV2PJ5_>N>2#GDQg+HNIA| z$b)lYQs#$%15^~;~RIOCy6XnVE;rTZA(N997Tp%>V@GbUb|V+ zCyfsA4!C!IiT&CkyT6rK-#91;d(rBN5l3uwh(eA0zRD}?UqWF72C*rvu+OAG2;4_k z#M}>ag}&X+6P+p3xq6~~(ZeYS=G~;QxREioc>$6vrB_cIp{Q?v;j8S6kk6v8C2b^A zD9vmGci2xnC)#7WI?ZwT+9YYjTah4)AFG^!;of5y{fsEo)VXK;$rU?oz7m9P8KjTy z5@GN59js+CdMfe0UwJ{1R2jZ&x7$miFoQ59JlkFqP`hwEbxdJ8gFW+n`tND`K!mWt z7|iG*fN39j8ZN-3E#6}1bR!LH4Mh-uPwDxBK4;}|Nqy{Z}+$3ZMC-jwsttG$R2hw1!W0n zwXLlea!D>3H)dgItw~s6iOAk7KtKo~tgzb#Y^^N@tqQf)8!-V2_WS*IX#2I=-}80O zJKX;NJ~W=!`|S5QpL5T654z3qC_gv`I_%l8Iwd;-M;x|Qt(2G17C}m5uajfXIv74| zfrSe~}>IB~%f2VGZ`IoSk9n~Ur0bG>6ZR1{2*5rsZ1y7kNK29MIU%@&^V zq8faiwoly^1xw^C2ZhXU2}YKL+kYD>Li|nU_gI=@cCScum~#xrwEMk)j@m~rEZ%Qv zhib)|MYd8f_TcVRa8SB0o8VrA+gb!;g=4vc#mTM+l&&*inO*eM8ZVZVKMTh16+fcr z1ma4x&*cKxZx&S8Xc3qjL3-C!XyuJ-jOgS==A+Z5c0wc#9kS;|HeQH%&yz>W<_N@g zm9lq9*jy19dnCqmYT=%e8T1P$2Co@h4>ZWa?4L!9eJ)Jp=%wN>P6^vul!Bz1 zds+la8`^)Eol-dVIEc*bwvYFNLetJn4>~X71xY*}hk8>Krr6dEt?TU(f!OAOY1zqG zB|`iib&y{-OXSbJ=pX`Y2Y6?VVqPLgSbZ{GQu?2$FiD zY!uxf;3IaZCUbC6<{+nY5yQu#wWq76FG#|t?gAXD!*9l>c$FIZc&j4xMIR2o*zB|I zc9E#}(wuddns(R&4o~k785RlBjC@@yuIDtw4}r z;OV7NJ3V_y1nTV9qr3e?VcLwmevv)wpa?xpdsHBf=fp&XV~LZOIre=)8h3Hs z9?MhM4>c0e#;zIL$0ASBQwBX0j-Q3DHL*}50;M61D6@?%*?26NmQS`{cxH)+z3yq* zaUb8sRnIrc8rhtXKjj|6Fd(?Bd@Qr?p!E|a4j{6t(%PMF6Gfv!y4sDcUbSZeN@X1# zk+sNO~I=Z`gn*mZ?unni&%XPdUv~0Ks%9e2cb*waEYq>h%YND4)8Eoms?`Dh~l4) z?1ggAu+w^q$RLbJ6qQfpU|3$F$KqQ>)wezEOm&$2Lqv$&HX zi&%J>wn1x!VY-f9PkKrOir8fQN+=HB@MN()El8`mG+tmG0n|`fYt)#K^Iqb0|t*BXa6~X6cmbP?o|7_ zJ({EoJSl3b7+&j4x@S$``Wo9QO6=RHiL3(w)0x7St#szJ7gWlKe{kUCl&u@N;r)m( z9}H9P-;7jaM0l=!R#85_vSi1au*15EsMR$-$TZtGM9`-iEM|=niVZ0!=e~Jb7Pr>I zl-0sfcSol~Hrz<#^FWSF0srxqV0*nhE}ht_v}E(Miz|#cE`Vi7d?xHEC14a8Ho2{M zNsy)Yhu!U6g9rwO?g*EM*k5#t`nqXLX47W-gebANv6C^TYFB2Hu&3QG^F5;QdntjB 
z=e9n#H~fDer;kr5BFw|4V8)QgniY1Hh;V}mh{#A+p~#JidhzcjOm$MD4L5kVIe`}4 z%}-gWjTAvY!a6uhp=qT}@HA(plS%gi=zdVM69L-ai=NvqHAH zxsyF+UluVg4wvexE*_X>*NH+A)YYR$7whVaGHZR2o91nY^3NEfp8)E z;D~DpYj)^LMfVT~Y`Q0b&w5F<`NFj4hVON~&lU*KRLmm7w#7p9M@n%Cc_B+ERkDe?ZPw*e=k^bwJ#0@!os}Nc>Ait40brA zuMeh|WNyYricS_C!#6irxuDl5qfgEDkW=){ZK{%QnpL9EuY3%**jI+d4DyhsqhpN? z6NM7tKQ3wPt}HCW^wHgdo!^39M2futl8?vU4@FKDg+8JJz{BI?+4!Lh8vBe!QQ{QG zDo2Gi2{S_4cJrTnUhcQ!PHd~<5)Q%BM|BnVTM=nm$CAeL)obj$LxjyXpvr$yka|(M z!3yTzJd$>`?5nWXJF`~L>6o_CuDU&zL~L)w5TcVG-y?P#nmy#{_A2@Ag6jfpdYpI$x4l z6XhGC#KGnI&OUrcEK>q!;R;l$DW4FLG^gaYk9an)-JR@rnfcx00I ziFpDpKoX1`a4`cD*kLSE4D1hn3h?dq^b9=3FBUPDRv`@z?81lw^SJyKT;~bGDRWh3 z{|ehKN^B5!>SgwVFe9j$&&o2#Ue6{tU5|v}J4cWJcI9)an`<8zf+slgSz*HjSlJV$ zGI1ZI%x)Jc7LV+8Z~K9#KT*@N+wK?eDa7p(@+X{daz%{8F>s`%gea21VJ{;0Dz--? z->3$sli|AaRZ*!Zzk+KL%x_;Y63h=n{|WXf1>@+1^WQWrsLZ5mMAtEE+tMI!jZ?8k zoQh?()mb-1#b%F`sk}bIah{G$ChZ%7%&&9X7uoF!dvmxoix&qrT2usC!euP>Jj1=* zESoHH93T{33y?dk@}j!%8ZlF0tvrY^^9eLlR92eMEma*)NlV$w<&+X z#(RoOHQS|KIMrcrtbNpK1>yAAyp%O5Ojk~CXBs{w5c}RXl^>u*_L3-Z%m(ZI?O&ce zt@p)GkB;SBSlMK^dRRIy{S}Tq%uh|U`voaAZo>5BKF1Se_N@~;Hw*etGm^^j7s6UZ zg==FgWxoC;q1cdAA%2QKc4x#%?5;}3_VdEb4dyh)29ddz-@=65B693e(7>#*5u(uf zo;|wRc!lBI>FwM)V#b0?$|>0llFntWJSK`b)W{u1okw#=9{yc#$2__eV;uIJ03Z4b zOZ0}zb5WZUu)|)KryI8`#*iX5NLOF{l)hgS8o{}k-A$66tbHn*=ys_5o_<*rm{HJD zmz!%ng!~)dHj8fy1qbUbV&qV%i~tSrLS8+J)$Q7|)s2@an#lWCWcKZ};Yvf_Cd$^p zNP#$Pug!hb#tFix%%K_fLxt%V2Eq{t<%FLnf^WVHk`Hq6Rbo}5>C=~&-YwV@&ERNT zH!+R2SW$7s8HmwKZU7>Q+IY4<2$#<)1uZE|Cvm7d7C>B%A1)vf!NZH~h9f;m!Ju7aNw0Y0`L*xf$A zvAtp0xg!hg*Pf$&>H+Ien1=95oT*=He-nholGD~t)ZaZC!SbCa_t@JWP5G61+yve@ zj^vv7!JQKho@|IHG_+sOzLro}48Xt%mwDA5)v*c-FbcD{Mu%*f<}JZ9)E0YI^wrl&5!rMEH2VUYDg9j zz+|R<&vRn~3E{SUAv)22%s4Aim`-%8n$tdij+F|=2BLf2vy092w8J?e{K?a1w$w;& z^fPypmiA#-5NF&DV8(2_Re%ZxkL_>66_%uzWPrE_VB^>nmhcgZphs9?;)^K};+N^W zNt>;p_^LhrBevXO@Gm&SRtQq|=sWJPbsojrN0QxSvqOe&5d^ zK^ps4vzWG_nExH(W+?QW zQfn>8r%Z{JibAJo+vxry?Qj;h%uL(Q1u2P(3VYV0&Q>owTQk)%g7HJmNy!^6$GyLb 
z2IpC$7Fd^i0KUy~WwxDINVMLf$NtD@C5j4P6D4whd+{9mrZ9s&k!_G1!C8R_l&e=` z+b=+0F|Elu{F`jv(>qv4Ph~N7SCaOMuyb!w5F7b>HU&EF1@?}6V{P2A)`!cED@1@L zL9cYJeZb+R1A8tmTos{MjZ8?wKB!0-)V_MQ@q5`hN71U`IrU19?8h}gFE52tF^@Uc zSI|#{5=4_KRG>vRJa^=|O*TSl==JGM2r}I(z^{4R)HUHHbw&7<*bx3Mx1?gp%GIwN z9a@oa9MvIQQ3n{A5KNlPvmYx#T!Gs4rDXSwoPJu8czI7L=8&;#z*Y9XJJ}ZXc(Rmra|#C%U%_X4H_AZFZ@Lm zI-7@tvi&ogJGW(bfxYXyF%lChDZ561*}*fpe!&v<1CMPvj1kfz0S(@IK9#jrD#-Zs z!>ri`g?%>=^*trP^|+|9DUwcR_i;`Xn$ofrXIrlc#6e-M?zT^U4@~VPL8*qPpld`R zHFAp0_E`bnUdq%^6d%7<)HoV)>3XBX3(J~18u!>h!C1o!bfWVf`7NmU_ zYw9m8S#W9NVOuOp{IFPv+}a#3z$>x^jn)OhW?}l?v3I(?;o;1I8hcaWIQHCxl%#@3 zq>p?bVr*kBl3L#sjE%}oaT)zxVe%kzo@v0jMYsX~k>?p*60VAV>`)Q|)$?tMpk{2% z{Gj^DIec0;=JI!+E%(HU!bHoiw5=6n_6$$y>U)n+tX%?y_LMLJn#?9WZ2Ln?U3h5W zFT}m}ycbBCmfaj>9Wn&k1^h`6E@n2b$DgyL4Z7S1g$_%Snk&Hj1gUaLVp7Wv_P#rW z;}1@0N?WH3o}ThAt>0t+`2*thOL}N(=HT+o!RpMxCIa>`(P32E)a5}Qg5BIYyHZ5z zIx~~|)SY&XAOq8_UthaUVWv+R4!5nF0F?$=F3F1x7J*hVPKjbRtD`(;j~+d3oWgV| z2pfePjzFO-r0G(Y@M!T2Lz26ytymOVfyCD?G+oO)LD)S%s@?N+QE1o2HLGlKXZS#^ ztyGw865muEUsXM4_WdHoImFFWc-3ELFNsJ6ZL>DEA?*2zL$W-IWuLPSF-}y*sqE?> z#>!+T=30fdb_CnC-w>kHGUd*$!oK4vGfhiuvcgnBuP|wB4|-I`@;Np|VVD;L+IDhj zUG0%Qc?X=KFs$JhbgOL?@WoeOsVphgi{{G!GUE*R&$EMmoRglQ3v|YjsIggxnT;X_q!BQ8tMQn9&{BG||5EDO!vH6u8l0GN`)eI_;%g|2< z(ad0nrp7+yk?De#^`&;LkVccexpYl$7KRbU=m*+2Jd)vTUBV~~7N(@&66i<{aLi0O~T5P!R_=%{IfvubL zGi>#u#41atBugriQ@94+?q$jR)9i)TD5%F_RF$^O3wE0RO^j)`Cw}$Nq4wG9ZNDI_ zLT0`&Wd}T~zDl(3cg_g+D7@nYZJyJLWi&2SEwb|p!>0;4$m=ma7NT_)=^!CEfFrfy zDJ@$|ZIuY*E5|y(uM~#gw;$0GZ*!jTiW;G5L4 z&u$Whu}CAl&O_48f^iNENf#j1af`=usle9RCS~LGg_lPHjgniupD2<>p*g14ZF^W4 zF0=JM%99sY8?b(s7g&-$K994+e9xl+q~Es5qt8yAEn_;r6ojwkrPxffS3PoE647eB z+h_BRO%^+*EoJ#>`)CdsVeB(IxmD^T5Wfcm`tq%>FbVtM&bq(CFn|UOK|E}bAZ_k^ zdBcrZt=%aiONqq!OUwA#h2!57jg}Q5$(3u9gg8L_0i_fLDG*(?=`8M#397(dtk6w5 z)3Qh(bYgq`u2&B{8qbgRKO)A#L8!4TJ;nav$%33@{8??b&n07l?JJL5SU%lPonI6& z_HZ;hz}8im9%5t0dMQjx>EZJ;thZ;^+&s7&B?9_zxf4ZnZMywL1e#KqTWHf1rfezLV+?Gb 
zU|gc{@E}S53D)TOY2WRMd{iP&c%E~M_P1}sD6_{8)WxH$-M?>QtdI;N5QZ=098(C$)EDWy;n_?`gtag)dymT0oEa$hP)P{&e--=Hx zZ`m=4o5E=#P!)BLNtSXMirH9OA|jpS7jYdwqVU@D&yLFZP)b$s*J#FE!&&nP0cibvEG763BRh-&PV?mU=7qG!Aa4Wh_e1z!olETNor`d;6u_!SEEuPOhY(I)+$OikIpwAr4V(6u#kEh6NYmjT`Zwbar(C9j1 ziiC9SJ0eqEcW#}&?>WM}Tw_Io@v9oHWpI{9hy8M`ZOTR$rWunh!jT^+b6{-bq4OysWB>*DqCe^Kjf|}>t=i1W(G&zsu zpw%OgW8Y=Jb%20lxxFMn2@*|Uhw-vQX49@SQ|-J%sj(cYv$q^-l_KvIlVS~BapLtJ zg_%r};CR03`Dyk!kJK6Ys~(Bfj=8Ao9kU)zKx1DD#AzEXf`7B?POd+>*6tQ1vb8@3 zb1YYw?xZU!Ev2x!y>CTk|7xogj1w7yVeGsf6(N@P0B*RLvRgcNSFWtLE6k_GogrEi zx(Q?BcT&Xo12N(rJ1^uLEtxuxk{fS{O0hxx8#TOnc11oE8q%?P)A^a(>^*|9&6p4< zvyTch9{9d2vrz)El?Zt820u*%dh}Y&0b8PQ{M>JA*{65$l3ZKi*@yQZGtAZqP!;c! z`<|<_O@g#{04_ptwkYHWeZK-s$rVvF#b%iiOYLNu2g_uMnwt`gJ? zms@NcZSUnO7dZ^O+B0#N!$IVWLXm&?SuL;|gxS%B&wf^>VPiN4Bkx@sqaruQr({^Z zpVCrcX>qy?t*ZHgk)cJXc<`3Nx4uwBTG3XusI7|kXnq(E>M{C+=gl;);V5HI3bK*8 z{(Aenf^dxYGB(@%BB0M)2no(tgo*VTA*$tv4#!V+v!Jhb(Gc)n>*q=6MM-(uzLtf( zZtiX07St@5yWIwPxMRUq8|ukAYBrv)nr_8{Gz5Wkj*8ep$hY#&6tg#7s9qSMW~WwR zt;P1R5=h%~&(X{tZX{)7E7+^6533A+t8K2*M3$G9^Xs@iixEUj*aqSFDYYJaX6LC| z+b>Gw{5Vd7MfS1?aj12B`d7~p#3({L-WCOpb?w=s*Z1v1MVFIzy{VghF$;V2LYB#4 zsmz+B?3)geU0BF{@F+p*4li7~?MR3~(Ix3A92d%k_}Skx@|XnTm}1@7A+;i_I<#C12^@HOivKLH&Mox3 zOyhp55uFD2#Rk|kh2hS`mcx1Y*2%TGBKY9#=YHs@?GYvZ0AgJiWs)g&(o3l1#`!(P zM8~1*hAGJp2~zlAL|v~`nC4({WYR<%=?TLR&ng=$80Q4{11M2+5=Kk8F2xzC#EZJP zt=U#8?9E!iH-#5yQ7Ak&B|(UxJXygnd&{DQ?!aJ3HZdic%avTLt1!3@A_G z;O+&FMck~wUKXG^ypZcCdqI$94?ra5j~>NJwvwIk`$~zT7cGYo=ly^Hon;9dI?!$w zimXo+M%U#dMe-laC_l#=wB0W%wPm(#l*POfF9qA#jMh(tXsDFlai+S<7Kz}uGSiqA z{D|>bH5DnR5EH-NZvA+mJgSs&*!oYLn4s}L^`c=2N)1nW(OtUslPFPNJ0n7D@t|ZW zDpAFDbr~f3$FF6LeN_NHoIloRKTw#yvRg-=tyqYh?A00&7C*q|rPLEotjw_o6^{Kz zM82qH3(v-=28v3*VOJXM=W0QkU#KT_KFddisevz>DX~U~x~HJ`a!O%3#z(|5FA2;S zL=l$`ojf3!8awStD4P?^2DF3!R}{^}R01K_kC(?HGW+%fsp5k+SQL87U1`d0^Jub< zE$2v6WOsOU0p|E^R5rTo$>jwsJd3lG{FXX?M(t;!h*@laS`?;v!PbW?ru{(#D#M_1 z($0G<`@ZQM77p9HDq^e@n(Vy-@pI+pm8(hipFKp2vv*KQIW9kL6#!QGn-Wb9Bho>!R*~1;`2PemR_|&S|V186E 
z_V%_Q;KwDmc@pXTaLGANkh1!s!v8&mwa(rtNaXOKbM{%G$n?B)3G?>`CrFG0qw{<1 z>mo#^@(xvEgR(f8!wuOmVaj7x-F27c3ek@-Sk+prFck;Cx>IeLAT8#=UuG*DpkGsN zdp*-R49fpr;aJ^JwitFvIP&h~9$EW1*<{yNLW>N#tlW>4TTy*Z)iS4KPVbwdsR@#T zUuM;UG~xN`=N^xMI2zs$gEZ4Uu2AoMO9WyozAM$JjlwJit(de_!TfG*^7Oxx>43ir zXji=EvEZL%Roj}Q)_n>@DhNidk6BNL)OQ=AL$?V=zKt4SLxp06_*cxg`-CYl_%_<@ z?v1AYTO!#!T#?14#h3!w)S0P?_wuE|nRB&m7m*Q3=OwK{VOqqoXBrd!r-BTA*is?I zXRV$#g*A>B6sCO~`>_Y~now+CQ6)#&8-El_YFo3iZS5SpNias^u>!puVSdJgvp{_J zVJmqaf33i?!Gm%A$5@qM95Qsr59uY*R%DC1TbGq*gyRfFs+6bEm%WTk)3!|0Q9CEd zn0D*VbJ6Rb8O`4!eYEfRF_H13m6#&m;#H!=D#PW;^wWFndeJC?O>d=j6Ns)U`FQnH zB#DV|R*Z4FmRc5%g+XGh89mvd%EWN-v$f>13#qXp!fmu>izX#)oFD@+5KqX33j5D! zva}e(HCE)&75tEwDy-3$ZpguK)SYd!M0CN+rf9GyOvOXCLNSp=Bk{vkU@N_>mVHxf zRcBck;tAqJa#g%mL5Pl%#$uknQfK)D z-kYt95S)q^Bb?lOdo-#~T#DV{i=>}CS60HaWGAJ0cBUBvpBpS+ z6x!++;6eB*5;cT4Jh3XJfN3)G&PLy+3KNN0y8Tuwz_#|}8k?_RoP`4s#!qosvcR*# z&`O@?Y^|_nqE41ITr|nzMZJiWhsBUGJ177LSesF)v%`Y)IXrGjIN_uSGzOD;7;7%I zR!`r$PalgtBM39wW>2>^h2b8C_1S%#6A~|QPc9|;Ci|yn4Ytva*!v!i$wwzJI&A}l zD5zhz9yUZ_wc3qOt(D)t+eM_+IO0s%{Q|MoN6=DEdFBgC57`3>N3J|j&fXnCv1*T= z#E8#OSY^AJu(jA^u!SPha%_F&m)bfZYQhX1-;)3gK_eIo_dJXT0tK+H;88>O+dKHmna{6?7C+Nq4|BQwk0_K&5y8fjJb>EYM1(6B zmaek56=vl7ea)_@B1of1o1ZLgsk1MM5Wllv?zWF`Y)?ydf!!;pPVd=<_nge0Cfy7r zvuQ?*8WbUmBW1E>Dds5b9!(oP9qWJ`ZapQ~d9#CLdR-8=2zP2BDt2mY8iR#p_L36E zcHwafA(?8hSj(y3z9bNPF*-4Yy~#)sXmzFu75iHIzMw`WS;l^Si%=~5(&E}n>!#V` zo}7Ehy(bUZZciY4!odKgo%94~0Hu&HIwMHs{gDK-*972f&z^nlO@(7CyY{*j4Ns3d zKVy}Bs3vAgO2+FG0XsGB#8XQxR}ru$Sl!+$ch!qTp^?{K`!#FwSUf=lQSOxO7bSiem}%Gj z?*uz3nwoz^#<%UX7tj@9_TMW^S0)C#WTn>ONtv-ddv?7HlMj@qc>aIGV=qjfhfg{C zmlrf@r{gz8iXSpRF$I)Jr-2da*)eqfjHl;@6qc9yCsW?;TgK7=J^tN>YlWn&E^C4ZD9A9Kl3TXwAHUrNs{H?LqM5MO1S)1h0<Q0Es+n5!6iCRv$;ue&LDpIm^7MFm2?FT}AmX8TiU01wobXPdC_~b?|gudZPv8Fqnn~(m2%%zfay0Fy7 z3elLe)l1J-&%xrm6^cSpJ-eZc>S@{gaK$l4h&qEQo;miYXQc^Tp-9ent0=K%tQMth zm#{{kUT6WeeO^La)pGfDIN<5gCCN>32sC~aIBw< zXeJ!C7Lll*-=0V9)ol79(jmSmOy6|;W@Z1o!yNrymzd3oS@~^FT2BGV1L?@Z!%o!w 
zMG1JheN9ocJ$U}(!EmTBJYhzR8)x?lG5Vm1SP4v6p$IIU@qH-Myj}s6_=`lRG3^_h z&eqJf2L%0d3wt^)ALffj$zu`tLu11;FsF2KSt&?GGUuQxKU?htdWR1zzLqCFJIa>G zUR)G{Gg#@avCqr}MEUn-!fB|%pd z7K0h3bt39lc@=9))W!-Hi0z0XVr@f`+$ulex#O)Q`~HJZ&$X9DgdgX(Olz-w%sw;^ zNcSYJ9xk}95`p++5gsGUY=j`Q>B7dH_8o=8+Ojyf8fmtqDEdl@*nrz)p;&)z38u_u z2*=(@G8<3ewn-#ahouy@*)wc+HcyEz0-niYWD$2;#xpR^qcA^JWbb~25@_Gqs(ELt zYV1E9p4+~?edS8KQji9XM*x3wj`0AnQ_Gi!By0UedeSAI89qXas4+{oiqU6$^$A1Nx%TU1-px%$qgiAF8qFkfv8 z90uRpYi*$*EgH`2bD2FWq?xquP*C99fWKNh?K!j`hw!?Bd)u>u^!3uN>Gltg zYMZsYZBDIy;!%%UQ)>ehj-xIuVH+;Y5VK{R!v+B-pCZ85@CLn5>UODY7FGc)UpXF- zlrDL$%@C&M3IyF&D-2J9ssVP5vD>z&ZR)1BsT=Iqo~&bOy*;lmwbFJr>?|{Hcw}EJ zpL}isk?4lk`< zc4_%y+u|^2ww2lAf^ioY#LbTc&F=%&lBH)Fw*_mw>I+Z$S!($bS4*R z(N8@_JnWco{Biq=!W<>;9An)TjPsRcnC*oP7bOk_tV>u*n11U$+8z*~6zPDMDolS7 z0GC7i8QFX%SKwObQIDqn?wEI3mW6jQI;$N93%=8BjUbh9Ha-3!SH55uHk2pDz-f04acbb8Sb+e;r*m|hgN zY}U=uR~+)-vJQi#Hwx0OTe|kR#qJh>#b_(m1&zg|2wFtfur=dQ5cUwwtoLP#pn{2P zR4cp){1zo_ssLY54lhL7MTcj-nIcjy7mO<8kgXJfHo>|S3LD!z)8&J{oEaYzQ9o66 z2cpZ$;J{&yb%;*YB`rtnf4v;8-!j|Q+S`JREIY!SQnrMB7J(UiT{t%!m6iNEMPXXw zlY|Xb{&<;EzFRaJ(^j=Y&cN;w6i=(x2Pa|ptdw+xJ>W- zW*YeLFrK)`){Cz7F6@=-h2qzTD(e&}td=VhG7-ZjRa5TxbIyruS$ zC4ktj0oovbT$sp_!|oVtHwb7U#=D8cwHq^AX4;J+`s&`Y569vAifp84@$(MCOigx| zD0F~zBC7IKh`=Dwh!d-9i4aS}jo+}19)X}ehr>D{IN293Xh#%|Z5x5(9y=xskAgB^ zo&C(CIe4V7Q*P&kY5J4|`^^6nhz#N!qs@b%JK#uk;JM0!ApHus0NXw?1$CslqQf5PiXlMJ@6&-sH*&TxZS@SL6Ge?BjOgA2} zi_i9lo=b1r41A44woo-}t5yiY_DOOK(;yJ1Uw^(NJ0c8|ZbriWwCBTmnTG2_D~O^k z6VOFMfa0Tqq`YfffBTfebSl9)z0!IKX`!D|tGlBlrnOp*tdJO%S2yn3v-aS9Ck}&JVT~q9H z3dWr}qKX_>Q+A_BvBSkl-Ei2qMbU(=I%Fdirp?fKj2I;Z$M`7?cWmJ}p5w_c?5MHF z6sDDDt7o?#Txx3^1{t_|`=y|`e;A3&UwgLk*pX~kIJRLd+ArL~y(LPVk(f`$FT_o& zAkdJU@)PTmHb{ti_+T#8F1HFn@*`Ine!Q02Vo}JiOa9Xhcs^O>IXX7&IJbDNZS;70 zp!@zts}q5)o!%Dg9_|gAPJ5k5HDi3iOInMU#L}oowPzg$6I!MAuGO((-oddO#WC_f zM4;ec#SfJm+474-SlkaIIiFX8I8C^bPS`+UxO8e3uQ7uKNZ7S!KO5Q^v8HvO{XhT? 
z@I+Y39sUD?aG+z;k#Gq!%}Pb_GX_dH5;%;$JsWJg7sUEqX>%QLY|FI<2izAvYCjdA z3VHfvNBY$@K-%@(m@)Ps?8Q%IeD+2P$=eQWL?O}h79lEcV+CTx6BCsjEx+%XWvWG< zGMg>eDIKeKy?Su_g{enuvM5wIUN%hFXDtzq6RLk9?#Dd?1CKs-^9$?jzeUk7?%S1p z0sizlh%_plTaJfc0g5=c{NVZZ^%2sAF*sIE^6r zkdv64wnu~*ckENxV+zN%4L}m1K``mksRa5_1x z4dT_JQ2M!j3+);YKlISI?Mn*BntJx@5@8&LOw)nP?kel*MZuTWdL2itr)SC3VS68Q zBwpIUf&J|v4_{hxD26Zp?&%(nZH3MMx8nGkIN3ZViq?2ZOkiAoUie#WEuzEe*43?x z?3sVdhOGi*=bjS<=1s(+QNmsnVytK-8#%=JcDoIAv|TSkWK-Mhb#1k)?G{n=)pqG) zJ+iUkNK$Wii=z7WB8Y+C>YX7_V-rQE;HRgCpPECK7J+tBwM>Q93i?NafdZQ&9P15t zIx}ppD6rvXtX1^27d(60)OuD$ds#59{bLbtMDW$xvL*bnx$)xeX8WTk@R_HQl)d2~ zkt!;*FKr}-#ze_)>nTVV^RW5T*Q1IPl7_Znf>APZa8Vgftn)E;oEyn5E`cpf9c1-W z#nO?Z4DrLrN2{ySj)_1&7{Fn7+N+MnbDNWAmxQSJ+J|nk&(;y9wOA2L+vfygDaGaG zIpub}2=FHumEVesI_n_{dHdltae%^_GUY}5ynI)P-i>Fk3}Y60Bn=of)RqgylCet4 zlk+wwFp2epJKdT@pdq)(M)_L;)Y2Vadmr8uvv%ngJpT2z>qVd<7$u2>n+4;T>h{ih zc(it1$OiOs$i7xA?CYsnwAiL4?d!t-cEus*Re?so1{ScPv2k7iy3os+>J@@SVu*uX z?=OXDH7ojdd(~6Y<#buObvVq_t&;WT|JWRpqHtJX9~FQf;lt3)?l!Mn+e0&$LpJ0rZNvAdmq zX12YenE3S|XD{2ic1a{!5)5);k$Z#vLlm0I$sDmldsC2!aBgL-y`?bKa9+=`>$k*o z!OOukyG1Z|Nw#yiYVPI<+UC@^&6#KSIix=&<+yuvh)uy)kF2r+L8|H5t(O%mtga%> zib-N2h5^@>!EDJ)Vf~0n1HttW>=6;;?7o9bIYdrGpcj0dM_Z5CU%e12ADftn7f(+g zr5rE{^|q%EHqz$VHTAJl(&GcJk9|QnuEw&;e`2j{{(VtIO(98hHSz`BAOif6dWiMN z7J-^)zV#E149e^~f)JM7B?`lFdK02LGW$BiDx5@t;dYG+|AiuH0OlOFB??nPewsU; z7d;|19>EO{kL4HnimfDyKT~1+xgO@$aI~m=W=K(>8pD}|JMbTh29r+iIC=Q#stBp+ zmBkA!Us2RlkVHMIOn`{4OtmQ=>*UfK#%=Cb8aAbM1uZJ-21E zJ*P1JV2j45Zf68(!~}Fl%j{1=6ogFd0sF`{0JTiQV_^Yafc{HZ9l!)Wy3}*5y9jC$ zCbfGgOtBnRmSg+i+n(0nc#3SS2=Q~`C;Ry0Xq%7o>|L;;d$*^j_c&2&!R4R`l*dJf zG_4;Hq+y)Hc3Y*#wbgQGy~AdCUS`38@LD|A5pK5&kve%%v^Yt^PZ%N{e6oM=f--gMIi1=of^bK6)L0FQ z>F&MhpD^L=-V3xCfefuX~s- z+kb42Y5I2WYoAn@Vkzf%W1-z77#m50Do`R!S{Kpc7a!KfMY7G-PsBJ$$L8Y={u{#d zM34G<3};>r6G;=OS05XtFxB7-ykpf`yGKxr*~8CGy#OOvtYyuP2~k@(`RcMtf;Fc+ zdoYpGXfJs72juSH6#-gOUcf5NskXx-3(^SPUl3s2I0f$F=V!OQ?WyT+u5LX4mBnZe z723OY#Ok~J+duXm5$VbJ@e}M?g=u=eq$auuL@ta^viOV>78kLZSP{fU%8nZbG>n*^1 
z{7#+?SCC3OnvU6no(n#HCz;|p*(yb$8BmZ`TPQ@0h~zceS_d+nZd*KEN%=&+qca|b zbYz*mZ)eO@o>w-<+tq4v4O(=+3S~aw|*aOgs3p1ZE8cfdceP&mvKw?TdY7~ac)09 zBnPZq(9h&jmwhalE}SjNpSg;PA9fCo)^t3a?**M(d(4(840m!8{B$fAh?A)G$g^AR zR~}iB;x+GCkCdY*#9!M<71&0WMN%rrL4%?d>+GlUIi-Pn&Jtb||HpcLU zlH5as`IZzGz?5yrM*IiSD}cquInWp+~G7>9eEls)TN z&n@0|ZtpZZ>rjg-v+lnIN^HCwHE~sFkStJlA=s*d0*g6s59X}wA z-1625(!V?m6>U=356qaH(PE8#?|M-qPZ$bLAJ*VSu>K8`#E zzb}go)2p3LO@sYh;n=QdNr(Ra+EdF8 z6`I6CR0Kc9p(!0;EP8yJS=Hvzr+4aY**XMcG}2|e>`jmU-Byf7O}DoN=@il@)9qt> z0C5}=lge4AKP>{WeD?F~IssbQv-b@)NMYJAHX39sbch@&CLHY{LEWQBXr?I`WD9aL zHD1E4J$uH3(|fEk=b|25k?B0E5erYteH=(ta)qJ>jIEqciu@UP4FR zs*bgX?6-oh7}@F913SqaHoEO4M$OSR67OJ9t$vG*5IuggIL^rIoNYx;X`8w|h#lZF zSd_>T9=tH(ST6$O3inGNusuR?EbhRR=Tm|{se2MSy9m-M?y0$`9wZc- zF*=PhqSRl9XER)!Ki7UBNUQSjEjU48v6?MBZ^x{M<)ZkavTwSsRXN#JcnPl_J=R{k z)gJN^E-hbV8$5gt<$;|F)09M+e&m`Q<&;PqGa%e@c3#}+0$F!x+Gw(U+^58m**Fg8 zM&OVLFhQG3egO2`@}fww<1MRTlf!T=*=(-}irEJn?7tO;*+{(R*&70pCGqCqbc0>J zKNiLHD3_#mv!H(|YnjEvgkx*CXlIK!T7<}zaC3)H#7eu<3t>Lwl~1yJJ(F}d*{4pm z$s*9AUR{y5^vIC50;de~M4-w0PjNf(9VoJVSkCn@`jDA_rSDXOFPx@hzi zedMeN@*5&jLl732YyT9C>>4S#vA2X_myQgQD7P;(L!hc)ld4KC7j6{AtKvsPav#=5 zH2TmN-(vSEO!x5(f%xzJLVUJuRn@@)#cp>8JI(2EA#zL5i0* z+!@TPCR?hQ*wld%6kg?c3~%fbC0HX&8!l~_YtJj}Q{NRXw%UgeTrL_7h+XoB71Xg0 z*yj|Z5#xos)V}HnF-3~)ArvPN?#8)W8J;B|rdc$_5+X!~$W$xBnWdI5nwEw7`b_=N zOnrl;o$}h^WtWy!;j`AtI@8F}crbcYAqrjSfmZAkg?%$CDJT4uNv(&!R;?&FffrO% z?Rmivj=g0`%IB4lhsOp_1cSSE-!jwI1>|pzi!s>4YB0u(mqrv0TT-ClLqye8= z7go;(%ZQ@y7t_;7qs`^F?0p9zF^kv^XfyCZhd~Kytz9ih@v<>jV#|c6I0sRKbqdEZ z#8hC?8a*ZEW;uU7Ata_r?TY8;X8VOGah;!#sI-GCGY9wD1czZ*)?J452S+%TtJv7Svb63enY)Wbi!3O`If1Pg zrdGb~V|-HwSvWRjlcdVWry>p}i4jpkPzh-<+`wW|N4AM=i&E1fUCEHSNzj)9S$)=0 za5P)ssoH0kN6hf|L|)*8;@}7ON=Gc?2}>v7O^5x{o5D1X{tUzmq2#PTc?1$HffU}v zkJ)ELi4)>G{2pE>sEM54a-`6@3Q<+t+IqWHVf}6-xyZjq02u>(N9Lh5GXm-GXw~n2 zMKH^`*ad}s=IuGT|Kwo|Gkp0d zw8)te!Bvo5FA5zlXj#v@hd}4R^WhBl$Wv?3sv6~>M26TZ1$=ocL3-967J-rx6w2k0 z#iy}F6xvdZ;KZ*yoO!&#&Ui|V&IW!6j#!&0aIdu~JSzXULlkW?yK3wof_}nB&hMF( 
zY#FNTeaE17HXt!M$8HnBCzYNCn_2slqA|{ri9!HUhz4c0Z?xSW!}B_~7cKvm7a!i5 zl6Krvw;ZjsClro+;b|JTmLAu$Z%=ztVV`V=PtBug9iq@39u~3fXMYuh!zBrx!@ly< z2v~VGSPucloV~eJ&?F(!Q>54nWEFVRV6_XhSOW(1$`SJI?WLf)Q)X2Hv2l3r$g$N~ zd~(6bgSOFe%faLG671`bdge(fM6jO_fJ2jz3e2@%3(;~6tw;vw51x~ex#WA{c+g%P?|*jxUG&*E`eHH)iS=+dl+plYGb8n*x;8K4*jd(<880 zRAgUiiK(91#jVHn0-;%WCdH#yqunY>?CBK5_9ojv5i}O{hi$yVbSkq=$_bY1QT_Np z9#fdgTUWHMx5ouO_kmC8+LUl)PSJ^Ye%_ zFTafqRv+_|8=iPByXr zd_gn{g~ub2Ou0@lj$3h}g60kN_@FvF&xQ-ahi`FJZX+DDu7Q8W0<>C#mEBY~T9xSZ z?`-vgvo%f)PGBFjM15ye)2O^6Z zhX_C;<7EF7qxQF>m9!ioN@4rH z!zK#o7kG1hkOIc#Jf=fMjJ`p`5w+9k8NX2BuB*u&@iLISD7D7~sH$I|epaV2t&q44 zmJhZF!k=*c7!E%>odPGvL>;p~Whr`NN66-HqQs#bEK90-&wNWXT8-Lvj(z+Wu~_D0 zkn&C1HKGuGb5E3vZxM*2bsM&D3JQ4dPE0Jb?}*OWoSGHiV(iDBi(lWPRxJRRaACkT z%Of6@NIjUadVx5N|JYP@Y3eNdKT+Z^%kC*F=Px19fR-H?YWSjnJ~tj@%B`1BY!2s* z+(~%Z;hh;9$f9e+cwc3X6?h3clUT8U#swjxrkYGs9UkDVM#Q)rj=<-t&IZ#&g7?Q8 z@32~hxk!z6Gm*kQ#QCho=87DfIsohDN!uub>XjB~vuziQo#QDmnX0g6&%uxO@^}j+ zLY!;7Zel^p6X-JwPlYdg86B&tc=SIvtJYrkB*kerT>Z8PFz5j()_mZ}%LSe}ih=ek zg^8r=2*gD_yp`?32(#sjNDrRdgoDqGR^(|qJ@+i*a?2)rnzq_yZF4u; z!-Dj3Vk%iOp1nQBa76K&vT8OuyH3saE&Xc6!~qM=l^VjavBB!R4Cd~o8FT;GxXPXv zCGz&Wd1*TK(O<=^L9Zd+z00DqeOz>^5889Vbiq}kXxZe&_A?5zy$+&^`U&7W6L0Nv zieMsf5ae#*3touoXW?JG+y;0dJ@NE6N?~|dk--1P-2$;;gwq5-jX!Pj(p5~5a3+);~n%Q<}v*mbH z|3YrUi)@NR%#^V3+H^s6Vsmp?hj;41W2T5+!`g85cf{6;Mw=v3^5R@>t9N@rysb&L z;weF@yzYjZ>~9MH*9Wd*zaepGez=%eGHj(Yfqo?3dy)i^G2FwH+lPe7JaE7u`%i`G z3O7S|oU~DbjAR#CO}gLXn23{zo8eG4o{l z4ZhpP2~i&ojb)jMZ_9ox5>>#H;EF~!ueLzM$m%>U!+3_DU{8q#2ZC?8mDZk3!u>L) z7???a@+9p?Hnbm}Z`ZcQ91Kn^r-17O;Tcuq&dK8t(%+4n%LfEwM~k>ak|VkzEAuqt zqzy93b~vOf`S?$55Tv*~-7_8%h&(~~BHvoFI8n;y`M;jGb=|3Dt?TR!hqxWmG4CCx zF6VAt-#XRaD@@T?MZgt+K&NDOV-Unz6|~8B+O?hz6MoIsTY%Zp4;lVDJfe`wBY<)3 z$EKgH=1IuY<)E*fRyh9HjKEkI53n0VVT4Ar#Nqrx0-C$Bi86KbWvqHnmQt81DM2vt zkSNpM+3I!LvQ=*hvX}GiJUM9os-1!Pd@>8*s*7g4N@51NG7<9-E@f!79}n~0}`cq=)HInW`rd58}?X<7tp(R zUz@HlJRx=>cF1Q5@+Gi^s>ruTJd$Ec(EeQ@2z$;}*LPH{K3l!bTQV4g(+*Y4A#9NK>Ojd4ETNdw?Jk@Fd+}K7snqKriR_7s95Vn 
zWVda&FgdZhsH=lJ1!JcMaF;M4ZTEPB^HtUD&9#;gWbXXCK+XhDa&c$X#r3N!CtC)4 z(?*t$$(|rLIT;aOdr*+3PbgqvnB#Gb`}KGx+%AYI=b$0ED)>S}zf!kJlQxI02Te;Le@B-mzYmlFC z{g~@*cdq{0Rf>z{j0%qmUlxIKxKxw4!1aQ0LDVKb6}78Ggd<}y_fdojRjG9mC4Q@& zHiv>2!-1ZXp0b~imiN#srLA^lTkT%^fhaV!<=_;{@$eK56w?%@X~DD8UJM${5+PQC zL^k?@PtTCrn#~qHemhcN+%(hHdsd_b*Kr=%W_z9V^1$MPRHL2PAF&CB!$6fxqz6u=H%r%(K-qFd1X-KTVW`#%{L{ zDHz*`oFCKc2puNSjhHNaJQsoJzML15omCDe_Lo3 z4tYhBv-F<`(%^F&8tvx_Q!8hD77hNxy^%dFBE2Y(`Ixr_7!20tvPyf;??Ch^QC=oP zv-W9W8i=OAVk|o@wmu@nImEdsxWlF|hwM(#DU+#@Ye@mMuW5&Df6nQI!Qini@T3U9 zPP8ooaTM9w72AGcnuC-Yk_Jx+srt>Fh&2zN7J>c*^|8kAe6i177LmFUDyg;e0HU!79j$QW$q?)U+I^mqlO@->KNO;ymugpCYFcFv3c~lXy1H2@Kq+^P z9A=voj-{lqRbrckBg2qLPp?b zbF;Qu(jf(Q@7mpR1sVCLAK?(UpZD3l_JF65nU?)Xa0tR;d=sT@iO03gLZ57_Z4!)) z305v=*sn#=$Ab~FGVAcndaNJcfEW$b7|uES{8y&ib7L^0j)+e!8j}X zm!vp6C+t>{7znXL7BHA-`Bot!EqrnAi;r2ggVqCRrp*g~zAoSV1=Owvv{^MK;$7 z>^xAaVEtYu0?eaHV@BIqvSa()KHD!kWj>IeXeSho4318F?O9P6jR|=z zyX}Q+bl#+70ag{?5`}5~V1@lF8`-)@Rx0fiZA8-iE?tBAS|7Vo1m|A@?UL(+V|DCS z8=u)|Lqvg32-swX!c>N~cFpOn_KsJ8I#kH)`Yv5pN)`_*V>9jBilXR2!|t_F3dcd2kSgH- zF-3$pyE1j#<5RiSdXDnSv@P~v7sWyp64S8l(bPI&{oM~p;dmTU>wfD zsiGgZQZZNig;*NFDyeqv=&Xx({y&YN$AG?Rz}4e}%m*3ZHMLsUg#}I2_RzV-w!K z)E>({cVvN_&R-`;rz%?37g=`!Mltx@!8&4iF6bkoBs1{;d#E;Z@JQz1mh9#I*A*Mv zHAK>oqlIZU?Yg*Ro&8Xd;wNYmT9%FL-ld=YND!8d!}i8@Plu7ql zakP_DuH7OSry>$u(s%AE3aMo|)Vg`<@PN~3y#(XL#>r}Wg7x;qmE4%wFpp(l99~px zjK}K3u(<+}VdJE>xk{Ku2C=>)wo#BZI3MYgLfaw~o7K`#RMxV={+!J|LUs?{cP?gs zwR!g0>c{LFLH~V2!(I2o_GQuFbKAkA*2R<4?E(FVTUSA5_wjYs$79%6Nrr7$Hen7O ztQ4fxJOmVRDzd4LDQ6tlBDO{tZk1K)-d%tSM~xY5TNRG1O5nP%#CCeh?{&Sei=7gP z%*f*{&^m{?sau1opjp=K0;!lLWs}o5a=O(KvLrk#4-txGUaHxCY4x5+p|wHk^iW$B zZ?^|+w32Co*fcZPcHiv3L=SuR_?Cg^Yg;7><0-3c+Z3itna%TcWAZDH>d~{Oo%X0; z&TF;(Nigmv1`W0M{SgFjC-Ky2pAjHxWQwO5`-)I(ukIH)^z{`ba(66RUS+wmpn0c= z)SN2iN|kH4Y%a`qA7zW15QP!r=3q7hu-~S55~)66^myMvTObNOK~>n+c{p=mfz>HY z*L*d4nU+XpX^G|tfD^AN9>B>qjJ!H~@MX1iZuO)QBT`)SKGSH z)=y!d>q~bq-R+yAFi^>|@q9T0gy0be=mUsg-0tvqxQFkA)O~z4YsP~fcW%*I9jSgK 
zNXczg%fbh9z^XjK*{L(MCtu(R@DGGmKndz?D?LHm)aA&**akuE4;yCw|92g~QB>@w z-c~t0r|frL9wQ>x$vo)&#mk$-$%>Wp1AmGwIzOxF+@ZrVO8!An=p9)qDvRxUK^W5m zJ@`Hf#~OYYlxBFrx>q!wF!bi3E%E>3>OA19uI~P?ooZ|C`qtW3Yu#2wAWW_GVS`Kw zxUJqKm*kRhLv9ibZ8ZZ(*n7`F2oP2X0kVL$fws2KgE&yI>&B3Pf}^d~`nUT0-=E(( z;re=b0k8A+a!_M0gvsrWeU2@QTG09xv(2vB#-zxu^+yBF$KJw zkaZJwO-SmNWZeasEc~MHOOJ5ajfvn&;4B)bC4Pp2aafdI3w;^6$M(cHpFS(74D5bkz)t%V zFH8?4X#OPVn#=D+Ek8YPdx{hJm5|@3qR>9>)_d)%?*Zt4AiS{MekAAyut)zM{p}`U zrlfc8{&q(qZn(S*_4HUDx#o9>Pm6${4Ns&Ut@rq+eOw=`F6>Ig;WjtV{wB<*a9R^P zYnz=Ffd=;I-@l)o6QF_om`t5I)xPj|APtl|GM&l3DeUHN7*G7y`e+7haERmFjl#6Q zfKBhwL>x~tsrHmGV_q*`+xhv+5#&?MPQeF`6?sI_Ba`j8Px@x@ z>a!cR*q;PhgV|H;UkXwsK6C9q3bU1vM{zCy@v zGUYNAs|RwsR#b)vIV>J?dEM)mNEW<=9SU^i28c@4{V8^w!fqxJ!^8Ktu=`Dt>FrEy zvljM=5P=NDCUShDE_pb_#Wz`@h%}H+yi{q2Jd#J89D7;Fujpt2Ozpq<5-`ZJ6Bglq z?6j2HSN`crKxGH>Cf^WZ6`wr9t^CNzEhi7zw?$&$v-q__p6-W&`o)k*PJZu~dsDGD zi|#fBPEwM1hwtyR(mHGoSb>K;UFbP~wjiT04pS@`3AN?IjL?GxMOLXWZAWW05(pyK z_{0>|Ly}vy!mhgDS#>Yp`aeaZ@$A60eEju@3tjs@B$g}9DYhE~+<^4xe-ExLZ%PpI z3-pQK;*-N$4O?FUuOB1jG#pBB=9Q&^`xWCB_5iF_5BIr_^7w8M0ebPX+wZnag=tXh zb~q-?DZfHFBDqaKCXC_LM}3CAu>C6(rn#-V(0>kqOB$R_n{1trMn-vwt@of{5)xt? 
zd?7sk+Gd6QE19?kyrf|Z>Y|l?dWhqTbLdIm>>-mn z4VKF7g8qxFt&8t1kJnbm_m)_9(HTa@UIskpp<6jrB);islWmkQ1hs-~uxXwUWc=^u zeKSR2%|2#5EXPNjUANhyJ`WYAz)Si?0*=N@`g4L5-s@f~_c8Ih)m(FJzK0zP=gC0h z!7eywR!7T0YZ7FAO%3Jq1INzV;^R*q!oTt!JMLk;9?^rH_O2i;?bEBLy{~Yvb>fth zoL7DTrsX-U`=;7$0&Z#^WeJ4^-O^_Yuo1y8$#T&s>s<38+u@Vr{Wld-+u#LZSR#aDZ=C*fl3JG{x@qQ4(*S zrZ7eFV12)B6ks?CQm4Vf=gwRsf}39yDW*m;k?K%q_lQn`frYer)+DICUw#QN?^9~O z_X(tf#mV6%LDqwN)C``7-u8Kz(BQrGfc5;hD-_CT)^n1e+bFX$9ijwo&3yK}BEekn z>Ej31!pF9Wf@(x{RiJcTZc$Mff+0gD*s}_|4Le@5Gtm00v*}$W1RtGM3A){85P|~v zRxN`2Hj*WrbV8UaI_EF$s915PVV1q&Q?PyG+&ja9_b1;Vp> z%#QiEvr7+l>{@8A__+Re_U~<{1zgo=C*Z43n5wyLBeu+M%SS#lk2CAB%aio~h<7uB zgQ#Pr;nZn4cE5^-n9N)3aDiziiDYJ_hUo4mL z#qB;lqmx;bV+RB&Fi_pVDCJf=<|!m#%R!RP{ve7QW15u}Mi=f+BGD6mp=wT-Q*XNn zv@%_q9RKpM6p@}6#hus(BDjHIsZO^4=Mw~n!X5C;zxOc-nB$Z{0Qp)WeW-|9-Jmf2 z!&bCjzlR7?&+(Kq<6nBzjtheA+iR z&amSOtHkA$_^JR69g%xnk!Z6{VUr57EKS(+v}^YV0?m8kwabD)mCf_1a|&cKJI|I02RlpzhSw$`eqDOs zYqdUtMT>;kZ-m?;8pVrwn;>n%uuIe~`xM~D;ljpc7aCXE4@F@ScgOC*^*)wYX^k;5 z3wMd)1~~^cVSCc2z=dZ979CLM3i%Ygki*^yS&pz<1-UTdGo=?&EE+B1=VKoC;Zk2n zd&!aZQZ}<1n=J}$%tLw4775TzmL1bl>!UEmzsUi&^B}i~!+6=F+@}=u;OGY4%6)32 zTzA?@A8}!Ni=9>2DfaM2b@8b!V69?H&i&K1U8U@E>^T|3Y;G zo7;+iZ9z9S*lCG4Oc6MH2Yo}aAjybhn=X+yQ+~h#OqZ33%|X6G8b8knvfKvb!-YtJ(|`*sk=$&}clfe4&V$zN?8*-cPzG8H9Si5% zQ-Zz^x*0UuWS>9_c-?e+PLL)h4|&8&eAL^cBn zbhokA&ig#@ZuZ;TKF|4@894K}2;&Vk@rDL_SFsF;JDzjw{Zs_EBF1SYUvjCdpHp9> zea?gKJ?Qb51!#00vR&U&*!3`8Qz?(C*ZHJ@GRaMeXxCBpw5G60>7+S--E4N<6F1c-xBEu4Ig6fClUmka)o{UbFTCkSFj;mFF@(q zQ@LQ>EXd##Mi2#5n7&G1MKolUVPgqZ_2U~Vihwo*j!ZK*Gpv#oF zXD26cdYIuX5vIEHw0pnL-Cnk+y{rt068p6%3)GX*^%sQo{bcNEsdY7T0DuOs2M_y4ABgT!ihyv}SYx6W5Z6#Dz@6VIm7+xiLHE zQ}T3#QRXj_P4l1&`a$^*`?4r}C2VA}y9y`Kt`v#J;`J0SneZ&H7J;^4^-X8i>wP>+ zULV$S>+9pWDeJXku%Q3F&x@kXi^X}LMV}fDEFz9T6C*jWL4T%L*ExQiITiJ}hx={>Pb>^%|O2oA>V&xeApRRfT`%)*k( zx4s1FGn#}la}%hfW0Zsv_@KY96fA&VclWj*Dy;ft8#Vwedc52qz|d1st^-+;VaQD2 z^a2E{o)e}iDG{Cw^KG^uCoJB5xZ+5Dbv(B(o0G_uBhTOgrVa*eP@b(;tQ!=#XoqCr 
z1?!!azU1`i2rL8HZv{07I`PR^>B}Oxg94{ceuDFGBJ?*A=}ATuwbKp{_%+J2_kDhz z=;bM)#y<80PNfx0euZ7~W%AM}4jjzuJwk3>(-Xd^un6=N7GriKYMFxmYm_YbG#`yP zE>1G7qzjf|{%@&G_mDeE#i=@*E$C)UFJv*;cXLIecD~N^44dcC@NfuQv0H>(T?3@2 zu}9doEtePanC%lGu{BBZjD50ERBAcC)9Wuc+i8)QAAZlcQTdmhL)IxWiy;jg+wUn% z;oZ9TMwpw+#HS*-+I#fx-P^u*8JL04Z!AmvIzjTbZ78$b6n06oQAV>K!mgp1>OxfF z-b8}#z3_*ZD$IN(Pv9h!=E39;;$cNTB_A35gfy!ZX8(ydt-z#6Al^_Z+ag;;r!^hR z*LLpQWYs%LuHPuC>cWAx9`1!=_C^B4- zg7aG|aSH9@UYw!#Zc7&oX1Kq^_50c{Meuuj9mk147=a({XGP}|Yg=F3wti*sqPfzZ z6OoczcOiFlz!x=Gehb8qJs|>X4pDVE9I53n_ji%Bp-C78HPTh~p(u)mah=!#|t%! zi{sT#bQrr#Ng!#4RW(?}fo-*cigF9Dqp{)Kk%d;~DP5l6YOGuox)<0f9EByEd|T-g z1?(VvsBW-UQT!*yfyw?R%n+SwYMmlcdf*e8eF?MUR!EY1zm&t_wUo&RtWpS(&0w;0iQiKF_vp@3DCYkk|j7NfAuOT z`c7q1@-_i2p4sc{rwY5GGvsL`U07`kbZqrMO0}6H(j0u8%QZk$PzxwLiQW_?N{Ec( z5V>5~4Nl-9zT8pB&Qj^ut+!Pv0o9*vSz`b6MTMrd?#r=Hgh+UFZ}!o>Xk30B=w_K| zi*JRYU2ESJ(G@R4?tC|Qh(fLhaP`a$tDB(v!UJtCXTx`+qcdnlL!QC30Su&=J?wL$ zYd9Yz+$ROq_u3o>T#Tb z6&!Y)%_2}Q3^u=-tV$#|AU*HuZaaJm#s^-Rd{Wy!pMuxGH1?84K|f2aO)+a0_RKuF z>N&^?Xz^*BE^d(>^F?s?l0I3RApHp5k>|-I%LP$_Wf;7C;f;{TuXlVd+A@ee`ky`* zkSmb5FR8e+apC`7=t%XWG$HZF(R?3vkOqNdqlw1IRHJy#|4=b z8vgoznd zeHD&&FZl%gddmCMdtFc$$2oT8H>psg-CP!L+7WN6K54~Bu(OwgD1#v=N!<)uwpPT`hJk|a#qxil2x&ReTg=rYEZwtAc!3*d$9y@Q>kJ_!m%nS9T;l8pUg)dMf z+7>u-&9T0M3^g}1arTzm*hH3b91&Bo9RIjT)OoZl|7e*#EktkHOJ@ZpVyZ3bDP5eg zJ(lMSp-kMz^Y4iLQY7ltGsONDTOjDRkMRs(I4=#+;Kd^H)ZxQHz80?dxS>cjDx*%!lSnrp0G zbeeX&9?iW&cHOsKjd($Jf9-ZrE?xry%LqKL_Y~Frj&PXf_LVFGqs>dtR^(9R1Yo{M zZZBYh^a+;QLeZ!$BZ5CobeIHbVNY~~_9{$8LvjjH|IHDv5Ki**qB{2aI0W&Mr69tQ z@CA{XmJF=shU{e_N)3#DwAcrN{wRg7WOtO}?((tdEGE`jk^QfLrm|#upf^6*9+5A8ilNf{Ya2$2=4q+=pHYE zcC@}p_JqRj1ITS{;?+3SekPKel5yB7;rbM|r$lpKX!seiXzl3)1t-^07Gl0At{HEX z?!4QIgx&O~7KGCBY?=tJUFSDe*t3E(qoZcAZC99KK$JCRyL`HM^=^JPtyz$chYC1& z*>Ruio}}J(!YAbFjw3|-lc4`ykyHa)mv#6gdR#=R-~&N_Q0CGVh)l$H??nL7{-Y?G z14rJzbTwgGihehGGX6-79{ZKZWrn1-gHupPCheN2zLdRc}q zKo@>I&E*KXkwV??h((1Z1=z4AIJnmaPUJbhFR@v^ln8!qRw~RO<5^$ef*XreP 
z^SY=sJaDnG(BAYpsJGTW5TJQk!8P|s-vzlDg^Pg%tql={9VgHqm+eY40Bxd(6xcCy z9zu^6@-TkjfJ`G72+|Qy#Zac%ZW$@kR+3P+9621|; z_8M0cgFJe`1nZfAZ!g$oeLQ5-kP^>Rg7gK32{si{h3HGXxddZDvKO>S6xN=!(NG!A zv+W|#GMp=i?XU;DjMmw21XL7eGSclOpOIFK3D)_`BDlF4k%o*{r!WI@VO_(8t;O~q zK{t%3E905ZzWzNZ?u#0P%^PvHz9ABMD0E;5zhEA=6)$cp-eI>2vV_tLa_w$~879s| z9D4$m(*o=66AT_O#`-ASwYOsib88@xy~m#PX_At9T9(4vc66PVxu*R#TO@js+a;qV z>{`^b$6ay`&bnBEzu4=*4Nq<-@llO6)saA*NDQXVm)?--74TlDpyvg ze(aOWxw9-y*II8;+#-lKZNaMtH**X7Wc(swq7Y`Q{ZSMKw&qABVxR#1my?|h8|-Nh;c;n? zIVk9cT~6tN6; zhpPqL?tnow6z!@e<;Uz!Q5o1M4CuQRriGmL(9VXf*;fR{i*pbs|ET>$nD(6C+3;4~ zAsZ#=mi3eZ9Hpm4Y?4nA%A1T~H=FJYV3i|Cz1J4|=-@z8Zq{mYAxvar{lgP&HTK0E4MR99)0$JEd zdB=4w7ffEnCasSk)rF6iTZWI)g|oEE3I%DX{EG)0l@)j8MZ)TZxz-@a&@pUcS7z)@AxM@A?UrpYc5n8(iIt>Kbc4#xW2M~`1p9ufq2a} z`*#<_NdJM=_O#b1B8pox@tSpZk02w^t5=fUmq?t+)SxRg zP85A!vI>NL0-hW&>zOFwu>$VJg0810CGt`2qmS2Nf)6Q751GRj#QH9H2)C6JfIaY| zi)yAUU48@U z-SD1oSbBc_D!VE{Nk?ai-_f6lLM1Eod zghQAtQPhW|{#LB8`{`&eTN9{P?64goGHzYk`E&fcYK!RXRjkV{cHDo9;mW98FA$72cd8lokf5t8HmxYdQiR+fTw%WybmgJEB_Xw$qR^W`!^hbg zpO~}nHhfOi3R2ba)p!)pUVKypq>b)*+>R;e8ahUNjaP)}ZEhCFs6PwPsd&Tcc*6>N z$HO;EmT^C`_XS-W(5shv>*sF1SXgoa_E)|rOn$mF2*t)56Va(rF8%|BsVl3sGTTyo zT6Uv-EY}SR%}Ezw^d|XDAx^6 z2mD1*SZG5dd5Ddi5>o#HaV`6+pg)3b4R!_Gs*9%OH*bJlbqirvD;}%EcC|2Z`S9^Z zD(nUWt3PlL>~SC4pQTc$Fx6&;_0;`yA*w}BK+?_Y1hq(4p4em?e6D0($u|0QNEFf| z+vL-sdlIp&J`OkQZODAxy{wXi&Y+`lv^vYwIPyQDSf-&Ww#6Znb3QD3+$TvMWd-aY?a$s z0k?tc0GL<6ty$LilSOx3;8>NJAI-IgMPfA0FIs6&D(q$u`A!Z?7|b+@n&2TrHpvSqzGKdX!n`qYH7!Y1;)TGZHPb6AHWEEpO@!n;)vR)73s%7`LN2cD;`oJs{bBq_C?ehY|(~dhVLj ztrySPhWi}xrc!%UVFo17g529yywp-fajSJa7Mb(V6HE7r+5^vqT398bP*HqaC7fzo zDCowjt!znPe`ujC^JzNQ)OXC@XqAF)_3--D(>4p!+2s5vul$FF_$7JjHv64|>S}g| z{Y7CXlyZC}n^zex)wm;pX>vv!i*SB;j-;>cLHc4%Cv18=GfN+ zi7V90ZZ99#xpQl0eVs*oTyzqLfdT>gl8>hpXPBNLdqzYWj|>oO%Pf1&V?HdTKyibV zz!HPDCBk$rg6L+Utrk+f-egyTOdrn;Z6>XTQfLO`?(_+1s0H6Gfp(`JuEF%$+{sAqm1o z5qnI~&9qw}P6L@oFR41`D7NdC6wE&N6i%uIqSN+CNTkhB*wq+GXJ0F_^`bCl`ke~y 
zs2fF~3DlX?t&cT%*j~1tzxHbn&u!b)QM$@r5Tu9b#HEE{c)cX-+7Rf3s%4n&wlDQ? z=>k(-q`N|xN=PInmmdhag(zP%Y&(0cw@>l_@?(hmj2Cn*N7DkaL3=_Jw}t9iHH1|3 zRFSj==uPoi0as}7thnveR$G{$1s{Yg9j_J5&C){=bSZWTGhQPmjIerzDG=4>x%LME z8r^ZIrDMxJJ1I#0Y#E3sU}`#IXGPNDj-=D%zYD3LW9#iBg^qZ_jMEy<_VQ0Yp;!%3w>Fix0 z*NedZU5y14*KgKQ#BT0=F~x=Hdu90Hc2Ou#me*qTP$FI~PLLsgTohV5cKiT)(uXm( zY$*zB)gRzR6JbL^Ix$(^JO3)c0ts|zk*n0tLx)e06`GoD?+dtz9fZ^gR~q|XA6EdP z5_NXF0F|bsq*!-_X&~DFp>+E%A%>dW0_2&}as!jwnXQEBEkqeKL)qpTaRqGyh!Y@ts?ueV#keGs|YNOQ(Gj| z7I02Zi$uev4UuPm5@IlObqhEnK&xbl117))LAv|)$_l%pzl&ldM)9h1ry$wb)w8Ux z0O{UXw$%R1hdY`U*ifG*V2T{E5k3!U6(LI&a07N??@N35(R;+_dyqTcz8p(^l?GS}h+Dl8sLptvv zgEV3M2>a5#7fU!vOhW{O2{Q^<)vlFmh-L?os#6EoI0^Fv#FmZ!9f!?p(2pH+!Mec}|I+-7?O z-512#hV$Ma0;4w+QFSDO>@{Bky?$%|&huNV>dt`LDDNuvAePXygU zYFocnSDDeG&?#=P1vW;23fTF>(_lPhc#K2#2uAiSTUfu3lIf7e%Y7VLi+v)h7?c4I zrqU+@W4FIhLOdZyq#fAlk+-$IN<%x&mDikW-gK^cw*5gAwgP-J@hkg^kh@2;tuLpY z5>%GarB(JsT#I<1L??(FsjLS=0V0fn67g(6rb0V02}s_3;I;3(lm75*sJNXCA2ZWgoH$ zKG)ebn`H;$DGzCKG*)Ptf>eM=XpR*MXg0Q$+I)o>aMBGF$8f2S>(;-AtyGvn!@W%e z1=X1C5rI+yHzt1+(AS43XLW7hP-nHB5RtZb%s9}od%eBxAzIp*9F{--U#>tZ8-;P~ zJo~N)ZWfM~V9Qm|H74Ly-6%|bJUMT+n-gi{bt^AItllDXW@(C5YxKHUCufnhM;wtQ zG6J_6=*s#6-d;XSCa<3Hh2sw@!!`Zra-6Be`Il`(!^@-o!+dN6X(tj!;)n2%aq>0iGN{xLWNX68{uMiTHmkx%&bOanZJb$mX&v^=xQTgtL`3g}eF_zJ~15c_# zl=xx=pK6mm;UOg+oyAYEpxY2mOy@+)9&P!exk_V&VP0TM5_#Iz&#*EJ|Yvp}EoQs5J|_RTQs1z;|LTqA)^z;g!7XvvPLWX=era@gMn!eW)N! zr>(>Jo%qU-i-iR5V$Jp~Q8cvp2;{mUJNn-fi985L>c-ha(Dwz&y=)sR0)xX9k2~>9 zA=;Jj@wQwPT{h~mJ+V44ma-bvQmH|#P!v5JDO0+eJt)ZHAX6p4L5}q*lLY0g@X9_H~1goQx`VH-+7_4Cm3`e&XZeJGaQ0asL4oGVB{Wuaw2NE7vv$FJ5|UvNY2V_kuQUTpc8{xBZ4OAAkIh*$r;HO zPoY#U6Y{oDD+)!X2Btcigj|uwDiL}(oWQZxhD`gluv=843ZAi-6Y!07Rdz;@aV^Nn zEU^B=04&7^hS@{~U8$o&*(sJO>=rae^<)J!DvH}-__;Yc<5-Kmn8=jLZ?~NmW}Z5? 
zHQHYiaX5%8us4LMm>)@0%Z39Ox%1l^-=1A8mxtBs%ser$$}XpN^19hgM7O?iErn!X zk_cKD5hn4c0|TW_8va9(4HTdSt-E*$u!$a}vAgHlWRF-A$zyDq5MSfz8A$d| z57^c+9eG$}O4g74D+<%Lcx{99GENG**&HgTXc7B|C=39GOJ?dIW}o`pctW#up^{jo{tRcvXen@00QLz}uRodN>SsCR{MH z1ZiqdtjJ7P*i8kJ)Lg%3i@-qSM2eW=#Xee_U%;_gC4!stT)wbcL5k{&W$Zd1g|K%O zkAbiIsHEP#?5{owc{}-6{^3Yh2s_BU6Z`B(9^Tcxzs$-E6!aE0g7-$T@&hH(z@m~3 z^U0ZxM@Emb5rT~FplBW@HlznUUljK>9bd-Qu|Qap-n`zHDBSfTfGtv-3hoVjKkao< zbS-cQx6-PVl5w9J;!&kpKnpY<1qwUtqh2h*f%~5Y+=xtu(cdOWbK1(52aj4C?VKp? z2aT&O&Kk62?en89W{VwNVwVZh#$lr;*ewdX_Kyxd6S2E}QXLlHE%xzH&l!7o!0|AK zOI{e0nS!)*a5$F>))bFollA)Z3!$KGJh<`mzSy?-%#0=cF{>A3XP}i5OyX^lmhSgy z&TiY+v3s>0^iUQZQS)r{S-PCObFesUzZFr#mBj_iUhpU-l8%`D--K9k90$-$XLI<_ z6KF$-8O*m&JVCe0z>UadqshV$W7T&rGCEfaYSrsKXL~S*fz;Me(U~z$uWRio4^aN^ zYl{UK(P_L?9mCVX3SruZBTEEHpBHj_Yg+i|h6wMae-O!ivhli__`ccp7g3m?0i#CQ zS%uw{55+{nKLn{av$ZD2?il0Z;s^G`53aJiJrpksQRcpalpD+)R$y1#Kv8tMuRjpq zkC};svYj?kL`pn);MEd)LVy7~wTq2R0zOlGA)2&;Oeyi@iNa6GxW}weLE6?fw~&Jm=v+i$t$Hk<3?^do#=k1Y|@ zD0w^e2W^EY7cVjOoP!V#4LEVz6~kg^D>)MUbnR*@-q%*T%<4Toc=6*ynHLOsiR~Ai z0f`))$I=Gb36ybUSKez)5f$B~%j)Vq6syWWRm*irARFono~%Z@qr zw2#Vz$M$oDH7X6W0{0TDY__Mc5C(GlheI;oXNQuG)1%dbZYP1im1}PZ(|I0|`7ytc zNIZ~hMgVD2A2dbYQ53^~AIuEIemp9^Ez{ZI}`6(#BWcpYVKMV_)#WPbBYCKlS)7NqjFsTr-BR*{`lET-T z#>zS+id&ci7?^(vQgAO0`(GMQn9nKL-xdYl$5-3qBGMLWOM#n>BIJ{zP#YHz+z<%p z1KSyVVAE}e2o#t+V7yf*tO=2_@=R+Lq^S1Noq+^#qaEu?(6mNkf4>*xQ@r7R`)V>l zH~OIzIOyLJrk-v+dP*a{mnDfnEApbS^9FeU|8%R7M;QbQj>k&~| zCTX&;ZO433Nxgd7^9s8zr9HnY&;Be-53t)Fvo-jbXJ9r~RJRAcw1#CV-Nj$} ze2DhKNGla$#9v&g5B$YdRw06Bv*s|*Ted{d?flp@!E}DUBawxX@6oRtd*(5vq04DJ zgG&GGcfzbN5@27Dwo;az6j3(gRwE^jt^c4kl3L)!*gjMwg9Mwa%)UGkKv(l|6wWv? 
zzITe^8iBS9;vx~d&nKb|{Y3I3Im{;txFj;~vDcDCVu;ZD=G-<>h)SdmXSo85?daCc z_Ddh`*fG~C6lSwjxl%dVM_VN}w?=d~t=;)!TZJi_bmJeeokH&ON#`dV2E`sx7)Fk$ zvw4XAt02{-V_ou#4-uvvFoZ*PmjLDPGtQ~Hn-HHypIhv41>My7=j=T;Nirm~Jj zv+a>Y>;znZKki{}Rw#sS##4f>f&tiyNBcU}CX2?p!e~HXhXI$Z)i%}V<0%aTcvdXP zc(yi(YhB@UIIW<-3|oWE^+YYt`epI`{J+<>`+S+0#mcq4UHNKvd*yK$iibsX{TmfZ zK~VgAk2r$&ZhOSKiNK(ZmmptX0XJxq_-yzt9uUQ?t$0;Ka3eu?yT%?A{WDX&nak26 zqG|oWy5K;Q?IA8R^U{l~oZ~bE%e#PVu^Z}_obd)_UI|7|*`A=V+E3+7m#vTe@1wmCc6W-qfxMe&E0!?FUyM^|bgQE533gE{s~ z0fyTd#I<6&|zCH#H0j*1svlV+RJ83?0C26TnTiub2e=PCHxkVode^O zO*|V*$>5MIWw7Lbr8q_k?uUM3f9D}ecGKa}yevqy;VcXh+ZiENhlVaNFlvAI@z`?Z zr1`1OOrzhbt+4x^BDudEaw`np`DJjr)CMZXEwelXXW0x!i=qWynOSIS1UMN74&$At zbaemWh_ABsK4;*`1sgBZ?AM~WWinE1jpM>}h(`3rqTsuUXe=g0>?2_&d%~armi+X^ zqytBcwQ51vD*WB9!kgRfM1oN$o$nF!yP!9)y3JZdboCd?XKkx6J;{hLQO5)rzhjkB zjs1fVbr(i6?5x7BG9(4$+4(I|{4TyW5JKN)?}$ccPFKLutg}l(T`*8c{e~c=XYd?{ zPJs}mKf@_un8NHNueP`#pABMFSp`$k2_K<28Vn&X#2w~YVd`MRlBL|Og8o{flM42r z+OqPX$Oi%!e7Q^E{t z@XN(DR!l`}7txKyXt-5T-q-D1B4espYHthE?l9sS9IoH@8y|E6o>bEdtQ(MJ=&rl>Ek{Uk&~^jEwodDjBiq}e%7HdAK?EL zJ2)r0>)ded3Lgpk8&V0wD}HUWeIlw~NBFqcsk*{0ONHnLH%sQfLUv<<@^-~e1iUOs z6q;~XQa=kRtiE73Bj`(tC$R7bWkmQAri$QxC){xsi;W^_uH{s<-c}2`=JKnHn=oPe zk9{qe9^X&97&}UO`Mrg8pwekehR^2OD3Qo;zy4UVkm~~Cl&O(vHqpoHfQ=NRkgE=- zPtz=2nAR}Gp{WsjPLN$XU>M;JWhZYPfi?roqdiva6VtsR*ri^XfCKQLdPdL>Y18z0 zQ?pFXofXlw=V9!;@w@PjD6R#Qu)1yU3A^7E91F0;e;@))3XYry?L$EZyrX=NT^V*^ zS*O2UVGR^NpjZhPB2%E5j{JmAn1t7{!9F~G>=+y3lf}0nI<(e?3ThKCDQz#sD;TfVr>vqwbagDJp{Ya|A{ox9PaqOlWU z9dnWu3An|O7Quc#N26ywNe*3tf*NeKNc8sx%rkED;XZwO+D?VtxXD(w)g^E~jvnox zFeAX^q#=ab=%e$w*yR-E+c{xZd0~X#OOD?E5XqkwRDbP@D>!4nFRD9jNIg$pH<0~Y zWgm-9`KWzitIEDQ3CIv-h2;z5S|K+?De_cYWcP^Tnu{fS?0E_^5;zD#$IxfSNM$I) z9u(4-T!s@MnC~#BFuGA`xuUy5lS2`#BG|K{s123aH(Y451l?XDnb$)$R|Li`tsr9c zKKv%u2LGtA>*+9<3*J`ZpM7>l3~gP+w*D@H8^R)ViMyo(#og2AeOSMp!N!>6$z)>z zXX@uY!nzK~ZuyVGOi3(XN@jhOmR|GYvC!@j(5lku-zEw%K$2{j%$GSy*nI~N;Nv`O zSw1h;S2AsCBKG0HO?{~dZg5bW4aKtUr358VGn;L%cuF9ua~R(r_Np&NCqw)$uCh}i 
z^t$3MIwL31)kosh2Lfd-!VXpka8Kaq9z${juo|RX3>0rq^BDGl89>(7q^O*I2j`u~ zL~_$VSkKSw$-bn7r=z%Xn2`D(GB#pku}sd&@`NgYBcg zD}418UuQ-FRTfP=D{{YG5+TCWv!K;`LpT)tdkHg8pV$oYPSh-mI#>qp4E?wLFw}MRh0j}aM1$u_W@s0 z7~k{=z3=R8GX&lKr{_R4N3C2WW;~7CeMp)b%S7?w{}7EPVb%+ASAOts&4Qq+3fTdzrw{{-Oo%#>yWAcXbdx(M zOMb^JGm&8kK1>|G;fn9moM3lz92x{hbq4j{RFN-~6I~tLsb2`XJxb<^wB{wZP?{Z;cAqQVV(XQHkvy^R#A<60pwT(B5XsM93DR2%=Xv8VK9UBG zD@d_-1UXPoOtbeDbiYxFCveocJ{8@s3T-LZWP^jnk2hD`cgAtQCJF_nVpH925>OfY zPR~RvY^0!T3b%&G(d8#*|2IzV60rL}6&q;6g~44zCx1N0~r- z1T#J5f&X^``{NCpbhy~!b5QDVl=zFRMg*Ev$X(rjE#TTPSnAoY2>V@~H-P!<>aY9s z+@?6#2r##uv(X-o5~e$mEG)($$0z8DI>{yFy+{CQM9OJKxB<=NMvTwh3vOJ`h4li3!7W)_kxUCBr_vs{}FII zUK$rP4&QcZt}hJ%HMC~FD@fG{+}Vu^^L33#x4V3*Ge;`VF5PB*1Svn@hzVPUPr&*h@>Df9%xsZdN8E>qMeN zBt1Dc4HVK6)p?{!)@uj(EZEOyx;cj}u%C&<(BQWD!it6Vb6*0U9}z@ZAmm3GGh9}i zz$Z7ub7@TAQ3#URQDH5a`Lpbd!n7rlA!G07g?v}4W};c*D5QO8$QXNHv2GVY2_F{f z?RgN`OHNjyT(b$5W^=&f=_xXerOOle&3G~aC(*j667T`6n`H{RH5Q9@*)J#%F_?x^ zySc?9@oP&xDX_OHXAe89Dv_1v#7TIf=MWL3YnT&t)GA+mLlYS zBy~eTV#poM(?oaY4!tn)Zdzu~iN=yXd!)slSC|pY)O+Cn_=sC?z189h)2&cW!1~Lv zP7zqAZN+QaiVw9FOE}VgTmX?qq{AmgLQCi~ry&_ampSx$%Tf$u7JT5lZLWufY*g?u zmwLz~4wyLDRtsv!$8e_(C-Cal_!PXe)|@VPkXFNFT4_6doK7TVR}_|hY9WAL zbnD(rvIhOkL~!F2ubPptB{)=c#w05`#YQMhV`DjHH-7LshBGp#wSf?D?rd2g95z!x((ly$6_-|(KvY4G5fQS zR*}ROmRN^}%+q+Rv%KSBaH6fZcRlRfKj++@WoAV#FNtGpaW@INh9F#{uk==*AYQ#v zzd=bJ>gI@ny#9hT>*!v1q4pCW8RH@aH{cOLzRJ@(T#$8?3!h}Q$Zlttz`-bH>qKxL zqnJskT(sCrqLJavHJj{}M26%7ya3u+5ttT!7Gn0MKyYwnbY-yWko?QoSEgMoG?bqm zjqx*JUlol)JJ+tTZz>$D4g97K6riO~CD}Sp&%mFhZS??CA41u3=@|;sNex`K1+bKLa70OWKcxyKbE6`Z3P4KBBp+Hr&v_;%XZH67CV1 zDrw-ua0%`ebdNKE%*7VYzJcB=LdGG*QA@0~TIQ!MRhX`fZ!NK3D(q@R8X~O#%SJXw zG>VpS^Vj#-JP)a<=iN!R!b3Tk;%u=;kj_ntW@Xv|4|pVimF^SrOS~AVr%Qi9Lf1kr zDbjM#(>$e{T9B`@%1Yr7o=1+La0ibkc?HxDXj4%%a7bU1gyy@p(HM>(4ZAwv~dKN6uSo6{e!VfZq1@ zvgLN(Cl16G=Ge!A^a+zQA^SwYb$GO{6yJRo%m?(yV|J5*?(-gqhSntesR$}r%%)X( z9~>+SMP=ySXSe|UK|V(wZ66k-O_Okxn(5Q=3KFyBK3z%?ashd^*{AE6(PDKze4%ug 
z)hkSGfz^mzk`HL|6qX-?@meiMMWIT5_~p3t1rLL}&Ov)ikRgj^MJMx6Cq#EnY+~bX zfnoQZ=UfF82gh0>(8CfGL@Z>SY=S6s5DQ=-OBK+R$Y315Be}xVo{w%JH`)1uEDy?y z6{JiKXXe{FpMr&7jacvwL58Fs1~{7)*6&|aX}oDyys2Eih?*+`1G=m1kk6>6@HzAB zgrLTvYIPw0Q)lOVQIUel_Ms0;lK*qXq@;};}C`Rg9jsL3+C{vBc0NbjfBZWf6t zpOR{Q6r@M(WeYXE!#w1;S;-6aSP$9Tmtz~DK#<;~ps-M)FoTO7lm=O2wRxTpyak9; zxyWZe+q~e-8eHZGGB+{Q=d-Ls$ge`FoV=UB*xJ};|0~R3X{rz!;MnA>YH8o7voDtr zORM8`4e`2KyG)Qd<5%Q}UGC%FTvc_ZX{KE#=ql=t4yfJf(_-wn+#V8e3m$(Q2u75O zphhy9Ikr)VYL3oB^cNpM9tICBrFP0gMBC*_=L@AIcHFn%QGK5?MB84r&#qDw<-EQ< zI9*-up`JG}HG8X|Yay&X&Pl?I@`Tn|)=Ob}mJ;Ib-rs|0;n5X#j{s$kd1%Do5d-W2 zpXtJyMi(YiVTd2IWJPEp$##q6NbM1y6F){BbC%hk1>G3vWhG>BA0L;P&U8EH3t%P! zlZ|!urYJOUJSKz=$sNJ?5vw*lNF}Jfv3%Sn=9y;LwYI15~2Kt&+{32_3dfR3d3pt z@&EZ_tv=B$xBSSyJDo%f53PM+rtCc0HKOp760hD6JRQ_>M7HmXNagvFFfKRkrmi%# zNSnZ5mCT9t^J#QpMtysdpc~g*6e=g#bP-%t{9x)WakeN-6z2xSlRQYv3s1Es0qP6% zuNxio$E*ekcOp3YdnIAnOo$e?R$@)%-@=S)UT(I1eugVJBF^e%0yJb4yV$fWIQ7Fs za199*nJOHGyk{EfY=loBExChtfOiQw0#uHzA561Q!bVUrc62Xr86GX(f}9*AUT z7uhPG>=(bd!PYCxn6{TywwGbwbqkLcRwW{R42)H5w!MO`Rk_hLxDnRu6EHVDk*5ER zAocdf_T^E9-RyIYhW#A8+Wbyb*HXT%dixBX!1KJl;tM>xwYg*VHf!_A18d*xN!QFI zd(fb2Sx)IDNP~jMRwRAOt-C0!WxnEyQ~O{hZMEJ$4;~=7jM`uiX>$KQ-EEXF0?6G( zuJ*==60mep5~)jgU*h@x5k+a?$u-NWc=Ietoj;l=a9pUsGJF~e#D$Q~>O6$jCel2_ z?QXB2`{H<{jp>@v;Ijpu;b#VZFlO2z5t+MG+1&e`fLmcO{P&|Md&o|R#&}@KS$e21 z39^m?1~J!IdshTEV<(oP!+h9oE{CEnY%PyhZwo%34c1dct@1n+mt5IqTb_t?`^A;IpX&j@ri(<) z(?XmE)+CC_3X79uTSZ~L%Jxabb|vCD>YnQUjxSmdobL*1 zu^(I&#}+qUAwCg>(Mdz#9zR&KK(1|QHqNsfggtLwzt7k);&*v~s0=s{+=yriP(#Op z>*MkRooCB5v2T-a4f3;%ZRxiGk&O6V%9|1?|{4mU=F_4(HM^t zE!bv}Pr4SXP%@gueTFw|IS>tH#jILXn$@}mgAlI@xRXVoVc|>%B1415Al?yX6YJ)D zDhIx=3A#;ubnA9j$!HOn$u}!kx-i9}P+4T-6ah#n;`*~zz|GVUc}nMcS1pnT_&`Ja z;940we^nG$)u`|@)+R{F@%oi^#z&pnh8T63z2za3uJ8SAL8?L>Jl|{%fNw#{1PlU# zyL`DxBD!Xi1I~FDfj2qv9irApDX1;!u3k1;VHy%|n9YldjT5B!@ddbpdDH`ZD~7Dx zgQ-04S%uHlRl;sdxLzNNyxG8gEU>nPa9 zcDpdq=hn>OuiY)^R&T6;I|+}0H0wT5Ig3nu(Edw7#$eEx!J}+|0Ch7PGhunv*hmp5 z4RIPB@}KbW0SBavcf2eSXgV%_;lAbwG1QS~LYAj66tp|`E 
zUnigig9@-5u{YW_QK&W-;T*L1jtH{I(lASs1)qYQ^H(CddB+ntit*2j;Km_Sh6`R4 zrvB51;0<&LxKRk+XQbA0*g8dZ+wI${R?2C_hd$G^spuSiDxiU`-J?U#x90(=kw=Z$ z97{^-KvA#4d`{9p{;CLpl-YtsUmw= zM0y>(WNqbn^D$4LJVY4S8}d9M1^u_&2-+-DMWDv#SH<#a^;$tj;oRa?)~vAhPbVCN z`3XBB5^ccd0{-lN?Gxt~WZG*AyJa!3AXU<$r$up#Ed$1jg*$R+_8BxN2Hq$v?p z3XiVQlVz4DS~1nj0v=Exfg$K}dse{h$oPlgs*qy~d_2B{vTUCK9Y;)yU+?`w zbPdhiQg&eERz;u%?62rZ+W+{}L$e~x{L4bLH8PC>eocT*$b)RV4-dhh^*EDBX>gHE$c7lNpPu}B^=+OGEyDFhfFcBdfCKea_-yLiE9$ zc-;Z(ohW58ChUhMV2DkBgdh!-I=zJj+R0k@t|@#D;K zP6W5)IAY=d#;#a&krHTcm)O@tar25|7(caWV~XTDcA}b7;CF*DB&EX!7yx$cBw~KJW>JmxN)-2{`?!{g_V<8+1BSEg6DcD~NNTHz~`0DZfKe#k!VF4K(N$ zBnTt97f-SsB5?8kbq$5`NS7Dyj+RKW0aB45zCT9d?751U%voa$&kOGMtU98oR^Z zi^(zIf+^2BYxD(jHNhKpwjC9w(_^{A+c5!}#%4a0we+f>n{m7*gwXDBB)2Yk9WOez z%5GXlB05K&aYOA6A@>cWhrNE#T?tws_Kb$3^$?Ak0xPWBY^Wgoi1W2eFErMlU)O?} zIV4ivp50*SilTvmgY%frCbeMr9OZ)aeGJbJhZUypNVd%4h4V~Z4j#Qr5rX|Zs+eRH`>O|KA$$b+Y6qNv4H>>!nPjUcNk-bh&+^tTbS z33Yaz=zcLEAgb%!X1mc7gA=ser!sxkPefOJzk5)AOctgCdNw;fo9FMS2(+iO_@Lid z0m)5zA&h|^2r}N7nTy)(9`x+qvxoKe3EwPfKp@5L6Ljs*Z`~iZhkV*o_1x=20&X2} zSgD5>(rm>(e|#&_(ABop=f@guNRHWSgk94EBgiFoOcchly<}^kTYxn&Tz-hCf#=zx zX@Br?!Q(}(#YNZrESDIiePt!kEz1dmZHN$&0n2QcjTB^~d1io(@iAQ2^m#ld=-M3^ z65toaW{IW|kw4MZK7DId@Q}GpkV)fQ(E!T_X>6aEv+-X0y#S4v5M__CcAs@}I1LVo zy(j3Zg5jPToyrotWEC{Vlw)!vhhyBO!YqOKfo=9hg(Fyh7dh^vwZ%!1#9g# zLHD!?dmk$jZwQwc5UplowRr0R~{ zhpa|ndPx+1cG0gn;R$>mVk{KcX%XCFfO!_SbHW<%eWx4wm3c>y!rtIU{jS1YS2Hf1 zu*p~=5@xw+H8Bh{0zkX%#{x7{GmnLbk`2~h6v_1^s!h;nr@ESzc*Lb+fs$8lLmbo-02n>lXEQeOSHD2&*N&4>YCz6QYf@YcNu$+K$&-TDfS zD46%L?~6wHlsI^x{lI770IDbLYds9!J$Bo59+Eb>D9!E`bX`U#_OSKv7%Q5Hy=Pl* zVT$HX!M*ptJSxae;jlPFh?Y%4y^>4DQ-ZE#Cl0){hc&Rzo)(SSY%5;hRy@0{_+VRc zlg;*}3ACn!zEcskt5N&=N znVIE2ZdjDTJm}*(8u(6j+slFs5=jT4Di*a)AFrDa?2jwf5%0R6&&+hekj&SI>{`Y8 znW+p$u+DxY8Y7aLfn1R#`TWPX9IrgS%7zFse9?@_EP{|t^wCs5wFpHyL0ZFbXi$pn z5g(6{!6FWjse<(X_?{PvEt1GPxB%mxf^K%1TDel4FN$j&qbw1&gSIG&#zh72vO`r0I&Ct5nDbLo_hNt3iqXCSRIXlct-K;(wfi577H1QGmx|Fc(meZKGc 
zQWExm?S1xHdpLVsgGU!7bGSvZrv-V9cEFKd*dY-ps%%NWpfKG3=#8ugs0}*~J9Z^U)+Lx45V5l!4iv40}*8)Ta6odL7lCAUgY5YYWl|h8O6_+_s1k zswzK@229hr2iB%=|NNvsfapx*d6u=CC@`SCS&99{lk3Khi?H_uL(itNn1mt%skyXd zh&pfixtEPf;X@C zW@U@zdHBT4r8dcvu`HfRWiN1;jWQpHAj=%a^2)fsvE>f4BI6>qQIN_;$S2o+FAypx zSKI59u$!Q2Zoz6@ioa6gUX6i@RCAr3^rw@!S+)-as0>uW(C1>Y^^pjw4D3iG?Yk>L z>Q(#K+I0%+6UE{2j)|!j6bl;cCJ`w-n=X06$Pf(6WLE!$Nj9KA5J}%kEfUM6>z~u zB$3)x#+1J%5T4*3g4F@{K)pl@Ombm_%L~gFg+g#y#ua3lP~dP-CBk9lX*?7k7FLnQ zM^`d}PZ6eV_=r;&9g^jZc`j7DT2Z0SJ9g-1YXoV0rQBQ>ZO@k3W1d}q*&|~W*eV=Y zj#f_VIox?#v@ra^a&SbeP0eA^$^XQ>i6<7!u-61b$=G3I-p<-QL3t^ip3_9ZkawR% zc@Das*u?jT+U1d}?aQl3MU`iC?`q#xSVNLzHNGdn>yA&JuGzk`U})QHloWF9)--`Z zJ3DE2i2y^#s+RD;@x9Y*o+wl`@QK+<2RdZx~ zWtkvvN?@v22U{EbaVibtt1XW3w!t#(501*I!V)_o1i#&}mHV8+8vb;f7tSJ9Tbqcm zRgplz>&t5aq1`#}(wVn?OQg_iqaz%NHQXs0?DQ?+{kRIh-ye&vfSYw`W!WN8K-?2C zj^qo4PH`{#>*ZD~0&jgtM^*C#{1w}@R^)aNxGco)-ls(W=p&?yMk358ZxX z?q0V^FXZa}BEhf`4adYN*6p@$h)UbBm_`$TU9+yYt`?DEm&G5f)Mwkz zF|H$Xw7bt1R^xZ-Vha_fK)LZT7~E!pUg-p0i~JJP+-ecn%7iUB?I25NyKIlhlp?YZ z*ldpp`r-UIshfwwCIeRZ0YBlpyoQZdUEt${1p#NK|j@L~1{9phQ&& ze@jib+O{di`y40!4C{OBkD~cEyKOucW9?L$Lg&{F`CFp!&SO*=5}Uf zo&8G``UxkXs3m=2Lpsa4^{I7Z?GjPckdd8xHd-{NT_Ga5rs_7P)=jnV3Wf@jOt6H5 zdV7jH*vT-L{6vtq#Cs)?+#4aJ4;!x{awIFqB7sPfoP?|+JftKvKgT@FqjQ{Ji9myN z>Dtw%c_bBCjE{=A%@HBgM1O2I6{FlGN~9^l7qKhMgLIBXBJ&cq=}h>`J)dYvudjHO z2$YMFq_rVodjvzdve}Vqzw?AYLer2drvY#HBSo#7IGVNNg0OjNJ;<|oV#N~;_A>$MQBGxZ6b>a|IN5u6ffb6v3poEqP~kzLPzRAY z0E~$&w*M0`6f#flJb&wn$U*vO%#RF6r$i*@wx!G3mQ86}+F~DzqMp5KpH9BNyxF0A z!>zP$2!=Od(S^}48^a^ldm4Jhrw2$uI(P12wul!pAXhWE4;3e#R?a>l$)0KQ^~!?$iq+6%&Auz|6?EPGq*sA%NHSCA|l zZSM*4_MY6{u*DT3w*vXtIIljW%lS6ZGh-4AJ? 
zNxV+h{EI|_(^C&~@T4$rVy(bM&u=|uP~R-8Rv4D3qfR|CY^9*OpnSCM^mGZV#=ogB zbr*GJO#ZC34@4o);?^|`<{x_6u06Wj7aBrZ4K1x(X=D4EC}i8SoUYhGfLA6_{mDk% zNRXoBCz4VOw;y?0UDH`_fzWMJYbK=Dtg-%{JkrZWEBN;RO0j;Mtmh8*6onkXXb~L~ zrt)BiF^OA|F`fWIVhp27l_x+)m}@4s_Txo>!`8j6D>%QMmQFuPBEk_SI@PdI-eGDL z%^aV3wXII4?5DegMq$d`dq|(YwoL#&_>!=V$wBjW5tPsR4VWsn-w9GyPCU3cEIY8m zo)?9xfo(=;9AWwc%=}o_u&9hHAKY??M8-H;*ulNb z`Aq>vx@g<&Yl5m7*<#QY<_^2UQ=Hf~=|ok%b$6KB%(3EB%NOK((-x}8ek~L_bru#8 zM%o4uC~k6e(zYv13(<>WwqGFdo(reQ);}o#PItjj12PAdYeUoNvvRl{ z$V=nGSnFEL_vewL(XcR5P({IKp-pmt?SZY<>tPC#`f`~a2vdHGWqFblizlC$JIS&I!!gOZRRe;9 zmhZ_}S+L}?GQq%b{%Ayq3T?V5p7-Kt`&VttM594D9vriO1Bc0VBFJi0boS2{lS>7b z&ehhK&QE>_>9tLws3hyArs@{kcF#(Fvr6)rRDQZW#M4mSI%HVFp|%I)S1d@Ypt@#B z4`-p@G}Du%nj2GFG21H`I%NDIHcv-A>8ZwM`+N&=K0-8%=jzaYAxcPH-nys6N`!ey z7ff}PDjc1}2YrQA?MwKTwpLLzP%N7h)b#>X$@LjG>R$)jAOhvokdH5gErQ{#9FJ(G zZ4!kp$o3|ZtVKS)S){;7F*@Ax%hi-`8ik=CVx zs_>Rt&VM6`5G?6zLANYKd39i0wA^|qZ5Xyrjh}m>w)#}vB>RQucU0~-n=DU|cNM71 zlnI1ZDB&W3qyBPFjYW8!o&29*7#BFYr%NIj>?sgA#LR^4awz+aIBD7E545h6qpV0y z{E$ezu`ma{WF48S!#V<^foYp=391a}$H@Tu#Mow1apN2ZQ51q3qgYX=kEC zdfrYXIj1{3v1IkB`er*MsHR-X?pxvtjV#HIh^VGScN5t=p>u;uDOLTNDB%;44fIlb zO9Xg}gcujv-vnvH+}zRj**)!13-Om?HwlK}1cCKLB>Ho+XcPta#OR}Y`sl-I3=mdE zBFTwG2t@`~4lQPlw98_mz>3t+DrGl$oD=95k$j6ybOL4DshyWi76E3FlxapdrB*OB z6zj6BwoI6J#PX0=NN6v=#wldUHM#^Zwu(XlzyJLk?1;i-#|eL&vxL(QNj$7|mAxZK z*=V&QyLvAmyb52zC3cN4g_HDU+`ccQF-a<~VOXjS_m$RDQREUE!%5FQ0=zKJ8`0@0 z@wh&fT!iftpgMxE&kOtPpa_%(TcB+-xDD;N$?rX0^+=V;To!g6AHZnvr+i@nk_=eP)ew>&~0-?qsB_nDp>B}3~a(RdpMR~ zX|E|vDKSh@W^V{kN;nu;nQx~(T@Dl0Wsea>H4c&SM#KodCd_k(7GclyyAB3DRa;^= z3W!lDEm^iuU^EBY#2kW%5Z+P9zGjBtIRRFUY`f7Sm=w-7Y^P$WNmtZlJ@&Xi&P7U` z{b{{Lp&HMyKOFAi?wK9z=N@j|Rc*BADcC(dEnRI|&4;gVx^xq1DrAY<{fGq_LO`s`$!vJLSRFg}9) z*S($r0y+3sL|I1|R!`1YWHS|Z4&1tVgDn>^@Q+?FCQ%;sCr)f#XImA9J(f(Rah4?8 zJg!ssj`p;|u$Dww6CIC(g4EE>9WydI+e;n^S3_ASyzG&Ku+*8b&$p5^d<86688w*r zzakoZcIw#8?o$|+h={hrmJlJ-Qc*Im!p5iZm=aDl9u*E5j^N5_1dA_QBO3N6+oj-| zBLjIWA~-kI(#-jwH7N}*?bxBKZB>|0rLYA3?$-q<+wm3iPpuq(4zi~|UR`VNcr1nM 
ziOHb9dM;h?$b8;@9&Ep!z=i=D>j(GQm7+3!@c`bBB4a|q-%`C@r6@YGqBtbkGS+&C z5(-;d#P@xhFiZ~UGsJ$9j_a2wE4F)uVY+z#{Cpelk^8rdPL^?9HO(I^=A?a&hjUwZ z**s6jEAtWOvDLz~9QgLT?I}lnVk7zRUlG!G*0>_FVE*OE`H!|rW?4K9*9uX-M zf%g}-Zm=E-hsujpWU~jgTRl1A3cAU7*s?^RC8L3&!3qWS9dpgb@s>yS$Cx^|2-SjQ zeQJDzjq~tGPJCy2xN}B#+n_KR6rt9?E+BoLvX-;e9#qMTK|F%(Qz2)$=6xGhQHc<)iCgL00wEIksE0PzOV~o{d?X2rznb@>=_c zhcj>LWFIRWK5#iSnOfI2xxp^^Jw&n}BXzT{33!T{rNJb&{m6-_U1KA&C(Sagey@mu z5w3rBQNleI7Y)WaFq1pDJVA91U8!IIdc8g9IjJ|hL5uBH)t*eAe$ljBXj4Rq_87>B zx1?HTr&^Xr$Mj1>6nRF*pRoA0)8;6#nk2JUlA9P=S;RhSwn*X289daM2(>Tlr>2^hMLuKp*`uO|=DwT8+bl?)88`gEws@2{ z-7wvDdK4Gt_OilJui)1GFP?5lULL2D_IHQK?c#F5E_y1w;AoRZfYjdd} zUsy?gA-)w$?Aspw=DsDT@fUbw`@`rfa-e&wAg{%xSJGw*P`(nDVdeIyQ0P1HVhI_@ zit=_5$z5jh3%N<#Asnh%dJ1!!?M0_xAB$~l1*?P8qVSF!?4sKlfzY{Gc_8)ofyc_# zx7pL@Vh15_gw0(MsJ+geI@#3cgE{zN^Q zJHUaU9rc8f@cj~N6C_vJqWZ|gXLszj3!Vw|j`WHDRhY~QN;!J|frA5kQIP2?;Pa-; z#cST|C))KFUln<53f5rfME71ea0+mJ^oh-}K}yNH56t4gPhlDsTZ^npM+?yem=wh} z%8TRd*CJ3g?1Iro*FB0&@~4kgueJFKhx$mZ+9a!kOVSjCrQT|oYuiL2PY$3NIoJg6 zbb@RFFs<*lLn55JeU@fVi|%6(GW(M!)ha$#!us(w5#T5CXtS4{|5&@|50Iz}ZpD!2 zi7Z^%6{6{#vPZ<$+pZLW4sv2fWNNmLAkF*JJ8YoGM4ok4+dU2=Yms&KGr{nw>-cC4 z2D4^}6vhV$l4R>tso3iLIZis(9NlCa1;xOl5ew++dVE_=z4U{h6!aN!pT<-QycLD? 
z7euFNGBP{bn+ntX*_F~3@aLI3sYg5GkRE+6TkC&1%;QRv&+6X}+oo)iN2!kmVTQy7 zkOTZLfk4U`f4E8K1*PJIuiWPw}9 zj7firB`5deQ-pZ4Zmp?^EjCw#_Ht__fS|m#s4GSH4lN~T_Sm+{vtkd7j7FUh3>31R zTd#0fDilg!eTOiui|7=0Cbr)p=1?Y}J^!jhil5n|tNlqZG->Ywj09mkG$Mrx8k%Gs zX|E}b3vX~kdSqp&bWuYxTs?Ece96H;z@&=&_o6B&0!*gX%(QL_hi{{AWWd=9LehE?az%}7A`06og$WxX>m$*gu_# zXuT&qxp&o@yXV_s!O*|`=hku)+&2f(DOg%(8E3yHMDNon0U?@`z-M5!C?;%9b(^5m=`ex zBH#0EA<8FnR_rKma(JR?t_%@o3WiJ*OsSkGiooa9F20PDleH2`7%V!O7Dp=dzZ8Od zY(pk2R{%abvp}d+n73&u#WX+0qXyhLz<#5!w>_$=eCb;}_Q8tL_LRcp_)0zB+lvAW zuJ_z$uPf*yh1_T$!$ZH(^AFo6iVHc8KsblXs(weHz)HuA8>~!W)i5?n>>+{BAs*Dt z#czb+^H|Ls*{7Q5@K6g!=?(UT*HeSe6)JYdswc zoC+LpAaC1gy_RaQlL6~lB;x4^XAQZnijseeVPaAo@U2F>k zy~C|wl-1s9tSuLnqMfeUWP3c^0k?$D`@@l%_bhu^ki3f%@dJx6`*ch=bgBK*(dV&u zM1Yf?I!F8B^8oR^YH=hSS#Q^g!fU!BW^%p44B`U@5AQo{*xf_gGQGGwsP28*I$u4eAB>J`#O#?;|-e})lujxBBnj~-fP%LK?gQo6%T zhM<3ANu>8|w+OUQBr;?B{L!9cIKMw2Kqq~9ja~8r!7xTds%B__Unvrt!L<&bF*kIz zC}hPYDU$!!IKq29b9013`wT*%$g(|_&kGUOQqL$BO8wsMWCKB3XU4S1C$mQ*(`ss z7z01J-xE@UZm)4`QkzBbK=PHvbgq!pikg9{*x+| zeIy!vBULxkE_{(N4CN=WN$~}NkXUmQ7OLy*M$tm!XDs+N?v;PbTC};4b07D_R?b(wfG+UK(V0<9?Fim)pD+>Mz=&j#x%Iu%?o8h?li!zU8oal1nxye2!+ ztmM@EF3&_C_p0@2HLYKY9$tnvU5Q?N` zJtPWUlQS!>T*eAf=**ikY^JA8HLglEO-BL8W;q3Q5pMgmcX~z?Ee^&m4rY$sddv~g z`9QK_`7KKtH9ito6xFC(Xe|(M=cvK{1g}n@q>ejuRO;b4Z)qAAwv#dR7 z-M&=a6k8(*0~~KO+eU#0$dIoeNT@F+1F%7x$%LM5<{TKuI zPI7I4#~+zZfp^$)L249(_jsc6#GE@l8rfjj#sdkeM|w|H_)!7Z~t2!x8j(oWo- z_tbDQ6cxIpofLuI6s@}Ixv{Vh{Q3RUS@(P-K=&z+XWM6958>kvuL-*)_MMS3Ey9=$S2d~YrehP*@%$S3vOUo987oFI#*AfbA{E61+OoFpK z^7!P5wp8JB+tR(9v`g@Fi>M((6yZua;cga1J%VLEXB|(a6X=1%}r>#AqtDuOAw~ZIJfMhutwDaxjPytM4RQwF4`bZ zc6|D5`-#G^b$aE}Q@$LtG4hdU&k1=WauP*tMU>Fb3lZh|vl37VsN_$ye>sp(Ap1lB-Z+|%)8h>o5V{!u>8%zn0~3u>;F+;wj+$UF)x0VehBO z!pdkxp6Ab>Sw4vacl&Mnk*tCk<`U8vv(05TL0Df%zCODZvaDuBLh&x{oBQ&gznSkms3mZe?$y*z?u_D5n*z9vt{~m442;MxeQL0 zjbeY!vC=K+2V%19I$RhQPS#JhQ3}IpmoD79{hvRY6)zi!Mi{fiYEgm`jBYD%p&JR5 zHHpl3^z8IVtaYF56oE2IPo~@+_h?3R9qEp>Peh37cz$#*R%hQj21Wf_?x7JcgfqfA3Gkwl7)Q7^NC-aoyb4(hCN0;aV%(V{f`%cGs 
zGE@x>Aa4^}t+QyMV?}ZsMZ)B+Rrj+sRwGEaIX-i*P4g(G!>O3iUn~ft6s0IRI^w-i zBp9V>`rK)IJbh~YMqTSYE*RbP(t5`7Qy(I zWLJh|EYa0=JN6x)HO1~14E+IVBlfrx(uCU+CApP0Nd#JsDI{r21mID|Lu1w=MEMF> z+aK{{*&N^7Tb?ZR{WBhQa2po4-xmliDRpwj&`(5x1(uwc(P2aX!;>M~zBK5q3iI|X zS;y`v5ST5Cvz4)-BIuh~$Y?7cp^f&F|4t)CzHuT@;Fqh|!fvyPg7nVFz3ZCDe&WM6 zUBtjtUpfNY(z$60s~#PyM-+8huB)VKwJO^GHP|Ah49(W%7TYYu+wgKkL)#Zm1xTBg zCGcA)2*1GGr)ozh4$vMV(Xi;s9azDs>Mg>Q@kl*3kauyhlO+PYMVl|&%(YlNol6#n zIJ&)lP$XKS9|AVx{QjKDz$!mYJH=v@WiL;ciJ-^H8@s=)l&PT!!uJS zp+R7WJ>`k{Q|+SDWDdGtNc6Y(Xl^y? z1>iY4EpDX$h_Mw0;z_^^H`M26<&IBH`ph0@;bAllWquX9~e*5fV(cn2>mM*S#fOh8?>nsr3 zY)H25;)Fy0>yxa^mpOnXB8BdI_b6K?q!MGv5s%Xm3`2Pq8(GA`*psdBG;cL6KRa)h zU3WG-Lak&Vy>x>h#qHLyi`|-z8j7ULP>&kHaFM03zFd_u>=ANB3d|NiRm(C%nC3d# z{PM)3TaYDPDgvxZyE8ZM8cD5@OjxuB@sI_Yy7LI6bLE@-~KQ?j% zUE>ykQ0bh2vG>PXl}HqgI@9HFj@{?abJM_5(ta*TNipz6zbz7?*R+p0tE@&8_#H5? zr)~B}aTkdncH1QwY7ryZ3EStfa zShVk0)g+rC*gjXy&{3k?*fiJ@(L>Gg86(qKZmlAP?u!3`Cxu|;rI{~oe`)bckJ@pM z#-1_9>F)?>tgmhk$AZUdCfmO}2^t;DRG)r_Bos8W2Z|ylK;6l@=_X{oBg|Iqr*~VX zr$b({*}4f(A(SVNFMPBhtf zWPUQ&28qBcvapU_QEqn$(r$MTyxV@NaQh~jMYIGB_gaCdbf9Pv8;rEiwsH|EXhmcO zw!;2Tn1bdMU|4On0I#5HVB+6e1XYnN*Q4W*20Q2}aHNRfs=e%x)vCtwHbHnQ<#^$w z!eoq?_iB63(~)t%o_E=We-9Z03#71$v9gOq3Wg73xw<5XE*u$6ZdU|V2}AhBy;F!> z56(KcD1cB!Su(e0_b4{BVrGv{-R)PxDuAwL(5_)m`x__m>if`!F5y77S|o1`8KkJS znW9jRQ_~-@B?|LKEPHjdWe%8MG1)9YZRtqjFuvCPp))%c*%6O&JGZY1@`gwX2H(7k z?a!i+D@OcE>~&9#p+Oc__W6H=d||O;$L@BeL+XS*n%yQCxIHvW6OauOg;Musu@HZ# z!X6MNZw9kT_G^C{;W4)6Hp|nN$NMGAFfFsfqxGFI^woJZuf}E^cF2Uofc5llJ0L`d zQq$ARkU4vsf_55RUj8pRs5 zY137bT_G}+)R%8(G#XXRvbTt&zKEfkBlXtLVKf&M+b}^&M2$x#SCBzP>0d!lAb7>=cMGY7>-MJVup8#jMq6Yy z$8+qEC~DU&(Jmi1#dg?pKpmZF=+*WFK`d-CGxJ9xg{mox6=Lg97>0{Rb2|Cm_W>}R z+J;k%d8uv9Rw4>jo)<^1Sz(`(xigDy%TmoNt;N%HM<8m4=b%d2jI*l-dt8*zvqmQJ zIRtz~1m1|=Xu>}A&j2{)onfC92)y0Ik9|Rym!BTL-M*zTm2qm_+~bq5f921~xuo4F z5b8N{@ww5uib6&1--6+O>*LSr1I(_pEQiJMVnlN6KEc2scF|ccM}&}_Ou#WAKcFNq zCc$9cc~l6io;FOC0va|Crih|;pNXpQDgk)qeYf0j!#Rjm+|3Bm)_c-Onsl!Kj2@_M 
z-89mkPCs}Dqx37DvMh-&oOd0tq|dcaf52n#936IUWs>-u2y{jr_0q8=3%SR>AR=tG zja?G?x!!4C6$Q>%r0r!VajhT~JceVk8x^Jm3@#=1V+TEC;p29H=uFdCIffbIkkSpw z&9!<#)rAa1x1@Gf*=|uNQ?E)X?wu9#UZV>FJ`<*cN9-d{g*6xY`hSE%mGvCl-_HNg zGxa@ny?t5~3MiBB4SU`B-j_sFaTYLU@3yPjpIWmx97I}IPrzaJM9TX^LCQEXj>Je@ z05%J_$dB11A^$FLs%6vKp9})A%hS=llB0@jk4IApnb-+5jx+Pqo-~i+d9tn3Q#4QQ|Pt(>@g8w`0UcvXP1n%_XWcur#EJa zE1A*5_1KsGO++XO*Y)y}Vc!x-9fnN`m$=^+l*T|~p$=+82YX8K;&33`ZgkdAvA#>zt*NQ+b(^^CG?7QvJjk?t4 zTC8I`0TW!z{(EeI2=Gbz7q(z*VYl7o6dg@#Im1>{6kgb`Pfr`^;kL;wHeF%fg%B^& zE8B#aprgB4ZY_1@wAWMT0loS1eOjVa>k*@ z$LZ8mzkTVG@GzP;xI42S3X*;dmzHBn<2Yf>5(S16SgpQa;ZW^aawn4zrpBU02YYQR z5&>3@PhE9Fhn)hK~IY9^N}phUloLv>;yyIg`WcGi)ciQWIOj3TkH!Wg{&Aw6ZSJ< z8j!URSM@oLvJ+*w<+e_U*9;jrz?u~fJf7XN_^mB1wnG$+xH@%17GhyqWa@E6QQQ;L zCLV8?VG#`7bx5)1>!%d&gfiJ|IPL@t@YhB5+s=qUF?4%k9|(k7x1x9fLgTZ_Ti4~= zWfwpWENK9IvD!?O(7wzckqF=QB9Rda&P(l90r*n?+=c~oLCROg&B#ViH=+{Rz|8{i zhgq#y1zvZpSp;ACG(4jIDzCwFtwr>}0t=*EM8J26qN48{dum&g{a%n)$Pe!`3WusM z$!FgAqd(lP9k#ea*!xl4!t{;^Mg1}aUmfBgQ5)<=N0<@f>1r$-iP zI%jh0FEZ?AbnjvJDjf3Vc3>&@17k%A{pU`uRxo8Lf*gowOv2PHHriMw;OpOh|M{Tb^flY4up*-CUG+oQG9t{vJMwy_;T9xAAgpGV|5`QlKJk5aLOiCFMh(9}F8q!!+t9V{qw10zb zRG8Y%iR+5@NzaHE^+93wbG$m9xpS&!F28q5DrUBtsGa-{Z{TEEq<6ostcSTNSU zr!a-ZK_c2G0CX+u*siZo>S)CI{$w#qC)E+7RCHWNw#jWHw5631|HM8 z(@qPvf5KcnyyMYY*0B)$z#ko<_5Hs+4nt|Fwd?E~pXE`C-l2OYCtm_oak7lVd+R7f zt_ie!xcRW|!lALbK*w~zurwh*!5Z3rCPJtYZd}WfIKH$;MB@b|6&#VT6@c$k&70ob zfe%eHC&4@-MB1aQ-b!w%AcQv2*4* zNWE&T{n%rVRd1Bw&PAUiF$^7^b9jbbEI>~25tw{n>GoyeP=d(%rOwm;M>OTWc-iUs z*w#rWiHyNY?zE?9GAOfVpNqN3-}R#4_1gL=R&ShwX4(*eb0dqa4QSy}EUJ3r{8{!Z zg=u{5$tf&5mLjVcDX^u(Pc8B73DKyh5{zW$+S5W|z+gcqpQR^7AkB`FsYlS^8Oz|;u%Sm7n zcdzy*k*GjztV7TK*z!}qIWKk7Lc;b90v=k6XwQQv5(VY8w_h}!MkwWC!k410)5#sW}P zG}fn@ssp5|SkCX@h;f{aRbqJ4fe4oMDe!eg`M`bFAzn} zZA)jjE#20(bZpzQsdj^iE;>3pIz3dE3#a+L6&GIA4G(kwEfhh=`fjyV-yMo z?&VS%9pa=Yu!Ey;1Tsbmg@%weI_K2%zu!7BvWHuXHDMWG9VepL&~-K5@jM$`u=2zt zAxbq8XjUn6QL1TiNb{|JVWS-sjAom2y4TD$Qo^=Z 
z+8aLZMgclOO1-q(@>?F(LP)L=urnirXZ5eOAQUztE}`My(9>(_+coOSjG4n4vE+)aR*KrUNNHAs-6 zUw!j!_JBu4kFB;Gk7}(NXk{KwZQE$$6{b8aFJ;0}=dw#hqSCkzF1Fdlqlq#&BSPb_tb})xd+-~ zA9`xO1lG(;xybrhl<=+GEB)9pMb9~aa z(_@$09scmaB?sppTxrwOc?}&lz*Y!Sb+=|_*hY`KGsdxIi9Ido{YH-6H%6PnHTDP5 zDLvd^d>q?(&v_m_hiBPK3iF*FU$p0R?fBz$Ti)C;(_VHWPaw8>rg4>AT7Km#qztn$ zj+Id^N8E0geNWU-oX9oFX1h@oM(OLjSuX{BE?&l36CE@w5G~Zz9g+yG7G~IpbMzUr zDMIbj=w#WNi?p6>k?6dJTl~`-7D$u%8BwT+SP6doJe<+3n|-7(waI{oH7e_LNl3Q8 zVYvNJVP1vyIz!7HLV-CZPn2spH2Q^Tv=qlO8!X$C7vhg=z{Gktr>Oxvw6 z+#H@oy&V!D$5?c4X8#fl)rbn+Nc)dRvy+>LPUhz?C7P`r(vo8j?uaDEHY2jxMPxE* zn>wit`P?5nR6959v7bA- zL`-dkLvl~xWd5LFd()0jZLSRmZFWR-N{LQk!k!bLPWw}*7hXo#mw)@Bm7EL|eN$8l zo?BLVU@!Z^n}jKnJUS!R*g=pCPVe4jwgBy99_@bR;NdHcvhr#Zcpvpgh3wLCOQ)frEFVT)wb-Bwxx^OmhNGhV6BRR zHKOk1Zsd7~ESF?W^L!(g-k8H#=Q{h65NWZ~DY3ju1f%|5cdQCQy$yDmC=}*sQ)F0X zi+$anK;UTUOH4L`R3l@3!fy7*d2?jyW}kKS)CV^rCcVW*IV3?I_Q9&t=}R%b%UWxm zC}fwHhYZX*PyNC^bU`)-7`_~O7v7)`Zn9=izkd;`p)Ux~Mn3b~JHnwMFr1cOVE-0@ zQ75%?VQSap)Xqj3H9G&R;g#`n?p!YspgxZ7WCb(9ZV(LJy;3%t??_`#yO#w_^57oH z1}oaT&ir!|#@(JLD^~WtcCW(|^BPVr9B01}q{>-$KJmg|>v}!M#8WYoJMjbe>dWL#nYZQv2g?l}+(vlA@r`)(KL%_vqODj{+K48s?`O z>f|isRi_-Qnt7~xvIe`9KQAQ)W{lH)-!wdL>}>;^`fkM^B(4*1kg*iC7iEuoMf2{STKX6Yg? 
zWJVn%5~Uzhw;z%v3jMphLMoOCp>W!vDV*6u8ceb@B1OPz`;Fphz_yx4WN-8TJUiVa zv#mowdY&lM83H-wwnZQmF{>vQS6hTbOJ!!TcN#U|%Upjqe_glKyW5Q&1 zbPFCVpAw)pPtL~Ji$~x%I$_s-jR@~Wjqr&A0d4#PQPnfoazM`^nqYYKFcj$?vIj&U z?@FE7WecbhDoYTP8sSmcj^=9K_VVqlQ8wK@&$pIdwkFgXuF|}iY zZ4x~+6+Ie7Qgt!g~@ z?T4K7>BUZ(<#Gn_w6pxL}f}^V>xUy|d3y z8zdCIXeanLm5NRNWfgAb%)G zhdZ?3P_13@P0wVLZe{GJf}u&HTk$;@LfOjQc*2uKE4|rPEeLa2mGaj++SWOyHhLCt zNm4JYmUh=<8|ego{6Leyq- zpEA{Q1u0=+Q4DKr#a1QEr|7QK6572__eUZhMboXtA3>6KG^b1Uj7Oi@*dm9FFFT}; z9%Nnnrl6{*c~zueTyLLvg6L?y&d$4%1oX*6YoqatIe~+uY!Ts7y0(YMTd`oMRjH>Z zY^@01eoOgsHA3%Ti}twa)EL*sId)VabS+L<3+zwAVVD^pJFH1AFqYPikP#BZjiy+Wh`hFN zRAhDIpdgisFU?2=ot95mu{a6&3Lm3c#{8EpXU?}&|e6~|P zUiO`GZH^EeGWW-9jVC7a!&P#!sHONJ*~mlxZE)u)J`j z4M@YT^xI($QDiJfc;p_3FHd3)T5msf$d+XnRvmsONNIx@bj-#(=6(`;=oL1z{poQp z*KpFez+tL+J6?UQR*<^s(mB)CDNI+@&}o|lLLp^w8~ca2Px8dwFo(8JVQQtck|9+a z{zLwBd5m4xYo4-8MmIbET2jJb>jt#sS~u90!qiOf1P8u&j~1jws2R@q*q~!-%u}>Y z!M5yH>yb_oW7t9F%5p@4RrX9P?biaKz4<1~0;WN8Mvp)UZHi|`{)4f(y;_h0Cr4J= z0nY^sugDX4L~7$QdtPC3%)$;QT-wW?FgM92G6L|g4}%JAhgi9kVZ1k#RK2NnCl}!- z{lSA95AL;70#q3@vo!dVToLF`#>o|(U6lKWBtDyvfzoKXay&1 zms4{3XdjzPh5ZeDjkjNUxocTjqfSaU?m(@pY)zw9O0B8t+h^6 zeKWexpnp^7ogz{HW#}eZs{>ZT9qo`n;2=B3`T29g;d|=)khCi7Pok*)w&+u_e+a4v zcFVL2t~>W|X(EElCXz_ zcqLMwx*4wVXCExk?Ba2!)~-CYp7U8jHyEm0*vR~1v^;)9G2tbBV%Ua^*^5q*M3p>l zPO(g23PC!etJ-Xbr%N0XW5KctPn~0R?Sk0tkHS6szLG`+l{QOEAE)+(r@=D1mJSYNn+^thbEB7b4##_e` zc#UBA{E$wFR7K}F>4lw~lIWzQ-j;|2Llv#Nv1}(0nz$^%eDatuix;j6+G=J+9?m2E zi>Lgdcp>ZHKMKG?4>Uaf?BNrOcH7r)fF8yf9grXty~VB)jlx9|cg@yIFcb&fBDQOkizQaG@EXfLjp{tI{PbW5gHDxNZC%XHxs zfxy0w;{Y_9aar44xp}0_nIB))5 z$b#>20Iu#BO*)F=ZN-F>Z}+7MJ&SZF|4R{QA+&$CU^RL(=1<0Ynn)`KsSB&|G>l1` zahbOkN#RGF#H$JsCWl2^-2PZF5YS>9MWD!eQeJElV3~DfAs-1g*0>Y!8&TV-sL=Hh zM3I4n!=g}Fa*r`Yd?*;|wimJ*cJcSYl!~-c?ZCN)2-L=*WeDZ5>0|_~u`ChEq^)K{ zTg^f%6O8J2)v>C*$Ev5;D9Mpp(x(WT0m)Zup6QAEoF4>lySdZ;fQ1X15Nfrp-}8j-MR!A zK21&yQrl6(7;Ak!4Q;VW)3s+H!JJb2g-~ejJ8@86VKbcI-hsN=3V;0MQNHrr;0 zsZ}em0B4UoJiTE2srp^^j3B(?o{aV2AB4zUr(n@IbJ$a7tI^{2mS8CVz(jrlL(ktu 
z2{nfH`%1gurgJF57@Vve8IM)kO^!z-3o2r0_;(kE18xEtmG2Y^^@;HC z=)y`)%tnY7-kQ;)Yi8GOHYQEcE#HwQo8`HTs6^`6W(!fv%$nLE*Lox>EXdBU65?w= zftKzrg(NsXMbd{6OJQwxZUprIq&Sn<0d@&&7v5)}g*7N~obib{a*k>&h)d zG~OSHA8xj;g1rCu=7;S*g~JyX6c#x~wHzmE6T%pUY&$!tauGuta?p~rD$gaW*AS~! znD%6o$1--YP~Z~V#0Yk*6M^?r$ewsWUGMOvnUMhBE>F%nUpKZ5_MFGdpbYoT&kKe! zO0pkT-WDaW!n&@|{w7R;I(6z{w`UNhyCjQ}F}q6uPB`10g40Pfi#(R22@C~0!0Z_IJnp7Ugs`6y2;2pG`hfyB-8*%*!-}P1_SeWu z`zwxO9I(giO`%YVK@uVOyD-o795le*RhUwgb5LcU%{-Tov#DM7RpD@xgmts_vm`vS zS6LTD1#SwsU_{w;fKxCMq&@4J1{)-bkICV(b+?tIpNm`vRoH~~=cFfr+~o?Jntl$U zYz*ilq!rOZ%MNU9u@y?x}m>)OUv;T$4TmfB%|Nar8P zCP?J)bVzm_RhM?zIm@R=Y;7f02>7xFdwXM`~@oeOZv|J2dyuBi2vA&wjRW z=&N}M9YniR^uQEetl7wU9!Ov zuQtBA)&A(uYYo9!lgvUL_VoRcIFb>9zY2%eI=g%v=VA7dD3l|T##w2f>l6|ktD1eR z8c%i8>{3xkP%fA9<#vrAZ)D?CVi^K-HuOI^)4R=|<)eFh!MHQi#@g+oXsnZm5bh** zai3;GMI=Ks$pnYB!(BD0opbD$4#{LNRg^8r>qg<1%90*8T2|`N=n@Qtk$apCRsT)N z!b>0p-sq2@(IOM`je_*}T+FyWp>Sx8qibHBYfpPp1iWMj{#ilKT~h2*1I0fdsaKrx z2W0PR(h@trGY?Q1$x?PY_E{kneNAm75-F2T_EiyieUh7#n-r#Gk?>QdKp4Q-#?i39 z6@gEtvMgEJx+=JST_IZFh+Q&{-EEhrqOw@(vtU3;;BnDH$IR+|yB!vUX&L*M{mH)x z^IFtV6ZWx3>Uo*p_(YgDU{_)!NAw*3p5FzEf4UNKxhRyB6@y)+Ft5y#6ySFR;HR=sJ|fr)3BztSmdnuOyv3gzIN(m}?@8gVw|x90 zJWl>aEBrCy`XsV#oS;f2OA~WE-Pzi4w#3uLD=?Iju-^%4<-MD;maT{%RM}CHsQPFH zJC)P_vm(GDMrO)&dgW%+`E zKkSImCiaj&T$nsCE@upa{OJ^h7}Ve_=&(~x?%I8P<|J$~*q=p-K4mHQzbT|jky05; zF*_{+@02}~RXgmTg4B119;gKWTOcsNm8_PnpR}iL+?;BfVCQuUsrwBX*2gY#kQ~8X z`j-XB76DQ9`tJ!+YV}8HseWG=HX_G5`;Zd(kqB^E9LHy+^%J7R_GpE~hhq8zA(sG+ANUoju>0nO@Q5>D8ZJjQ2>jy>QJw2}N zZbuZRf!*VfQ%hEFmJo#1w#nV z+XMlXD_8)YE~uGdb4&ZKWxCB2(Z>hr=A%R%N&7`~a=rEHtAAk2J)`J!s>PZFL&3_D zh-9~TGzA;XvBQ2r^<%l&LZW@H9XUtQB|>V=W_wN)Uvh8^RgA6sHj(H-6|s?aTw$te z5ZVJQ7;&=Xk#FuBXP><#MDn^E#742VErM!5#{a)6Kq-kGDwB@oQg!^kh|~nc;e0Z; zI*jFxWURHH3i7rN8J&Yo5AU)*grQ z*}A&g9&^a%f_1q)BdB>`lN=&44^-PBk@y6%E9DUG6`|0+IEI;oD&*^;Xb|Yq#okEA z<|fcmt*|qqgmYF&4Df+Vis55XDKGNEEWhkCKRB0Zq~*TPJ}(MI!h?OCecb_9xufkm z0Ul)tDB;kxpTkt+N)(yxenIu2wNu+Ia_+SWPCQoqDEIHjs&`2eVwH&OpZ`4Fo>q_& 
zs-01;6Y@1Z!*t``l>6#iWB*YUwZq;BlN0>zvCsbyQfRqS%%*?a;r>N72^LYJ$nN4D;k*vV}|l=y|ssJa#iz{9cXO~-0hN^YQLVPv(b$$sVOux*@Y z^91PW0~y;(#w2Z{u)21`zDPD{uWb_r{#d8!P-T~(bgog)lx`+Unr;j+GFo3{Pbh(w z`4as^#f;w?`o(dPLo;Saexx~ptO!&xKiG0`6QT|$l?ZOi(FA6}VgC`PXK?>&m)}a5 zq=$Atw~l?|_XX+FEXif`tigJS5I(p87GT5}gO!4{h!LKuJhW4IkX+PEzc z&3Qv>QP-Ql7m2qYT`84cdrC08ZbYoS#4fxoL`UnHD*K#Z7+HJCij`d=0!16yx~bSM z69^9v!kU|1=MOV(BN=MF1j83MoNF$QU$Z;`WzFIw=4XObb7fxAVjjkrn3Z|5wz0Ks zV>jDm!SEV%%gby^dvwi81mSIyKbcD}eoWzLWM8YnJ@`p_Tts+gAyaGz1VU!z33z_i zBkwCnR@xZ>DhH`CsRu-mkDze0blU&!k;v zA%eVtE0cWc(<9m1Hu3tTAeo$6S!LrD=5zROjm!=mY^KPe_r94pW<(rN2EzkX{%GuyFY?+h(UTz#KhG@kJs~skp7+E@zVvtR+y4+odoyQHGp> z?Gp%m_LQigOr<;_8qCPO23KY4?1(?j=FFZ~7{;Yq*xY0<2vQRm^u+l}iRKgbv#*O9 zS|qOwVZ4*V@GDDlQ|F%AHN`IM2?d6R3>;!#RahOodWz29tLDoQ*f&Ik|KV{wd)aq9 z`Kk46Ius6-A1(@X9A^p7q+KDoKDB1IjTSL{t(detTisw&Lbo)h^iK-7EqGp-*3dpWZm$VZ ziSjYgF#~DQcSWEq@+0=1hbtZ`u#XjPA5SQT#`Ft%Lko=Elfa02ej<9P*jMTiN!el-_JtDK zOTHb-art7GiAG^NvxmM-;lL=zBJ}e+MWFPNoj- zG-@-mNB0cd?s>?cOxYOb)P(I3iSlz!P{{q!lb)Q}yuuC&z|YB+Dtk%cP=Z4{pIb-o zI2RPHq#L$GYcuR+gDB&G|2rl6Z+{k@!sX*|uE@>`X^$Y0 zuy7f$hwHJ34C7_Vc&YtEX{Z27i0uP=Ul2ABnl`&5M4ei<=5*a`yHb$6*$VRET`NSw z-u-*q4W95=6}RNG>@Gp_&zI2FPX*|nxNT?W6Sq9!P!(tAZ#=tnrd5bSz8v(?1%Kn| zDQe$h78SFF;jfh8mhL>?Au?<-Cir`Q7Qx1NuKh_!J%J6MQw&;oncpQb=;>^^lTQIx17HpFgFID80MW3-F;r3f%t7UN5?8A9YcCKp^J_uLKs zz)0pTPO;+J)@ zh2f^WAo-wuXCRp8-k7|>G886GPC#USxueJ3-1TPrnZk5~=$vZ2&H|2ZXMlP@WXi}1 zqD~Q)*a%NY)oU_Gks)$tR4Fn=hkQ*}tMjKiWOUaA-`&oL5V*=MU{bhn5SVeVnM30dphS5Y6y7R?e$!mIIDvORQFamv`o7#wk=TF%WGWg7Fa|0oFPIM%k~NS`Eq+4W@U@8KP7P` zizd0H{WeV+PvZ;OUtbd`{JJ#Mq#Cx1t^EfPskXbKBBy1iM4-hPMYveC4~1xA>jwB? 
zSBξoPKMBjBr@&%+G2xa&eR@(zovy8!u)iD$+T@6WPFLi*H(T}~XX%JN0=^X`GW z2iw8`Pc*cgF2i-3pl{>gDEt2!VSjErxGkPPk#7?N%&pbGsAfFy-qRIm~^?ZcBKeaK>qZ?dx5l;8PZ~36bKg~M5DDK@|b$LsQ!x_?8|>e z(DwqmwOncJaZ!A|N5y#J%M@mzbXJwGp9=Oc~Xc8abjhS5BnlU$HrNO zqNr3>qy$|H0dg;jKA|6P+bs<9OyYdMFLMGRn-C1a6}#(@7K*n3NUSvAk`T|=arv{L>-56OwSF`WCkND zo70sQh(xvBoo$zuhSY@!)o)PPm&INzJ4-K!LRp*+GVPO)v;fmaSByEAG&6^5baB#V!g{#v3>vfFb(>LEpxn-MjNNbgKw{fDf(3VDOLAI6DR_OgvaK6&yqM&* z^ss33;wU!r;_TwqnuMi9q6vt)tmHs){(V47kjZ5%-wP4xxc}95U4U4W((?b2pl@An z@wk}X(wVxdR&KCv74#Fu2@C8%c6cVt@!57qNQhvnM3zSei1^b2PG64%c&e`a#G(dE z1ej`QNHt=1QIJB%azl?J%2Ofi`y74W2@zW+0z)~W0A1v@0*rL0*~X2wUKG0a?Cff* zRhaI8ohxGs`ax_AU%#>7ji>E30rA4;T8DzZt)peD>0@Df#+~yT?Y#SebQ`Oh2cpjk zQ3Ygxi|mpRg{8o8_17N$5~4a4fJn0_ysm>^$}MMu3hVRBy)x zebw2v8Cd(fC={F8G|#>?may;sLDW5ACW8N=YoEQoeP$yX2X>W+{x?!aM-Z#C?jrgA z{vP%!Zce9wO^^pK!hRwWbxcZ(WKc*R!yFqwNaF=*O(DN<0Z|G1ayJr!`>ZsiW-#%R z`e`^~$o)1g#A4n_W|F4|2oLS-l4(}i8M|{Tc9d;(fF~L&Pi$XhzZMKv{->GXdQxZ? zjs6%ue4w2T$>o%77EdofC5l;|)w&6Xz%d&tOsg^T(6MlhJt{~ua`6%tvByIs;s>}b zw9F8Rs~c=H=7z|_^O<`MHbKyj%9uD7Dhg~yNYJrtS;x*5wmzL8Rnwf>Txy#|>0H=x zYzDQ7;FsW7-SUp46JRxpL(5wtFalXJu>5ua#1z@P0u-OB+7LW4{rMcF*-;wp;s;4X z<1mlNo^*{6QvhdEJ6^APVEHR@wql=*c{OdbcU;Yli_xG?LOdA;-ag z74{=>c-rB4JigDfe}*)}IX>oc9{9H~-OcOfMDAA~3zGdEY$;bg-RENu5=#td>QtKWP@KYfF zT|EW;=nk2r*ZUudK=%*j$1B(F4XK@BrPcPhAT7(sVw>d%xWVM57B+CBRfN?1fK>7u z_K_f2wKk!wV4rye6zd1EBPU4za^A8zm`RLfTa zdOV7)F*&kL3Wpb0*$SPVs;hDqL#wPxQFKU798U~e!*^snRQ}^%6877_IO}LINf7e1 zsI)G@Wc{ncelqHQioU=O`-f=sQ*vUw2pOO+JPM>46Jh*D>~bMKerL))>!~maB|pJ- z(kCR$LB%Q#YM0;iWKdq%|Prn7THV@RBNQ@m_>61!!MuI?%+>xFkLMwSq!)i zX{a^fiv))czF05lM{u~5HyJ@kBDs71*`j#v63CZ&!_TSaDXHcrcWuBol45D@@HkE| zS+YlksUM>72ojG$UzWQUXy{cu0PeBXS`1O+`|#cs9Q@*9s7YJWOuf zzANPC07o(L#zNYcryty9HwjSb0X+s-e}#SLyIp98-7E@KaQ?7&*kD25!xMEmzAOC! 
z{yK1yWW&P;NQV~iA~ZHYB#@?JF=L!ycp#FI2WP86qPrZs@W`9atq_4ER>!_TNi{DD z6X;JNL;6+PdLIhX?AG=C23_#8bG3u%TW@y>6WOhA-@Z0VKm%PY)y0`Yw0anh3g(1x z5sHut751|my-I%8v#miSx*(Cq3@xxm!Eje@UfOAiVv{V-H!rd06iZ*j_CU!9uKr6R zggvSDnN;m2CoZuaMGE_^qNveu9!ewjvJl&)vpe8jJj}K~i0J!!T#O&-lfqQHSC4-7 zUy*ZhymL*q9|}|T!;864%M!>m8!l3(ommv=<+DP(F6RdiPU{H8lU*K+%={2;-&b$R z5I))Ptd)fDWR~KS3ey#5=PZ}>@mfJ&yRlKuhW`lBobg)@SK3!Hh^G75Vl(aQ0(6W{ z22P3bK@t4N2tPDhu`s<6Lu9PL7NlcQsZ84X5PR*-*IxUiH3%_JJ7%vwF=u~A)kbR* z#V>V~&*pH3+QC_Nr=1qv&wW|gN1;aqUo87$($3HHS;20PvGc>P|C}gPB(JrJ_%8|3 z2;5)cZ&`pJr{3SUECn@OSY+MMBCT<*p-~a(A0{UPlr|xJ%S$DDg@CWg9Wi8IY_142 zVoWp>wi0u|wunOAu-_2N(hXJEXV=c#$Pq!Rk|(CuzXce&RKsHXIHY@{jGoiEUEOa#*Q?%Bh-Dy-R@g9~YF;|Iu;ky87Q1u3Fu_ul<&Q24BU^;R3KusQ`B zPEor*Bs;q8^?G|mfE-b?!)>%>3i=h4-P%-?#fjpHkbIaNlS~xwzc^Cwnc!8Sm#4qN zm+?e?YiyNB{x=urv@m3#trpF9Vxkx`x3*3MKb$<;%(hq47@M~}?1V6tJF~9t%<5(K zo*?Zzum;)Cu2BGOD$c?lxAhVtN=IYsD?qj=&SqgT&STxsg5Qa$T+iA|ciO{Br%|Yz zZkZu``^e#z6~evx^|YLj7rsRp*0{xlsnW5sW&DhLNRwdXaj0d~^Fq8+<*u`W0N>rw z_-^woi-+8pVhE9E+ah817ikG1=Hni!-5s{k7AuO%$*<W@WUMS%DyV6}47bGDJo4 zvZDMDJt@eVkqmPjr4)YMV^$f`I3t=XY_lN6qwA6{p^e%QO?!F$w*w)I)P|bWhAJoc z&o5{AhBO%WiP>pkEdZQdI7ysKvLR4K6tQQ+3%F9yPf=%~g^6{wNZjlXPe+}^y`D7+U$Z9M(#9J^TwDC`hk8d*d2mJGarLffP%`$eUy?ee*Qv{>r>=dIVIosxo!hG$|f8DHBh&l|s?>;-Du+KER zs5ry^lEyj3d4(Q3Gf@<>haU(T;NnNZJ`S05@7vwJm_sIX67?Pti`k`u*IxY1PLJOl z-xoWMIkrv-W9xLSeNAboL-)RY?7H;l2Uf-H2ZDa}4%IULD6&+-q4)dFeSBNv0D%DL|ZIVQB(juPWj-qm@t)>KV)pO z3;Cgbh;f-E==%#<-F*ieq&NNYbXzZanENnXCDB}O8^X5-S07w;aJ}si^!@a1UFq31 z+pSF$UsGKsI1Io_6HSX`FUw1|z5XTw^~2amQ4|wg{}!h6GT}k~XuJ=fn6uYzQCLIP zz^foOu>#~2Ar+!~1jz-#6CMNe?5DzEXXA&Do6IAkP?>D*h&kL}uncY4;VUNkUGcnp znf*@&k$B=O}CJXN|2y%ok*gU5(c6;ni5vXP)1NHM_J0?it1f~b< z>$w2B9zG7gB;OGtWyi9$rz=+35JB2KawtO_!tYeBvxveBZTrgE?JHZ_S5`RdlFzoU zY_@SBS+oSJatc!r>NfK%Pe6TD%?)*i6$Yqg(yK)Qc1**pZxwH-<}Of*5P+pWoK$xxk3PVku4L2!f|s3L7i8-mC{Ih=AX%yB z<@s-@cdDi?RkPo&6UEO!_W~YI7FZY2$OoAgyh>QF@KF|m@5&`^*`iR5wiQu(DkOFm zvPx}?ce~o({fkBBNWf9Fm^ptz{M`2__vPA!!gSaurz3WS 
z5d8^_2ET1D_JD~!SPc}bX>Me=lr{$u#GAo7I0qcC+yU`d;e?UW#e z=Ek{vf1&VPF1WnOu}g(XD>Dl-H(L)uO-)^+O44Z$J&Vg9hpAp_7Yg}`^OZ-!Q?~cE zio(1-w2LPP{X*d6LL@SmBe@+jtu>urQrtC%i9Wq&Dqa}uKOxIbP3d)Z;S+pMf$u#t z&xfg5K@@%Zixi=@XJ@krh6?$gIGWx9NG_q^6IhRlV`Bb(d1o3+<)brPy7z)JBLxY_DF|*~k#? z*R!|fDID4_X{OE2yUz?O6aC!NnuLLo6|mW9s^g|Mx;MIwqWWo@iQcPi6825is}L_5 zA~3Ldt(yw%WdUl^y?1vzp)kFywR8VyJ1wZet3rhydkxF%@`O)-vPKyez)a9j$@_DQ zx*NN+%wfFWgozXFUwXF@1HP1trJugTLONxu_Hwu#x>l;%uY>=;t zKxgISF;ku+-wxll&t2UmRMx9J^`k7R$$o4ZWB}oE@rP9D&WTz zRW1yC3RB!Dj%*Kx?>HG&a8N7=QT@C1v82M}(e2?rk6Kx0I>cM)*J*+@Ppp+}n-LJM zB(aaMG@yKBMxwS>h|0rz$6~9_bDY;Ui0EsS8_&tItpVr9Fd3FP*>(%lKV1+@z$(uF z21r|b!b3SGNW~;#ZT}9R^88nji@lrA75m6Y5^bf=2M~)MSYuZSP!rhpPWykJTwr`V zO*~P$!l}q?uU5x9-KLOgc%fymI0m<{^%Y49G9NUX3$ zB2q}(c2pVm3D6j~_rlz;BO!+q>q<{F&at-y{dyl2!HeztX^ehF!ed4O*$iYM{~2;) zOfq7!So-Y}KKEU7YFf)F%sF=vg%2j60heVx((#F6Y=x+c9W2AK_Oa3Xyl6EdluK2xzhZRq}e8>65v z?Jkz4jTNS|oLWT8B7)SF2M&G)GKHus1FoBG!tzB3^J2Ar)va665@(Vc$r-&Kxama_1?Qlh}I?Z z3+*=w`!Q}`xyo^5mbS0lgF`Sottj7%8G3g=BTNB~Z?(<-B1m&|pt1{|1o(d75hEXS z-P~Whritw<*EpWfRO>FHpLN6I+M|1lKt65L;DinmP#N6VaTXj2u%mR9u1JLeGD>&A zgnlYO=hkhjtrVobqtL^W_|67l>XfS6=3Gv}(VSyjMWpQ$kn8-d!d))DM)SLthx9sU z3htc8WPTF2hkMeQ@Kv7y{p}NlDP|b*&MM~O{{~ZK{rEL4H4~(($@Dkn)<-C;4Y^_U zFZ;E2bNGli-Q65~M+*Ac3~PdCQBR?sefwNzk0_QB2jb2y&SCJe@G(jv7-F%X3Ho)0 z5j5SF>ui!}eBNHxXeA-}Fc_1J{*2D3(jE4c!hSG#okpU0i3ltW-C*9B;Uqkt7l{TT zwazoyAt6R9ADKV{)$i zx?LwgKE3+%v~CLXsS_E*=vK4c5n|it?y!dxrn6f&$q=+n2vPmJUuTse${j_Q*cL$w z;3|Z-RRJ>f`l5(g*vSx`6T>0MsSr)&MoJ(A4P;TMQASR&U0&+Tq)*CUud#lDOe@Y{ zMYs^wwidM?hcx6+ghjUzg1%p2%_MM`83^hNbO{#Q!y?ch6M3ZNZP2oWeUH(DGNoNB z3Joa0leoPmK)X-vU(kVYV!I&K8_<1#eGsB}=;cMqzE$Q^G8H6$-X%aP))Q%n8wAx6 zb&bwX_6EB}6f{+M_uQ!SE@NH;=Vnky$aff#mz3mVA*bx<1ld1M2r?~V<0ajn43Qju zcx|$&g47+hZIi7Kpt5I|SJ|%=reiQ%15cixtJg&E?QY$Ob`IWPL?L4Yd07V+OaW;a zG2UpG3TUaxgK4dOTTp$7ArY;_9>RQyf4+L2^(04dDafGPEn*- zz*hpjpN+awh~hf9x6M|VHNi$FPvR|ynrv%`KD%qH)hSHYJRS0j&c*cCfk4+`GXukd 
zuZiHQy(zVEgZ(K$;s=c!VehBm9awAoY&nVjYCf{fS#DfspBIe`_#(+6l2agzg^*2sz=b5_?35yy=%Ym_2HijM44+X( zAy!w~1SumnuGn5t*tg#M`of@pN;KcPL+iLFpAx2+_VPu}Iib^@J%1`h<_V8HGwd1x zzJN0*bCK3HUJP+wWTq)>>3CFdW zwoI6c504^!D|zU(Aw8p@_rq5N{ZPAKn`sEloDj_ysQH3;?8)$v({sdDx*(Zm@Z*b` zXRdu|Is`iF#mdYVEA7idK2PbSkVCnBRkTpABF=Vw)A-Ob90B(eW(;^z^^8)nhx&_1 zC%@6cp18vX2~sR+@O8@X>!@@lIeDC=?+xiW_@LfyqeJ=$apWi^VHgYXPUEK0$^(QO zk=j{lm4b9VJ(DcXV;@P_j*tTDY+M#>ub{6093L*2ts-#7yyX^qJ$y~o_EEvz?D{7~ zV$2ac!YZ?X?-V>VN@V1P6s!DZ-Yf>fZm0MlR-1pEk$(Qp3}VaANKLo#h+_%Z{% zFM6t0keVHvGBt3HcBDUd3jM9ZGq=Za>^Vj5DI zmXSi@T?tz$g71jq`<9<7on|{k3D;8067y@#o^MA)rMWuTa#?v>(2p3b7ASdk=`2W8 zOD|w{wSX@tA03ZkyG{h!T+q6eljqGsp$wjM17mfpsFZQ?sZB5@?D2FSgW$^LT5gD! zASLD%l7hZl?=9x-Wp;?BZNtzRnj4~Hle2N6p0sr#`oxm;XmQzgLEmsr_iDg@L}3QN zU$6^j`@Dysq%E_VmkAShYU@t>y28Hgt@FGLZ`Y7Kk%w3>gV-xzs*)ei!Y61N=SJYx zKPnsq4rRy#el~_Ynp~jTlIhg>k@3a0T$rB4c4V%t5TKLDIs@}5>(XflYayNzrk^7P zt+fUA=X5MyCz2lfejkN2j+|u_=GaFe%a><3@s~YG{=`w z=qyp+AjARcP=#9^IwDBc<%c3u>5L*Yux}LzS=KB`ZJy&PG{Z5v7TDcN;O`0UhthkZ zn_^j_`!T*xw!0Sy`_>N}G0;{7c=#D6>QdV#=x5yo1W=0Xpa_)f%<3Z1H^UBzqF)?m zmXlu`X{Wy>B6aQ3qkC8TFl3@fIT=;9i|6`WovOgH*PgXoMDVSl$p|FbZK9AtQf!2K z1Q>$Wjra$&`-GS|W9}Yq_oqKhHBFa?wFgDi#28AVWq!Z**@TNNTq;67#-(Dqr%wynwdn`@kQ6Cd09GEjiqp3+` z3nGPZ@t+WZq7SV+w8nlRKv5@mK5a8YxI5-J<}2*`8ilPo`g*M>^bnS`b+)M!^u>!U z3~#Ey>P7Rtnh7mw&wq*lS32$ql#SKd0{f?8DLcu2`mw^~o|yyVq`*EABwgD)euNyL zF4%iS?o{6CFMP^JBW_S)pBLcMqJn7KDohy&lKNQr^kY-7)3(>TiJ~_;og6- zYYpKl8>m<+&G`f0lQi@p5h#1$Xj~#b9AaOdg`=qLn1g>*1m7{}y0k4UvK&$TL>L>v zD#xVo35q%k^K3nP< zm(KV3dDHHxol{axn{ASa{&({BJ8YVeZ(d$(Lc$gbQ+F;>@};*|F!*kF4?=E&Vt$ts z^UI3!y9);dr^?K~^X+hF7W?&xZK(jM)Hp1E{8f-1PE|Ljs`sXDzMQ}z=L=8!l+=B2QL%><(5vSF`*J!e z`b4qy6{K!&On9 zi(8k$BO8u1*q=qC%D7hJY<0mi9!-eqnAuH;{?Y(7ZM4Y&N=N7LN?R=WDP>$;P~;_K zn!ThLznG9Eic;_2hfkfIinaE-Ae}Z&PsK+CXahfnyAXB2`mqS~%;yUX%_zI6|x}%O2r2g;B z*knO>Cpm=Nobhu?hr3ISnl2Thj?=5 zbqpgZPlyn%LAluYwdwSU-0`+fkWo1{rN)^m-GrNE0#+OHktx#c^JM?KSrm03@2I%>@SdbnBGLn| zY?I=L0Mm5vz%lj@h5dvX#2mJN3HvSHIcSBWy-KzTKTfC1jG!6+<;6aaZusfwt#JBh 
zK^K4SmNTuN=oH%Z;U16L0|Gn{w3n}UbBc#bCKqNgm)h8nZbH1R#eOQl@SIstab^?G zfP(&{?T*Eg25z<|M5P7TRJPLabuJe!6}EGzXU+;oM4~9xWLT;rmB~us_SipAXn*0h4z?& zevZfx4nLN4RwNpA#*eE_S2%1AdhVH_=bo7&`YGYuBE@W0IvaFF5G`Sss1}Vn492}Z z2did5>IYY*-ku9_6S_6Q+llmNL&Cv+q>+{I&jY z@YzSuSBj(lE|@tti$aO{?1DBp9j!*Pp6?3L2)8EekpTFuj+_IB_WiEXB8u_lI`7>! zM+7Q{0b|J&*t+oPP2Kz4WLpJjbo;wj_K(W@mRTx2Es~@a3so zCU;qjC{%7taS`VGuu&z}O?h0&SRBz;|OjPP)B$!jyTe3`=+$ zY;XAVih5I{Q2)5ZJB2rMl?!E1Lg~^E97aSSuF35UPu`yCr;t11=0I+O2|q@N{2D+HD~|@kFt>=Km8Qtj5ci&*6>w!9^TfhEyVQc-`8GO*v~T# zJME>VGD-5Lh+2|Zw6M1Y{O{IiI9IJ(vx}pX?XRNx!MHP;VgCv6^t5>=_tn~$RuZe( z$Z0ZemkRk!E>*QfjxR0?@$ku|o*3fYEx24a-yxmfM_|<1142v_Y#n1NJZbsDRBxQT zFHI8g4dB9pdFd#dP??ByI~Rn`7o!T1G+j_KZr+L^K3gXW9d!H1G4@M^weQF>gBK_5 zw2ARt_>f7N(t~xgy&wwZ<0v^{F9jek4|YmGecr97eW z!e{(iY}OCP*H%NMU`nEQdI-6$nCdi}cUgZ?{1~HRC<#i*?+y^v7dtMVWN{A^fri5* zEU+;Geg;G*W!g_amBm7i^zP-rQ`I5(lz&Jm{MLxWXhew8I3tFo#~fj;NK}XTyx9^W zP(n!-P6o5>zXR-8w(``PQkyAAWf0~{SVaKXCoMP!?z?;NK-;1iKlxZu8E|e}Me#Ex zRlC#)`Ze0~BKp=8=N8$Ug8tZp@{i+SdK#UGQEt>bjw{8v-gei}ok;X&2>@h(NiTo^}db8Zdby8}d@-8kcE=s7kyjY80 zWD`VmLxKgbxdIe;V#X}ns4xX|EUY;_W3Fux^yQC`pv*R56#r0kbY?d&2Db=9Qs-Ri z$f#6{()jUbCt@2sC;~koH`_@&EJO(h);u8D^UJm8bd2LU@p$~fm)XBUVub(n zd~)Gh|7~6jD|rd~k{}h&!J$;Y5I!+yxeW^8)Q)PqLt$SVzJa^{U?7B3`ZOyLfi|(c z7>)*Zc4^(=M#jBZ)FD~YY_pZd_XF&_q}2%faUX+N&~t)*+>y=P5A$-Ly(k)$OV!S@ z_d~e7yw3imu*OVUbFh+^+ZWeC;oSV{F8jKIeAF>Jg#E-p$L0pb_;xUlxIOn0foMDm zAuRr=5CeLjq)|%*eC@I(AOIogXMi&tk4wg=wTkE`I3lezNay}W6kkFzjwpcrK4g>6 z5tA+RPlB8d+=h?H{}#9H@3y~&@A!3FOsCe_-@;c-P3h`Qk) zvk5)Rn*^yhLeR@>xB#s`Q$N?TLPU<1=C}azqd8b95%6PiS7c(YJtgd0B0cE3<({IS zPiI0x_epzNi72*hyL+~KCP27hQomW5&H^=Ond{gP63Cv$j@7m?z}(iR$uOTb3HtFK zgC|&cypyd~G%DZPlx4dW7LWVcz$DZ%mnWwqAs^=281{?T1?ie1gq84h9U@<^MQUq? 
zeIOWE2AE|yJX^b~=BE84B$PP+tWvvtJqi7Ary5q;9RX^)Mf~*vLCQ$R#7uTbx6slB z&YT$I$Zz-7_ENm+D53Aij;c-ku_>a^q!SgZZI;6Hx_hpN@%>Clo!T@rRlP_|%{8Kg zJD*q_k|lJsZ(jVhAhaNr0Xf5U=OKF`WJzbdxt%w4hiONuew*B^98@&(aRRg5x%&L$ zrwUj=SIE2UCm|7e43+UP`|Jk3QYO0hFSpAB#Q5j&-OKE%05vl>gzPU7xhL!hTvy3(w|;q^Qz!FSSL2VH+3Q9UqNlRvXf2BFtW7 zI|QjDopD1?YZUV9xkq0aUUmf58t)`e+ktc}w*9h8?5GIDCQIO2z9~RyDXb6bYyD3XbFc@WqW#7;U%irnya&=biAB{w+mnU~2ZKY8veZLDneS zt$!NEQ>9402iQ2UO%=YtLLz2CtXz=JI=ym^Em4^IQIgIKvYuUUTSfE@PM`vrw1)7- zi3MA1r^56NqMyBA73&XX z`Bg?(eH8PQmply1+AQ_v`L66oIPCU?j?)EddhWvs+(#NszkT9!=z87|IR{Q}lsVD52U}AqIli z>6n{MyFwIf>S{v=4pdUA02An3z`yy#}OL(u| zU2^N~t2Ge)?1>~WEoe81z#!xnquJ6!z~{niq2qbpE|Q=B&c^H%yC)>#{?LS<;59rl zJQ6aAwzcHi#PksqQM=SWzXeF8kW<)Xmk7|@ z_Od3sOks+5j2QN>M!P~3Ke^;M%(@Aa(`4N%CfMNc0qw?QIi_rFUYhc5ecjpBi)@`J zzCNgmBeGg(RU%P=BX*3E@6yG+n` z)o|WbaOCk-QT#OE<(W(1^=U#6w*Y#IKvB2zR@6&jU(|gQqfzUZ#*soU^|uQ9Ed-qj zGy=IR-YFU-aA3@27s#}SMW9X`dnH)=GeIqqhL!F-&c9_=7}B(tKf}uQkUHEqc(Bb6 zQAlpdQ(FkJ})+4RkBX+%rWY>#rr?0}kO9~SuuoP^B zD4~&hC}|d^z6d6dXESaQq&($Ofupy(JO8c6RC&WYpR839eZPekgA zoA?HPlO7ZF-90AC!i))1OiFS$$VVa@A3o4km!sH}@PP#F^vt_RnBKXm_f57;VFt&Y z+OT6|TSXwZW919nbIvSl5JfeNKf&Q*e~3T6XZ`VJ{3q4g%c4*{cU#$IM+3xOCKmSW zbb#nvNGtM#088*P#oYL~GqHDIalv-7qGo-2_pyr=cKZzvSN8P~lN*_64=GHYQ5@CJ zMv)bY@TmiC9Zx*7M59(kOi|YVlJE^3TZp{Uc&iNY5XNCEvRaVRaxvo-v7JIb!{1eM zcwCI@(qE=&lIG-Pd78Z;nx?jFx3zHZ|F;O7;=P&zCjzRXBPhpD7v=MG-`5}wdVanC z{@@m|(iz(;MWEL7>xsp?Jfv2z;$bO$N0EN^$h>Sal5N+A^sjJG;@75^ATv=mKXqna z@6OECcC+YAmSbjziPQmZwvXicR7@<%Kk10!DQU?DISL zf_8HLl%$#7-@heN7~y?Txwd&4rSJ?Mw_!>^$6@5|?2-jm^{MnS-QK_eo)(StiPpwE z`+G=#aG{Ln*uMmQB@qsY*cThXzDcrs5wUNI;Me#NtoU%TjAfxNVmFFTxmZ!FMc992 zfbNHMhCL?8e7+wcipN7#d_p3(b)V%5hU3E1o+Cvar-`ZwReGq&HF8$?ioWfMy{1m6 zb2m@G^@=6`3?wODQJA?B8OJNHy&*)0wKm~_rXY#kksME>17i6Tqjp{+3D{}0SG}?f zt8=ByQAeaTr`F))e7=27Y2bexoobgTNJ&G746~jJlj`R0qFDAL>n+HHKtuA(oT@Xk zpS3|D!N4LmxlFrP(08WvIPoqg0=TB%-YwWA?EK zH0>Sc&1aejQwmOz&@B}3-^L2Y$-b1MP;rdBFjKLXs}%G(4zS^i-y-BEQFaNB4ugfs z_*BbY8xg`?P)r}AF!`o<2$H>=Dk~6$d@}Rms0&OKVrUMnWN_rXx1OOX4H=wWG}q>a 
zaPPi7Y_q~)rWVE8=Gk5mXu^>y#Cr}2_$E9MEyBW!y(NmDciM*7v=Nc9chb2GK?y3) zPKiJd)1@O($ojV+O?$9)FZSGXt;^1H`3@->7ssSse^F?H3?@Wvln|dg!#YbmWFp;p zuSFC?r?k}#w+w~Ds&>NcJiZl(7TUvPDYhp?pf?=f0I7jy>BmnOkMk-n`^*gvqUSO%l=NmqTpZlJPt{CG4;; zHHzo8?#PyS%n=dPIJq_7Z*Pb6bX8sq;qP`~zfEUh(VIzU|0fcS?%AuWeWsbP-$~lb zH@TPBWfBXzT6AhSaM-XR_8kE=psL_ONX01yWwjw8$)|32 zYiyW^R90I%Y`rmpzI!nj!{PU*B2cyB@dcI<(z5%ACl(dtz5&x~>Kik0*jA5O9!ZQ8 zDuQB@JlMdi7xG!&1^=PMHi+OS9lseXIWAR;LYLgn>swK7v3#j-6^VKwD|lk{3~LJT z?CdRP)@-wv1nG1`-&3I6 zxC`W$=I9{rI$BRG;@MSJP=@Lbq6 zB*5a{6ke4F3;J>#7j6yC>)`}iA|xABoXF%D|8qgs_{d^=MnQ_yYq~3AnFw?x2kZpq z6%gUI?IO`pm~Tb&Yj601I_Elnk*|s1Rz2&zGKJayot5@S5$R5)!a`|BNOP==Un}pZ zc7y#zM5-}5$FAN((03TJmL=9#n9sc_D!W;bIfk$=`YZxr(IQqS$&LRB$q~yg&QI80 zK|gp{Z$UXX72=PTmr35~te`I~1KzluzZXnt8O8hpC+!MBN;|X=A7NL9IA_>qyM05D zLGRPOuU)Nhc!!$m&ILSsShw)O$!Aw}JiX6u5@gJr`_f9Lt!D=Y0AA!VwjB~`P_t`_vu5o!1tj!oE%lj>_ivZI^cnD$1CMFgp^ zdw!ErvMmn@V0+3`z?J|zYS$g#yV~jmrQ`s+nC2s(?e1_dh)kbFu%#8VmxbsO^e^+Q zJ!Ig-ysPY(AQSKBH`?Dr%!!@bJEpBa2RnAvpDwSq{|m9K4p{E?&j9JgZ3xz1*+Mo< zqsgfMa`LpE0W;^(L5teG!hW1mO>?A}G+7inB(-~KYWMck?#k5eXH&cDtxR-!__a;Q z6)mypg8s)=#`4iui`j0`7_=dn`2D5Ae*WSLd5^UTv-=}@$-eIl#q4^$nM==sbV8_G z>_y@5D=vO79cZtL<`+xP?qYF&VZSHHL^%Gh5_Xpe>R8OCXIqvK^OaV%mv3>CY_*k& zNbPtJu~`a-6!Me^vK|+OokPjpM|E~eB-+-t zITJy>4}};XC-G75{Bh$hW{Z9Cmp-db2MyKsBT*PJdSPHjmW@cm!6Qf8_>lfs`7-AX zezjGLLOG-#G0bX1JjlJ~Y`2U5*Ovf=3uz17z;B2`mmVu$Z#O7RX}B`cCAx>8@5>43 z86|B{NZYGhZyTbp-yUV=m7jTgIHcE!fHgPM9v6Wz)KeoPG+iz{ZCpr26|!6Rwr;c{ z5vX%4TYSM{A?k2qQDsMMl`RxxeT~s`Vr_^+G88Ky=O8&f#8TWIVvm(0emvFc1*v^8 z>cg2_;;mT(T6%hUv%MI?r>5?(qv5-bvZe54?84`K1<18Gy0Tpa{csN&Hu6r7$)z`D z5xOdln)4&Zx#3u5-`;JOZIqrU~B)(!}c-Dy7N)d={t%Wnz z5&#M#o9u{yU%7YdWG4S<=e0ufJ%AaEU3R{(pYYjO2IqX+O%z|Hv%42qzW_%e&JW}K z5kbF^%W;DY@l`s5ON*=|q|`};yY7^9KErUAJU>9kme^%=f?COS`<-ozrJaEdI&<$+ zEZtR}v?K2Cvn2HpCXR*4X9y9|_VK8F;d_&$anDx12WHw*!>b zawLIn6(nhX1`fEM4RMZjgNw1wxCob{HwAo;qcIIvHx&{yse1mfmtAzg7jvSb8r4Mm zLV$3F*k$Z$!7$}8gk?8`XdcAdwrALF>FC?#K6bdUFAP0gCi-|0m{~j$t(NdF?EL&d 
zI=I6r{OyOqQ4q4pieYq&`B)}Q2cDVLU~?4q+sFeWY=saVcCvA9$Kocd3JH7M*vD!V z)}c&>UXhS+KF@cHNcX#;-XV$X--+UOM1Dx6MAl$mdma*v9E|ZPemw2#!t|{K7wxJ5 z5U`BcHw6BFr_Bx4Lv*SoYa%H2+%HJ|yCFK6tuQrcpIOsB=c)Et`z$}io|rk`5+Rns z$SJ|?QI8xlVp$pAS)m$k)H9ur`1QHVNz{AwTUn zyQ6<)?}krf2bS>@XXpRQ=S0D0w(YfV2!>xUy$`wPRl8bLIwFSjgN+vO>(O6LDdxVA z0K2(Z+ZLeJ?^IS>uEMkeGf2r84#=%Mq#O^jX##Y3rmkrG+|CSN9^X}AjUi!Htgy&h z1ZWh@cf_LK5+YOfT!};OvSZ=9zI}SwWiJpJj;7KXhR4k^c2qxqM;9aZ^-ZPn4uINM zE^#Nhh3%`Rx365>zN)N!;DZOGXaVs{A9f(>}%Ac zyL&1+nx-I#fs~y6A!LD}J(y`W;6-0PS+H!`lZ5Fz?$FHJp9qC1xQVOwS{oQBsk&9+ z^>nN#OqsL0SKA{BQ$OTV7rjCAoul+&Cr42^J(OJPTqc4ktu92akTbgJRWQbbA}8p+_Y{IrnY<+z3;Tj?$``|KIf$vRm= zIZFlT&F_BqyS7PT-&c3E?y$O$4u|%=ZBK|hzPF-d?qX{ZWLUc)lHIdACUKq*1kJ-@ zc0l`e_&|m-`*yXC0J%=dli;7z$&*q1y4WWos8ed{-2+oSH!(Z^C5XP0Z;!(uzebqK z@``6&LinbBH`xyr_Un>gEBKQ)i$F)naBk9W4@ui+uCsd;_HDc$mi|~l-?IqP(QTN6 z45=R&INBap*jJz+l3!rsg{dX2Of_sxHMHnaK3_z#Ol>Md$vmYK3fmU_Dae%!UK1MPywXw|pAm|q#3R`lW_pT^hA*lf`r|_C_B*BTO z|8F2u2Y0Kfwt<40JrUOPAOULd+JfKje68yDdp&`~ynj~k(0!-kX(TERIPb;;pRnsZ zyl&cfA!?V}xxl7`loa0Oozj&yFU0le+s{@hOs2A0#(1s^kUK#mr=@Lzz84TrhHJG$ z1Zs$iL(G~4d^hv76R`u~V|R#~X3q;!h2H&p*dG)oqi#KVXyWy@w?&|~+0H3^C&Iof z2arF7#I2hWh~+T0m;RQ-pHjqgOG2*`c8y|u6(&a#d3K#J<#5~5MX=d^D1x>c8M;S& zq1sdYC(N@V>!uXCBkS0LyJfiUXl0k$ZRxCe*Ii%_3e!`km(D-AqSUeiY@fAW?+oJt zWG>Wa)xB^kkR&Z8p8OsrGjC;)^2mBr#gFCM0$)* ziYD4|A@L+6{i+iK|MClUMr1$p#^sfu^KaK3ghF%i*%WKtk+A;4)Z(4RQ|(6z(4Rc&@6@{U}XVP+k1=S zwm5v8K>jGpmW7XbwchRCHrI$irI3WR%^|6TNifkaM7HG92cIVAzw9CQzNGPbg&@t9 z5xiN=(z3cPBq05R(Hsw{G(uA>w*iWzO@+vUIadS210gpNYnygpZ?2698J?O^(=l_p z?D;;B&hXA0R+%7o828h)Q7i<>o8b@-rb_shqEii3SPZu*Y0BBDRcAS0O$)h6^RaG< zJry7iiPC+m4)E;q)n}Q3I|Lbz!n^}BtwX^7sygnFI7IvG{qV&Y>Ia? 
zmkH2ysb=X_*-e7uQ23*4*@WStTr$rNVwYV-jQeEjKTEU{#Vewm~rzhhScb z?GvD$IoOhI4H1Ly#%S6Z0V>UfjfaBwg=pQ`6?4zdSz(_9NXHE14Dy**i1l?Iq@nvS zVQr8yej=M^T}7bC5ktm~v9SUa*|&cm%U76u;Sg@byil>A-_bDE-nusr#76uv4z&FF>USax)fnhoS8lSwaJrD$q~ zAvazat`1+~5HM-q4ghl|+wIPfu9rKK7lkN%*7dMo2+&ojnpsw=F!QFk(?>1P#|F%emt4J(9yIn+I-3&SDdQg}} zH*limr_-Yu6mMHq6d7mpMf9a|z0_E55``8OyB%R~I-W-jZB!{ydla6&NbX8dj; zjfBx3I~fx3n}C$9eJDsfPt>im&mJM{OB5f6#FvZW>pUjPDYLsUsq#d7KK~#^N>EbFwbU%a84d9HifCFbE;VHA)VL1JG;&j#nDO(X2i$a0U^3l`hW-b zzdVgfH`?Klgli!i@~a^;hGzu*_IetkoMzYlK4jNgh3Smz1L8o7q)S&JRs{T}ES`?# z(Z-QfGla2_=(#hb=P{b6=m&&oe#h(?77O9Cm5Xe$!h95gZ_80?ieNZe$RPYPshwq3 z8Pc3xJynd2{et1A-#L~2%xe&s4qb1sOBAG9K`l}WO;?IU$v5@C$*xhD(uPK2xmZp7 zVTii*`Wvioh{}r=;^4(YKYW~C3vEP*=K5Lr+F~0OzRZortWaTwfSDSzp9@Hv32V#2 zj*fRNw9~=Wa+dXU_yFZ_PN@O>@L1_=+Z}+jrS`u9RQCAp-5neD*slcDG%1PF!JHmH z5XtuhXPiXDzW(|-LIw}5S$3}o%!ovC0%yq*A!>@L$t;^IKv%o-8|+RT)|OdCClML- z=5@}==u%rDqVJ2>w;XM;s*p{$0l4ekC_pV`j*9#4^McxIs+Y?!&8AX&RTR}*$6^T? z&bN+`h-o*GCp}I|=e^-`NjY^gyHJpZA*~d1mgg=Kfk7NOdZb;dFsvsW)O0l)AN zJB*>l{Vh?b(2-5fOw8zzIy=t8XEL2y-rAec!V;x(rMt80e&+7N@DUaO>g@LdREEd7 z89Y&Lv3EqEQ;Vc0ZyiEm>93{__FAu_o|3BD;uW6lPEp*n)NVsD%~mZE-9H*87+$NS z299YF5&f@NNhH~}y$FerS4E>e9J$Ue*=lcxtoV$37;=OW`=dxykdp>i!9wIXM9?5O zK4)s<9D6rV=k$iRZvSy$81DG=N zViV(HsM@6>&@MKQr%?_2h9G4YkC%n7>p~QU$PCIOdKzO#OZ*)J&O^*H3XwNE&j%Zt0V9OPz4kxy*)4BDKpl_)zSL~A=Aps2= zbvLGO1!>f9o*Z=^`cJ?_-z9D5k3({f-55E|Di-jE*7lWC0#9~jla+-;4`Of8$_0Gx z1<38$EMY1$vM9z`4r#Z==>$CWB<#76fX3uGY4#UH2$NSV?mhO$G{tLk+euNV7kYK* zn5J_Wh}(e)_Sv_6?7`g#Q(hE+(?K%KIj5i4=Any$MTLbf)zel};GUA^bLl z87`;kvd(e@eG7-P$gMCW?bWZhRVqx~-9?Jm;*EAZP{tK;kA$t2YbVp+kAbaVr-aEK z6?icn{wzqzkx3EzC`8dW7z48Z2r75!mbzz?j{`+BA(EfZwsr9zNps!B-%8cixCgTu z8Mu)E8|F?vWvTrVC8JP2d;l$)YXp6p*nzU`d&2ZDW+PBB3?LuLHXAFzT;jPPS!_QQ zA{%F|XR|#l=y_MVr^?W`JKwe{%pyBhw&vKB z@?%q+r~7&lDF(M#9kna1IY26pHj4d9kSgUwuUUc!F_wYV{6CGHQx9y9Ht6hsY;B;uRx9znkVc$Vf(3QY=r|1Dc|8sluUyB(jBK5b53-dLO`pgMo|LZ7kx{OMrUG)w$ 
zH=;p%UaRYR5$OvV4?qA_i05G%ge1szBv#mPG(kQ@U`Muh+6T@|4ZW?pBqoH($O0ATif=VV8_sX(Ywo4$}UKcG?%4tphG5r}D`tnQGE(+}& zjwt5@_9H=tZCsQ~z`G%is`MDp%gze=e(%z&e^>jru&=|IWJJ8^D~|ch_@GyhK6bMp z34*L|;l_9yTHF7p6wPt9)gfjTNA_e9`^p zo*vR8t+n|UQB)WqX;L)VFGL|WGO^gEhY!Q0aH*i`0W%wZTKJTMTmdeLd4As&D3s&8 z7{_h5D6CQ_!{JkCpFAKE`S%@w0-nOu@}4Mf0B;GPE9)8F33of>MziqE185Ut@=nMX zFj_VzdI?jt82&G_xHZGoVck7S=3Q_Wk%xU2~oWx))v z76GPKguUKz&t42U&_R8$b#h2h1(mPu)Dy)|)f#(MbULk=h5MSqnjcvB_NLsiXzzF zT^F~nC?R=aCjs4j4^^kymCo78Ji96+$d1S=tz8`;9+lhX;ZsqNet3BXW4wza$2}sD zK`ze{li`0Q?Wci4RSwVK*s~T>v<~xyS$B{)yO|=ol32|7p$Jd7) z4{^!X>H@p+6yMQ)=F?8yQSJL8`2OkH^G17AQ0-{io!W_~w`~}Hw{ape{l$p zp;96#v#Fx_16HbPi93g3@4Uqpgl`$X`{eX-zc6ibTG&nY>rS}&$i5@{?KMHB30GJ2 zi3RBH@lk%I?YwtEe$oum-RA;fk|yFz&CdxiN#vmsKaEqS*q24|lO3bc%>JuIU?5Mg z*>B$tk+Lev9NJ{x7eQ0IxjNO{ipLv5>5=tEnrylVR2ObOeAJLWRkfd!jbxLziQ>DD6U%O$O%E^Oe6b^> zd~Z?3yW8uul6HoSZt4yvu2sO#V2noE5kcCR#XfOTVc!9R@E(IxuRw4Yr8@Sp4@ICy z6Bs7b-Sqr3;Lr+A*8H?yBnsKTu#9C}WTOQ6;J#SF!g4F&*#j|!+VL#gWFIM8}PuSP(u9(~n+FwG7 zK}g&sd2F?_q|n^frid91B!6q{`y$fo!(#lfcM%}}jzt@+ufqOjF&LIs0pb910CrnQ zpIw|^Y_|*0dS_m&(e4g$iTvVR8zw;SB63#&D|3a76oCr5)w?aVZO%E0v{apyUdvS) zCUksqoXdP%$TzV_I?GRl9OxYHuhpIrh126b57{OKsSAho%GVa!0YQrA$I{!fdL{j_ z6Ej$9ucnhUVv2dO9TkD*_u;bpH-*X0%`+S`hp(M-)iW|McENijka-wg$*_H*7>KGj2Z&$zcQTuwDYxB31vC^-U)nRt)=M zjxe<es-(Enl=cNGY{?vQJHPi>~V#e6M1@m8XrJOJ~5L7eB)5=+-#GD z$;6!-m)g$*q&}W{&6}PPt_RSG% z8co4EQ38`lI)+mI*kdgrn-fp1@0h>DUI>uI#R9Xp1Zj+X(AwJqeruMpFpAn-1(2vc zEjpu4#obxKt(!A}hy@B9j-b7-gOmN4l=>kn7H54xKp5QQuJWi5y>Y5~s|+oU7YwWE zX`W=cLWTrbKsdg8x@{Kp4Mt=s$F>M-?#bYY6jyY+-6kRd~V)NF%0q2$l1 zZJ1z~BpaH&^pTAg(KlyU6l+Lhgy|xhla*mfA#yJ%fe|xVfO>WB+t+@fF!joaWTGQy zPX#zQQh+NI+a;)7Nt>EfaQ2Ge=dETVJU>lFxVED6(|54m@vHVQrKceZbUdkQeB(o(g9bS^5RxC!pTS6+>0V6h>JU&kFf5ml}8+wd81rl zx?AY7Euf;}O>&#uX55gQv`v4GCg~ou&^_qhE9p+tC8kxtpQS7*pm0m*q6L5aR76qy zzdz?Z>Gj`NDd+Q?J?{CQ^PFe!x#F<-cTuRmZO0B>erW&5<)Z77zL7QlPyB^63?_!} z=Gbilv54mN`rTP?Q5YSXCm#C!p5V=c<4*0LcXDpMB}HLkO4C&~TwyvW^A6HD4a=?0 
zvoO_YAs}FEQ$?Z5lr-noI^f-^!qy2;*r-&IZC03?dms+O?s!tv*Jf?X&AbzOUVNBfe(v?#+{j9no>#epBZ*RFGT zV&a4ot0ta@7{Gn=AU0;alnTUb_)6<{o~Ymp~h<8B+jCt@65c{_tM_e0N9 z%r0re1>#D>z|s2QR^$Y1K1g0?kENZUSD=EY7L?g25#riAvKwyHP98{Pe*2)+D2D!J zw>)Cw6^?^fT+IreNI&%+$Um(*C+Y)KW1kl_E`JTRZjLo3xm2i?jL zD>&$&{~+`opA=yJaSXE+&d>7>-`ct9-0}u{(_t|FQfpoRMq(}b;M4?S9X;(SB0FqL z-KLh>J&5$Q)U6j2ps(n3kEH;HYV{XufMTkz2M9<0)acVi*AQ z&gbAW;0eKNF8T^^eKDst~5`lKM|)7KiTUcuJi3NqT( zC;}^~k6xDT*Pc;>g6;m_IXts*{;3@c?T8?4`^dZ_i|y|@3Xawgp|DT>oiwykwih`MeO=I(I{=-v1%mY9#-FvZ zWeUd@pi}?@%r=NZ(Gsb)t)4T9B_EVCc6rVd*r(j1aIAHJ)VrHKsf$fL>ySD-cfQ#I z)RAG6WT1E}8^_|>V}DdCD&v%!vf~2G3(f#}%>O?jdci9zCph-J4@ICtiJ(g!EFSDmSJE;f5MN8UQ5n_#4 zGOV%-J^<7DZo_pBzEdd1;-$69`U+EYaI-SUGJ+JH&-udk3ZSYTjw1*7Eg%l3uaft~XuH_;(V9ud<&g(~$JlJ?0D z&zIR>7P^ zG!K99GTd%oR#3lXPMjPgciO+boN%VpseL<{ZuvK<{E#8{)H-oD+TEg(HweQuK0Dre zi$MEm@kpd1Djg0^2fu!k1mjTqFJrnWS|&lIf^Xtf+bt5^kZ&TSCbdSU*pnj0tpYLH z5y@iq)t5zMp`cTUC-~PL6_sHtIwBe-Zx86W;Vj`JLt2+XaTOQ#?!q(_R>MrL_dNwE zOA@sSdr&BLb_h<$5uz3UAR#JcCP%Z!;5x%5h`{I5JZjjArk)cegIQ-mkIjta*E3fCfOx#&V6 zA1DtgCjNqO^1a?N!gPBKZ{Idqz|T56q+h}vHqc`C`eUDmWvWCp2+!rxoY>{?+~bY% z2=hMwHR-*d)w%=G$9d1V+ zqb-3QPtD_JK1M8Kkiv&v(!L=?+n${gR`d;mbdH1DLF?x!6M51!F7T*lAK}zjF2KUo zCgSFpW_o-zFF&?efHC05qhDR4E%#V5Y_T;0RLXo0NLSkihvhJ%4)eM2ytp5exu@;+ zln4w-t0#}$_Sug|3I)7UuT=IGQQ}t4{Y45{2%KQo&0eQEeJRKc{WZI#)G-e*gApmE0Ha}CxqyPlw2DQ*q>UV{4{U2TP`3rE`mN> zHnE^00-b27!BWm1dFkom6gna9JjauJMPXhM0EW|;v?5`ukl7Pf-e`{|LtX+Cic@nK zk%Ocy64ursyT!GkT?cKgh%_2WF|- zdX08aG)4kOSIV9e@ZYpN1)*3VieC}LsprPeyB+rv7+vpq>XzEQZsWi{L25dN!umS+ z$KMvDRkBu9CZopPJ)TxoW3TqR9*=xTiift84fA*uY0o^q!X6f6^EtG~#w!?q8r<^I zwm_KQ0s_UhG#6Ja(^ad4<31O-e(2()N7-smg%U4rUek71n2z(vvf6&*S$LPK!p{^^ z3wpCUA}V8y2WNJq=Y+z-Se)HD!CC}qL5D6~?SB=H11mjC>08=IJ{dEU74?X(3dQzw zCERK4a=07j{cMOZLv?Hqi((zm{tt>k^LqBDhD<;zSL`<_)K*$2>iX ztVx?M5E>4z$rcIG(lp{$`xT}WSgVjSS%5_K$Gh+o9tFdwn9Vyt8W1kXuPKiDf*de5 znzq;}Q52nr7tOy5#9wLn>|^{pSe*;>*y~>M8~IPA(|1uGR^zqrGoOM)3vTbw#qLvB z(Xw_f?X>NYq~9hcby8NM1PC#PD-g|C96t8ke4@Wb%reUCxXn~DS{r^a4YKGl-%DoU 
z6{IqJDpR(+HIcIiaZ2kH66MP))JVJd(-g#LotU&puh;Dyl3*}q3I_`^1jjZqUwHyd z;JtV_FF!P|z#jJ4bOz%Us=Ecao7qf_=v6b$V1cN z=Qat@rVN(nDod?^qAF9dN?*;uwA5TlW><^;R@`?(K{{JPHfZGbQ z&&t2`LJ}}5-BE>Zr!Z5{rz#(@#jZm9CfnR;V-%#B8AL~r@wP>t3FE`I!o&T04YZXW zKC|?o?NpeW(JESMy9DS;0pi8v-|cbS(ktx)kJEDJ1a`|sBn{m}60DxL1ZyV>P3&~r zZPr2I*hk*uaa_XTy<=+*NsR>05}icxlcapk;KgR4b@pO9+|t?Z@o<;h+S*Ws=`{jP zeD^AaXm-1)1y0Oks!3hfyY861u&_4bwknX{5}-)8^k8G?@MoZwSox&2K<*3jrN_D|0{ zD3i{!k6aw{mg7l`C;Mw1((&$CuD?!@Mkfnp+4_cDB(^l{7Kd0BT8k5vTOD$?oC6!= zPC;6DxE4%r$L5_64)7Hck}H7u#gY z9uubF|CMrXA8)6f)J)B96w(SbXk*?Gt-!05w1@qN2?2sVi((eIl@fhkI z!sEw39Tufa@R4cPf0l%_f!H2Dut7qxjVZiSBH83ehxXV=Ldc3PBou=UrCqG0mGRCD<@QFgE&r zyqnoHVU{{Wk*Pwi#2)kXvfT0PY+K=w_@T{>_Jp9$QaH(#e9T&j5&cd?dU|4Vt-a>y zIrQ&Gf$(pFG&wyI+pQOU9uP+X+3O9wa}E*4eJ|EGH&5oqT+C zOYKzqrYQ7O%xXT)Jp@VIzu!Hb>;Pjpsq#$IgjJJ5H{twZG>Q#1FZ= zc-;7fSfv;r+>^28q1&ys6PYmynlCG*`BEoJoS^~oi857~Aw;M$VT(OE)1f;fvX{$6 zVTqt)R-CaXJh2p_?RkM%5~8Ge_JXh$Ngd~d{r0BkOplO2&Hv^SA6@qBqxQZq)gr=` zv5#B=U>u@UY&fe=v#UfT1;4s@P}b?))2zEE=9;pAYsvt@_^ZLi za~a+VJwCYYJYWww%&u7^)zI=>at_;!=^_y*dSJg^wo+k6q>B4I-s^>EH0pfS_A3V% zUzulz1pJ37fo2Q>t+eMvj14J5rE<8vAVU09g=6E&uzr@>D_#^_mX!TLfWhD$p|W{J z3D>B%JvyT|&OZsn6_OZ*#K{F;ipeF2imPl42Y*@=HWOCwcrNB#yqTY^_8Cve!hrcH z7Xg=uLZgFwD0E>q@&0C)ibx;fy_eW!0zrNlA-M%XP7d>!8?ueNk#n;fJZUC@9p_sF zV!Q55BT{&mN5UM&ey0NtC@JeF5I-#X#vWUVerHCsI8@xK;jsT)1V$kk^F>-_ugw!B zR?uIDX_g7IMCebqVFPTP!*HiMV4DQBUS-B2G6E0SPEV6vg-hQC+a>6gCMvlS9(6+4 zge9W*7ZKvZWOvjB`lAMg1)*p7lzmL-{KB2W_IHuTrzC@9*XQChRDNK)~s=$^p#UUN6t}3(Hp7P|@{kB|TCW+I~Y}+mnwOh2R>UlHV z9u^iNC+S{R17yGlqaYjr(LOO-Xuy)F`QedeMdM}-zSN04u>daF`iZ~ z$b9fS)+^%%`-Nxk+q{FPmyAuwWlwU`nJP$IPORQ#k9pL&aa-TozR%_h#;F~UmUdW^ z2n+*kR?b4d71HWzoSWSBarl%hL@>j)D1W2|<>tG{bl>+YMuH zr{#GDZhjg2sQ?`)!c$?PO%;k$gEXx~xf*SOX!Myws|1zT*%HqW?|7Ci^ZeX1XI0uJ zp*S%kns-*&Q^MK`c5Db2DgLdor$tm>HcphF)G0x_*|mLF`5dA49E4HM7xHe8gji|OzeL3ghz6>NZ28PIN4Z}V@pih^CD3lzsm3sGe;8HFM2wO>hS7c$JhU5CuuLi zKOOuL`?E+s=sKf)=t(Fl!zxH3CGp9xkc3&zAmUSD7YfBeg@?j7+Ab1>ZVboQ!Dl@! 
zy5)T3zbZthj_rZ9{4IevJzU1&N8Ka>Q;By)Bnhp|6LYAnuv!7iJiT+FO;K3&r!ufP zj(M)~DlQ2nc2Y2wioGNb1@C%19cbU7qh0=0;(ciFz_@*CcH45hLPTn25r_|Vr6A3G zSQe!_cpUlr!kOqMc-=);0x|y)^%8K(xuy=l2w753l zz~Pk2G3$gcP1>r2!!XKdyVd53h~H3-SC4p-)038*n9*Q!1*x%9=XSPNVcL}*$)=HX8NX_|)g68#@4gJ^^;5ZVQ%IXRj)|ZK*Uafg~lHDr6RqloT*3;?Ncj+wsX~A8==3|Yv)#Hy(!uQ5TdtNZk zM@jRV3VTsFK61#%hdf#QzScc`98+AJ=EED|VQ1`LqA+HdmS;2Ue)akI?79uv^$pff z6h@JkX>COKC%2&@(buXH?xJ%AT902lv-Oh#3%HdL{IX&xJufY_nv+8MO?K>Je^EHL z8b^h*>|Y*tV$VLi_?nn2AKyU<`;t&h`TOxNu8f#@fnkMQlbNE`z@T?d6C3QyBE?>E z4X@y-<}0F*=hT9|CzkE7%LQpjhc+GT3WXV7W@30!HoM!35bF~w*X|Ie4~Lgmaq8+O zMCXgDimGg&K>UGod4m0rvW!U7bYkCb@ltC&v*f<2tWHR~MC+Tml*%6UBp5B`XtP`> zwmlqYYpq!nttP!8z!|YuMW6{~{7&t(fRE!M4vl|y%qOxm&A0B0UyJ1hcO(bpi=>Sx zl&9-fz7(+9i9nNs(W!mbOOWEPyW!feTVDZ2sx4EN@^nG)eXXTED%-d?hAAR&v$xnt zPuV?<263TH6pU?Tm)&lYgxN%YRcBK@B`>?!>aj%*hwv)KjdP`-zjyO1er{IyEHV!E zOHX)e>ZVg0r`oRtX=YhJ93a~JJhcL-Y^E69IA;$Ej zRS5eI_b66e;RqWeL`l3mV4n~1`#DY^8V?YeVw*1lO9Bgut|a>_r;r5uRG8nk--tp5 z$SlvdHyl(+!~2W?L({QyCwog_8i{v9PEBLZW6C)?6ZM`8z8*^na%PtZFsDIYbAp*5 z!z%qRc&14-PVK9n9vP#g-RkKnEV~K+je^wMxqUn9sWA1H$Wq?=j1BaB&(u6K*&gsn z@(q@<<_{bv%a}e`K2wD$=-gC(E1vPl<45M%;{r5LMjY5=1B^3JjB&4s`EkNU=AI%i zP*$%T@yFGdr2(gQtCt})X`aguJESeVb=#8;&rV%;ZrhFsDKQA*?R;i3nk~;O1=C!T zrm^;WArk1U{_I#SSNpR_v?_zEJS z*_Ce)DK;DSnoQ?(^-|ijy-nt0yV(6AF#jm4R9S&Q{Mo!%FO|v{4+eimyKKezd|j09 zHrp!#V|a4$*i$=e?P-Unc8)u>XSV%8Fb-*@R14qqXolz9l4WMs6V1;?GrnzncH3rb z0okP@(!o*rnZTL1%S6yLNn?GQwexu93?tqUI(j@!mC=ja1Y>7Wl+Z3PSQN$>Npzn5 zhYQiuvIOVC5&_o6>8bNP?#~Lp<>W8*EUHcx>k@ryd0nF zHYJy&B!&GQY!n6(1-`hEJL^W9rWDS;+Oe?nq3%gZR z*lrKMF=3`Xr!e(bAd$=#WiJcI4W&om!@VK`#a2oTywYBEcy`9Pv#a;nYYq{(mkrUZ z!)K<;Ii~$dkP&O%CB?4~gd%^odopZ^GV1WJ992t7$6;<*U;GAX87WwaYhXew<5i%o zNO2Jq7d7v&4#G@83iAjy*ug84S;|EMRGS{9vqWhwAH3WGTO#bMco&PI)_x^Q98no_ z3-<`#u}sCfON<)sx~Js z>>h<_#>sAdUNvs84~8*0VcI9YMG5hTFoq9r zrQIk3l?S`uv#hrum3QLGJyhY?+hN$=wI2#IIu#Yo8!h8NKGbYXYfSU@Q8wK{&g!C9s19rEf zC@WcjEb~ACB06^MWaB*CnfvZe&mSc|tw}Jh-tWXewqK;ULeWH)iMVr4IXQP8ntX3I 
z?zO*oDd{R?et3&<7`|;Y?Bh360>=TI>n2cI2w&>Yh!%&kPxC&kQ47<)&Ye2h9SXC& zN`rO9Dvvq0ZGqKzxD!0p=?cfD^3j&5Pn#hUg~Ed2&{AlRI%dUWWD4v*@K9Z7W+|SE z^o*9VmxbaMlaq!b*Q=s4#MxatBnX!kWLWV5GtO?g1wgaAb3$sPFcF;j@a7}n`&E3V zgs;Uu)HZhjGXfeRpyU{ZW{E;Ao zQ^fNI%2h&i^wgZ~HqsNmxC8T34ffxHab4$Op&WDOBEUF$=>Pj;(?yO8DR9x}*nCmW zvmVejfj7IzQypKjH5k-eVD+NVE;`b4P`?QEJFD5~{&X9ZfQ}U=IRb7K(D^)kal^k0 zt&RYbRjVuJ0zZFJLBI-Bk z_8&H={q{T0#n+nsitX+9BE;jx;Ho^HT=82}Xg9TkO8-zaK!xRIA%9Q^npJwm3ij!E1QB07c?tqAcKUzWfS!GoTF`7Gr$fSpk<1wuEZ zS;OX;5{(Yu!g+O6E*|5%a~b)q_IR!kWS99x?iL}|l1U*o`;2gCEUfu8YV0ebXbw_X z_O!Qzs9;!+KK98Dg#F2$>ohjh5GS{bL?uNhu55i3X3V*8G_iueCm1V3n{2p!Uzp11 zfo@kEMt&elEK*lQj7aIbJ3>@U_V5T3PLsig-f~Wt^KG7@G^^vspBTH(mbJnO+fQs+ zX!`_d%ZWuh>~{*w+9TII;snwA%#Zw}wMns#q)!k$k*2tmBEw zdWfR!rW}QhdPNs@xz6rh7= zr;UAk&Vsjg&a}TeCEDnc0qT$6MjHARp1fDvrv&N9NDS})m%?#H7a)#?(RC3RoJ5VP3I7sejjUxUCYK{>`&dlE)_)Kjx< zy&%oNd>T(%oT7j2@xdy^Hhae7NsgaWCjMEE9|U`tOXi!ocz6jgbwAdH0vW)LD6)K3 zVOo!orc%3EATGZ2Xlx`oy)(PV0On+*8*KEVw}>_6y`u}r3eygL2+6hexjN9rO|f0ZBQ%= zDC~CED_4j1Pf_Seul%ZnZYdw{8jB2;e_^h! 
zvQLQ;+ti(V0%ngyU}BL%To+WRHrOp9(HWi>c~9o&j9i)Z7cq|Ly}ZGEUy#+AIb53< zWeFk8Lzmm^X9{br$qb<$(gF<_F^+W_radEwchsZr#=>8%a?dVSn-&a^uvTCt2mU%9};C6@{rCDM9%CN71 zff#)%FA+IzPQ!RiK&j)aqR^0JX%ZfC)`jxZLQ zXLosYcq~H5CKo*<*4UW+A`7NB;ZWZ z8gfa*TSB+8+;)1BQnXCj?B5fllA`e0d|yab{I=;iBHWpfY3JrtV}DnaZ!m0i5mVLa z`mv`rOt*h1nkhp=8_BOb0CBj2HN}bcX;EV92c&ghyF>(LqC>}acBMz6k=($&YO8%i zgjiR;v`jh+Q+R2LedOB${_AUcem&n4u_lIyN^?%FsX4iMg*_$MdL0bMVMzy$wWmd= zyXf6;EPur_6-GYZ+k#hJbQ#kX?gF@l)LIh53hNq;_P*kE?hY3PZV+$~_;-d~eJ2rd z*vpeC`??^_<&7+5Jp`z6L={g6)<=kO7{HVUpHV*)fpO^Ev7;qC@&TqYgQkNeMTj4A z4z-19^3O$L)grpMNs=qawjm-gPQ*A~M^_wu?Ek}yk1nwpil==V+*fOCc`o_O6OV1M zEyDhkBZ&bFm2wk8-PLv}hMshh@9U=pXnVW`Fdo0lm4idngq;*_{gqUtFg5VeZ$pV4 zC`hqKGXh@}DXxUF=2bATE){{sW5Bq1KMTFRV`c%p>ngijn8p*moJZxpf(!uBl6sYu z$!#`BL|<_69{EU)5CY8FtCKcI zn0jyH=(r%4Ivz|SB~~|FkGPwZg0|#UWvZ<~fF7~eN)>vmAgyLhdf#Wi@v`*NxO%GW zC_g1iEDPh6uzWl_qEFkOdBpzW*?CLjG5NjL?Ca)b*H6!`Yp{QaLe;2bCG4Z$0mS`| zA3`~bu}S-aXe1cUgQ;CAK)2eWInz_&xXOAa6WrnhftJXPoBL9{}5(GAj~Rjtwp}Fc6_q%_w%-X+L3lj^zaY$b2J@nS6rd#o5lZ^A z!tpo3_Jp0qzk1F@0m^^;fG+4x0y-5eqa3u)3dR+2bU7b{t#+L#RGbQCReR;)@0WMC zLK`YdtbyMdgfbE0w|Zc%{l5^E7V<$%*$M~vSt_x$0&0Qm6K;x7KO_!?#kNV2^rgA6 z!0J6AJSnt`tywS*GQtTdJK9t37(Uj6qR@rs>$$HyB%sw8t=G?T zC?A42YJV!187s4T6A-HP_qqN}$2sk0~#)pF4OC_Tj!T815A0+?HK4 zSyvWWkK*FeVE216jDt%idVwGv$RO^Ov@Jr6Ubbo5`EYk+_6~Zc;zZFXF3i6YjH{Cm zj(#~J#EC)pL+`0CI|U{!7f{(nd{-38E-2v0>jlIM1}o*-UPLnPrzY&1yP@wmV zlPD4R{?V+U|FJD6rhiINj6gpOT)pMtH=EYj1-)aLSXGu?V7pS7IiZMQqwN|Y8h{S^ zCiwH$IwZ0!n_?^ElPJB5Zz(p8Q>H2}lR~lZW|8O(6IEtC1n6u~13(*lj@>H?jq2ZT zpncC{DO;8tc$Irll(-%APqJZF3)7f$Q&zn_uE916#$VVFc)n?!a(0MDf%#*qY@fo^ zU0PLDU}ppVV9U1Qkdat-oVN4X9C&*@J?1>lRephq5bR;h2!iN zq+z};C@-^_qR|xHMltNaeY(vNCC(FWMLO7`9OmgD6)`PKrz&wPVw)6A-6N`yt85Y= z?wPScqT`Q({6fRG2-6{)j2k)TT+uJ4!d)t+zU?N#SRwX2%Ir3emQiairgsUdh0gj} zXLpN2*H7A2=s+K;`2bIWRE24DU4LlK*23;s>6jDZqzI+nu)>8dg@FapUtoe4eqBM7nN4B7`gCk)PG%MRVzqI#qjy1`3fZgVa z>HP3?2}06$<_KI=#b6h~c`mKis}9)1qQ-&NI|HUH5;itR3%Ad;oNgv~ag0?7%2G+2 
zCIX9~JUzztD;&qNzxXA;%oQNE9kLj>TJiKniIMhKh4sVdDHFqDLUb!xhEB}C1sEQj zJvP3$%|6y2$Z~C|-5#vQJSG{DkBdl#)L>3Nmk7jJ$Js}A`y}fv3Z-Mulx`=5V%vD^ zDYVhTaT(mup}mb4WXpWvfX(n2W?ODXXA7#4&FgvVUFk?iw}h<`Vrn=MAe79-Y_kaZ z8Ejh^9FHut2G4-+GT17Igw)3p4#BUsCf>3&2z1xmCk7BrqcX`L(ECL}dL4Y;U>l96 zviWw4h!j!GUC}xSF#2uUb+BOyvr_c|5x0w9=mR3r_Vnl!`|S6H;vDy`DvJi70xb?p zDK~d}SZU~y)(`pwDXSA9ZdbV52~Tv(Y@Ud8Q=Tm_VN^TSo)jexXCLlYPYY7*NKuWcU@lw(DNyyTEWMn1=ce|vgjE?qT0zehx&%X}Jk!^>SF7)!3K!tTbp9PWdaE!!f@ z`svZFzwJ?&&SkgOAWkP`!L2p+gl8xbR~d%q?}S-C$f``h^5|as>~~`>-5gmHyoIu3 zd{IPNK^OaT?&%~*E%%^DUFlKThJ~^)6d=3K9~$iE9)k*M6&#QnK_(g79h?tm3sEmW z>uLI#*oPu8>(qJBK0PGHhWhY}X%~w?zY8ibWz15%TsStUf*1GzwM-?Jakb*q?=^FD zUJ17}YbT;l(z?d%`bm*OdlDJxk@ZtNUFzDIHz0-i;h{Z@O0gbH(pD#0WZB~=L2oF_ z*&Z`C&dbd<%yF^B$GoJ|Q&25cm|oHkEDv(1-s#C(Y9Bi}jc3bM_Jk;E?VhpO#(Az) z`@D$sr=+aiA~Nek)p)TX zd6~M&iLGJ}v)EScHWBH4FEM1Q*nGN+5La(;3c=YLVV@Lrf4ieUo!m1O2UrX0pEvaB!VBCqrOYGw8j(RH;J$?mx6g974!m4b9XtY9p6{gIx#~tDe zK|*TF9P+J~ZT4M)RLTQs^rCa1HU0n}m0Ej7kjd}NEA%@aK3m&pA1KVw;(Z+p@*lqs z6nmE8<+Yi!iYTmuo-*dg?#V>jY#3oyDy&$PxQH+ehRs_z@Rg#`Ql!#L zY?J_v>z>L;DSEbG)Xfu*k@J_#AWAj{yL+}!vD9CL_uDdi(X*c3{rH(hE9`_v(n|zZ z-u1{66Bf&I-1~xbdO&j%Dr5<}heQ%gi*a>2p`|29|`Ma6~EH=&<;Hxc51y|SKn z!G6Lt`Q#%jT59Imy$<=j^3raD9Wn+zIZLNJbAB4xS8k^nVOCuybn<_ZOA2qm)(FN` zSDiS#6i(d^QRr`4;>y-26sP^?kLq^sefHV!5fyt>p62cNYr@P3#;Z!KjR5&x9?wN%q74w#EZ0wU2Is(5%BqI& zQ(I%-_58`Q!`m%ifJWz!Ngf_&4|~+n2}dW`;`2F!ydREy*NH-d+u}F%mkP&$M^Lf2 z#14rNd3@T(qg_Z8`bDO-h+gintU&BaQS++Oh+`p`(u0Zi#qY<|{g64e%LM4#iQP+1 zG}c-lhurz~zTQ`mmW!K{u>PJ=#?UM!K(Rr>c@4a@dJ$s1)HBLn5vJt8Cs=213&y!Z z$Ua$;mTjPah)DT|H;OOR?guePNlAJXdroITBJuimDkr0xPwalCH5t~qNQvLpw=724{MH~Pt3Y_2+LmYcOtQm@rCB8@bUp1QA%+2unrvCm)Y_{eP&q}HOOh7J=i?$V5|G|rFg?P! 
zs&V4U)z!`@K^(dVU?|&%!mNQI_YJjAKN#bJ@+sHJ4)3R)1Z4 zJ`~kEunyA$6YUc}jAi0RUFMoDcgQ}znIY*c7~9B0#vC3p7F(Vt3CpM6iUnyhb{i9x z7SQ~`_jmP&heh%YUM7N&T$*8{y$n1;o>)ECek#bQoo-lRH44*fxZopGK>^VTsQ%zk zm1FB-Pkmp%VYXdiIu!UL2u#hiogRC-Zi4;BW3!TnJSck|&x^tka#h%i7~uHiTY^8aNegyVM@wTmMgSx3&qk) z@ZQYzE|&pUL5q2fxJwj@&hRbyPA)l0`+WHdMWC_VZ1hnr$>kQ8jN^XV6O<)K@U~{v zf^jhpON>t06k(>OipJRt0cvV{TL+uv`QTRQ32&ammYVvOnuWH)A&b6!$F8>7VKDI2 zWKRpmN=8Ud^LN5=i9{wfa&pgz7ETx&vU{e7vjx9F{2WfVHx`(kS67;J} zR^VLVlTM(Avby((UDQf|SBNL`@%CRL#3rPQcnZ8Uhk5?1vg?KEH}=0vtZgf3Vgd#T z#2M37wZ7I8qA)CP9;~xMg&A*}QambcKlQ}fZEJ%_z&e}e34)!Mbv8$kdXjmZ?RI-= zGShPS0M;Z5i=kVBbG4oDys+vEF^v9qK@HKCh1vQE=dHQv(d{D-5ycQayO5)T9w)C5 zfi^z1sLZZWm>%F}^X!z_XQwyVU7j2siPY{I>~2p&OL`5LMQ`h#qhM|SWWzk`DN2|j zdxL^YJuMUeMb3~(xf&zFsk9z@c&u@Ht{A^n%7-w5=n|4%~Va4$= z@>3@~anyLTPyFP3;*lvBPG1rxaf-LA1DityZeAFDPs_HZ<7JJhlqA+O-N(U%Rf@bza~}v(hw@^o(uy zXx>(ks&>Gb?6GGA;?98~O4|2(5&RU(ifJ`-VsCjuJUou&nPIwpG(p0+@AECk_HwB# zMh7ZW)KZJfv?=y6B?6vktUIx2rClIMhfb`TV_#7?);uJc#}Vol5$FTYm_x0n0~kR` z>+7JndDj?wKp+l8_X-RKR@y_NQ0mFW_4csBaf#f=#a%O%2;r}acjC3SC09^qWPtWKJh^zX+(O6^ zelFXQ3Hd-28Y4*_J;Z-1FJgW^iIV2!cCj#@gqE5|TWY7Z)W9>WwW~y=Lu~Vy0JrM| zTkoBXIMUAU*sMp{?|8D8=JM&BZT$t~x*Ep0%pMT-ZzD`EbrHiSo`?Fow!OttxjaZF zS6P`b^HGvW6xz=P7~?nQt+D9}`%)BtqS4lg66Z3X?|YdYZ~`vQIfHOYJmmRLE_vLZ zQP>-!JMjMOjyd+Oh!h%(%CJw&wvXgP(c0NJ_5^$8#{_9Xw}IccFDT5W61aMNjA08$ zr|gUDDn;pcSSI6g*9x(T^r$LDWGYq73cXGwYL_P{UNiO{+0B#kts+tvESf3U_`mZ! 
zd+H`({ccvzuwN@oO&NK>dD62UUot<)F=5Rtm-U`1 zZYcqi{N;(9!Cp{g+!2RW@m8%L!0TQf561~R?I7?i-V@Mztee?@*EYtJ-`?@9sv2-DTvP!jevVeRa0v4}58AW>Ch z%85G%^YfcZ$EXD}dAK_i^jW|=*^%vbhbS&4obu{qtX7++#Y;YpbY6E`Bp zQSF;3$+JP8=;*S*d%^S){FMU*_gOIH3Wak7KOZQL!@+B!wip26vYbY7Wl_w3od zW3x?jvrRj)O%v_6A~HN9(TpgwcRYuljB1rfq5-O2>dFAH3X~2 z1~oLU)syTR5q)W3FYZ}{A4H)MCwJ^UHG5rzG?%x`ZG0)Y$?)G+RLS<*JxUq7i-dBq z4Hc%cPMy10zQW8;@UlG4i)0_TpU>WGK{Z4^-*NZ5P(EK)cm}kB=2(jWoh*b$im0V$ zm%u!C%ZrD{qUF=at`&e|c4U&>t)Rs3#%?`cbKv+`%(!j}T6;J@+HC_A8#@YXS^pI_Yuo}5$F$QJBhPK7#OmGQp?mQ-y%o z?A}$)ORcA{I)&?$N*m;efgd=+h6#}oA0iX%0RhG%yXK(!hq}cyk1mny$1IQ5rvuM4 z@MhMCLOZb2x&SxGbL;~Ve6=?>R@;R|V5Wa0LMfkDm_xVPhsXR zxLZQ?F5IS;Ix#H&=pu2YZFC|QmRf37w$yBusinOl#>V$X@bI?|Q+bI|h)KU7$S7iM ztjLa^2gr|~6Nm#cK#rn5ojPC6{cr>>6J&g{n`YQG3Tsl~$m-qQzUBma)~6yhe7M~& zLfm?^P>!wUOBeP0^A$tyNZJd1xnvNXEEbVApP07y)ZBSiDHxk_bQw(RYGE4v!rm7r z*e?WH?>YJ$u&5prEiCsrVXg0GUDzTKY4IBqH`wCV1hNp$*3l^L?J^OmHM?OuhOTX+ z!{d|IqaSNeIi#V32Mx6s1lg5=L(mtHO!@1cLe`A9LjFmRR`yM^^Z(a~ST>u|qima3 zIy|v|s~jd=?GW<-(p2ao82dZ;Pz}Pq9{tROXU4OS_Y{mj{=5`?{z6N50)&fX7&#@# zkn{|80`v6gvWG>ZHN(>!i$)7j0cYW0TWPGYem?S8CD&E9QxwKR3|pABuXqA_i#2j- zbNta0Kq-^wx#F1qxhy5?j9b{ zv%5Xu;kIqM*iRIuv3%02v3X)e!f{2V@FD~IAG5ojhVB+9&7;EMhdveC9g=QXDGDRT zJyKp!Ry#zieL6SqV-Drc9zmVgJ-fyeB|yYj-xh`SbZmz#V+RvR?}$XdurQjoKM61b z`_Zz(mspr7@Puuy$=%2*&w92foa%^ynbyiMY8#+=b6) zFT6&OipPlc*T&<@(>yd++TDV&B#beRvRU#pdxW-sm&3HohygkRb=x;V0y=(a=TbzK8lAX*lq=UcX>T=*v%3Z3ioxMWm38;%U_f||J>(FcViBj`5xLaZ z`Kqvm!YZ2IVcHsmXp!u_TT?FS-~<9m`-Efr<>xMKZ+c$16YPNRc=Akgbd_xOUQiy( z%4^=2F%yW5M7SwomkKlPZ%k;in-q@aa^+`p>MQ~(@hpk#mgtN(C`3g^KU?tlQ)A^y z!w4OpwEy^&IacB2ovB%GOB7~Jux5ux3v4XT6`ua&iZ!R#*4S=A+B2$oRZ$76@St#< zEjV-y2<8O}6=2XsTD4Co0V8vGHDC2t1p*goZQvqdqPbLs?`!JUtS2qY-+NUdsW)YPnu()ZL35C&-JI)}QfD{azwgr}_C}Y1f8}sEwO*7Vp2EA~F6{FPQ}|#k<70T`3xdp5^ZF9_ zZ}P2Umx&Y`SDdIWW7ECH^MtqZ4fYK|iW>?W^d`^G#J~ZO_N{gEXiUM`0EH>5rEYVW z&Zk=w;B8CRl2U)T0_8U>C@PR~0(1>|fm{RFcyN*(# z{8A+Zx?6#tp*{-J*n;@n-`^o=hj7yvCP-FS4d`k=5`nVIdC}44`V-+ewD_0G?wn|8 
zQR1PYAbDsh#(`2cMl>psFZ-94+s_0U0S-a%Ajb>E<{?*%9>C082^Gnpu!$g%mk{2M z7u(~4`a$BQPvef-$7>?RmSET59L9{^5T*5niB!70?X(tA<>0rL7d&O&0rP~@y8Sg* zKoBUZi4^7r@!cx>bQSqDH*?`X@J?=bI)R}YnU+!I?})&#o~hYvJrpJ@N2F2qT>-iu z+?ww@4@1q3mhhzfpvPLN07G-Arr^*lo9PI{vS@ZL5Q+oUyiZ)HZAlZzLg3Lft=7PedcaSJF5JL$z)Wd^%_Ob^(Tn#JgCkoT6z|^Y82~kP} z+Jq!;rImUt-NIG?uQ?+{h%yU7pr4ly)WoAt5rokc%kYuBc!bsEGN(&MV&{F12naCOwRH9A`U%z~NHdPkIXc_^>?zZdEU*teCAIY80q0+WaplRnJ>r!g z9R-Dc_v|^uE>zeLK2l-gVSl}SPQ;icS;ld@(ry=p>UfdPSZ@c-``En(2xx$GNUA%s z&!&q&Cl9Yg8rS9v#fk6KuB~l%DC6Y(fDbr)xh_aq9~O*DXk<#p%U=?KmbPnmz5So3 z4Z>cv_BW4ud&Xw_^k|atvoA~*z`LomYlJC>_G~?}4`&QFi{QPK`w*^@+pMFA6keVf zVLcS4nY`(a$NGDJhv5P9G5emN+kcX18$VPSBhq*OpmLHme&5=ro7ue~M`jlLHN$Uk|w zOWXGLH-)JfaochBiRze7%QA^Yb?nMrDpKsii%r-_+x_Cf@c&)*RmISC-cwg`9lpZz z2H%;BgZIoEbH$;LULd>u?L=Zg%lV2)yoWD5GVvnPh9dE(f8d#pE<(0a!U)@JjAv?P zZPkkbw8hi$c!}qU@O;!LQtM#@CO87FtI=z+CehFXXH<_MAxZ^MoRLP52@0v*$&n zr6lZuVDu{v`EK$;W2YP*uUjQqlgs~`#H{XoEO}R2M+oQ=kNcYjN|1?&M*io7Me76yfgEgq6y0jeq zgZ3g(e7OvF-6bF!7}(Q5j80b1IRWqq@8hXB^o_6~4o2c?+%f`j!wr1m)saGp3Axdm zsA$HibNj9~U17!v!((RfZBZF1{HtOkJ!2pF8JL+s<_@mH zH9{0Of;;sM3dfo;qb4QEyF{Uab4wmQH*dV%BN!)wCqW%%4%#43Qkp`eqRjFIX)F8O zY%6mxM)E)P|B-pSu0t^Ww&iFQDS9uJz@4JeUMa=t+_p!URt-&-V?bCxnqP{-3T~-+ zJQ!TU&E^4X7Lkh6IOnwIJrC2?3oR=d+sIMy15$>j=zF4QLygik2cDg0pZYl@h9(mP zmM#-=nL|{W>8&&xcJup}u>4huZT*ZyD+tT1gNS~P zaXXQ{7@Z5Enw{vH>57qQ+a=)3eqCJnw$F>gj}`{7^lb5jC~*bek3nc@z%+{#e;dr> z66^d6Fta!UH*$Q2?-Hb!tPecX_YjJSk8XW_fh9e$h7OkjQv_)d{xZ@wUx3BZzDs9& zT;ceAE@jii>v}HZJ?SF*rC=-xppd_@M|?J!nu*d&o1w44Z&awPKIz(Dp|AsYgjGfy3X=1mggp ztIVc0!{ZsbWO0?v7mU9q>`zEoaC44=J4(1swoMc|RLyr3quN5W<0LPGPby63`k=sE ziH~c+IGTBhykx{gbG4Dc!wZV0uGYu>ddrHUU0$Y(;XUD)>7czWB11G-zN`N%5ZjN` z26xB5il7-1qui_}mXywan)mBeD{l+*;#j4TTvQ0A#KTnyyI)uvMJ)!Jk&W164~oJhQRX(8#jzhd0U>EjN@L)sKm-N`p@a$~@$0+@ z#_3*Zb5_|*Pf$^nkvZ%Y4pXIalVuwPsepTY(7m)Las}|*IUTKqCq2i}{o%drw}M(( zZcb>GoYTFN%Y|_n{Q}2MF^yCmi9H+jm8w6r454dy(FC_gaC7aa-i( zsvB>(_u9`zr;iT@!^!rjAag(;?;Bxrgy_K!5@lm}A*--v5vYu4T&f|yVn;-wExo$+ 
z=xb*L&R9Nz!^(q^xXFIDU(BwK&Fu5G)9KrR~ik%wt3m@ zQ?lD?DNEOFc9@}Oe z1gL^;3`k*dDU;^-8i*|P2iw_T#WM;h#n9;og;+|`#?4@+c9bv-GjazeHc~XLcJxA2 zYzER&`o#WJiM0q=<@9%kP4?2rjTBj#Efx+xD&_=VQ~apbdmc2(dH=%B(k4&CO)npv zU9lM&MT-5u7d^*UJ+AfsEBN~CO#fPdWK6Nag0 zA#kwQRc@|g)soelZLOCgnI=S<8U*8Swj@!GV_c7h--xK9{XsAeZnC7Tc~|j&Qt}cR zUQ6xuToLGcC9K7ZI5TnjiB)UuZNa#m4n^9uGQ-6MZ{ri7(s~TOvf~P9M=Q{otFI7s zwi^6dXcZnW+eRt-tq{FAv3P==QJ8TZRBY!wcw~cJFcB(+mrCO1LIJIYz^&qx_Bjz4 z)>C_zp5C(D%%PS7+^j1F*5EZ`46FCF&`(Z z5QPTd>9)|S1Zd-PtL+yG()4n6<0+m7i{Ha-sYi5d+ts#v1fz{KkVljt8T69E3{&?C zNff*+L^qC4-6#{BZwk_gfoaSSVl+Bq!zM)vt3DjJ8f~;FWZ~o|`(4un{roEfdE;1f zvqXzOQ`C<92em~+hESXg#I4raK`$tok;5+g<$n|;CW3gefkqx`9~|thyre`lFnA); zgSWljN|ELb!lnzX#VIW{v%=rCcERLWHO*rSW#9U=2)@!~iEqE8`NG^v80cb8POVr; zMEmFx5!B_qh}i76%LJK#kvvd-RbeLS`Sn~4uJLr#(trxije>DXQTwF*cC#q4KkYh+ zm)l7MruBv{)?Gm+riyoxO6x1+r(w)JubwEo$e0ovD7pr~nP$7}0nbNk`vlT6seZF~$z#x5W-AhOY1!#SWl7_lzj|$)(c; zcH&F^0Fn1+4h>Jf3s?u<}mDa&wwqXq#)7C|hiVI~~XQ0RN{X2jle=F?PqhIr^fmWhe#%fqa^SX?U6pFva z`!QLOu}xl5iIn4i>1p%Q;+8z?X%l(8t?TFclBXTnyr$5y4zQt{wi5#M>)hHJJ*EFi zkUG<-coo|%(_^H}#8;GA2SFVn8u`NNz}H}%Jq5$kq2tZgU69^kjbfAa6`-uA7VvAa zez}z2UKn0*cMp7!6K7%AoxGgjJ?T;4v(BHJR!>`EDXZC$XJbc_w~uGIaHh!|eK+?7D6CjYlXW{^T$yI?-+trsapGb2M!C|3yx+9*%=S z0QQ>)gy~tni~`yB9HFy>%->QW7E^fOoErA{mDxSqZ$zg81h976ZqG8JN(!NSh4}3S z(-l}XS#D2w;yj6IyyVeh2yt2CAaPuj*m%_C)Ap9I{}varl;q{{J<(`m3BHOF_SqRR zNAoHP0+re4h2v`Ki*24l``-X!`y+;lZ&}g@=aJeJ?iHIzl460apc#9T{ z4a!Ho!zzUJ(?ot^q{38%m9C1E6qJ7}0#%K{dWZenQy$tl#x9>p1WhhNva8yz5F+1^ z#n{Fhhi$wci9nqwnl|r33qg=NPVZiBg$h&TkyS?)Sd~C*%ut+fMod9Lr>nAgy-o21 zBj8adt21`MbIZWH9TK1x7?)@2_S<2>FgNpp4B&EmQ50IqLhF3HwFvQ>kRv8}bz^n> zpGdL(0WvZeux~GtTfi?R2IY3zS00T;Q&4u-qjJ7@gDBJurCUjueMsf zgTg~~m9|@evN-59K0BXn_PJcnKCrC+D9BQ1)4r3PQJ6vE)E(XtzBW7N<#jthEoSlc zBE)vY1Et+4N?f2vm+;-W&9g9pTj9p6weBKNeg$HF_b5!G5iV=OFwyrss!Q9>Hc8=F zaxf+|)#^owCG&1O*>;exT1vLuFUh?FRl)g7ClBVo1{ zdx{{milp`d8{;Wpgur^Pw4V!8{;Q85*0NcE4Y6O@a2|=>Fx+-eacCA7nEw%=n}Ksz 
zZ`aKQ#_gH=MJCygFLI!fgo`bLMb=J9^gCLCD#}*aVVy)E{;l1OXIJjAZi2DV@=WksSjh0cZe+be>o@}IMZI{mj)2DDzft7C8h(LCpmsX(u z(@BulRaBK7sSj^&VfpM)G8RQ(`>c_t$frb!mB`12J?~hC$#Gq3F9=h>+1mN% z7S6VJJX+G3ayavE;W&VU*{(3QZtdqoiql@ogRmY$th;E83C%zy z(0{B!fzb*@iS5ERL((RCh8I_aL)H>OZ3Q^$`He?W_yahgeo>Gs!`E)Gs}>Ms#NdO~ z+BE{PM7~*-*25$7aei(03ut%Sh7LYn4!CXVt?S1kQ%#q4ovcD(mfHVg>)hkBF6;lV zmSy_2{95nqq^6Z=3UWSJR^yC-5JOGPZ{6E&*u9P2^S(E5D~pZu`3!O#gE7W9jd2>| zWDQEIFHDLAwYn#4Kw#NHt<0~#=j*zzar?(_usyEV`#K*#*WrC=UAwS#?cUbPY0^ww zA)*?zdaHYnUu}~`p?i}~-gcG{C1_cVvVeubYkUl~tF}YPHyuygQGK#^ilhY)<~QBS z*sCFWY$BH-7X&Ck%X8X;y33>ab~&WNZ6tBjF z)rwv_h26{ zEQsah+j7yUI;;jv^Iai!8(K)ur(>hCrPwYD`{6qgQw!Iv_V8l)ytC!=>|=uJD29>a zV-nlI9U@Zn7~1{qO93D~wa|JAP|U(Y7BfrOSVD1>5{N z0Sz26@ZmA`x)3?ihi(Io6mOMX6p_&cvtJGz%xEYRL4&pSNUC;es`jwmxCRPY4_>26qux6Qe>fXG{vC(}GG9wOBjn>Zn&Pil6K5&d8>VK*7;_ z+oij02=R+cv&-Nv*&QGyZ$3EQo)`4pGz^hf`)%4g86WE*^+ZdF8zZwN$9hp=9C5|j z^8IJaYh=Ra=CwZ4B1FN_P%1>3bX~=UtkJ$Cg3o7wZVo#M(>wVxFFsU2BX@nvT8u0_ z9wKv<<^@isi9m&-vNdK4(%#ZGv)`T*!4KPHQ4}sMs}V&doLnVAoyiNt!rvHzr5{*c z3@LGtnIoxpYY~AZ!arFvln&T6m5?ZCGRCrcqX4YIP7@Vc#_Rt-TJ3L97IOe@e)kZ{w{c*-^)eKTzgHBkFY!aqDiSkpHiFXWHv^p zC9O;p;F4&5mSpgPI^Nz8K`o1VCUJJ{x4(%(d5~R`{WmQl{}74BE@<9}O`j@&uXJVv zJE>nt;|LcDAH*m`NZRf4Cq16l(6z`Cm$Wa5L?go;rwf!P1!-hh%F*8Ens--fJDU9J zaa)0W^>{&d>g%MtFTPXtRwfS$GJ!8fF`tm7!VGt(fqw-er_y%6K}70@(iN^w$MI;g zHw2?@j;DezTD_%2R1u`a3>8hgxhr=3J11Yt%OD!UUad_~^TVC}nKbk4dZh7yi~Dzov5G(nXQoO*jN{xc>(mjZ>Qv3CDvV)w6@fwB zyIVKwA-5k%W8=j#N!CXcij>;DHMM7keN&LSQ$i%-3yP%})CPoD&K?0~Aop9jC`uEi zw<&V#vm>E*s}^DqwN5I>XreP2>7;h|i|$A4plCEFD*}tdPepUXft#|w2hhANneTxg z8{^no*xeflNq%pvthEQ__9?l?f;!Hx8KU`mU4Cu`I(lq~D5Q<5%W?K_h<M`n+_WbjgLPu%u_r<-E`_328UW3@-DZZMGz>aiP;|0T1Yg9Ge2%%*!Zc*tszb=8 z*c<8a(ZsQZ+$V@Y6*N_^tHw=nTPJl4uiFEK7n!CTc=Me$o=vA?+OcoR$5eWH^u zPd?nu7Tbsr)0vBLyev<2P$ZhXPdfP`qEI%@v%0-`T9Be#;+Q;1ZeI=9yUT2D8V?uu zC1g+*qCpdXN@=AGKeG+Y@d9ekH8DWZ^36p4iWA^K?4 zy~gvcDR>RxBldiVNOqE6WT?F+iiT|!FB9uuuC=xhgw3PVz8AH(M4>qBl4PLfni?O2 
zG{$2~VZ3z~0e^T*#*gQ^J3#zjpT5W*NyEbESQ{(oDL1RJ!n5wu?u?&q*m{PC_B2w>HHlHcCvUK!;VyqElcYhrD z_7Du!`cc~>ppje7E1zsnye@(t{Db2x$bSuZWRX6#N&9H6&m@`8hWY^kii~8aK6rx! zHIOkx81cxbR4irlBjxM6ZH$!NCG9(+khznJ+Rse*o(R;SB!SlMcshKRw>0%5Q8YDV zof9U^iV*yGuLj`^O(qA0o`SxahM_~vz9~!_ApEr5z7@c$4X^IDVFLb36Uk0Q+5B*?9s-mU zmXz5@0V;-iViI~XAgqY;^%y5a>tpuIYr3(CqEHT5tV6SZSqMNQjYl6HA}~`#@%i;b zt4uUf@{G+Ejn5;2D1G#w|XoQ>&{B+ukB*Rv_yYu?K|chx1GJT$r-({LC3PKomc&hQQL7 zaO_yaMD&AXR1}7=0Nu8z+@2DoDv9O;7LnUetJJ>9seOy>X;D-;?q&24JSRjxzh^<) z&Pg6pp5|>>hO>F{^qoAWG?_06Xgi!vQeAj*l^qp@>WV?ePNcnuGo*R@bQH={fEnfA z%Ga-`rIEzZo$zF*M4~nC@6t=wT6)_>5&WQG8AkA1m|4_e&FQ(;N%ibeCqKpVw)`lxPfuE2g$uE0);PB2tb=GKScX6x-M7KG2(R#VPy`JZIMoOHZl9eH&F4He7PC_!3eyEv!lb~Qet!2#lVx&7S1npuw9 zZP*JUx|+is<7+N$mG#$aNhK-q_hj zFN}ciTD|NrG_mF+TBr}~cTtY35U3BGi{-x_dx}5?Jpc9jAYsRVK~|mg(CG~-?3lFsbRWN zuW$U$uyZ1*ud7z`W?~luNEvzNOfn z2%t=l+7SVb^T{tQVR{|5JP~MKIL6pQC58OpVBYA8jQd`jAS%UTcfIcA^|n?}BMgl| zo2I8~8mulP!uX;Gn*%~81~Ng%gqS0009Hlet#kcahzb_TE}#g*J(#b`#qi()BB>E>I{s&5cDUJ1q)nCZo)d9J?H#GxE9G-U{%Q z&9Bbka^wAU^hj2+1QD{UcKPU+cTT;`gN6m@n6}p2T>)ZUk5Ox*1*sr%VwMPAIydjC zu$ll{*Dh(T9X{ffF zcH>1SWDb2|pmh>ZbJXp3np~^x{@|Sz$&1^Q0=~=pJu=#!5~Q#f4x#$J!(F${mdi~$ zlUoK?ais|H*NNn3!wA{u{;jZ|ncNI9r0jK3eA=T1j?$~ys zut9fh6GUXbQ%*fJ?XgKA#`uz$RRm8zJkSF?za#X44q;FcNb~&Rd2I`Kxr_QKwl5^n zwr08gN^Y8eeDmHsjJ8NI<#CayYukaQ3$@#L{JwUaf^$l_S(I|otjo~g0f^tE!DWpwc`r=i70+~>HUCB5~6bP1eT17Y`Y*G z89zEVW@iK_c53$myCk^;jGvQA| zEM`qSpC^@{3sXt16HvL-CPbd8+O4V0)9tdLpJFhfY*<9b8yE_`56YMt8h~?f#>lr}yRG?70;%Kf!`?5R2)E{v# zYO^g@6xxKFxR~V!P*}`$LsEchT&S9JuCd8>2x=24{o2Ubp-O;j9=eRzStmh~dSl6)OY3In3-y2~ zewyUSnv^{x0(0|H`}Rme{)B#YY46s#Yx+CW5rd zvxwA)L?!c3TTy76g?zR}rRWo|x{yGk?C4a?e>MwJiuQ$#_J?%XQ7B0&ws%DEqfEPQ zPK}4`-yuFzqzmT8uR2H&&1DIA$x0n}9CnR}6rs4en(hBbg?QJIo;+tMP+Uq_oVSn3 z!w>3-;pQ1 zNJ1!-iP<>;U-~R~ZLD3GVx^karkZw2bo{DAo{}taD)o5J_-LSze591CfC9T&6rW0F zE~(rvtX1Yfxf4U*B*mhAMI_&XJT}Aw1Ug^;RlUV-oa-KCH@RaK+IX?cQR|4+!3#xz zBNd*;XrF{k@H8t4kRrqrPus)**+?&Zd9lqDq%EEpGSoK7O|Fp~ILtQ-__={VoL(K9~Y;4Y>Y^L!_6l~28Gch!J(lj 
zB*py}ibbtNFf@IH#m`|<@|?~5U1J+U3}#y`++)WD{cIZ^jTM!}#@T;FqGnG>N6`&O z0JPq-n4Rk)H>tzm70G9H&J2F3hJE&<;O^bCr)`j%3OPl$wVbIO!B4tTs0nlTPkR)| zAA$zKXWU!`yK$MNM5WmZxZ_XQ>q35A9h8W1PL=wDcRPYaIe6IB&l5zqN|RC8BqmJ9 z8Vwi!Li>iis2VpnZtuuB>_L$<6x}-SE+!V+7!ftKBV6>^cLR!{!y(_kFGN+`P7UD~ z`+*2bR42^?EDwh*OC&#zBH8daByFrHutsqN*e1=sp3~FDiR^o4qByC4BCI(OLlMbR z0df*+B_Ho;wo(K}39_QR!B~BOSdLuw@>)9}=+i3ViQN8In6zSg96296`^5_}h>)<0 zA+CF6FSi?hN?7`|0B+z<$W2k1J1B8=##6;W+g)TbFCcBk)j&a?8{A}7Hb|HzLyCx^ zk4pO%O28#Ejr*Y_-hM3X2d~?fa8z@PWu@q3f>uN6-D5rLyqGaQ#-Q=W4tOFsG z?v!|T{jDGs&5LuJ_s0NEZ$qKgEkE!2Gqlt-G9*6Kg3*f(R%hNeXY% zR_p7cP&_B|!vgwEVRFi6Ey|FaX5@-Z0+UY0lMh(G5Tp+ibhO-*lTPn)Uk{54`l+Z} zSGRk`+<>U$n%X?usjxg~GeQ!*diJ)J=_E3dnmQ8Tgrlo+(82h#5SI+lU8c79Z9053 zJKJ6s^vlk~TqOLt!G1M_fTj0r#bghCi2lxK6rz+0K`j$Uw^*CpT3=a<3hhlHl@;lC zu~comLZJlAW7cF(A;zeZWU|!W3GsQ0=yJBky0W0ZavK4W)@LdnOvAG2jH;iRQ{n7ru{Dke7~UH+)jo> zXes#l*(042f%^39)Xgr0h~4!He9h1Cpg3{1w{g2xh|GzHxdpC}Zx?~SOf+x9wlU8@ z!jv}xHx$<7c?#+$3YBzMa++M67j3ABZpa+8N9AUmcjy^I9w1vJie4;9gggPCXMS^a zX|WZGKwT~_-PgXl(k6z$+*C}lnF4-n>7#1vL)fuM6;r&;g1)Vg+hnH_nC+WHr`Sov zzhUiz+wymTLQPQ)Rr~S+`*WHidsI88+Pk9o{u>-G)EhY;@b^R{3#SYl`?^vVi6%w6 z<`>Xu$Sm$I(fTVyhG!=~cXsmRvy*4qZK6;sXW^sV9tcotD=JhRq($@+Wsil(PG1Cx z*IBkGvim2CRenK5V0OK`opDT1c?v)yo|&R6Jvj3xir~xMVfwAY<_DtFp@AliJ<{60 zR74d_dtfeMLrT0K>DqL(H0vkh}bVC+Pi0uuv@hCIcO#UhmMo>Tx2 zM-Loq*SrAq1#=>a)Ng}bC!+uQxKHP_)3r84H1eHL5Rck-17I{q>`4J%2qdD8=we0` zpT}cd??(lFa%6e^Ue{CuMWoQ}TC)edr*&hx!v=-`<6}8F+@C)rs3CQ% zGT#;mkkuP(8<)%Nk7@AV@L#sJ4sAZavDP-r%g-MkW+qA5Aiuk8Ye)3eyKR4Pk3}=l zF}Z!a55XkDD*>{1)Zny<&HYNy2N)?Y$lCKWUv zVmCZgC@g<)>CzOE5UsH9icZ$LjzZ>WiXasnJSM|7%dNhm9eF^SY{x^`)b0jJ?41jc zK|CUsXCMBRPe}?3nx@$&1${3aJ6Jl=?iKdm>3+=z9na@CLl8`uOW58r(or&tSZ$vW zrflwU>xlgz?JWgt78RySA2u%>>O+!V8aZamIuCqs~)XkELw zb?w2{wW}>VP-rNgm)L4qGDJx=axc8cwhQ{U8IA;cyr9tbiA2^Hwlv!QbbQCIG0l#K z_-KZ>IDNjI5~OTo@KBEZD;=C6Q_GI|!~csvbIOS=W6#_v&$&@V%6Yc@2oeF3mj1LT z__s_B&8{|jkL8L1nCdsuEnYXfeNwb!|q+92);O&rkTO9=d>v5wiv=oZwGHw 
zyRp0`(f-{*VTX+fJBLEM?KlyrUC-X#>~^_*UB>W??;u{idi1e7ulUgbyl)XDm$$Q34sq zow3Sp5mZbmVK~gw>rzUs*Bi8OYGLdxIvF(YDzl+-tHtZ<+~yVq zPkfvzQuWpLu)Jujad0`=m=LEIS9w`-`-T|EIVo4rjR{GKNCKcZQ0R$Nb)}4IEfIy* zV2d`EJ6~H9l1MewrFNCuIzehegb^`u-fR+qB4TOEei7W!30%RqTnU&J&1a>(IZY^v zPe6U;qayevq-&oCy4zF2)S^IoLn8s8{S8h30pvwBS?-VL}C1^5@;jv zrd@s_q94F0G%VoJ04X8SXpX{h=-xtEcRC)=mne?Zk+)ey>NAA%z^{Y5b@l=~E;prP zxJB)R00}v5boF*IKylFUQ1tfzb)UYa+^&1c$7Imd+b0F+Y4^OtfpPiSis`z%x=-{l z-=rO62E(hjXuhqVE{T)vXc4F=Y8zPT_$RGDW745Upv@y<1tL&`fxLw!f_uQg5jII~ zjmgQZ5zB0bAkCi>Nn{<}l4bKl(7W%v-xkR2=c6nJp*ML|i0o|?+Qtx>CVvzSbM|8J z785A96lVoBp5f$2V|T}X%-#{*uY@pzYUgQJpS(f}*JgIDNECFGvm$D@2r>y#zX+>g zPa!{QuvZkb2ZclDGc&x>vu_7ts%E=8ZedCX>$M`%ob-~Oz8dXu5vZjz2{fOfWB(%x zIa95S;sQ$ul5;nHI@_iYynWKVbNgqUpSRieiQ+rRJ!sfr5q#mwq#yV7U+i6@dYUl7$KO_YiRa2f?@GrW9Gt8+oPv@NYnFvESluC28Li@}a0n?S7{g{(LtYne+d%*O5Pq9Ph`jbM`2oH%+Y!~AJRFg z3)h=|>jM@Q<-q-(u-k8m zFw~t#3_jau0_qnR=i={$sCl9k2EdCUG57gI-RyIoLT*LJcI2S~{MJ*1_Vt*6_=?>t zjJMOUKGW{&fXGjwL$8;h9~2y{*yns*1TvGUHp>)X71y52HLFx(H1bV2WaaXrz-ZD) z7TOdc-=*wZxAJ_84Q3wl4v0?XExXaYaKMfV(x%T$OxUrIKyy`Yyv!~Nh1Qo^hL8Nt zC+HR0J4t`pn6pod?!)mhj@!M$VeU6<=Y7$-h(ZNW_gQM)1jzryloN9}Q#>R{q3MZX z<7~JPpU!)~Vk3fw6bvQpF(Dddl&sR~BIpSbd@jkz1a{w+BZ|L*L~+FxrNU2(Onz=1 zDz~YERqFd+CHL8d`l^|H1NB1EIf@y*oftUtSNUi{gjS7>)Rb5IiE)5&HzlE0d+HDIJiMN~}2q z)Gp3Wgn$zmtnbQAqjzv*z?XsEsxv;NPCdHXEppSM=ok=_$xuOG33NJLZdh%@MWF+R zJLUIJhN$fo4fa%UkL3kFPi}gG2W?p`+-LbBkgha7aDJQ+!rqOUi1OgRP+MX3AsAz% zTlcQkAV>pn`X8sa$>)Q&Fz`e=^r(#C_FF+;*i^%Ol)>5`MWL{4i)f|R5FF*(jo2gT z`id`7C--9m1H8O$_2sI18zo5dwoR|GN96YX0CRDkWeL-5ox5UXCr^Mpjvvu=okIjY zT8=T8qvbY5zBD)$;(?DfR+5D4Mbxluf|cZuZ58C}T^v1D8O_hfB!e}E5Pqe$V}fMJ zj2>^V3XoxZqFydjAtV_ji%RXHpl^fLS?f^QYHy0-3!h(#wt-t;1(SLEyxO+?M?9pS zeR}q_FUm_P>%z^x5(3J!D7Wu-3lcC=m}|r3raXCk|Lh3?pMf^%RuSSI-*9~S@eTHz zAVUG^2DUJ)COZRx62nl*5ovci0az7az7nPwu>zih?S|Jt)JTF))f>1ju}_K;n0CeO zgl!kBm#lQpFB|Q4`Lb9YueCemBx`bN-t^*%eCsO$b$1*B+bk|fHCk$07Fbe1`N)nf zr-Zq-L=ke4m%{NabPM;t zX%{I<^XI2JQ79P7^Vd4zfx{yB42Ph?!G4*>(&=w6rE$KV)Ep)U@+;!A(JF7hL5A6S 
zqGSqg`U51t5rDggmT{+y3j3kx8V7Y1sk)j}UA@f@6vvviO59l+Me(H>%)!OB2$LIE zE?VsN3(~}+IQ2FKciZ7bb|eI&P>*Gdw&nmYY?|42c)7hMNPEFgit4~W0>X|SK@y!9 zKP^&NO7`w{Jl{L)iz51ZcI(sAdJ6gxFIMVe>m`COY0KV|J8eh^@2*f*+9*LPd*yxC zo)v|Tf)zYbYUM)uUdU?ccE_c##Ab`=(|kg^@dn|rOKxa{ZA$DmX!3YLbjk)ta;m1$ zUKI47Ob%jG-9GJ*y%z$}CYLbdduxh7%23=wsJ|0$yM0MSGRz6w8@+{1 z|Lsd93Z}FE22WQ{?wgX@w}7;4K!|vH3tzXLHdc@pM>w<8#tHaZp)D%c3WX*2OOld4 z-idDBEB6RCTfU@Cu`jP(aC!AQTP%w2iAOQ8Rwc+77&tuB>f~l7J)LL!0(s2n$L&`k zhHR#FTt4EXg7-Db2z?;y_N~nA}=pT_5W=h!E1Er;ml`uib5TaRm8cSw66q6 z@`DFHZ1)NJG0Va(W_IfnLcrm@1q%TKLWnF$dzVah8HS(ft=bC1m-@e=$1R2K|3+IS-Fd%eB z@|yoMA=+^?&*#bFTy_$_43VkH!(7U~Do7!Urlh|XVyhbHBk3#X+qPv1D{@lUuhLwY=0|ge z7Fw}rv=d6W5$gJJNFcRuO=|CIs}f`orw&Z9t#VUV$1I4LUV}X^iZ5J$+BNA36dr+L z4q^6-kQlEDJeJ#ULjn{oCz>rzoHs@Bj3*1f0MwP&G) zMX+gx)A&$s9}$s!Nx6HMKK4;TnuM3{68p4(pEE4-nCi0ILTD$dy~@5SNF6gr4YJO` zi-fxM>}I_}To_1GwacZRWSA%cm21qcA~%o-xfdhsOp#aI*k zvq~_sX-9>8#WSU&{nx@&Am4E%T1$vdF%b3G0++_gKuI;uaxA^|_D503a6GCdbL^s! zZx#-DB`|I>`~M`O&xJ>tJ@%$>P^r30=MQaObY!!Sob!p1;hN;?7PV-;ui@ia03XlR5M>-v!?s<3 zGV)}$7iPj=2-5NGdu#1<@S*{7V$ayyf??1sa!1R5r70+IY2KEw8{2#s%m!YnsTafM zCq&U0Kpwl$ZW5vu3*%A%gi7RlM4)1@Kva1^4ls9l9(n5}Z{IAuh~(QhLVSYzTk#{vlHf?rT@%84xgo&Oou%;v(ykMP!zWqdeHHFq zVYjG$Frk43@khzj?-MaJNtN!~?UE=|7pXD0soxZ$HJ=`zWglwCtq0>E`^#ne@^H7l z69aaQcC!MIn53sr;mg_VGl4<}z>gyBly`~ZM=PZgtEkg<^U^fyEFa%ea7e}Neqpk{ zxb8XcrqyXP$6sn)f@j;L750GKS{Py|wEk8)mD~tiT#to-PYfJwkIU^B6!hg5WOHe(JJKr-1VQ7uL>>YU0g}%ZpvoL&lf%uwI?=T*f%!% zPgv05=EP;q#FWa>RC7)EL-EH*z7 z_TPPE*h*j=vFt$7C7e^-TyH-ViJm#$#I4T$;|;c21iEc#<}j<5n;hC^tv$a0D{O+~ z09Srq8KdJ50n?!H7a?1m#w{yP?(o>JSmciKuQwE+V_@qQR&1U2cTvgZ+@aNI6}5K- zsbegPNwlwB0BAHv^!hSDh_YoeFuo%vCQxxRkk<}4kKP!8LGU$#yuYVbys^5z&R&^|1u9k>?2w^zVK7>%VNdn%2`MY*4g2@~BNRd!p6AfwDJ2D==SaCrzp3ax8V zA1~-j&{E?9tP24yFWV!f$_^6XYz0<4F;8D_8$y6{M(SCUTIm> zveI4<2xblLQwaL zv(cUsqx^4$%&5JLBVHXwkPm!5=PuT7H-i#q8UK`)0YX>?|;ZC6@R? 
z5y|PytXJyn=@9zp@_c(nZdU%*NhlSX>J%-p1A+;EtK-SvZyObWVj;i{A5@)?>teJj zO}7I9(w|ZfbyCo;E8O_=j96^fy#a}gF_xESHv|xo7LFSO$dZw~&joON+wn&GngF#K zf@tl7a{DBCsYX<{z=n!MiIRxkj<+l!nnns4X*sScp9%ha1~E1NLm}Uz1K8_Wxv;)B zx(Q-aW%ETKH#B?NO1Y`tAikk&gZ2d2pC)9D{Jo&q^7mI@N7F6j$W(fRyzY5VyjJL( z;%-Yir|j*J@`y2y+BHrz39l8%*bDfQpq2)4m3Kv;@5jXw1xfoqA<8vI zn_jf?!2bRhPf3dX5Wf6EyFnD6Cs+Cr`*<23tK0Ri%=Q_hSYBuHsm zo@?1*j|!+6bT_e0_QxCr?}8F!N3-Qk3uI>Jq7$MFr9SjZk%+zqPN7 zC#g*~O+>22x27b|HVFAvdt=e04?=jatwz37s4&)CD}nFL!jy^xOL8W}@(b*R5TDCk zv0$VGDP+dO14i0K0Sf7uw9$YFL(Cj|FGP^>8s^-`-Xsk9yKA6Y`?Mg1lNykueMU%) zAn7=s^(R^X;E$j`TbZ1I#_}3oY|jdjcWw-`B~t{rRr&5P+bE}6egByXSkw!W);A=k;iL#z+Jj`zA3gp`=iIZpL>ywPb zDryxWbldbM*@|$G#)}jbJd(8e!LxnNbo_0lAgyzB6SDpL1pMbJFr%`yV5i~eel(!v z-<}Ui@Oi3!d8z$cP~G7a+VIz24T&~y=ZWfXA+(lbI_JZG^MxIl$cgL7^F?9bLh3x0 zJV&9tjHVa2*tbJ?5SBYiY?Lr%Y2SDVMnDfKe{&PO$Ce*b5= z{WL==W-KO5|Ly}9#-e%2vZ!4zOv%OZ%GT+V0iNE?PO98)5hOZ&DlLfz?eik|hH(R9 zo!ur11uN!du+X}M_-rar3+JH^-a2`aSX7e=aQF3hTw(e6;FrN|kobVOZ)izKiZ1Oo`y{{zgb-nzE3b?tMGUty(G3wsIr zPV%MFQg6;mF;I}!)Dh7h7N9=k#meoGA7n!r1*7CiZ()G+Y{db`06W7T7twz~l-F9= z{thT#F-gF8EnhzmMnvFbTOk~dR8sj??w*|KXqsz{{8W%q=Hyxd|J=<-)~kilbO`C8 zXB^h?h&2gQ!oZ^aD?yrR%)sIHirkboUYyCvH^~7iXP~_i!V@{37;AqCkUbzOj_qwh zKiQ5iJW-CJZ0+E-+ehAl&OrEGd9cj-SrI5|ab$eb?iHZToPs&KU&z<^QMljjL19WL zvx_r#*7PM$rH#z>jvSWbwlTOjuPKNcjP<}C@9r;?#2+NclVyQUv9s`Wx_0gh=nx=?@=Q&?Umpj z_E?5p^|sIXY*agh8iyS%4hZC&P0# zN08QXa&Pc;EVAd);k(?|-BtxS?pVdq6 zR*BP5p0p;BeD{w{U>oD8Fm0cb6(^X(cfdrLs$On?5S^-xZ{Dm;~9gs z;~n#M5&RIkG=IwF*{ki2K+&_!-lo*Pm3C)HgN-p(dToecSg^wddA&U%BISLcM<07K z1ZZ8mqP23STXd$_4?-ebx#`I+U(n}UhztmaZ%66ahO+xamsx|`gQB7Bh$TY$-}v7B zHZdI+#_7`RJbN}Iez6?eOuKAKfM}(Qp&_wBkp3u}h+1f*@LB?l_SrQzo zcvW~SP0_U|`;e>uNtkfq>J-Ad?-DHIc7uGVc}C_KyIF3Umpj_+?vZ1(2edUJHngv=v8(+D*Yllzq@v`C^X$~6kI3%?V= z@9gjG-J@^c9=*F*N;IDr)}*5ocyntLiRvd}<5&f+`ZtK`w@zA^R&)C`5vdW~$W4;n zE6DWWd1R4w5(wWIDJPNYEm?=>X4JaKk0KEdQ%+n@4iv#xUW|`08fJ>(lfJu07ke_q z2=cLwmMwy>gyW&-?c38RF?ZN}yQ1kd`^TAj2`f#f+2z^JHciki3f1l-9)z~7DjgA# 
zllqg3ZM`T|UY6`+h626hGp$-gh6`p^(0-7xp9)j^__3;({X#&qTpv|7qn@D7-#st2 zyTV%J59B4L3}LNH0lLL}w*4bO_!p5rwGY4Ra~eQTNO#-kg=sDKATp(O(n0&GD9nH^ zY_~cEUm6#Kw@9Yiw?cd^Ix>W^) zq%Zs5jY7VcN;$7>6ZFgO0C9P57xtrSh&H7~rVZ&hT;3Jiu@Jy*;l%)Z zCJ@}^uSUEgij_@3?OE_e( z8rg~!c1e_QQeP0>p#CNr<>Z^lj&husIIeonM?^*I`F)FIdG1kB)XTL8EgpPn!@&s@ zCPb}F6pDDZd>zBmCJXv<6c_N&v?+KOW4SszX|;lWc}!JLO;w{4;fQS!kqoloX5AOU zw=FKeu&d5~CP=iA%vwcqvCuE1WD%Mq5bVY!OLA_l-nKu z=VNl^5Ib5Kvu?tEcyZm0iuYk6kPl`>>g~G%=|$tr^fT<7MWeNgkCx|^SiTUIZrfhj zzU+`aEl4xE)pj;d4s+~>B2k#puoO(~@Ya`H_IjsUT}S}EQfS$*6N2>C@Hn#e?wawH zK%s8=an8jNdrcJIvM{SWZI^|$b~wYzr1_V00Cblm?d^bvAeK_hoV@CNBKT_Q)4`*| zZu_!mRD^quZ1!;X3zDWp2oh3I-!=FrO0wX>^N?~Di4&GW725y>@Z)TF6cZo`8yuoM z7E6lb^GQL9-CQNL$}nyRd~AafD6sDbOhr|5%MqpsdJ*Awt4$Rl^u~IphpWM!6OAp1 zJB4{wiCdh+IqoI4O1@MIMkdXywSvBgMQjb}?P^iND6HR}+BpmRgmzFwpAVcA+(oc5 zy&#%;z4;*5^B08}B<>#Kk^p@UuheFa@yk)R`d;vlOS!={S7o|FP_G`yHQ$hi!!t6i zYk=;#VY@vPVEC$y6r{vb8=@WPej6Pqq=q;JE1G@3BMR!C*fW<1`q9A(6ZhCxS~`CB z%(h7(0!a)jDY3bNROaZ^V+Uc?{F;+eqkn|1Z z+bsbloA9Tdfa=^`a!(Ox33d>{>cYqnJeC+|`5`!8 z2+hqs78ikb87JQ2*#e9t?n1Ynm~M*$L~umUk9!0u!sVsa-c3k7Tr>Um%Zqv-se{nu zfe??T@7ANYofq_d70=;ga3Q4Ay<0E4`5K?&oe%u*+57Ia+XZQ0VrE5A+P%5JlQi#L z$Fm3eio!5(3!AHpbE0`pE$9P*CetIuk!<)(5*7mxS}i>(WoS*@?2Zc8Sw?XVcR z&`ANnx4Ft5k}qvTLK7nB+O|j11cc(*LY0a@gHDVbI~Wdzi$wBM<>V^&%Dz?g@(Tmpr;dfg&WJ|yNK_`u#qn()_Lz*Z-&bs%gngQkJQTqcSZ7hf>!_G64xoQ` zhV>AY+MTUvJUe-leOHk37d2PMxGycVhzNcg;@`4~$zIX2Mfd3!#j|CUJ1?XJC!?+f zCx?_&AU^Izwm}4S?&y)WQEtXKb}6cDbMWZJ=xYofFRwx!?(+hajYCC*4NOZ2cYf6} zdp&ry&fKfB!+!+*fPR!09{a+zU|+fziu^If(koEHcw{B%Eehqi_#8@fgRcbSJkxoe zDD*%CJIG0k3u$lExHQ$c(eWHFv1da9{n070S#IC&(%+JAzY>8G$nfk=SRv}|ccLi8 zEHrysO32TvVdyu?<@6a^xqS`$NztyuKEr-|^$P9kkNCh7 zIktXCZn7GWy7hd!R*0fFL&S9wRO~8B`21|27VN6@5s?zLtal5M?${m-F;g`)?oE2B zeNPlEPLT;rU;)aMj50%6Vi$+tbh+1i<0|JkP3El$-WLW19MCI1d z9ucMrP9Y|ifkV(9UAtRR@Iu14d1u5X2~q@}qj&>Z9T3)=ic0LHkYa|cNZkIkM3f_@yWZzZ^uu?lN|2^WY*OMrh-3_)$UX+kXyBGnr zf?*^}zKYen+L}c4E8Qp_jbcSb`jj?{>Q~Y6(Q$b+e4(|3)SRyG{q~k1l}g6d7*}5p 
zq8)RN?Xr)^O}6a|n%Ww7cu2Nh5r@~yVFMM$_XAQ8)%IkFBV+6sZ^;v+hKXq2c&iZ5 z^3XITbzrWPU^t48jq<64%?ZKzMqqyEs*S=h!90{NRoiX;jfa>cMs{K?*V?}INhxiW7vlm68p86=!-jm7Vfc;8D z+9JPHf@O9oKsbJ+GvfUKvHV^DciN3N_`oP*Sb^fWj|=)*@`T;fdJEIWtYP-`5OjDf znH7)NP(j~cj<0VyUxdsMgmxGbEwRT0eQLRpaivzBj>4gYyBszrj^@L#t1hx#3ghP< zul-S!-H1THj4eT4?}UJcZuK6w_vH?Dx&1~&3fT+wL$8MDzgeMsqTj5r*F~T;Fy|+Y z{{IY6=UKKqSO1syZJp)JxOYIx-+UNuLivVIviq=0d8&O|R5q~gu>(bQdTf9^I3z{f zXhFU`B1qp!&r{4|LX4+JFd@j@0?S#UD8AnPc@nXwg?+uV%djVE2SspWc!e9rbrRNm zM?@Nfl?c6+-tTb&Fi#p^|p@1M;BM9K_I(#%8@)1PxM-vD|uTEAk7G zC{}9IPVc5tB-C+_vCm#r0Pju$n6Ghr6`brOD5L7C`^uAj9|0CA>S2cde-U^ zLQ_a4L05Y~n6_-*j#SBj5EY}$@C=U>(s)FfvKBWdVv#8#ZE)%^D&4l~k;(EzqMam` zC~K!yECOkE!Saqx5C|JcxR|tww4aDZ@n!aIhTJ4bfyTfIk{>JmAZe`BIbn}*fkOFe zWXG}#Y?H7bQLw#fM2SG2Jl7_yMSv#2s`to3`(5yAZh{Z))c|s&W&Ldd|0&?vhO?~Q z`3WdtRvh7@t_}YI8!ak{AFbq*_k@6&rLn@PdP5=6CVNUm%HvpyVSTj<5$G`9DlW}h zVsizlaod)1TNS)y#J;B9_5^sTX6v8oC)o)>@?zVv)&42q3&;)_V?XwvKye&p8_rJN zY`5Ozb8?myw|GeWEUn#l$j>*^kdb<7>?Z=g`OQ_ZlM84BHq~JtR9ZYIrNeWKXKq)B z;M@J~9$oLT*&#p<3Xmn)YjZ>(!8;$gv!^W*pq3XmAF!HqOxDMUZ5O6N3Yr@ckgz5} zx+vjPz#a^q+#bzBkoS-v1-n#ND`jm=**}HgjFMND+P{LoW|{_Pg?;$HiJ|tFW)cbE zUn@e#*}<)5n8o1c;A5iu3cXYgBj;UdJWMQ8!-ai|v-6X}`iLl+PrAyqJRz!>k2;bv zx&4H9?+7cU4r;Q9)RVKL%nNQ3^uwFYRzV5dxIG~RrRS()uLzQ3>!dl3Yi)`BO%y8Q zwNEm*-x0wV`t^#fUG3VN!5ZIrFQ90+g0zNlMOUv8`=kF!sO;6z4k&jj9vZxuz8Y2OmZR)GK(me)k2%`i`g5$pey9H`LU+5XZ z|J=cK=MPlctN>Fr`^3n*Ccsl0+=r~P!*|=<)V5077a-pU#;m<4NEenxbe0sLVtiX= zp~~J0o)~$bZXZK6sA`2+6;s7-6(l0cmberUpvPWc)?l6GrmY@^-#ltvLo7CBOJA#yvb^zJn!kfgjdFqO;_iDnZQA*_?z1pQzg+q{_zk9Ptgl8u$i zqdO_XjbrX*~HFx5}(ZuG}Gn<@%r zfUiYtvd^U>pqxEwRl>B++4AXD7b1^kzvr(-w}?V!gW%4HShEnNixg=BID15K`*}!U z#L$QBm%)uXY&$78O^u>~y;8=4y!r zj|)+@5gB9b({fXdXd<_HFY4a}5$khy>6W^x^0&OhMqRBx*4l`WLK1WJBZGS)59Awz z`;pNZRwK7>!u%4N)L}}<3bKl+S8Ll9pkuF(Qco{BVS7ZT;6)Ox|4m466xa3_<))}y z3S6$(YHx;c+&SgiCqC=_SiY)w47fv(G!WK}+cyNrm<=;$365!*B2fRHUHe!}ZePmb z5w4^Y!ek;1z-KC~Bm_-upKptT7mfH`_F>xv{ix#w4`Tp-5P?j&M1pT!Km$fx2Z%H* 
zwQD{HglptT8u3frz?b@wt_pUv<_UziLYj1l-9<)WTEQ5~RSinH2Xp#lN;rL`D)SlaQ9)AVf*AZx`eJK5j=uAi1HVhuV8`vjs|3 zH#&x%jb8JkUH5tDG&!2HxGas>CxrRB74meWhxtOZNPJ2((&>61rh?>FyDn^5>j63P z^b89Em;IlWFBPR%`o**Djx_Aqr;FViAgd)}Io4IsuVP$%%icvsI{fPuP4*o@byBb~ zCYfyeo`@6x0lUo>4-sO86Zo3gl;Afais|^N0>0yh;+?QnBKZ7NS#(#g7KMsUWOKVk zZa?-O#oUqY6;_|kXBHo_LxN<>>2HhuQNSlXHd+9?)U{vmfsy`gpS;WN6{O%!$tN6? zYjh)je+cuZ`qh`Gth0WCBnZP#fqh4Sjx39z_SPePTlTPgFi*GueI$f_bt#Hj9upw@ zSm8u!U7Bq%5vbWPbkx9r#zts{DAcT^xXioBX<5kC(T3rB%IC5+|0zj)s zXM0|N+Vdtm*cGa!kzj-sKykfgnkn-y(G-e+c@(q1_$?+A(7uu)N zl%hCV&I+xYD5R)0fZuxSE`o3WQ*%#kv~LLep_EJ%*`tEA33@2DxaZKlHZFv~-p6@H z%hQ6Sh@mgkC%(MKCWt^UFc`UKix=d`$ZM&HG)6%jKHB9W1wyozm)nXo?9!u){Y=nT zgOv;|p7yH{Ae(A(r_v&*I_&Y6JoZwE;bbb7h-vDY+peTB1ipUmp*|}L^*YzcOVcL1 zQ&3~PuF9Ek+5s!Nbrw;Dj3IO~AVi>$Pq3*ODX4n^-9&aAI;Dzjl|7ygIR=fSPY6=} zu?crAjvUUDApm?G`{3;OA3@4b7teCn;?D$sRve7}*o=?>IU_P1=Q(4xDD-o_)3mof z1jm0&EX#f^NU7NwY_?ZJ$VfpEAJsnvub6oJFAGqvB#gYp;^wuF-wuT~L45_{7IurE z9}0|>B9EyuON?%ycKT-l*1$%)OF>EN?Bw#Z6`Rghthf6@S`zxKZ?Lb20F-eAiYD3D zJt2x#v+iH9?+166KHV)Qx4)a{7fTf9=Ab^KR5V7NWgRUW{Tm|D zj*teU2`g$1X~FeRLw)2uy2X71}NeAwO) zfdXGxJJwP*mb{E3AY4*^@(PHL^3X2k(sT($55T8)+n`f1@bvQM{TqEI_dw4Ci<5Yp!@UYf>d;dqFW+BC^M^B=bNMWJw~Yh`Etl^u4~9S|rSVsX01xHiBGljpRrK+lMv zKfpWLE(x_`UexXrUAq>Y-MCD04DW}mQ#!cxYSvde@!v}87E(RE+i4^15rV;-=D5^q z(5EVe)B{DQK3Ff|w&@8WU*DE0E)N*nSaws_xSBIXfT<-P{AQx7XzMV=;qAu1Q=Z;=X1e`51dyt7>=N47?j!(pLLQtAaVH^P15T`nBlHx(7l+Gr>0llt z3LTP#)nN3KJQU#Ry=1W8h6z%b9GSBj9ss7`BQ{QeYP2_QwWT4_s|#LP2D{;g0MAx% z#ZY5E74&&(+M{auq$qv}XGM-y+HZx`uCZs}Q46ul3L~(@+I3&TkMe0^^S*S0ps!I* zObj}o7s01~dCJZ|?b>5sN(YG}c9yVjqzQwgEDkm>5V+42Q_vPG6G4?}m|;Jb+kMop zSRz?(^8%4ljNx1CjtjO!MD4YL7LJW6S+A@ zC+zK1zo0Yr#xMIY?G<&>`g^M&H#^<$wR`2Hk@EBgGE~6Vniu@FY&IRmw`n|!EAn9> zgcI$6RbaI}E(%S@8o|DZM1CLwQN-b;JEdoY$$`(p0h=vAVYz87VI(aJP+LH%vPRH< zbEN=m_MX-m+pI2xq2Arl?6gf#3&atI+Dst`xbOo*vhS&C`3eSN&>!#?jc=@G6bP2`?|0`$kOi%`!|A!?hc;=bY6%_ z&|aOpSyIp^{b&rGaK*Mj6fNz$YizCDf&W)a*{+UzH$DM1@}tR4%w(o)!}82Ns}qR{ z%_RyO$gKf&#mGSyYY6fe;WKFsurPg=ixVzV3$Oy1P84z+L+y 
zUU7pkJ&z)Qn0-F&$wD8sTZL&Ct%>yEoq}{d+S<-eUf?u*&af|wNCk7yCiATjfW)1! zAd;4iHYg;*)4Sxu#s^54fe#P1q5v^dae3u#dsdL*96Jox&J+R4PEe^mu^GaQA;jcS z$brI+CFuZVEE%>k#4e6B*ISK%Utu}2OC5WY9TSZTxvkZ9`;8ziOa?<9w?70F&y~iL zKMIkVmRBUQ5|$Fdmj=nq61)1V;4qA~BcWPpH>D}CeyPQ75rxLd7&gj2E4QBksAA(R zbEgRYlgF2255vJJp|Gx^(+1sp_v~(c1Slc{yew|t5^~+H7iuTEi;Ywm0XKRt0`i!{ z1RosI;O;%wzAr$nod6U<4*@o>b39{_08h+BgmkAx1LX8o%PVSJkW$Ad#=U;iXGHML z@Mr|h57R??4 zffguEq8{nPcLRJo4@S2&%yAzTg)(6@c0bn!Q|)6S5FU=2xP2x7OlFkW@Bm~)#zqLJ zH%1J$k#aMZodzXzC!v-5h&?SL&8zV%ZQWv~aT|+jn-=0e3J1wpAlg3O6e zHwpXtkBFdlrN}-NB2dgM)XsDgq$+Hi!vikO-aCY8pYh!Jr5i1y1Fmjro3Yns1&CPx zEZT3jAX&Jb*HWbFZGH&AtU161_HsdACLW)2Y-8|loiVd@?t0rKNDbLI?DF>XtWG4_ z`XO{lY>}Jf!-1x8sqGPk7KFVB9U;x@?B^kfT7K_zEgK)8up;h3ZDWhVFv6oK%PzH- zglIK20#EA>QbHK!qhmQ!osu{CU#?&e{WaXmA0v^{`L@MAA&OSA$yH}3Pd{6+LFbD5 zMI`-E+AvwB5D83(#%!Y8)K4PXyyT0yyDE6MPo?^DlRib^7+Ht4m3zy*=_4ZE@eeJsS?qs*z>!I7PFS7ds#8}7Vmc8~ZL0Z$< zL*2)1SieAEdPw>3PEQC+O|S~UM8kfi0{XILa{RMFAz@y#q)>K5NQa7zkC(8C94&%R z=j`NFXD9CnAAx9ymz|}5_7g!`vIxo7xK#vCgp->$CvCEz8WuB9vw|18z|aV0D+GN* zVuLFJH{5nnsM-h~XR@{V+$jP{$BtFO-1Blk^hsWU{WBn>$~c34`aYjeCQ^d8$gL5e zclLcS)!8>hQJXnV7T9~96oIA~muRkm&(X4k{pTZDl0^paRwYDZ+UnS5KM5(&IQp~} z*mM!Jr0k#U6bEkPz1UW#L$N}_v9vR!!RuYrUKU`JcWkM(%Sn$rCg@7Lwv&fR4DP`X zakC&5a1$m6E<7_)ZoB>#ve%l)Hq)C=$0Ce7H-Aq4bd{Ua-c}_hOZVHYqSJu7`NMv< z5H-q0ipE9;z?6*JGXj){F~*{7KN6(ZGDc#Sw1GY?r_fbL0hvn2y;U$d=k-&*^LAUP&W96zR&3;QwVOj0FnS%}a-UQ)!9@DV`@ z5h)!BhYQbvuZI-W(o0a}`qvNuJQBT`F?%;80D)JOE3joPr^)s9zI&|JgXK<8{%|fvU8Vm?)YjmLJ-m!9Yy2oO5u)=;7WI}=}<4YNnUhp ziA?m=3D66dmrl97;h-H9)acj47LQqI$gH$qh{&iJ%2(iINBD}Zm)AUJ?ExlA;Xa2i zJV4!@u3BP$7bK@-F4OM%E&%Gy8JQOQiV*pppS9!sp*s6sfKKpxm*oVQD2a_{f|UyT z-o~PnG%m~%g(CIt(%t6DO)m6PjjalRBU0Sfhft5A@_s{bU#Q(_N9FeEF`e@;F?l9L zLrr7c&I$M~p!ZNxu5Z0)8r04nIZs9JhZx8Ao|tJL?&^y`Qu+1?0m?R(?k$(w&lI#v z$854N$D?3&Xa^bL;_J0tP%SorQ^(#Am?G$gqS1aT0t?N9ciYe9q@(EX%g@!q zASJ2Izl%uD7aNb*+j5hz+pR6J&vgSbWSaLP6#-upQ{;;x5xRBOe*3E2zD4|=&%P-N zCDF5o4HEF9l&iEP>TpExpAct?@eudDRi}>F7}1sM?vp#%IY$J^wL`j*b}I@=A^VQP 
zmJ{Wc5P_^y4cmf_yZNH18Y@@X^C38TjTq=CSw9wnQ*jxQ;uZ15js&EzV0jADv~P++ zJDr+iAL)+M_a)7Egd}#`^`cR=QPcNU<;seRa>*kRufeiNliusB3cM7cl8mQti* zTO?YTSUO1_cA8(ACrfQr@M@jJ?rTfyq_wS+X0ceeme01e^70LcvV3%uG>YICR311E zrM>fct}3>liQvag*#s=QIZSZW)AL1d36bI4_sXdq9?xov-hP zENVU8_J+uerp$hJREYAqtwO?H6r@1obx-wj@FQgFq-s0Y5tI)g_p;ab^fBA_l+*O~ zaltTOXr73DQUuzkh$H9ya#Q?7T&lVIhnUW;Xx$YE^Xyz^%YfiZuKhSgJtXL-sn{9V z`^81+7+A=5SV9DHdJsc0<#JPj{5T#J0{Uo3AsKJF94{7&kB}j4uA4R)jG9CK(QdudpABsZNGWrj+sd9%kn#B=C(Nsq? zQgg5S<+eu9H%-^xvLopTlqRzPE(v>H-o7*#MAJRvZ$f-`_}OKr1%2)FIpSHXu!h|P zZ2>L>-yEzybN^y0m&vlW#r@125_wRi76_O0O6R===qKkT}Kq{EbC<_r{~&KT*& zXs-Yh@Tq(4DLH-V;m5$n+tVVD0S({C0rMwhd+hL=VxxdP`B3J79`i>)yFICi+uoYbqPBZ zOlw(Z5ve}+DO&d%tcwU#KUF;+wKN_w*vM>bA~L?19osiT2uO+MjW$Hkf1=Wy7<bFW^6;Zp$OJ5Vl>!FixxY!WL`|qL3Tg*P@bQX{EEKbfEqixPpQ8q$ng@ z#^sHj2{9-PTuIUqXMYfh_K0&$|Hly9b2KQ6v!ZD5&V);AioMwpV0)w2RAK)Rg$^%3 z&T6y0D-?zY3@~h8MIiS`3778g3;5Q7hkcx0^FG@b65T2L3gg)L-78FT&D#p2=Gu%A1TbGBWyEVE~u|oNN;W_%u2_0_f<+x)xs{cMm>Q zUVbi3Xx@{s>B7{O=IP%1K3kafJ~JINk(<)qgILt}1t?=9ysMuUq;`m}qw$T6#6de7 zLbT4F*E(~*{Z-IshnlS!)ko|LAp-ZaNePgB`2z_ope^o7uc2Q`9geVUe zE$GywN`nJ|{70emJZ|4f$3H%ewQrm-tzMXe8kU%V@7{P3M@wvD|0wvsvV#xgjCAB= zE(h#Q!oIb8^yp)o1LS5|XT+T$YI75koreV|nHDh)E%WVk@I|3&v7Hg17RVg2p8iqD z&yzRiZ@qk=+S*0&8F7DOe-)$^-E-eTz6)ZJ==1d}u_U$3RC+)ZVmZSOQ|u=JioJB- zRGTH(F-7MEmbdvLQV`50GxL@TG1Zertf>Ewt1|(ty1MfJbYGlKb=uQy+K#nawN=Wl z=v2!phzJEl+iL4e@*s~aFD7}xpw=wx$R<0o$`-;F!WP0-8)((3h&yU+UyKB-ICizm z)b{`R-h0C9{H><ATF|sGFtJ0{{mB6=iC|g zRfVZ79^FyRyF!F;k>~i(+FpZoivxNYz0xzaKlG^G;Nk9l_+}^^$F`%cL05y3qI8Wz ze7UH#aU#-E?9o^Z> z(QqWE+7|@kdO_;K{xb)MA)odQ!PsM&zE75L3b(5~&DkAC&K%xhqXZfBe!ctIXoYDU zQY9OZQf;t1MW8MgS=xT&8MDC&xUF-@V2vI;(6$T4fjK!J?)GkBUEd|crEgFRJ}2d` z>d9O}SULrEQka!kU4#~mWre751RQ1SaDXu61pB=JT}3w@Uyt?|K`l)j>io^aeK;$G zFv(6IY~@=Sp_Wa(v~9Z7h8Ng-$`YH4`T9zG{|#XOWNE&d?SnbWn>8!WHgB;{h(eyh zm$g}dw$)t$iAyE;~v~UO$$C_=2*Un)DW7%%jzO4^!yy|cUzGF zFZ_AQ$z`b|j||7BR8&r~a*^pyDJG$rgZl*;tVF7cUGPU9**Rmu>6!%*(uCKS%&EL^bXzN8JSW5Iz>Xk?v0b9YW?j(-!_9lWoS<=W 
z&<+Yx$E&+uZMLTb;%n!NQyDQ`^VcI$T=T*Xdrff+1oeMswEb2nb`H;wh+Nt~M4>BZ z7Ou2QzfCyS*`r^Np4L-1ZY1Kx-(!75pw_!Czp|V46`*IGO;`(k$SNI%pY=-HEEvne zfEDT*9*xKv5Bs)7Fcyv24ou+YT>9~C`-<&3Prz?mtoK&?lL&F684Q~yqeFsS)E^=Z zDwNIq|8#I-*@>k#RDcR}4wopDA!r*00*4gb)$b6B&8w;|E4D(-f*^5Ic5jW%7hN-pO6!4{L7RNJtq_sX*CTs{Z53j~c#}%f~dons1obZH53s2;L{emEc zz)mf)FAo63QO1Qaq5$`a5a&M#4;{67QAp4IC1JBXT8?fv9La8&ZnHfaqitzhEudw; zw#JSr9M|Yd;4}?bD2z}IJq2_~vG06xdor9so3q)g#_$0wIxkxD9yvE4l zWjQ-trF|wxWtFwMl=fFejYGtYqqA?9d0CZDEQdQdScox2;4H-#5C7<%L0-7ypp6zK zj!plvlzmr_c}^mK&j-lv^l09%8n7f@D9ETvNapi5T zm9|QNWBlo+d7UjSoipa0UfFP_^{8B~9#oXp94;-(?U+y;>LM;iur%#)k*M(Wj0N_b zN7GfkY0hMUzbq0RMBZ$szNq$At_ay2LJiQKrbX8 zz9<-fY{N5r$1d^cx0f%2%Vb~moc(ZGILf0sXC62`Z-ISZkg4T1$Mbc5P@712l0FQG5tx;#woS!$v4DoR9LvLYqYyo1Yd~nuMtjPFWU|a|6JVS>*R8iZyCMo; z9Ns0soanKE^9p|>iI%q491PBEbo8_`rHcz2=B(JuO;#fs6_zDvUa9RCq>lE3%j~4W zG;=cEr0qjP0Ca-wSlU31_6ZTxQbdbjQ(x(@bNhOI(E=nnKLp(Yj#ziaQy6nwVb^+M zijoPB;esS)Vdhth@0RcJL6vpA-Q$q>GK74V2*w>V_+8_PY`MC(RrEMp$R||VE@8&N zw^}IzwOE@IwXvY0f~(~{JMDNb=LmT?ep5I$=-3=I$?UT?L7=liscV6KPLNTWoUF1Q z3hU6=%$nb?zM^rgDF_kXt3pg3Sg}$RMxbBwbOG6;gNPx$#1@^(ABw%HA@7=rSo%R`_*qkEt z$FR`j1coPC^}9?2>OMVh_M5ef>^q)ahp35mmq)hG#C7CWyW2Adac(V#Ng_})Y`I!n z;Gk_Yllh1MgEc@V#T90X#uvA3(9tGfnwTk<56^ub%iJJ&dca;0rpYX^!-2t;6@g0F zZgCNnOFbFhli@8S{tB8H&)MW?G84%^PkVN8skhIGq9XNzCXe4q5oiJR!+|anqOI9& z+ij-8REDd`wYE|~v+VhJBVsjS8s}L>FlZ9m#@zF(4*7|!WK=o@Rh>LRVvfYlh(L=_ zes0^&87SX++zds25$aJ!n2*d~m!AEO5LjsalzoK)bHaMP`J0>0z8ej`U*F9JUf#=1*IIAbeCjK83Pm@c(l!c;AJN91d5D|XncT)Ce1!H;rF%DqwdHR<%zq?QE4=x@* zFq}m3aizUJS{YyrP|M%|sVMz|#YO0n$h<>DhOj~$U34}lS%Fi6dQV+;Q==tCVKBJ! 
zU+uwWP4RA|;0(KM;YAaL%22qJ{qsTY*S4j)uxemz4?AF=`smx1mxnj@Y0y z))tFxvM4YVx74vxZsSz<X zAhu$&6po)r-_Q@+Q=XmP4nZm?>u?}89N02`C&(HVlPWle#UErO#u2$SQ6)CVdrC$F zxh<62`)>urFBtp6ewY6^Y#$fhYhM^l;_kPviAKg@14rI!{RG&pf_jDx6{41`7}gxG z8|-FL=*rdC^tO?nk*fidFTk+kWKMQCf8g-!p_bEAH(IendRS47RS-drw}@S{D^3pl z(+aEeL^w^9rsP~fnt~DuJf%lG^7O{TXI4zHy@D(%3hbXR6CFoHh$rUocD(O}UD)Y( zOw>3`IJ=Wg;5Jd{Ey{`O>^A~58iA5&?B<^w8B^_SoEVz~(=yM8%19A_fK z1Zg3SxcxRmm?$*q_VRT8@w&t$s}+I4hS!i^p2A}y_rH~*QT^m9GzV4-z}|=!jPX;6 zt;ZK@tx;t5D|p5_nK&8$Ky(e5Q9Vk z!NaV*DFU-M0jAwGqX@?V3)M)Dq&ZSak%xZkwMs$fv-|6_`xn}G1Zg_2!x{UbK>P)a zNl@C|!sHvv+?Od#Bg08`qYh59MWK;s$g|tc6{6g;`=*|0-E0d5V^Pn;2(Tr>)YG%q zHQj8z1Bu1j(XNcs8{as(F2cCT_4vs55@X1`K4q0m^~;3Tq4w2}*qcsaqVUeLmn*_= zyjplMUnpLmYth4~p z;O<(gP0>=W%G-cOYp^FfUs)K)Q@dW7VUIe%{mUYIPJqVr4Qlf*II2qK^LX_ep}6FG z^|{J^C&(a&t!;Z$(CNT6^xs6Mfzk})_m{N4i@@NVo3iq)15?h;+F+NDh7u=sR2r*B z*9ucI28*^}JMntK_&vKRIZ5hZHbf)}#THwB3Q;z@C0E24y+_^dMIcN0F!GWU1=To7 z?@#HdohI>fSUZ7E(Q)jU@O@V8CEdgY2|q+OS(qWHC`#b}>!Gf4ms2i^9fEP2EX!bY zQ#N52dXir^&3|j&R9ht&e?DUT%jp<$sM|$l_Pbrv$9|%4>^{cqE9^Nh;7r5p)2;LD zoM7aEai!YZWv)cGCi{ot7zYUgP7rhDvM~_Y62iv_;chrK2olbc6yY}&6&K}b^yzWU z*h>^GCF$h1*m#E=TIV9Ck`dJBL`#J|eFuN7UPMMij;Ts)qYw)RJsCcb0TzRvq>Vv>PuzDs8-lF2{^(W*NbgVE)Diz%WbYO%{tY3s=+qrG6W&aMjlmP6pe-j z&edUiMUbV2Pb1y}e<>6{aR0|OMTcjS78cJmZwN9*of|MT;su};hg$nNq1fayegXE5 zF#V?bsknUDW$%jMlM6d7Tt0D>z_fqh4r2HvC>hD82W5ayiWuiFRSGxPz9a(e23&U_nNjVNFh`4%#!rM52|*xh^ljo$ENyK@voNt3Bf5=Xboz zT1AQTa$9K%TW=>sh-*_)k{peWM4GP6-?Njs^kcBT#G|4JjMLcLM%z0IQ#CTmEA4{s z0lF;3@IH)S;-1JL@A`|Hm(q|KB_F$7Vc+)-KC{HnWoY&=AvN|*#j5DdCl^~!0fvL$ zA3w>Pg&4c|5@;g?Stfct(so8CCwhtsxy()3d_l@8IJOjf`T|S`v&WNkH7ZmbjdzL| z$3KmfzO{Oe9{sNEYmW<1UpOx9uu~p4S*MKOIv6@^&|v#rSHgxP*@gr5o?sm6;Th{i0TN69Q5cLYm;-`*QS6j+unOcxZ}TY~ibG$Nya@F>jPx9u*r zzdNL3-$k^)@13NiW0;KJY}X4=$|;CSA_&Zf-7ihz4ST zXQtp0gU8xVPh5g_30B%Y5*8KadF=IE+JW%Jek<6uyG{>Yf97Fw*WUAF?0VDKBLDp^ zlF_TaJ$u__3e!{$4>#I*GG#J%>kpceQM zqzomT$A($1S(Xi1OYz&v% z071qu$y24YWdn|_>3%K79(zX$ql!Zr9ybRnJt&28E!+(u+Ht(mexRV_HrzR8RFY~5 
zV$EBv+^eBWrI<^z9|_ayQj~*jngC@MAr(+=GdxPJRBe@jW_+vk|81?OWP9gVx>LwM zNx_31+i$JTPWEF@hs+0E{+Sm-<+t`9Xa6n8G-da%v}ZjE1;BbV&(G%Ij8|Iij}Ehq zJF{DHnD;|6#vclIfz+`%Df_ZWaS4`W3h`MjOna$n46FBhf^;)4iM+!Pa%nNfx<6um znH{KV*B#Km_3P|MO2noiQ8Rfxwi#X`d`&(-w!q`5U_dHyyv~-35E~_3c-c&OM5H)} z8JV)Ew2h*4Z326PRJCluC4OzdsR^*$#89SNt6R_pMzx3<;VZxyD` zMa2mDw5_w>3)4w9%O3WwKx_`jh|0DtMfTylAyIZPc`Z};pAjJrQqNv}?J_}{n{C`1 z{cGA4qOc%j7`D{D;rZz@)6V15wIWbq8KPF!&p}2mBxsn+gN=rt%XhpmtvyZ+zsn^ragEPJtS$KcSZ<#c)W{%$@gEAu z9WLA0nB6)j+t?%vGl3rabFEqvGbv_gZgJ;1?jw2~d5WlvPHsifmdL}WsP81kg(GBah0*nqWCXq&s06? z%A))ZYY;thfhw4S{9qf(nnb0t)IF#K+fhf{3Q7y@=R&dL!A8a$d)nixQc|6JM<_N0 zOX>C}hrzjFo&D7zrbA~U&wYI|7B(h1fg`aPW;Z&81-H zhXiTviM5ifg8qGm%h7A{Beu|S5 zc&5JHqml*17|9aQ)a%_Ij{!$*yi*uaSw&6eTa74PqC?0I{~z<57|^ucN)VS9U;IaH zpD>lfyS?5H3(&2gmwU(_bC{3kDZWRq3sV2^Qaqg}3pl&IZvqq=3tx;=&j*C!mXW+S zp9k-MBG5XR1|Q0$7{+#7Y9A3Hwx%dEK4bsp1-7j#$3W*rLX6+YTL#;IDooRkKQbxB zL|>81I2ewTbr<&YM$_J){khq0@H|LNr!gwt-_u0V!_xurPpMFr<8E9@>o<_i0M{*s>bB7*s`BiSZYPo~>FBE}y9jzjT;m$3W1AaRa) z<=!Bu?q!hWJ17vRt57<9$AlT4&c%<~f9K+mqugUBgyU-9BUDgiCvyqpF3nyQrcc55 z;e31_w%0AP*F~fo3{g6-+WyBOkM4*0R(~>ezD$-J>*rz-LO0h2f#F4Vi72s0gI)3C zzeJ*y15nJrIadP8jhK(`B50>!m)UD~?vKbA)^f!pjT1TU?Xzcfb8Y$!OoLtV<2suY_YC#^e{m zm!#PDO^ik1Q!5j#Rj6lT#4o|H-IjraKPI1DD!Fhwj#_mFPdVkH7} z4;v5Cj67^rp7`vJb!T=gwFd>MCM8Kjo9dCj-n;$n6%W}Fk1WFb59Zq(v2k#q_?suC z4S2D$_XPD@lkB0qbbh7?0waTDwq#qb5Tu^|JRVorH-%U>WPWK`a6mUi6gB!$ZvA6& z(PhV%w{4#oDgKRZ+7@i9?3B`0g%{hlPIt3eLezQs@WRfe8*P~vgXQ;Z^Ey<0?SLo1 zp<(j)5&Mb5mutB?)Y{(#X+wss-#%O%%c6-Rsw?bcf*MCTUOm7#eoX{Aga|N`*IS5d z@%L`F>vI_=Rg~FHf^lh{SPw68sIYfkZt+&zFj3+;0gfII)CT_RC`P_>X-Kx-9+ zny`&jX-^7hJ#Yq{V4WUSo<?F$_b+*DFFBf}Gn1lQ8+B z(Du$QiLS+kcUlbb zeY_MH#}uY9%CmNXC=@dC)?s#$!m)yJn%OT2lNn2uMVKQW;7NP&a~`B{{9Oi{G#Co9 zA)?V>7Q&!p33fK`@cdk}Q<)Mg7j&*BPRqpA;x0act=edtR*J zLBB>6T6(s5oh?(C?zXS44W6BFH+@ic7T5n%6*7&c2kjaBC&NI^_OxQD0=8z`_JqA8 z$aG5J)7m`_hwEjWAkI4E23&zs!~b|vK48o3L+Mz|nMHfgZk}cz6^t!KR*ucyE)<2O z!7Cv9)8`#kpbUZ;cTXK$k!+WJ)BhBOX7uW%&!<4wxWd=j!y>+0G^R!F;gCMIqr!2V 
zdC*6h-7Y1`mxo&YbB+e?q{vL_v5;sq6^ zYC9uLBf}ANwVibsC)C~+q|L#E*Id2+1_}eBy<@EX!IQ(ZlZp7ND`H`%n(^2=%|KWGvW+e9XRKV5xEac47?0st(B*Ve$4M$B(2~$@Zm~GaZ#0ICRZ2P&Hw%2YgTy2l zNn^c)l4HA$ufbh_6^TI0k;OquI>~-E#nWeWo?0Y8!{vYuhrzbSBlQ4hEgqRHpO`Fx zZTqCKeiwA6DICr*>vmd1EobaTChcv{k2##Qy)Q!)3jquF^2QS&z2gw5m(o`IfZ`!k zCng`;RZL?)ECT&1M8L!@5r|{T?gUfaQ->31Zu_&BTSd+LUD?a2HW^Tv&|Ca zpMKhhIFKw5EgmDV@{mT@UovIuMUCS=5aSh<)-1wTKmJ9y=8}#-5BJ_{eX^UN=DEfjR zc=tQ)D$jTF5FGKtc8y@{8l15dkAydLC6IaZ88*~&m%zrWuzVq!Fj+2LCU{C3d3#~X z9`cmPZonGdD-=g)c(QsD16dJi^r}n78STdT_H-`c*!bdKY$ruiHQJb-6JTL??l>C5 zGl=qwAZT=E_d)x!hzySO9N%m34HwbhJ6Kla6R-ZzGfV78iit}|8=}n=jsuC~Y3Vqx5GBt1 zxYR_Xnzo3*M2t(7;)3x|u7L7%UfPZe(`Q_Bl-o}QVreCbi4rop=w2vZUk0(}&kM&D z(q6wIYKmHK5h;TWIbnkZ7`G8^wKh^=sz#88*PkB<#Vv$6mf`MJ9cQf2a|}WaDU|o>LtGT5+Y}|B1#(;y_&u{kpj&7gvGXKt zt}yA#5K65qwWWfLRomva&9+m39>JLB4O07qHH*ZYUWa44 z5Z*NGlTHbWN$d#w?K4hEmf|uC70hzGR1|uHvt_v8-xQ*4xcAA^27a9|ZG-n9+wb2N z^gS;c{A{+No}_cx_R|kPWIq(7yaMg`R_uuT8C%DLLfp_sOt5;-g?cwFu^ED08)rd= zhH*=`#vGLeDp!q1I8f{mmGvVv&!BrJ<7j(DlsA@^Mke2n<^L>ILm! 
zWM32N`pJf^=_XMq3}#Q#h6ylplV#jgVajQrv8;W@UK{68lh{PRtFY$08HRyu4Ij30 zk>W356uq{o!VE$C?AcglX`i!>e{G?NjP7lf)}SEWJlWtI-C+@EC+ZE{o{n~*>SMd` z6DC9(i;J-+@v;E>Lhx+KTTk>yvcX;zU2W%m>0O0sd*|GSvr9JFh4)9u7z{zE*#zSn zd1dFRreMSJGon#Fk~h9LBukG;g znR#>Uh$qX+1lVSKRxs`>I#lmp&H>wA6){dp7N!Mf*mu2zb9)}LzbPEsdTIx(;9WfZ ze&YdX^sWMpU>J6&Mb$+KPmy|`A3Dc7EWHupOjNen77W2OF8nuUKrhoYnBwEMI`ab)J16i)OD+L%b z=6w*4%sm{!k7_^}AkQJkZt(~w<`R$Q3|%IusX0&^q`jC8QRoIHz@+*c{pZ$-$VwcK zPDIkCc^Rh<9DSo@rOgx6&nCzQ?s0w)Yk`WvmS_Ad`o2wgn2Loem_WkH4~$pM7N=~CQ)TrCo-VAQbw)=Oa; zJYiY7vi z%cci2>~pM#h}1gZuE%b3o4O% z5nvpV)+xp4N|UEMduXN|R5*5c4BsubiN{2t(K6Jh8=gHS0(EfooMz8EIKCBWnwJD9 zw}O`w4s5>=jAc!DVmS)e_Ipv(r{)dW-I(QA%3u3HZOnDHVTKsU|LPE17ZO?coM5;J zV2iLR=*Dsj2rfB#DvHurqTyA9&En?gS`{#3|EukT!L}Pl`sd&2|D?}@~_#Az) z##Rc_omcn1+GKkKe7@I6K-KnpUf3tF`hF$EV8}B@sZ7$n>ABmsa7}$jfReL2R-!y* z-Rojv*3EvIq_l5|Ks(CH6O+<5z!46_xqJ*0itVNuN)R#HE@v>8h@t_-GFRIA3DK~AFtcv<7?c<|PmU5|@^IlL6KHz- zLr!|J)>tM-^jRVHyh`IJE1IEp0g z@fVLgx$C(t_AgUnAz0R5Z5IeI)D&{=K!bfnP@Tg2+ve%nO>68@QD}p9IPO~56BL1i zP0>AegJ(uO`T%F^@q*3>+0z)tg=hc$HnFRS@ZQ2ke!mEe_v=_Fn4>Uz0EQ2xv$~xJ zzWE}>H9Qm{UcP|~MPY66W?RWOv7#_lX2(Q}KOUA1_dD%}Kg(s+fu5{SiNauyPoYj_ z?Oh1Hy9)c-RN~?=p$?m8-GpgZY303kqr!3Qx!^E{g-+meD(&IDc8>^g=ti;m-Ji>V zy))L%auJv;jsZ=!O(0e^N~%%;6FHFUl{G1nHtNl~*met1E}x{SFCU38<{Xhm1@o7= zl7=G-Vjr7!zT}GPHq538Q!?j?q+KOIFZg83m-{M9h(O^y7SH9GFy)D>+BQzGbQef+ zpL;-nvFKbo&!#FIn*iS_V{3(D&quskx-c#3(;FA`3bSg+g6=#Pu`@PMq_`e} z#t&lV%{E%Zu%3A@m^ ze6hXi8MFKA#4r9IL7EfUd(xw_3uZu~wPy}2vWpasYe|Qtgk7B@pk~P{NPiJ%RaJ@; zL#+U<3bq{<*;I$w#{FV?PV0(Y%$Z@c9Tki}mIQ3IY zLY#o}2dh^^izCgw3agRA`pRL&$Tbs=*zZN8*t^1%Fw3LFepCyTV zQz#5nibBb6wA6GiIbhYf1o%HL#Z}sLQDRq1@X1}QuT-GXwrf15>+G$o1Zq+KEq3pueeSUcQh-9?M5FP%3LGfOGERwTyx)rVf$!eh=f`?eEU zOCz{JV<Rn7hAuetOHoM0I9GqXt$hvX*sS#V2bhjjvRXx`dA#Wn~s5fyk;-{#>&+fLi1F#WGYVir9fAxb#Dj@@9t0KIEp zy{>)rk@nS7?1(6|tFUb=CgCgXs4#WV`TCPP8Phh%w7{zb1=hVZ#tM%&3pV? 
z0#wR!A3DH3|4>YaxIGdv0<^Ptx87Exa2$q25lrbt!WssD%f{*UdRrl)S{bZPh$VeU zlvp}ecz^5&2|-5x=boCH(Q{pm#ic-@53o=r3iFH-0K-MJ#hw?WImMZLd&Q&B_(vMR zUK7;FC2&=0?H+j(7jqavZwRtfI080{>1<~_o~r`46+0(L*LX$_UYg$%jz8w_pbHu8 znazo1&{jTM+>@^mL2VP)bfJA+ke+Av&(H3k!e8qp3L{={Y*&$8Bfw~duMz&p_!=#; z+eB2mqS$(~-Elq@Nf1IJ)qET8<&^3IQ7yo!Ht^#%29tX5=7Pz>$%(m< zymdBTVH%2fp{}_Nf;1~po{#Tb0eT)ds!aG#JZ-wF?Jy4`Z#v|WQ{G`eHJ8+M;`E`# z9kuJv?wf^?TKf+X{b%||3!U<~px1~f-u%E?C?!4?Qm4Gd*C;@bTiMTwuQ z;TTLS$2Q$oPejw&S3in2kL}M<&`@238K_pLFiRb^YjFH%ADB-;@z-0Pm{@85D$G=# zT%*qyqb8Nbt3{+SI2Ne4>vB0J$%$IP`e4YiDPm~whGpE;2IbO?E#wIC1L63Czw{a_ z5Mo?;`hfXl#SUrwM8u!(^|UZ-n9>J?Xd|9nlc)&T0%5gLuV*zfxsJ$BW3GfTw+*n} z4zUXJ#=(8|GeOtllQ=n+sn%roEDu7=%Y!N6o%Uy?p@ICac|!cF5Mz!dL~gzq54KM) zfYNmr!g4VeA<8&vCc1tr#i>GZ2fivy*Z67hNDlLl zzO>M8a2WlB*o~f+s}Sd`!9pBohF9A#1+~f&Jont^nFb&R!eL>iAj_&-_iL!5CysnY zh>H~asXQCohaZN*LOZu-rd_CT_^}luUU8!kZD$_F@SrXg?66B+T zTn?A2(rQJdypbbD+LT`^& zgYe|XdG0As$qG-}^PZBuw~(XL%MRODqrA4zUiG9s`Nz%*PzuVboPPi2DF?uR&BuA> zLLy_g#-%XZU26X-3Jt<9^m6;M01f7MS&#{$YW5A07*x7lk;L$~-6R5y=6o(5?=89b zQLxHNZJY@4cXe_JtDr(yzY^J5+!EACgHiMuxpX6BlxUGKL)p1%r8O!XD;*^ZL$*@{ z20wUl-w-6lwpnYggmGzv+kWCD41&G+gu;IML|jX^Tst8eEzjGx*Syj$TSUC}D4e)nuCNAe2YT}XP%JOKdV?%cTyXCPCku$tXkkpAn z>=a%rK>rF-#hF5^^9=UftQpRoB^(|Ch1}VO9a5osNC~K(Jpra@_~OnJiRr}dYaKEV z`(D@x>u-gK)Y7d_-)n58KwJTN#c9m$ZWe(;wGJm2*;Y>sr?1)$2{0B#DHtsFb0H?G zGBerEdNSyW(;i{Lg~FfN1249Ao^g|GPdSF$Y#Tp&Wn0RvI?<-oC*N9(hKq?|mN9p)W@*xtd#a1-7YW)y$;!ynowmY$D0we!`*5o;i2 z+(B$vQv#~N&lPX6pB189#(BPdPJq4pmo;{=ry4u~i%534kn~I4S?-3cJi!EcBZPCV zR%~nz_ViIM5T-f9Sg)fLW`JZEAs+ni5QPT07vvfCeGzETB*evwZHf>L8j|58JX?UN zOtQm;BLzQ)hdsa61#FM~_NWNVZC*M!m)atz8YT03sJ&*V?e+v{KbN*0w!K~etW50g zVRfRdD@Q$+_m=3;`jjZKM`cMS<27OS#mIyS4*nK$#I;|0Q96N0&dvTVLR|T`a)*E4 zve*{ZCp-y+>g@ti=sj}Va6FLc`-BrnQVKh<-2PnzhC@O)3A;v!vFy{cm*so7Cweo5 z3j03LJU!dI*Qz{UQE?_O&mIuc4;l02VjRN!+$2)`UGURJ&mYU(yK==0L-vzjbfMUMe(pT#+gwqoigDoTUY4{+MTp<<35m%CiQv>ED#noU3&bfe!c(#Rhew{Ae{z9coQp(J z8#_P3KKjw`XwRHB=d@G 
zrV!IUKAEs%3TrBZJezKTc3LEcp`-R-N9~~q$y)>yuf)zNl5Q2{^J{tA0YCkN_7{QJ z#{oDvw|{#i7Q^(-LpbVFf=mHy#3)$*MJF(Pcu7T#=dv8(>-xufh(OHHzc<#4L1*zi9v|08|h2w7u3(@?bIaKvNpL~sU60h^* zr{_FuqdjU2)+#v!-zFG47rq@y$2e%yfLfVw98Y*u<9R1rB1-&G1nD%)lFhcGIpXNR z#uhj3MG?aiX~yi9y(AbLid*x@^?OA$CWI?$Q?iZw>wkd6R67e{m z+dCIGorkFjaLy*$bdNo@s~ir8EfA#G$!+V{owf_np|k7jtjQB*H}NX9!=4nRira?` z8D{_O=?37j>Q#j)K{A8|)+watJH0&l@N$~cGx)n+LYeN~{}70~UVom5)Apg&u^5DK zc!&P1kpE!I5j+39$EGG1U~%}%LJSpWn4?cmwJRLzNX#ARTe-YFdvdQ2SgE(6KZb29 zMbpOuo>gvAm{z>B7PVtN)|877SIa9qWiRj5MRuQP>_?uz2+w3S9v(7g zn9cL>#FA>8?^zgRgf(oZAVY-lpkvdmNhqFl@u`Jw_GH_AK7FaAil1|MsxjD~cvUdo zY_(c-apn8J&wlC2SYXv$^Ew3MkQVXShM5=hKklMIVI+;VYo=N?u8?W1njf#2r|-{ay|yPd-j2TjVbg95^>W2SSIx%B^blACg`RSG! z_OW%bDTp2v+ob|b0NuWIoLwzMMm7y?vV8?9|8&hvyH;TaAy@_3pqKCpkL}sxo3_!z z>9#rcvWHnI_L}GF5|&$Oe-MR%D3NNzhaZiFhOHBx=ra4LD72>}DHjY^dpxZ>w|3>Z z)eG!fqR^yK@M(Uaa9prg^zMG8P4K)D70Xzar|j0Fw=M7(F6e9>jY8VoC5$1?@gBaq z&DJC`%QqM{S_f|pQ;haA5#vv0RH70d-{T_0N>8rir1X?97lWJcu#*bLbrPNlVbzP` zGSzyykc+`1_NJ&&7mFL%VHURxP~)_VuW^n2PO<7Cm(usI&*j%STH>9%?31FB;(LQ{ zwa+RXn=@1vD!(B7PhUX<6q~G%6SK_?*r?7n@5F#2Wg;^_ zysxKhy#VW)Pb_jf7(K*`h)DGJyOXU=LF#&O3RhoDzoKTh(_Rx%YmfZ}@u*)3Qrv+4 z1Bcr=k7GKAj~rot@wh6UF6YJ;!oL+F)E*6aY zT{)-kV6y;q8T*zOl9w)p*F02+aT+{kunqSDh9@%P;a}xDq$St%z4kmr?^T|`lZvI$ zCl;SrZPOeSq!^HS4kQa{N4*g7)!6BtaX7I8o5vIFqZ=tC&P)W!QBr6JkDsJk#MI z(tN;bL}XD0&mcSbYtu!c?)KT++h;Ad8G^Cy+soyvDG;Nn@y+B_3$i{Z%%#b#9Sy-8T7 zm%slDvyvic8C z!{|zx_Jf-#MblZoQ5)z*fPHLiTcz?zbyMrA}E= zz}~bYwl<`=1EQU$XFP1TDTbNkIc<87L^@y@k!aMpDJ|z#EV4{hA-mG4SB zAqw+`+%j@ZrS_~ajqcXFyPZ>*&Y^|H*7>fGx*&Tudjdn|-?zjB_+Wttccl;ssBK)b zFqO~n^c0B}@eVCnrbfF~1pg7q*tB&XW{9&{4TE-fi_%&hW3sBl;I%8eRuJB z94s0mm)#Uzo`-t!!J~)SEegjURb?{26j3WU4uzhc&8&UaLaP$gk!i!;ASkmloIP5C zU8bEjN3q(BYSslu)Ls4t9~M3SY*4ynEEjvJbXTv^YXtQ{!XTDb={8|MF6vcS+BM;C ztL^uk($_z=*A6=DsGX&ycSw-m28zxV_Lx8%Q`pc!5cV077@h07UuDlK%nXf6>Rsqp zLNrlJ3>Lv$`;7=%_u^5s+Fu0eT6R-Qcv*<=3r+S9(JAru#f|5~r*|H-_ccPN^x