From aed82f6837a3ea515f4d50201cfc77effc7d41b4 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 20 Apr 2024 13:27:12 +0300 Subject: [PATCH 001/101] common : try to fix Android CI (#6780) * common : disable get_math_cpu_count() until Android CI gets fixed * common : another try --- common/common.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index cf69535e2d1f5..b6143e41c02cb 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -108,7 +108,7 @@ int32_t get_num_physical_cores() { return n_threads > 0 ? (n_threads <= 4 ? n_threads : n_threads / 2) : 4; } -#if defined(__x86_64__) && defined(__linux__) +#if defined(__x86_64__) && defined(__linux__) && !defined(__ANDROID__) #include static void cpuid(unsigned leaf, unsigned subleaf, @@ -162,7 +162,7 @@ static int count_math_cpus(int cpu_count) { * Returns number of CPUs on system that are useful for math. */ int get_math_cpu_count() { -#if defined(__x86_64__) && defined(__linux__) +#if defined(__x86_64__) && defined(__linux__) && !defined(__ANDROID__) int cpu_count = sysconf(_SC_NPROCESSORS_ONLN); if (cpu_count < 1) { return get_num_physical_cores(); From b8109bc0139f15a5b321909f47510b89dca47ffc Mon Sep 17 00:00:00 2001 From: Jan Boon Date: Sun, 21 Apr 2024 00:29:50 +0800 Subject: [PATCH 002/101] doc : server tests require llama to be built with curl enabled (#6788) --- examples/server/tests/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/tests/README.md b/examples/server/tests/README.md index feb2b1d6cf5de..83c0208f36c09 100644 --- a/examples/server/tests/README.md +++ b/examples/server/tests/README.md @@ -29,7 +29,7 @@ To mitigate it, you can increase values in `n_predict`, `kv_size`. cd ../../.. mkdir build cd build -cmake ../ +cmake -DLLAMA_CURL=ON ../ cmake --build . 
--target server ``` From 69dcffa4ec8bea8e50d598e7c9f1e349336b5999 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Sun, 21 Apr 2024 16:48:48 +0800 Subject: [PATCH 003/101] updated lite and colab --- colab.ipynb | 2 +- klite.embd | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/colab.ipynb b/colab.ipynb index e29081a7bebd6..ca791c6859e34 100644 --- a/colab.ipynb +++ b/colab.ipynb @@ -48,7 +48,7 @@ "source": [ "#@title v-- Enter your model below and then click this to start Koboldcpp\r\n", "\r\n", - "Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_S.gguf\" #@param [\"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_S.gguf\",\"https://huggingface.co/Sao10K/Fimbulvetr-11B-v2-GGUF/resolve/main/Fimbulvetr-11B-v2-Test-14.q4_K_M.gguf\",\"https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/ReMM-SLERP-L2-13B-GGUF/resolve/main/remm-slerp-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Xwin-LM-13B-v0.2-GGUF/resolve/main/xwin-lm-13b-v0.2.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Stheno-L2-13B-GGUF/resolve/main/stheno-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/MythoMax-L2-Kimiko-v2-13B-GGUF/resolve/main/mythomax-l2-kimiko-v2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/airoboros-mistral2.2-7B-GGUF/resolve/main/airoboros-mistral2.2-7b.Q4_K_S.gguf\",\"https://huggingface.co/afrideva/phi-2-uncensored-GGUF/resolve/main/phi-2-uncensored.q3_k_m.gguf\"]{allow-input: true}\r\n", + "Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_S.gguf\" #@param [\"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_S.gguf\",\"https://huggingface.co/Sao10K/Fimbulvetr-11B-v2-GGUF/resolve/main/Fimbulvetr-11B-v2-Test-14.q4_K_M.gguf\",\"https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/ReMM-SLERP-L2-13B-GGUF/resolve/main/remm-slerp-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Xwin-LM-13B-v0.2-GGUF/resolve/main/xwin-lm-13b-v0.2.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Stheno-L2-13B-GGUF/resolve/main/stheno-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/MythoMax-L2-Kimiko-v2-13B-GGUF/resolve/main/mythomax-l2-kimiko-v2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/airoboros-mistral2.2-7B-GGUF/resolve/main/airoboros-mistral2.2-7b.Q4_K_S.gguf\",\"https://huggingface.co/afrideva/phi-2-uncensored-GGUF/resolve/main/phi-2-uncensored.q3_k_m.gguf\",\"https://huggingface.co/grimjim/kukulemon-7B-GGUF/resolve/main/kukulemon-7B.Q8_0.gguf\"]{allow-input: true}\r\n", "Layers = 99 #@param [99]{allow-input: true}\r\n", "ContextSize = 4096 #@param [4096] {allow-input: true}\r\n", "ForceRebuild = False #@param {type:\"boolean\"}\r\n", diff --git a/klite.embd b/klite.embd index d9418216323d9..414398907effd 100644 --- a/klite.embd +++ b/klite.embd @@ -91,6 +91,9 @@ Current version: 134 .settingsmall{ font-size: 10px; } + .settingsmall.widerinput { + width: 8ch; + } .settinglabel input:focus { color: #cdf; @@ -8625,7 +8628,7 @@ Current version: 134 //clean and clamp invalid values localsettings.max_context_length = cleannum(localsettings.max_context_length, 8, 999999); - localsettings.max_length = cleannum(localsettings.max_length, 1, 
(localsettings.max_context_length-1)); + localsettings.max_length = cleannum(localsettings.max_length, 1, Math.floor(localsettings.max_context_length*0.85)); //clamp to max 85% of max ctx localsettings.temperature = cleannum(localsettings.temperature, 0.01, 5); localsettings.rep_pen = cleannum(localsettings.rep_pen, 0.1, 5); localsettings.rep_pen_range = cleannum(localsettings.rep_pen_range, 0, localsettings.max_context_length); @@ -14583,7 +14586,7 @@ Current version: 134
Max Ctx. Tokens ?Max number of context tokens submitted to the AI. Must exceed Amount to Generate. Can be further increased by editing the textbox. Older models stop at 2048, newer ones can do 4096 or greater.
-
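The key change in the Lite settings sanitizer above is the new upper bound: `max_length` was previously allowed to reach `max_context_length - 1`, and is now capped at 85% of the context window so the prompt always keeps some headroom. A minimal sketch of the same clamp, rendered in C++ for illustration (the actual code is the JavaScript `cleannum` call in `klite.embd`; the standalone helper below and its name are hypothetical):

```cpp
#include <algorithm>
#include <cmath>

// Illustrative clamp mirroring cleannum(localsettings.max_length, 1,
// Math.floor(localsettings.max_context_length * 0.85)): the requested
// generation length is forced into [1, 85% of the context window].
static int clamp_max_length(int requested, int max_context_length) {
    const int upper = static_cast<int>(std::floor(max_context_length * 0.85));
    return std::clamp(requested, 1, std::max(1, upper));
}
```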
From b97bc3966e852adb626c90be64fd48282800f504 Mon Sep 17 00:00:00 2001 From: Pedro Cuenca Date: Sun, 21 Apr 2024 13:50:41 +0200 Subject: [PATCH 004/101] llama : support Llama 3 HF conversion (#6745) * Support Llama 3 conversion The tokenizer is BPE. * style * Accept suggestion Co-authored-by: Sourab Mangrulkar <13534540+pacman100@users.noreply.github.com> * llama : add llama_token_is_eog() ggml-ci * llama : auto-detect more EOT tokens when missing in KV data * convert : replacing EOS token is a hack * llama : fix codegemma EOT token + add TODOs * llama : fix model type string for 8B model --------- Co-authored-by: Sourab Mangrulkar <13534540+pacman100@users.noreply.github.com> Co-authored-by: Georgi Gerganov --- convert-hf-to-gguf.py | 49 +++++++++---- convert.py | 9 ++- examples/batched.swift/Sources/main.swift | 2 +- examples/batched/batched.cpp | 4 +- examples/beam-search/beam-search.cpp | 2 +- examples/infill/infill.cpp | 10 +-- .../app/src/main/cpp/llama-android.cpp | 2 +- .../llama.cpp.swift/LibLlama.swift | 2 +- examples/llava/llava-cli.cpp | 2 +- examples/lookahead/lookahead.cpp | 2 +- examples/lookup/lookup.cpp | 2 +- examples/main/main.cpp | 8 +-- examples/parallel/parallel.cpp | 2 +- examples/passkey/passkey.cpp | 4 +- examples/server/server.cpp | 2 +- examples/server/utils.hpp | 4 -- examples/simple/simple.cpp | 4 +- examples/speculative/speculative.cpp | 2 +- llama.cpp | 72 ++++++++++++++----- llama.h | 5 +- 20 files changed, 124 insertions(+), 65 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 358dba8ed9d90..4fd916cba3ed7 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1301,15 +1301,23 @@ def set_vocab(self): try: self. _set_vocab_sentencepiece() except FileNotFoundError: - self._set_vocab_llama_hf() - - special_vocab = gguf.SpecialVocab(self.dir_model, load_merges=False, - special_token_types = ['prefix', 'suffix', 'middle', 'eot']) - special_vocab._set_special_token("prefix", 32007) - special_vocab._set_special_token("suffix", 32008) - special_vocab._set_special_token("middle", 32009) - special_vocab._set_special_token("eot", 32010) - special_vocab.add_to_gguf(self.gguf_writer) + try: + self._set_vocab_llama_hf() + except (FileNotFoundError, TypeError): + # Llama 3 + self._set_vocab_gpt2() + + # Apply to CodeLlama only (and ignore for Llama 3 with a vocab size of 128256) + if self.hparams.get("vocab_size", 32000) == 32016: + special_vocab = gguf.SpecialVocab( + self.dir_model, load_merges=False, + special_token_types = ['prefix', 'suffix', 'middle', 'eot'] + ) + special_vocab._set_special_token("prefix", 32007) + special_vocab._set_special_token("suffix", 32008) + special_vocab._set_special_token("middle", 32009) + special_vocab._set_special_token("eot", 32010) + special_vocab.add_to_gguf(self.gguf_writer) def set_gguf_parameters(self): super().set_gguf_parameters() @@ -2194,6 +2202,8 @@ def set_vocab(self): old_eos = special_vocab.special_token_ids["eos"] if "chat" in os.path.basename(self.dir_model.absolute()): # For the chat model, we replace the eos with '<|im_end|>'. 
+ # TODO: this is a hack, should be fixed + # https://github.com/ggerganov/llama.cpp/pull/6745#issuecomment-2067687048 special_vocab.special_token_ids["eos"] = self._try_get_sft_eos(tokenizer) print(f"Replace eos:{old_eos} with a special token:{special_vocab.special_token_ids['eos']} \ in chat mode so that the conversation can end normally.") @@ -2429,12 +2439,15 @@ class GemmaModel(Model): def set_vocab(self): self._set_vocab_sentencepiece() + + # TODO: these special tokens should be exported only for the CodeGemma family special_vocab = gguf.SpecialVocab(self.dir_model, load_merges=False, - special_token_types = ['prefix', 'suffix', 'middle', 'eot']) + special_token_types = ['prefix', 'suffix', 'middle', 'fsep', 'eot']) special_vocab._set_special_token("prefix", 67) special_vocab._set_special_token("suffix", 69) special_vocab._set_special_token("middle", 68) - special_vocab._set_special_token("eot", 70) + special_vocab._set_special_token("fsep", 70) + special_vocab._set_special_token("eot", 107) special_vocab.add_to_gguf(self.gguf_writer) def set_gguf_parameters(self): @@ -2523,28 +2536,34 @@ def set_vocab(self): field = neox_reader.get_field(gguf.Keys.Tokenizer.MODEL) self.gguf_writer.add_tokenizer_model(bytes(field.parts[-1])) + field = neox_reader.get_field(gguf.Keys.Tokenizer.LIST) self.gguf_writer.add_token_list([bytes(field.parts[i]) for i in field.data][:vocab_size]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.TOKEN_TYPE) self.gguf_writer.add_token_types([field.parts[i].tolist()[0] for i in field.data][:vocab_size]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.MERGES) self.gguf_writer.add_token_merges([bytes(field.parts[i]) for i in field.data]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.BOS_ID) self.gguf_writer.add_bos_token_id(field.parts[-1].tolist()[0]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.EOS_ID) self.gguf_writer.add_eos_token_id(field.parts[-1].tolist()[0]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.UNK_ID) self.gguf_writer.add_unk_token_id(field.parts[-1].tolist()[0]) def set_gguf_parameters(self): - d_model = self.find_hparam(["hidden_size", "d_model"]) - d_conv = self.find_hparam(["conv_kernel", "d_conv"], optional=True) or 4 + d_model = self.find_hparam(["hidden_size", "d_model"]) + d_conv = self.find_hparam(["conv_kernel", "d_conv"], optional=True) or 4 d_inner = self.find_hparam(["intermediate_size", "d_inner"], optional=True) or 2 * d_model - d_state = self.find_hparam(["state_size", "d_state"], optional=True) or 16 + d_state = self.find_hparam(["state_size", "d_state"], optional=True) or 16 # ceiling division # ref: https://stackoverflow.com/a/17511341/22827863 # ref: https://github.com/state-spaces/mamba/blob/ce59daea3a090d011d6476c6e5b97f6d58ddad8b/mamba_ssm/modules/mamba_simple.py#L58 - dt_rank = self.find_hparam(["time_step_rank", "dt_rank"], optional=True) or -(d_model // -16) + dt_rank = self.find_hparam(["time_step_rank", "dt_rank"], optional=True) or -(d_model // -16) rms_norm_eps = self.find_hparam(["layer_norm_epsilon", "rms_norm_eps"], optional=True) or 1e-5 # Fail early for models which don't have a block expansion factor of 2 diff --git a/convert.py b/convert.py index 24df0a4d84dc3..1c700cf6a3d65 100755 --- a/convert.py +++ b/convert.py @@ -525,7 +525,14 @@ def __init__(self, base_path: Path): # pre-check so we know if we need transformers tokenizer_model: dict[str, Any] = tokenizer_json['model'] - if ( + is_llama3 = ( + tokenizer_model['type'] == 'BPE' and tokenizer_model.get('ignore_merges', False) + and 
not tokenizer_model.get('byte_fallback', True) + ) + if is_llama3: + raise TypeError('Llama 3 must be converted with BpeVocab') + + if not is_llama3 and ( tokenizer_model['type'] != 'BPE' or not tokenizer_model.get('byte_fallback', False) or tokenizer_json['decoder']['type'] != 'Sequence' ): diff --git a/examples/batched.swift/Sources/main.swift b/examples/batched.swift/Sources/main.swift index d75c503d58311..5764acb6d5825 100644 --- a/examples/batched.swift/Sources/main.swift +++ b/examples/batched.swift/Sources/main.swift @@ -153,7 +153,7 @@ while n_cur <= n_len { // const llama_token new_token_id = llama_sample_token_greedy(ctx, &candidates_p); // is it an end of stream? -> mark the stream as finished - if new_token_id == llama_token_eos(model) || n_cur == n_len { + if llama_token_is_eog(model, new_token_id) || n_cur == n_len { i_batch[i] = -1 // print("") if n_parallel > 1 { diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp index 7aaf63ceb1a7c..be30d20bf8194 100644 --- a/examples/batched/batched.cpp +++ b/examples/batched/batched.cpp @@ -191,8 +191,8 @@ int main(int argc, char ** argv) { //const llama_token new_token_id = llama_sample_token_greedy(ctx, &candidates_p); - // is it an end of stream? -> mark the stream as finished - if (new_token_id == llama_token_eos(model) || n_cur == n_len) { + // is it an end of generation? -> mark the stream as finished + if (llama_token_is_eog(model, new_token_id) || n_cur == n_len) { i_batch[i] = -1; LOG_TEE("\n"); if (n_parallel > 1) { diff --git a/examples/beam-search/beam-search.cpp b/examples/beam-search/beam-search.cpp index 866c6d7a62867..3d34378a506eb 100644 --- a/examples/beam-search/beam-search.cpp +++ b/examples/beam-search/beam-search.cpp @@ -47,7 +47,7 @@ struct beam_search_callback_data { // In this case, end-of-beam (eob) is equivalent to end-of-sentence (eos) but this need not always be the same. // For example, eob can be flagged due to maximum token length, stop words, etc. static bool is_at_eob(const beam_search_callback_data & callback_data, const llama_token * tokens, size_t n_tokens) { - return n_tokens && tokens[n_tokens-1] == llama_token_eos(llama_get_model(callback_data.ctx)); + return n_tokens && llama_token_is_eog(llama_get_model(callback_data.ctx), tokens[n_tokens-1]); } // Function matching type llama_beam_search_callback_fn_t. 
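The change repeated across these example programs is mechanical: every comparison against the single `llama_token_eos(model)` id becomes a call to the new `llama_token_is_eog()` predicate that this patch declares in `llama.h` further below. The predicate also matches EOT and the auto-detected end-of-generation tokens, which is what lets Llama 3's `<|eot_id|>` terminate generation. A minimal sketch of the post-patch stop condition (the wrapper function is hypothetical; only `llama_token_is_eog()` itself comes from the patch):

```cpp
#include "llama.h"

// Illustrative stop check after this patch: llama_token_is_eog() returns
// true for any end-of-generation token (EOS, EOT, an auto-detected
// <|eot_id|>, ...), not only the one EOS id the old comparison covered.
static bool generation_should_stop(const struct llama_model * model,
                                   llama_token new_token_id,
                                   int n_cur, int n_len) {
    return llama_token_is_eog(model, new_token_id) || n_cur == n_len;
}
```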
diff --git a/examples/infill/infill.cpp b/examples/infill/infill.cpp index c69dcd06e461f..afac145f63934 100644 --- a/examples/infill/infill.cpp +++ b/examples/infill/infill.cpp @@ -586,7 +586,7 @@ int main(int argc, char ** argv) { // deal with eot token in infill mode if ((llama_sampling_last(ctx_sampling) == llama_token_eot(model) || is_interacting) && params.interactive){ - if(is_interacting && !params.interactive_first) { + if (is_interacting && !params.interactive_first) { // print an eot token printf("%s", llama_token_to_piece(ctx, llama_token_eot(model)).c_str()); } @@ -651,8 +651,8 @@ int main(int argc, char ** argv) { // LOG_TEE("took new input\n"); is_interacting = false; } - // deal with end of text token in interactive mode - else if (llama_sampling_last(ctx_sampling) == llama_token_eos(model)) { + // deal with end of generation tokens in interactive mode + else if (llama_token_is_eog(model, llama_sampling_last(ctx_sampling))) { LOG("found EOS token\n"); if (params.interactive) { @@ -731,8 +731,8 @@ int main(int argc, char ** argv) { } } - // end of text token - if (!embd.empty() && embd.back() == llama_token_eos(model) && !params.interactive) { + // end of generation + if (!embd.empty() && llama_token_is_eog(model, embd.back()) && !params.interactive) { break; } diff --git a/examples/llama.android/app/src/main/cpp/llama-android.cpp b/examples/llama.android/app/src/main/cpp/llama-android.cpp index ce8ab3b709407..4af9de3038359 100644 --- a/examples/llama.android/app/src/main/cpp/llama-android.cpp +++ b/examples/llama.android/app/src/main/cpp/llama-android.cpp @@ -408,7 +408,7 @@ Java_com_example_llama_Llm_completion_1loop( const auto new_token_id = llama_sample_token_greedy(context, &candidates_p); const auto n_cur = env->CallIntMethod(intvar_ncur, la_int_var_value); - if (new_token_id == llama_token_eos(model) || n_cur == n_len) { + if (llama_token_is_eog(model, new_token_id) || n_cur == n_len) { return env->NewStringUTF(""); } diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index c249291aea110..70c43a3852732 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -158,7 +158,7 @@ actor LlamaContext { new_token_id = llama_sample_token_greedy(context, &candidates_p) } - if new_token_id == llama_token_eos(model) || n_cur == n_len { + if llama_token_is_eog(model, new_token_id) || n_cur == n_len { print("\n") let new_token_str = String(cString: temporary_invalid_cchars + [0]) temporary_invalid_cchars.removeAll() diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index 75948806ee5d4..50dac4caecfe2 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -45,7 +45,7 @@ static const char * sample(struct llama_sampling_context * ctx_sampling, const llama_token id = llama_sampling_sample(ctx_sampling, ctx_llama, NULL); llama_sampling_accept(ctx_sampling, ctx_llama, id, true); static std::string ret; - if (id == llama_token_eos(llama_get_model(ctx_llama))) { + if (llama_token_is_eog(llama_get_model(ctx_llama), id)) { ret = ""; } else { ret = llama_token_to_piece(ctx_llama, id); diff --git a/examples/lookahead/lookahead.cpp b/examples/lookahead/lookahead.cpp index 5af6a8ab6c92b..9c3540b2008c2 100644 --- a/examples/lookahead/lookahead.cpp +++ b/examples/lookahead/lookahead.cpp @@ -299,7 +299,7 @@ int main(int argc, char ** argv) { } fflush(stdout); - if (id == llama_token_eos(model)) { + if 
(llama_token_is_eog(model, id)) { has_eos = true; } diff --git a/examples/lookup/lookup.cpp b/examples/lookup/lookup.cpp index 65ed408a27583..9526e898fe763 100644 --- a/examples/lookup/lookup.cpp +++ b/examples/lookup/lookup.cpp @@ -141,7 +141,7 @@ int main(int argc, char ** argv){ printf("%s", token_str.c_str()); } - if (id == llama_token_eos(model)) { + if (llama_token_is_eog(model, id)) { has_eos = true; } diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 249fc2bb605b3..1180734b9760d 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -795,8 +795,8 @@ int main(int argc, char ** argv) { } } - // deal with end of text token in interactive mode - if (llama_sampling_last(ctx_sampling) == llama_token_eos(model)) { + // deal with end of generation tokens in interactive mode + if (llama_token_is_eog(model, llama_sampling_last(ctx_sampling))) { LOG("found EOS token\n"); if (params.interactive) { @@ -920,8 +920,8 @@ int main(int argc, char ** argv) { } } - // end of text token - if (!embd.empty() && embd.back() == llama_token_eos(model) && !(params.instruct || params.interactive || params.chatml)) { + // end of generation + if (!embd.empty() && llama_token_is_eog(model, embd.back()) && !(params.instruct || params.interactive || params.chatml)) { LOG_TEE(" [end of text]\n"); break; } diff --git a/examples/parallel/parallel.cpp b/examples/parallel/parallel.cpp index f66c91013eaeb..7c5595d6edb2d 100644 --- a/examples/parallel/parallel.cpp +++ b/examples/parallel/parallel.cpp @@ -359,7 +359,7 @@ int main(int argc, char ** argv) { // client.id, client.seq_id, id, client.n_decoded, client.i_batch, token_str.c_str()); if (client.n_decoded > 2 && - (id == llama_token_eos(model) || + (llama_token_is_eog(model, id) || (params.n_predict > 0 && client.n_decoded + client.n_prompt >= params.n_predict) || client.response.find("User:") != std::string::npos || client.response.find('\n') != std::string::npos)) { diff --git a/examples/passkey/passkey.cpp b/examples/passkey/passkey.cpp index 2cbc9e1fa89ed..f2ef9ca10d4a2 100644 --- a/examples/passkey/passkey.cpp +++ b/examples/passkey/passkey.cpp @@ -252,8 +252,8 @@ int main(int argc, char ** argv) { // sample the most likely token const llama_token new_token_id = llama_sample_token_greedy(ctx, &candidates_p); - // is it an end of stream? - if (new_token_id == llama_token_eos(model) || n_cur == n_len) { + // is it an end of generation? 
+ if (llama_token_is_eog(model, new_token_id) || n_cur == n_len) { LOG_TEE("\n"); break; diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 634e653ada284..25bc296396772 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1201,7 +1201,7 @@ struct server_context { }); } - if (result.tok == llama_token_eos(model)) { + if (llama_token_is_eog(model, result.tok)) { slot.stopped_eos = true; slot.has_next_token = false; diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index a8d43ac63bf11..1a22125028204 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -381,10 +381,6 @@ static json oaicompat_completion_params_parse( } else { llama_params["stop"] = json_value(body, "stop", json::array()); } - // Some chat templates don't use EOS token to stop generation - // We must add their end sequences to list of stop words - llama_params["stop"].push_back("<|im_end|>"); // chatml - llama_params["stop"].push_back(""); // gemma // Handle "response_format" field if (body.contains("response_format")) { diff --git a/examples/simple/simple.cpp b/examples/simple/simple.cpp index 39e2d8ea490e3..b0f8e0fdc4987 100644 --- a/examples/simple/simple.cpp +++ b/examples/simple/simple.cpp @@ -133,8 +133,8 @@ int main(int argc, char ** argv) { // sample the most likely token const llama_token new_token_id = llama_sample_token_greedy(ctx, &candidates_p); - // is it an end of stream? - if (new_token_id == llama_token_eos(model) || n_cur == n_len) { + // is it an end of generation? + if (llama_token_is_eog(model, new_token_id) || n_cur == n_len) { LOG_TEE("\n"); break; diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 6a7367b0cde6b..12e46fbc91a24 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -360,7 +360,7 @@ int main(int argc, char ** argv) { } } - if (token_id == llama_token_eos(model_tgt)) { + if (llama_token_is_eog(model_tgt, token_id)) { has_eos = true; } ++n_predict; diff --git a/llama.cpp b/llama.cpp index fa7c022f29130..8ca9650de09ad 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2120,7 +2120,7 @@ struct llama_vocab { id special_prefix_id = -1; id special_suffix_id = -1; id special_middle_id = -1; - id special_eot_id = -1; + id special_eot_id = -1; // TODO: move above after "eos_id", and here add "file separator" token bool add_space_prefix = true; @@ -3770,7 +3770,7 @@ static void llm_load_hparams( switch (hparams.n_layer) { case 22: model.type = e_model::MODEL_1B; break; case 26: model.type = e_model::MODEL_3B; break; - case 32: model.type = e_model::MODEL_7B; break; + case 32: model.type = hparams.n_head == hparams.n_head_kv ? 
e_model::MODEL_7B : e_model::MODEL_8B; break; // LLaMa 8B v3 uses GQA case 40: model.type = e_model::MODEL_13B; break; case 48: model.type = e_model::MODEL_34B; break; case 60: model.type = e_model::MODEL_30B; break; @@ -4179,7 +4179,10 @@ static void llm_load_vocab( vocab.special_prefix_id = 67; vocab.special_suffix_id = 69; vocab.special_middle_id = 68; - vocab.special_eot_id = 70; + // TODO: this is not EOT, it is "file separator" token, needs fix + // https://huggingface.co/google/codegemma-7b-it/blob/9b1d9231388358c04d90bd003458f5070d97db44/tokenizer_config.json#L565-L572 + //vocab.special_eot_id = 70; + vocab.special_eot_id = 107; } } @@ -4308,6 +4311,7 @@ static void llm_load_vocab( { LLM_KV_TOKENIZER_MIDDLE_ID, vocab.special_middle_id }, { LLM_KV_TOKENIZER_EOT_ID, vocab.special_eot_id }, }; + for (const auto & it : special_token_types) { const std::string & key = kv(std::get<0>(it)); int32_t & id = std::get<1>(it); @@ -4322,7 +4326,6 @@ static void llm_load_vocab( } else { id = new_id; } - } // Handle add_bos_token and add_eos_token @@ -4336,6 +4339,27 @@ static void llm_load_vocab( vocab.special_add_eos = int(temp); } } + + // find EOT token: "<|eot_id|>", "<|im_emd|>", "", etc. + // + // TODO: convert scripts should provide this token through the KV metadata LLAMA_KV_TOKENIZER_EOT_ID + // for now, we apply this workaround to find the EOT token based on its text + if (vocab.special_eot_id == -1) { + for (const auto & t : vocab.token_to_id) { + if ( + // TODO: gemma "" is exported as a normal token, so the following check does not work + // need to fix convert script + //vocab.id_to_token[t.second].type == LLAMA_TOKEN_TYPE_CONTROL && + (t.first == "<|eot_id|>" || + t.first == "<|im_emd|>" || + t.first == "" + ) + ) { + vocab.special_eot_id = t.second; + break; + } + } + } } // build special tokens cache @@ -4498,14 +4522,19 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: general.name = %s\n", __func__, model.name.c_str()); // special tokens - if (vocab.special_bos_id != -1) { LLAMA_LOG_INFO( "%s: BOS token = %d '%s'\n", __func__, vocab.special_bos_id, vocab.id_to_token[vocab.special_bos_id].text.c_str() ); } - if (vocab.special_eos_id != -1) { LLAMA_LOG_INFO( "%s: EOS token = %d '%s'\n", __func__, vocab.special_eos_id, vocab.id_to_token[vocab.special_eos_id].text.c_str() ); } - if (vocab.special_unk_id != -1) { LLAMA_LOG_INFO( "%s: UNK token = %d '%s'\n", __func__, vocab.special_unk_id, vocab.id_to_token[vocab.special_unk_id].text.c_str() ); } - if (vocab.special_sep_id != -1) { LLAMA_LOG_INFO( "%s: SEP token = %d '%s'\n", __func__, vocab.special_sep_id, vocab.id_to_token[vocab.special_sep_id].text.c_str() ); } - if (vocab.special_pad_id != -1) { LLAMA_LOG_INFO( "%s: PAD token = %d '%s'\n", __func__, vocab.special_pad_id, vocab.id_to_token[vocab.special_pad_id].text.c_str() ); } - if (vocab.special_cls_id != -1) { LLAMA_LOG_INFO( "%s: CLS token = %d '%s'\n", __func__, vocab.special_cls_id, vocab.id_to_token[vocab.special_cls_id].text.c_str() ); } - if (vocab.special_mask_id != -1) { LLAMA_LOG_INFO( "%s: MASK token = %d '%s'\n", __func__, vocab.special_mask_id, vocab.id_to_token[vocab.special_mask_id].text.c_str() ); } - if (vocab.linefeed_id != -1) { LLAMA_LOG_INFO( "%s: LF token = %d '%s'\n", __func__, vocab.linefeed_id, vocab.id_to_token[vocab.linefeed_id].text.c_str() ); } + if (vocab.special_bos_id != -1) { LLAMA_LOG_INFO( "%s: BOS token = %d '%s'\n", __func__, vocab.special_bos_id, 
vocab.id_to_token[vocab.special_bos_id].text.c_str() ); } + if (vocab.special_eos_id != -1) { LLAMA_LOG_INFO( "%s: EOS token = %d '%s'\n", __func__, vocab.special_eos_id, vocab.id_to_token[vocab.special_eos_id].text.c_str() ); } + if (vocab.special_unk_id != -1) { LLAMA_LOG_INFO( "%s: UNK token = %d '%s'\n", __func__, vocab.special_unk_id, vocab.id_to_token[vocab.special_unk_id].text.c_str() ); } + if (vocab.special_sep_id != -1) { LLAMA_LOG_INFO( "%s: SEP token = %d '%s'\n", __func__, vocab.special_sep_id, vocab.id_to_token[vocab.special_sep_id].text.c_str() ); } + if (vocab.special_pad_id != -1) { LLAMA_LOG_INFO( "%s: PAD token = %d '%s'\n", __func__, vocab.special_pad_id, vocab.id_to_token[vocab.special_pad_id].text.c_str() ); } + if (vocab.special_cls_id != -1) { LLAMA_LOG_INFO( "%s: CLS token = %d '%s'\n", __func__, vocab.special_cls_id, vocab.id_to_token[vocab.special_cls_id].text.c_str() ); } + if (vocab.special_mask_id != -1) { LLAMA_LOG_INFO( "%s: MASK token = %d '%s'\n", __func__, vocab.special_mask_id, vocab.id_to_token[vocab.special_mask_id].text.c_str() ); } + + if (vocab.linefeed_id != -1) { LLAMA_LOG_INFO( "%s: LF token = %d '%s'\n", __func__, vocab.linefeed_id, vocab.id_to_token[vocab.linefeed_id].text.c_str() ); } + if (vocab.special_prefix_id != -1) { LLAMA_LOG_INFO( "%s: PRE token = %d '%s'\n", __func__, vocab.special_prefix_id, vocab.id_to_token[vocab.special_prefix_id].text.c_str() ); } + if (vocab.special_suffix_id != -1) { LLAMA_LOG_INFO( "%s: SUF token = %d '%s'\n", __func__, vocab.special_suffix_id, vocab.id_to_token[vocab.special_suffix_id].text.c_str() ); } + if (vocab.special_middle_id != -1) { LLAMA_LOG_INFO( "%s: MID token = %d '%s'\n", __func__, vocab.special_middle_id, vocab.id_to_token[vocab.special_middle_id].text.c_str() ); } + if (vocab.special_eot_id != -1) { LLAMA_LOG_INFO( "%s: EOT token = %d '%s'\n", __func__, vocab.special_eot_id, vocab.id_to_token[vocab.special_eot_id].text.c_str() ); } } // Returns false if cancelled by progress_callback @@ -13268,16 +13297,14 @@ void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * c GGML_ASSERT(ctx); const int64_t t_start_sample_us = ggml_time_us(); - bool allow_eos = false; + bool allow_eog = false; for (const auto & stack : grammar->stacks) { if (stack.empty()) { - allow_eos = true; + allow_eog = true; break; } } - const llama_token eos = llama_token_eos(&ctx->model); - std::vector, llama_partial_utf8>> candidates_decoded; candidates_decoded.reserve(candidates->size); std::vector candidates_grammar; @@ -13286,8 +13313,8 @@ void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * c for (size_t i = 0; i < candidates->size; ++i) { const llama_token id = candidates->data[i].id; const std::string piece = llama_token_to_piece(ctx, id); - if (id == eos) { - if (!allow_eos) { + if (llama_token_is_eog(&ctx->model, id)) { + if (!allow_eog) { candidates->data[i].logit = -INFINITY; } } else if (piece.empty() || piece[0] == 0) { @@ -13476,7 +13503,7 @@ llama_token llama_sample_token(struct llama_context * ctx, llama_token_data_arra void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar * grammar, llama_token token) { const int64_t t_start_sample_us = ggml_time_us(); - if (token == llama_token_eos(&ctx->model)) { + if (llama_token_is_eog(&ctx->model, token)) { for (const auto & stack : grammar->stacks) { if (stack.empty()) { return; @@ -16880,6 +16907,13 @@ llama_token_type llama_token_get_type(const struct llama_model * model, llama_to return 
model->vocab.id_to_token[token].type; } +bool llama_token_is_eog(const struct llama_model * model, llama_token token) { + return token != -1 && ( + token == llama_token_eos(model) || + token == llama_token_eot(model) + ); +} + llama_token llama_token_bos(const struct llama_model * model) { return model->vocab.special_bos_id; } diff --git a/llama.h b/llama.h index b5da686f7b7e5..5bed97ad1ef9f 100644 --- a/llama.h +++ b/llama.h @@ -783,6 +783,9 @@ extern "C" { LLAMA_API enum llama_token_type llama_token_get_type(const struct llama_model * model, llama_token token); + // Check if the token is supposed to end generation (end-of-generation, eg. EOS, EOT, etc.) + LLAMA_API bool llama_token_is_eog(const struct llama_model * model, llama_token token); + // Special tokens LLAMA_API llama_token llama_token_bos(const struct llama_model * model); // beginning-of-sentence LLAMA_API llama_token llama_token_eos(const struct llama_model * model); // end-of-sentence @@ -796,7 +799,7 @@ extern "C" { // Returns -1 if unknown, 1 for true or 0 for false. LLAMA_API int32_t llama_add_eos_token(const struct llama_model * model); - // codellama infill tokens + // Codellama infill tokens LLAMA_API llama_token llama_token_prefix(const struct llama_model * model); // Beginning of infill prefix LLAMA_API llama_token llama_token_middle(const struct llama_model * model); // Beginning of infill middle LLAMA_API llama_token llama_token_suffix(const struct llama_model * model); // Beginning of infill suffix From 89b0bf0d5dc123cc1053708f5f84b89e52cdc80b Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Sun, 21 Apr 2024 08:19:04 -0400 Subject: [PATCH 005/101] llava : use logger in llava-cli (#6797) This change removes printf() logging so llava-cli is shell scriptable. --- examples/llava/clip.cpp | 122 +++++++++++++++++------------------ examples/llava/llava-cli.cpp | 49 +++++++++----- examples/llava/llava.cpp | 36 +++++------ 3 files changed, 111 insertions(+), 96 deletions(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 5954bf6cdec68..e431c7f70fb0e 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -3,6 +3,7 @@ // I'll gradually clean and extend it // Note: Even when using identical normalized image inputs (see normalize_image_u8_to_f32()) we have a significant difference in resulting embeddings compared to pytorch #include "clip.h" +#include "log.h" #include "ggml.h" #include "ggml-alloc.h" #include "ggml-backend.h" @@ -23,7 +24,6 @@ #include #include #include -#include #include #include #include @@ -145,7 +145,7 @@ static std::map PROJECTOR_TYPE_NAMES = { static int get_key_idx(const gguf_context * ctx, const char * key) { int i = gguf_find_key(ctx, key); if (i == -1) { - fprintf(stderr, "key %s not found in file\n", key); + LOG_TEE("key %s not found in file\n", key); throw std::runtime_error(format("Missing required key: %s", key)); } @@ -247,7 +247,7 @@ static std::string gguf_kv_to_str(const struct gguf_context * ctx_gguf, int i) { static void print_tensor_info(const ggml_tensor * tensor, const char * prefix = "") { size_t tensor_size = ggml_nbytes(tensor); - printf("%s: n_dims = %d, name = %s, tensor_size=%zu, shape:[%" PRId64 ", %" PRId64 ", %" PRId64 ", %" PRId64 "], type = %s\n", + LOG_TEE("%s: n_dims = %d, name = %s, tensor_size=%zu, shape:[%" PRId64 ", %" PRId64 ", %" PRId64 ", %" PRId64 "], type = %s\n", prefix, ggml_n_dims(tensor), tensor->name, tensor_size, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3], ggml_type_name(tensor->type)); } @@ -265,7 +265,7 
@@ static projector_type clip_projector_type_from_string(const std::string & name) static void clip_image_write_image_to_ppm(const clip_image_u8& img, const std::string& filename) { std::ofstream file(filename, std::ios::binary); if (!file.is_open()) { - std::cerr << "Failed to open file for writing: " << filename << std::endl; + LOG_TEE("Failed to open file for writing: %s\n", filename.c_str()); return; } @@ -284,7 +284,7 @@ static void clip_image_write_image_to_ppm(const clip_image_u8& img, const std::s static void clip_image_save_to_bmp(const clip_image_u8& img, const std::string& filename) { std::ofstream file(filename, std::ios::binary); if (!file.is_open()) { - std::cerr << "Failed to open file for writing: " << filename << std::endl; + LOG_TEE("Failed to open file for writing: %s\n", filename.c_str()); return; } @@ -515,7 +515,7 @@ struct clip_ctx { static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32_batch * imgs) { if (!ctx->has_vision_encoder) { - printf("This gguf file seems to have no vision encoder\n"); + LOG_TEE("This gguf file seems to have no vision encoder\n"); return nullptr; } @@ -879,21 +879,21 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { const int idx_name = gguf_find_key(ctx, KEY_NAME); if (idx_name != -1) { // make name optional temporarily as some of the uploaded models missing it due to a bug const std::string name = gguf_get_val_str(ctx, idx_name); - printf("%s: model name: %s\n", __func__, name.c_str()); + LOG_TEE("%s: model name: %s\n", __func__, name.c_str()); } - printf("%s: description: %s\n", __func__, description.c_str()); - printf("%s: GGUF version: %d\n", __func__, gguf_get_version(ctx)); - printf("%s: alignment: %zu\n", __func__, gguf_get_alignment(ctx)); - printf("%s: n_tensors: %d\n", __func__, n_tensors); - printf("%s: n_kv: %d\n", __func__, n_kv); - printf("%s: ftype: %s\n", __func__, ftype_str.c_str()); - printf("\n"); + LOG_TEE("%s: description: %s\n", __func__, description.c_str()); + LOG_TEE("%s: GGUF version: %d\n", __func__, gguf_get_version(ctx)); + LOG_TEE("%s: alignment: %zu\n", __func__, gguf_get_alignment(ctx)); + LOG_TEE("%s: n_tensors: %d\n", __func__, n_tensors); + LOG_TEE("%s: n_kv: %d\n", __func__, n_kv); + LOG_TEE("%s: ftype: %s\n", __func__, ftype_str.c_str()); + LOG_TEE("\n"); } const int n_tensors = gguf_get_n_tensors(ctx); // kv const int n_kv = gguf_get_n_kv(ctx); - printf("%s: loaded meta data with %d key-value pairs and %d tensors from %s\n", + LOG_TEE("%s: loaded meta data with %d key-value pairs and %d tensors from %s\n", __func__, n_kv, n_tensors, fname); { std::map n_type; @@ -904,7 +904,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { n_type[type]++; } - printf("%s: Dumping metadata keys/values. Note: KV overrides do not apply in this output.\n", __func__); + LOG_TEE("%s: Dumping metadata keys/values. 
Note: KV overrides do not apply in this output.\n", __func__); for (int i = 0; i < n_kv; i++) { const char * name = gguf_get_key(ctx, i); const enum gguf_type type = gguf_get_kv_type(ctx, i); @@ -920,7 +920,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } replace_all(value, "\n", "\\n"); - printf("%s: - kv %3d: %42s %-16s = %s\n", __func__, i, name, type_name.c_str(), value.c_str()); + LOG_TEE("%s: - kv %3d: %42s %-16s = %s\n", __func__, i, name, type_name.c_str(), value.c_str()); } // print type counts @@ -929,7 +929,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { continue; } - printf("%s: - type %4s: %4d tensors\n", __func__, ggml_type_name(kv.first), kv.second); + LOG_TEE("%s: - type %4s: %4d tensors\n", __func__, ggml_type_name(kv.first), kv.second); } } @@ -944,7 +944,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { size_t tensor_size = ggml_nbytes(cur); model_size += tensor_size; if (verbosity >= 3) { - printf("%s: tensor[%d]: n_dims = %d, name = %s, tensor_size=%zu, offset=%zu, shape:[%" PRIu64 ", %" PRIu64 ", %" PRIu64 ", %" PRIu64 "], type = %s\n", + LOG_TEE("%s: tensor[%d]: n_dims = %d, name = %s, tensor_size=%zu, offset=%zu, shape:[%" PRIu64 ", %" PRIu64 ", %" PRIu64 ", %" PRIu64 "], type = %s\n", __func__, i, ggml_n_dims(cur), cur->name, tensor_size, offset, cur->ne[0], cur->ne[1], cur->ne[2], cur->ne[3], ggml_type_name(type)); } } @@ -971,18 +971,18 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { #ifdef GGML_USE_CUDA new_clip->backend = ggml_backend_cuda_init(0); - printf("%s: CLIP using CUDA backend\n", __func__); + LOG_TEE("%s: CLIP using CUDA backend\n", __func__); #endif #ifdef GGML_USE_METAL new_clip->backend = ggml_backend_metal_init(); - printf("%s: CLIP using Metal backend\n", __func__); + LOG_TEE("%s: CLIP using Metal backend\n", __func__); #endif if (!new_clip->backend) { new_clip->backend = ggml_backend_cpu_init(); - printf("%s: CLIP using CPU backend\n", __func__); + LOG_TEE("%s: CLIP using CPU backend\n", __func__); } // model size and capabilities @@ -1006,15 +1006,15 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { new_clip->use_gelu = gguf_get_val_bool(ctx, idx); if (verbosity >= 1) { - printf("%s: text_encoder: %d\n", __func__, new_clip->has_text_encoder); - printf("%s: vision_encoder: %d\n", __func__, new_clip->has_vision_encoder); - printf("%s: llava_projector: %d\n", __func__, new_clip->has_llava_projector); - printf("%s: model size: %.2f MB\n", __func__, model_size / 1024.0 / 1024.0); - printf("%s: metadata size: %.2f MB\n", __func__, ggml_get_mem_size(meta) / 1024.0 / 1024.0); + LOG_TEE("%s: text_encoder: %d\n", __func__, new_clip->has_text_encoder); + LOG_TEE("%s: vision_encoder: %d\n", __func__, new_clip->has_vision_encoder); + LOG_TEE("%s: llava_projector: %d\n", __func__, new_clip->has_llava_projector); + LOG_TEE("%s: model size: %.2f MB\n", __func__, model_size / 1024.0 / 1024.0); + LOG_TEE("%s: metadata size: %.2f MB\n", __func__, ggml_get_mem_size(meta) / 1024.0 / 1024.0); } } - printf("%s: params backend buffer size = % 6.2f MB (%i tensors)\n", __func__, model_size / (1024.0 * 1024.0), n_tensors); + LOG_TEE("%s: params backend buffer size = % 6.2f MB (%i tensors)\n", __func__, model_size / (1024.0 * 1024.0), n_tensors); // load tensors { @@ -1027,7 +1027,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { new_clip->ctx_data = 
ggml_init(params); if (!new_clip->ctx_data) { - fprintf(stderr, "%s: ggml_init() failed\n", __func__); + LOG_TEE("%s: ggml_init() failed\n", __func__); clip_free(new_clip); gguf_free(ctx); return nullptr; @@ -1035,7 +1035,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { auto fin = std::ifstream(fname, std::ios::binary); if (!fin) { - printf("cannot open model file for loading tensors\n"); + LOG_TEE("cannot open model file for loading tensors\n"); clip_free(new_clip); gguf_free(ctx); return nullptr; @@ -1057,7 +1057,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { const size_t offset = gguf_get_data_offset(ctx) + gguf_get_tensor_offset(ctx, i); fin.seekg(offset, std::ios::beg); if (!fin) { - printf("%s: failed to seek for tensor %s\n", __func__, name); + LOG_TEE("%s: failed to seek for tensor %s\n", __func__, name); clip_free(new_clip); gguf_free(ctx); return nullptr; @@ -1128,23 +1128,23 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } if (verbosity >= 2) { - printf("\n%s: vision model hparams\n", __func__); - printf("image_size %d\n", hparams.image_size); - printf("patch_size %d\n", hparams.patch_size); - printf("v_hidden_size %d\n", hparams.hidden_size); - printf("v_n_intermediate %d\n", hparams.n_intermediate); - printf("v_projection_dim %d\n", hparams.projection_dim); - printf("v_n_head %d\n", hparams.n_head); - printf("v_n_layer %d\n", hparams.n_layer); - printf("v_eps %f\n", hparams.eps); - printf("v_image_mean %f %f %f\n", new_clip->image_mean[0], new_clip->image_mean[1], new_clip->image_mean[2]); - printf("v_image_std %f %f %f\n", new_clip->image_std[0], new_clip->image_std[1], new_clip->image_std[2]); - printf("v_image_grid_pinpoints: "); + LOG_TEE("\n%s: vision model hparams\n", __func__); + LOG_TEE("image_size %d\n", hparams.image_size); + LOG_TEE("patch_size %d\n", hparams.patch_size); + LOG_TEE("v_hidden_size %d\n", hparams.hidden_size); + LOG_TEE("v_n_intermediate %d\n", hparams.n_intermediate); + LOG_TEE("v_projection_dim %d\n", hparams.projection_dim); + LOG_TEE("v_n_head %d\n", hparams.n_head); + LOG_TEE("v_n_layer %d\n", hparams.n_layer); + LOG_TEE("v_eps %f\n", hparams.eps); + LOG_TEE("v_image_mean %f %f %f\n", new_clip->image_mean[0], new_clip->image_mean[1], new_clip->image_mean[2]); + LOG_TEE("v_image_std %f %f %f\n", new_clip->image_std[0], new_clip->image_std[1], new_clip->image_std[2]); + LOG_TEE("v_image_grid_pinpoints: "); for (int i = 0; i < 32 && (hparams.image_grid_pinpoints[i] != 0); ++i) { - printf("%d ", hparams.image_grid_pinpoints[i]); + LOG_TEE("%d ", hparams.image_grid_pinpoints[i]); } - printf("\n"); - printf("v_mm_patch_merge_type: %s\n", hparams.mm_patch_merge_type); + LOG_TEE("\n"); + LOG_TEE("v_mm_patch_merge_type: %s\n", hparams.mm_patch_merge_type); } @@ -1155,7 +1155,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { vision_model.pre_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "weight")); vision_model.pre_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "bias")); } catch(const std::exception& e) { - fprintf(stderr, "%s: failed to load vision model tensors\n", __func__); + LOG_TEE("%s: failed to load vision model tensors\n", __func__); } // LLaVA projection @@ -1184,7 +1184,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } catch (std::runtime_error & e) { } try { vision_model.image_newline = get_tensor(new_clip->ctx_data, 
TN_IMAGE_NEWLINE); - // fprintf(stderr, "%s: image_newline tensor (llava-1.6) found\n", __func__); + // LOG_TEE("%s: image_newline tensor (llava-1.6) found\n", __func__); } catch (std::runtime_error & e) { } } else if (new_clip->proj_type == PROJECTOR_TYPE_LDP) { // MobileVLM projection @@ -1264,7 +1264,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { ggml_cgraph * gf = clip_image_build_graph(new_clip, &batch); ggml_gallocr_reserve(new_clip->compute_alloc, gf); size_t compute_memory_buffer_size = ggml_gallocr_get_buffer_size(new_clip->compute_alloc, 0); - printf("%s: compute allocated memory: %.2f MB\n", __func__, compute_memory_buffer_size /1024.0/1024.0); + LOG_TEE("%s: compute allocated memory: %.2f MB\n", __func__, compute_memory_buffer_size /1024.0/1024.0); } return new_clip; @@ -1304,7 +1304,7 @@ bool clip_image_load_from_file(const char * fname, clip_image_u8 * img) { int nx, ny, nc; auto * data = stbi_load(fname, &nx, &ny, &nc, 3); if (!data) { - fprintf(stderr, "%s: failed to load image '%s'\n", __func__, fname); + LOG_TEE("%s: failed to load image '%s'\n", __func__, fname); return false; } build_clip_img_from_data(data, nx, ny, img); @@ -1316,7 +1316,7 @@ bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length int nx, ny, nc; auto * data = stbi_load_from_memory(bytes, bytes_length, &nx, &ny, &nc, 3); if (!data) { - fprintf(stderr, "%s: failed to decode image bytes\n", __func__); + LOG_TEE("%s: failed to decode image bytes\n", __func__); return false; } build_clip_img_from_data(data, nx, ny, img); @@ -1506,7 +1506,7 @@ static std::pair select_best_resolution(const std::pair & or int downscaled_height = static_cast(original_height * scale); int effective_resolution = std::min(downscaled_width * downscaled_height, original_width * original_height); int wasted_resolution = (width * height) - effective_resolution; - // fprintf(stderr, "resolution: %d %d, scale: %f, downscaled: %d %d, effective: %d, wasted: %d\n", width, height, scale, downscaled_width, downscaled_height, effective_resolution, wasted_resolution); + // LOG_TEE("resolution: %d %d, scale: %f, downscaled: %d %d, effective: %d, wasted: %d\n", width, height, scale, downscaled_width, downscaled_height, effective_resolution, wasted_resolution); if (effective_resolution > max_effective_resolution || (effective_resolution == max_effective_resolution && wasted_resolution < min_wasted_resolution)) { max_effective_resolution = effective_resolution; min_wasted_resolution = wasted_resolution; @@ -1545,7 +1545,7 @@ static std::vector divide_to_patches_u8(const clip_image_u8 & im bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32_batch * res_imgs) { bool pad_to_square = true; if (!ctx->has_vision_encoder) { - printf("This gguf file seems to have no vision encoder\n"); + LOG_TEE("This gguf file seems to have no vision encoder\n"); return false; } auto & params = ctx->vision_model.hparams; @@ -1622,7 +1622,7 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, cli } for (size_t i = 0; i < patches.size(); i++) { - // printf("patch %d: %d %d\n", i, patches[i]->nx, patches[i]->ny); + // LOG_TEE("patch %d: %d %d\n", i, patches[i]->nx, patches[i]->ny); clip_image_u8_free(patches[i]); } @@ -1765,7 +1765,7 @@ int clip_n_patches(const struct clip_ctx * ctx) { bool clip_image_encode(struct clip_ctx * ctx, const int n_threads, clip_image_f32 * img, float * vec) { if (!ctx->has_vision_encoder) { - printf("This gguf file 
seems to have no vision encoder\n"); + LOG_TEE("This gguf file seems to have no vision encoder\n"); return false; } @@ -1777,7 +1777,7 @@ bool clip_image_encode(struct clip_ctx * ctx, const int n_threads, clip_image_f3 bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_image_f32_batch * imgs, float * vec) { if (!ctx->has_vision_encoder) { - printf("This gguf file seems to have no vision encoder\n"); + LOG_TEE("This gguf file seems to have no vision encoder\n"); return false; } @@ -1939,7 +1939,7 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i new_type = type; if (new_type >= GGML_TYPE_Q2_K && name.find("embd") != std::string::npos) { new_type = GGML_TYPE_Q8_0; // ggml_get_rows needs non K type - // fprintf(stderr, "%s: quantizing %s to %s\n", __func__, name.c_str(), ggml_type_name(new_type)); + // LOG_TEE("%s: quantizing %s to %s\n", __func__, name.c_str(), ggml_type_name(new_type)); } const size_t n_elms = ggml_nelements(cur); float * f32_data; @@ -1958,7 +1958,7 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i f32_data = (float *)conv_buf.data(); break; default: - printf("Please use an input file in f32 or f16\n"); + LOG_TEE("Please use an input file in f32 or f16\n"); gguf_free(ctx_out); return false; } @@ -1985,7 +1985,7 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i fout.put(0); } - printf("%s: n_dims = %d | quantize=%d | size = %f MB -> %f MB\n", name.c_str(), ggml_n_dims(cur), quantize, + LOG_TEE("%s: n_dims = %d | quantize=%d | size = %f MB -> %f MB\n", name.c_str(), ggml_n_dims(cur), quantize, orig_size / 1024.0 / 1024.0, new_size / 1024.0 / 1024.0); } @@ -2001,8 +2001,8 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i gguf_free(ctx_out); { - printf("%s: original size = %8.2f MB\n", __func__, total_size_org / 1024.0 / 1024.0); - printf("%s: quantized size = %8.2f MB\n", __func__, total_size_new / 1024.0 / 1024.0); + LOG_TEE("%s: original size = %8.2f MB\n", __func__, total_size_org / 1024.0 / 1024.0); + LOG_TEE("%s: quantized size = %8.2f MB\n", __func__, total_size_new / 1024.0 / 1024.0); } return true; diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index 50dac4caecfe2..a44c6cd7632c7 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -1,4 +1,5 @@ #include "ggml.h" +#include "log.h" #include "common.h" #include "clip.h" #include "llava.h" @@ -18,7 +19,7 @@ static bool eval_tokens(struct llama_context * ctx_llama, std::vector%s\n", __func__, IMG_BASE64_TAG_BEGIN, IMG_BASE64_TAG_END); + LOG_TEE("%s: invalid base64 image tag. 
must be %s%s\n", __func__, IMG_BASE64_TAG_BEGIN, IMG_BASE64_TAG_END); return NULL; } @@ -87,7 +88,7 @@ static llava_image_embed * llava_image_embed_make_with_prompt_base64(struct clip auto embed = llava_image_embed_make_with_bytes(ctx_clip, n_threads, img_bytes.data(), img_bytes.size()); if (!embed) { - fprintf(stderr, "%s: could not load image from base64 string.\n", __func__); + LOG_TEE("%s: could not load image from base64 string.\n", __func__); return NULL; } @@ -112,8 +113,8 @@ struct llava_context { }; static void show_additional_info(int /*argc*/, char ** argv) { - fprintf(stderr, "\n example usage: %s -m --mmproj --image [--temp 0.1] [-p \"describe the image in detail.\"]\n", argv[0]); - fprintf(stderr, " note: a lower temperature value like 0.1 is recommended for better quality.\n"); + LOG_TEE("\n example usage: %s -m --mmproj --image [--temp 0.1] [-p \"describe the image in detail.\"]\n", argv[0]); + LOG_TEE(" note: a lower temperature value like 0.1 is recommended for better quality.\n"); } static struct llava_image_embed * load_image(llava_context * ctx_llava, gpt_params * params) { @@ -123,18 +124,18 @@ static struct llava_image_embed * load_image(llava_context * ctx_llava, gpt_para auto prompt = params->prompt; if (prompt_contains_image(prompt)) { if (!params->image.empty()) { - fprintf(stderr, "using base64 encoded image instead of command line image path\n"); + LOG_TEE("using base64 encoded image instead of command line image path\n"); } embed = llava_image_embed_make_with_prompt_base64(ctx_llava->ctx_clip, params->n_threads, prompt); if (!embed) { - fprintf(stderr, "%s: can't load image from prompt\n", __func__); + LOG_TEE("%s: can't load image from prompt\n", __func__); return NULL; } params->prompt = remove_image_from_prompt(prompt); } else { embed = llava_image_embed_make_with_filename(ctx_llava->ctx_clip, params->n_threads, params->image.c_str()); if (!embed) { - fprintf(stderr, "%s: is %s really an image file?\n", __func__, params->image.c_str()); + LOG_TEE("%s: is %s really an image file?\n", __func__, params->image.c_str()); return NULL; } } @@ -153,18 +154,18 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ // new templating mode: Provide the full prompt including system message and use as a placeholder for the image system_prompt = prompt.substr(0, image_pos); user_prompt = prompt.substr(image_pos + std::string("").length()); - printf("system_prompt: %s\n", system_prompt.c_str()); + LOG_TEE("system_prompt: %s\n", system_prompt.c_str()); if (params->verbose_prompt) { auto tmp = ::llama_tokenize(ctx_llava->ctx_llama, system_prompt, true, true); for (int i = 0; i < (int) tmp.size(); i++) { - printf("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); + LOG_TEE("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); } } - printf("user_prompt: %s\n", user_prompt.c_str()); + LOG_TEE("user_prompt: %s\n", user_prompt.c_str()); if (params->verbose_prompt) { auto tmp = ::llama_tokenize(ctx_llava->ctx_llama, user_prompt, true, true); for (int i = 0; i < (int) tmp.size(); i++) { - printf("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); + LOG_TEE("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); } } } else { @@ -174,7 +175,7 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ if (params->verbose_prompt) { auto tmp = ::llama_tokenize(ctx_llava->ctx_llama, user_prompt, true, 
true); for (int i = 0; i < (int) tmp.size(); i++) { - printf("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); + LOG_TEE("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); } } } @@ -185,7 +186,7 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ // generate the response - fprintf(stderr, "\n"); + LOG_TEE("\n"); struct llama_sampling_context * ctx_sampling = llama_sampling_init(params->sparams); std::string response = ""; @@ -224,7 +225,7 @@ static struct llava_context * llava_init(gpt_params * params) { llama_model * model = llama_load_model_from_file(params->model.c_str(), model_params); if (model == NULL) { - fprintf(stderr , "%s: error: unable to load model\n" , __func__); + LOG_TEE("%s: error: unable to load model\n" , __func__); return NULL; } @@ -234,7 +235,7 @@ static struct llava_context * llava_init(gpt_params * params) { llama_context * ctx_llama = llama_new_context_with_model(model, ctx_params); if (ctx_llama == NULL) { - fprintf(stderr , "%s: error: failed to create the llama_context\n" , __func__); + LOG_TEE("%s: error: failed to create the llama_context\n" , __func__); return NULL; } @@ -257,6 +258,12 @@ static void llava_free(struct llava_context * ctx_llava) { llama_backend_free(); } +static void llama_log_callback_logTee(ggml_log_level level, const char * text, void * user_data) { + (void) level; + (void) user_data; + LOG_TEE("%s", text); +} + int main(int argc, char ** argv) { ggml_time_init(); @@ -266,6 +273,14 @@ int main(int argc, char ** argv) { show_additional_info(argc, argv); return 1; } + +#ifndef LOG_DISABLE_LOGS + log_set_target(log_filename_generator("llava", "log")); + LOG_TEE("Log start\n"); + log_dump_cmdline(argc, argv); + llama_log_set(llama_log_callback_logTee, nullptr); +#endif // LOG_DISABLE_LOGS + if (params.mmproj.empty() || (params.image.empty() && !prompt_contains_image(params.prompt))) { gpt_print_usage(argc, argv, params); show_additional_info(argc, argv); @@ -274,7 +289,7 @@ int main(int argc, char ** argv) { auto ctx_llava = llava_init(¶ms); if (ctx_llava == NULL) { - fprintf(stderr, "%s: error: failed to init llava\n", __func__); + LOG_TEE("%s: error: failed to init llava\n", __func__); return 1; } diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 29764757aab5d..9a990bb182f35 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -54,7 +54,7 @@ static std::pair select_best_resolution(const std::pair& ori int downscaled_height = static_cast(original_height * scale); int effective_resolution = std::min(downscaled_width * downscaled_height, original_width * original_height); int wasted_resolution = (width * height) - effective_resolution; - // fprintf(stderr, "resolution: %d %d, scale: %f, downscaled: %d %d, effective: %d, wasted: %d\n", width, height, scale, downscaled_width, downscaled_height, effective_resolution, wasted_resolution); + // LOG_TEE("resolution: %d %d, scale: %f, downscaled: %d %d, effective: %d, wasted: %d\n", width, height, scale, downscaled_width, downscaled_height, effective_resolution, wasted_resolution); if (effective_resolution > max_effective_resolution || (effective_resolution == max_effective_resolution && wasted_resolution < min_wasted_resolution)) { max_effective_resolution = effective_resolution; min_wasted_resolution = wasted_resolution; @@ -154,13 +154,13 @@ static bool clip_llava_handle_patches(clip_ctx * ctx_clip, std::vector model.newline = ggml_new_tensor_1d(model.ctx, 
GGML_TYPE_F32, newline_tmp->ne[0]); if (newline_tmp->backend != GGML_BACKEND_TYPE_CPU) { if (newline_tmp->buffer == NULL) { - printf("newline_tmp tensor buffer is NULL\n"); + LOG_TEE("newline_tmp tensor buffer is NULL\n"); } ggml_backend_tensor_get(newline_tmp, model.newline->data, 0, ggml_nbytes(newline_tmp)); } else { model.newline->data = newline_tmp->data; if (model.newline->data == NULL) { - printf("newline_tmp tensor data is NULL\n"); + LOG_TEE("newline_tmp tensor data is NULL\n"); } } @@ -224,7 +224,7 @@ static bool encode_image_with_clip(clip_ctx * ctx_clip, int n_threads, const cli img_res_v.size = 0; img_res_v.data = nullptr; if (!clip_image_preprocess(ctx_clip, img, &img_res_v)) { - fprintf(stderr, "%s: unable to preprocess image\n", __func__); + LOG_TEE("%s: unable to preprocess image\n", __func__); delete[] img_res_v.data; return false; } @@ -239,7 +239,7 @@ static bool encode_image_with_clip(clip_ctx * ctx_clip, int n_threads, const cli bool encoded = clip_image_encode(ctx_clip, n_threads, &img_res_v.data[0], image_embd); // image_embd shape is 576 x 4096 delete[] img_res_v.data; if (!encoded) { - fprintf(stderr, "Unable to encode image\n"); + LOG_TEE("Unable to encode image\n"); return false; } @@ -252,12 +252,12 @@ static bool encode_image_with_clip(clip_ctx * ctx_clip, int n_threads, const cli image_embd_v[i] = (float *)malloc(clip_embd_nbytes(ctx_clip)); // 576 patches * 4096 embeddings * 4 bytes = 9437184 const bool encoded = clip_image_encode(ctx_clip, n_threads, &img_res_v.data[i], image_embd_v[i]); // image data is in 3x336x336 format and will be converted to 336x336x3 inside if (!encoded) { - fprintf(stderr, "Unable to encode image - spatial_unpad - subimage %d of %d\n", (int) i+1, (int) img_res_v.size); + LOG_TEE("Unable to encode image - spatial_unpad - subimage %d of %d\n", (int) i+1, (int) img_res_v.size); return false; } } const int64_t t_img_enc_batch_us = ggml_time_us(); - printf("%s: %d segments encoded in %8.2f ms\n", __func__, (int)img_res_v.size, (t_img_enc_batch_us - t_img_enc_start_us) / 1000.0); + LOG_TEE("%s: %d segments encoded in %8.2f ms\n", __func__, (int)img_res_v.size, (t_img_enc_batch_us - t_img_enc_start_us) / 1000.0); const int32_t * image_grid = clip_image_grid(ctx_clip); @@ -290,12 +290,12 @@ static bool encode_image_with_clip(clip_ctx * ctx_clip, int n_threads, const cli // clip_image_save_to_bmp(*tmp, "image_feature.bmp"); } - printf("%s: image embedding created: %d tokens\n", __func__, *n_img_pos); + LOG_TEE("%s: image embedding created: %d tokens\n", __func__, *n_img_pos); const int64_t t_img_enc_end_us = ggml_time_us(); float t_img_enc_ms = (t_img_enc_end_us - t_img_enc_start_us) / 1000.0; - printf("\n%s: image encoded in %8.2f ms by CLIP (%8.2f ms per image patch)\n", __func__, t_img_enc_ms, t_img_enc_ms / *n_img_pos); + LOG_TEE("\n%s: image encoded in %8.2f ms by CLIP (%8.2f ms per image patch)\n", __func__, t_img_enc_ms, t_img_enc_ms / *n_img_pos); return true; } @@ -305,7 +305,7 @@ bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * int n_llama_embd = llama_n_embd(llama_get_model(ctx_llama)); auto n_image_embd = clip_n_mmproj_embd(ctx_clip); if (n_image_embd != n_llama_embd) { - printf("%s: embedding dim of the multimodal projector (%d) is not equal to that of LLaMA (%d). Make sure that you use the correct mmproj file.\n", __func__, n_image_embd, n_llama_embd); + LOG_TEE("%s: embedding dim of the multimodal projector (%d) is not equal to that of LLaMA (%d). 
Make sure that you use the correct mmproj file.\n", __func__, n_image_embd, n_llama_embd); return false; } return true; @@ -314,13 +314,13 @@ bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out) { float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)*6); // TODO: base on gridsize/llava model if (!image_embd) { - fprintf(stderr, "Unable to allocate memory for image embeddings\n"); + LOG_TEE("Unable to allocate memory for image embeddings\n"); return false; } int n_img_pos; if (!encode_image_with_clip(ctx_clip, n_threads, img, image_embd, &n_img_pos)) { - fprintf(stderr, "%s: cannot encode image, aborting\n", __func__); + LOG_TEE("%s: cannot encode image, aborting\n", __func__); free(image_embd); return false; } @@ -340,7 +340,7 @@ bool llava_eval_image_embed(llama_context * ctx_llama, const struct llava_image_ } llama_batch batch = {int32_t(n_eval), nullptr, (image_embed->embed+i*n_embd), nullptr, nullptr, nullptr, nullptr, *n_past, 1, 0, }; if (llama_decode(ctx_llama, batch)) { - fprintf(stderr, "%s : failed to eval\n", __func__); + LOG_TEE("%s : failed to eval\n", __func__); return false; } *n_past += n_eval; @@ -352,7 +352,7 @@ struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * c clip_image_u8 * img = clip_image_u8_init(); if (!clip_image_load_from_bytes(image_bytes, image_bytes_length, img)) { clip_image_u8_free(img); - fprintf(stderr, "%s: can't load image from bytes, is it a valid image?", __func__); + LOG_TEE("%s: can't load image from bytes, is it a valid image?", __func__); return NULL; } @@ -361,7 +361,7 @@ struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * c bool image_embed_result = llava_image_embed_make_with_clip_img(ctx_clip, n_threads, img, &image_embed, &n_image_pos); if (!image_embed_result) { clip_image_u8_free(img); - fprintf(stderr, "%s: coulnd't embed the image\n", __func__); + LOG_TEE("%s: coulnd't embed the image\n", __func__); return NULL; } @@ -375,7 +375,7 @@ struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * c static bool load_file_to_bytes(const char* path, unsigned char** bytesOut, long *sizeOut) { auto file = fopen(path, "rb"); if (file == NULL) { - fprintf(stderr, "%s: can't read file %s\n", __func__, path); + LOG_TEE("%s: can't read file %s\n", __func__, path); return false; } @@ -385,7 +385,7 @@ static bool load_file_to_bytes(const char* path, unsigned char** bytesOut, long auto buffer = (unsigned char *)malloc(fileSize); // Allocate memory to hold the file data if (buffer == NULL) { - fprintf(stderr, "%s: failed to alloc %ld bytes for file %s\n", __func__, fileSize, path); + LOG_TEE("%s: failed to alloc %ld bytes for file %s\n", __func__, fileSize, path); perror("Memory allocation error"); fclose(file); return false; @@ -410,7 +410,7 @@ struct llava_image_embed * llava_image_embed_make_with_filename(struct clip_ctx long image_bytes_length; auto loaded = load_file_to_bytes(image_path, &image_bytes, &image_bytes_length); if (!loaded) { - fprintf(stderr, "%s: failed to load %s\n", __func__, image_path); + LOG_TEE("%s: failed to load %s\n", __func__, image_path); return NULL; } From 2cca09d509c0e114acc16cb347c3cdd86e5f1b40 Mon Sep 17 00:00:00 2001 From: Mohammadreza Hendiani Date: Sun, 21 Apr 2024 16:02:05 +0330 Subject: [PATCH 006/101] readme : add Fedora instructions (#6783) * added fedora 
to list of distros that may need the package (the packages have the same name on Fedora) * how to add clblast that is available in the fedora repos --- README.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 08661831526fa..144052263c880 100644 --- a/README.md +++ b/README.md @@ -549,7 +549,7 @@ Building the program with BLAS support may lead to some performance improvements OpenCL acceleration is provided by the matrix multiplication kernels from the [CLBlast](https://github.com/CNugteren/CLBlast) project and custom kernels for ggml that can generate tokens on the GPU. You will need the [OpenCL SDK](https://github.com/KhronosGroup/OpenCL-SDK). - - For Ubuntu or Debian, the packages `opencl-headers`, `ocl-icd` may be needed. + - For Ubuntu, Debian, and Fedora the packages `opencl-headers`, `ocl-icd` may be needed. - For Windows, a pre-built SDK is available on the [OpenCL Releases](https://github.com/KhronosGroup/OpenCL-SDK/releases) page. @@ -574,6 +574,12 @@ Building the program with BLAS support may lead to some performance improvements Pre-built CLBlast binaries may be found on the [CLBlast Releases](https://github.com/CNugteren/CLBlast/releases) page. For Unix variants, it may also be found in your operating system's packages. + Linux packaging: + Fedora Linux: + ```bash + sudo dnf install clblast + ``` + Alternatively, they may be built from source. -
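With the package installed, a CLBlast-enabled build can then be configured in the usual way. Below is a minimal sketch, assuming the `LLAMA_CLBLAST` CMake option the project exposed at the time; the `CLBlast_DIR` hint mentioned in the comment is only an assumption for setups where CMake does not find the package on its own.

```bash
# Minimal sketch, assuming the LLAMA_CLBLAST CMake option; if CMake cannot
# locate CLBlast on its own, point CLBlast_DIR at the installed package.
mkdir build && cd build
cmake .. -DLLAMA_CLBLAST=ON
cmake --build . --config Release
```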
From 7f54f5580b655efe38ebcec085f41ccfa2e05f87 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Sun, 21 Apr 2024 20:32:21 +0800 Subject: [PATCH 007/101] fixed broken tools again --- Makefile | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 134083e6db596..a41228637198c 100644 --- a/Makefile +++ b/Makefile @@ -62,7 +62,8 @@ endif CUBLASLD_FLAGS = CUBLAS_OBJS = -OBJS_FULL += ggml-alloc.o ggml-backend.o ggml-quants.o unicode.o unicode-data.o sgemm.o llava.o llavaclip.o common.o grammar-parser.o +OBJS_PARTIAL += ggml-alloc.o ggml-backend.o ggml-quants.o unicode.o unicode-data.o sgemm.o common.o grammar-parser.o +OBJS_FULL += $(OBJS_PARTIAL) llava.o llavaclip.o OBJS_SIMPLE += ggml-alloc.o ggml-backend.o ggml-quants_noavx2.o unicode.o unicode-data.o sgemm_noavx2.o llava.o llavaclip.o common.o grammar-parser.o OBJS_FAILSAFE += ggml-alloc.o ggml-backend.o ggml-quants_failsafe.o unicode.o unicode-data.o sgemm_failsafe.o llava.o llavaclip.o common.o grammar-parser.o @@ -527,7 +528,7 @@ clean: main: examples/main/main.cpp common/sampling.cpp build-info.h ggml.o llama.o console.o $(OBJS_FULL) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) @echo '==== Run ./main -h for help. ====' -sdmain: otherarch/sdcpp/util.cpp otherarch/sdcpp/main.cpp otherarch/sdcpp/stable-diffusion.cpp otherarch/sdcpp/upscaler.cpp otherarch/sdcpp/model.cpp otherarch/sdcpp/thirdparty/zip.c build-info.h ggml.o llama.o console.o $(OBJS_FULL) $(OBJS) +sdmain: otherarch/sdcpp/util.cpp otherarch/sdcpp/main.cpp otherarch/sdcpp/stable-diffusion.cpp otherarch/sdcpp/upscaler.cpp otherarch/sdcpp/model.cpp otherarch/sdcpp/thirdparty/zip.c build-info.h ggml.o llama.o console.o $(OBJS_PARTIAL) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) imatrix: examples/imatrix/imatrix.cpp common/sampling.cpp build-info.h ggml.o llama.o console.o $(OBJS_FULL) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) @@ -626,7 +627,7 @@ quantize_neox: otherarch/tools/neox_quantize.cpp otherarch/tools/common-ggml.cpp $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) quantize_mpt: otherarch/tools/mpt_quantize.cpp otherarch/tools/common-ggml.cpp ggml_v3.o ggml.o llama.o $(OBJS_FULL) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -quantize_clip: examples/llava/clip.cpp examples/llava/clip.h examples/llava/quantclip.cpp ggml_v3.o ggml.o llama.o $(OBJS_FULL) +quantize_clip: examples/llava/clip.cpp examples/llava/clip.h examples/llava/quantclip.cpp ggml_v3.o ggml.o llama.o $(OBJS_PARTIAL) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) #window simple clinfo From e8d35f47cb8cb4002fca02e18aaa1cb9fa21d6f1 Mon Sep 17 00:00:00 2001 From: Jan Boon Date: Sun, 21 Apr 2024 20:35:40 +0800 Subject: [PATCH 008/101] doc : add link to falcon (#6789) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 144052263c880..a5ba710d7b106 100644 --- a/README.md +++ b/README.md @@ -95,7 +95,7 @@ Typically finetunes of the base models below are supported as well. 
- [X] [Mistral 7B](https://huggingface.co/mistralai/Mistral-7B-v0.1) - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) - [x] [DBRX](https://huggingface.co/databricks/dbrx-instruct) -- [X] Falcon +- [X] [Falcon](https://huggingface.co/models?search=tiiuae/falcon) - [X] [Chinese LLaMA / Alpaca](https://github.com/ymcui/Chinese-LLaMA-Alpaca) and [Chinese LLaMA-2 / Alpaca-2](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2) - [X] [Vigogne (French)](https://github.com/bofenghuang/vigogne) - [X] [Koala](https://bair.berkeley.edu/blog/2023/04/03/koala/) From c1386c936e9fbc38eb2816c711ab28f13355708e Mon Sep 17 00:00:00 2001 From: pmysl Date: Sun, 21 Apr 2024 14:49:30 +0200 Subject: [PATCH 009/101] gguf-py : add IQ1_M to GGML_QUANT_SIZES (#6761) --- gguf-py/gguf/constants.py | 1 + 1 file changed, 1 insertion(+) diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index ba24065a891c7..06cb26a7d495b 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -872,6 +872,7 @@ def get_type(val: Any) -> GGUFValueType: GGMLQuantizationType.I32: (1, 4), GGMLQuantizationType.I64: (1, 8), GGMLQuantizationType.F64: (1, 8), + GGMLQuantizationType.IQ1_M: (256, QK_K // 8 + QK_K // 16 + QK_K // 32), } From 7dbdba5690ca61b3ee8c92cfac8e7e251042e787 Mon Sep 17 00:00:00 2001 From: Wouter <9594229+DifferentialityDevelopment@users.noreply.github.com> Date: Sun, 21 Apr 2024 15:03:39 +0200 Subject: [PATCH 010/101] llama : add llama-3 chat template (#6751) * Added llama-3 chat template * Update llama.cpp Co-authored-by: Samuel Tallet <36248671+SamuelTallet@users.noreply.github.com> * Update llama.cpp Co-authored-by: Samuel Tallet <36248671+SamuelTallet@users.noreply.github.com> * Update tests/test-chat-template.cpp Co-authored-by: Samuel Tallet <36248671+SamuelTallet@users.noreply.github.com> * Added EOS stop sequence according to https://github.com/ggerganov/llama.cpp/pull/6751#issuecomment-2065602862 * Removed adding of BOS token before first message * Removed bos token from expected output from llama-3 * Update tests/test-chat-template.cpp Co-authored-by: Rene Leonhardt <65483435+reneleonhardt@users.noreply.github.com> * Update tests/test-chat-template.cpp Co-authored-by: Rene Leonhardt <65483435+reneleonhardt@users.noreply.github.com> * Added <|end_of_text|> as another stop token * Reverted last change of adding the end_of_text stop word for llama 3 --------- Co-authored-by: Wouter Tichelaar Co-authored-by: Samuel Tallet <36248671+SamuelTallet@users.noreply.github.com> Co-authored-by: Rene Leonhardt <65483435+reneleonhardt@users.noreply.github.com> Co-authored-by: Georgi Gerganov --- llama.cpp | 9 +++++++++ tests/test-chat-template.cpp | 6 +++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 8ca9650de09ad..ec4c1242b20bc 100644 --- a/llama.cpp +++ b/llama.cpp @@ -17247,6 +17247,15 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>"; } + } else if (tmpl == "llama3" || (tmpl.find("<|start_header_id|>") != std::string::npos && tmpl.find("<|end_header_id|>") != std::string::npos)) { + // Llama 3 + for (auto message : chat) { + std::string role(message->role); + ss << "<|start_header_id|>" << role << "<|end_header_id|>\n\n" << trim(message->content) << "<|eot_id|>"; + } + if (add_ass) { + ss << "<|start_header_id|>assistant<|end_header_id|>\n\n"; + } } else { // template not supported return -1; diff --git a/tests/test-chat-template.cpp 
b/tests/test-chat-template.cpp index 522cc7d0d9e84..cddf86a4105ea 100644 --- a/tests/test-chat-template.cpp +++ b/tests/test-chat-template.cpp @@ -46,7 +46,9 @@ int main(void) { // No template included in tokenizer_config.json, so this template likely needs to be manually set. "{%- for message in messages %}{%- if message['role'] == 'system' -%}{{-'SYSTEM: ' + message['content'] + '\n' -}}{%- else -%}{%- if message['role'] == 'user' -%}{{-'USER: ' + message['content'] + '\n'-}}{%- else -%}{{-'ASSISTANT: ' + message['content'] + '\n' -}}{%- endif -%}{%- endif -%}{%- endfor -%}{%- if add_generation_prompt -%}{{-'ASSISTANT:'-}}{%- endif -%}", // CohereForAI/c4ai-command-r-plus - "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif false == true %}{% set loop_messages = messages %}{% set system_message = 'You are Command-R, a brilliant, sophisticated, AI-assistant trained to assist human users by providing thorough responses. You are trained by Cohere.' %}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% if system_message != false %}{{ '<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>' + system_message + '<|END_OF_TURN_TOKEN|>' }}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|START_OF_TURN_TOKEN|><|USER_TOKEN|>' + content.strip() + '<|END_OF_TURN_TOKEN|>' }}{% elif message['role'] == 'assistant' %}{{ '<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>' + content.strip() + '<|END_OF_TURN_TOKEN|>' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>' }}{% endif %}" + "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif false == true %}{% set loop_messages = messages %}{% set system_message = 'You are Command-R, a brilliant, sophisticated, AI-assistant trained to assist human users by providing thorough responses. You are trained by Cohere.' 
%}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% if system_message != false %}{{ '<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>' + system_message + '<|END_OF_TURN_TOKEN|>' }}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|START_OF_TURN_TOKEN|><|USER_TOKEN|>' + content.strip() + '<|END_OF_TURN_TOKEN|>' }}{% elif message['role'] == 'assistant' %}{{ '<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>' + content.strip() + '<|END_OF_TURN_TOKEN|>' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>' }}{% endif %}", + // Llama-3 + "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}", }; std::vector<std::string> expected_output = { // teknium/OpenHermes-2.5-Mistral-7B @@ -73,6 +75,8 @@ int main(void) { "SYSTEM: You are a helpful assistant\nUSER: Hello\nASSISTANT: Hi there\nUSER: Who are you\nASSISTANT: I am an assistant \nUSER: Another question\nASSISTANT:", // CohereForAI/c4ai-command-r-plus "<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>You are a helpful assistant<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>Hello<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>Hi there<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>Who are you<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>I am an assistant<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>Another question<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>", + // Llama 3 + "<|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nHello<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\nHi there<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWho are you<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\nI am an assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nAnother question<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", }; std::vector<char> formatted_chat(1024); int32_t res; From b9cc76d87e3d7ae5900f19d4fe8f8976d0a35888 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 21 Apr 2024 16:47:57 +0300 Subject: [PATCH 011/101] ggml : fix ggml_backend_cpu_supports_op() for CPY (#0) --- ggml-backend.c | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ggml-backend.c b/ggml-backend.c index 402d86ef3ac8b..189b5c140a8a7 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -822,7 +822,11 @@ GGML_CALL static enum ggml_status ggml_backend_cpu_graph_compute(ggml_backend_t GGML_CALL static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_CPY: - return op->type != GGML_TYPE_IQ2_XXS && op->type != GGML_TYPE_IQ2_XS && op->type != GGML_TYPE_IQ1_S; // missing type_traits.from_float + return + op->type != GGML_TYPE_IQ2_XXS && + op->type != GGML_TYPE_IQ2_XS && + op->type != GGML_TYPE_IQ1_S && + op->type != GGML_TYPE_IQ1_M; // missing type_traits.from_float case GGML_OP_MUL_MAT: return op->src[1]->type ==
GGML_TYPE_F32 || op->src[1]->type == ggml_internal_get_type_traits(op->src[0]->type).vec_dot_type; default: From 40f74e4d739e9250431cf339ae7588b28d8d0663 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 21 Apr 2024 18:36:45 +0300 Subject: [PATCH 012/101] llama : add option to render special/control tokens (#6807) * make : fix common dep on llama.h * llama : add option to render special tokens * readme : add API change notice ggml-ci * swift : fix build --- Makefile | 2 +- README.md | 1 + common/common.cpp | 4 +-- examples/batched.swift/Sources/main.swift | 5 ++-- .../llama.cpp.swift/LibLlama.swift | 4 +-- llama.cpp | 25 ++++++++++--------- llama.h | 4 ++- 7 files changed, 25 insertions(+), 20 deletions(-) diff --git a/Makefile b/Makefile index 760015f294729..72fdc6ba46bc7 100644 --- a/Makefile +++ b/Makefile @@ -699,7 +699,7 @@ OBJS += ggml-alloc.o ggml-backend.o ggml-quants.o unicode.o unicode-data.o llama.o: llama.cpp unicode.h ggml.h ggml-alloc.h ggml-backend.h ggml-cuda.h ggml-metal.h llama.h $(CXX) $(CXXFLAGS) -c $< -o $@ -COMMON_H_DEPS = common/common.h common/sampling.h common/log.h +COMMON_H_DEPS = common/common.h common/sampling.h common/log.h llama.h COMMON_DEPS = common.o sampling.o grammar-parser.o build-info.o json-schema-to-grammar.o common.o: common/common.cpp $(COMMON_H_DEPS) diff --git a/README.md b/README.md index a5ba710d7b106..1d4e9d41742b7 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) ### Recent API changes +- [2024 Apr 21] `llama_token_to_piece` can now optionally render special tokens https://github.com/ggerganov/llama.cpp/pull/6807 - [2024 Apr 4] State and session file functions reorganized under `llama_state_*` https://github.com/ggerganov/llama.cpp/pull/6341 - [2024 Mar 26] Logits and embeddings API updated for compactness https://github.com/ggerganov/llama.cpp/pull/6122 - [2024 Mar 13] Add `llama_synchronize()` + `llama_context_params.n_ubatch` https://github.com/ggerganov/llama.cpp/pull/6017 diff --git a/common/common.cpp b/common/common.cpp index b6143e41c02cb..06f252ea6914b 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -2328,10 +2328,10 @@ std::vector<llama_token> llama_tokenize( std::string llama_token_to_piece(const struct llama_context * ctx, llama_token token) { std::vector<char> result(8, 0); - const int n_tokens = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size()); + const int n_tokens = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size(), true); if (n_tokens < 0) { result.resize(-n_tokens); - int check = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size()); + int check = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size(), true); GGML_ASSERT(check == -n_tokens); } else { result.resize(n_tokens); diff --git a/examples/batched.swift/Sources/main.swift b/examples/batched.swift/Sources/main.swift index 5764acb6d5825..dbbd06da58183 100644 --- a/examples/batched.swift/Sources/main.swift +++ b/examples/batched.swift/Sources/main.swift @@ -229,7 +229,7 @@ private func tokenize(text: String, add_bos: Bool) -> [llama_token] { private func token_to_piece(token: llama_token, buffer: inout [CChar]) -> String?
{ var result = [CChar](repeating: 0, count: 8) - let nTokens = llama_token_to_piece(model, token, &result, Int32(result.count)) + let nTokens = llama_token_to_piece(model, token, &result, Int32(result.count), false) if nTokens < 0 { let actualTokensCount = -Int(nTokens) result = .init(repeating: 0, count: actualTokensCount) @@ -237,7 +237,8 @@ private func token_to_piece(token: llama_token, buffer: inout [CChar]) -> String model, token, &result, - Int32(result.count) + Int32(result.count), + false ) assert(check == actualTokensCount) } else { diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 70c43a3852732..737f882fb2d2e 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -322,7 +322,7 @@ actor LlamaContext { defer { result.deallocate() } - let nTokens = llama_token_to_piece(model, token, result, 8) + let nTokens = llama_token_to_piece(model, token, result, 8, false) if nTokens < 0 { let newResult = UnsafeMutablePointer<Int8>.allocate(capacity: Int(-nTokens)) @@ -330,7 +330,7 @@ actor LlamaContext { defer { newResult.deallocate() } - let nNewTokens = llama_token_to_piece(model, token, newResult, -nTokens) + let nNewTokens = llama_token_to_piece(model, token, newResult, -nTokens, false) let bufferPointer = UnsafeBufferPointer(start: newResult, count: Int(nNewTokens)) return Array(bufferPointer) } else { diff --git a/llama.cpp b/llama.cpp index ec4c1242b20bc..7440c740fefbc 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1600,12 +1600,12 @@ struct llama_mlock { }; using llama_mlocks = std::vector<std::unique_ptr<llama_mlock>>; -static std::string llama_token_to_piece(const struct llama_context * ctx, llama_token token) { +static std::string llama_token_to_piece(const struct llama_context * ctx, llama_token token, bool special) { std::vector<char> result(8, 0); - const int n_tokens = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size()); + const int n_tokens = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size(), special); if (n_tokens < 0) { result.resize(-n_tokens); - int check = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size()); + int check = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size(), special); GGML_ASSERT(check == -n_tokens); } else { @@ -13312,7 +13312,8 @@ void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * c for (size_t i = 0; i < candidates->size; ++i) { const llama_token id = candidates->data[i].id; - const std::string piece = llama_token_to_piece(ctx, id); + const std::string piece = llama_token_to_piece(ctx, id, false); + if (llama_token_is_eog(&ctx->model, id)) { if (!allow_eog) { candidates->data[i].logit = -INFINITY; @@ -13512,7 +13513,7 @@ void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar GGML_ASSERT(false); } - const std::string piece = llama_token_to_piece(ctx, token); + const std::string piece = llama_token_to_piece(ctx, token, false); // Note terminating 0 in decoded string const auto decoded = decode_utf8(piece, grammar->partial_utf8); @@ -16991,7 +16992,7 @@ static std::string llama_decode_text(const std::string & text) { } // does not write null-terminator to buf -int32_t llama_token_to_piece(const struct llama_model * model, llama_token token, char * buf, int32_t length) { +int32_t llama_token_to_piece(const struct llama_model * model, llama_token token, char * buf, int32_t length, bool
special) { if (0 <= token && token < llama_n_vocab(model)) { switch (llama_vocab_get_type(model->vocab)) { case LLAMA_VOCAB_TYPE_WPM: @@ -17006,7 +17007,9 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token } memcpy(buf, result.c_str(), result.length()); return result.length(); - } else if (llama_is_user_defined_token(model->vocab, token)) { + } else if ( + (llama_is_user_defined_token(model->vocab, token)) || + (llama_is_control_token (model->vocab, token) && special)) { std::string result = model->vocab.id_to_token[token].text; if (length < (int) result.length()) { return -(int) result.length(); @@ -17019,8 +17022,6 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token } memcpy(buf, "\xe2\x96\x85", 3); return 3; - } else if (llama_is_control_token(model->vocab, token)) { - ; } else if (llama_is_byte_token(model->vocab, token)) { if (length < 1) { return -1; @@ -17041,15 +17042,15 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token } memcpy(buf, result.c_str(), result.length()); return result.length(); - } else if (llama_is_user_defined_token(model->vocab, token)) { + } else if ( + (llama_is_user_defined_token(model->vocab, token)) || + (llama_is_control_token (model->vocab, token) && special)) { std::string result = model->vocab.id_to_token[token].text; if (length < (int) result.length()) { return -(int) result.length(); } memcpy(buf, result.c_str(), result.length()); return result.length(); - } else if (llama_is_control_token(model->vocab, token)) { - ; } break; } diff --git a/llama.h b/llama.h index 5bed97ad1ef9f..4effca42cc65d 100644 --- a/llama.h +++ b/llama.h @@ -828,11 +828,13 @@ extern "C" { // Uses the vocabulary in the provided context. // Does not write null terminator to the buffer. // User code is responsible to remove the leading whitespace of the first non-BOS token when decoding multiple tokens. + // @param special If true, special tokens are rendered in the output. LLAMA_API int32_t llama_token_to_piece( const struct llama_model * model, llama_token token, char * buf, - int32_t length); + int32_t length, + bool special); /// Apply chat template. Inspired by hf apply_chat_template() on python. /// Both "model" and "custom_template" are optional, but at least one is required. 
"custom_template" has higher precedence than "model" From 5cf5e7d490dfdd2e70bface2d35dfd14aa44b4fb Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Sun, 21 Apr 2024 18:48:53 +0100 Subject: [PATCH 013/101] `build`: generate hex dump of server assets during build (#6661) * `build`: generate hex dumps of server assets on the fly * build: workaround lack of -n on gnu xxd * build: don't use xxd in cmake * build: don't call xxd from build.zig * build: more idiomatic hexing * build: don't use xxd in Makefile (od hackery instead) * build: avoid exceeding max cmd line limit in makefile hex dump * build: hex dump assets at cmake build time (not config time) --- .gitignore | 4 + Makefile | 11 +- build.zig | 29 + examples/server/CMakeLists.txt | 21 +- examples/server/completion.js.hpp | 496 --- examples/server/deps.sh | 10 - examples/server/index.html.hpp | 2883 ----------------- examples/server/index.js.hpp | 1911 ----------- .../server/json-schema-to-grammar.mjs.hpp | 1826 ----------- scripts/xxd.cmake | 16 + 10 files changed, 78 insertions(+), 7129 deletions(-) delete mode 100644 examples/server/completion.js.hpp delete mode 100644 examples/server/index.html.hpp delete mode 100644 examples/server/index.js.hpp delete mode 100644 examples/server/json-schema-to-grammar.mjs.hpp create mode 100644 scripts/xxd.cmake diff --git a/.gitignore b/.gitignore index fdc5184a1e89b..5c14900844435 100644 --- a/.gitignore +++ b/.gitignore @@ -34,6 +34,7 @@ lcov-report/ gcovr-report/ build* +!build.zig cmake-build-* out/ tmp/ @@ -100,6 +101,9 @@ qnt-*.txt perf-*.txt examples/jeopardy/results.txt +examples/server/*.html.hpp +examples/server/*.js.hpp +examples/server/*.mjs.hpp poetry.lock poetry.toml diff --git a/Makefile b/Makefile index 72fdc6ba46bc7..b0b2ea997ee12 100644 --- a/Makefile +++ b/Makefile @@ -800,10 +800,19 @@ save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(C $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -server: examples/server/server.cpp examples/server/utils.hpp examples/server/httplib.h common/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +server: examples/server/server.cpp examples/server/utils.hpp examples/server/httplib.h common/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/server/json-schema-to-grammar.mjs.hpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h %.hpp $<,$^) -Iexamples/server $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) $(LWINSOCK2) +# Portable equivalent of `cd examples/server/public && xxd -i $(notdir $<) ../$(notdir $<).hpp`: +examples/server/%.hpp: examples/server/public/% Makefile + @( export NAME=$(subst .,_,$(subst -,_,$(notdir $<))) && \ + echo "unsigned char $${NAME}[] = {" && \ + cat $< | od -v -t x1 -An | sed -E 's/([0-9a-fA-F]+)/0x\1, /g' && \ + echo "};" && \ + echo "unsigned int $${NAME}_len = $(shell cat $< | wc -c );" \ + ) > $@ + gguf: examples/gguf/gguf.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) diff --git a/build.zig b/build.zig index c35e801f88129..96783574fe740 100644 --- a/build.zig +++ b/build.zig @@ -140,4 +140,33 @@ pub 
fn build(b: *std.build.Builder) !void { if (server.target.isWindows()) { server.linkSystemLibrary("ws2_32"); } + + const server_assets = [_][]const u8{ "index.html", "index.js", "completion.js", "json-schema-to-grammar.mjs" }; + for (server_assets) |asset| { + const input_path = b.fmt("examples/server/public/{s}", .{asset}); + const output_path = b.fmt("examples/server/{s}.hpp", .{asset}); + + // Portable equivalent of `b.addSystemCommand(&.{ "xxd", "-n", asset, "-i", input_path, output_path }) })`: + + const input = try std.fs.cwd().readFileAlloc(b.allocator, input_path, std.math.maxInt(usize)); + defer b.allocator.free(input); + + var buf = std.ArrayList(u8).init(b.allocator); + defer buf.deinit(); + + for (input) |byte| { + try std.fmt.format(buf.writer(), "0x{X:0>2}, ", .{byte}); + } + + var name = try std.mem.replaceOwned(u8, b.allocator, asset, "-", "_"); + defer b.allocator.free(name); + std.mem.replaceScalar(u8, name, '.', '_'); + + try std.fs.cwd().writeFile(output_path, b.fmt( + "unsigned char {s}[] = {{{s}}};\nunsigned int {s}_len = {d};\n", + .{ name, buf.items, name, input.len }, + )); + + std.debug.print("Dumped hex of \"{s}\" ({s}) to {s}\n", .{ input_path, name, output_path }); + } } diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt index 61f58417449a8..4b89c53022fd0 100644 --- a/examples/server/CMakeLists.txt +++ b/examples/server/CMakeLists.txt @@ -1,12 +1,29 @@ set(TARGET server) option(LLAMA_SERVER_VERBOSE "Build verbose logging option for Server" ON) option(LLAMA_SERVER_SSL "Build SSL support for the server" OFF) -include_directories(${CMAKE_CURRENT_SOURCE_DIR}) -add_executable(${TARGET} +include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) +set(TARGET_SRCS server.cpp utils.hpp httplib.h ) +set(PUBLIC_ASSETS index.html index.js completion.js json-schema-to-grammar.mjs ) +foreach(asset ${PUBLIC_ASSETS}) + set(input "${CMAKE_CURRENT_SOURCE_DIR}/public/${asset}") + set(output "${CMAKE_CURRENT_BINARY_DIR}/${asset}.hpp") + list(APPEND TARGET_SRCS ${output}) + add_custom_command( DEPENDS "${input}" OUTPUT "${output}" COMMAND "${CMAKE_COMMAND}" "-DINPUT=${input}" "-DOUTPUT=${output}" -P "${PROJECT_SOURCE_DIR}/scripts/xxd.cmake" ) +endforeach() +add_executable(${TARGET} ${TARGET_SRCS}) install(TARGETS ${TARGET} RUNTIME) target_compile_definitions(${TARGET} PRIVATE SERVER_VERBOSE=$<BOOL:${LLAMA_SERVER_VERBOSE}>) diff --git a/examples/server/completion.js.hpp b/examples/server/completion.js.hpp deleted file mode 100644 index 9a7f3986d4c07..0000000000000 --- a/examples/server/completion.js.hpp +++ /dev/null @@ -1,496 +0,0 @@ -unsigned char completion_js[] = { - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x44, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x3a, 0x20, 0x74, 0x72, - 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, - 0x69, 0x63, 0x74, 0x3a, 0x20, 0x35, 0x30, 0x30, 0x2c, 0x0a, 0x20, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x3a, - 0x20, 0x30, 0x2e, 0x32, 0x2c, 0x0a, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, - 0x3a, 0x20, 0x5b, 0x22, 0x3c, 0x2f, 0x73, 0x3e, 0x22, 0x5d, 0x0a, 0x7d, - 0x3b, 0x0a, 0x0a, 0x6c, 0x65, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x0a, - 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, - 0x73, 0x20,
0x74, 0x68, 0x65, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x20, 0x61, 0x73, 0x20, 0x61, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, - 0x74, 0x6f, 0x72, 0x2e, 0x20, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, - 0x6e, 0x64, 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x6f, 0x73, - 0x74, 0x20, 0x75, 0x73, 0x65, 0x20, 0x63, 0x61, 0x73, 0x65, 0x73, 0x2e, - 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, - 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, - 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, - 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, - 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x22, - 0x54, 0x65, 0x6c, 0x6c, 0x20, 0x6d, 0x65, 0x20, 0x61, 0x20, 0x6a, 0x6f, - 0x6b, 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, - 0x69, 0x63, 0x74, 0x3a, 0x20, 0x38, 0x30, 0x30, 0x7d, 0x29, 0x0a, 0x2f, - 0x2f, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, - 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, - 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, - 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, - 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x2f, 0x2f, 0x0a, - 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, - 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2a, 0x20, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, - 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, - 0x7d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x3d, 0x20, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x61, 0x70, 0x69, 0x5f, 0x75, 0x72, 0x6c, 0x20, 0x3d, - 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x61, 0x70, 0x69, 0x5f, - 0x75, 0x72, 0x6c, 0x20, 0x7c, 0x7c, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x0a, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, - 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x43, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, - 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x44, 0x65, 0x66, 0x61, 0x75, - 0x6c, 0x74, 0x73, 0x2c, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x7d, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, - 0x65, 0x73, 
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x20, 0x3d, 0x20, 0x61, 0x77, - 0x61, 0x69, 0x74, 0x20, 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x60, 0x24, - 0x7b, 0x61, 0x70, 0x69, 0x5f, 0x75, 0x72, 0x6c, 0x7d, 0x2f, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x60, 0x2c, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x3a, - 0x20, 0x27, 0x50, 0x4f, 0x53, 0x54, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x62, 0x6f, 0x64, 0x79, 0x3a, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, - 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x68, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x73, 0x3a, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x27, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x27, 0x3a, 0x20, 0x27, 0x6b, 0x65, 0x65, 0x70, 0x2d, 0x61, 0x6c, - 0x69, 0x76, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x27, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2d, 0x54, 0x79, 0x70, - 0x65, 0x27, 0x3a, 0x20, 0x27, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x27, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x27, 0x41, 0x63, 0x63, 0x65, 0x70, - 0x74, 0x27, 0x3a, 0x20, 0x27, 0x74, 0x65, 0x78, 0x74, 0x2f, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x2d, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x27, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x28, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, - 0x79, 0x20, 0x3f, 0x20, 0x7b, 0x27, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, - 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x60, 0x42, - 0x65, 0x61, 0x72, 0x65, 0x72, 0x20, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x7d, 0x60, - 0x7d, 0x20, 0x3a, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, - 0x6c, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x2e, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x0a, 0x20, 0x20, - 0x7d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x72, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x62, 0x6f, 0x64, 0x79, 0x2e, - 0x67, 0x65, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, 0x65, 0x63, - 0x6f, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x54, - 0x65, 0x78, 0x74, 0x44, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x28, 0x29, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, - 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, - 0x72, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x42, - 0x75, 0x66, 0x66, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70, 0x61, - 0x72, 0x74, 0x69, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x72, 0x65, 0x61, 0x64, - 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x74, 0x72, - 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, - 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 
0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, - 0x75, 0x6c, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, - 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x28, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x6f, 0x6e, 0x65, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x62, 0x72, 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x41, 0x64, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x6c, 0x65, 0x66, - 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x74, - 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, - 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, 0x20, 0x6c, - 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x2b, 0x20, 0x64, 0x65, - 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, - 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x20, 0x69, 0x66, 0x20, 0x74, - 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x72, - 0x61, 0x63, 0x74, 0x65, 0x72, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x6c, - 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x6e, - 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, 0x42, 0x72, - 0x65, 0x61, 0x6b, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x65, - 0x6e, 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x5c, 0x6e, 0x27, - 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x53, 0x70, 0x6c, 0x69, 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, - 0x65, 0x78, 0x74, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, - 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, - 0x74, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x5c, 0x6e, 0x27, - 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x49, 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, 0x78, 0x74, - 0x20, 0x64, 0x6f, 0x65, 0x73, 0x6e, 0x27, 0x74, 0x20, 0x65, 0x6e, 0x64, - 0x20, 0x77, 0x69, 0x74, 0x68, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, 0x65, - 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x2c, 0x20, 0x74, 0x68, 0x65, 0x6e, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x6c, 0x69, - 0x6e, 0x65, 0x20, 0x69, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x20, 0x69, 0x74, 0x20, 0x69, - 0x6e, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x74, - 0x6f, 0x20, 0x62, 0x65, 0x20, 0x61, 0x64, 0x64, 0x65, 0x64, 0x20, 0x74, - 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6e, 0x65, 0x78, 0x74, 0x20, 0x63, - 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, 0x74, 0x61, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, - 0x65, 0x6e, 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, - 0x42, 0x72, 0x65, 0x61, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 
0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, - 0x72, 0x20, 0x3d, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x70, 0x6f, - 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, - 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x52, 0x65, - 0x73, 0x65, 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, - 0x20, 0x69, 0x66, 0x20, 0x77, 0x65, 0x20, 0x68, 0x61, 0x76, 0x65, 0x20, - 0x61, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, - 0x20, 0x61, 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x65, 0x6e, 0x64, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x50, 0x61, 0x72, 0x73, 0x65, 0x20, - 0x61, 0x6c, 0x6c, 0x20, 0x73, 0x73, 0x65, 0x20, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x64, 0x64, 0x20, 0x74, - 0x68, 0x65, 0x6d, 0x20, 0x74, 0x6f, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, - 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x72, 0x65, 0x67, 0x65, 0x78, 0x20, 0x3d, 0x20, 0x2f, 0x5e, - 0x28, 0x5c, 0x53, 0x2b, 0x29, 0x3a, 0x5c, 0x73, 0x28, 0x2e, 0x2a, 0x29, - 0x24, 0x2f, 0x67, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x66, 0x6f, 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, - 0x69, 0x6e, 0x65, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x20, - 0x3d, 0x20, 0x72, 0x65, 0x67, 0x65, 0x78, 0x2e, 0x65, 0x78, 0x65, 0x63, - 0x28, 0x6c, 0x69, 0x6e, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6d, 0x61, 0x74, 0x63, - 0x68, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5b, 0x6d, 0x61, - 0x74, 0x63, 0x68, 0x5b, 0x31, 0x5d, 0x5d, 0x20, 0x3d, 0x20, 0x6d, 0x61, - 0x74, 0x63, 0x68, 0x5b, 0x32, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6e, 0x63, - 0x65, 0x20, 0x77, 0x65, 0x20, 0x6b, 0x6e, 0x6f, 0x77, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, - 0x63, 0x70, 0x70, 0x2c, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x6a, - 0x75, 0x73, 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x20, 0x74, - 0x68, 0x65, 0x20, 0x6a, 0x73, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x64, - 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, - 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, - 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x4a, - 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x72, 0x65, - 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x72, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x79, 0x69, 
-  [... remainder of the xxd byte array for completion.js elided for readability; decoded, it finishes the llama() streaming generator (capturing generation_settings when the stop token arrives, parsing server errors, and rethrowing "slot unavailable" so upstream callers can retry), then defines the llamaEventTarget, llamaPromise, llamaComplete (deprecated), and llamaModelInfo helpers ...]
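Decoded from the byte values above (whitespace lightly normalized by the editor), the non-streaming helper simply drains the llama() generator; it sits immediately before the closing brace below:

// Call llama, return a promise that resolves to the completed text.
// This does not support streaming.
export const llamaPromise = (prompt, params = {}, config = {}) => {
  return new Promise(async (resolve, reject) => {
    let content = "";
    try {
      // accumulate streamed chunks into one string
      for await (const chunk of llama(prompt, params, config)) {
        content += chunk.data.content;
      }
      resolve(content);
    } catch (error) {
      reject(error);
    }
  });
};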
-};
-unsigned int completion_js_len = 5909;
diff --git a/examples/server/deps.sh b/examples/server/deps.sh
index ea23e64500b09..d28378901a5cf 100755
--- a/examples/server/deps.sh
+++ b/examples/server/deps.sh
@@ -8,13 +8,3 @@ PUBLIC=$DIR/public
 echo "download js bundle files"
 curl https://npm.reversehttp.com/@preact/signals-core,@preact/signals,htm/preact,preact,preact/hooks > $PUBLIC/index.js
 echo >> $PUBLIC/index.js # add newline
-
-FILES=$(ls $PUBLIC)
-
-cd $PUBLIC
-for FILE in $FILES; do
-  echo "generate $FILE.hpp"
-
-  # use simple flag for old version of xxd
-  xxd -i $FILE > $DIR/$FILE.hpp
-done
diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp
deleted file mode 100644
index 122d3ff999f04..0000000000000
--- a/examples/server/index.html.hpp
+++ /dev/null
@@ -1,2883 +0,0 @@
-unsigned char index_html[] = {
-  [... xxd byte array for index.html elided for readability; decoded, the file holds the server's built-in chat UI: inline CSS (flex layout, fieldset grids, popover and loading-animation styles, a dark-mode media query) followed by a <script type="module"> that imports the Preact helpers from ./index.js, llama from ./completion.js, and SchemaConverter from ./json-schema-to-grammar.mjs, then defines the default session and params signals (n_predict: 400, temperature: 0.7, repeat_penalty: 1.18, top_k: 40, top_p: 0.95, min_p: 0.05, mirostat off, cache_prompt: true, ...) and localStorage-backed user-template handling ...]
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x6e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, - 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x77, 0x61, 0x73, - 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, - 0x6f, 0x61, 0x64, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, - 0x41, 0x70, 0x70, 0x6c, 0x79, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, - 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, - 0x6f, 0x67, 0x28, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, - 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, - 0x65, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, - 0x27, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x41, 0x75, - 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x77, 0x65, 0x20, - 0x64, 0x6f, 0x6e, 0x27, 0x74, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20, 0x74, - 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x20, - 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x65, 0x74, - 0x27, 0x73, 0x20, 0x63, 
0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, - 0x6e, 0x65, 0x77, 0x20, 0x6f, 0x6e, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, - 0x3d, 0x20, 0x27, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x2d, 0x27, 0x20, 0x2b, 0x20, 0x44, 0x61, 0x74, 0x65, - 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, - 0x69, 0x6e, 0x67, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x27, 0x6e, - 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, - 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, - 0x53, 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x73, 0x20, 0x27, 0x20, - 0x2b, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, - 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, - 0x61, 0x76, 0x65, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, - 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, - 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, - 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x69, 0x74, - 0x20, 0x62, 0x61, 0x63, 0x6b, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x70, - 0x70, 0x6c, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, - 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, - 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, - 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, - 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, - 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, - 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, - 0x65, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x2c, - 0x20, 0x27, 0x64, 0x61, 
0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, - 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, - 0x27, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x67, 0x20, 0x66, 0x6f, - 0x72, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, - 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, - 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x45, 0x4e, 0x44, - 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, - 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, 0x72, 0x6f, - 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, - 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, - 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, - 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, - 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, - 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x61, 0x73, 0x20, 0x74, 0x68, - 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x73, 0x74, 0x61, 0x72, 0x74, - 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, 0x68, 0x61, 0x74, 0x3f, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, - 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x63, - 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, - 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x74, 0x72, 
0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x74, 0x72, - 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, - 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, - 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x74, 0x72, 0x2c, - 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x78, 0x74, 0x72, 0x61, - 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x74, 0x74, - 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, - 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x2c, 0x20, 0x2e, 0x2e, - 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x73, 0x74, - 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x41, 0x6c, - 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, 0x2e, 0x2a, 0x3f, 0x29, - 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, 0x28, 0x5f, 0x2c, 0x20, - 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, - 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x61, 0x73, 0x79, - 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, 0x61, 0x72, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x3d, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 
0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, 0x6c, 0x72, 0x65, 0x61, - 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x22, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, - 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, - 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, - 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, - 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, - 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x3a, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x61, 0x70, 0x69, 0x5f, 0x75, 0x72, 0x6c, - 0x3a, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, - 0x61, 0x74, 0x68, 0x6e, 0x61, 0x6d, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2f, 0x2b, 0x24, 0x2f, 0x2c, 0x20, - 0x27, 0x27, 0x29, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, - 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, - 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, - 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, - 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x24, 0x2f, - 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, - 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 
0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, - 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x3a, - 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, - 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, - 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, - 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x22, 0x27, 0x2c, 0x20, 0x73, - 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x64, - 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, - 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, 0x3d, 0x20, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, - 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, 0x20, 0x21, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x22, 0x54, 0x68, - 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x20, 0x77, 0x61, 0x73, - 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, - 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, - 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, 0x72, 0x20, 0x74, 0x68, 0x65, 0x20, - 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, 0x6e, 0x27, 0x74, 0x20, 0x62, 0x65, - 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x2e, 0x22, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, - 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, - 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, - 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 
0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, - 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x65, 0x6e, 0x64, - 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, 0x6f, 0x20, - 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x3d, 0x20, - 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x6d, 0x73, 0x67, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, - 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, 0x65, - 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x2e, - 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2c, 0x20, - 0x5b, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x22, 0x2c, - 0x20, 0x6d, 0x73, 0x67, 0x5d, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x28, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x6d, 0x73, 0x67, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, - 0x74, 0x6f, 0x72, 0x79, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, - 0x61, 0x70, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x28, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, 0x69, - 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, - 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, - 0x29, 0x20, 0x3f, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, - 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, - 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, - 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, - 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x22, 0x5c, 0x6e, 0x22, 0x29, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x60, 0x41, 0x20, 0x63, - 0x68, 0x61, 0x74, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, - 0x61, 0x20, 0x63, 0x75, 0x72, 0x69, 0x6f, 0x75, 0x73, 0x20, 0x68, 0x75, - 0x6d, 0x61, 0x6e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x6e, 0x20, 0x61, - 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x69, 0x61, 0x6c, 0x20, 0x69, 0x6e, - 0x74, 0x65, 0x6c, 0x6c, 0x69, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x61, - 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x2e, 0x20, 0x54, 0x68, - 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x20, - 0x67, 0x69, 0x76, 0x65, 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, 0x75, - 0x6c, 0x2c, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x2c, - 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x6f, 0x6c, 0x69, 0x74, 0x65, 0x20, - 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x74, - 0x68, 0x65, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x27, 0x73, 0x20, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x5c, 0x6e, 0x55, - 0x53, 0x45, 0x52, 0x3a, 0x5b, 0x69, 0x6d, 0x67, 0x2d, 0x31, 0x30, 0x5d, - 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x7d, 0x5c, 0x6e, 0x41, 0x53, 0x53, 0x49, - 0x53, 0x54, 0x41, 0x4e, 0x54, 0x3a, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, - 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, - 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, - 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, - 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x22, 0x3c, 0x2f, 0x73, 0x3e, 0x22, - 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, - 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x2c, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, - 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x5d, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x7b, - 0x7b, 0x63, 0x68, 0x61, 
0x72, 0x7d, 0x7d, 0x22, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, - 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, 0x65, 0x61, - 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x2e, 0x2e, - 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x2c, 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x5d, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, - 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, - 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, - 0x20, 0x22, 0x22, 0x29, 0x2e, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, - 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, 0x5b, 0x5f, - 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x41, 0x72, - 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, - 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x20, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, - 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, - 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, - 0x72, 0x61, 0x6e, 0x73, 
0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x3d, 0x20, - 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x20, 0x3d, - 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x62, 0x6f, 0x72, 0x74, - 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x5d, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x6c, 0x6f, 0x61, - 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x67, - 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x79, 0x49, - 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x28, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, - 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, - 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, - 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, 0x3d, 0x20, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x66, - 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, 0x20, - 0x3d, 0x20, 0x6e, 0x65, 
0x77, 0x20, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, - 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x3d, 0x20, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, - 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, - 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, - 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, - 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, - 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, 0x6d, 0x61, 0x67, 0x65, - 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, 0x62, 0x61, 0x73, 0x65, - 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x69, - 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, - 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, 0x44, 0x61, 0x74, 0x61, - 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x28, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, - 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, - 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x20, - 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, - 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x6e, - 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, 0x20, 0x3d, - 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x68, 0x69, 0x63, - 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, 0x20, 0x26, 0x26, 0x20, - 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, - 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x28, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, 0x6d, 0x20, - 0x6f, 0x6e, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x3d, 0x24, 0x7b, 0x73, - 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x24, - 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x6c, 0x6f, 0x61, - 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, 0x6e, 0x75, 0x6c, 0x6c, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x6b, - 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, 0x24, 0x7b, 0x65, 0x6e, - 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, - 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, 0x73, 0x6f, 0x6d, 0x65, - 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x22, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, - 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, 0x22, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, - 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, 0x20, 0x64, 0x69, 0x73, - 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, - 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, - 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x75, 0x70, - 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x7d, 0x3e, 0x55, - 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x3c, - 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, - 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, - 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, 0x64, 0x69, 0x73, - 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, 0x67, 0x65, 0x6e, - 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, 0x62, 0x75, 0x74, - 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x72, - 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, 0x3c, - 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, - 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x43, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x74, - 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, - 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x62, 0x75, 0x74, 
- [~1,000 removed lines (roughly 12 KB decoded) of auto-generated hex bytes
-  elided for readability. This hunk removes the old xxd-style C byte array
-  that embeds the server's Web UI (evidently the generated header for
-  examples/server/public/index.html). Decoded, the removed bytes are the
-  Preact source for the ChatLog, ConfigForm (with FloatField, IntField and
-  BoolField controls for temperature, repeat_penalty, top_k, top_p, min_p,
-  tfs_z, typical_p, presence/frequency penalty and mirostat), GrammarControl,
-  PromptControlFieldSet, ChatConfigForm, CompletionConfigForm and
-  Probabilities components, plus helpers such as updateParams*,
-  convertJSONSchemaGrammar and probColor. A decoded excerpt follows below.]
0x3d, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x43, 0x6f, - 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, - 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x28, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, - 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, - 0x6e, 0x74, 0x27, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, - 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x3d, - 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, - 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x62, 0x2d, 0x73, 0x65, - 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, - 0x6d, 0x61, 0x70, 0x28, 0x28, 0x70, 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, - 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6b, 0x65, - 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x3d, 0x24, - 0x7b, 0x60, 0x70, 0x72, 0x6f, 0x62, 0x3a, 0x20, 0x24, 0x7b, 0x70, 0x2e, - 0x70, 0x72, 0x6f, 0x62, 0x7d, 0x60, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x27, - 0x30, 0x2e, 0x33, 0x65, 0x6d, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, - 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, 0x6f, - 0x72, 0x3a, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, - 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, - 0x28, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, 0x3a, 0x20, 0x27, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, - 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, 0x7d, 0x3a, 0x20, - 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x4d, 0x61, - 0x74, 0x68, 0x2e, 0x66, 
0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, - 0x72, 0x6f, 0x62, 0x20, 0x2a, 0x20, 0x31, 0x30, 0x30, 0x29, 0x7d, 0x25, - 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x60, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, - 0x7b, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x20, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x62, 0x61, 0x63, 0x6b, - 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, - 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x7d, 0x7d, 0x20, 0x70, - 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, - 0x65, 0x6e, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, - 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, - 0x69, 0x6d, 0x29, 0x20, 0x3f, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, - 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x60, 0x20, 0x3a, 0x20, 0x6d, 0x73, 0x67, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x70, 0x6f, - 0x6f, 0x72, 0x20, 0x6d, 0x61, 0x6e, 0x73, 0x20, 0x6d, 0x61, 0x72, 0x6b, - 0x64, 0x6f, 0x77, 0x6e, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x6d, 0x65, 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x4d, 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, - 0x73, 0x68, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x64, 0x20, 0x3d, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x26, 0x2f, 0x67, 0x2c, 0x20, 0x27, - 0x26, 0x61, 0x6d, 0x70, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x3c, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x6c, 0x74, 0x3b, - 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, - 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3e, 0x2f, 0x67, - 0x2c, 0x20, 0x27, 0x26, 0x67, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x23, 0x7b, 0x31, 0x2c, 0x36, 0x7d, 0x20, - 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, - 0x3c, 0x68, 0x33, 0x3e, 
0x24, 0x31, 0x3c, 0x2f, 0x68, 0x33, 0x3e, 0x27, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, - 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x5c, 0x2a, - 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, - 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, - 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, - 0x29, 0x5f, 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, - 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, - 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, - 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, - 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, - 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x28, - 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, - 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x60, 0x60, 0x2e, 0x2a, 0x3f, - 0x5c, 0x6e, 0x28, 0x5b, 0x5c, 0x73, 0x5c, 0x53, 0x5d, 0x2a, 0x3f, 0x29, - 0x60, 0x60, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x70, 0x72, 0x65, - 0x3e, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, - 0x6f, 0x64, 0x65, 0x3e, 0x3c, 0x2f, 0x70, 0x72, 0x65, 0x3e, 0x27, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x28, 0x2e, 0x2a, 0x3f, - 0x29, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x63, 0x6f, 0x64, 0x65, - 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x27, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, - 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x27, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, - 0x6e, 0x20, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, - 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, - 0x4c, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, - 0x3a, 0x20, 0x6d, 0x64, 0x20, 0x7d, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x6f, 0x64, 0x65, 0x6c, - 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, - 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, - 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, - 0x70, 0x61, 0x6e, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 
0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, - 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x7d, 0x20, - 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x2c, 0x20, 0x24, - 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, - 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x7d, 0x20, 0x63, 0x61, 0x63, - 0x68, 0x65, 0x64, 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, - 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x70, 0x72, 0x65, 0x64, - 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x74, 0x6f, - 0x6b, 0x65, 0x6e, 0x5f, 0x6d, 0x73, 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, - 0x65, 0x64, 0x28, 0x29, 0x7d, 0x6d, 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, - 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, - 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x70, 0x72, - 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, 0x5f, - 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, - 0x65, 0x64, 0x28, 0x32, 0x29, 0x7d, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, - 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x70, 0x6f, - 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x6f, 0x70, - 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, - 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x4f, 0x70, - 0x65, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, - 0x61, 0x6c, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, - 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, - 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x74, 0x6f, - 0x70, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x2c, 0x20, 0x6c, 0x65, - 0x66, 0x74, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x20, 0x7d, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x20, - 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, - 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, - 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x6f, 0x67, 0x67, - 0x6c, 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, - 0x28, 0x29, 0x20, 0x3d, 
0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x62, 0x75, 0x74, 0x74, - 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x63, - 0x74, 0x20, 0x3d, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, - 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, - 0x74, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6c, 0x69, - 0x65, 0x6e, 0x74, 0x52, 0x65, 0x63, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6f, 0x73, - 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x60, 0x24, 0x7b, - 0x72, 0x65, 0x63, 0x74, 0x2e, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x20, - 0x2b, 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, 0x72, - 0x6f, 0x6c, 0x6c, 0x59, 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x66, 0x74, 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, 0x2e, - 0x6c, 0x65, 0x66, 0x74, 0x20, 0x2b, 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, - 0x77, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x58, 0x7d, 0x70, 0x78, - 0x60, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x73, - 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, - 0x20, 0x21, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, - 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x20, 0x3d, 0x20, 0x28, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x21, 0x70, - 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, - 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, - 0x67, 0x65, 0x74, 0x29, 0x20, 0x26, 0x26, 0x20, 0x21, 0x62, 0x75, 0x74, - 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, - 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x2e, 0x61, 0x64, 
0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, - 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, - 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, 0x61, 0x6e, 0x64, - 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, - 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x28, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x72, - 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, - 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, - 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, 0x61, 0x6e, 0x64, - 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, - 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, - 0x20, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, - 0x70, 0x61, 0x6e, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x7d, - 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x52, 0x65, 0x66, 0x7d, 0x20, 0x6f, 0x6e, 0x43, 0x6c, 0x69, 0x63, - 0x6b, 0x3d, 0x24, 0x7b, 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, - 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x3e, 0x24, 0x7b, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, - 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x26, 0x26, 0x20, 0x68, 0x74, - 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, - 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x3d, 0x22, 0x23, 0x70, 0x6f, 0x72, 0x74, - 0x61, 0x6c, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, - 0x72, 0x52, 0x65, 0x66, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x3d, 0x22, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x2d, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, - 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, - 0x74, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6c, 0x65, 0x66, 0x74, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3e, 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x53, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x3a, 0x20, 0x70, 0x72, 0x65, 0x61, 0x63, - 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x28, 0x68, 0x74, - 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x65, 0x76, 0x65, 0x6c, 0x6f, 0x70, - 0x69, 0x74, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, - 0x72, 0x74, 0x61, 0x6c, 0x2f, 0x62, 0x6c, 0x6f, 0x62, 0x2f, 0x6d, 0x61, - 0x73, 0x74, 0x65, 0x72, 0x2f, 0x73, 0x72, 0x63, 0x2f, 0x70, 0x72, 0x65, - 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e, 0x6a, - 0x73, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x2a, 0x20, 0x52, - 0x65, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x65, 0x73, - 0x63, 0x65, 0x6e, 0x64, 0x61, 0x6e, 0x74, 0x73, 0x20, 0x69, 0x6e, 0x74, - 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x67, 0x69, 0x76, 0x65, 0x6e, 0x20, - 0x43, 0x53, 0x53, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x20, 0x2a, 0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x65, 0x78, 0x74, - 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, - 0x6f, 0x72, 0x20, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, - 0x20, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x20, 0x21, 0x3d, 0x3d, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, - 0x69, 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, - 0x61, 0x79, 0x65, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, - 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, - 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, - 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, - 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x2e, 0x62, 0x69, 0x6e, - 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, - 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, - 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, - 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x26, 0x26, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, - 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x29, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, - 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x2e, - 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, - 0x65, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x6f, 0x64, 0x65, - 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x27, 0x20, 0x3f, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x3a, 0x20, 0x6e, - 0x6f, 0x64, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x73, 0x68, 0x6f, 0x77, - 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, - 0x65, 0x64, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x63, 0x6c, 0x65, 0x61, 0x6e, 0x20, 0x75, 0x70, 0x20, 0x6f, 0x6c, 0x64, - 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x20, 0x69, 0x66, 0x20, 0x6d, 0x6f, 0x76, - 0x69, 0x6e, 0x67, 0x20, 0x62, 0x61, 0x73, 0x65, 0x73, 0x3a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, - 0x74, 0x6f, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, - 0x69, 0x6e, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, - 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, - 0x6f, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, - 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, - 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, - 0x20, 0x2f, 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, - 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, - 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, - 0x74, 0x6f, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x69, - 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, - 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x7d, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, 0x20, 0x26, 0x26, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, - 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, - 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, 0x68, 0x2d, 0x6f, 0x72, - 0x64, 0x65, 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x73, 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, 0x69, 0x72, 0x73, 0x74, - 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, 0x66, 0x20, 0x69, 0x74, - 0x20, 0x65, 0x78, 0x69, 
0x73, 0x74, 0x73, 0x2e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x61, 0x73, 0x20, - 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, - 0x6c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, - 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, - 0x72, 0x6f, 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, - 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x74, 0x43, 0x68, - 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x7b, - 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7d, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, - 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x70, - 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, - 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x71, 0x75, 0x65, 0x72, 0x79, - 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x55, 0x52, 0x4c, 0x53, 0x65, - 0x61, 0x72, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x28, 0x6c, - 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x73, 0x65, 0x61, 0x72, - 0x63, 0x68, 0x29, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x22, 0x71, 0x22, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x71, 0x75, 0x65, 0x72, 0x79, 0x29, 0x20, 0x63, 0x68, 0x61, - 0x74, 0x28, 0x71, 0x75, 0x65, 0x72, 0x79, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, - 0x73, 0x73, 0x3d, 0x22, 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x24, 0x7b, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x7d, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x65, 0x61, 0x64, - 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x31, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x68, 0x31, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x68, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6d, 0x61, 0x69, 0x6e, 0x20, - 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, - 0x3e, 0x0a, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, - 0x72, 0x74, 0x65, 0x64, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, - 0x20, 0x43, 0x68, 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3a, 0x20, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x7d, 0x20, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x6d, 0x61, 0x69, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x77, 0x72, 0x69, 0x74, - 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, 0x27, - 0x20, 0x3f, 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, - 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, - 0x3e, 0x3c, 0x24, 0x7b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, - 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x7d, - 0x20, 0x2f, 0x3e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x50, - 0x6f, 0x77, 0x65, 0x72, 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x3c, 0x61, - 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, - 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x67, 0x67, 0x65, 0x72, 0x67, 0x61, 0x6e, 0x6f, 0x76, 0x2f, - 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x22, 0x3e, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x61, 0x3e, - 0x20, 0x61, 0x6e, 0x64, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, - 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x67, - 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x22, 0x3e, 0x67, 0x67, 0x6d, 0x6c, 0x2e, - 0x61, 0x69, 0x3c, 0x2f, 0x61, 0x3e, 0x2e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, - 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x28, 0x68, 0x28, 0x41, 0x70, 0x70, 0x29, 0x2c, 0x20, 0x64, 0x6f, - 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, - 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x27, 0x23, 0x63, - 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x27, 0x29, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3e, - 0x0a, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x3e, 0x0a, 0x0a, 0x3c, 0x62, - 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, - 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, - 0x72, 0x22, 0x3e, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x66, 0x69, 0x6c, - 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, - 0x6e, 0x70, 0x75, 0x74, 0x22, 0x20, 0x61, 0x63, 0x63, 0x65, 0x70, 0x74, - 0x3d, 0x22, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x2f, 0x2a, 0x22, 0x20, 0x73, - 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, - 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x22, 0x3e, 0x0a, 0x20, - 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x70, 0x6f, 0x72, 0x74, 0x61, - 0x6c, 0x22, 0x3e, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x3c, 0x2f, - 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74, 0x6d, - 0x6c, 0x3e, 0x0a, 0x0a
-};
-unsigned int index_html_len = 34552;
diff --git a/examples/server/index.js.hpp b/examples/server/index.js.hpp
deleted file mode 100644
index 08b26550d38ca..0000000000000
--- a/examples/server/index.js.hpp
+++ /dev/null
@@ -1,1911 +0,0 @@
-unsigned char index_js[] = {
- 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x53, 0x79, 0x6d, 0x62, - 0x6f, 0x6c, 0x2e, 0x66, 0x6f, 0x72, 0x28, 0x22, 0x70, 0x72, 0x65, 0x61, - 0x63, 0x74, 0x2d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x73, 0x22, 0x29, - 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6e, 0x28, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x31, 0x29, 0x7b, 0x72, 0x2d, - 0x2d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, - 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, - 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x69, 0x3b, 0x69, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x75, 0x2b, 0x2b, 0x3b, 0x77, - 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, - 0x3d, 0x5f, 0x2e, 0x6f, 0x3b, 0x5f, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3b, 0x5f, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, - 0x69, 0x66, 0x28, 0x21, 0x28, 0x38, 0x26, 0x5f, 0x2e, 0x66, 0x29, 0x26, - 0x26, 0x68, 0x28, 0x5f, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x5f, 0x2e, - 0x63, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, - 0x6e, 0x3d, 0x21, 0x30, 0x7d, 0x7d, 0x5f, 0x3d, 0x69, 0x7d, 0x7d, 0x75, - 0x3d, 0x30, 0x3b, 0x72, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, - 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x72, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x74, 0x28, 0x29, 0x3b, 0x72, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, - 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x6e, 0x28, 0x29, 0x7d, 0x7d, - 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x2c, 0x69, 0x3b, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6f, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x5f, 0x3b, 0x5f, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, 0x61, - 0x6c, 0x6c, 0x79, 0x7b, 0x5f, 0x3d, 0x6e, 0x7d, 0x7d, 0x6c, 0x65, 0x74, - 0x20, 0x72, 0x3d, 0x30, 0x2c, 0x75, 0x3d, 0x30, 0x2c, 0x6c, 0x3d, 0x30, - 0x3b, 0x66, 0x75, 0x6e,
0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x73, 0x28, - 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x3d, 0x3d, 0x3d, 0x5f, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, - 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x6e, 0x3b, 0x69, 0x66, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, - 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x5f, 0x29, 0x7b, 0x6e, 0x3d, - 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, 0x74, 0x2c, 0x70, 0x3a, 0x5f, - 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, - 0x74, 0x3a, 0x5f, 0x2c, 0x65, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x72, 0x3a, - 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x5f, 0x2e, 0x73, 0x29, 0x5f, 0x2e, 0x73, 0x2e, 0x6e, - 0x3d, 0x6e, 0x3b, 0x5f, 0x2e, 0x73, 0x3d, 0x6e, 0x3b, 0x74, 0x2e, 0x6e, - 0x3d, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, 0x26, 0x5f, 0x2e, 0x66, - 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, - 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x29, 0x7b, 0x6e, - 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x6e, 0x2e, - 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x70, 0x29, - 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x3b, 0x6e, 0x2e, - 0x70, 0x3d, 0x5f, 0x2e, 0x73, 0x3b, 0x6e, 0x2e, 0x6e, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3b, 0x5f, 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, - 0x3b, 0x5f, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x66, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3d, 0x30, - 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x7d, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x62, 0x72, 0x61, 0x6e, 0x64, 0x3d, 0x74, - 0x3b, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, - 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x3b, - 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, - 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x21, - 0x3d, 0x3d, 0x74, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, - 0x3d, 0x3d, 0x74, 0x2e, 0x65, 0x29, 0x7b, 0x74, 0x2e, 0x78, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x2e, 0x65, 0x3d, 0x74, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, 0x74, 0x7d, 0x7d, 0x3b, 0x66, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x6e, 
- [... long hunk of hex-encoded bytes elided: this section of the patch removes part of a generated C byte array that embeds a minified JavaScript bundle (Preact core plus the @preact/signals runtime, as the decoded bytes show). The raw `0x..` listing is machine-generated build output and carries no human-reviewable content ...]
0x20, 0x6c, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, - 0x6f, 0x72, 0x28, 0x74, 0x2c, 0x65, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x73, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, - 0x2c, 0x72, 0x3b, 0x77, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x77, 0x2e, 0x5f, - 0x5f, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x2c, 0x69, 0x3d, 0x28, 0x5f, 0x3d, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x29, 0x3f, 0x6e, 0x75, - 0x6c, 0x6c, 0x3a, 0x65, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, - 0x7c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6f, 0x3d, 0x5b, 0x5d, 0x2c, - 0x72, 0x3d, 0x5b, 0x5d, 0x2c, 0x5f, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x3d, - 0x28, 0x21, 0x5f, 0x26, 0x26, 0x65, 0x7c, 0x7c, 0x6e, 0x29, 0x2e, 0x5f, - 0x5f, 0x6b, 0x3d, 0x4c, 0x28, 0x6a, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, - 0x5b, 0x74, 0x5d, 0x29, 0x2c, 0x69, 0x7c, 0x7c, 0x46, 0x2c, 0x46, 0x2c, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6f, - 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, - 0x6e, 0x74, 0x2c, 0x21, 0x5f, 0x26, 0x26, 0x65, 0x3f, 0x5b, 0x65, 0x5d, - 0x3a, 0x69, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x6e, 0x2e, 0x66, 0x69, - 0x72, 0x73, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x3f, 0x53, 0x2e, 0x63, - 0x61, 0x6c, 0x6c, 0x28, 0x6e, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x4e, - 0x6f, 0x64, 0x65, 0x73, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6f, - 0x2c, 0x21, 0x5f, 0x26, 0x26, 0x65, 0x3f, 0x65, 0x3a, 0x69, 0x3f, 0x69, - 0x2e, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x2e, 0x66, 0x69, 0x72, 0x73, 0x74, - 0x43, 0x68, 0x69, 0x6c, 0x64, 0x2c, 0x5f, 0x2c, 0x72, 0x29, 0x2c, 0x69, - 0x74, 0x28, 0x6f, 0x2c, 0x74, 0x2c, 0x72, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x29, 0x7b, 0x73, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x66, 0x74, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x63, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x3d, 0x57, 0x28, - 0x7b, 0x7d, 0x2c, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x3b, - 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x26, 0x26, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2e, - 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, - 0x26, 0x26, 0x28, 0x72, 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2e, - 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, - 0x29, 0x2c, 0x6e, 0x29, 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, - 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, - 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x69, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, - 0x75, 0x5b, 0x6f, 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, - 0x3d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x72, 0x3f, 0x72, 0x5b, 0x6f, 0x5d, 0x3a, - 0x6e, 0x5b, 0x6f, 0x5d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x32, 0x26, 0x26, 0x28, 0x75, 0x2e, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, - 0x6d, 0x65, 0x6e, 0x74, 
0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x3e, 0x33, 0x3f, 0x53, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, - 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, - 0x29, 0x2c, 0x52, 0x28, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x75, - 0x2c, 0x5f, 0x7c, 0x7c, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x69, 0x7c, - 0x7c, 0x74, 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, - 0x7b, 0x5f, 0x5f, 0x63, 0x3a, 0x6e, 0x3d, 0x22, 0x5f, 0x5f, 0x63, 0x43, - 0x22, 0x2b, 0x54, 0x2b, 0x2b, 0x2c, 0x5f, 0x5f, 0x3a, 0x74, 0x2c, 0x43, - 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, 0x3a, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, - 0x72, 0x65, 0x6e, 0x28, 0x6e, 0x29, 0x7d, 0x2c, 0x50, 0x72, 0x6f, 0x76, - 0x69, 0x64, 0x65, 0x72, 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x2c, 0x5f, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x7c, 0x7c, 0x28, 0x65, 0x3d, 0x5b, 0x5d, 0x2c, - 0x28, 0x5f, 0x3d, 0x7b, 0x7d, 0x29, 0x5b, 0x6e, 0x5d, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x43, - 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, 0x7d, 0x2c, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x26, 0x26, 0x65, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x7a, 0x28, 0x74, - 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, - 0x75, 0x62, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x65, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, - 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, - 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3b, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, - 0x6f, 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x29, 0x7b, 0x65, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x63, 0x65, - 0x28, 0x65, 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x74, - 0x29, 0x2c, 0x31, 0x29, 0x2c, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x63, 0x61, - 0x6c, 0x6c, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x7d, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, - 0x64, 0x65, 0x72, 0x2e, 0x5f, 0x5f, 0x3d, 0x65, 0x2e, 0x43, 0x6f, 0x6e, - 0x73, 0x75, 0x6d, 0x65, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, - 0x74, 0x54, 0x79, 0x70, 
0x65, 0x3d, 0x65, 0x7d, 0x53, 0x3d, 0x56, 0x2e, - 0x73, 0x6c, 0x69, 0x63, 0x65, 0x2c, 0x77, 0x3d, 0x7b, 0x5f, 0x5f, 0x65, - 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, - 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, - 0x61, 0x72, 0x20, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3b, 0x6e, 0x3d, 0x6e, - 0x2e, 0x5f, 0x5f, 0x3b, 0x29, 0x69, 0x66, 0x28, 0x28, 0x69, 0x3d, 0x6e, - 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x26, 0x26, 0x21, 0x69, 0x2e, 0x5f, 0x5f, - 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x6f, 0x3d, 0x69, - 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, - 0x29, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x2e, 0x67, - 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x26, - 0x26, 0x28, 0x69, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, - 0x28, 0x6f, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, - 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x45, 0x72, - 0x72, 0x6f, 0x72, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x72, 0x3d, 0x69, 0x2e, - 0x5f, 0x5f, 0x64, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x69, - 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, - 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, - 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x2c, 0x5f, 0x7c, 0x7c, 0x7b, 0x7d, - 0x29, 0x2c, 0x72, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x64, 0x29, 0x2c, 0x72, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x2e, 0x5f, 0x5f, - 0x45, 0x3d, 0x69, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, - 0x7b, 0x74, 0x3d, 0x6e, 0x7d, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, - 0x7d, 0x7d, 0x2c, 0x78, 0x3d, 0x30, 0x2c, 0x43, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x2e, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x7d, 0x2c, 0x71, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x65, - 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x65, 0x3b, 0x65, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3f, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x5f, 0x73, 0x3a, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, - 0x3d, 0x57, 0x28, 0x7b, 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x29, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, - 0x20, 0x74, 0x26, 0x26, 0x28, 0x74, 0x3d, 0x74, 0x28, 0x57, 0x28, 0x7b, - 0x7d, 0x2c, 0x65, 0x29, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x29, 0x29, 0x2c, 0x74, 0x26, 0x26, 0x57, 0x28, 0x65, - 0x2c, 0x74, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x26, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, - 0x6e, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x73, 0x62, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x2c, 0x7a, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x29, 0x29, 
0x7d, 0x2c, 0x71, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x5f, 0x76, 0x26, 0x26, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x74, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, - 0x2c, 0x7a, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x29, 0x7d, 0x2c, 0x71, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, - 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x6a, 0x2c, 0x45, 0x3d, 0x5b, 0x5d, - 0x2c, 0x48, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x50, 0x72, - 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x3f, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, - 0x74, 0x68, 0x65, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x50, 0x72, - 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, - 0x65, 0x28, 0x29, 0x29, 0x3a, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, - 0x6f, 0x75, 0x74, 0x2c, 0x50, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, - 0x2d, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, 0x7d, 0x2c, - 0x4a, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x2c, 0x4e, 0x3d, 0x30, 0x2c, - 0x24, 0x3d, 0x65, 0x74, 0x28, 0x21, 0x31, 0x29, 0x2c, 0x44, 0x3d, 0x65, - 0x74, 0x28, 0x21, 0x30, 0x29, 0x2c, 0x54, 0x3d, 0x30, 0x3b, 0x76, 0x61, - 0x72, 0x20, 0x61, 0x74, 0x2c, 0x70, 0x74, 0x2c, 0x64, 0x74, 0x2c, 0x76, - 0x74, 0x2c, 0x79, 0x74, 0x3d, 0x30, 0x2c, 0x6d, 0x74, 0x3d, 0x5b, 0x5d, - 0x2c, 0x67, 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x62, 0x74, 0x3d, 0x77, 0x2c, - 0x6b, 0x74, 0x3d, 0x62, 0x74, 0x2e, 0x5f, 0x5f, 0x62, 0x2c, 0x53, 0x74, - 0x3d, 0x62, 0x74, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x77, 0x74, 0x3d, 0x62, - 0x74, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x2c, 0x78, 0x74, 0x3d, - 0x62, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2c, 0x43, 0x74, 0x3d, 0x62, 0x74, - 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x2c, 0x45, 0x74, 0x3d, - 0x62, 0x74, 0x2e, 0x5f, 0x5f, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x55, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x62, - 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x62, 0x74, 0x2e, 0x5f, 0x5f, - 0x68, 0x28, 0x70, 0x74, 0x2c, 0x74, 0x2c, 0x79, 0x74, 0x7c, 0x7c, 0x6e, - 0x29, 0x2c, 0x79, 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x65, - 0x3d, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x7c, 0x7c, 0x28, 0x70, 0x74, - 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x7b, 0x5f, 0x5f, 0x3a, 0x5b, 0x5d, 0x2c, - 0x5f, 0x5f, 0x68, 0x3a, 0x5b, 0x5d, 0x7d, 0x29, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x3e, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, - 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x7b, 0x5f, 0x5f, 0x56, 0x3a, 0x67, - 0x74, 0x7d, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x5b, 0x74, 0x5d, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x48, 0x74, 0x28, - 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x74, - 0x3d, 0x31, 0x2c, 0x50, 0x74, 0x28, 0x47, 0x74, 0x2c, 0x74, 0x29, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x50, 0x74, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 
0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, - 0x3d, 0x55, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x32, 0x29, 0x3b, - 0x69, 0x66, 0x28, 0x5f, 0x2e, 0x74, 0x3d, 0x74, 0x2c, 0x21, 0x5f, 0x2e, - 0x5f, 0x5f, 0x63, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x5f, 0x5f, 0x3d, 0x5b, - 0x65, 0x3f, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x47, 0x74, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x4e, 0x3f, 0x5f, 0x2e, 0x5f, 0x5f, - 0x4e, 0x5b, 0x30, 0x5d, 0x3a, 0x5f, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, - 0x2c, 0x65, 0x3d, 0x5f, 0x2e, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x3b, - 0x6e, 0x21, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x5f, 0x5f, - 0x4e, 0x3d, 0x5b, 0x65, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x5b, 0x31, 0x5d, - 0x5d, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x73, 0x65, 0x74, 0x53, - 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x5d, 0x2c, - 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x70, 0x74, 0x2c, 0x21, 0x70, 0x74, - 0x2e, 0x75, 0x29, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, - 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, - 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, - 0x63, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, - 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x7d, 0x29, 0x29, 0x3b, 0x69, 0x66, - 0x28, 0x69, 0x2e, 0x65, 0x76, 0x65, 0x72, 0x79, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x7d, 0x29, - 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x6f, 0x7c, 0x7c, - 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x72, - 0x3d, 0x21, 0x31, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, - 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x3b, 0x74, 0x2e, - 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x2c, 0x74, 0x2e, 0x5f, - 0x5f, 0x4e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x21, - 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x28, - 0x72, 0x3d, 0x21, 0x30, 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x21, 0x28, - 0x21, 0x72, 0x26, 0x26, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x3d, 0x3d, 0x3d, 0x74, 0x29, 0x26, 0x26, 0x28, 0x21, - 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x3b, - 0x70, 0x74, 0x2e, 0x75, 0x3d, 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, - 0x6f, 0x3d, 0x70, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x2c, 0x72, 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x3b, 
0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x2c, 0x69, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6f, 0x3d, - 0x5f, 0x7d, 0x72, 0x26, 0x26, 0x72, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, - 0x2c, 0x70, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, - 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x3d, 0x69, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, - 0x2e, 0x5f, 0x5f, 0x4e, 0x7c, 0x7c, 0x5f, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4e, 0x74, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x55, 0x74, - 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x33, 0x29, 0x3b, 0x21, 0x62, 0x74, - 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x42, 0x74, 0x28, 0x65, 0x2e, 0x5f, - 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, - 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, - 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x24, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, - 0x72, 0x20, 0x65, 0x3d, 0x55, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, - 0x34, 0x29, 0x3b, 0x21, 0x62, 0x74, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, - 0x42, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, - 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, - 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, - 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x44, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x35, 0x2c, 0x46, 0x74, - 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x3a, 0x74, 0x7d, 0x7d, 0x29, 0x2c, 0x5b, 0x5d, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x79, 0x74, 0x3d, 0x36, - 0x2c, 0x24, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, - 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x3f, 0x28, 0x74, 0x28, 0x6e, 0x28, - 0x29, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x6e, - 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x29, 0x3a, 0x74, 0x3f, 0x28, 0x74, 0x2e, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x28, 0x29, 0x2c, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x29, 0x3a, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x3d, 0x3d, 0x65, 0x3f, 0x65, 0x3a, 0x65, 0x2e, 0x63, 0x6f, 0x6e, 0x63, - 0x61, 0x74, 0x28, 0x74, 
0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x46, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x55, 0x74, 0x28, 0x61, 0x74, 0x2b, - 0x2b, 0x2c, 0x37, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x42, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x3f, - 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x74, 0x28, 0x29, 0x2c, 0x65, - 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, - 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x56, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x79, 0x74, 0x3d, 0x38, 0x2c, 0x46, 0x74, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, 0x29, 0x2c, 0x6e, 0x29, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x74, 0x28, 0x74, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, - 0x5d, 0x2c, 0x65, 0x3d, 0x55, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, - 0x39, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, - 0x63, 0x3d, 0x74, 0x2c, 0x6e, 0x3f, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, - 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, - 0x3d, 0x21, 0x30, 0x2c, 0x6e, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x70, 0x74, - 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3a, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x74, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x62, 0x74, 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, - 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x26, 0x26, 0x62, 0x74, - 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x28, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x57, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x55, 0x74, - 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x31, 0x30, 0x29, 0x2c, 0x65, 0x3d, - 0x48, 0x74, 0x28, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x70, 0x74, 0x2e, 0x63, 0x6f, - 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, - 0x74, 0x63, 0x68, 0x7c, 0x7c, 0x28, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, - 0x63, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x2c, 0x5f, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x6e, - 0x2e, 0x5f, 0x5f, 0x28, 0x74, 0x2c, 0x5f, 0x29, 0x2c, 0x65, 0x5b, 0x31, - 0x5d, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x2c, 0x5b, 0x65, 0x5b, 0x30, 0x5d, - 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, - 0x7d, 0x5d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x4f, 0x74, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3d, 0x55, - 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x31, 0x31, 0x29, 0x3b, 0x69, - 0x66, 0x28, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x7b, 0x66, 0x6f, 0x72, - 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x5f, 0x5f, - 0x76, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, - 0x21, 0x6e, 0x2e, 0x5f, 
0x5f, 0x6d, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, 0x29, 0x6e, 0x3d, 0x6e, - 0x2e, 0x5f, 0x5f, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x6e, 0x2e, - 0x5f, 0x5f, 0x6d, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x3d, - 0x5b, 0x30, 0x2c, 0x30, 0x5d, 0x29, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, - 0x22, 0x50, 0x22, 0x2b, 0x65, 0x5b, 0x30, 0x5d, 0x2b, 0x22, 0x2d, 0x22, - 0x2b, 0x65, 0x5b, 0x31, 0x5d, 0x2b, 0x2b, 0x7d, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, 0x74, 0x28, 0x29, 0x7b, 0x66, 0x6f, - 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, 0x74, 0x3d, 0x6d, 0x74, - 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, 0x28, 0x29, 0x3b, 0x29, 0x69, 0x66, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x50, 0x26, 0x26, 0x74, 0x2e, 0x5f, 0x5f, - 0x48, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, - 0x6a, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, - 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x71, 0x74, - 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, - 0x5b, 0x5d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, 0x7b, - 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x2c, 0x62, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6e, 0x2c, 0x74, 0x2e, - 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x62, 0x74, 0x2e, 0x5f, 0x5f, 0x62, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x70, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6b, 0x74, 0x26, - 0x26, 0x6b, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x62, 0x74, 0x2e, 0x5f, - 0x5f, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x74, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, - 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x5f, 0x5f, 0x6d, 0x26, - 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x6d, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, - 0x6b, 0x2e, 0x5f, 0x5f, 0x6d, 0x29, 0x2c, 0x45, 0x74, 0x26, 0x26, 0x45, - 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x2c, 0x62, 0x74, 0x2e, 0x5f, - 0x5f, 0x72, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x53, 0x74, 0x26, 0x26, 0x53, 0x74, 0x28, 0x74, 0x29, - 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, - 0x28, 0x70, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, - 0x5f, 0x48, 0x3b, 0x6e, 0x26, 0x26, 0x28, 0x64, 0x74, 0x3d, 0x3d, 0x3d, - 0x70, 0x74, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, - 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, - 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x2c, 0x74, 0x2e, 0x5f, - 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, - 0x74, 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x29, - 0x29, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, - 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x6a, 0x74, 0x29, 0x2c, 0x6e, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, - 0x71, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x29, 0x29, 0x2c, 0x64, 0x74, 0x3d, 0x70, - 0x74, 0x7d, 0x2c, 0x62, 
0x74, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x77, 0x74, 0x26, 0x26, 0x77, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, - 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x6e, - 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x6e, 0x2e, - 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x26, 0x26, 0x28, 0x31, 0x21, 0x3d, 0x3d, 0x6d, 0x74, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x26, 0x26, 0x76, 0x74, 0x3d, - 0x3d, 0x3d, 0x62, 0x74, 0x2e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, - 0x6d, 0x65, 0x7c, 0x7c, 0x28, 0x28, 0x76, 0x74, 0x3d, 0x62, 0x74, 0x2e, - 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x29, 0x7c, 0x7c, - 0x49, 0x74, 0x29, 0x28, 0x4c, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, - 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x69, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, - 0x5f, 0x48, 0x3d, 0x74, 0x2e, 0x69, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, - 0x56, 0x21, 0x3d, 0x3d, 0x67, 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, - 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x2c, 0x74, 0x2e, 0x69, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, - 0x56, 0x3d, 0x67, 0x74, 0x7d, 0x29, 0x29, 0x29, 0x2c, 0x64, 0x74, 0x3d, - 0x70, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x2c, 0x62, 0x74, 0x2e, - 0x5f, 0x5f, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, - 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, - 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x6a, 0x74, 0x29, 0x2c, - 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, - 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x7c, 0x7c, 0x71, 0x74, 0x28, - 0x74, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, - 0x72, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, - 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x68, - 0x3d, 0x5b, 0x5d, 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x5b, 0x5d, - 0x2c, 0x62, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x72, 0x2c, 0x74, 0x2e, - 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x78, 0x74, 0x26, - 0x26, 0x78, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x2c, 0x62, 0x74, - 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x43, 0x74, 0x26, - 0x26, 0x43, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, - 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x65, 0x26, 0x26, - 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, - 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, - 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x6a, 0x74, 0x28, 0x74, 0x29, 0x7d, - 0x63, 0x61, 0x74, 0x63, 
0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x3d, 0x74, - 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x26, 0x26, 0x62, 0x74, 0x2e, - 0x5f, 0x5f, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x29, - 0x29, 0x7d, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x52, 0x74, 0x3d, 0x22, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, - 0x70, 0x65, 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, - 0x6d, 0x65, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x49, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, - 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x63, 0x6c, 0x65, 0x61, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, - 0x74, 0x28, 0x5f, 0x29, 0x2c, 0x52, 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, - 0x63, 0x65, 0x6c, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, - 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, - 0x5f, 0x3d, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, - 0x28, 0x65, 0x2c, 0x31, 0x30, 0x30, 0x29, 0x3b, 0x52, 0x74, 0x26, 0x26, - 0x28, 0x6e, 0x3d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, - 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, - 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x3d, 0x70, 0x74, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, - 0x3b, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, - 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x2c, 0x65, 0x28, 0x29, 0x29, 0x2c, 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, - 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, - 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x42, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x7c, 0x7c, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, - 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, - 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, - 0x3d, 0x3d, 0x74, 0x5b, 0x65, 0x5d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x47, 0x74, 0x28, 0x74, 0x2c, - 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x7a, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x77, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, - 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x77, - 0x5b, 0x74, 0x5d, 0x7c, 0x7c, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, - 0x29, 0x29, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x4a, 0x74, 0x2c, 0x4b, 0x74, - 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x74, - 0x28, 0x74, 0x29, 0x7b, 
0x69, 0x66, 0x28, 0x4b, 0x74, 0x29, 0x4b, 0x74, - 0x28, 0x29, 0x3b, 0x4b, 0x74, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, - 0x28, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x58, 0x74, 0x28, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, 0x7d, 0x29, - 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x5a, 0x74, 0x28, - 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, - 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x46, 0x74, 0x28, - 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, - 0x65, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, - 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, - 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, - 0x61, 0x6b, 0x7d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, - 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x74, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x43, 0x28, 0x65, 0x2e, 0x70, 0x65, - 0x65, 0x6b, 0x28, 0x29, 0x29, 0x26, 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3a, 0x74, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, - 0x65, 0x29, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, - 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, - 0x28, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, - 0x7d, 0x29, 0x7d, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x76, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, - 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, - 0x3d, 0x3d, 0x74, 0x3f, 0x30, 0x3a, 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, - 0x3f, 0x22, 0x22, 0x3a, 0x74, 0x7c, 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, - 0x2c, 0x5b, 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x58, 0x74, 0x2e, 0x64, - 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, - 0x5f, 0x73, 0x74, 0x22, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, - 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, - 0x74, 0x69, 0x65, 0x73, 0x28, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2c, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, - 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, - 0x2c, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x58, 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, - 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, - 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x64, 0x61, 0x74, - 0x61, 0x3a, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, - 0x62, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, - 0x62, 0x6c, 0x65, 0x3a, 
0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x31, 0x7d, 0x7d, 0x29, 0x3b, 0x7a, 0x74, 0x28, 0x22, 0x5f, 0x5f, - 0x62, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, - 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x2c, 0x65, 0x3d, 0x6e, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, - 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, - 0x3d, 0x3d, 0x3d, 0x5f, 0x29, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, - 0x65, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, - 0x3b, 0x69, 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, - 0x63, 0x65, 0x6f, 0x66, 0x20, 0x66, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, - 0x74, 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, - 0x7d, 0x3b, 0x74, 0x5b, 0x5f, 0x5d, 0x3d, 0x69, 0x3b, 0x65, 0x5b, 0x5f, - 0x5d, 0x3d, 0x69, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, - 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x7a, 0x74, 0x28, 0x22, - 0x5f, 0x5f, 0x72, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, - 0x7b, 0x51, 0x74, 0x28, 0x29, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, - 0x5f, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x5f, - 0x29, 0x7b, 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, - 0x3b, 0x65, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, - 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, - 0x20, 0x6e, 0x3b, 0x6b, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, - 0x29, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, - 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x5f, 0x2e, - 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, - 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, - 0x29, 0x7d, 0x4a, 0x74, 0x3d, 0x5f, 0x3b, 0x51, 0x74, 0x28, 0x65, 0x29, - 0x3b, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x7a, 0x74, 0x28, 0x22, - 0x5f, 0x5f, 0x65, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, - 0x5f, 0x29, 0x3d, 0x3e, 0x7b, 0x51, 0x74, 0x28, 0x29, 0x3b, 0x4a, 0x74, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, - 0x65, 0x2c, 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x7a, 0x74, 0x28, 0x22, 0x64, - 0x69, 0x66, 0x66, 0x65, 0x64, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x3d, 0x3e, 0x7b, 0x51, 0x74, 0x28, 0x29, 0x3b, 0x4a, 0x74, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, - 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, - 0x70, 0x65, 0x26, 0x26, 0x28, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, - 0x29, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, - 0x5f, 0x6e, 0x70, 0x2c, 0x5f, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, - 0x73, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, - 0x6e, 0x3d, 0x65, 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x66, - 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, - 0x6e, 0x29, 0x7b, 0x6c, 
0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6e, 0x5b, 0x65, - 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x21, 0x28, 0x65, 0x20, 0x69, 0x6e, 0x20, - 0x74, 0x29, 0x29, 0x7b, 0x5f, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x6e, 0x5b, - 0x65, 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x7d, 0x65, - 0x6c, 0x73, 0x65, 0x7b, 0x6e, 0x3d, 0x7b, 0x7d, 0x3b, 0x65, 0x2e, 0x55, - 0x3d, 0x6e, 0x7d, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, - 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, - 0x3d, 0x6e, 0x5b, 0x69, 0x5d, 0x2c, 0x72, 0x3d, 0x74, 0x5b, 0x69, 0x5d, - 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, - 0x3d, 0x6f, 0x29, 0x7b, 0x6f, 0x3d, 0x59, 0x74, 0x28, 0x65, 0x2c, 0x69, - 0x2c, 0x72, 0x2c, 0x5f, 0x29, 0x3b, 0x6e, 0x5b, 0x69, 0x5d, 0x3d, 0x6f, - 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, - 0x5f, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, 0x74, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x69, 0x3d, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, - 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, - 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, - 0x65, 0x6e, 0x74, 0x2c, 0x6f, 0x3d, 0x63, 0x28, 0x65, 0x29, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, - 0x29, 0x3d, 0x3e, 0x7b, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, - 0x74, 0x3b, 0x5f, 0x3d, 0x6e, 0x7d, 0x2c, 0x64, 0x3a, 0x6b, 0x28, 0x28, - 0x29, 0x3d, 0x3e, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, - 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, - 0x65, 0x29, 0x7b, 0x5f, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x69, 0x66, - 0x28, 0x69, 0x29, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x65, - 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, - 0x2c, 0x65, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, - 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, - 0x74, 0x65, 0x28, 0x6e, 0x29, 0x7d, 0x7d, 0x29, 0x7d, 0x7d, 0x7a, 0x74, - 0x28, 0x22, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, - 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, - 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3b, 0x69, - 0x66, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, - 0x3d, 0x74, 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, - 0x2e, 0x55, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x66, 0x6f, - 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x20, 0x69, 0x6e, 0x20, 0x6e, - 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x5b, 0x74, 0x5d, - 0x3b, 0x69, 0x66, 0x28, 0x65, 0x29, 0x65, 0x2e, 0x64, 0x28, 0x29, 0x7d, - 0x7d, 0x7d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x6c, 0x65, 0x74, 0x20, - 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x74, - 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, - 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, - 0x2e, 0x5f, 0x5f, 0x24, 
-  [... hex byte dump elided: the tail of the hex-encoded index_js array (an xxd -i style embedding of the server's minified Preact + @preact/signals + htm bundle; see index_js_len below). The decoded bytes implement the signal-aware shouldComponentUpdate, the useSignal/useComputed/useSignalEffect hooks, the htm tagged-template parser, and the bundle's final export list (Component, Fragment, Signal, batch, cloneElement, computed, createContext, createElement, createRef, effect, h, html, hydrate, isValidElement, options, render, signal, toChildArray, untracked, useCallback, useComputed, useContext, useDebugValue, useEffect, useErrorBoundary, useId, useImperativeHandle, useLayoutEffect, useMemo, useReducer, useRef, useSignal, useSignalEffect, useState) ...]
-};
-unsigned int index_js_len = 22886;
diff --git a/examples/server/json-schema-to-grammar.mjs.hpp b/examples/server/json-schema-to-grammar.mjs.hpp
deleted file mode 100644
index da5035bf89f42..0000000000000
--- a/examples/server/json-schema-to-grammar.mjs.hpp
+++ /dev/null
@@ -1,1826 +0,0 @@
-unsigned char json_schema_to_grammar_mjs[] = {
-  [... hex byte dump elided: an xxd -i style embedding of json-schema-to-grammar.mjs. The decoded file opens with the comment "// WARNING: This file was ported from json_schema_to_grammar.py, please fix bugs / add features there first.", defines SPACE_RULE = '" "?', and begins the _buildRepetition(itemRule, minItems, maxItems, opts={}) helper, which emits `?` for {0,1} counts and `+` for {1,} counts when no separator rule is given ...]
-  [... hex byte dump elided: the remainder of _buildRepetition (a recursive optRepetitions helper unrolls bounded counts into nested "( item ... )?" groups, while unbounded counts append a "( sep item )*" operator), the BuiltinRule class (a grammar fragment plus its rule dependencies), UP_TO_15_DIGITS = _buildRepetition('[0-9]', 0, 15), and the first entries of the PRIMITIVE_RULES table (boolean, decimal-part, integral-part) ...]
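Since the byte dump is unreadable as-is, here is that repetition helper decoded back from the hex above. This is an editorial reconstruction (layout normalized, comments added), not part of the patch text itself:

```js
// Decoded from the hex dump above. Builds a GBNF repetition for
// itemRule{minItems,maxItems}, optionally joined by a separator rule.
function _buildRepetition(itemRule, minItems, maxItems, opts = {}) {
  const separatorRule = opts.separatorRule ?? '';
  const itemRuleIsLiteral = opts.itemRuleIsLiteral ?? false;

  if (separatorRule === '') {
    if (minItems === 0 && maxItems === 1) {
      return `${itemRule}?`;                 // X{0,1} -> X?
    } else if (minItems === 1 && maxItems === undefined) {
      return `${itemRule}+`;                 // X{1,}  -> X+
    }
  }

  let result = '';
  if (minItems > 0) {
    if (itemRuleIsLiteral && separatorRule === '') {
      // Repeat the quoted literal's contents inside one pair of quotes.
      result = `"${itemRule.slice(1, -1).repeat(minItems)}"`;
    } else {
      result = Array.from({ length: minItems }, () => itemRule)
        .join(separatorRule !== '' ? ` ${separatorRule} ` : ' ');
    }
  }

  // Up to upToN additional optional occurrences, as nested "( item ... )?".
  const optRepetitions = (upToN, prefixWithSep = false) => {
    const content = separatorRule !== '' && prefixWithSep
      ? `${separatorRule} ${itemRule}` : itemRule;
    if (upToN === 0) {
      return '';
    } else if (upToN === 1) {
      return `(${content})?`;
    } else if (separatorRule !== '' && !prefixWithSep) {
      return `(${content} ${optRepetitions(upToN - 1, true)})?`;
    } else {
      return Array.from({ length: upToN }, () => `(${content}`).join(' ').trim()
        + Array.from({ length: upToN }, () => ')?').join('');
    }
  };

  if (minItems > 0 && maxItems !== minItems) {
    result += ' ';
  }

  if (maxItems !== undefined) {
    result += optRepetitions(maxItems - minItems, minItems > 0);
  } else {
    const itemOperator = `(${separatorRule !== '' ? separatorRule + ' ' : ''}${itemRule})`;
    if (minItems === 0 && separatorRule !== '') {
      result = `(${itemRule} ${itemOperator}*)?`;
    } else {
      result += `${itemOperator}*`;
    }
  }

  return result;
}

// e.g. _buildRepetition('[0-9]', 0, 3)  ->  '([0-9] ([0-9] ([0-9])?)?)?'
```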
-  [... hex byte dump elided: the remaining PRIMITIVE_RULES entries (number, integer, value, object, array, uuid, char, string, null), a TODO noting that the "uri" and "email" string formats are unsupported, the STRING_FORMAT_RULES table (date, time, date-time and their quoted *-string variants), RESERVED_NAMES, the INVALID_RULE_CHARS_RE, GRAMMAR_LITERAL_ESCAPE_RE and GRAMMAR_RANGE_LITERAL_ESCAPE_RE regexes with the GRAMMAR_LITERAL_ESCAPES map, the NON_LITERAL_SET and ESCAPED_IN_REGEXPS_BUT_NOT_IN_LITERALS character sets, and the opening of the exported SchemaConverter class ...]
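An excerpt of the PRIMITIVE_RULES table, decoded from the hex above; again an editorial reconstruction, showing only a few representative entries:

```js
// Each JSON type maps to a GBNF production plus the rules it depends on.
const PRIMITIVE_RULES = {
  boolean : new BuiltinRule('("true" | "false") space', []),
  object  : new BuiltinRule(
    '"{" space ( string ":" space value ("," space string ":" space value)* )? "}" space',
    ['string', 'value']),
  array   : new BuiltinRule(
    '"[" space ( value ("," space value)* )? "]" space',
    ['value']),
  null    : new BuiltinRule('"null" space', []),
  // number, integer, string, char, uuid, value elided; see the dump above.
};
```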
-  [... hex byte dump elided: the SchemaConverter constructor (reads options.prop_order, options.allow_fetch and options.dotall, seeds this._rules with the space rule, and initializes this._refs / this._refsBeingResolved), the _formatLiteral and _formatRangeChar escape helpers, the _addRule naming logic, async resolveRefs (recursively visits the schema; remote "https://..." $refs are fetched via node-fetch only when allow_fetch is set, local "#/..." refs are resolved against the schema by walking the fragment's selectors, and anything else throws "Unsupported ref"), and _generateUnionRule, which visits each alternative schema and joins the resulting rule names with " | " ...]
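One detail worth surfacing from the dump is how rule names are deduplicated. Decoded from the hex above (a method of SchemaConverter, shown out of its class; comments added):

```js
// Registers `rule` under a sanitized name. Re-registering an identical rule
// reuses the existing key; a different rule that collides on the same name
// gets a numeric suffix (name0, name1, ...).
_addRule(name, rule) {
  let escName = name.replace(INVALID_RULE_CHARS_RE, '-'); // strips [^\dA-Za-z-]
  let key = escName;

  if (escName in this._rules) {
    if (this._rules[escName] === rule) {
      return key;
    }

    let i = 0;
    while ((`${escName}${i}` in this._rules) && (this._rules[`${escName}${i}`] !== rule)) {
      i += 1;
    }
    key = `${escName}${i}`;
  }

  this._rules[key] = rule;
  return key;
}
```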
-  [... hex byte dump elided: the beginning of _visitPattern, which converts a regex pattern (required to start with "^" and end with "$") into GBNF. A getDot helper translates "." (honoring the dotall option), toRule re-quotes literal fragments, and a recursive transform walks the pattern, grouping "(...)" sequences, merging adjacent literals, applying repetition operators to the last item, and throwing on "(?..." group syntax and on unbalanced parentheses. The excerpt breaks off mid-function ...]
0x69, 0x6e, 0x53, 0x65, 0x71, 0x28, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x20, - 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x5b, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, - 0x20, 0x73, 0x71, 0x75, 0x61, 0x72, 0x65, 0x42, 0x72, 0x61, 0x63, 0x6b, - 0x65, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x63, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, - 0x31, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x69, 0x20, 0x3c, 0x20, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, - 0x74, 0x74, 0x65, 0x72, 0x6e, 0x5b, 0x69, 0x5d, 0x20, 0x21, 0x3d, 0x3d, - 0x20, 0x27, 0x5d, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x5b, 0x69, 0x5d, 0x20, 0x3d, - 0x3d, 0x3d, 0x20, 0x27, 0x5c, 0x5c, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x73, 0x71, 0x75, 0x61, 0x72, 0x65, 0x42, 0x72, 0x61, 0x63, 0x6b, - 0x65, 0x74, 0x73, 0x20, 0x2b, 0x3d, 0x20, 0x70, 0x61, 0x74, 0x74, 0x65, - 0x72, 0x6e, 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, 0x28, 0x69, 0x2c, 0x20, - 0x69, 0x20, 0x2b, 0x20, 0x32, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, - 0x2b, 0x3d, 0x20, 0x32, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x71, 0x75, 0x61, 0x72, 0x65, 0x42, - 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x20, 0x2b, 0x3d, 0x20, 0x70, - 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x5b, 0x69, 0x5d, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x69, 0x20, 0x3e, 0x3d, 0x20, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, - 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x55, 0x6e, 0x62, - 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x64, 0x20, 0x73, 0x71, 0x75, 0x61, - 0x72, 0x65, 0x20, 0x62, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x3b, - 0x20, 0x73, 0x74, 0x61, 0x72, 0x74, 0x20, 0x3d, 0x20, 0x24, 0x7b, 0x73, - 0x74, 0x61, 0x72, 0x74, 0x7d, 0x2c, 0x20, 0x69, 0x20, 0x3d, 0x20, 0x24, - 0x7b, 0x69, 0x7d, 0x2c, 0x20, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, - 0x20, 0x3d, 0x20, 0x24, 0x7b, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, - 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x71, 0x75, 0x61, 0x72, 0x65, 0x42, 0x72, 0x61, - 0x63, 0x6b, 0x65, 0x74, 0x73, 0x20, 0x2b, 0x3d, 0x20, 0x27, 0x5d, 0x27, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 
0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x71, 0x2e, 0x70, 0x75, - 0x73, 0x68, 0x28, 0x5b, 0x73, 0x71, 0x75, 0x61, 0x72, 0x65, 0x42, 0x72, - 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x2c, 0x20, 0x66, 0x61, 0x6c, 0x73, - 0x65, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x63, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x7c, 0x27, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x65, 0x71, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x5b, 0x27, 0x7c, 0x27, - 0x2c, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x5d, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, - 0x3d, 0x20, 0x31, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x63, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x2a, 0x27, 0x20, 0x7c, 0x7c, - 0x20, 0x63, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x2b, 0x27, 0x20, 0x7c, - 0x7c, 0x20, 0x63, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x3f, 0x27, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x73, 0x65, 0x71, 0x5b, 0x73, 0x65, 0x71, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x20, 0x3d, 0x20, 0x5b, - 0x74, 0x6f, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x73, 0x65, 0x71, 0x5b, 0x73, - 0x65, 0x71, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, - 0x31, 0x5d, 0x29, 0x20, 0x2b, 0x20, 0x63, 0x2c, 0x20, 0x66, 0x61, 0x6c, - 0x73, 0x65, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x20, 0x3d, 0x3d, 0x3d, 0x20, - 0x27, 0x7b, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x75, 0x72, - 0x6c, 0x79, 0x42, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x20, 0x3d, - 0x20, 0x63, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, - 0x65, 0x20, 0x28, 0x69, 0x20, 0x3c, 0x20, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, - 0x5b, 0x69, 0x5d, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x27, 0x7d, 0x27, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x6c, 0x79, 0x42, 0x72, 0x61, 0x63, - 0x6b, 0x65, 0x74, 0x73, 0x20, 0x2b, 0x3d, 0x20, 0x70, 0x61, 0x74, 0x74, - 0x65, 0x72, 0x6e, 0x5b, 0x69, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, - 0x20, 0x31, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x20, 0x3e, 0x3d, 0x20, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, - 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, - 0x28, 0x60, 0x55, 0x6e, 0x62, 0x61, 0x6c, 0x61, 0x6e, 0x63, 0x65, 0x64, - 0x20, 0x63, 0x75, 0x72, 0x6c, 0x79, 0x20, 0x62, 0x72, 0x61, 0x63, 0x6b, - 0x65, 0x74, 0x73, 0x3b, 0x20, 0x73, 
0x74, 0x61, 0x72, 0x74, 0x20, 0x3d, - 0x20, 0x24, 0x7b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x7d, 0x2c, 0x20, 0x69, - 0x20, 0x3d, 0x20, 0x24, 0x7b, 0x69, 0x7d, 0x2c, 0x20, 0x70, 0x61, 0x74, - 0x74, 0x65, 0x72, 0x6e, 0x20, 0x3d, 0x20, 0x24, 0x7b, 0x70, 0x61, 0x74, - 0x74, 0x65, 0x72, 0x6e, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x6c, 0x79, - 0x42, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x20, 0x2b, 0x3d, 0x20, - 0x27, 0x7d, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x6e, 0x75, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x63, 0x75, - 0x72, 0x6c, 0x79, 0x42, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x2e, - 0x73, 0x6c, 0x69, 0x63, 0x65, 0x28, 0x31, 0x2c, 0x20, 0x2d, 0x31, 0x29, - 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x2c, 0x27, 0x29, 0x2e, - 0x6d, 0x61, 0x70, 0x28, 0x73, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x2e, 0x74, - 0x72, 0x69, 0x6d, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6d, 0x69, - 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6e, 0x75, 0x6d, 0x73, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, - 0x31, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x6e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x20, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x73, 0x65, 0x49, 0x6e, 0x74, - 0x28, 0x6e, 0x75, 0x6d, 0x73, 0x5b, 0x30, 0x5d, 0x2c, 0x20, 0x31, 0x30, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6d, 0x61, 0x78, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x20, - 0x3d, 0x20, 0x6d, 0x69, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, - 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6e, - 0x75, 0x6d, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x21, - 0x3d, 0x3d, 0x20, 0x32, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, - 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, - 0x72, 0x28, 0x60, 0x49, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x20, 0x71, - 0x75, 0x61, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x20, 0x24, 0x7b, - 0x63, 0x75, 0x72, 0x6c, 0x79, 0x42, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, - 0x73, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x6e, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6d, 0x73, 0x5b, - 0x30, 0x5d, 0x20, 0x3f, 0x20, 0x70, 0x61, 0x72, 0x73, 0x65, 0x49, 0x6e, - 0x74, 0x28, 0x6e, 0x75, 0x6d, 0x73, 0x5b, 0x30, 0x5d, 0x2c, 0x20, 0x31, - 0x30, 0x29, 0x20, 0x3a, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x78, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6d, 0x73, 0x5b, - 0x31, 0x5d, 0x20, 0x3f, 0x20, 0x70, 
0x61, 0x72, 0x73, 0x65, 0x49, 0x6e, - 0x74, 0x28, 0x6e, 0x75, 0x6d, 0x73, 0x5b, 0x31, 0x5d, 0x2c, 0x20, 0x31, - 0x30, 0x29, 0x20, 0x3a, 0x20, 0x49, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, - 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x5b, 0x73, 0x75, 0x62, 0x2c, 0x20, - 0x73, 0x75, 0x62, 0x49, 0x73, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, - 0x5d, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x71, 0x5b, 0x73, 0x65, 0x71, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x21, 0x73, 0x75, 0x62, 0x49, 0x73, 0x4c, 0x69, - 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, - 0x20, 0x69, 0x64, 0x20, 0x3d, 0x20, 0x73, 0x75, 0x62, 0x52, 0x75, 0x6c, - 0x65, 0x49, 0x64, 0x73, 0x5b, 0x73, 0x75, 0x62, 0x5d, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x69, 0x64, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x75, 0x6e, - 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x60, 0x24, 0x7b, 0x6e, - 0x61, 0x6d, 0x65, 0x7d, 0x2d, 0x24, 0x7b, 0x4f, 0x62, 0x6a, 0x65, 0x63, - 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x28, 0x73, 0x75, 0x62, 0x52, 0x75, - 0x6c, 0x65, 0x49, 0x64, 0x73, 0x29, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x20, 0x2b, 0x20, 0x31, 0x7d, 0x60, 0x2c, 0x20, 0x73, 0x75, 0x62, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x52, 0x75, 0x6c, 0x65, - 0x49, 0x64, 0x73, 0x5b, 0x73, 0x75, 0x62, 0x5d, 0x20, 0x3d, 0x20, 0x69, - 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x20, 0x3d, 0x20, 0x69, - 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x73, 0x65, 0x71, 0x5b, 0x73, 0x65, 0x71, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x20, 0x3d, 0x20, - 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x52, 0x65, 0x70, 0x65, - 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x73, 0x75, 0x62, 0x49, 0x73, - 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x20, 0x3f, 0x20, 0x60, 0x22, - 0x24, 0x7b, 0x73, 0x75, 0x62, 0x7d, 0x22, 0x60, 0x20, 0x3a, 0x20, 0x73, - 0x75, 0x62, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x2c, 0x20, - 0x7b, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, 0x6c, 0x65, 0x49, 0x73, 0x4c, - 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x3a, 0x20, 0x73, 0x75, 0x62, 0x49, - 0x73, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x7d, 0x29, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x66, 0x61, 0x6c, 0x73, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 
0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x74, 0x20, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x20, 0x3d, 0x20, - 0x27, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x69, 0x20, 0x3c, - 0x20, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x5b, 0x69, - 0x5d, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x5c, 0x5c, 0x27, 0x20, 0x26, - 0x26, 0x20, 0x69, 0x20, 0x3c, 0x20, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x20, 0x2d, 0x20, 0x31, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x65, 0x78, 0x74, 0x20, 0x3d, 0x20, 0x70, - 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x5b, 0x69, 0x20, 0x2b, 0x20, 0x31, - 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x45, 0x53, 0x43, - 0x41, 0x50, 0x45, 0x44, 0x5f, 0x49, 0x4e, 0x5f, 0x52, 0x45, 0x47, 0x45, - 0x58, 0x50, 0x53, 0x5f, 0x42, 0x55, 0x54, 0x5f, 0x4e, 0x4f, 0x54, 0x5f, - 0x49, 0x4e, 0x5f, 0x4c, 0x49, 0x54, 0x45, 0x52, 0x41, 0x4c, 0x53, 0x2e, - 0x68, 0x61, 0x73, 0x28, 0x6e, 0x65, 0x78, 0x74, 0x29, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, - 0x20, 0x2b, 0x3d, 0x20, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x5b, - 0x69, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, - 0x20, 0x31, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x69, 0x74, 0x65, 0x72, - 0x61, 0x6c, 0x20, 0x2b, 0x3d, 0x20, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, - 0x6e, 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, 0x28, 0x69, 0x2c, 0x20, 0x69, - 0x20, 0x2b, 0x20, 0x32, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x20, 0x2b, 0x3d, 0x20, 0x32, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, - 0x74, 0x74, 0x65, 0x72, 0x6e, 0x5b, 0x69, 0x5d, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x27, 0x22, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x69, - 0x74, 0x65, 0x72, 0x61, 0x6c, 0x20, 0x2b, 0x3d, 0x20, 0x27, 0x5c, 0x5c, - 0x22, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x21, 0x4e, 0x4f, 0x4e, 0x5f, 0x4c, 0x49, 0x54, 0x45, 0x52, 0x41, - 0x4c, 0x5f, 0x53, 0x45, 0x54, 0x2e, 
0x68, 0x61, 0x73, 0x28, 0x70, 0x61, - 0x74, 0x74, 0x65, 0x72, 0x6e, 0x5b, 0x69, 0x5d, 0x29, 0x20, 0x26, 0x26, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, 0x69, 0x20, 0x3d, 0x3d, 0x3d, 0x20, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x20, 0x7c, - 0x7c, 0x20, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x20, 0x3d, 0x3d, - 0x3d, 0x20, 0x27, 0x27, 0x20, 0x7c, 0x7c, 0x20, 0x70, 0x61, 0x74, 0x74, - 0x65, 0x72, 0x6e, 0x5b, 0x69, 0x20, 0x2b, 0x20, 0x31, 0x5d, 0x20, 0x3d, - 0x3d, 0x3d, 0x20, 0x27, 0x2e, 0x27, 0x20, 0x7c, 0x7c, 0x20, 0x21, 0x4e, - 0x4f, 0x4e, 0x5f, 0x4c, 0x49, 0x54, 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x53, - 0x45, 0x54, 0x2e, 0x68, 0x61, 0x73, 0x28, 0x70, 0x61, 0x74, 0x74, 0x65, - 0x72, 0x6e, 0x5b, 0x69, 0x2b, 0x31, 0x5d, 0x29, 0x29, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x20, 0x2b, - 0x3d, 0x20, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x5b, 0x69, 0x5d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, - 0x72, 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6c, 0x69, - 0x74, 0x65, 0x72, 0x61, 0x6c, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x27, 0x27, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x71, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x5b, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x2c, 0x20, 0x74, - 0x72, 0x75, 0x65, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6a, 0x6f, 0x69, 0x6e, 0x53, 0x65, 0x71, 0x28, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x6e, - 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x22, 0x5c, 0x22, 0x5c, 0x5c, 0x5c, 0x22, - 0x5c, 0x22, 0x20, 0x22, 0x20, 0x2b, 0x20, 0x74, 0x6f, 0x52, 0x75, 0x6c, - 0x65, 0x28, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x28, - 0x29, 0x29, 0x20, 0x2b, 0x20, 0x22, 0x20, 0x5c, 0x22, 0x5c, 0x5c, 0x5c, - 0x22, 0x5c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x29, 0x0a, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x5f, 0x72, 0x65, 0x73, 0x6f, - 0x6c, 0x76, 0x65, 0x52, 0x65, 0x66, 0x28, 0x72, 0x65, 0x66, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x72, 0x65, - 0x66, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x66, 0x2e, - 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x2f, 0x27, 0x29, 0x2e, 0x70, - 0x6f, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x21, 0x28, 0x72, 0x65, 0x66, 0x4e, 0x61, 0x6d, 0x65, 0x20, - 0x69, 0x6e, 0x20, 0x74, 0x68, 0x69, 
0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, - 0x65, 0x73, 0x29, 0x20, 0x26, 0x26, 0x20, 0x21, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x72, 0x65, 0x66, 0x73, 0x42, 0x65, 0x69, 0x6e, 0x67, 0x52, - 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x2e, 0x68, 0x61, 0x73, 0x28, - 0x72, 0x65, 0x66, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x65, 0x66, 0x73, - 0x42, 0x65, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, - 0x64, 0x2e, 0x61, 0x64, 0x64, 0x28, 0x72, 0x65, 0x66, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x65, 0x66, 0x73, 0x5b, 0x72, 0x65, - 0x66, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x66, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x72, 0x65, 0x73, 0x6f, 0x6c, - 0x76, 0x65, 0x64, 0x2c, 0x20, 0x72, 0x65, 0x66, 0x4e, 0x61, 0x6d, 0x65, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x72, 0x65, 0x66, 0x73, 0x42, 0x65, 0x69, 0x6e, 0x67, - 0x52, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x2e, 0x64, 0x65, 0x6c, - 0x65, 0x74, 0x65, 0x28, 0x72, 0x65, 0x66, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x72, 0x65, 0x66, 0x4e, 0x61, 0x6d, 0x65, 0x3b, 0x0a, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x5f, 0x67, 0x65, 0x6e, 0x65, - 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, - 0x52, 0x75, 0x6c, 0x65, 0x28, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, - 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x28, 0x4a, 0x53, 0x4f, - 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, - 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, - 0x20, 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x66, 0x6f, - 0x72, 0x6d, 0x61, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, - 0x20, 0x3d, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x52, - 0x45, 0x53, 0x45, 0x52, 0x56, 0x45, 0x44, 0x5f, 0x4e, 0x41, 0x4d, 0x45, - 0x53, 0x20, 0x3f, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x2b, 0x20, 0x27, - 0x2d, 0x27, 0x20, 0x3a, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, - 0x20, 0x27, 0x27, 0x20, 0x3f, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, - 0x20, 0x3a, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x66, 0x20, - 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x24, 0x72, 0x65, - 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, - 0x65, 0x66, 0x20, 0x21, 0x3d, 0x3d, 
0x20, 0x75, 0x6e, 0x64, 0x65, 0x66, - 0x69, 0x6e, 0x65, 0x64, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, - 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x52, 0x65, - 0x66, 0x28, 0x72, 0x65, 0x66, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x6f, 0x6e, 0x65, 0x4f, 0x66, - 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x61, - 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, - 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x55, - 0x6e, 0x69, 0x6f, 0x6e, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x6e, 0x61, 0x6d, - 0x65, 0x2c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x6f, 0x6e, - 0x65, 0x4f, 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2e, 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, - 0x72, 0x61, 0x79, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, - 0x70, 0x65, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, - 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x55, 0x6e, - 0x69, 0x6f, 0x6e, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x6e, 0x61, 0x6d, 0x65, - 0x2c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, - 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x74, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x7b, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x20, 0x74, 0x20, 0x7d, 0x29, 0x29, - 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, - 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, - 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x67, - 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x73, 0x74, - 0x61, 0x6e, 0x74, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x29, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x27, 0x65, 0x6e, 0x75, 0x6d, 0x27, 0x20, 0x69, 0x6e, - 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, - 0x75, 0x6c, 0x65, 0x20, 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x76, 0x20, - 0x3d, 0x3e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x67, 0x65, 0x6e, - 0x65, 0x72, 0x61, 0x74, 0x65, 0x43, 
0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, - 0x74, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x76, 0x29, 0x29, 0x2e, 0x6a, 0x6f, - 0x69, 0x6e, 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, - 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, - 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x28, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, - 0x3d, 0x20, 0x75, 0x6e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x20, - 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, - 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x6f, 0x62, 0x6a, 0x65, 0x63, - 0x74, 0x27, 0x29, 0x20, 0x26, 0x26, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, 0x27, - 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x27, 0x20, - 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x20, 0x7c, 0x7c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, 0x27, 0x61, 0x64, 0x64, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, - 0x69, 0x65, 0x73, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x20, 0x26, 0x26, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x2e, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x50, - 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x20, 0x21, 0x3d, - 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x29, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x6e, - 0x65, 0x77, 0x20, 0x53, 0x65, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2e, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x20, 0x7c, - 0x7c, 0x20, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x65, - 0x72, 0x74, 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x4f, 0x62, 0x6a, 0x65, - 0x63, 0x74, 0x2e, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, 0x73, 0x28, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, - 0x74, 0x69, 0x65, 0x73, 0x20, 0x3f, 0x3f, 0x20, 0x7b, 0x7d, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, - 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, - 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x62, 0x75, 0x69, 0x6c, - 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x75, 0x6c, 0x65, 0x28, - 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x2c, 0x20, - 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x2c, 0x20, 0x6e, 0x61, - 0x6d, 0x65, 0x2c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x61, - 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, - 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x29, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, - 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x75, 0x6e, 0x64, 0x65, 0x66, 0x69, - 0x6e, 0x65, 0x64, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 
0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x6f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x27, 0x29, 0x20, 0x26, 0x26, 0x20, 0x27, - 0x61, 0x6c, 0x6c, 0x4f, 0x66, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x71, 0x75, - 0x69, 0x72, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x53, - 0x65, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, - 0x74, 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x61, - 0x64, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x28, 0x63, 0x6f, 0x6d, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2c, 0x20, 0x69, 0x73, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, - 0x64, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, - 0x66, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x2e, 0x24, 0x72, 0x65, 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x66, - 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x75, 0x6e, 0x64, 0x65, 0x66, 0x69, 0x6e, - 0x65, 0x64, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, - 0x65, 0x66, 0x73, 0x5b, 0x72, 0x65, 0x66, 0x5d, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x70, 0x72, - 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x27, 0x20, 0x69, 0x6e, - 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x66, 0x6f, 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x5b, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x70, - 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5d, 0x20, 0x6f, - 0x66, 0x20, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x65, 0x6e, 0x74, - 0x72, 0x69, 0x65, 0x73, 0x28, 0x63, 0x6f, 0x6d, 0x70, 0x53, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, - 0x65, 0x73, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x65, - 0x72, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x5b, - 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x70, 0x72, - 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5d, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x69, 0x73, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, - 0x65, 0x64, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x71, 0x75, - 0x69, 0x72, 0x65, 0x64, 0x2e, 0x61, 0x64, 0x64, 0x28, 0x70, 0x72, 0x6f, - 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x66, 0x6f, 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, - 0x20, 0x6f, 0x66, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x61, - 0x6c, 0x6c, 0x4f, 0x66, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x61, 0x6e, 0x79, - 0x4f, 0x66, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, - 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x74, 0x20, - 0x6f, 0x66, 0x20, 0x74, 0x2e, 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x61, 0x64, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x28, 0x74, 0x74, 0x2c, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x64, 0x64, 0x43, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x28, 0x74, 0x2c, 0x20, 0x74, 0x72, - 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, - 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, - 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x70, - 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x2c, 0x20, 0x72, - 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x2c, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x2c, 0x20, 0x2f, 0x2a, 0x20, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, - 0x6f, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, - 0x65, 0x73, 0x3d, 0x20, 0x2a, 0x2f, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, - 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x28, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x75, - 0x6e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x20, 0x7c, 0x7c, 0x20, - 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, - 0x3d, 0x3d, 0x20, 0x27, 0x61, 0x72, 0x72, 0x61, 0x79, 0x27, 0x29, 0x20, - 0x26, 0x26, 0x20, 0x28, 0x27, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x27, 0x20, - 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x20, 0x7c, 0x7c, - 0x20, 0x27, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x49, 0x74, 0x65, 0x6d, - 0x73, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x20, 0x3d, - 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x69, 0x74, 0x65, 0x6d, - 0x73, 0x20, 0x3f, 0x3f, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, - 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x41, - 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, - 0x28, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 
0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, - 0x75, 0x6c, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x27, - 0x22, 0x5b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x27, 0x20, - 0x2b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, - 0x28, 0x69, 0x74, 0x65, 0x6d, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, - 0x69, 0x74, 0x65, 0x6d, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, - 0x65, 0x20, 0x3f, 0x3f, 0x20, 0x27, 0x27, 0x7d, 0x24, 0x7b, 0x6e, 0x61, - 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x27, 0x2d, 0x27, 0x20, 0x3a, 0x20, 0x27, - 0x27, 0x7d, 0x74, 0x75, 0x70, 0x6c, 0x65, 0x2d, 0x24, 0x7b, 0x69, 0x7d, - 0x60, 0x29, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x20, 0x22, - 0x2c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x27, 0x29, 0x20, - 0x2b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x27, 0x20, 0x22, 0x5d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x52, - 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x69, 0x74, 0x65, - 0x6d, 0x73, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, - 0x3f, 0x3f, 0x20, 0x27, 0x27, 0x7d, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, - 0x20, 0x3f, 0x20, 0x27, 0x2d, 0x27, 0x20, 0x3a, 0x20, 0x27, 0x27, 0x7d, - 0x69, 0x74, 0x65, 0x6d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x69, - 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x2e, 0x6d, 0x69, 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, - 0x20, 0x7c, 0x7c, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x61, 0x78, - 0x49, 0x74, 0x65, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x2e, 0x6d, 0x61, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, - 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, - 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x22, 0x5b, 0x22, 0x20, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x20, 0x27, 0x20, 0x2b, 0x20, 0x5f, 0x62, 0x75, 0x69, 0x6c, - 0x64, 0x52, 0x65, 0x70, 0x65, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, - 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x2c, 0x20, - 0x6d, 0x61, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x73, - 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x75, 0x6c, 0x65, - 0x3a, 0x20, 0x27, 0x22, 0x2c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x27, 0x7d, 0x29, 0x20, 0x2b, 0x20, 0x27, 0x20, 0x22, 0x5d, 0x22, 0x20, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, - 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 
0x20, 0x28, 0x28, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, - 0x75, 0x6e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x20, 0x7c, 0x7c, - 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, - 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x27, - 0x29, 0x20, 0x26, 0x26, 0x20, 0x27, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, - 0x6e, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x76, - 0x69, 0x73, 0x69, 0x74, 0x50, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x28, - 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x61, 0x74, 0x74, 0x65, - 0x72, 0x6e, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x75, 0x6e, - 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x20, 0x7c, 0x7c, 0x20, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, - 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x29, 0x20, - 0x26, 0x26, 0x20, 0x2f, 0x5e, 0x75, 0x75, 0x69, 0x64, 0x5b, 0x31, 0x2d, - 0x35, 0x5d, 0x3f, 0x24, 0x2f, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x28, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, - 0x20, 0x7c, 0x7c, 0x20, 0x27, 0x27, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x50, 0x72, 0x69, - 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, - 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, - 0x3f, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, 0x3a, 0x20, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x50, 0x52, 0x49, - 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x53, - 0x5b, 0x27, 0x75, 0x75, 0x69, 0x64, 0x27, 0x5d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, - 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x28, 0x73, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x75, 0x6e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x20, 0x7c, - 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, - 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x27, 0x29, 0x20, 0x26, 0x26, 0x20, 0x60, 0x24, 0x7b, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x7d, 0x2d, - 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x60, 0x20, 0x69, 0x6e, 0x20, 0x53, - 0x54, 0x52, 0x49, 0x4e, 0x47, 0x5f, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, - 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x53, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, - 0x69, 0x6d, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x60, 0x24, 0x7b, - 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, - 0x74, 0x7d, 0x2d, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 
- [... remainder of the machine-generated hex byte dump of the embedded json-schema-to-grammar.mjs source omitted; the deleted array continues for many more lines and closes just below ...]
- 0x20, 0x28, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x2e, 0x6c, 0x65, 0x6e, 0x67,
- 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20,
- 0x20, 0x20, 0x79, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x5b, 0x6c, 0x61, 0x73,
- 0x74, 0x4b, 0x65, 0x79, 0x2c, 0x20, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5d,
- 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a
-};
-unsigned int json_schema_to_grammar_mjs_len = 21866;
diff --git a/scripts/xxd.cmake b/scripts/xxd.cmake
new file mode 100644
index 0000000000000..f5ad6ab9b1a79
--- /dev/null
+++ b/scripts/xxd.cmake
@@ -0,0 +1,16 @@
+# CMake equivalent of `xxd -i ${INPUT} ${OUTPUT}`
+# Usage: cmake -DINPUT=examples/server/public/index.html -DOUTPUT=examples/server/index.html.hpp -P scripts/xxd.cmake
+
+SET(INPUT "" CACHE STRING "Input File")
+SET(OUTPUT "" CACHE STRING "Output File")
+
+get_filename_component(filename "${INPUT}" NAME)
+string(REGEX REPLACE "\\.|-" "_" name "${filename}")
+
+file(READ "${INPUT}" hex_data HEX)
+string(REGEX REPLACE "([0-9a-f][0-9a-f])" "0x\\1," hex_sequence "${hex_data}")
+
+string(LENGTH ${hex_data} hex_len)
+math(EXPR len "${hex_len} / 2")
+
+file(WRITE "${OUTPUT}" "unsigned char ${name}[] = {${hex_sequence}};\nunsigned int ${name}_len = ${len};\n")
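To make the new script concrete: it reproduces `xxd -i` output purely in CMake, so generated headers like the array deleted above no longer have to be committed. A quick worked example, using the invocation shape from the script's own usage comment but with a hypothetical two-byte input file `hi.txt` containing the text `hi` (the file name is illustrative, not from the patch):

    cmake -DINPUT=hi.txt -DOUTPUT=hi.txt.hpp -P scripts/xxd.cmake
    # hi.txt.hpp then contains (the trailing comma is valid C):
    #   unsigned char hi_txt[] = {0x68,0x69,};
    #   unsigned int hi_txt_len = 2;

Note that only `.` and `-` are mapped to `_` when forming the identifier, so input names containing other non-identifier characters would require extending the regex.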

From a7b79ed2d74aee4d1806204e5850b32035db9cfe Mon Sep 17 00:00:00 2001
From: Concedo <39025047+LostRuins@users.noreply.github.com>
Date: Mon, 22 Apr 2024 15:22:44 +0800
Subject: [PATCH 014/101] smart buffered stop sequence workaround for SSE
 streaming mode.

---
 koboldcpp.py | 231 +++++++++++++++++++++++++++++----------------------
 1 file changed, 133 insertions(+), 98 deletions(-)

diff --git a/koboldcpp.py b/koboldcpp.py
index 6c632d5162d22..94d401596b129 100644
--- a/koboldcpp.py
+++ b/koboldcpp.py
@@ -606,6 +606,15 @@ def bring_terminal_to_foreground():
     ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 9)
     ctypes.windll.user32.SetForegroundWindow(ctypes.windll.kernel32.GetConsoleWindow())
 
+def string_contains_sequence_substring(inputstr,sequences):
+    if inputstr.strip()=="":
+        return False
+    for s in sequences:
+        if s.strip()=="":
+            continue
+        if s.strip() in inputstr.strip() or inputstr.strip() in s.strip():
+            return True
+    return False
 
 #################################################################
 ### A hacky simple HTTP server simulating a kobold api by Concedo
@@ -646,6 +655,93 @@ def bring_terminal_to_foreground():
 last_req_time = time.time()
 last_non_horde_req_time = time.time()
 
+def transform_genparams(genparams, api_format):
+    #alias all nonstandard alternative names for rep pen.
+    rp1 = genparams.get('repeat_penalty', 1.0)
+    rp2 = genparams.get('repetition_penalty', 1.0)
+    rp3 = genparams.get('rep_pen', 1.0)
+    rp_max = max(rp1,rp2,rp3)
+    genparams["rep_pen"] = rp_max
+
+    if api_format==1:
+        genparams["prompt"] = genparams.get('text', "")
+        genparams["top_k"] = int(genparams.get('top_k', 120))
+        genparams["max_length"] = genparams.get('max', 100)
+
+    elif api_format==3 or api_format==4:
+        genparams["max_length"] = genparams.get('max_tokens', 100)
+        presence_penalty = genparams.get('presence_penalty', genparams.get('frequency_penalty', 0.0))
+        genparams["presence_penalty"] = presence_penalty
+        # openai allows either a string or a list as a stop sequence
+        if isinstance(genparams.get('stop',[]), list):
+            genparams["stop_sequence"] = genparams.get('stop', [])
+        else:
+            genparams["stop_sequence"] = [genparams.get('stop')]
+
+        genparams["sampler_seed"] = genparams.get('seed', -1)
+        genparams["use_default_badwordsids"] = genparams.get('ignore_eos', False)
+        genparams["mirostat"] = genparams.get('mirostat_mode', 0)
+
+        if api_format==4:
+            # translate openai chat completion messages format into one big string.
+            messages_array = genparams.get('messages', [])
+            default_adapter = {} if chatcompl_adapter is None else chatcompl_adapter
+            adapter_obj = genparams.get('adapter', default_adapter)
+            messages_string = ""
+            system_message_start = adapter_obj.get("system_start", "\n### Instruction:\n")
+            system_message_end = adapter_obj.get("system_end", "")
+            user_message_start = adapter_obj.get("user_start", "\n### Instruction:\n")
+            user_message_end = adapter_obj.get("user_end", "")
+            assistant_message_start = adapter_obj.get("assistant_start", "\n### Response:\n")
+            assistant_message_end = adapter_obj.get("assistant_end", "")
+            images_added = []
+
+            for message in messages_array:
+                if message['role'] == "system":
+                    messages_string += system_message_start
+                elif message['role'] == "user":
+                    messages_string += user_message_start
+                elif message['role'] == "assistant":
+                    messages_string += assistant_message_start
+
+                # content can be a string or an array of objects
+                curr_content = message['content']
+                if isinstance(curr_content, str):
+                    messages_string += curr_content
+                elif isinstance(curr_content, list): #is an array
+                    for item in curr_content:
+                        if item['type']=="text":
+                            messages_string += item['text']
+                        elif item['type']=="image_url":
+                            if item['image_url'] and item['image_url']['url'] and item['image_url']['url'].startswith("data:image"):
+                                images_added.append(item['image_url']['url'].split(",", 1)[1])
+
+                if message['role'] == "system":
+                    messages_string += system_message_end
+                elif message['role'] == "user":
+                    messages_string += user_message_end
+                elif message['role'] == "assistant":
+                    messages_string += assistant_message_end
+
+            messages_string += assistant_message_start
+            genparams["prompt"] = messages_string
+            if len(images_added)>0:
+                genparams["images"] = images_added
+            if len(genparams.get('stop_sequence', []))==0: #only set stop seq if it wont overwrite existing
+                genparams["stop_sequence"] = [user_message_start.strip(),assistant_message_start.strip()]
+            else:
+                genparams["stop_sequence"].append(user_message_start.strip())
+                genparams["stop_sequence"].append(assistant_message_start.strip())
+            genparams["trim_stop"] = True
+
+    elif api_format==5:
+        firstimg = genparams.get('image', "")
+        genparams["images"] = [firstimg]
+        genparams["max_length"] = 32
+        genparams["prompt"] = "### Instruction: In one sentence, write a descriptive caption for this image.\n### Response:"
+
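# A worked example of the api_format==4 translation above, assuming the
# built-in default adapter markers (a sketch, not part of the patch; it
# also assumes the surrounding koboldcpp globals such as chatcompl_adapter
# are initialized):
#
#   p = transform_genparams({"messages": [{"role": "user", "content": "Hi"}]}, 4)
#   p["prompt"]         # => "\n### Instruction:\nHi\n### Response:\n"
#   p["stop_sequence"]  # => ["### Instruction:", "### Response:"]
#
# Registering the user/assistant start markers as stop sequences is what
# the buffered SSE streaming change further below relies on: output is held
# back whenever it could be the beginning of one of these strings.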
+ return genparams + class ServerRequestHandler(http.server.SimpleHTTPRequestHandler): sys_version = "" server_version = "ConcedoLlamaForKoboldServer" @@ -669,91 +765,8 @@ async def generate_text(self, genparams, api_format, stream_flag): from datetime import datetime global friendlymodelname, chatcompl_adapter is_quiet = args.quiet - def run_blocking(): #api format 1=basic,2=kai,3=oai,4=oai-chat - #alias all nonstandard alternative names for rep pen. - rp1 = genparams.get('repeat_penalty', 1.0) - rp2 = genparams.get('repetition_penalty', 1.0) - rp3 = genparams.get('rep_pen', 1.0) - rp_max = max(rp1,rp2,rp3) - genparams["rep_pen"] = rp_max - - if api_format==1: - genparams["prompt"] = genparams.get('text', "") - genparams["top_k"] = int(genparams.get('top_k', 120)) - genparams["max_length"] = genparams.get('max', 100) - - elif api_format==3 or api_format==4: - genparams["max_length"] = genparams.get('max_tokens', 100) - presence_penalty = genparams.get('presence_penalty', genparams.get('frequency_penalty', 0.0)) - genparams["presence_penalty"] = presence_penalty - # openai allows either a string or a list as a stop sequence - if isinstance(genparams.get('stop',[]), list): - genparams["stop_sequence"] = genparams.get('stop', []) - else: - genparams["stop_sequence"] = [genparams.get('stop')] - - genparams["sampler_seed"] = genparams.get('seed', -1) - genparams["use_default_badwordsids"] = genparams.get('ignore_eos', False) - genparams["mirostat"] = genparams.get('mirostat_mode', 0) - - if api_format==4: - # translate openai chat completion messages format into one big string. - messages_array = genparams.get('messages', []) - default_adapter = {} if chatcompl_adapter is None else chatcompl_adapter - adapter_obj = genparams.get('adapter', default_adapter) - messages_string = "" - system_message_start = adapter_obj.get("system_start", "\n### Instruction:\n") - system_message_end = adapter_obj.get("system_end", "") - user_message_start = adapter_obj.get("user_start", "\n### Instruction:\n") - user_message_end = adapter_obj.get("user_end", "") - assistant_message_start = adapter_obj.get("assistant_start", "\n### Response:\n") - assistant_message_end = adapter_obj.get("assistant_end", "") - images_added = [] - - for message in messages_array: - if message['role'] == "system": - messages_string += system_message_start - elif message['role'] == "user": - messages_string += user_message_start - elif message['role'] == "assistant": - messages_string += assistant_message_start - - # content can be a string or an array of objects - curr_content = message['content'] - if isinstance(curr_content, str): - messages_string += curr_content - elif isinstance(curr_content, list): #is an array - for item in curr_content: - if item['type']=="text": - messages_string += item['text'] - elif item['type']=="image_url": - if item['image_url'] and item['image_url']['url'] and item['image_url']['url'].startswith("data:image"): - images_added.append(item['image_url']['url'].split(",", 1)[1]) - - if message['role'] == "system": - messages_string += system_message_end - elif message['role'] == "user": - messages_string += user_message_end - elif message['role'] == "assistant": - messages_string += assistant_message_end - - messages_string += assistant_message_start - genparams["prompt"] = messages_string - if len(images_added)>0: - genparams["images"] = images_added - if len(genparams.get('stop_sequence', []))==0: #only set stop seq if it wont overwrite existing - genparams["stop_sequence"] = 
[user_message_start.strip(),assistant_message_start.strip()] - else: - genparams["stop_sequence"].append(user_message_start.strip()) - genparams["stop_sequence"].append(assistant_message_start.strip()) - genparams["trim_stop"] = True - - elif api_format==5: - firstimg = genparams.get('image', "") - genparams["images"] = [firstimg] - genparams["max_length"] = 32 - genparams["prompt"] = "### Instruction: In one sentence, write a descriptive caption for this image.\n### Response:" + def run_blocking(): #api format 1=basic,2=kai,3=oai,4=oai-chat #flag instance as non-idle for a while washordereq = genparams.get('genkey', '').startswith('HORDEREQ_') @@ -846,7 +859,7 @@ async def send_kai_sse_event(self, data): self.wfile.write(f'data: {data}\n\n'.encode()) self.wfile.flush() - async def handle_sse_stream(self, api_format): + async def handle_sse_stream(self, genparams, api_format): global friendlymodelname self.send_response(200) self.send_header("cache-control", "no-cache") @@ -855,8 +868,10 @@ async def handle_sse_stream(self, api_format): current_token = 0 incomplete_token_buffer = bytearray() + async_sleep_short = 0.02 await asyncio.sleep(0.25) #anti race condition, prevent check from overtaking generate try: + tokenReserve = "" #keeps fully formed tokens that we cannot send out yet while True: streamDone = handle.has_finished() #exit next loop on done tokenStr = "" @@ -876,19 +891,37 @@ async def handle_sse_stream(self, api_format): tokenStr += tokenSeg if tokenStr!="": - if api_format == 4: # if oai chat, set format to expected openai streaming response - event_str = json.dumps({"id":"koboldcpp","object":"chat.completion.chunk","created":1,"model":friendlymodelname,"choices":[{"index":0,"finish_reason":"length","delta":{'role':'assistant','content':tokenStr}}]}) - await self.send_oai_sse_event(event_str) - elif api_format == 3: # non chat completions - event_str = json.dumps({"id":"koboldcpp","object":"text_completion","created":1,"model":friendlymodelname,"choices":[{"index":0,"finish_reason":"length","text":tokenStr}]}) - await self.send_oai_sse_event(event_str) + sseq = genparams.get('stop_sequence', []) + trimstop = genparams.get('trim_stop', False) + if trimstop and not streamDone and string_contains_sequence_substring(tokenStr,sseq): + tokenReserve += tokenStr + await asyncio.sleep(async_sleep_short) #if a stop sequence could trigger soon, do not send output else: - event_str = json.dumps({"token": tokenStr}) - await self.send_kai_sse_event(event_str) - tokenStr = "" - + tokenStr = tokenReserve + tokenStr + tokenReserve = "" + + #apply trimming if needed + if trimstop: + for trim_str in sseq: + sindex = tokenStr.find(trim_str) + if sindex != -1 and trim_str!="": + tokenStr = tokenStr[:sindex] + + if tokenStr!="": + if api_format == 4: # if oai chat, set format to expected openai streaming response + event_str = json.dumps({"id":"koboldcpp","object":"chat.completion.chunk","created":1,"model":friendlymodelname,"choices":[{"index":0,"finish_reason":"length","delta":{'role':'assistant','content':tokenStr}}]}) + await self.send_oai_sse_event(event_str) + elif api_format == 3: # non chat completions + event_str = json.dumps({"id":"koboldcpp","object":"text_completion","created":1,"model":friendlymodelname,"choices":[{"index":0,"finish_reason":"length","text":tokenStr}]}) + await self.send_oai_sse_event(event_str) + else: + event_str = json.dumps({"token": tokenStr}) + await self.send_kai_sse_event(event_str) + tokenStr = "" + else: + await asyncio.sleep(async_sleep_short) else: - await 
asyncio.sleep(0.02) #this should keep things responsive + await asyncio.sleep(async_sleep_short) #this should keep things responsive if streamDone: if api_format == 4 or api_format == 3: # if oai chat, send last [DONE] message consistent with openai format @@ -907,12 +940,14 @@ async def handle_sse_stream(self, api_format): await asyncio.sleep(0.05) - async def handle_request(self, genparams, api_format, stream_flag): + async def handle_request(self, raw_genparams, api_format, stream_flag): tasks = [] + genparams = transform_genparams(raw_genparams, api_format) + try: if stream_flag: - tasks.append(self.handle_sse_stream(api_format)) + tasks.append(self.handle_sse_stream(genparams, api_format)) generate_task = asyncio.create_task(self.generate_text(genparams, api_format, stream_flag)) tasks.append(generate_task) From e9b4a1bf68c18beff4e33f23ea62c1245b296915 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 21 Apr 2024 00:17:47 +0000 Subject: [PATCH 015/101] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/1042fd8b148a9105f3c0aca3a6177fd1d9360ba5?narHash=sha256-3sbWO1mbpWsLepZGbWaMovSO7ndZeFqDSdX0hZ9nVyw%3D' (2024-04-10) → 'github:NixOS/nixpkgs/5c24cf2f0a12ad855f444c30b2421d044120c66f?narHash=sha256-XtTSSIB2DA6tOv%2Bl0FhvfDMiyCmhoRbNB%2B0SeInZkbk%3D' (2024-04-19) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 2e2ab493289af..9c1b0af3797a7 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1712791164, - "narHash": "sha256-3sbWO1mbpWsLepZGbWaMovSO7ndZeFqDSdX0hZ9nVyw=", + "lastModified": 1713537308, + "narHash": "sha256-XtTSSIB2DA6tOv+l0FhvfDMiyCmhoRbNB+0SeInZkbk=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "1042fd8b148a9105f3c0aca3a6177fd1d9360ba5", + "rev": "5c24cf2f0a12ad855f444c30b2421d044120c66f", "type": "github" }, "original": { From c0956b09ba845a7cd787d5580d7c8b96e80f40f5 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Mon, 22 Apr 2024 13:22:54 +0200 Subject: [PATCH 016/101] ci: fix job are cancelling each other (#6781) --- .github/workflows/bench.yml | 2 +- .github/workflows/server.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 7587966320c81..d50af0b70e17c 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -32,7 +32,7 @@ on: - cron: '04 2 * * *' concurrency: - group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}-${{ github.event.inputs.sha }} + group: ${{ github.workflow }}-${{ github.ref || github.run_id }}-${{ github.event.inputs.sha }} cancel-in-progress: true jobs: diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 521cc29aea976..3e68a3c8c433f 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -23,7 +23,7 @@ on: - cron: '2 4 * * *' concurrency: - group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }} + group: ${{ github.workflow }}-${{ github.ref || github.run_id }} cancel-in-progress: true jobs: From 8960fe86ae075c846c5df8848230d1904ba8877f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 22 Apr 2024 15:41:11 +0300 Subject: [PATCH 017/101] llama : fix typo in <|im_end|> token text (#6745) --- llama.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/llama.cpp b/llama.cpp index 7440c740fefbc..a25d115c1d82a 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4340,7 +4340,7 @@ static void llm_load_vocab( } } - // find EOT token: "<|eot_id|>", "<|im_emd|>", "", etc. + // find EOT token: "<|eot_id|>", "<|im_end|>", "", etc. // // TODO: convert scripts should provide this token through the KV metadata LLAMA_KV_TOKENIZER_EOT_ID // for now, we apply this workaround to find the EOT token based on its text @@ -4351,7 +4351,7 @@ static void llm_load_vocab( // need to fix convert script //vocab.id_to_token[t.second].type == LLAMA_TOKEN_TYPE_CONTROL && (t.first == "<|eot_id|>" || - t.first == "<|im_emd|>" || + t.first == "<|im_end|>" || t.first == "" ) ) { From e931888d5024de814ce7119a18d6a959bfff3821 Mon Sep 17 00:00:00 2001 From: Dave Airlie Date: Tue, 23 Apr 2024 00:05:06 +1000 Subject: [PATCH 018/101] ggml : fix calloc argument ordering. (#6820) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Latest gcc complains here: /home/airlied/devel/llama.cpp/ggml-alloc.c: In function ‘ggml_gallocr_new_n’: /home/airlied/devel/llama.cpp/ggml-alloc.c:374:59: warning: ‘calloc’ sizes specified with ‘sizeof’ in the earlier argument and not in the later argument [-Wcalloc-transposed-args] 374 | ggml_gallocr_t galloc = (ggml_gallocr_t)calloc(sizeof(struct ggml_gallocr), 1); | ^~~~~~ /home/airlied/devel/llama.cpp/ggml-alloc.c:374:59: note: earlier argument should specify number of elements, later size of each element and a bunch more. calloc is specified to take nmemb first then size, so realign the code. In a couple of places there was a * x, 1 so I fixed those to use calloc properly. --- ggml-alloc.c | 16 ++++++++-------- ggml-backend.c | 18 +++++++++--------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/ggml-alloc.c b/ggml-alloc.c index 7ceafec309d0c..1fbd376edf410 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -371,16 +371,16 @@ struct ggml_gallocr { }; ggml_gallocr_t ggml_gallocr_new_n(ggml_backend_buffer_type_t * bufts, int n_bufs) { - ggml_gallocr_t galloc = (ggml_gallocr_t)calloc(sizeof(struct ggml_gallocr), 1); + ggml_gallocr_t galloc = (ggml_gallocr_t)calloc(1, sizeof(struct ggml_gallocr)); GGML_ASSERT(galloc != NULL); - galloc->bufts = calloc(sizeof(ggml_backend_buffer_type_t) * n_bufs, 1); + galloc->bufts = calloc(n_bufs, sizeof(ggml_backend_buffer_type_t)); GGML_ASSERT(galloc->bufts != NULL); - galloc->buffers = calloc(sizeof(ggml_backend_buffer_t) * n_bufs, 1); + galloc->buffers = calloc(n_bufs, sizeof(ggml_backend_buffer_t) * n_bufs); GGML_ASSERT(galloc->buffers != NULL); - galloc->buf_tallocs = calloc(sizeof(struct ggml_dyn_tallocr *) * n_bufs, 1); + galloc->buf_tallocs = calloc(n_bufs, sizeof(struct ggml_dyn_tallocr *)); GGML_ASSERT(galloc->buf_tallocs != NULL); for (int i = 0; i < n_bufs; i++) { @@ -646,8 +646,8 @@ bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, c free(galloc->hash_set.keys); free(galloc->hash_values); galloc->hash_set.size = hash_size; - galloc->hash_set.keys = calloc(sizeof(struct ggml_tensor *), hash_size); - galloc->hash_values = calloc(sizeof(struct hash_node), hash_size); + galloc->hash_set.keys = calloc(hash_size, sizeof(struct ggml_tensor *)); + galloc->hash_values = calloc(hash_size, sizeof(struct hash_node)); GGML_ASSERT(galloc->hash_set.keys != NULL); GGML_ASSERT(galloc->hash_values != NULL); } else { @@ -667,7 +667,7 @@ bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, c // set the node_allocs 
from the hash table if (galloc->n_nodes < graph->n_nodes) { free(galloc->node_allocs); - galloc->node_allocs = calloc(sizeof(struct node_alloc), graph->n_nodes); + galloc->node_allocs = calloc(graph->n_nodes, sizeof(struct node_alloc)); GGML_ASSERT(galloc->node_allocs != NULL); } galloc->n_nodes = graph->n_nodes; @@ -697,7 +697,7 @@ bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, c } if (galloc->n_leafs < graph->n_leafs) { free(galloc->leaf_allocs); - galloc->leaf_allocs = calloc(sizeof(galloc->leaf_allocs[0]), graph->n_leafs); + galloc->leaf_allocs = calloc(graph->n_leafs, sizeof(galloc->leaf_allocs[0])); GGML_ASSERT(galloc->leaf_allocs != NULL); } galloc->n_leafs = graph->n_leafs; diff --git a/ggml-backend.c b/ggml-backend.c index 189b5c140a8a7..e91d97cd9dcfc 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -1725,23 +1725,23 @@ ggml_backend_sched_t ggml_backend_sched_new( GGML_ASSERT(n_backends <= GGML_SCHED_MAX_BACKENDS); GGML_ASSERT(ggml_backend_is_cpu(backends[n_backends - 1])); // last backend must be CPU - struct ggml_backend_sched * sched = calloc(sizeof(struct ggml_backend_sched), 1); + struct ggml_backend_sched * sched = calloc(1, sizeof(struct ggml_backend_sched)); // initialize hash table sched->hash_set = ggml_hash_set_new(graph_size); - sched->tensor_backend_id = calloc(sizeof(sched->tensor_backend_id[0]), sched->hash_set.size); - sched->tensor_copies = calloc(sizeof(sched->tensor_copies[0]), sched->hash_set.size); + sched->tensor_backend_id = calloc(sched->hash_set.size, sizeof(sched->tensor_backend_id[0])); + sched->tensor_copies = calloc(sched->hash_set.size, sizeof(sched->tensor_copies[0])); const size_t nodes_size = graph_size + GGML_SCHED_MAX_SPLITS*GGML_SCHED_MAX_SPLIT_INPUTS*2; - sched->node_backend_ids = calloc(sizeof(sched->node_backend_ids[0]), nodes_size); - sched->leaf_backend_ids = calloc(sizeof(sched->leaf_backend_ids[0]), nodes_size); + sched->node_backend_ids = calloc(nodes_size, sizeof(sched->node_backend_ids[0])); + sched->leaf_backend_ids = calloc(nodes_size, sizeof(sched->leaf_backend_ids[0])); sched->n_backends = n_backends; sched->n_copies = parallel ? 
GGML_SCHED_MAX_COPIES : 1; const int initial_splits_capacity = 16; - sched->splits = calloc(sizeof(sched->splits[0]), initial_splits_capacity); + sched->splits = calloc(initial_splits_capacity, sizeof(sched->splits[0])); sched->splits_capacity = initial_splits_capacity; for (int b = 0; b < n_backends; b++) { @@ -1972,10 +1972,10 @@ static void graph_copy_init_tensor(struct ggml_hash_set hash_set, struct ggml_te struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, struct ggml_cgraph * graph) { struct ggml_hash_set hash_set = { /* .size = */ graph->visited_hash_table.size, - /* .keys = */ calloc(sizeof(hash_set.keys[0]), graph->visited_hash_table.size) // NOLINT + /* .keys = */ calloc(graph->visited_hash_table.size, sizeof(hash_set.keys[0])) // NOLINT }; - struct ggml_tensor ** node_copies = calloc(sizeof(node_copies[0]), hash_set.size); // NOLINT - bool * node_init = calloc(sizeof(node_init[0]), hash_set.size); + struct ggml_tensor ** node_copies = calloc(hash_set.size, sizeof(node_copies[0])); // NOLINT + bool * node_init = calloc(hash_set.size, sizeof(node_init[0])); struct ggml_init_params params = { /* .mem_size = */ ggml_tensor_overhead()*hash_set.size + ggml_graph_overhead_custom(graph->size, false), From 192090bae47960f0d38d4967abe398a5d190057e Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Mon, 22 Apr 2024 15:00:36 -0400 Subject: [PATCH 019/101] llamafile : improve sgemm.cpp (#6796) * llamafile : improve sgemm.cpp - Re-enable by default - Fix issue described in #6716 - Make code more abstract, elegant, and maintainable - Faster handling of weirdly shaped `m` an `n` edge cases * Address review comments * Help clang produce fma instructions * Address review comments --- CMakeLists.txt | 16 +- Makefile | 4 - ggml.c | 8 +- sgemm.cpp | 957 +++++++++++++++++++++---------------------------- 4 files changed, 412 insertions(+), 573 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f134a153bb4ff..58a1805ba10fd 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -43,17 +43,11 @@ else() set(LLAMA_METAL_DEFAULT OFF) endif() -# TODO: fix this for Android CI -# https://github.com/ggerganov/llama.cpp/pull/6716#issuecomment-2061509191 -#if (CMAKE_SYSTEM_NAME MATCHES "ANDROID") -# set(LLAMA_LLAMAFILE_DEFAULT OFF) -#else() -# set(LLAMA_LLAMAFILE_DEFAULT ON) -#endif() - -# TODO: temporary disable until MoE is fixed -# https://github.com/ggerganov/llama.cpp/pull/6716 -set(LLAMA_LLAMAFILE_DEFAULT OFF) +if (CMAKE_SYSTEM_NAME MATCHES "ANDROID") + set(LLAMA_LLAMAFILE_DEFAULT OFF) +else() + set(LLAMA_LLAMAFILE_DEFAULT ON) +endif() # general option(BUILD_SHARED_LIBS "build shared libraries" OFF) diff --git a/Makefile b/Makefile index b0b2ea997ee12..24acb80136516 100644 --- a/Makefile +++ b/Makefile @@ -384,10 +384,6 @@ ifdef LLAMA_OPENBLAS MK_LDFLAGS += $(shell pkg-config --libs openblas) endif # LLAMA_OPENBLAS -# TODO: temporary disable until MoE is fixed -# https://github.com/ggerganov/llama.cpp/pull/6716 -LLAMA_NO_LLAMAFILE := 1 - ifndef LLAMA_NO_LLAMAFILE MK_CPPFLAGS += -DGGML_USE_LLAMAFILE OBJS += sgemm.o diff --git a/ggml.c b/ggml.c index a3b312e4aef59..086db96af7fcd 100644 --- a/ggml.c +++ b/ggml.c @@ -10825,7 +10825,7 @@ static void ggml_compute_forward_mul_mat( #endif #if GGML_USE_LLAMAFILE - if (nb10 == ggml_type_size(src1->type)) { + if (src1_cont) { for (int64_t i13 = 0; i13 < ne13; i13++) for (int64_t i12 = 0; i12 < ne12; i12++) if (!llamafile_sgemm(ne01, ne11, ne00/ggml_blck_size(src0->type), @@ -10878,15 +10878,13 @@ UseGgmlGemm1:; const size_t 
row_size = ggml_row_size(vec_dot_type, ne10); #if GGML_USE_LLAMAFILE - if (nb10 == ggml_type_size(src1->type) || src1->type != vec_dot_type) { + if (src1->type != vec_dot_type) { for (int64_t i13 = 0; i13 < ne13; i13++) for (int64_t i12 = 0; i12 < ne12; i12++) if (!llamafile_sgemm(ne01, ne11, ne00/ggml_blck_size(src0->type), (const char *)src0->data + i12/r2*nb02 + i13/r3*nb03, nb01/ggml_type_size(src0->type), - (const char *)wdata + ggml_row_size(vec_dot_type, - nb12/ggml_type_size(src1->type)*i12 + - nb13/ggml_type_size(src1->type)*i13), + (const char *)wdata + (i12*ne11 + i13*ne12*ne11)*row_size, row_size/ggml_type_size(vec_dot_type), (char *)dst->data + i12*nb2 + i13*nb3, nb1/ggml_type_size(dst->type), diff --git a/sgemm.cpp b/sgemm.cpp index 6900f04cfb242..531e12af361cc 100644 --- a/sgemm.cpp +++ b/sgemm.cpp @@ -50,6 +50,7 @@ #pragma GCC diagnostic ignored "-Wignored-attributes" #include "sgemm.h" +#include #include "ggml-impl.h" #include "ggml-quants.h" @@ -65,22 +66,6 @@ #define VECTOR_REGISTERS 16 #endif -// there will be blocks -#define BEGIN_KERNEL(RM, RN) \ - int ytiles = (m - m0) / RM; \ - int xtiles = (n - n0) / RN; \ - int tiles = ytiles * xtiles; \ - int duty = (tiles + nth - 1) / nth; \ - int start = duty * ith; \ - int end = start + duty; \ - if (end > tiles) \ - end = tiles; \ - for (int job = start; job < end; ++job) { \ - int i = m0 + job / xtiles * RM; \ - int j = n0 + job % xtiles * RN; - -#define END_KERNEL() } - #define MM256_SET_M128I(a, b) _mm256_insertf128_si256(_mm256_castsi128_si256(b), (a), 1) namespace { @@ -122,6 +107,45 @@ inline float16x8_t sub(float16x8_t x, float16x8_t y) { return vsubq_f16(x, y); } inline float16x8_t mul(float16x8_t x, float16x8_t y) { return vmulq_f16(x, y); } #endif // __ARM_FEATURE_FP16_VECTOR_ARITHMETIC +//////////////////////////////////////////////////////////////////////////////////////////////////// +// VECTORIZED FUSED MULTIPLY ADD + +/** + * Computes a * b + c. + */ +template +inline U madd(T a, T b, U c) { + return add(mul(a, b), c); +} + +#if defined(__FMA__) +#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) +template <> +inline __m256 madd(__m256 a, __m256 b, __m256 c) { + return _mm256_fmadd_ps(a, b, c); +} +#endif +#if defined(__AVX512F__) +template <> +inline __m512 madd(__m512 a, __m512 b, __m512 c) { + return _mm512_fmadd_ps(a, b, c); +} +#endif +#endif + +#if defined(__ARM_FEATURE_FMA) +template <> +inline float32x4_t madd(float32x4_t a, float32x4_t b, float32x4_t c) { + return vfmaq_f32(c, b, a); +} +#if defined(__ARM_FEATURE_FP16_VECTOR_ARITHMETIC) && !defined(_MSC_VER) +template <> +inline float16x8_t madd(float16x8_t a, float16x8_t b, float16x8_t c) { + return vfmaq_f16(c, b, a); +} +#endif +#endif + //////////////////////////////////////////////////////////////////////////////////////////////////// // VECTORIZED HORIZONTAL SUM @@ -213,36 +237,6 @@ template <> inline __m512 load(const ggml_fp16_t *p) { } #endif // __AVX512F__ -//////////////////////////////////////////////////////////////////////////////////////////////////// -// ABSTRACTIONS - -/** - * Computes a * b + c. - * - * This operation will become fused into a single arithmetic instruction - * if the hardware has support for this feature, e.g. Intel Haswell+ (c. - * 2013), AMD Bulldozer+ (c. 2011), etc. - */ -template -inline U madd(T a, T b, U c) { - return add(mul(a, b), c); -} - -/** - * Computes a * b + c with error correction. - * - * @see W. Kahan, "Further remarks on reducing truncation errors," - * Communications of the ACM, vol. 
8, no. 1, p. 40, Jan. 1965, - * doi: 10.1145/363707.363723. - */ -template -inline U madder(T a, T b, U c, U *e) { - U y = sub(mul(a, b), *e); - U t = add(c, y); - *e = sub(sub(t, c), y); - return t; -} - //////////////////////////////////////////////////////////////////////////////////////////////////// // FLOATING POINT MATRIX MULTIPLICATION @@ -265,226 +259,179 @@ class tinyBLAS { private: NOINLINE void mnpack(int m0, int m, int n0, int n) { int mc, nc, mp, np; - if (m - m0 <= 0 || n - n0 <= 0) - return; - if (VECTOR_REGISTERS >= 32 && n - n0 >= 5 && m - m0 >= 5) { + switch ((std::min(m - m0, 5) << 4) | std::min(n - n0, 5)) { +#if VECTOR_REGISTERS == 32 + case 0x55: + mc = 5; + nc = 5; + gemm<5, 5>(m0, m, n0, n); + break; + case 0x45: + mc = 4; + nc = 5; + gemm<4, 5>(m0, m, n0, n); + break; + case 0x54: mc = 5; + nc = 4; + gemm<5, 4>(m0, m, n0, n); + break; + case 0x44: + mc = 4; + nc = 4; + gemm<4, 4>(m0, m, n0, n); + break; + case 0x53: + mc = 5; + nc = 3; + gemm<5, 3>(m0, m, n0, n); + break; + case 0x35: + mc = 3; nc = 5; - gemm5x5(m0, m, n0, n); - } else if (n - n0 >= 4 && m - m0 >= 3) { + gemm<3, 5>(m0, m, n0, n); + break; + case 0x43: + mc = 4; + nc = 3; + gemm<4, 3>(m0, m, n0, n); + break; +#else + case 0x55: + case 0x54: + case 0x53: + case 0x45: + case 0x44: + case 0x43: + mc = 4; + nc = 3; + gemm<4, 3>(m0, m, n0, n); + break; + case 0x35: +#endif + case 0x34: mc = 3; nc = 4; - gemm3x4(m0, m, n0, n); - } else if (n - n0 >= 4) { - mc = 1; + gemm<3, 4>(m0, m, n0, n); + break; + case 0x52: + mc = 5; + nc = 2; + gemm<5, 2>(m0, m, n0, n); + break; + case 0x33: + mc = 3; + nc = 3; + gemm<3, 3>(m0, m, n0, n); + break; + case 0x25: + mc = 2; + nc = 5; + gemm<2, 5>(m0, m, n0, n); + break; + case 0x42: + mc = 4; + nc = 2; + gemm<4, 2>(m0, m, n0, n); + break; + case 0x24: + mc = 2; nc = 4; - gemm1x4(m0, m, n0, n); - } else if (m - m0 >= 4) { + gemm<2, 4>(m0, m, n0, n); + break; + case 0x32: + mc = 3; + nc = 2; + gemm<3, 2>(m0, m, n0, n); + break; + case 0x23: + mc = 2; + nc = 3; + gemm<2, 3>(m0, m, n0, n); + break; + case 0x51: + mc = 5; + nc = 1; + gemm<5, 1>(m0, m, n0, n); + break; + case 0x41: mc = 4; nc = 1; - gemm4x1(m0, m, n0, n); - } else { + gemm<4, 1>(m0, m, n0, n); + break; + case 0x22: + mc = 2; + nc = 2; + gemm<2, 2>(m0, m, n0, n); + break; + case 0x15: + mc = 1; + nc = 5; + gemm<1, 5>(m0, m, n0, n); + break; + case 0x14: + mc = 1; + nc = 4; + gemm<1, 4>(m0, m, n0, n); + break; + case 0x31: + mc = 3; + nc = 1; + gemm<3, 1>(m0, m, n0, n); + break; + case 0x13: mc = 1; + nc = 3; + gemm<1, 3>(m0, m, n0, n); + break; + case 0x21: + mc = 2; nc = 1; - gemm1x1(m0, m, n0, n); + gemm<2, 1>(m0, m, n0, n); + break; + case 0x12: + mc = 1; + nc = 2; + gemm<1, 2>(m0, m, n0, n); + break; + case 0x11: + mc = 1; + nc = 1; + gemm<1, 1>(m0, m, n0, n); + break; + default: + return; } mp = m0 + (m - m0) / mc * mc; np = n0 + (n - n0) / nc * nc; mnpack(mp, m, n0, np); - mnpack(m0, mp, np, n); - mnpack(mp, m, np, n); - } - - NOINLINE void gemm5x5(int m0, int m, int n0, int n) { - BEGIN_KERNEL(5, 5) - D c00 = {0}; - D c01 = {0}; - D c02 = {0}; - D c03 = {0}; - D c04 = {0}; - D c10 = {0}; - D c11 = {0}; - D c12 = {0}; - D c13 = {0}; - D c14 = {0}; - D c20 = {0}; - D c21 = {0}; - D c22 = {0}; - D c23 = {0}; - D c24 = {0}; - D c30 = {0}; - D c31 = {0}; - D c32 = {0}; - D c33 = {0}; - D c34 = {0}; - D c40 = {0}; - D c41 = {0}; - D c42 = {0}; - D c43 = {0}; - D c44 = {0}; - for (int l = 0; l < k; l += KN) { - V k0 = load(B + ldb * (j + 0) + l); - V k1 = load(B + ldb * (j + 1) + l); - V k2 = load(B + ldb * 
(j + 2) + l); - V k3 = load(B + ldb * (j + 3) + l); - V k4 = load(B + ldb * (j + 4) + l); - V a0 = load(A + lda * (i + 0) + l); - c00 = madd(a0, k0, c00); - c01 = madd(a0, k1, c01); - c02 = madd(a0, k2, c02); - c03 = madd(a0, k3, c03); - c04 = madd(a0, k4, c04); - V a1 = load(A + lda * (i + 1) + l); - c10 = madd(a1, k0, c10); - c11 = madd(a1, k1, c11); - c12 = madd(a1, k2, c12); - c13 = madd(a1, k3, c13); - c14 = madd(a1, k4, c14); - V a2 = load(A + lda * (i + 2) + l); - c20 = madd(a2, k0, c20); - c21 = madd(a2, k1, c21); - c22 = madd(a2, k2, c22); - c23 = madd(a2, k3, c23); - c24 = madd(a2, k4, c24); - V a3 = load(A + lda * (i + 3) + l); - c30 = madd(a3, k0, c30); - c31 = madd(a3, k1, c31); - c32 = madd(a3, k2, c32); - c33 = madd(a3, k3, c33); - c34 = madd(a3, k4, c34); - V a4 = load(A + lda * (i + 4) + l); - c40 = madd(a4, k0, c40); - c41 = madd(a4, k1, c41); - c42 = madd(a4, k2, c42); - c43 = madd(a4, k3, c43); - c44 = madd(a4, k4, c44); - } - C[ldc * (j + 0) + (i + 0)] = hsum(c00); - C[ldc * (j + 0) + (i + 1)] = hsum(c10); - C[ldc * (j + 0) + (i + 2)] = hsum(c20); - C[ldc * (j + 0) + (i + 3)] = hsum(c30); - C[ldc * (j + 0) + (i + 4)] = hsum(c40); - C[ldc * (j + 1) + (i + 0)] = hsum(c01); - C[ldc * (j + 1) + (i + 1)] = hsum(c11); - C[ldc * (j + 1) + (i + 2)] = hsum(c21); - C[ldc * (j + 1) + (i + 3)] = hsum(c31); - C[ldc * (j + 1) + (i + 4)] = hsum(c41); - C[ldc * (j + 2) + (i + 0)] = hsum(c02); - C[ldc * (j + 2) + (i + 1)] = hsum(c12); - C[ldc * (j + 2) + (i + 2)] = hsum(c22); - C[ldc * (j + 2) + (i + 3)] = hsum(c32); - C[ldc * (j + 2) + (i + 4)] = hsum(c42); - C[ldc * (j + 3) + (i + 0)] = hsum(c03); - C[ldc * (j + 3) + (i + 1)] = hsum(c13); - C[ldc * (j + 3) + (i + 2)] = hsum(c23); - C[ldc * (j + 3) + (i + 3)] = hsum(c33); - C[ldc * (j + 3) + (i + 4)] = hsum(c43); - C[ldc * (j + 4) + (i + 0)] = hsum(c04); - C[ldc * (j + 4) + (i + 1)] = hsum(c14); - C[ldc * (j + 4) + (i + 2)] = hsum(c24); - C[ldc * (j + 4) + (i + 3)] = hsum(c34); - C[ldc * (j + 4) + (i + 4)] = hsum(c44); - END_KERNEL() - } - - NOINLINE void gemm3x4(int m0, int m, int n0, int n) { - BEGIN_KERNEL(3, 4) - D c00 = {0}; - D c01 = {0}; - D c02 = {0}; - D c03 = {0}; - D c10 = {0}; - D c11 = {0}; - D c12 = {0}; - D c13 = {0}; - D c20 = {0}; - D c21 = {0}; - D c22 = {0}; - D c23 = {0}; - for (int l = 0; l < k; l += KN) { - V k0 = load(B + ldb * (j + 0) + l); - V k1 = load(B + ldb * (j + 1) + l); - V k2 = load(B + ldb * (j + 2) + l); - V k3 = load(B + ldb * (j + 3) + l); - V a0 = load(A + lda * (i + 0) + l); - c00 = madd(a0, k0, c00); - c01 = madd(a0, k1, c01); - c02 = madd(a0, k2, c02); - c03 = madd(a0, k3, c03); - V a1 = load(A + lda * (i + 1) + l); - c10 = madd(a1, k0, c10); - c11 = madd(a1, k1, c11); - c12 = madd(a1, k2, c12); - c13 = madd(a1, k3, c13); - V a2 = load(A + lda * (i + 2) + l); - c20 = madd(a2, k0, c20); - c21 = madd(a2, k1, c21); - c22 = madd(a2, k2, c22); - c23 = madd(a2, k3, c23); - } - C[ldc * (j + 0) + (i + 0)] = hsum(c00); - C[ldc * (j + 0) + (i + 1)] = hsum(c10); - C[ldc * (j + 0) + (i + 2)] = hsum(c20); - C[ldc * (j + 1) + (i + 0)] = hsum(c01); - C[ldc * (j + 1) + (i + 1)] = hsum(c11); - C[ldc * (j + 1) + (i + 2)] = hsum(c21); - C[ldc * (j + 2) + (i + 0)] = hsum(c02); - C[ldc * (j + 2) + (i + 1)] = hsum(c12); - C[ldc * (j + 2) + (i + 2)] = hsum(c22); - C[ldc * (j + 3) + (i + 0)] = hsum(c03); - C[ldc * (j + 3) + (i + 1)] = hsum(c13); - C[ldc * (j + 3) + (i + 2)] = hsum(c23); - END_KERNEL() - } - - NOINLINE void gemm1x4(int m0, int m, int n0, int n) { - BEGIN_KERNEL(1, 4) - D c00 = {0}, e00 = {0}; - D c01 = 
{0}, e01 = {0}; - D c02 = {0}, e02 = {0}; - D c03 = {0}, e03 = {0}; - for (int l = 0; l < k; l += KN) { - V a = load(A + lda * (i + 0) + l); - c00 = madder(a, load(B + ldb * (j + 0) + l), c00, &e00); - c01 = madder(a, load(B + ldb * (j + 1) + l), c01, &e01); - c02 = madder(a, load(B + ldb * (j + 2) + l), c02, &e02); - c03 = madder(a, load(B + ldb * (j + 3) + l), c03, &e03); - } - C[ldc * (j + 0) + (i + 0)] = hsum(c00); - C[ldc * (j + 1) + (i + 0)] = hsum(c01); - C[ldc * (j + 2) + (i + 0)] = hsum(c02); - C[ldc * (j + 3) + (i + 0)] = hsum(c03); - END_KERNEL() - } - - NOINLINE void gemm4x1(int m0, int m, int n0, int n) { - BEGIN_KERNEL(4, 1) - D c00 = {0}, e00 = {0}; - D c10 = {0}, e10 = {0}; - D c20 = {0}, e20 = {0}; - D c30 = {0}, e30 = {0}; - for (int l = 0; l < k; l += KN) { - V b = load(B + ldb * (j + 0) + l); - c00 = madder(load(A + lda * (i + 0) + l), b, c00, &e00); - c10 = madder(load(A + lda * (i + 1) + l), b, c10, &e10); - c20 = madder(load(A + lda * (i + 2) + l), b, c20, &e20); - c30 = madder(load(A + lda * (i + 3) + l), b, c30, &e30); + mnpack(m0, m, np, n); + } + + template <int RM, int RN> + NOINLINE void gemm(int m0, int m, int n0, int n) { + int ytiles = (m - m0) / RM; + int xtiles = (n - n0) / RN; + int tiles = xtiles * ytiles; + int duty = (tiles + nth - 1) / nth; + int start = duty * ith; + int end = start + duty; + if (end > tiles) + end = tiles; + for (int job = start; job < end; ++job) { + int ii = m0 + job / xtiles * RM; + int jj = n0 + job % xtiles * RN; + D Cv[RN][RM] = {}; + for (int l = 0; l < k; l += KN) + for (int j = 0; j < RN; ++j) + for (int i = 0; i < RM; ++i) + Cv[j][i] = madd(load(A + lda * (ii + i) + l), + load(B + ldb * (jj + j) + l), + Cv[j][i]); + for (int j = 0; j < RN; ++j) + for (int i = 0; i < RM; ++i) + C[ldc * (jj + j) + (ii + i)] = hsum(Cv[j][i]); } - C[ldc * (j + 0) + (i + 0)] = hsum(c00); - C[ldc * (j + 0) + (i + 1)] = hsum(c10); - C[ldc * (j + 0) + (i + 2)] = hsum(c20); - C[ldc * (j + 0) + (i + 3)] = hsum(c30); - END_KERNEL() - } - - NOINLINE void gemm1x1(int m0, int m, int n0, int n) { - BEGIN_KERNEL(1, 1) - D c = {0}, e = {0}; - for (int l = 0; l < k; l += KN) - c = madder(load(A + lda * i + l), - load(B + ldb * j + l), c, &e); - C[ldc * j + i] = hsum(c); - END_KERNEL() - } const TA *const A; @@ -521,120 +468,97 @@ class tinyBLAS_Q0_ARM { private: NOINLINE void mnpack(int m0, int m, int n0, int n) { int mc, nc, mp, np; - if (m - m0 <= 0 || n - n0 <= 0) - return; - if (m - m0 >= 3 && n - n0 >= 3) { + switch ((std::min(m - m0, 3) << 4) | std::min(n - n0, 3)) { + case 0x33: mc = 3; nc = 3; - gemm3x3(m0, m, n0, n); - } else { + gemm<3, 3>(m0, m, n0, n); + break; + case 0x32: + mc = 3; + nc = 2; + gemm<3, 2>(m0, m, n0, n); + break; + case 0x23: + mc = 2; + nc = 3; + gemm<2, 3>(m0, m, n0, n); + break; + case 0x22: + mc = 2; + nc = 2; + gemm<2, 2>(m0, m, n0, n); + break; + case 0x31: + mc = 3; + nc = 1; + gemm<3, 1>(m0, m, n0, n); + break; + case 0x13: + mc = 1; + nc = 3; + gemm<1, 3>(m0, m, n0, n); + break; + case 0x21: + mc = 2; + nc = 1; + gemm<2, 1>(m0, m, n0, n); + break; + case 0x12: + mc = 1; + nc = 2; + gemm<1, 2>(m0, m, n0, n); + break; + case 0x11: mc = 1; nc = 1; - gemm1x1(m0, m, n0, n); + gemm<1, 1>(m0, m, n0, n); + break; + default: + return; } mp = m0 + (m - m0) / mc * mc; np = n0 + (n - n0) / nc * nc; mnpack(mp, m, n0, np); - mnpack(m0, mp, np, n); - mnpack(mp, m, np, n); - } - - NOINLINE void gemm3x3(int m0, int m, int n0, int n) { - BEGIN_KERNEL(3, 3) - int32x4_t zero = vdupq_n_s32(0); - float32x4_t c00 = vdupq_n_f32(0.f);
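To make the refactor above easier to follow: every hand-unrolled `gemmMxN` kernel is replaced by one `gemm<RM, RN>` template that walks a flattened tile index, so a new tile shape is now a one-line `case` in `mnpack` instead of a hundred-line function. Below is a minimal standalone sketch of the tile-to-thread partitioning the template uses; `ith`/`nth` are the thread index and thread count held by the class, and the `printf` trace is illustrative only.

```cpp
// Sketch: how gemm<RM, RN> splits the (m - m0) x (n - n0) output into RM x RN
// tiles and deals a contiguous run of tiles to each thread.
#include <cstdio>

static void partition_tiles(int m0, int m, int n0, int n,
                            int RM, int RN, int ith, int nth) {
    int ytiles = (m - m0) / RM;            // full tile rows
    int xtiles = (n - n0) / RN;            // full tile columns
    int tiles  = xtiles * ytiles;          // total jobs to hand out
    int duty   = (tiles + nth - 1) / nth;  // ceil(tiles / nth) jobs per thread
    int start  = duty * ith;
    int end    = start + duty > tiles ? tiles : start + duty;
    for (int job = start; job < end; ++job) {
        int ii = m0 + job / xtiles * RM;   // top row of this tile in C
        int jj = n0 + job % xtiles * RN;   // left column of this tile in C
        printf("thread %d owns C[%d..%d)x[%d..%d)\n", ith, ii, ii + RM, jj, jj + RN);
    }
}

int main() {
    // a 6x6 region in 3x3 tiles split across 2 threads: 4 tiles, 2 each
    for (int t = 0; t < 2; ++t)
        partition_tiles(0, 6, 0, 6, 3, 3, t, 2);
}
```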
- float32x4_t c01 = vdupq_n_f32(0.f); - float32x4_t c02 = vdupq_n_f32(0.f); - float32x4_t c10 = vdupq_n_f32(0.f); - float32x4_t c11 = vdupq_n_f32(0.f); - float32x4_t c12 = vdupq_n_f32(0.f); - float32x4_t c20 = vdupq_n_f32(0.f); - float32x4_t c21 = vdupq_n_f32(0.f); - float32x4_t c22 = vdupq_n_f32(0.f); - const TA *Ap0 = A + lda * (i + 0); - const TA *Ap1 = A + lda * (i + 1); - const TA *Ap2 = A + lda * (i + 2); - const block_q8_0 *Bp0 = B + ldb * (j + 0); - const block_q8_0 *Bp1 = B + ldb * (j + 1); - const block_q8_0 *Bp2 = B + ldb * (j + 2); - for (int l = 0; l < k; ++l) { - c00 = vmlaq_n_f32( - c00, - vcvtq_f32_s32(vdotq_s32(vdotq_s32(zero, load_lo(Ap0 + l), load_lo(Bp0 + l)), - load_hi(Ap0 + l), load_hi(Bp0 + l))), - unhalf(Ap0[l].d) * unhalf(Bp0[l].d)); - c01 = vmlaq_n_f32( - c01, - vcvtq_f32_s32(vdotq_s32(vdotq_s32(zero, load_lo(Ap0 + l), load_lo(Bp1 + l)), - load_hi(Ap0 + l), load_hi(Bp1 + l))), - unhalf(Ap0[l].d) * unhalf(Bp1[l].d)); - c02 = vmlaq_n_f32( - c02, - vcvtq_f32_s32(vdotq_s32(vdotq_s32(zero, load_lo(Ap0 + l), load_lo(Bp2 + l)), - load_hi(Ap0 + l), load_hi(Bp2 + l))), - unhalf(Ap0[l].d) * unhalf(Bp2[l].d)); - c10 = vmlaq_n_f32( - c10, - vcvtq_f32_s32(vdotq_s32(vdotq_s32(zero, load_lo(Ap1 + l), load_lo(Bp0 + l)), - load_hi(Ap1 + l), load_hi(Bp0 + l))), - unhalf(Ap1[l].d) * unhalf(Bp0[l].d)); - c11 = vmlaq_n_f32( - c11, - vcvtq_f32_s32(vdotq_s32(vdotq_s32(zero, load_lo(Ap1 + l), load_lo(Bp1 + l)), - load_hi(Ap1 + l), load_hi(Bp1 + l))), - unhalf(Ap1[l].d) * unhalf(Bp1[l].d)); - c12 = vmlaq_n_f32( - c12, - vcvtq_f32_s32(vdotq_s32(vdotq_s32(zero, load_lo(Ap1 + l), load_lo(Bp2 + l)), - load_hi(Ap1 + l), load_hi(Bp2 + l))), - unhalf(Ap1[l].d) * unhalf(Bp2[l].d)); - c20 = vmlaq_n_f32( - c20, - vcvtq_f32_s32(vdotq_s32(vdotq_s32(zero, load_lo(Ap2 + l), load_lo(Bp0 + l)), - load_hi(Ap2 + l), load_hi(Bp0 + l))), - unhalf(Ap2[l].d) * unhalf(Bp0[l].d)); - c21 = vmlaq_n_f32( - c21, - vcvtq_f32_s32(vdotq_s32(vdotq_s32(zero, load_lo(Ap2 + l), load_lo(Bp1 + l)), - load_hi(Ap2 + l), load_hi(Bp1 + l))), - unhalf(Ap2[l].d) * unhalf(Bp1[l].d)); - c22 = vmlaq_n_f32( - c22, - vcvtq_f32_s32(vdotq_s32(vdotq_s32(zero, load_lo(Ap2 + l), load_lo(Bp2 + l)), - load_hi(Ap2 + l), load_hi(Bp2 + l))), - unhalf(Ap2[l].d) * unhalf(Bp2[l].d)); - } - C[ldc * (j + 0) + (i + 0)] = hsum(c00); - C[ldc * (j + 0) + (i + 1)] = hsum(c10); - C[ldc * (j + 0) + (i + 2)] = hsum(c20); - C[ldc * (j + 1) + (i + 0)] = hsum(c01); - C[ldc * (j + 1) + (i + 1)] = hsum(c11); - C[ldc * (j + 1) + (i + 2)] = hsum(c21); - C[ldc * (j + 2) + (i + 0)] = hsum(c02); - C[ldc * (j + 2) + (i + 1)] = hsum(c12); - C[ldc * (j + 2) + (i + 2)] = hsum(c22); - END_KERNEL() - } - - NOINLINE void gemm1x1(int m0, int m, int n0, int n) { - BEGIN_KERNEL(1, 1) - float32x4_t acc = vdupq_n_f32(0.f); - const TA *Ap = A + lda * i; - const block_q8_0 *Bp = B + ldb * j; - for (int l = 0; l < k; ++l) { - acc = vmlaq_n_f32(acc, - vcvtq_f32_s32(vdotq_s32( - vdotq_s32(vdupq_n_s32(0), load_lo(Ap + l), load_lo(Bp + l)), - load_hi(Ap + l), load_hi(Bp + l))), - unhalf(Ap[l].d) * unhalf(Bp[l].d)); + mnpack(m0, m, np, n); + } + + template <int RM, int RN> + NOINLINE void gemm(int m0, int m, int n0, int n) { + int ytiles = (m - m0) / RM; + int xtiles = (n - n0) / RN; + int tiles = xtiles * ytiles; + int duty = (tiles + nth - 1) / nth; + int start = duty * ith; + int end = start + duty; + if (end > tiles) + end = tiles; + for (int job = start; job < end; ++job) { + int ii = m0 + job / xtiles * RM; + int jj = n0 + job % xtiles * RN; + float32x4_t Cv[RN][RM] = {}; + for (int l = 0; l < k; ++l) + for (int j = 
0; j < RN; ++j) + for (int i = 0; i < RM; ++i) + Cv[j][i] = vmlaq_n_f32(Cv[j][i], + vcvtq_f32_s32(vdotq_s32( + vdotq_s32(vdupq_n_s32(0), + load_lo(A + lda * (ii + i) + l), + load_lo(B + ldb * (jj + j) + l)), + load_hi(A + lda * (ii + i) + l), + load_hi(B + ldb * (jj + j) + l))), + unhalf(A[lda * (ii + i) + l].d) * + unhalf(B[ldb * (jj + j) + l].d)); + for (int j = 0; j < RN; ++j) + for (int i = 0; i < RM; ++i) + C[ldc * (jj + j) + (ii + i)] = hsum(Cv[j][i]); } - C[ldc * j + i] = hsum(acc); - END_KERNEL() } inline int8x16_t load_lo(const block_q8_0 *b) { return vld1q_s8(b->qs); } + inline int8x16_t load_hi(const block_q8_0 *b) { return vld1q_s8(b->qs + 16); } @@ -644,6 +568,7 @@ class tinyBLAS_Q0_ARM { vdupq_n_u8(0x0f))), vdupq_n_s8(0x8)); } + inline int8x16_t load_hi(const block_q4_0 *b) { return vsubq_s8(vreinterpretq_s8_u8(vshrq_n_u8(vld1q_u8(b->qs), 4)), vdupq_n_s8(0x8)); @@ -679,217 +604,143 @@ class tinyBLAS_Q0_AVX2 { } private: - NOINLINE void mnpack(int m0, int m, int n0, int n) { + void mnpack(int m0, int m, int n0, int n) { int mc, nc, mp, np; - if (m - m0 <= 0 || n - n0 <= 0) - return; - if (m - m0 >= 4 && n - n0 >= 3) { + switch ((std::min(m - m0, 4) << 4) | std::min(n - n0, 4)) { +#if VECTOR_REGISTERS == 32 + case 0x44: + mc = 4; + nc = 4; + gemm<4, 4>(m0, m, n0, n); + break; + case 0x43: + mc = 4; + nc = 3; + gemm<4, 3>(m0, m, n0, n); + break; + case 0x34: + mc = 3; + nc = 4; + gemm<3, 4>(m0, m, n0, n); + break; + case 0x33: + mc = 3; + nc = 3; + gemm<3, 3>(m0, m, n0, n); + break; + case 0x42: mc = 4; + nc = 2; + gemm<4, 2>(m0, m, n0, n); + break; + case 0x24: + mc = 2; + nc = 4; + gemm<2, 4>(m0, m, n0, n); + break; +#else + case 0x44: + case 0x43: + case 0x42: + mc = 4; + nc = 2; + gemm<4, 2>(m0, m, n0, n); + break; + case 0x34: + case 0x24: + mc = 2; + nc = 4; + gemm<2, 4>(m0, m, n0, n); + break; + case 0x33: +#endif + case 0x32: + mc = 3; + nc = 2; + gemm<3, 2>(m0, m, n0, n); + break; + case 0x23: + mc = 2; nc = 3; - gemm4x3(m0, m, n0, n); - } else if (m - m0 >= 4 && n - n0 >= 1) { + gemm<2, 3>(m0, m, n0, n); + break; + case 0x41: mc = 4; nc = 1; - gemm4x1(m0, m, n0, n); - } else if (m - m0 >= 1 && n - n0 >= 4) { + gemm<4, 1>(m0, m, n0, n); + break; + case 0x22: + mc = 2; + nc = 2; + gemm<2, 2>(m0, m, n0, n); + break; + case 0x14: mc = 1; nc = 4; - gemm1x4(m0, m, n0, n); - } else { + gemm<1, 4>(m0, m, n0, n); + break; + case 0x31: + mc = 3; + nc = 1; + gemm<3, 1>(m0, m, n0, n); + break; + case 0x13: mc = 1; + nc = 3; + gemm<1, 3>(m0, m, n0, n); + break; + case 0x21: + mc = 2; nc = 1; - gemm1x1(m0, m, n0, n); + gemm<2, 1>(m0, m, n0, n); + break; + case 0x12: + mc = 1; + nc = 2; + gemm<1, 2>(m0, m, n0, n); + break; + case 0x11: + mc = 1; + nc = 1; + gemm<1, 1>(m0, m, n0, n); + break; + default: + return; } mp = m0 + (m - m0) / mc * mc; np = n0 + (n - n0) / nc * nc; mnpack(mp, m, n0, np); - mnpack(m0, mp, np, n); - mnpack(mp, m, np, n); - } - - NOINLINE void gemm4x3(int m0, int m, int n0, int n) { - BEGIN_KERNEL(4, 3) - __m256 c00 = _mm256_setzero_ps(); - __m256 c10 = _mm256_setzero_ps(); - __m256 c20 = _mm256_setzero_ps(); - __m256 c30 = _mm256_setzero_ps(); - __m256 c01 = _mm256_setzero_ps(); - __m256 c11 = _mm256_setzero_ps(); - __m256 c21 = _mm256_setzero_ps(); - __m256 c31 = _mm256_setzero_ps(); - __m256 c02 = _mm256_setzero_ps(); - __m256 c12 = _mm256_setzero_ps(); - __m256 c22 = _mm256_setzero_ps(); - __m256 c32 = _mm256_setzero_ps(); - const TA *Ap0 = A + lda * (i + 0); - const TA *Ap1 = A + lda * (i + 1); - const TA *Ap2 = A + lda * (i + 2); - const TA *Ap3 = A + lda 
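The `switch` dispatch used by both quantized classes above packs the clamped row and column headroom into one byte, high nibble rows, low nibble columns, so `0x32` literally reads "at least 3 rows and 2 columns available". A self-contained sketch of that key, assuming the clamp of 3 used on the ARM path:

```cpp
// Sketch of the mnpack dispatch key: (rows headroom << 4) | cols headroom,
// both clamped, so one switch can pick the biggest kernel that still fits.
#include <algorithm>
#include <cstdio>

static int dispatch_key(int m0, int m, int n0, int n) {
    return (std::min(m - m0, 3) << 4) | std::min(n - n0, 3);
}

int main() {
    printf("%#04x\n", dispatch_key(0, 7, 0, 2)); // 0x32 -> gemm<3, 2>
    printf("%#04x\n", dispatch_key(0, 1, 0, 9)); // 0x13 -> gemm<1, 3>
    printf("%#04x\n", dispatch_key(0, 0, 0, 9)); // 0x03 -> no case, recursion ends
}
```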
* (i + 3); - const TB *Bp0 = B + ldb * (j + 0); - const TB *Bp1 = B + ldb * (j + 1); - const TB *Bp2 = B + ldb * (j + 2); - for (int l = 0; l < k; ++l) { - float da0 = unhalf(Ap0[l].d); - float da1 = unhalf(Ap1[l].d); - float da2 = unhalf(Ap2[l].d); - float da3 = unhalf(Ap3[l].d); - __m256i e0 = load(Ap0 + l); - __m256i e1 = load(Ap1 + l); - __m256i e2 = load(Ap2 + l); - __m256i e3 = load(Ap3 + l); - float db0 = unhalf(Bp0[l].d); - __m256 d00 = _mm256_set1_ps(da0 * db0); - __m256 d10 = _mm256_set1_ps(da1 * db0); - __m256 d20 = _mm256_set1_ps(da2 * db0); - __m256 d30 = _mm256_set1_ps(da3 * db0); - __m256i f0 = load(Bp0 + l); - __m256i u0 = _mm256_sign_epi8(f0, f0); - __m256i s00 = _mm256_sign_epi8(e0, f0); - __m256i s10 = _mm256_sign_epi8(e1, f0); - __m256i s20 = _mm256_sign_epi8(e2, f0); - __m256i s30 = _mm256_sign_epi8(e3, f0); - c00 = madd(d00, updot(u0, s00), c00); - c10 = madd(d10, updot(u0, s10), c10); - c20 = madd(d20, updot(u0, s20), c20); - c30 = madd(d30, updot(u0, s30), c30); - float db1 = unhalf(Bp1[l].d); - __m256 d01 = _mm256_set1_ps(da0 * db1); - __m256 d11 = _mm256_set1_ps(da1 * db1); - __m256 d21 = _mm256_set1_ps(da2 * db1); - __m256 d31 = _mm256_set1_ps(da3 * db1); - __m256i f1 = load(Bp1 + l); - __m256i u1 = _mm256_sign_epi8(f1, f1); - __m256i s01 = _mm256_sign_epi8(e0, f1); - __m256i s11 = _mm256_sign_epi8(e1, f1); - __m256i s21 = _mm256_sign_epi8(e2, f1); - __m256i s31 = _mm256_sign_epi8(e3, f1); - c01 = madd(d01, updot(u1, s01), c01); - c11 = madd(d11, updot(u1, s11), c11); - c21 = madd(d21, updot(u1, s21), c21); - c31 = madd(d31, updot(u1, s31), c31); - float db2 = unhalf(Bp2[l].d); - __m256 d02 = _mm256_set1_ps(da0 * db2); - __m256 d12 = _mm256_set1_ps(da1 * db2); - __m256 d22 = _mm256_set1_ps(da2 * db2); - __m256 d32 = _mm256_set1_ps(da3 * db2); - __m256i f2 = load(Bp2 + l); - __m256i u2 = _mm256_sign_epi8(f2, f2); - __m256i s02 = _mm256_sign_epi8(e0, f2); - __m256i s12 = _mm256_sign_epi8(e1, f2); - __m256i s22 = _mm256_sign_epi8(e2, f2); - __m256i s32 = _mm256_sign_epi8(e3, f2); - c02 = madd(d02, updot(u2, s02), c02); - c12 = madd(d12, updot(u2, s12), c12); - c22 = madd(d22, updot(u2, s22), c22); - c32 = madd(d32, updot(u2, s32), c32); - } - C[ldc * (j + 0) + (i + 0)] = hsum(c00); - C[ldc * (j + 0) + (i + 1)] = hsum(c10); - C[ldc * (j + 0) + (i + 2)] = hsum(c20); - C[ldc * (j + 0) + (i + 3)] = hsum(c30); - C[ldc * (j + 1) + (i + 0)] = hsum(c01); - C[ldc * (j + 1) + (i + 1)] = hsum(c11); - C[ldc * (j + 1) + (i + 2)] = hsum(c21); - C[ldc * (j + 1) + (i + 3)] = hsum(c31); - C[ldc * (j + 2) + (i + 0)] = hsum(c02); - C[ldc * (j + 2) + (i + 1)] = hsum(c12); - C[ldc * (j + 2) + (i + 2)] = hsum(c22); - C[ldc * (j + 2) + (i + 3)] = hsum(c32); - END_KERNEL() - } - - NOINLINE void gemm4x1(int m0, int m, int n0, int n) { - BEGIN_KERNEL(4, 1) - __m256 c0 = _mm256_setzero_ps(); - __m256 c1 = _mm256_setzero_ps(); - __m256 c2 = _mm256_setzero_ps(); - __m256 c3 = _mm256_setzero_ps(); - const TA *Ap0 = A + lda * (i + 0); - const TA *Ap1 = A + lda * (i + 1); - const TA *Ap2 = A + lda * (i + 2); - const TA *Ap3 = A + lda * (i + 3); - const TB *Bp = B + ldb * j; - for (int l = 0; l < k; ++l) { - float db0 = unhalf(Bp[l].d); - __m256i f = load(Bp + l); - __m256i u = _mm256_sign_epi8(f, f); - __m256 d0 = _mm256_set1_ps(unhalf(Ap0[l].d) * db0); - __m256 d1 = _mm256_set1_ps(unhalf(Ap1[l].d) * db0); - __m256 d2 = _mm256_set1_ps(unhalf(Ap2[l].d) * db0); - __m256 d3 = _mm256_set1_ps(unhalf(Ap3[l].d) * db0); - __m256i e0 = load(Ap0 + l); - __m256i e1 = load(Ap1 + l); - __m256i e2 = load(Ap2 + 
l); - __m256i e3 = load(Ap3 + l); - __m256i s0 = _mm256_sign_epi8(e0, f); - __m256i s1 = _mm256_sign_epi8(e1, f); - __m256i s2 = _mm256_sign_epi8(e2, f); - __m256i s3 = _mm256_sign_epi8(e3, f); - __m256 g0 = updot(u, s0); - __m256 g1 = updot(u, s1); - __m256 g2 = updot(u, s2); - __m256 g3 = updot(u, s3); - c0 = madd(d0, g0, c0); - c1 = madd(d1, g1, c1); - c2 = madd(d2, g2, c2); - c3 = madd(d3, g3, c3); - } - C[ldc * j + (i + 0)] = hsum(c0); - C[ldc * j + (i + 1)] = hsum(c1); - C[ldc * j + (i + 2)] = hsum(c2); - C[ldc * j + (i + 3)] = hsum(c3); - END_KERNEL() - } - - NOINLINE void gemm1x4(int m0, int m, int n0, int n) { - BEGIN_KERNEL(1, 4) - __m256 c0 = _mm256_setzero_ps(); - __m256 c1 = _mm256_setzero_ps(); - __m256 c2 = _mm256_setzero_ps(); - __m256 c3 = _mm256_setzero_ps(); - const TB *Bp0 = B + ldb * (j + 0); - const TB *Bp1 = B + ldb * (j + 1); - const TB *Bp2 = B + ldb * (j + 2); - const TB *Bp3 = B + ldb * (j + 3); - const TA *Ap = A + lda * i; - for (int l = 0; l < k; ++l) { - float da0 = unhalf(Ap[l].d); - __m256i f = load(Ap + l); - __m256i u = _mm256_sign_epi8(f, f); - __m256 d0 = _mm256_set1_ps(unhalf(Bp0[l].d) * da0); - __m256 d1 = _mm256_set1_ps(unhalf(Bp1[l].d) * da0); - __m256 d2 = _mm256_set1_ps(unhalf(Bp2[l].d) * da0); - __m256 d3 = _mm256_set1_ps(unhalf(Bp3[l].d) * da0); - __m256 g0 = updot(u, _mm256_sign_epi8(load(Bp0 + l), f)); - __m256 g1 = updot(u, _mm256_sign_epi8(load(Bp1 + l), f)); - __m256 g2 = updot(u, _mm256_sign_epi8(load(Bp2 + l), f)); - __m256 g3 = updot(u, _mm256_sign_epi8(load(Bp3 + l), f)); - c0 = madd(d0, g0, c0); - c1 = madd(d1, g1, c1); - c2 = madd(d2, g2, c2); - c3 = madd(d3, g3, c3); - } - C[ldc * (j + 0) + i] = hsum(c0); - C[ldc * (j + 1) + i] = hsum(c1); - C[ldc * (j + 2) + i] = hsum(c2); - C[ldc * (j + 3) + i] = hsum(c3); - END_KERNEL() - } - - NOINLINE void gemm1x1(int m0, int m, int n0, int n) { - BEGIN_KERNEL(1, 1) - __m256 c = _mm256_setzero_ps(); - const TA *Ap = A + lda * i; - const TB *Bp = B + ldb * j; - for (int l = 0; l < k; ++l) { - __m256 d = _mm256_set1_ps(unhalf(Ap[l].d) * unhalf(Bp[l].d)); - __m256i e = load(Ap + l); - __m256i f = load(Bp + l); - __m256 g = updot(_mm256_sign_epi8(e, e), _mm256_sign_epi8(f, e)); - c = madd(d, g, c); + mnpack(m0, m, np, n); + } + + template <int RM, int RN> + NOINLINE void gemm(int m0, int m, int n0, int n) { + int ytiles = (m - m0) / RM; + int xtiles = (n - n0) / RN; + int tiles = xtiles * ytiles; + int duty = (tiles + nth - 1) / nth; + int start = duty * ith; + int end = start + duty; + if (end > tiles) + end = tiles; + for (int job = start; job < end; ++job) { + int ii = m0 + job / xtiles * RM; + int jj = n0 + job % xtiles * RN; + __m256 Cv[RN][RM] = {}; + for (int l = 0; l < k; ++l) + for (int j = 0; j < RN; ++j) + for (int i = 0; i < RM; ++i) + Cv[j][i] = madd(_mm256_set1_ps(unhalf(A[lda * (ii + i) + l].d) * + unhalf(B[ldb * (jj + j) + l].d)), + updot(_mm256_sign_epi8(load(A + lda * (ii + i) + l), + load(A + lda * (ii + i) + l)), + _mm256_sign_epi8(load(B + ldb * (jj + j) + l), + load(A + lda * (ii + i) + l))), + Cv[j][i]); + for (int j = 0; j < RN; ++j) + for (int i = 0; i < RM; ++i) + C[ldc * (jj + j) + (ii + i)] = hsum(Cv[j][i]); } - C[ldc * j + i] = hsum(c); - END_KERNEL() } inline __m256i load(const block_q8_0 *b) { @@ -911,10 +762,10 @@ class tinyBLAS_Q0_AVX2 { } static inline __m256i denibble(const uint8_t *p) { - const __m128i tmp = _mm_loadu_si128((const __m128i *)p); - const __m256i bytes = MM256_SET_M128I(_mm_srli_epi16(tmp, 4), tmp); - const __m256i lowMask = _mm256_set1_epi8(15); - return 
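A note on the `_mm256_sign_epi8` pair in the new AVX2 kernel above: `sign(a, a)` yields `|a|` and `sign(b, a)` flips `b` wherever `a` was negative, so the product of the two equals `a * b` lane by lane while keeping the first operand non-negative, which is what the unsigned-by-signed multiply behind `updot` (elsewhere in this file it sits on `_mm256_maddubs_epi16`, or the VNNI dot product where available) requires. A scalar model of the identity:

```cpp
// Scalar model of the sign trick: a*b == sign(a,a) * sign(b,a) per int8 lane.
// sign_like(x, s) negates x when s < 0 and zeroes it when s == 0, mirroring
// _mm256_sign_epi8; the a == -128 lane still works because the hardware
// consumes sign(a,a) as an unsigned byte (|−128| reads as 128).
#include <cassert>

static int sign_like(int v, int s) { return s > 0 ? v : s < 0 ? -v : 0; }

int main() {
    for (int a = -128; a < 128; ++a)
        for (int b = -128; b < 128; ++b)
            assert(a * b == sign_like(a, a) * sign_like(b, a));
    return 0;
}
```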
_mm256_and_si256(lowMask, bytes); + __m128i x = _mm_loadu_si128((const __m128i *)p); + return _mm256_and_si256(_mm256_set1_epi8(15), + _mm256_insertf128_si256(_mm256_castsi128_si256(x), + _mm_srli_epi16(x, 4), 1)); } const TA *const A; From 4e96a812b3ce7322a29a3008db2ed73d9087b176 Mon Sep 17 00:00:00 2001 From: Anas Ahouzi <112881240+aahouzi@users.noreply.github.com> Date: Tue, 23 Apr 2024 02:53:18 +0200 Subject: [PATCH 020/101] [SYCL] Windows default build instructions without -DLLAMA_SYCL_F16 flag activated (#6767) * Fix FP32/FP16 build instructions * Fix typo * Recommended build instruction Co-authored-by: Neo Zhang Jianyu * Recommended build instruction Co-authored-by: Neo Zhang Jianyu * Recommended build instruction Co-authored-by: Neo Zhang Jianyu * Add comments in Intel GPU linux --------- Co-authored-by: Anas Ahouzi <112881240+aahouzi-intel@users.noreply.github.com> Co-authored-by: Neo Zhang Jianyu --- README-sycl.md | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/README-sycl.md b/README-sycl.md index 2aa46507094dc..dc98c7b3ebd9c 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -229,12 +229,12 @@ source /opt/intel/oneapi/setvars.sh # Build LLAMA with MKL BLAS acceleration for intel GPU mkdir -p build && cd build -# Option 1: Use FP16 for better performance in long-prompt inference -#cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON - -# Option 2: Use FP32 by default +# Option 1: Use FP32 (recommended for better performance in most cases) cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx +# Option 2: Use FP16 +cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON + #build all binary cmake --build . --config Release -j -v ``` @@ -250,12 +250,12 @@ export CPLUS_INCLUDE_DIR=/path/to/oneMKL/include:$CPLUS_INCLUDE_DIR # Build LLAMA with Nvidia BLAS acceleration through SYCL mkdir -p build && cd build -# Option 1: Use FP16 for better performance in long-prompt inference -cmake .. -DLLAMA_SYCL=ON -DLLAMA_SYCL_TARGET=NVIDIA -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON - -# Option 2: Use FP32 by default +# Option 1: Use FP32 (recommended for better performance in most cases) cmake .. -DLLAMA_SYCL=ON -DLLAMA_SYCL_TARGET=NVIDIA -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx +# Option 2: Use FP16 +cmake .. -DLLAMA_SYCL=ON -DLLAMA_SYCL_TARGET=NVIDIA -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON + #build all binary cmake --build . --config Release -j -v @@ -416,6 +416,10 @@ mkdir -p build cd build @call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force +# Option 1: Use FP32 (recommended for better performance in most cases) +cmake -G "MinGW Makefiles" .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release + +# Option 2: Or FP16 cmake -G "MinGW Makefiles" .. 
-DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release -DLLAMA_SYCL_F16=ON make -j From f449da3ba93c0addd76614051acf48f80d970aa0 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Tue, 23 Apr 2024 18:26:16 +0800 Subject: [PATCH 021/101] add additional warning for skiplauncher --- koboldcpp.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/koboldcpp.py b/koboldcpp.py index 9f504f7333657..51c199048b6a4 100644 --- a/koboldcpp.py +++ b/koboldcpp.py @@ -2862,6 +2862,8 @@ def main(launch_args,start_server=True): exitcounter = 999 ermsg = "Reason: " + str(ex) + "\nFile selection GUI unsupported.\ncustomtkinter python module required!\nPlease check command line: script.py --help" show_gui_msgbox("Warning, GUI failed to start",ermsg) + if args.skiplauncher: + print(f"Note: In order to use --skiplauncher, you need to specify a model with --model") time.sleep(3) sys.exit(2) From 583981ede0f6888b22d0b04231861ef2ec0e635c Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Tue, 23 Apr 2024 19:03:27 +0800 Subject: [PATCH 022/101] try to fix makefile for llava cublas --- Makefile | 65 +++++++++++++++++++++++++++++++------------------------- 1 file changed, 36 insertions(+), 29 deletions(-) diff --git a/Makefile b/Makefile index a41228637198c..c282eb6f3000c 100644 --- a/Makefile +++ b/Makefile @@ -62,10 +62,9 @@ endif CUBLASLD_FLAGS = CUBLAS_OBJS = -OBJS_PARTIAL += ggml-alloc.o ggml-backend.o ggml-quants.o unicode.o unicode-data.o sgemm.o common.o grammar-parser.o -OBJS_FULL += $(OBJS_PARTIAL) llava.o llavaclip.o -OBJS_SIMPLE += ggml-alloc.o ggml-backend.o ggml-quants_noavx2.o unicode.o unicode-data.o sgemm_noavx2.o llava.o llavaclip.o common.o grammar-parser.o -OBJS_FAILSAFE += ggml-alloc.o ggml-backend.o ggml-quants_failsafe.o unicode.o unicode-data.o sgemm_failsafe.o llava.o llavaclip.o common.o grammar-parser.o +OBJS_FULL += ggml-alloc.o ggml-quants.o unicode.o unicode-data.o sgemm.o common.o grammar-parser.o +OBJS_SIMPLE += ggml-alloc.o ggml-quants_noavx2.o unicode.o unicode-data.o sgemm_noavx2.o common.o grammar-parser.o +OBJS_FAILSAFE += ggml-alloc.o ggml-quants_failsafe.o unicode.o unicode-data.o sgemm_failsafe.o common.o grammar-parser.o #lets try enabling everything CFLAGS += -pthread -s -Wno-deprecated -Wno-deprecated-declarations @@ -422,17 +421,25 @@ sgemm_failsafe.o: sgemm.cpp sgemm.h ggml.h #there's no intrinsics or special gpu ops used here, so we can have a universal object ggml-alloc.o: ggml-alloc.c ggml.h ggml-alloc.h $(CC) $(CFLAGS) -c $< -o $@ -ggml-backend.o: ggml-backend.c ggml.h ggml-backend.h - $(CC) $(CFLAGS) -c $< -o $@ llava.o: examples/llava/llava.cpp examples/llava/llava.h $(CXX) $(CXXFLAGS) -c $< -o $@ -llavaclip.o: examples/llava/clip.cpp examples/llava/clip.h - $(CXX) $(CXXFLAGS) -c $< -o $@ unicode.o: unicode.cpp unicode.h $(CXX) $(CXXFLAGS) -c $< -o $@ unicode-data.o: unicode-data.cpp unicode-data.h $(CXX) $(CXXFLAGS) -c $< -o $@ +#these have special gpu defines +ggml-backend_default.o: ggml-backend.c ggml.h ggml-backend.h + $(CC) $(CFLAGS) -c $< -o $@ +ggml-backend_vulkan.o: ggml-backend.c ggml.h ggml-backend.h + $(CC) $(CFLAGS) $(VULKAN_FLAGS) -c $< -o $@ +ggml-backend_cublas.o: ggml-backend.c ggml.h ggml-backend.h + $(CC) $(CFLAGS) $(CUBLAS_FLAGS) -c $< -o $@ +llavaclip_default.o: examples/llava/clip.cpp examples/llava/clip.h + $(CXX) $(CXXFLAGS) -c $< -o $@ +llavaclip_cublas.o: examples/llava/clip.cpp examples/llava/clip.h + $(CXX) $(CXXFLAGS) 
$(CUBLAS_FLAGS) -c $< -o $@ + #version 3 libs ggml_v3.o: otherarch/ggml_v3.c otherarch/ggml_v3.h $(CC) $(FASTCFLAGS) $(FULLCFLAGS) -c $< -o $@ @@ -525,25 +532,25 @@ clean: rm -vrf ggml-cuda/*.o # useful tools -main: examples/main/main.cpp common/sampling.cpp build-info.h ggml.o llama.o console.o $(OBJS_FULL) $(OBJS) +main: examples/main/main.cpp common/sampling.cpp build-info.h ggml.o llama.o console.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) @echo '==== Run ./main -h for help. ====' -sdmain: otherarch/sdcpp/util.cpp otherarch/sdcpp/main.cpp otherarch/sdcpp/stable-diffusion.cpp otherarch/sdcpp/upscaler.cpp otherarch/sdcpp/model.cpp otherarch/sdcpp/thirdparty/zip.c build-info.h ggml.o llama.o console.o $(OBJS_PARTIAL) $(OBJS) +sdmain: otherarch/sdcpp/util.cpp otherarch/sdcpp/main.cpp otherarch/sdcpp/stable-diffusion.cpp otherarch/sdcpp/upscaler.cpp otherarch/sdcpp/model.cpp otherarch/sdcpp/thirdparty/zip.c build-info.h ggml.o llama.o console.o ggml-backend_default.o $(OBJS_FULL) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -imatrix: examples/imatrix/imatrix.cpp common/sampling.cpp build-info.h ggml.o llama.o console.o $(OBJS_FULL) $(OBJS) +imatrix: examples/imatrix/imatrix.cpp common/sampling.cpp build-info.h ggml.o llama.o console.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -gguf: examples/gguf/gguf.cpp build-info.h ggml.o llama.o $(OBJS_FULL) $(OBJS) +gguf: examples/gguf/gguf.cpp build-info.h ggml.o llama.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -gguf-split: examples/gguf-split/gguf-split.cpp ggml.o llama.o common/sampling.cpp build-info.h $(OBJS_FULL) $(OBJS) +gguf-split: examples/gguf-split/gguf-split.cpp ggml.o llama.o common/sampling.cpp build-info.h llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) #generated libraries -koboldcpp_default: ggml.o ggml_v3.o ggml_v2.o ggml_v1.o expose.o gpttype_adapter.o sdcpp_default.o $(OBJS_FULL) $(OBJS) +koboldcpp_default: ggml.o ggml_v3.o ggml_v2.o ggml_v1.o expose.o gpttype_adapter.o sdcpp_default.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(OBJS) $(DEFAULT_BUILD) ifdef OPENBLAS_BUILD -koboldcpp_openblas: ggml_v4_openblas.o ggml_v3_openblas.o ggml_v2_openblas.o ggml_v1.o expose.o gpttype_adapter.o sdcpp_default.o $(OBJS_FULL) $(OBJS) +koboldcpp_openblas: ggml_v4_openblas.o ggml_v3_openblas.o ggml_v2_openblas.o ggml_v1.o expose.o gpttype_adapter.o sdcpp_default.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(OBJS) $(OPENBLAS_BUILD) else koboldcpp_openblas: @@ -551,7 +558,7 @@ koboldcpp_openblas: endif ifdef FAILSAFE_BUILD -koboldcpp_failsafe: ggml_v4_failsafe.o ggml_v3_failsafe.o ggml_v2_failsafe.o ggml_v1_failsafe.o expose.o gpttype_adapter_failsafe.o sdcpp_default.o $(OBJS_FAILSAFE) $(OBJS) +koboldcpp_failsafe: ggml_v4_failsafe.o ggml_v3_failsafe.o ggml_v2_failsafe.o ggml_v1_failsafe.o expose.o gpttype_adapter_failsafe.o sdcpp_default.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FAILSAFE) $(OBJS) $(FAILSAFE_BUILD) else koboldcpp_failsafe: @@ -559,7 +566,7 @@ koboldcpp_failsafe: endif ifdef NOAVX2_BUILD -koboldcpp_noavx2: ggml_v4_noavx2.o ggml_v3_noavx2.o ggml_v2_noavx2.o ggml_v1_failsafe.o expose.o gpttype_adapter_failsafe.o 
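The new `ggml-backend_default.o` / `ggml-backend_vulkan.o` / `ggml-backend_cublas.o` and `llavaclip_*` objects above exist because one translation unit compiled under different GPU defines produces genuinely different code, so each final target must link the object flavor that matches its backend. A toy illustration of the pattern (hypothetical file name and `main`, not the real `ggml-backend.c` contents; the `GGML_USE_*` defines are what the Makefile's `CUBLAS_FLAGS`/`VULKAN_FLAGS` are assumed to pass):

```cpp
// backend_probe.cpp: built three times, once per flavor, e.g.
//   c++ -c backend_probe.cpp -o probe_default.o
//   c++ -DGGML_USE_CUBLAS -c backend_probe.cpp -o probe_cublas.o
//   c++ -DGGML_USE_VULKAN -c backend_probe.cpp -o probe_vulkan.o
// Linking the wrong flavor into a GPU build is exactly what the rule split avoids.
#include <cstdio>

static const char * backend_name() {
#if defined(GGML_USE_CUBLAS)
    return "cublas";
#elif defined(GGML_USE_VULKAN)
    return "vulkan";
#else
    return "cpu";
#endif
}

int main() { printf("compiled-in backend: %s\n", backend_name()); }
```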
sdcpp_default.o $(OBJS_SIMPLE) $(OBJS) +koboldcpp_noavx2: ggml_v4_noavx2.o ggml_v3_noavx2.o ggml_v2_noavx2.o ggml_v1_failsafe.o expose.o gpttype_adapter_failsafe.o sdcpp_default.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_SIMPLE) $(OBJS) $(NOAVX2_BUILD) else koboldcpp_noavx2: @@ -567,10 +574,10 @@ koboldcpp_noavx2: endif ifdef CLBLAST_BUILD -koboldcpp_clblast: ggml_v4_clblast.o ggml_v3_clblast.o ggml_v2_clblast.o ggml_v1.o expose.o gpttype_adapter_clblast.o ggml-opencl.o ggml_v3-opencl.o ggml_v2-opencl.o ggml_v2-opencl-legacy.o sdcpp_default.o $(OBJS_FULL) $(OBJS) +koboldcpp_clblast: ggml_v4_clblast.o ggml_v3_clblast.o ggml_v2_clblast.o ggml_v1.o expose.o gpttype_adapter_clblast.o ggml-opencl.o ggml_v3-opencl.o ggml_v2-opencl.o ggml_v2-opencl-legacy.o sdcpp_default.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(OBJS) $(CLBLAST_BUILD) ifdef NOAVX2_BUILD -koboldcpp_clblast_noavx2: ggml_v4_clblast_noavx2.o ggml_v3_clblast_noavx2.o ggml_v2_clblast_noavx2.o ggml_v1_failsafe.o expose.o gpttype_adapter_clblast_noavx2.o ggml-opencl.o ggml_v3-opencl.o ggml_v2-opencl.o ggml_v2-opencl-legacy.o sdcpp_default.o $(OBJS_SIMPLE) $(OBJS) +koboldcpp_clblast_noavx2: ggml_v4_clblast_noavx2.o ggml_v3_clblast_noavx2.o ggml_v2_clblast_noavx2.o ggml_v1_failsafe.o expose.o gpttype_adapter_clblast_noavx2.o ggml-opencl.o ggml_v3-opencl.o ggml_v2-opencl.o ggml_v2-opencl-legacy.o sdcpp_default.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_SIMPLE) $(OBJS) $(CLBLAST_BUILD) else koboldcpp_clblast_noavx2: @@ -584,7 +591,7 @@ koboldcpp_clblast_noavx2: endif ifdef CUBLAS_BUILD -koboldcpp_cublas: ggml_v4_cublas.o ggml_v3_cublas.o ggml_v2_cublas.o ggml_v1.o expose.o gpttype_adapter_cublas.o sdcpp_cublas.o $(CUBLAS_OBJS) $(OBJS_FULL) $(OBJS) +koboldcpp_cublas: ggml_v4_cublas.o ggml_v3_cublas.o ggml_v2_cublas.o ggml_v1.o expose.o gpttype_adapter_cublas.o sdcpp_cublas.o llavaclip_cublas.o llava.o ggml-backend_cublas.o $(CUBLAS_OBJS) $(OBJS_FULL) $(OBJS) $(CUBLAS_BUILD) else koboldcpp_cublas: @@ -592,7 +599,7 @@ koboldcpp_cublas: endif ifdef HIPBLAS_BUILD -koboldcpp_hipblas: ggml_v4_cublas.o ggml_v3_cublas.o ggml_v2_cublas.o ggml_v1.o expose.o gpttype_adapter_cublas.o sdcpp_cublas.o $(HIP_OBJS) $(OBJS_FULL) $(OBJS) +koboldcpp_hipblas: ggml_v4_cublas.o ggml_v3_cublas.o ggml_v2_cublas.o ggml_v1.o expose.o gpttype_adapter_cublas.o sdcpp_cublas.o llavaclip_cublas.o llava.o ggml-backend_cublas.o $(HIP_OBJS) $(OBJS_FULL) $(OBJS) $(HIPBLAS_BUILD) else koboldcpp_hipblas: @@ -600,10 +607,10 @@ koboldcpp_hipblas: endif ifdef VULKAN_BUILD -koboldcpp_vulkan: ggml_v4_vulkan.o ggml_v3.o ggml_v2.o ggml_v1.o expose.o gpttype_adapter_vulkan.o ggml-vulkan.o sdcpp_default.o $(OBJS_FULL) $(OBJS) +koboldcpp_vulkan: ggml_v4_vulkan.o ggml_v3.o ggml_v2.o ggml_v1.o expose.o gpttype_adapter_vulkan.o ggml-vulkan.o sdcpp_default.o llavaclip_default.o llava.o ggml-backend_vulkan.o $(OBJS_FULL) $(OBJS) $(VULKAN_BUILD) ifdef NOAVX2_BUILD -koboldcpp_vulkan_noavx2: ggml_v4_vulkan_noavx2.o ggml_v3_noavx2.o ggml_v2_noavx2.o ggml_v1_failsafe.o expose.o gpttype_adapter_vulkan_noavx2.o ggml-vulkan.o sdcpp_default.o $(OBJS_SIMPLE) $(OBJS) +koboldcpp_vulkan_noavx2: ggml_v4_vulkan_noavx2.o ggml_v3_noavx2.o ggml_v2_noavx2.o ggml_v1_failsafe.o expose.o gpttype_adapter_vulkan_noavx2.o ggml-vulkan.o sdcpp_default.o llavaclip_default.o llava.o ggml-backend_vulkan.o $(OBJS_SIMPLE) $(OBJS) $(VULKAN_BUILD) else koboldcpp_vulkan_noavx2: @@ -617,17 +624,17 @@ koboldcpp_vulkan_noavx2: endif # tools -quantize_gguf: 
examples/quantize/quantize.cpp ggml.o llama.o $(OBJS_FULL) +quantize_gguf: examples/quantize/quantize.cpp ggml.o llama.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -quantize_gptj: otherarch/tools/gptj_quantize.cpp otherarch/tools/common-ggml.cpp ggml_v3.o ggml.o llama.o $(OBJS_FULL) +quantize_gptj: otherarch/tools/gptj_quantize.cpp otherarch/tools/common-ggml.cpp ggml_v3.o ggml.o llama.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -quantize_gpt2: otherarch/tools/gpt2_quantize.cpp otherarch/tools/common-ggml.cpp ggml_v3.o ggml.o llama.o $(OBJS_FULL) +quantize_gpt2: otherarch/tools/gpt2_quantize.cpp otherarch/tools/common-ggml.cpp ggml_v3.o ggml.o llama.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -quantize_neox: otherarch/tools/neox_quantize.cpp otherarch/tools/common-ggml.cpp ggml_v3.o ggml.o llama.o $(OBJS_FULL) +quantize_neox: otherarch/tools/neox_quantize.cpp otherarch/tools/common-ggml.cpp ggml_v3.o ggml.o llama.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -quantize_mpt: otherarch/tools/mpt_quantize.cpp otherarch/tools/common-ggml.cpp ggml_v3.o ggml.o llama.o $(OBJS_FULL) +quantize_mpt: otherarch/tools/mpt_quantize.cpp otherarch/tools/common-ggml.cpp ggml_v3.o ggml.o llama.o llavaclip_default.o llava.o ggml-backend_default.o $(OBJS_FULL) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -quantize_clip: examples/llava/clip.cpp examples/llava/clip.h examples/llava/quantclip.cpp ggml_v3.o ggml.o llama.o $(OBJS_PARTIAL) +quantize_clip: examples/llava/clip.cpp examples/llava/clip.h examples/llava/quantclip.cpp ggml_v3.o ggml.o llama.o ggml-backend_default.o $(OBJS_FULL) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) #window simple clinfo From c8297c6af5693555652c40b95974b95d49d2674d Mon Sep 17 00:00:00 2001 From: liuwei-git <14815172+liuwei-git@users.noreply.github.com> Date: Wed, 24 Apr 2024 15:00:37 +0800 Subject: [PATCH 023/101] llama : add phi3 support (#6852) * add explicit phi3 support * add explicit phi3 support * remove unused code * convert : add BOS token * llama : match EOT token <|end|> * llama : minor / style * llama : tabs -> spaces * convert : fix lint checks --------- Co-authored-by: Georgi Gerganov --- convert-hf-to-gguf.py | 85 +++++++++++++++ gguf-py/gguf/constants.py | 16 +++ gguf-py/gguf/tensor_mapping.py | 2 + llama.cpp | 192 ++++++++++++++++++++++++++++++++- 4 files changed, 294 insertions(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 4fd916cba3ed7..4ace13eb63149 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1979,6 +1979,91 @@ def set_gguf_parameters(self): self.gguf_writer.add_add_bos_token(False) +@Model.register("Phi3ForCausalLM") +class Phi3MiniModel(Model): + model_arch = gguf.MODEL_ARCH.PHI3 + + def set_vocab(self): + from sentencepiece import SentencePieceProcessor + + tokenizer_path = self.dir_model / 'tokenizer.model' + + if not tokenizer_path.is_file(): + print(f'Error: Missing {tokenizer_path}', file=sys.stderr) + sys.exit(1) + + tokenizer = SentencePieceProcessor(str(tokenizer_path)) + + vocab_size = self.hparams.get('vocab_size', tokenizer.vocab_size()) + + tokens: list[bytes] = [f"[PAD{i}]".encode("utf-8") for i in range(vocab_size)] + scores: list[float] = [-10000.0] * vocab_size + toktypes: list[int] = [SentencePieceTokenTypes.UNKNOWN] * vocab_size + + for token_id in 
range(tokenizer.vocab_size()): + + piece = tokenizer.id_to_piece(token_id) + text = piece.encode("utf-8") + score = tokenizer.get_score(token_id) + + toktype = SentencePieceTokenTypes.NORMAL + if tokenizer.is_unknown(token_id): + toktype = SentencePieceTokenTypes.UNKNOWN + elif tokenizer.is_control(token_id): + toktype = SentencePieceTokenTypes.CONTROL + elif tokenizer.is_unused(token_id): + toktype = SentencePieceTokenTypes.UNUSED + elif tokenizer.is_byte(token_id): + toktype = SentencePieceTokenTypes.BYTE + + tokens[token_id] = text + scores[token_id] = score + toktypes[token_id] = toktype + + added_tokens_file = self.dir_model / 'added_tokens.json' + if added_tokens_file.is_file(): + with open(added_tokens_file, "r", encoding="utf-8") as f: + added_tokens_json = json.load(f) + + for key in added_tokens_json: + token_id = added_tokens_json[key] + if (token_id >= vocab_size): + print(f'ignore token {token_id}: id is out of range, max={vocab_size - 1}') + continue + + tokens[token_id] = key.encode("utf-8") + scores[token_id] = -1000.0 + toktypes[token_id] = SentencePieceTokenTypes.USER_DEFINED + + self.gguf_writer.add_tokenizer_model("llama") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_scores(scores) + self.gguf_writer.add_token_types(toktypes) + + special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + special_vocab.add_to_gguf(self.gguf_writer) + + def set_gguf_parameters(self): + block_count = self.find_hparam(["num_hidden_layers", "n_layer"]) + + rot_pct = 1.0 + n_embd = self.find_hparam(["hidden_size", "n_embd"]) + n_head = self.find_hparam(["num_attention_heads", "n_head"]) + rms_eps = self.find_hparam(["rms_norm_eps"]) + + self.gguf_writer.add_name("Phi3") + self.gguf_writer.add_context_length(self.find_hparam(["n_positions", "max_position_embeddings"])) + + self.gguf_writer.add_embedding_length(n_embd) + self.gguf_writer.add_feed_forward_length(8192) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_head_count(n_head) + self.gguf_writer.add_head_count_kv(n_head) + self.gguf_writer.add_layer_norm_rms_eps(rms_eps) + self.gguf_writer.add_rope_dimension_count(int(rot_pct * n_embd) // n_head) + self.gguf_writer.add_file_type(self.ftype) + + @Model.register("PlamoForCausalLM") class PlamoModel(Model): model_arch = gguf.MODEL_ARCH.PLAMO diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 06cb26a7d495b..d2f1de19831e3 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -124,6 +124,7 @@ class MODEL_ARCH(IntEnum): QWEN2 = auto() QWEN2MOE = auto() PHI2 = auto() + PHI3 = auto() PLAMO = auto() CODESHELL = auto() ORION = auto() @@ -200,6 +201,7 @@ class MODEL_TENSOR(IntEnum): MODEL_ARCH.QWEN2: "qwen2", MODEL_ARCH.QWEN2MOE: "qwen2moe", MODEL_ARCH.PHI2: "phi2", + MODEL_ARCH.PHI3: "phi3", MODEL_ARCH.PLAMO: "plamo", MODEL_ARCH.CODESHELL: "codeshell", MODEL_ARCH.ORION: "orion", @@ -550,6 +552,20 @@ class MODEL_TENSOR(IntEnum): MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.PHI3: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], MODEL_ARCH.CODESHELL: [ MODEL_TENSOR.TOKEN_EMBD, MODEL_TENSOR.POS_EMBD, diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 10de36fa883ee..e5750d4191f6b 100644 --- 
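As a quick sanity check on `set_gguf_parameters` above, assuming the published Phi-3-mini config (hidden_size 3072, 32 attention heads): `rot_pct` is 1.0, so

    rope_dimension_count = int(1.0 * 3072) // 32 = 96

i.e. RoPE spans the full 96-wide attention head, consistent with the NEOX rope type this patch assigns to PHI3 further down, and `n_layer == 32` maps to `MODEL_3B` in the hparams switch below.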
a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -117,6 +117,7 @@ class TensorNameMap: "h.{bid}.attn.c_attn", # gpt2 "transformer.h.{bid}.mixer.Wqkv", # phi2 "encoder.layers.{bid}.attn.Wqkv", # nomic-bert + "model.layers.{bid}.self_attn.qkv_proj" # phi3 ), # Attention query @@ -234,6 +235,7 @@ class TensorNameMap: "h.{bid}.mlp.c_fc", # gpt2 "transformer.h.{bid}.mlp.fc1", # phi2 "model.layers.{bid}.mlp.fc1", # phi2 + "model.layers.{bid}.mlp.gate_up_proj", # phi3 "model.layers.layers.{bid}.mlp.up_proj", # plamo "model.layers.{bid}.feed_forward.w3", # internlm2 "encoder.layers.{bid}.mlp.fc11", # nomic-bert diff --git a/llama.cpp b/llama.cpp index a25d115c1d82a..30fe190373b43 100644 --- a/llama.cpp +++ b/llama.cpp @@ -211,6 +211,7 @@ enum llm_arch { LLM_ARCH_QWEN2, LLM_ARCH_QWEN2MOE, LLM_ARCH_PHI2, + LLM_ARCH_PHI3, LLM_ARCH_PLAMO, LLM_ARCH_CODESHELL, LLM_ARCH_ORION, @@ -246,6 +247,7 @@ static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = { { LLM_ARCH_QWEN2, "qwen2" }, { LLM_ARCH_QWEN2MOE, "qwen2moe" }, { LLM_ARCH_PHI2, "phi2" }, + { LLM_ARCH_PHI3, "phi3" }, { LLM_ARCH_PLAMO, "plamo" }, { LLM_ARCH_CODESHELL, "codeshell" }, { LLM_ARCH_ORION, "orion" }, @@ -793,6 +795,23 @@ static const std::map<llm_arch, std::map<llm_tensor, std::string>> LLM_TENSOR_NA { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_PHI3, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_PLAMO, { @@ -3955,6 +3974,16 @@ static void llm_load_hparams( { ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + switch (hparams.n_layer) { + case 24: model.type = e_model::MODEL_1B; break; + case 32: model.type = e_model::MODEL_3B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; + case LLM_ARCH_PHI3: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + switch (hparams.n_layer) { case 24: model.type = e_model::MODEL_1B; break; case 32: model.type = e_model::MODEL_3B; break; @@ -4352,6 +4381,7 @@ static void llm_load_vocab( //vocab.id_to_token[t.second].type == LLAMA_TOKEN_TYPE_CONTROL && (t.first == "<|eot_id|>" || t.first == "<|im_end|>" || + t.first == "<|end|>" || t.first == "<end_of_turn>" ) ) { @@ -5375,6 +5405,33 @@ static bool llm_load_tensors( layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); } } break; + case LLM_ARCH_PHI3: + { + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), { n_embd, n_vocab }); + + // output + { + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), { n_embd }); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), { n_embd, n_vocab }); + } + + for (int i = 0; i < n_layer; ++i) { + ggml_context* ctx_layer = ctx_for_layer(i); + ggml_context* ctx_split = ctx_for_layer_split(i); + + auto& layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), { n_embd }); + + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), { n_embd, n_embd + 2 * n_embd_gqa }, false); + layer.wo = 
ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), { n_embd, n_embd }); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), { n_embd }); + + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd }); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), { n_embd, 2 * n_ff }); + } + } break; case LLM_ARCH_PLAMO: { model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); @@ -6326,7 +6383,7 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); cb(kq, "kq", il); - if (model.arch == LLM_ARCH_PHI2) { + if (model.arch == LLM_ARCH_PHI2 || model.arch == LLM_ARCH_PHI3) { // for this arch, we need to perform the KQ multiplication with F32 precision, otherwise we get NaNs // ref: https://github.com/ggerganov/llama.cpp/pull/4490#issuecomment-1859055847 ggml_mul_mat_set_prec(kq, GGML_PREC_F32); @@ -8967,12 +9024,140 @@ struct llm_build_context { cur = ggml_add(ctx0, cur, model.output_b); cb(cur, "result_output", -1); + ggml_build_forward_expand(gf, cur); + return gf; + } + + struct ggml_cgraph * build_phi3() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, lctx, hparams, batch, model.tok_embd, cb); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = build_inp_pos(); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = build_inp_KQ_mask(); + + for (int il = 0; il < n_layer; ++il) { + auto residual = inpL; + + // self-attention + { + struct ggml_tensor* attn_norm_output = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, + NULL, + LLM_NORM_RMS, cb, il); + cb(attn_norm_output, "attn_norm", il); + + struct ggml_tensor * Qcur = nullptr; + struct ggml_tensor * Kcur = nullptr; + struct ggml_tensor * Vcur = nullptr; + + if (model.layers[il].wqkv) { + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, attn_norm_output); + cb(cur, "wqkv", il); + + Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0 * sizeof(float) * (n_embd))); + Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1 * sizeof(float) * (n_embd))); + Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1 * sizeof(float) * (n_embd + n_embd_gqa))); + } + else { + Qcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wq, attn_norm_output), model.layers[il].bq); + Kcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wk, attn_norm_output), model.layers[il].bk); + Vcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wv, attn_norm_output), model.layers[il].bv); + } + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + + Qcur = ggml_rope_custom( + ctx0, Qcur, inp_pos, n_rot, rope_type, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Qcur = ggml_scale(ctx0, Qcur, 1.0f / sqrtf(float(n_embd_head))); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, 
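Since Phi-3 ships attention as one fused `qkv_proj`, the three `ggml_view_2d` calls in `build_phi3` above carve Q, K and V out of a single `[n_embd + 2*n_embd_gqa, n_tokens]` activation by row offset. A scalar sketch of the same slicing with plain floats (toy sizes; on Phi-3-mini `n_embd_gqa` equals `n_embd`):

```cpp
// Sketch: slice one fused QKV column into Q, K, V spans by row offset,
// mirroring the 0, n_embd and n_embd + n_embd_gqa offsets used above.
#include <cstdio>
#include <vector>

int main() {
    const int n_embd = 8, n_embd_gqa = 4; // toy sizes
    std::vector<float> qkv(n_embd + 2 * n_embd_gqa);
    for (size_t r = 0; r < qkv.size(); ++r) qkv[r] = float(r);

    const float * Q = qkv.data();                       // rows [0, n_embd)
    const float * K = qkv.data() + n_embd;              // next n_embd_gqa rows
    const float * V = qkv.data() + n_embd + n_embd_gqa; // last n_embd_gqa rows
    printf("Q[0]=%g K[0]=%g V[0]=%g\n", Q[0], K[0], V[0]); // 0 8 12
}
```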
Kcur, inp_pos, n_rot, rope_type, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, NULL, + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); + } + + if (il == n_layer - 1) { + // skip computing output for unused tokens + struct ggml_tensor* inp_out_ids = build_inp_out_ids(); + cur = ggml_get_rows(ctx0, cur, inp_out_ids); + residual = ggml_get_rows(ctx0, residual, inp_out_ids); + } + + cur = ggml_add(ctx0, cur, residual); + residual = cur; + + cur = llm_build_norm(ctx0, cur, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + // FF + // special-case: the up and gate tensors are merged into a single tensor + // TODO: support in llm_build_ffn + { + struct ggml_tensor* up = ggml_mul_mat(ctx0, model.layers[il].ffn_up, cur); + cb(up, "ffn_up", il); + + auto g = ggml_cont(ctx0, ggml_view_2d(ctx0, up, up->ne[0] / 2, up->ne[1], ggml_row_size(up->type, up->ne[0]), 0)); + auto y = ggml_cont(ctx0, ggml_view_2d(ctx0, up, up->ne[0] / 2, up->ne[1], ggml_row_size(up->type, up->ne[0]), up->nb[1] / 2)); + + y = ggml_mul(ctx0, y, ggml_silu(ctx0, g)); + cb(y, "ffn_gate", il); + + auto down = ggml_mul_mat(ctx0, model.layers[il].ffn_down, y); + cb(down, "ffn_down", il); + + cur = down; + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, residual, cur); + cb(cur, "l_out", il); + + inpL = cur; + } + + cur = llm_build_norm(ctx0, inpL, hparams, + model.output_norm, + NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); ggml_build_forward_expand(gf, cur); return gf; } + struct ggml_cgraph * build_plamo() { struct ggml_cgraph * gf = ggml_new_graph(ctx0); @@ -10474,6 +10659,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_phi2(); } break; + case LLM_ARCH_PHI3: + { + result = llm.build_phi3(); + } break; case LLM_ARCH_PLAMO: { result = llm.build_plamo(); @@ -15393,6 +15582,7 @@ enum llama_rope_type llama_rope_type(const struct llama_model * model) { case LLM_ARCH_QWEN2: case LLM_ARCH_QWEN2MOE: case LLM_ARCH_PHI2: + case LLM_ARCH_PHI3: case LLM_ARCH_GEMMA: case LLM_ARCH_STARCODER2: return LLAMA_ROPE_TYPE_NEOX; From 3fec68be4e9577fc53158366d3b3af039c17bb1f Mon Sep 17 00:00:00 2001 From: Junyang Lin Date: Wed, 24 Apr 2024 15:16:21 +0800 Subject: [PATCH 024/101] convert : add support of codeqwen due to tokenizer (#6707) * add support of codeqwen due to tokenizer * override load_hparams * fix typo * fix load_params * convert : fix whitespace --------- Co-authored-by: Georgi Gerganov --- convert-hf-to-gguf.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 4ace13eb63149..5763b6664e832 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -363,6 +363,16 @@ def _set_vocab_sentencepiece(self): scores.append(-1000.0) toktypes.append(SentencePieceTokenTypes.USER_DEFINED) + if vocab_size > len(tokens): + pad_count = vocab_size - len(tokens) + print( + f"Padding vocab with {pad_count} token(s) - [PAD1] through [PAD{pad_count}]" + ) + for i in range(1, pad_count + 1): + tokens.append(f"[PAD{i}]") + scores.append(-1000.0) + toktypes.append(SentencePieceTokenTypes.UNUSED) + assert len(tokens) == vocab_size self.gguf_writer.add_tokenizer_model("llama") @@ -1789,6 +1799,12 @@ def write_tensors(self): class 
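The `TODO` in the FF block above exists because `llm_build_ffn` cannot yet consume a merged tensor, hence the manual split: the first half of each `ffn_up` output column is the gate, the second half is the up projection, and they recombine as `silu(gate) * up` before `ffn_down`. A scalar sketch under those assumptions:

```cpp
// Sketch of the merged gate_up FFN: each output column is [gate ; up] halves,
// combined as silu(gate) * up, which is what the two ggml_view_2d calls select.
#include <cmath>
#include <cstdio>

static float silu(float x) { return x / (1.0f + std::exp(-x)); }

int main() {
    const int n_ff = 4;
    const float up_out[2 * n_ff] = {0.5f, -1.0f, 2.0f, 0.0f,  // gate half
                                    1.0f,  2.0f, 3.0f, 4.0f}; // up half
    for (int i = 0; i < n_ff; ++i)
        printf("%g ", up_out[n_ff + i] * silu(up_out[i])); // fed to ffn_down
    printf("\n");
}
```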
Qwen2Model(Model): model_arch = gguf.MODEL_ARCH.QWEN2 + def set_vocab(self): + try: + self._set_vocab_sentencepiece() + except FileNotFoundError: + self._set_vocab_gpt2() + @Model.register("Qwen2MoeForCausalLM") class Qwen2MoeModel(Model): From abd3314064cd3c513f9eef34c3ba6c23a107442c Mon Sep 17 00:00:00 2001 From: Tristan Druyen Date: Wed, 24 Apr 2024 10:52:37 +0200 Subject: [PATCH 025/101] llama : add phi 3 chat template (#6857) * Add phi 3 chat template & tests * test : fix chat template result --------- Co-authored-by: Georgi Gerganov --- llama.cpp | 9 +++++++++ tests/test-chat-template.cpp | 4 ++++ 2 files changed, 13 insertions(+) diff --git a/llama.cpp b/llama.cpp index 30fe190373b43..e4ca34bd13389 100644 --- a/llama.cpp +++ b/llama.cpp @@ -17447,6 +17447,15 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|start_header_id|>assistant<|end_header_id|>\n\n"; } + } else if (tmpl == "phi3" || (tmpl.find("<|assistant|>") != std::string::npos && tmpl.find("<|end|>") != std::string::npos )) { + // Phi 3 + for (auto message : chat) { + std::string role(message->role); + ss << "<|" << role << "|>\n" << trim(message->content) << "<|end|>\n"; + } + if (add_ass) { + ss << "<|assistant|>\n"; + } } else { // template not supported return -1; diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp index cddf86a4105ea..4fe9183b92cfd 100644 --- a/tests/test-chat-template.cpp +++ b/tests/test-chat-template.cpp @@ -49,6 +49,8 @@ int main(void) { "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif false == true %}{% set loop_messages = messages %}{% set system_message = 'You are Command-R, a brilliant, sophisticated, AI-assistant trained to assist human users by providing thorough responses. You are trained by Cohere.' 
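A minimal usage sketch of the new Phi-3 branch, mirroring how the test below drives it (a null model plus an explicit template string is accepted by `llama_chat_apply_template`; error handling trimmed):

```cpp
// Format a short conversation with the "phi3" template via the public API.
#include "llama.h"
#include <cstdio>
#include <vector>

int main() {
    const llama_chat_message chat[] = {
        {"system", "You are a helpful assistant"},
        {"user",   "Hello"},
    };
    std::vector<char> buf(1024);
    int32_t n = llama_chat_apply_template(nullptr, "phi3", chat, 2, true,
                                          buf.data(), (int32_t) buf.size());
    if (n > 0 && n <= (int32_t) buf.size()) printf("%.*s", n, buf.data());
    // prints: <|system|>\nYou are a helpful assistant<|end|>\n<|user|>\nHello<|end|>\n<|assistant|>\n
}
```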
%}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% if system_message != false %}{{ '<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>' + system_message + '<|END_OF_TURN_TOKEN|>' }}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|START_OF_TURN_TOKEN|><|USER_TOKEN|>' + content.strip() + '<|END_OF_TURN_TOKEN|>' }}{% elif message['role'] == 'assistant' %}{{ '<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>' + content.strip() + '<|END_OF_TURN_TOKEN|>' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>' }}{% endif %}", // Llama-3 "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}", + // Phi-3 + "{{ bos_token }}{% for message in messages %}{{'<|' + message['role'] + '|>' + ' ' + message['content'] + '<|end|> ' }}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|> ' }}{% else %}{{ eos_token }}{% endif %}" }; std::vector<std::string> expected_output = { // teknium/OpenHermes-2.5-Mistral-7B @@ -77,6 +79,8 @@ int main(void) { "<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>You are a helpful assistant<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>Hello<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>Hi there<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>Who are you<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>I am an assistant<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>Another question<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>", // Llama 3 "<|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nHello<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\nHi there<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWho are you<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\nI am an assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nAnother question<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + // Phi 3 + "<|system|>\nYou are a helpful assistant<|end|>\n<|user|>\nHello<|end|>\n<|assistant|>\nHi there<|end|>\n<|user|>\nWho are you<|end|>\n<|assistant|>\nI am an assistant<|end|>\n<|user|>\nAnother question<|end|>\n<|assistant|>\n", }; std::vector<char> formatted_chat(1024); int32_t res; From c0d1b3e03e27634ac2871761f5033cf9324d472d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 24 Apr 2024 12:00:07 +0300 Subject: [PATCH 026/101] ggml : move 32-bit arm compat in ggml-impl.h (#6865) ggml-ci --- ggml-impl.h | 260 ++++++++++++++++++++++++++++++++++++++++++++- ggml-quants.c | 287 -------------------------------------------------- 2 files changed, 256 insertions(+), 291 deletions(-) diff --git a/ggml-impl.h b/ggml-impl.h index 0c997d3ed521f..2ffacc299e4f1 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -45,7 +45,7 @@ extern "C" { // 16-bit float // on Arm, we use __fp16 // on x86, we use uint16_t -#if defined(__ARM_NEON) && !defined(_MSC_VER) +#if defined(__ARM_NEON) // if YCM cannot find <arm_neon.h>, make a symbolic link to it, for 
example: // @@ -53,8 +53,262 @@ extern "C" { // #include <arm_neon.h> +#ifdef _MSC_VER + +typedef uint16_t ggml_fp16_internal_t; + +#define ggml_vld1q_u32(w,x,y,z) { ((w) + ((uint64_t)(x) << 32)), ((y) + ((uint64_t)(z) << 32)) } + +#else + typedef __fp16 ggml_fp16_internal_t; +#define ggml_vld1q_u32(w,x,y,z) { (w), (x), (y), (z) } + +#endif // _MSC_VER + +#if !defined(__aarch64__) + +// 32-bit ARM compatibility + +// vaddvq_s16 +// vpaddq_s16 +// vpaddq_s32 +// vaddvq_s32 +// vaddvq_f32 +// vmaxvq_f32 +// vcvtnq_s32_f32 +// vzip1_u8 +// vzip2_u8 + +inline static int32_t vaddvq_s16(int16x8_t v) { + return + (int32_t)vgetq_lane_s16(v, 0) + (int32_t)vgetq_lane_s16(v, 1) + + (int32_t)vgetq_lane_s16(v, 2) + (int32_t)vgetq_lane_s16(v, 3) + + (int32_t)vgetq_lane_s16(v, 4) + (int32_t)vgetq_lane_s16(v, 5) + + (int32_t)vgetq_lane_s16(v, 6) + (int32_t)vgetq_lane_s16(v, 7); +} + +inline static int16x8_t vpaddq_s16(int16x8_t a, int16x8_t b) { + int16x4_t a0 = vpadd_s16(vget_low_s16(a), vget_high_s16(a)); + int16x4_t b0 = vpadd_s16(vget_low_s16(b), vget_high_s16(b)); + return vcombine_s16(a0, b0); +} + +inline static int32x4_t vpaddq_s32(int32x4_t a, int32x4_t b) { + int32x2_t a0 = vpadd_s32(vget_low_s32(a), vget_high_s32(a)); + int32x2_t b0 = vpadd_s32(vget_low_s32(b), vget_high_s32(b)); + return vcombine_s32(a0, b0); +} + +inline static int32_t vaddvq_s32(int32x4_t v) { + return vgetq_lane_s32(v, 0) + vgetq_lane_s32(v, 1) + vgetq_lane_s32(v, 2) + vgetq_lane_s32(v, 3); +} + +inline static float vaddvq_f32(float32x4_t v) { + return vgetq_lane_f32(v, 0) + vgetq_lane_f32(v, 1) + vgetq_lane_f32(v, 2) + vgetq_lane_f32(v, 3); +} + +inline static float vmaxvq_f32(float32x4_t v) { + return + MAX(MAX(vgetq_lane_f32(v, 0), vgetq_lane_f32(v, 1)), + MAX(vgetq_lane_f32(v, 2), vgetq_lane_f32(v, 3))); +} + +inline static int32x4_t vcvtnq_s32_f32(float32x4_t v) { + int32x4_t res; + + res[0] = roundf(vgetq_lane_f32(v, 0)); + res[1] = roundf(vgetq_lane_f32(v, 1)); + res[2] = roundf(vgetq_lane_f32(v, 2)); + res[3] = roundf(vgetq_lane_f32(v, 3)); + + return res; +} + +inline static uint8x8_t vzip1_u8(uint8x8_t a, uint8x8_t b) { + uint8x8_t res; + + res[0] = a[0]; res[1] = b[0]; + res[2] = a[1]; res[3] = b[1]; + res[4] = a[2]; res[5] = b[2]; + res[6] = a[3]; res[7] = b[3]; + + return res; +} + +inline static uint8x8_t vzip2_u8(uint8x8_t a, uint8x8_t b) { + uint8x8_t res; + + res[0] = a[4]; res[1] = b[4]; + res[2] = a[5]; res[3] = b[5]; + res[4] = a[6]; res[5] = b[6]; + res[6] = a[7]; res[7] = b[7]; + + return res; +} + +// vld1q_s16_x2 +// vld1q_u8_x2 +// vld1q_u8_x4 +// vld1q_s8_x2 +// vld1q_s8_x4 +// TODO: double-check these work correctly + +typedef struct ggml_int16x8x2_t { + int16x8_t val[2]; +} ggml_int16x8x2_t; + +inline static ggml_int16x8x2_t ggml_vld1q_s16_x2(const int16_t * ptr) { + ggml_int16x8x2_t res; + + res.val[0] = vld1q_s16(ptr + 0); + res.val[1] = vld1q_s16(ptr + 8); + + return res; +} + +typedef struct ggml_uint8x16x2_t { + uint8x16_t val[2]; +} ggml_uint8x16x2_t; + +inline static ggml_uint8x16x2_t ggml_vld1q_u8_x2(const uint8_t * ptr) { + ggml_uint8x16x2_t res; + + res.val[0] = vld1q_u8(ptr + 0); + res.val[1] = vld1q_u8(ptr + 16); + + return res; +} + +typedef struct ggml_uint8x16x4_t { + uint8x16_t val[4]; +} ggml_uint8x16x4_t; + +inline static ggml_uint8x16x4_t ggml_vld1q_u8_x4(const uint8_t * ptr) { + ggml_uint8x16x4_t res; + + res.val[0] = vld1q_u8(ptr + 0); + res.val[1] = vld1q_u8(ptr + 16); + res.val[2] = vld1q_u8(ptr + 32); + res.val[3] = vld1q_u8(ptr + 48); + + return res; +} + +typedef struct 
ggml_int8x16x2_t { + int8x16_t val[2]; +} ggml_int8x16x2_t; + +inline static ggml_int8x16x2_t ggml_vld1q_s8_x2(const int8_t * ptr) { + ggml_int8x16x2_t res; + + res.val[0] = vld1q_s8(ptr + 0); + res.val[1] = vld1q_s8(ptr + 16); + + return res; +} + +typedef struct ggml_int8x16x4_t { + int8x16_t val[4]; +} ggml_int8x16x4_t; + +inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { + ggml_int8x16x4_t res; + + res.val[0] = vld1q_s8(ptr + 0); + res.val[1] = vld1q_s8(ptr + 16); + res.val[2] = vld1q_s8(ptr + 32); + res.val[3] = vld1q_s8(ptr + 48); + + return res; +} + +// NOTE: not tested +inline static int8x16_t ggml_vqtbl1q_s8(int8x16_t a, uint8x16_t b) { + int8x16_t res; + + res[ 0] = a[b[ 0]]; + res[ 1] = a[b[ 1]]; + res[ 2] = a[b[ 2]]; + res[ 3] = a[b[ 3]]; + res[ 4] = a[b[ 4]]; + res[ 5] = a[b[ 5]]; + res[ 6] = a[b[ 6]]; + res[ 7] = a[b[ 7]]; + res[ 8] = a[b[ 8]]; + res[ 9] = a[b[ 9]]; + res[10] = a[b[10]]; + res[11] = a[b[11]]; + res[12] = a[b[12]]; + res[13] = a[b[13]]; + res[14] = a[b[14]]; + res[15] = a[b[15]]; + + return res; +} + +// NOTE: not tested +inline static uint8x16_t ggml_vqtbl1q_u8(uint8x16_t a, uint8x16_t b) { + uint8x16_t res; + + res[ 0] = a[b[ 0]]; + res[ 1] = a[b[ 1]]; + res[ 2] = a[b[ 2]]; + res[ 3] = a[b[ 3]]; + res[ 4] = a[b[ 4]]; + res[ 5] = a[b[ 5]]; + res[ 6] = a[b[ 6]]; + res[ 7] = a[b[ 7]]; + res[ 8] = a[b[ 8]]; + res[ 9] = a[b[ 9]]; + res[10] = a[b[10]]; + res[11] = a[b[11]]; + res[12] = a[b[12]]; + res[13] = a[b[13]]; + res[14] = a[b[14]]; + res[15] = a[b[15]]; + + return res; +} + +#else + +#define ggml_int16x8x2_t int16x8x2_t +#define ggml_uint8x16x2_t uint8x16x2_t +#define ggml_uint8x16x4_t uint8x16x4_t +#define ggml_int8x16x2_t int8x16x2_t +#define ggml_int8x16x4_t int8x16x4_t + +#define ggml_vld1q_s16_x2 vld1q_s16_x2 +#define ggml_vld1q_u8_x2 vld1q_u8_x2 +#define ggml_vld1q_u8_x4 vld1q_u8_x4 +#define ggml_vld1q_s8_x2 vld1q_s8_x2 +#define ggml_vld1q_s8_x4 vld1q_s8_x4 +#define ggml_vqtbl1q_s8 vqtbl1q_s8 +#define ggml_vqtbl1q_u8 vqtbl1q_u8 + +#endif // !defined(__aarch64__) + +#if !defined(__ARM_FEATURE_DOTPROD) + +inline static int32x4_t ggml_vdotq_s32(int32x4_t acc, int8x16_t a, int8x16_t b) { + const int16x8_t p0 = vmull_s8(vget_low_s8 (a), vget_low_s8 (b)); + const int16x8_t p1 = vmull_s8(vget_high_s8(a), vget_high_s8(b)); + + return vaddq_s32(acc, vaddq_s32(vpaddlq_s16(p0), vpaddlq_s16(p1))); +} + +#else + +#define ggml_vdotq_s32(a, b, c) vdotq_s32(a, b, c) + +#endif // !defined(__ARM_FEATURE_DOTPROD) + +#endif // defined(__ARM_NEON) + +#if defined(__ARM_NEON) && !defined(__MSC_VER) + #define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) #define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) @@ -75,8 +329,6 @@ static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { #else -typedef uint16_t ggml_fp16_internal_t; - #ifdef __wasm_simd128__ #include #else @@ -221,7 +473,7 @@ static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { #endif // __F16C__ -#endif // __ARM_NEON +#endif // defined(__ARM_NEON) && (!defined(__MSC_VER) // precomputed f32 table for f16 (256 KB) // defined in ggml.c, initialized in ggml_init() diff --git a/ggml-quants.c b/ggml-quants.c index 32360a1f1be50..11e11c2196c2c 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -14,41 +14,6 @@ #include // for qsort #include // for GGML_ASSERT -#ifdef __ARM_NEON - -// if YCM cannot find , make a symbolic link to it, for example: -// -// $ ln -sfn /Library/Developer/CommandLineTools/usr/lib/clang/13.1.6/include/arm_neon.h ./src/ -// -#include - 
-#else - -#ifdef __wasm_simd128__ -#include -#else -#if defined(__POWER9_VECTOR__) || defined(__powerpc64__) -#include -#undef bool -#define bool _Bool -#else -#if defined(_MSC_VER) || defined(__MINGW32__) -#include -#else -#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) || defined(__SSE3__) -#if !defined(__riscv) -#include -#endif -#endif -#endif -#endif -#endif -#endif - -#ifdef __riscv_v_intrinsic -#include -#endif - #undef MIN #undef MAX @@ -276,258 +241,6 @@ static inline float hsum_float_4x4(const __m128 a, const __m128 b, const __m128 #endif // __AVX__ || __AVX2__ || __AVX512F__ #endif // defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) -#if defined(__ARM_NEON) - -#ifdef _MSC_VER - -#define ggml_vld1q_u32(w,x,y,z) { ((w) + ((uint64_t)(x) << 32)), ((y) + ((uint64_t)(z) << 32)) } - -#else - -#define ggml_vld1q_u32(w,x,y,z) { (w), (x), (y), (z) } - -#endif - -#if !defined(__aarch64__) - -// 64-bit compatibility - -// vaddvq_s16 -// vpaddq_s16 -// vpaddq_s32 -// vaddvq_s32 -// vaddvq_f32 -// vmaxvq_f32 -// vcvtnq_s32_f32 -// vzip1_u8 -// vzip2_u8 - -inline static int32_t vaddvq_s16(int16x8_t v) { - return - (int32_t)vgetq_lane_s16(v, 0) + (int32_t)vgetq_lane_s16(v, 1) + - (int32_t)vgetq_lane_s16(v, 2) + (int32_t)vgetq_lane_s16(v, 3) + - (int32_t)vgetq_lane_s16(v, 4) + (int32_t)vgetq_lane_s16(v, 5) + - (int32_t)vgetq_lane_s16(v, 6) + (int32_t)vgetq_lane_s16(v, 7); -} - -inline static int16x8_t vpaddq_s16(int16x8_t a, int16x8_t b) { - int16x4_t a0 = vpadd_s16(vget_low_s16(a), vget_high_s16(a)); - int16x4_t b0 = vpadd_s16(vget_low_s16(b), vget_high_s16(b)); - return vcombine_s16(a0, b0); -} - -inline static int32x4_t vpaddq_s32(int32x4_t a, int32x4_t b) { - int32x2_t a0 = vpadd_s32(vget_low_s32(a), vget_high_s32(a)); - int32x2_t b0 = vpadd_s32(vget_low_s32(b), vget_high_s32(b)); - return vcombine_s32(a0, b0); -} - -inline static int32_t vaddvq_s32(int32x4_t v) { - return vgetq_lane_s32(v, 0) + vgetq_lane_s32(v, 1) + vgetq_lane_s32(v, 2) + vgetq_lane_s32(v, 3); -} - -inline static float vaddvq_f32(float32x4_t v) { - return vgetq_lane_f32(v, 0) + vgetq_lane_f32(v, 1) + vgetq_lane_f32(v, 2) + vgetq_lane_f32(v, 3); -} - -inline static float vmaxvq_f32(float32x4_t v) { - return - MAX(MAX(vgetq_lane_f32(v, 0), vgetq_lane_f32(v, 1)), - MAX(vgetq_lane_f32(v, 2), vgetq_lane_f32(v, 3))); -} - -inline static int32x4_t vcvtnq_s32_f32(float32x4_t v) { - int32x4_t res; - - res[0] = roundf(vgetq_lane_f32(v, 0)); - res[1] = roundf(vgetq_lane_f32(v, 1)); - res[2] = roundf(vgetq_lane_f32(v, 2)); - res[3] = roundf(vgetq_lane_f32(v, 3)); - - return res; -} - -inline static uint8x8_t vzip1_u8(uint8x8_t a, uint8x8_t b) { - uint8x8_t res; - - res[0] = a[0]; res[1] = b[0]; - res[2] = a[1]; res[3] = b[1]; - res[4] = a[2]; res[5] = b[2]; - res[6] = a[3]; res[7] = b[3]; - - return res; -} - -inline static uint8x8_t vzip2_u8(uint8x8_t a, uint8x8_t b) { - uint8x8_t res; - - res[0] = a[4]; res[1] = b[4]; - res[2] = a[5]; res[3] = b[5]; - res[4] = a[6]; res[5] = b[6]; - res[6] = a[7]; res[7] = b[7]; - - return res; -} - -// vld1q_s16_x2 -// vld1q_u8_x2 -// vld1q_u8_x4 -// vld1q_s8_x2 -// vld1q_s8_x4 -// TODO: double-check these work correctly - -typedef struct ggml_int16x8x2_t { - int16x8_t val[2]; -} ggml_int16x8x2_t; - -inline static ggml_int16x8x2_t ggml_vld1q_s16_x2(const int16_t * ptr) { - ggml_int16x8x2_t res; - - res.val[0] = vld1q_s16(ptr + 0); - res.val[1] = vld1q_s16(ptr + 8); - - return res; -} - -typedef struct ggml_uint8x16x2_t { - 
uint8x16_t val[2]; -} ggml_uint8x16x2_t; - -inline static ggml_uint8x16x2_t ggml_vld1q_u8_x2(const uint8_t * ptr) { - ggml_uint8x16x2_t res; - - res.val[0] = vld1q_u8(ptr + 0); - res.val[1] = vld1q_u8(ptr + 16); - - return res; -} - -typedef struct ggml_uint8x16x4_t { - uint8x16_t val[4]; -} ggml_uint8x16x4_t; - -inline static ggml_uint8x16x4_t ggml_vld1q_u8_x4(const uint8_t * ptr) { - ggml_uint8x16x4_t res; - - res.val[0] = vld1q_u8(ptr + 0); - res.val[1] = vld1q_u8(ptr + 16); - res.val[2] = vld1q_u8(ptr + 32); - res.val[3] = vld1q_u8(ptr + 48); - - return res; -} - -typedef struct ggml_int8x16x2_t { - int8x16_t val[2]; -} ggml_int8x16x2_t; - -inline static ggml_int8x16x2_t ggml_vld1q_s8_x2(const int8_t * ptr) { - ggml_int8x16x2_t res; - - res.val[0] = vld1q_s8(ptr + 0); - res.val[1] = vld1q_s8(ptr + 16); - - return res; -} - -typedef struct ggml_int8x16x4_t { - int8x16_t val[4]; -} ggml_int8x16x4_t; - -inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { - ggml_int8x16x4_t res; - - res.val[0] = vld1q_s8(ptr + 0); - res.val[1] = vld1q_s8(ptr + 16); - res.val[2] = vld1q_s8(ptr + 32); - res.val[3] = vld1q_s8(ptr + 48); - - return res; -} - -// NOTE: not tested -inline static int8x16_t ggml_vqtbl1q_s8(int8x16_t a, uint8x16_t b) { - int8x16_t res; - - res[ 0] = a[b[ 0]]; - res[ 1] = a[b[ 1]]; - res[ 2] = a[b[ 2]]; - res[ 3] = a[b[ 3]]; - res[ 4] = a[b[ 4]]; - res[ 5] = a[b[ 5]]; - res[ 6] = a[b[ 6]]; - res[ 7] = a[b[ 7]]; - res[ 8] = a[b[ 8]]; - res[ 9] = a[b[ 9]]; - res[10] = a[b[10]]; - res[11] = a[b[11]]; - res[12] = a[b[12]]; - res[13] = a[b[13]]; - res[14] = a[b[14]]; - res[15] = a[b[15]]; - - return res; -} - -// NOTE: not tested -inline static uint8x16_t ggml_vqtbl1q_u8(uint8x16_t a, uint8x16_t b) { - uint8x16_t res; - - res[ 0] = a[b[ 0]]; - res[ 1] = a[b[ 1]]; - res[ 2] = a[b[ 2]]; - res[ 3] = a[b[ 3]]; - res[ 4] = a[b[ 4]]; - res[ 5] = a[b[ 5]]; - res[ 6] = a[b[ 6]]; - res[ 7] = a[b[ 7]]; - res[ 8] = a[b[ 8]]; - res[ 9] = a[b[ 9]]; - res[10] = a[b[10]]; - res[11] = a[b[11]]; - res[12] = a[b[12]]; - res[13] = a[b[13]]; - res[14] = a[b[14]]; - res[15] = a[b[15]]; - - return res; -} - -#else - -#define ggml_int16x8x2_t int16x8x2_t -#define ggml_uint8x16x2_t uint8x16x2_t -#define ggml_uint8x16x4_t uint8x16x4_t -#define ggml_int8x16x2_t int8x16x2_t -#define ggml_int8x16x4_t int8x16x4_t - -#define ggml_vld1q_s16_x2 vld1q_s16_x2 -#define ggml_vld1q_u8_x2 vld1q_u8_x2 -#define ggml_vld1q_u8_x4 vld1q_u8_x4 -#define ggml_vld1q_s8_x2 vld1q_s8_x2 -#define ggml_vld1q_s8_x4 vld1q_s8_x4 -#define ggml_vqtbl1q_s8 vqtbl1q_s8 -#define ggml_vqtbl1q_u8 vqtbl1q_u8 - -#endif - -#if !defined(__ARM_FEATURE_DOTPROD) - -inline static int32x4_t ggml_vdotq_s32(int32x4_t acc, int8x16_t a, int8x16_t b) { - const int16x8_t p0 = vmull_s8(vget_low_s8 (a), vget_low_s8 (b)); - const int16x8_t p1 = vmull_s8(vget_high_s8(a), vget_high_s8(b)); - - return vaddq_s32(acc, vaddq_s32(vpaddlq_s16(p0), vpaddlq_s16(p1))); -} - -#else - -#define ggml_vdotq_s32(a, b, c) vdotq_s32(a, b, c) - -#endif - -#endif - #if defined(__ARM_NEON) || defined(__wasm_simd128__) #define B1(c,s,n) 0x ## n ## c , 0x ## n ## s #define B2(c,s,n) B1(c,s,n ## c), B1(c,s,n ## s) From 28103f4832e301a9c84d44ff0df9d75d46ab6c76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Wed, 24 Apr 2024 11:08:36 +0200 Subject: [PATCH 027/101] Server: fix seed for multiple slots (#6835) * Server: add tests for consistent results * sampling: separate rng per sampling context --- common/common.cpp | 2 + common/sampling.cpp | 13 
++++- common/sampling.h | 47 ++++++++------- examples/lookup/lookup-stats.cpp | 1 - examples/lookup/lookup.cpp | 1 - examples/main/main.cpp | 1 - examples/server/server.cpp | 3 +- .../server/tests/features/results.feature | 57 +++++++++++++++++++ examples/server/tests/features/steps/steps.py | 34 +++++++++++ llama.cpp | 7 ++- llama.h | 9 ++- 11 files changed, 145 insertions(+), 30 deletions(-) create mode 100644 examples/server/tests/features/results.feature diff --git a/common/common.cpp b/common/common.cpp index 06f252ea6914b..a0d1f8d59cb62 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -242,7 +242,9 @@ bool gpt_params_find_arg(int argc, char ** argv, const std::string & arg, gpt_pa invalid_param = true; return true; } + // This is temporary, in the future the samplign state will be moved fully to llama_sampling_context. params.seed = std::stoul(argv[i]); + sparams.seed = std::stoul(argv[i]); return true; } if (arg == "-t" || arg == "--threads") { diff --git a/common/sampling.cpp b/common/sampling.cpp index 45d68b26c2b93..f2466550168a7 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -1,4 +1,6 @@ +#define LLAMA_API_INTERNAL #include "sampling.h" +#include struct llama_sampling_context * llama_sampling_init(const struct llama_sampling_params & params) { struct llama_sampling_context * result = new llama_sampling_context(); @@ -33,6 +35,8 @@ struct llama_sampling_context * llama_sampling_init(const struct llama_sampling_ result->prev.resize(params.n_prev); + llama_sampling_set_rng_seed(result, params.seed); + return result; } @@ -62,6 +66,13 @@ void llama_sampling_reset(llama_sampling_context * ctx) { ctx->cur.clear(); } +void llama_sampling_set_rng_seed(struct llama_sampling_context * ctx, uint32_t seed) { + if (seed == LLAMA_DEFAULT_SEED) { + seed = time(NULL); + } + ctx->rng.seed(seed); +} + void llama_sampling_cp(llama_sampling_context * src, llama_sampling_context * dst) { if (dst->grammar) { llama_grammar_free(dst->grammar); @@ -203,7 +214,7 @@ static llama_token llama_sampling_sample_impl( sampler_queue(ctx_main, params, cur_p, min_keep); - id = llama_sample_token(ctx_main, &cur_p); + id = llama_sample_token_with_rng(ctx_main, &cur_p, ctx_sampling->rng); //{ // const int n_top = 10; diff --git a/common/sampling.h b/common/sampling.h index 639b819ab4fb2..cf7081e3674f1 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -4,9 +4,10 @@ #include "grammar-parser.h" +#include #include -#include #include +#include // sampler types enum class llama_sampler_type : char { @@ -20,25 +21,26 @@ enum class llama_sampler_type : char { // sampling parameters typedef struct llama_sampling_params { - int32_t n_prev = 64; // number of previous tokens to remember - int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens. 
- int32_t min_keep = 0; // 0 = disabled, otherwise samplers should return at least min_keep tokens - int32_t top_k = 40; // <= 0 to use vocab size - float top_p = 0.95f; // 1.0 = disabled - float min_p = 0.05f; // 0.0 = disabled - float tfs_z = 1.00f; // 1.0 = disabled - float typical_p = 1.00f; // 1.0 = disabled - float temp = 0.80f; // <= 0.0 to sample greedily, 0.0 to not output probabilities - float dynatemp_range = 0.00f; // 0.0 = disabled - float dynatemp_exponent = 1.00f; // controls how entropy maps to temperature in dynamic temperature sampler - int32_t penalty_last_n = 64; // last n tokens to penalize (0 = disable penalty, -1 = context size) - float penalty_repeat = 1.00f; // 1.0 = disabled - float penalty_freq = 0.00f; // 0.0 = disabled - float penalty_present = 0.00f; // 0.0 = disabled - int32_t mirostat = 0; // 0 = disabled, 1 = mirostat, 2 = mirostat 2.0 - float mirostat_tau = 5.00f; // target entropy - float mirostat_eta = 0.10f; // learning rate - bool penalize_nl = false; // consider newlines as a repeatable token + int32_t n_prev = 64; // number of previous tokens to remember + int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens. + int32_t min_keep = 0; // 0 = disabled, otherwise samplers should return at least min_keep tokens + int32_t top_k = 40; // <= 0 to use vocab size + float top_p = 0.95f; // 1.0 = disabled + float min_p = 0.05f; // 0.0 = disabled + float tfs_z = 1.00f; // 1.0 = disabled + float typical_p = 1.00f; // 1.0 = disabled + float temp = 0.80f; // <= 0.0 to sample greedily, 0.0 to not output probabilities + float dynatemp_range = 0.00f; // 0.0 = disabled + float dynatemp_exponent = 1.00f; // controls how entropy maps to temperature in dynamic temperature sampler + int32_t penalty_last_n = 64; // last n tokens to penalize (0 = disable penalty, -1 = context size) + float penalty_repeat = 1.00f; // 1.0 = disabled + float penalty_freq = 0.00f; // 0.0 = disabled + float penalty_present = 0.00f; // 0.0 = disabled + int32_t mirostat = 0; // 0 = disabled, 1 = mirostat, 2 = mirostat 2.0 + float mirostat_tau = 5.00f; // target entropy + float mirostat_eta = 0.10f; // learning rate + bool penalize_nl = false; // consider newlines as a repeatable token + uint32_t seed = LLAMA_DEFAULT_SEED; // the seed used to initialize llama_sampling_context std::vector samplers_sequence = { llama_sampler_type::TOP_K, @@ -79,6 +81,8 @@ struct llama_sampling_context { // TODO: replace with ring-buffer std::vector prev; std::vector cur; + + std::mt19937 rng; }; #include "common.h" @@ -93,6 +97,9 @@ void llama_sampling_free(struct llama_sampling_context * ctx); // - reset grammar void llama_sampling_reset(llama_sampling_context * ctx); +// Set the sampler seed +void llama_sampling_set_rng_seed(struct llama_sampling_context * ctx, uint32_t seed); + // Copy the sampler context void llama_sampling_cp(llama_sampling_context * src, llama_sampling_context * dst); diff --git a/examples/lookup/lookup-stats.cpp b/examples/lookup/lookup-stats.cpp index 41b62c2fe9f76..87ecc0a4f1394 100644 --- a/examples/lookup/lookup-stats.cpp +++ b/examples/lookup/lookup-stats.cpp @@ -30,7 +30,6 @@ int main(int argc, char ** argv){ // load the model std::tie(model, ctx) = llama_init_from_gpt_params(params); - llama_set_rng_seed(ctx, params.seed); GGML_ASSERT(llama_n_vocab(model) < (1 << 16)); // tokenize the prompt diff --git a/examples/lookup/lookup.cpp b/examples/lookup/lookup.cpp index 9526e898fe763..eebbd00a58e66 100644 --- a/examples/lookup/lookup.cpp +++ 
b/examples/lookup/lookup.cpp @@ -38,7 +38,6 @@ int main(int argc, char ** argv){ // load the model std::tie(model, ctx) = llama_init_from_gpt_params(params); - llama_set_rng_seed(ctx, params.seed); GGML_ASSERT(llama_n_vocab(model) < (1 << 16)); // tokenize the prompt diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 1180734b9760d..a74d4d9c72364 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -240,7 +240,6 @@ int main(int argc, char ** argv) { return 1; } session_tokens.resize(n_token_count_out); - llama_set_rng_seed(ctx, params.seed); LOG_TEE("%s: loaded a session with prompt size of %d tokens\n", __func__, (int)session_tokens.size()); } } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 25bc296396772..68c63f9f1b1a0 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -854,7 +854,7 @@ struct server_context { slot.sparams.penalize_nl = json_value(data, "penalize_nl", default_sparams.penalize_nl); slot.params.n_keep = json_value(data, "n_keep", slot.params.n_keep); slot.params.n_discard = json_value(data, "n_discard", default_params.n_discard); - slot.params.seed = json_value(data, "seed", default_params.seed); + slot.sparams.seed = json_value(data, "seed", default_sparams.seed); slot.sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); slot.sparams.min_keep = json_value(data, "min_keep", default_sparams.min_keep); @@ -1028,7 +1028,6 @@ struct server_context { send_error(task, "Failed to parse grammar", ERROR_TYPE_INVALID_REQUEST); return false; } - llama_set_rng_seed(ctx, slot.params.seed); } slot.command = SLOT_COMMAND_LOAD_PROMPT; diff --git a/examples/server/tests/features/results.feature b/examples/server/tests/features/results.feature new file mode 100644 index 0000000000000..f17120f7b9fbc --- /dev/null +++ b/examples/server/tests/features/results.feature @@ -0,0 +1,57 @@ +@llama.cpp +@results +Feature: Results + + Background: Server startup + Given a server listening on localhost:8080 + And a model file tinyllamas/split/stories15M-00001-of-00003.gguf from HF repo ggml-org/models + And a model file test-model-00001-of-00003.gguf + And 128 as batch size + And 256 KV cache size + And 128 max tokens to predict + + Scenario Outline: Multi users completion + Given slots + And continuous batching + Then the server is starting + Then the server is healthy + + Given 42 as seed + And a prompt: + """ + Write a very long story about AI. + """ + + Given 42 as seed + And a prompt: + """ + Write a very long story about AI. + """ + + Given 42 as seed + And a prompt: + """ + Write a very long story about AI. + """ + + Given 42 as seed + And a prompt: + """ + Write a very long story about AI. + """ + + Given 42 as seed + And a prompt: + """ + Write a very long story about AI. 
+ """ + + Given concurrent completion requests + Then the server is busy + Then the server is idle + And all slots are idle + Then all predictions are equal + Examples: + | n_slots | + | 1 | + | 2 | diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index ca400efa41b9e..f71e0d706cca9 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -61,6 +61,7 @@ def step_server_config(context, server_fqdn, server_port): context.server_metrics = False context.server_process = None context.seed = None + context.draft = None context.server_seed = None context.user_api_key = None context.response_format = None @@ -107,6 +108,11 @@ def step_n_gpu_layer(context, ngl): context.n_gpu_layer = ngl +@step('{draft:d} as draft') +def step_draft(context, draft): + context.draft = draft + + @step('{n_ctx:d} KV cache size') def step_n_ctx(context, n_ctx): context.n_ctx = n_ctx @@ -254,6 +260,15 @@ def step_n_tokens_predicted(context, predicted_n): assert_n_tokens_predicted(context.completion, predicted_n) +@step('all predictions are equal') +@async_run_until_complete +async def step_predictions_equal(context): + n_completions = await gather_tasks_results(context) + assert n_completions >= 2, "need at least 2 completions" + assert_all_predictions_equal(context.tasks_result) + context.tasks_result = [] + + @step('the completion is truncated') def step_assert_completion_truncated(context): step_assert_completion_truncated(context, '') @@ -1020,6 +1035,23 @@ def assert_n_tokens_predicted(completion_response, expected_predicted_n=None, re assert n_predicted == expected_predicted_n, (f'invalid number of tokens predicted:' f' {n_predicted} <> {expected_predicted_n}') +def assert_all_predictions_equal(completion_responses): + content_0 = completion_responses[0]['content'] + + if 'DEBUG' in os.environ and os.environ['DEBUG'] == 'ON': + print(f"content 0: {content_0}") + + i = 1 + for response in completion_responses[1:]: + content = response['content'] + + if 'DEBUG' in os.environ and os.environ['DEBUG'] == 'ON': + print(f"content {i}: {content}") + + assert content == content_0, "contents not equal" + + i += 1 + async def gather_tasks_results(context): n_tasks = len(context.concurrent_tasks) @@ -1148,6 +1180,8 @@ def start_server_background(context): server_args.extend(['--ubatch-size', context.n_ubatch]) if context.n_gpu_layer: server_args.extend(['--n-gpu-layers', context.n_gpu_layer]) + if context.draft is not None: + server_args.extend(['--draft', context.draft]) if context.server_continuous_batching: server_args.append('--cont-batching') if context.server_embeddings: diff --git a/llama.cpp b/llama.cpp index e4ca34bd13389..3a4a03d8f29fb 100644 --- a/llama.cpp +++ b/llama.cpp @@ -13667,7 +13667,7 @@ llama_token llama_sample_token_greedy(struct llama_context * ctx, llama_token_da return result; } -llama_token llama_sample_token(struct llama_context * ctx, llama_token_data_array * candidates) { +llama_token llama_sample_token_with_rng(struct llama_context * ctx, llama_token_data_array * candidates, std::mt19937 & rng) { GGML_ASSERT(ctx); const int64_t t_start_sample_us = ggml_time_us(); @@ -13680,7 +13680,6 @@ llama_token llama_sample_token(struct llama_context * ctx, llama_token_data_arra } std::discrete_distribution<> dist(probs.begin(), probs.end()); - auto & rng = ctx->rng; int idx = dist(rng); llama_token result = candidates->data[idx].id; @@ -13690,6 +13689,10 @@ llama_token llama_sample_token(struct 
llama_context * ctx, llama_token_data_arra return result; } +llama_token llama_sample_token(struct llama_context * ctx, llama_token_data_array * candidates) { + return llama_sample_token_with_rng(ctx, candidates, ctx->rng); +} + void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar * grammar, llama_token token) { const int64_t t_start_sample_us = ggml_time_us(); diff --git a/llama.h b/llama.h index 4effca42cc65d..7bfd13740cf25 100644 --- a/llama.h +++ b/llama.h @@ -987,7 +987,7 @@ extern "C" { struct llama_context * ctx, llama_token_data_array * candidates); - /// @details Randomly selects a token from the candidates based on their probabilities. + /// @details Randomly selects a token from the candidates based on their probabilities using the RNG of ctx. LLAMA_API llama_token llama_sample_token( struct llama_context * ctx, llama_token_data_array * candidates); @@ -1074,8 +1074,9 @@ extern "C" { // Internal API to be implemented by llama.cpp and used by tests/benchmarks only #ifdef LLAMA_API_INTERNAL -#include +#include #include +#include struct ggml_tensor; @@ -1112,6 +1113,10 @@ std::pair, llama_partial_utf8> decode_utf8( const std::string & src, llama_partial_utf8 partial_start); +// Randomly selects a token from the candidates based on their probabilities using given std::mt19937. +// This is a temporary workaround in order to fix race conditions when sampling with multiple sequences. +llama_token llama_sample_token_with_rng(struct llama_context * ctx, llama_token_data_array * candidates, std::mt19937 & rng); + #endif // LLAMA_API_INTERNAL #endif // LLAMA_H From 37246b1031b1680c0dcaf20aef736d6b446203fa Mon Sep 17 00:00:00 2001 From: Kyle Mistele Date: Wed, 24 Apr 2024 05:15:29 -0500 Subject: [PATCH 028/101] common : revert showing control tokens by default for server (#6860) * fix: revert showing control tokens by default * feat: revert changes to default behavior of llama_token_to_piece; provide overridden declaration to receive "bool special" param to toggle showing control tokens * feat: use the overridden declaration of llama_token_to_piece from common/common.cpp to specify "false" so that control tokens are not shown in chat completion responses" * common : simplify --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 6 +++--- common/common.h | 5 +++-- examples/server/server.cpp | 2 +- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index a0d1f8d59cb62..97f55b053eee1 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -2328,12 +2328,12 @@ std::vector llama_tokenize( return result; } -std::string llama_token_to_piece(const struct llama_context * ctx, llama_token token) { +std::string llama_token_to_piece(const struct llama_context * ctx, llama_token token, bool special) { std::vector result(8, 0); - const int n_tokens = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size(), true); + const int n_tokens = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size(), special); if (n_tokens < 0) { result.resize(-n_tokens); - int check = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size(), true); + int check = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size(), special); GGML_ASSERT(check == -n_tokens); } else { result.resize(n_tokens); diff --git a/common/common.h b/common/common.h index cca44268e6df5..157b54a3e9e08 100644 --- a/common/common.h +++ b/common/common.h @@ -237,11 +237,12 @@ 
std::vector llama_tokenize( bool add_special, bool parse_special = false); -// tokenizes a token into a piece +// tokenizes a token into a piece, optionally renders special/control tokens // should work similar to Python's `tokenizer.id_to_piece` std::string llama_token_to_piece( const struct llama_context * ctx, - llama_token token); + llama_token token, + bool special = true); // TODO: these should be moved in llama.h C-style API under single `llama_detokenize` function // that takes into account the tokenizer type and decides how to handle the leading space diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 68c63f9f1b1a0..3acbd17df2ac7 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1117,7 +1117,7 @@ struct server_context { bool process_token(completion_token_output & result, server_slot & slot) { // remember which tokens were sampled - used for repetition penalties during sampling - const std::string token_str = llama_token_to_piece(ctx, result.tok); + const std::string token_str = llama_token_to_piece(ctx, result.tok, false); slot.sampled = result.tok; // search stop word and delete it From 3fe847b5747676ee1bf90371c46ed0bc66b57240 Mon Sep 17 00:00:00 2001 From: mgroeber9110 <45620825+mgroeber9110@users.noreply.github.com> Date: Wed, 24 Apr 2024 12:54:24 +0200 Subject: [PATCH 029/101] server : do not apply Markdown formatting in code sections (#6850) --- examples/server/public/index.html | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/server/public/index.html b/examples/server/public/index.html index 9fe61eb1b1b13..2961999f2451a 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -881,11 +881,11 @@ .replace(/&/g, '&') .replace(//g, '>') - .replace(/^#{1,6} (.*)$/gim, '
<h3>$1</h3>')
-            .replace(/\*\*(.*?)\*\*/g, '<strong>$1</strong>')
-            .replace(/__(.*?)__/g, '<strong>$1</strong>')
-            .replace(/\*(.*?)\*/g, '<em>$1</em>')
-            .replace(/_(.*?)_/g, '<em>$1</em>')
+            .replace(/(^|\n)#{1,6} ([^\n]*)(?=([^`]*`[^`]*`)*[^`]*$)/g, '$1<h3>$2</h3>')
+            .replace(/\*\*(.*?)\*\*(?=([^`]*`[^`]*`)*[^`]*$)/g, '<strong>$1</strong>')
+            .replace(/__(.*?)__(?=([^`]*`[^`]*`)*[^`]*$)/g, '<strong>$1</strong>')
+            .replace(/\*(.*?)\*(?=([^`]*`[^`]*`)*[^`]*$)/g, '<em>$1</em>')
+            .replace(/_(.*?)_(?=([^`]*`[^`]*`)*[^`]*$)/g, '<em>$1</em>')
             .replace(/```.*?\n([\s\S]*?)```/g, '<pre><code>$1</code></pre>')
             .replace(/`(.*?)`/g, '<code>$1</code>')
             .replace(/\n/gim, '<br />
'); From 15ed96c25a5a88ca38102bee85d695e462c3f9c1 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Wed, 24 Apr 2024 19:35:35 +0800 Subject: [PATCH 030/101] added support for resizing the window --- koboldcpp.py | 45 ++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/koboldcpp.py b/koboldcpp.py index 61a859d991ccd..13f6f04dd00ca 100644 --- a/koboldcpp.py +++ b/koboldcpp.py @@ -9,7 +9,7 @@ # scenarios and everything Kobold and Kobold Lite have to offer. import ctypes -import os +import os, math import argparse import json, sys, http.server, time, asyncio, socket, threading from concurrent.futures import ThreadPoolExecutor @@ -1503,16 +1503,51 @@ def show_new_gui(): import customtkinter as ctk nextstate = 0 #0=exit, 1=launch - windowwidth = 540 - windowheight = 500 + original_windowwidth = 540 + original_windowheight = 500 + windowwidth = original_windowwidth + windowheight = original_windowheight ctk.set_appearance_mode("dark") root = ctk.CTk() - root.geometry(str(windowwidth) + "x" + str(windowheight)) + root.geometry(f"{windowwidth}x{windowheight}") root.title("KoboldCpp v"+KcppVersion) - root.resizable(False,False) + root.resizable(True,True) + gtooltip_box = None gtooltip_label = None + window_reference_width = None + window_reference_height = None + previous_event_width = None + previous_event_height = None + def on_resize(event): + if not event.widget.master: + nonlocal window_reference_width, window_reference_height, previous_event_width,previous_event_height + if not window_reference_width and not window_reference_height: + window_reference_width = event.width + window_reference_height = event.height + previous_event_width = window_reference_width + previous_event_height = window_reference_height + else: + new_width = event.width + new_height = event.height + incr_w = new_width/window_reference_width + incr_h = new_height/window_reference_height + smallratio = min(incr_w,incr_h) + smallratio = round(smallratio,2) + if new_width != previous_event_width or new_height!=previous_event_height: + lastpos = root.geometry() + lparr = lastpos.split('+', 1) + lastpos = f"+{lparr[1]}" if (len(lparr)==2) else "" + previous_event_width = new_width + previous_event_height = new_height + windowwidth = math.floor(original_windowwidth*smallratio) + windowheight = math.floor(original_windowheight*smallratio) + root.geometry(f"{windowwidth}x{windowheight}{lastpos}") + ctk.set_widget_scaling(smallratio) + + root.bind("", on_resize) + # trigger empty tooltip then remove it def show_tooltip(event, tooltip_text=None): nonlocal gtooltip_box, gtooltip_label From b4e4b8a9351d918a56831c73cf9f25c1837b80d1 Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Wed, 24 Apr 2024 08:10:07 -0500 Subject: [PATCH 031/101] llama : add llama_get_pooling_type function (#6862) * add llama_get_pooling_type function * fix argument name, move with ctx funcs --- common/common.h | 4 ++-- llama.cpp | 4 ++++ llama.h | 6 ++++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/common/common.h b/common/common.h index 157b54a3e9e08..87361e8e91500 100644 --- a/common/common.h +++ b/common/common.h @@ -86,8 +86,8 @@ struct gpt_params { ggml_numa_strategy numa = GGML_NUMA_STRATEGY_DISABLED; - llama_rope_scaling_type rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED; - llama_pooling_type pooling_type = LLAMA_POOLING_TYPE_UNSPECIFIED; // pooling type for embeddings + enum llama_rope_scaling_type rope_scaling_type = 
LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED; + enum llama_pooling_type pooling_type = LLAMA_POOLING_TYPE_UNSPECIFIED; // pooling type for embeddings // // sampling parameters struct llama_sampling_params sparams; diff --git a/llama.cpp b/llama.cpp index 3a4a03d8f29fb..3a84b4916bd30 100644 --- a/llama.cpp +++ b/llama.cpp @@ -15599,6 +15599,10 @@ enum llama_rope_type llama_rope_type(const struct llama_model * model) { return LLAMA_ROPE_TYPE_NONE; } +enum llama_pooling_type llama_pooling_type(const struct llama_context * ctx) { + return ctx->cparams.pooling_type; +} + int32_t llama_n_vocab(const struct llama_model * model) { return model->hparams.n_vocab; } diff --git a/llama.h b/llama.h index 7bfd13740cf25..0eb2a1e9ab0a2 100644 --- a/llama.h +++ b/llama.h @@ -390,8 +390,10 @@ extern "C" { LLAMA_API uint32_t llama_n_ubatch (const struct llama_context * ctx); LLAMA_API uint32_t llama_n_seq_max (const struct llama_context * ctx); - LLAMA_API enum llama_vocab_type llama_vocab_type(const struct llama_model * model); - LLAMA_API enum llama_rope_type llama_rope_type (const struct llama_model * model); + LLAMA_API enum llama_pooling_type llama_pooling_type(const struct llama_context * ctx); + + LLAMA_API enum llama_vocab_type llama_vocab_type (const struct llama_model * model); + LLAMA_API enum llama_rope_type llama_rope_type (const struct llama_model * model); LLAMA_API int32_t llama_n_vocab (const struct llama_model * model); LLAMA_API int32_t llama_n_ctx_train(const struct llama_model * model); From cb2dbe9e9a8107de542e571641b27c44cc737783 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Wed, 24 Apr 2024 21:29:21 +0800 Subject: [PATCH 032/101] improved rep pen speed --- gpttype_adapter.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/gpttype_adapter.cpp b/gpttype_adapter.cpp index 1712ae11424e8..87da30928d525 100644 --- a/gpttype_adapter.cpp +++ b/gpttype_adapter.cpp @@ -429,9 +429,15 @@ void sample_rep_pen(int n_ctx, int rep_pen_range, float rep_pen, float presence_ const int64_t t_start_sample_us = ggml_time_us(); + // Create a frequency map to count occurrences of each token in last_tokens + std::unordered_map token_count; + for (size_t i = 0; i < last_n_repeat; ++i) { + token_count[last_tokens[i]]++; + } + for (size_t i = 0; i < candidates->size; ++i) { - const auto * token_iter = std::find(last_tokens, last_tokens + last_tokens_size, candidates->data[i].id); - if (token_iter == last_tokens + last_tokens_size) { + const auto token_iter = token_count.find(candidates->data[i].id); + if (token_iter == token_count.end()) { continue; } From 784e11dea1f5ce9638851b2b0dddb107e2a609c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Wed, 24 Apr 2024 21:29:13 +0200 Subject: [PATCH 033/101] README: add graphic for matrix multiplication (#6881) --- README.md | 4 +- media/matmul.png | Bin 0 -> 265705 bytes media/matmul.svg | 1238 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 1241 insertions(+), 1 deletion(-) create mode 100644 media/matmul.png create mode 100644 media/matmul.svg diff --git a/README.md b/README.md index 1d4e9d41742b7..72b5a2c881961 100644 --- a/README.md +++ b/README.md @@ -1117,7 +1117,9 @@ docker run --gpus all -v /path/to/models:/models local/llama.cpp:server-cuda -m - Clean-up any trailing whitespaces, use 4 spaces for indentation, brackets on the same line, `void * ptr`, `int & a` - See [good first 
issues](https://github.com/ggerganov/llama.cpp/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) for tasks suitable for first contributions - Tensors store data in row-major order. We refer to dimension 0 as columns, 1 as rows, 2 as matrices -- Matrix multiplication is unconventional: [`z = ggml_mul_mat(ctx, x, y)`](https://github.com/ggerganov/llama.cpp/blob/880e352277fc017df4d5794f0c21c44e1eae2b84/ggml.h#L1058-L1064) means `zT = x @ yT` +- Matrix multiplication is unconventional: [`C = ggml_mul_mat(ctx, A, B)`](https://github.com/ggerganov/llama.cpp/blob/880e352277fc017df4d5794f0c21c44e1eae2b84/ggml.h#L1058-L1064) means $C^T = A B^T \Leftrightarrow C = B A^T.$ + +![matmul](media/matmul.png) ### Docs diff --git a/media/matmul.png b/media/matmul.png new file mode 100644 index 0000000000000000000000000000000000000000..786a20492c02b4ee83fcb2a2bcefa0699ee7a55c GIT binary patch literal 265705 zcmeFZXHZqy7B0GNTWu4x0TmQ5f{K7BNCs^Y6crQ+l9ebRAUOxyZV@Dy06|iNNRTW! z8VE|1oI%MM$r|{X@?p z*t+)LFYxyZ@do<;e);+0$MXMvbyj@4CAsaNe@;D1IQ{Scr%;|e{VQ&vB?)q<+Yotx+_`fKA3%Uq7*$0a+tso%aP!1{k~i6zP4khY#*y^*Y4CvN^;;=|rN zH{TcL<=N8G!Y%c44R-u5r%^^Ka|3?G+5eQqK+PT=ec9Ly&vcup%h&3~Otp**5#kO1V=^_o^ zH|O;JH?KOH+m|iP`@Y`pox1d#rQi~4plpxQZk2oIgeJe9&r)GKal-w>gAGOQ^rtN| zjGUNmjUFKl^}Sle>(__d>?g`ps_EV=^ZxCJ zBs|?uR|uK;4o>&3nO1A;XCl`g-Hg}jU|yLmDz8^mR2(ZkUGaj!+%vL8M?c^9ey>iM z|FC^vU?5{eu%qn}7V402&Kzx*wlG2oW{(%Mg|81hRNNk1@#TwauETmT{n~N_CBrqU-&_?` zpXsYSL?iU${U)i^8nSb7Y7b`SH`Ur_+xJg+U7UKJ$Q!9Zr5>dm+o`7R_fMTtOgaly z)dXwlZ;i67u4AwIrxK67yq?O&IVq)07as2_2;es>&TdaLZf`KoTawI4GiuF5SuT{< z|7Z1v8xjv~+O)|sccdojhKgmQwszg_ojb)$OicV{O6F?{9zBv4T{8ErCo5;PdN*=7 z%iP9oLgVA(2M=~+4#u%fbdDV?7Omy~sB@gGEah+_ijZ$0GAtC=xNRGk{p^oOg?Kf; zDZ8d%X1;s3{AQLdoO0E2Y`YyZ4^b@C<&S$yhTHvy-;6XupTN@788#&!o6sCNc(7b_ zWpTDdeOabw>F>?E3l(z5y=6uXTQhk?mS;O}rruRNp_p_yE^{FI&exE0auU?CbQXA< zp3e71I+&?7%gSqLD5;`j=<7e{Puxrhsj*{MPIZ@iEAKtG_;Bl9jY4<&M6EB+8nSK7 zGFUIJy*5Lg;|Uc_iJB7o%)g#Xv>a}d8MPXyIVLjK^;{{4pe71MzixhNuroL@F(xMF z4o-ONbTF^6+CG!~ZQ5;lKR#~mq;;A-&o1@rSr)bh1zr5YElysvuu~7pw{2UyeoJJ5 z9wo$5_{aOTCgWdNM;HB1>q=!xkVd0_4u6vGR30RY%gM<}j#fjm-i_GulW0k%qs_q^ zzNvS=wj&oY;EC16kyN>U$bk25!%0&!GtHHy8LtO40!Ia`M)tL4T09ewC$@?YT{L z?8c{4Zhv}cQhY2zR+*DrDCJ_>24?${I;AfYo2Nd0{HU~9aQE)rMd>D;D9#+anLD`B z*wSClwmu1@_QzY&WN&BZDx8?m>D=GOWgf?oyV1?_*v(C&_{A~pHc{+F817zfWEFJz z-5PF6F&T_a^RXR$E-ez3!cSWD?BYa4N%rWC-D_s8sHJ-tFY}HM%_z)qXiT&Os?l1tiNDnWYpEwl~2mKyV!f|!v^O3$DFx)ICEz;cJAKo zn@0b=&UmLK1N+HJiOLj%df_;nbdA2R|0Le24Bb?gWj)sMQqaoCZzgbn ziml(bbu{V*w{}J_`n-6NKh*fnz}^RYV%mNqL6r8a9u<#ACSjbh#xZi=`O0KK3LV?$ zfkhIoGiuA`e@M$5l}$dvhHoQy zBcxcTJissgOux7`HnFqkm9doU!qB^lLR9&Vife5;`slf#C~nt!YEd{}nNnYwQs**m z%N9$aYaEP^!D#le8GUrf^AQcBK+WtHN{ukJ;A9)~U3cuLP0_DCjxqYwx^>jPSI~9f zjA7Hx`0R1X&?eNk7*4$3jF;&0ET;G6PAFiQ;I=J z_J7x*jJJK?US6XsD(W|H-h5E~!YSJ9uO%BwF?EET^}Z4;A}q2rSuK|0-57ZnGe0zw zj!ph>wB6EFeaU~8Gt+3i>&sY?=yKv86(=x$<C2Q>Ha40R((B4vF9Gg}S+FU@U6ED5%0Ax3-tkh?^HMU+>V1bkd-iPh1Fhrl zQL@n=E6V(N-iqrS=iuO&*}-5pGZHwvM4?RoT-!@k2@{nP%#3L0F+b)niE0=(ryAPD z96Ns8?bwYsFKnFNJFmGOua=Ylj*1k{zOh*xE9zXDX^*dYM^0UjUhd>b)@WgGu#;Mk zLvZGAXKhTYgvn)C)T53yI`Umz^*7M$yBWpwcsHk%tZ#5|UXlO*+#wP;Ge$B8J6g;t zTIc59sxRp?M3A?NK7alC^%m0-c5lbrHy_Kp9R@VSy4%lFZnf_xgZgYZhBQa2QLBGs z>Zx-bz*~j>JOc)AVW=xA#f(CY>~wLU|M9BfK6W*%-%lb+dwS3b+bz@;G; zqmp5oHi!x7ioF>cn?paZ6y0E%hEPkR+jxA0rOIfP%*j6EQWY@Y#*XMzv zI2tbnEcE?m7N&>y0y)(J+UB@WH{08Xd#D9)44DA$Y?r>Eo6$b3loW)#okq z?BZ10huU@n5can9FUVs6lFOc|Q?A6Vk$KnE+glXwtHwYo{Ob_)!mhn?d;GYhB>BSC&e^&S-U>$yk@Osmb{G2_F%_(y(_QIu)n$Dw|(8ZbzCML`T0xA zsdwq6Mdq}k3?#(34eCzdpaom!Bn_ptJIbl8!V3Lj_OzDmU`Or4^WQ!^2$K#L$x?ai 
[... remainder of the base85-encoded GIT binary patch payload for media/matmul.png (literal 265705) truncated ...]
zh)hQ^24-D~`$+ojhv0{Wvi+AJNB82~Zt*D`^}}bb`nGKlBg&r~Y>z)P1d31J8iJ)x$sW#iwW?YFwOJU)k~{3L;oGDl~v z@IYZC46Li?RllvNv4&(Rge{gNcF(+DgXbPDo~!QYWs83y2gn_w#Up+)>~=CBVlPI& zT6jZ%?9*y1mlaY7SClYtdzvX~~NH`p`!L2sVB(Wo1iYdUU@_&LFBe@g-x5$jMBMqGM-^mSzo2 z$8OAZx%g(1_+XHOUnIjGV%(mD@n6*Kpb) zH9g(h#YHD>!Kj;3;gXzAoIKg#o%1J%CP1IpFVss&175kY(k{RtClapvuS08aSa-YT z`OEX@j1}v=y`@k@JygU+Z{6Aj(~^+gDj-3h+CkmQT#fw}1 zjTn31)TPGXB`ih#*=Yd)Lxdz|vq`1n=|$po$9 z#cg0DC%X%@Zp|EK6TQS@cpMiioxCs~gD`w*e6OZ2Cy zeRz=?apF(Ljl|mTIA#c+I-v?hUGM*0t;+7E*He@VwMJO;baVlk#Kx9z7!;@9xaR2; z(Tmc;5l;U5d_-tH1RGfFGH}$WIdr%P|AD=viK1rOZ%WNC^r=`w$p`WMQxrCHfhWaDuF#AFO%%)&d6^HcvUZmgHZGLFuWOHFdK_T!-KyN2gMh2+c#bPUuZr z>yq^IU4Mt1iUUkE*Ak5})Xy`2ka|FXM6xcM)nY~-+HMBd2nY)D%oVBmpX|r6vfH71 z7iKi~35f8^;e0HVLtR#8<%|d!H&cnn#5r*Mu)p zMHwLX0m~L*$zsT{wXj`#O$98H<4Ch;RJ1tM%r-Dw9j#*q!K_0=Doz_3Kl~8JJnOLb z?uTj2NAQzyFb8E=uuBGILlk$G8hCJ!vEN~u$T zFb#MkAzKQ>r|p}zhA_Btzi(~ea0?@<7>0I^_#d)h&p{Q*T{?VJ)`cC6jg*|yaH9K7 zDQo%Z!VHyA<>bOJae*fSLbf!;#~T96Fir21uWsu;HhIGDY1TL&Tv~)ls7QgBRJa=~ z^0TPf4OJUwpovmAqcagB%^&1R$&SS1L{bvcDk=SiYZE@NhEoNj7)EU-IQ*d5n_Gm$ zKEG76j?d;)PMn421`d^^zf6+AMO?^(^Q_=!NgMJ0?`_8%r}g&mh=y;XS~@mvpBPOD z(hFxzPE6RT$2UuTNdjUmu+lr z1lECt$%Gb{g@O3i-Hhvg)=%iZl(tL2)lrv}?bNf~%oU*z%Fgjg1z=MQV@6dvaK)te zPq(h?*)*Da&Vkziacd12U@R=;@j9kjT3TXY8ph%}4t1f;sI4BonX;;nQTOstE+mPj{|T#1C&``lk>Q!;^$66Z6m=dC%U`h0Iu!^39%wxy(LD!xdV5}-hzu3o6Wq73QxB7$489L}}fu!JR zauKSz2riOTTM|{|#<_St_EO>q4W01KLm(&xQHTQS5CoHf3puT$$Zl%Re*X~{A88H( zB|f}aOK`Mg3bRn{VqEO8F-c%q6=i)^*60C4MI2QTIJoXB+@2mNWH`hZcllX3`!K*4 zG;xealwiu4lqSGXQwg4`$+k4-mj(6I^ij+{FAPco{`h15v8Gw|eh0SBDj5x3mR(O$ zssEOMMv6Y?$|LVPTcGZU8y_~osdFn_*XU7;S&E(jfeJ|2X*OGtEs0U*uD`NLTav0W zjNt6sB|NOeU_#}-%8TioFmp6%f?&LMo&`l`jm#9Yx~ofyEq4~4k&vXaHNiB|U^1J7 zH85pTPlQlF+(l2_HVLSYSB+5Fl^Re~O=JpC;+lky$Y%|UvyFI0yKfPx8fG+hjFhh6 zpG}zO0FpB~G=3OvTAA8G?^!2*8JsDc_UM zbcAOOwq(ax!~<7ow^p1{J?Q1eQ=WxS%%ez+iS+JpPL=?{I>a)zqe@3 zG@G?0XZ5U!+EF`<%(Y{NIj*o7)nkg?FRds1`lH=X6J~t*Rqri7AMdmCu7!`!`<`ue zrY`KW@0RDn)t6$nmhQe?vo5{(fT3GLO@4mMll|Bm1%eV1ZKnCx!b2-1U<;06GI_;u zrXom=>Nzz-$H~GX^%Aqr;O$Rc*_|&y2e+HI`<)L-P9`zN@br_BkdsBakUMCKSm*M8 zFKG_zX*SWgs4$-vE|tA=l`wyOnIHJ+@UAqV$>SE;y-M@QTi%#Qr0l)X6YsK#P6ZE_}=vSXV+ zkWgaKD{T+=fF7E-5K-A@!kAxZpz1R#6r=O`kaU~11`SGg5BRwV)-|g}N54M4mkx(_ zi&Ye9np~CvlwfZ({EH6Rw0K1xwr{)nF_wwI8FHet=C!#EAh@dY95s(lAruQMgS~e# zUpIb{9QCd+J4PB)N}wO6d8H2!jBJyELZF?tSz@SaD&IJHi@P2jcGwH*xNQC`%&?<5 z&mol7>>c!-1f(5fYHzx?3hqimD2fnav}q~KdPwZxWmo!80_@waTIuf_#mkL+Eq-F_ ze!OC7Yk}IBOx8ZdsIuwbT-LeY{Q2|s7T)fdIi%}TUSbBPwA#9ze(fW*4*?nTkEPrn zXqk#-&W($Y0=+!}88gil9mlEOo$t>gfCQBrta(L8II^ICCO5QpNvXg)37eneD2h?G z0cFSDyP^N1C>5`J9&JG1zLR;R<0>-nw9TA+VZ%((<2uY$IrqWjhQjxI*1lLf?P)|R zSm54^(Ny?~`bwNpiMhI(*A|kGm_Y<6=(qAGv3*m`y17Ae$|!B2p_)f0P{&DIMOOCq zd1v6Z^YRK$w0zB3CzIq3=G295DU~n*I_z~-(M|F0NUrd;#{J54C_6uKKW#(ReDa@{ zK(Ig<`_gJ-Xepav7{J{A?dD6xeSVJV)G=fpVA~4~CY?eAbD>4dFd6R&d&yM7=4<3p#5_KKWd>xN81z<|L$;J7)pbqPz9nZ+m;N-EVr*4vu^d+> zJb7hD#k5X$@Q+X^zE&Nb>Nt05ZN1(~GKdTw++^~pJhIKQ^Oj!dIXL@ydcytZ8%sI( zDTWryUhY)1wd|F0ix)|?T=YynPLc?Vo;d&QK6N4C>nv(%(Fk)p%culY8la9I5;thL z6}Tz2Glxuj&gG;}ZF1veDmoK6a8rKDf%9I%WWRATPXY&#n^o5X56%^d&SLc< zD^7lbFyN?)=RQORgf4$G;#4)Fp?<2BJuekWhOmIRjpflY4zqjGo^gO%qD5?&w zdC`gQ>%|gb0EaU5nB))0FSzr~_@7Qx(V7_Y@6y zIz;p6Rhba7=L}hT9)APLZ}>JjM67`muJ&)A8$@cZ5}NscAB=HujBUa^1Bnm(%g;ya zTHy9`nNJ==53y*G!KORl67r7sZ-4TwO8E4phYg47sbi+a?xvtMn>=5u?-pe)Q;673A+&DRv*F?=U-(b^tq+E|qgjM6_ z8|7lCT{KB%a^A3lS7^#bcQk**RhmC?3c~*FP3e>V9wMx4!TjP@(wq zEutjh-81G>O-S z>);Vi2AfO>2+jt-jZZM@>5R{{h-7ZK?v*Y(bSOerBzdYF4=#^FnqxT6V1%Mn9kh22 z0%amK9Ikog2uf@zhV$Jaq;s^4BnA9bZjCs%cl%s98Y=%!?YwTtuudtzv?7hQp^wfN 
z91*PnJ(Iovj9xVv>%Q%eU0<0+jmETEynhM7{Xk{dY`AQ{gKKn0mnRn&6)mQI4x6vg z`eK+0M7}AiXH^#1%5P=$(+qjr*fB6Nt}*Kid#_p0tc<0f#U!6+Flm>FE(nucI!*In zQx`b?prpFkR`a+r-vUXB`P0#-!h!+=i{36*7=wo~OzfgHXvX5Je9^;ckd@tXZ{=5c z)n`Yue1G+{`k-3fzge#1L?<0Pv+Asm$q+Q4@3Sh!JV={kp!rPdojY>sva;jEw^>e) z+HGy(5V|JQq7e5-xeun7;8#SN=Mm>psJ4T*n;nxk#QTun1IkoWRLO+_$buS@qvPp) zg@LhU4pEF~iiK}k4Z+(VYe;EIHScUN=?L}aQiI`K74{ zhyZ?aSSYH0@tv$=ivu_GpH6GL;iC9mW)3+pkC2;^lEMaJcChYt;#<#U`JRebO#`s| z7$8N0`}`$Sm*t+m=DwkMJ@Zf0A0{_kJ6k7J?3TV-=646K2kJC8_~8>5pyDHYylMj- z!fz^?oMmh?WxY)1bIIqKPiPCX-CKoY@+RgA#y7Mys@LeoS19tEp0(Fhk`s~tYF=4v zdv4c9f@N6G5rI*UJg3T8eLUVuNkHXJDL_n)*N)|rm&TB zNag6r=Z*A^hH9!9aV{ed+7DuXTA91}^uhHNcIT{=${Is53{eX!JLn^e!yXnf#^->b;Gaf-(<-)vGt|?3^P9 zpGRiDj7;cyO+6qMB8B);|4H&^uAauKWbenRDK~Ep*8D4imiF`Av9WLQM33Cq+xkw? zlo+pVqnnn%<;H~eB~$r9R=+#0_ieX7-;(`u%f7DVR`HV_TTN1@#ZxUn5O|%)$clHX zey!cVe=2irO|5rOd`(zA}7cF^YE4T_jfusO?UN+JCEhj)DdXs z1oL!_SBaet5^i24X~%dKs-y4Y{iBoY`y{yBxt&>ISX0o|`eKbNmF=IV5G$Ro?>Jxgp5-RH>SSw9R{&?0Uxn+h z{OqtgGin5B)CBKhw;l$PBV#}*HADFB9ZjD_*4XWa4zt+cw@gkV&2S9=YuJ*kn@7X@ z$G_EWT87!rZ=Fjanh-;^rYBcGWa0dZeaem--#$0!mwvHR%(}v6BQ`m&uare|ILl_h z4-~mX_wd*6-dT~Rk-F~$?ZpygIWGsMg*Xb3rOF9VjPQyg=gOKIPi66+y=WJVzA&Zf zYl~k3Auv;)rDS{Hmv9_@AWt?Ca+96cdq*O-P+uT~DYpUftv&!36h9@Y#Sn1I>D z%D?YB2V(03O*xTlBVG=H8a6D;-T!B?ED>@wyBZ9;+dqE)_=lz{Y8`n&#>6|u@*0Y)0d0m*e0#kQE1BYuiwVPc^gw(6~ z*OUAw`^C^n2#VXEn8|E{O-Txv(yp}f)$7|FynfgHbH9~-AKM&$Zu>EBx`gvk7oq(2 z9Fa91Yxo~FPMe-s!5Eq0Aw%vZJ|&S3my)@$XV0D;1@;mVFyJe!J|K7WMx^e~f6x^aw-5Qs_JfP6|GB_;A_f*%}x^cD@E z9U8t|$!C3=^4YtHOlTY3CcEf_eT4GM;YzF_Zd^3DO=Cn|hxYkl>$PR4AI<~WrWdwH z*>QL$R3P9A&W!HrwQHRL`^c+&3(yG1l1sd3q(Aj(AF<(0)EsZtjCAiJRzHl?t-V9$ z16PxfkH^h*Drqiny^oN-IR{D7N%$xiCbnBa_hGWh&j(?W!$Tx)n0>8IBwha?M~`JV zCqO$X2wQ<1DR2I(wba=8kt@~$JY0Ye!W13?gDTEz6CYMVT|>Ic19Z!bZ~gdz8;lbc z;=o30*>0xD5W5zlFiWyTXVf2G;2^{!Gr{>bd`Xvz$%pm;Z$q{Wh%JmQz>K zTzOR9d2G_fi}-_&j$2>wps?YcJ|wM7_g0BwVk%bq%8u7Wgg|eH7?)`0^CIbDe%$CG zz>%s(Z&UWS`b*E~WrcEJn(+O&Qy{Jbd2RLf0w~Q{A6rR%tCG0Q*O)`+pqe8*nelOO zqx#`MLjg>l7z_!@(V&H|#q;FCK7zix$~im+~n0XbQacRmBXdK=nw z((h9uk#HHAaAn@9m)OEu!Z}iEUCRIqO7`=^GyG+2!wNQS*szEbTxeSj)(p*&;}FIAWk}ig5W%qnPjqb7 zaDfGO!SP?~SyAex+xzW9=hz6MJC$)L6|~qm6r5Ik+MXT&{|e;AZ_DK*Mi{5e{NY=# zQ9%8mA~zq+DdIfSyFfE!zPpIxO)%0jsD_c2;oEto4-j+7>*tlu8t{XVNnL6xw0(C! 
z>oQVS#zwYtl}{Bt6VUSg+vf)o-G%f2;M$C-7g`v4jYFHmj(#GzxC<452k^ zhF)dR#;BNHKzhcLPu&u$-aQUpb^0t%VFyzV3H5NE!FKU;!=!lRwHfhe<+M(gw*kdI zDs`bp5&Mv{O_dH?i030YGa_?{DqR})FAiai0+6&4L|XG&AG~`WNMOIAb&5#2SV)AJ zR+2t0;5$V3Lb1Jk$MH&I_-{O-HaI7F;A{`Dw3jI`Iumc!u(25-02;;lmHoh)|iO071s zj7p&V>x}*CD928TxM0ro%w%(+|I7Jk2#;`Z9HJR+_QTeR;u4x?;xJh$NRn_cVR3YL z{{bVr8642!cd!x&QI0!63lfbPXOHX_Uwk3j3a`31H}0cGj9YiVkIV(d!P_h=U;!(P zJ>*1?%oSUt%g43}O5(Q|^_>*TvPJ9aX1X8vnXPCszjQIQc|VElNepO2_r5IW zH#U&}1*c{W-MX}|I`Dr&JRY4}2C0A-UZ-AqHZgFQHFfNH%umDOZNFPun7#(;9a3oqwnbmU0iB*@7-b%X9 zv%%F!ts}33IT1JkqB`9mmOIUc&Evz#?93kkZ|D`IvR@a-Hb6Y!RIu5VT5Z zs^#;Tq?4#v3VJ(O^O`o~!n42p)!gLJm;(0zJkzggarT>Im*0#cnfye+SX-CQo2keI zIm1_U-}dGmJOufNdMu}oG4~=4RN@0C6M;>+dfxQTg2D=Aen{pbnG%ZEz2t0wty`sk zYD{y6>Yl?dhXyVCQYV6ZQMe>hVjQje36}XeY#UU$zDBr~WthLLg=)?#=idA~>rDd} zdbd|!=h1OOw$S5M!oCy}QGc*})1Nh1e6eLYK&RMf(K+0*s*d5T<|&B(R`lsspIUj9 zP65(`EYkTc^&#V!TM}O2$)3*{>C#@Ee4or+7FQ5P=`N;JVMnLbdOxjV2^c1SI0+L} z?6f5MW}5e^Kj=g+QOCyC^Q_oN0fgZzr>8lQ&;SZ-4yJ@YXuiRAOC%9<szZ*Vn^$xOPs7E_-ke{cB83sFngo$5qj z#BQdYybmUE!h|!2csp{`mAELqee-6B=8XvXI5dh#UKn@3TVfqTCZXjpY42A*X(zT_ zwomu9{qoDZ;8gUvKS_|?otyB=+C3$E_a@U{ba%GBK4 z+}@8WAkA2lRtR-GafcoBBgx_Ejs%XF3@@!42ZIU4;_BKpL(}UsrMuv3xxvrRnOG`a zFlsaZqL*acfS~e!T~f{Vv(OK-xE8~ooD{yG3g(v(DyYBa4j8|<3szY|_(3kwDb);h zzBq*tI;w7N{_cwi_*%H)Rk5)nc}}0TUBrvD)%9vGjkPT#qPORmZx|#Nql#&B3ClLv zezVgKwgow^rcHrbrxtJk#;tU*$nE8zyX$UpBpTVOI9;A<6?#JCL`iGoMAa4jB;&@7 z$vzwjCsv=G>kBUpLx$|k{F4)aq}CoV86uH5=1Uv>`p|$rY|bn-J0NS$qu+9>D?@pY z-J*&-wo|GiTbJH_(uWtpZ3=Yp&%mSh(Er8TnFr*Ye{a7Tvl?T*V_&kbA;wZs!i-(Y zlFHIz52a|crehjT5~m=8z=VFWa`Yn-Zmdn7TYd*gJ^* z46EronMW)Lt{f7Wxed;A^j=LcW(Nqxl%}LxkS_>$uFQ$B6EHu^nXex+RGfMs9?5Tt zd7;0HB}_)K`Vzzr&D0;~>xGBN6w00vSI;qBa1Nm<|E-nMQ)d~7va3a<){zZ8;Mz#| z5>+E4yXWiKLmlLFE`Bwwq`V0~n=)j#HYOJ+!o$gxuth>?AFX9-)nB)NZvtm~gtkc( z?C)GxLkD?liO;5s$XE&&f^TCdij%Z>8s46BfA|c#=t!6tG#(d+Oa^WRrWLDouHI}T zNkuu~2?@XLK@&C+XJnR0Y*${qcrjmZgn1$Bu7>W{*i}_fAYNeV6}fgjYgbfdIjF~t zpAJP(+{M6A$8EQtqbh`#@7cTe0lBH91XB71mBhWg^hl}>x|?ee%reqH8WLadODrcD zM^Rc(i45?`HkIekX53U3Ywp8RlaEGpOlAg%l1BPwbO;4UUW*WBjQNUYH{?Y3%d%!1 zFbJ9wKf!S-nwb6Eg48MpK-D8oNi>_)m`8Zbf^<}Tu`wz#QVbSp8IOIbv{g9yj_oE4 z2eBp>DPQBl&pK1F!+y~~GKNEP)cfM^%?Qq)zr-R`Xd{%Or(cB~8gpcu_g zXUF)}K@HWfuZY6s^Oa`vU5K|f>{p5VGGPO=HPA=Qx)?PvWALEq_0hyUkA01U&A|y} z6+G~&5n*Q(5ktylt5K@zY2IeiS;Q3e*1NT!FYWgH{=4XkifWK(TF*W{s%=g-i4xH_ zOU8C6@k?V%e2uBdCcl0A3ClWB2&z1gHM{~c!;MK-IiVy?6{;cMBl4LP3LBCNd{4@( zH>8doO?B4NCgZU>JS-p=z6TW{oOdF;CjSY2{>l_V0I~I8;Yn3B1n99({nl;U%HabD zcTk3T^l4(oIDOPT3l5bzm>A-3KLq*dk)TNWvBedY@RqRyju36PKK}rt*zl5X&Q<`! 
zYO^LoaCieYA`>T4h+%V6`FBE~(%n6wCTofKu-tg-vI#Vak2=QrCYc=bo4=HJvCXK8$dF#OT>QFq^Gz(E&duRmNKA)!(9KgCWczMxPOF zb^>&XBq4|JJz-$bk)iWq!#T_Js)yk=(3KFr5ANqYAwg281sY9z)dARjJ|81AZ_Xf% z2@@=7@w+q{zNeH4CIHCKALyyCzWBtiB@mypFyxf{c12e?jR_wMEjoYu?J1adVk+sj zIl&qtoh)$5i`!*ADL1nHnj5UI&aGaWKE8S{UeVVZ3a3w<+I6-94I3OLIUuvS8AHRb zoaQdY_HeGOf5dH9W@PPS@F$aTJITG^iWt-0^Er{*$dMDm5TwvK6#YMpx4CM0lEPT1 zo&djC#8MSwC43N)7|zSrrO$|gk)gsGGy)Asgwmk;7NW*m1{{G4h&w|4q0LQ)Nu=3GH50ZqUBAa-Ke%c_N$XalW&;4 z%RQgbEWu=X`l)N$E(y@YdTtF5d)?nBuN(SNg-sO?xDG5W-a^ONIozA%#q!8+rhS!- zO8j~mxm9)v-G63kIf&y_=w_B%iaJ6kDFtRjglsO$5sLpgZW5&U92d)oiZSa$Y184V8gS~6G>e7 zBDBI50RzqFiy*V`Sci`o5zcS&9m36}8vFT4znjky5aH#7u%kTABBRYxpUrRGXW2** z>mL}H0!YenC}!b&M;HUc)c-zDT3<+^X@GNrt#dxrV)WAMKY8z{gw!stpTp4vmK~>Y zY~R}BD3AY`nnsqo7#2mQI@>vuuk6^f=Z*|(L6=F*g)7eAcZ^M^ogz}w0e1`Cn`a65 z@z_a58A!=FAlB(7YYyNvBa>Ru?R$-C8aw=_iRkgnvm3W6`S3ixH4`W5jCpxkz|5_E zX3Hh9RGOby!xbR=KFZYdp=SRaGJy^}Jr3(&p0cz&C}Pf#t&i2Lg~WJ~u|GL`q0uh@ z2LM^=&(IgpsKSSiE`?;UVq=)lOn+9$mUse=?UV45^&@h3oTi3e3znX#b_*v4?aPcA z5o#Xmp~NKB@(xqtvM@LQao{7g0!Rwyk^oYV`M-PrzF6nbnK$q1>O@GKH5qW`a;aKU_@;v7F0K@uiHz_)M}a7~ zu?I_OYN6$|0DrcQJ$m?9`C(3RX`9=UZhNsK4L}*LQ)~do&inep<}jXOr&xiAnM;XX zJ1LuI=}dyr_dxW*Ved;OHZS5Hol%<*XhlloH*enjkj?pH!;9Hk78JO9d`U}mP*a=X zAw*$PuFmg{lW>X3C773hYLW^{UI5d|`|W-PY^Asn{x~^j+dJ`?{mCRNMZCtvnf257 zK52AFPD?8EXuB^*U=U}JwvOJCVV*zixj*1rfR7U=PV~)|8iz!~v=*{!20A=vDH=MG z3dT9p4_}e1;`gbIoktLcSYq+mns$qvz-J4~pRy}5WkJ@InRvggWD|=LlK#8LKezQ> zbJYcpFklqI@QPEh_QFmVC_rRkELuf|8cy;QfgB@Zj%hZ+z*FFZYBPVGH}8#rVm!y%G8TvVy4sa^XAa?xqJ z^}-{SZ^OQ9x#O9r{zOniaqoa6ZyUC!b1}e-F{@mJ%Y_&hFwJ^qoI}xWL3*T$pm8B) zD4{(Oc@33QDfSkDhl!LJjh}BWzX#ZPK5Xc@h%Mv|rU$wssctx~f6n24_d;N^P-_cXJ;4T*zu<4>XOfUZoXFBNapTDA+yB7I_0mH#pCXX3NMr7 z(^!Tkzn4Ab8jT4=k`*oQ(agRJAt2_G*+h&c-mXQ>RSq!TqVe;^Y&wK5!XyPOu12fi zhH19#!wcUNtm+aIAu)(|NXTuv6B)(oGQ`8WH-7HL`Mid2(5@;iRU)jE^M(v#s7KC$ z<0<)tQ-CHG z5f2Xj&H$lwn;-oK4H&Q}|3<-TYsx9$vioIm-OHa|dXVI;WwY9>?eFieB(z+!`?r+a zGFTECrpJs=PuDl|V6o^9m`=GRuEPu)-wUWAo?jAA)aE+B=6j+A!x^qOAbX#!Mp+iT zgi2Sym8H(9#EA3Si&foOwY=LLG;bK}SYlBtCwyoybsrq-pJVg}B>KU2`VDha~BAWFSR~?6z*to+^ll zZ0H>s={_c%V~UP(9mezLpYZ5M*DFjtChmTnwu-#iQ#v=2!%I4Hb^8`Mz@RzH*qq_l zk!>G&g;x|F@VEv=th=ms>G5>c-!(hRJ!<>a@7%>MTe*uduL*x+vRJ{v= z$Kuu_R}22=!8lfsjNEV8%c)0Jy4?6`_niAHw?C`x&d!X8D_E4826ySn zMU&d}t-VZ`75=|8z{=`od26Pokt7VsDj+|&lv{D)(4l#u35q@w?{+_E)le~aOECn_px{nqZMxfMjn!Z5Q>di1FN8$VLH*_;tll_3dtwQsF- zE1;#G6aD=ARazTu3cuzwPRY*BUU#c`wZYF$jH)(G)C-x=ivZU*pjETzz6bKC)`Q+B0>y|;z*%Q49gd$T5Eg&9%YAvhXj(xUY4|$E9KXmu;FD#sW z86|w^8n?@K{F+&k&2i0X8ge@wpCMq5grIwMaTG0U;GO_6N5^Gz?b(`7h0I*Vrq0g?4R1&5m^P7I{cy$pqkTi-vZ$|p*ezAUY zL$zr(9#iYUAWgHuCi}Q~MQLjN^HjUbBZs_wS{tobNBB=j|IR~2pyM%kL$i3epm#zT zS@`SML*hPh1`qkzYI1D1~h|P&b z5B-m|9EYog=^`dyspTN@HqM6Y)|DAflwi&j*xH(hL_ACQ76HS=&mdDv;t=3Q|2qw8 z2fN!4S4;MI-z>OQzU24;<$x!x@m@@cnSo;1TK3}QVrxEFQ}g`aTGSM?=FBlWK5A9d zt0?yG!u&*#Bq@wjx6u9CbGNVfCTmxjEe59z4g2RG4a=Z()?b+yQ|g^;>@Q#9!Iu-) zQkxsePsF#X>ulA*aPm787cbhlvQKpO{z_pGvrrluzC>=KHP4*&Vj(&()1 zO!cy9QUbg#Yev6(+H=ziBkR@wR98n0O*yZkeyXWG45eOl>D9hnyXDc>-z5&}hyCQE zF@~Fg_gzXN2BPcyWSi###nQYqJ8tlAn#LNo8khfU zzi+m^bi^+CUP)(K3|+E`W{_M@uYQhQi& zYwc%gD=5F0T8sJ%+XCR3f~!_KI7hGER* znpbzdue<8(+E%4Yw{HQ2!;o^@wdPIsZxx>zGhjNz7gEfMf=BI@{plsbT+6|B|J=Ecoy&c9{E5s0xj{ta+Q7r(d&Qd1gq-Vf9O6^a_tX zGc8duSZMB>ezN-tqdE!(l-dEDgvpTYV_?mdjGH$jW}l_{R@w}bEzoS(gNER>Y1*&JMfj%=scCPpV7kTtH)9%mc0XZrN%&3$%h zLH4>Z^k)U;?NW3(^S`&ZBIV*w)F!Sp;?Z#!@#~b;=;?%zvrRPuN-&f$B(Yobu9cyM z^I@kx1e%z(OUkM_#6&mjgx3wvfzRx(*4+xzA3b_>4FPwtg|&TDH4|Sidro^z%9L}2 zXP~UAc&e`oslY?SVzC0x|8@2zKEoiy26%6Mwsd16;{vGLU0x~yVIq(faVZh=0i+zp zYV8`mPLli#8gzTi+k@N-l-_)m%ha%lg~QerUQI~dwpF)fk3M#Q(F63l 
z+&N!Fp~#PPE3Obo+dfLU$Evt>>(&r0t;~Q%90t}o|MV*fA+LBv#*Pmc#MG$*<0^~e zUN!J<=u>hR(!QNtWv?AHwiI4P`fuUtMU<*j#wb7^=TE;@NITpb)B5?R-k`iSWXVkba6_ULA+t7Rzgd z?J^a&Q%XW?noTKNAK-WMufLMg)5DXV2?}o9zM4s|i+GMNFS>l>ziU^t?qEvYDvN7w zD4AFWuMn`UBDa}(wHtZ?Bhxz2MRFH3h_m9$ zWF-TF)#gUd6|**9z~?X94-}$FaRLX9pi)98Vclh8jZR?{@p1NB>N`T<)XJkgwTaDj z=~J)Xc_rg{lx3l-Z601YMXj+tBb}-?6DDNH?L?=c;A=Ph zwN!(NWKf@uS)aRrg#5zVmtf9rJxdCrqwz2K^+ zR$Aat?!D8&mhw@$bHF*(gQi%c@Ir5g9BTo1yfeW)q<(zUyJx@dcuSeHL?2kVdL4W_ z;WjdRjCI3w4<|$`Bt{8V$gHG|vmg6xet6@E2};XIHps7ar=Ebh-3-@t~kZOd{E;BS9(h@>^ zgD5wr8tD$7MS?K78!7iL?eWa*y7X|AW40Rv8=Cm0OP8`lmkjG9&yakvSfAbqA9fX3 zO~l#b_s4epP$nJKrV9^V$b8OGYVaUNlJnQrZ^NRNNV7axF*D59=RE~t^QQrJZ2u`B z?qL4U=utBFAgjEf(Kxcl%~j(;9kxC{ zH2p+mEvVvO4^$H3Y9fkr5oi!3E_=;>MkXKtKG2w~OFCM^8^|3Wv4t@@!Lx5T()ucA zt%{bd`qh|0N=83dIULT!95IX%{2*hV`A+~~607O67M`k~Kx!?j%9Cq9Kp5eyYSE05 zC3{a@mN|J?$1Yu7T+j#B494Dz*L1UZg$VB;2|@!zZ`dyCHz9@UgE_T*l=0t+Gv+ah zjI7}@xirY0H)>_D!7l!#_O7b(VI;qcG0m1SNN-TO=xlbROlE%W-vU%@-VH7;=#)6m z6_u2fylSkXHRdPdPI$}vPmu?!Z- z1bD-uCvN|J@$^J(z?=vuYj*ztu=z^)ZAsbw|a_O>2iwIGI84+G* zzgi9z#_7deQ7VyFFKtLB`G!#5U8|vyF2Pn*j={bJqi%a=<51-`aW@u~E+`i?|3oW_ zLQa~0oG$P&#k8AXWj7@1@qLZS zCEG*PfmeY&H{)|s0j{5@fBv=GLPU^^z`i;qKf$`w8bRz zVE+RlHSMyVK*j82dvr7{^}vdFGM~VTp)+|(WshRQ-{VU@lhQSt+*T}yRDCuo4(YQo z>m-txfz5igM4YkC&8MZxPTcIuPYNzoT3Q-M>cgfFBveW5S^T98pL0~Y%4x;P_TSDQ z%A^Ar+7XFLBysp*V^Awd)O)m!w9~;#Fe%)m=Y;KJc_IXTd1xFD^2yY~LDE-bZbLED z^!5vQk|s!D@n}Ft<%$u$;(ny*L?J*nEhjUDg7^N4x5RyEFo-xAj200m!?ci~BC=Ps ze2!=?F(rvfcPpri3RsGISKGoUL3;3(zEPVXb(ySIF)lTDpD@rB02t(SWOMvucXu^u z?y?k2cm2{q=LWkQ?dY-6ScO_7f=wcuNk$GW8d3IHBE5ej-BpCzL~2XscKS=p7_3o{ z1CC<3SNUN;(YC#II)tc}*A1D@gsL4YVrdL!lJ`H{9!kuFe0G*5yZ)!_JQSyDW9Z2&;rDN&Eod-DO1joh>9Mi z=wY*t^fZD>#yLEn(KI>{*SvVYI>0_rPF5K}Y$?{C!ZPE7ZFP4V{6_DPSi)-w!${5$@O{@?!ertg4*mg&`he!g1$hU~q;4h*a2PaeKN}Dr zynT6a;x3GR4XLgfn7)grrPYz{zhM19lST%6oym@ex%o-kg`STTLIqn&dVsXOAWxnT z?)P$Gxh^xUf~>FQG(bYZ#xrpa4&B0vqfJ^atP^@w!cQ04(KMZ>%J=MkHy3wqA%n1E zSCvh2Zx#byX?n4iOd^)wd;oV$IM8gem=)*CFT1?-64!45v4O7BUf0fqm;q%3*hh=Z zNtBA=Jk966x?WA##|{-!by?7)44#SUz^-p-=af;`Ux_U)TmBxW zFF+uJQ=)CpC(j9-<9F#%t;=c0~sAki*zY5F18%hl11RUyX^X zhsu>nH(sCgj5BrXC9yn*q#YTYw3g z@41Ey%IuzUQ^KVJ|Fp|)w(!ZPJ`vG38TX+(a5w8@b#q)qG41M87(5)(Y6d4O>MVJ` zWOQI0kyB2{nX-@e(FpVBtRU_-YqmWA(N*rcr zW;PelHo+6UDUJ&hM)ufqWwfc=CiosfN{?B~QaJH{6WrzC+htkBzmXsucwSnn0S#l9 zWl9-KOoF;#-Le%Xhl^V~P1~$1P2>7QET1kzHxxDMTQ$)ZV}*1$mab4Q!xkdU86lJl7D_rs=6&`<52c zaC`eIZ!0m)_(SnbV!lQ88YXtI3}#?3nzfOtkTcj+h_DD)?{j{BmS)onxM$ReC-3cN zd7k4MBqHD2f1({(s#LQO@tAZNowbn!NZ2p{9d|m~CqiNM5~3V%&JZLg7TzW+p}#oP z_4S=WSAP7~y}weOZ{?p%%!;I5itSmDAYoT4M*TYMqLCjc{85F)?C!H}+ERp{DUKreW`*%ajKj_kg1h7l}Od!X!YDXgiY_iB~#32zg2}{9+iW=57HbGMh8@ zY8|AU^-9;CgZtrI(Q=+IvNhPGH>)~3a@@7h&^!e5~v zvwN~gju12!aCXUWh1Rhe7PFv7>u!FzMkI1s$};#NmqE%*#%)Am`JL|Oudj7eDzP2x zLqmQ(+j_6B-8MaGVbNsR=R4-*V48p?z>tzxW@t1oM^I0i%aqxpupj}F6lqq4z+6!$ zjv7-wLRt;zu!--w&@Dvu%jfk7ZEr(f$vv5f4OH%h-kl}gkY_7W0~#@)BR`AI7Y`|w zXFJ<>{2~aqo_g1HnNtBmh`!tVLIOP*Q(3oA6$}IOmUHBanqQbzB0e*fav)Kl$?+aw zY!?AXm*<6?ZI)AOwV#zYy1^{5xhY;S&eBwG4dZ|{Nc!V9wob}vpi}=jU%n#~)Pk;$ z57-eDIAI)>12~QLaabZ`2aa*c;4zYsG*K!cvMRpJhUS|L?X*}d_wIaoM`IjQQ^TsW z65rUuj-g6(i8VG;ja@E&d+-P}8Y1E1;!iBM#j;HKVJGq2)QE|Oa?cvR<)78S!Firg zrXho*$?GY01Wb`>WK{J2qg=arGKD8|@+_VVjZIoE6$=$ZMLR9jiP~!?_)CCTSX}e_ z+gGG+HVkYAjIIT^s##s&%QKRx{g9?`D8%`20i$Z2OMt z>bCrfc)epH)B`Fgzmi$s4$I^NCapD)laL#6gBQO%>(|F zoCvAk_t#O#Uv7KrRA#)BURp3M$m3~A7oqg&*4)Wu6csln-_pIax6wEEsVrHe7I{E< zD3;2Rs+i#JXMn@u2!TBVMn|sX=8W_%cZ2;(J?@p3)Q+Q-v9Yl)AD2n{lq840^q}Nd z`lNZPb1R%%+6wI^_IT%Z@O@FJ(Jgws4(_|KK1C}=W6^7phBcgY@+ItGnIHB!G=ThS 
zQ}WL<&bno7N4{COrn&<%gfu8Ktb7rJ0m3>HQ7vP&=e<>nzc1M(&u>$5>Vxhqf>Niz z`4D(Y8O9swb4ZhX$0d1A28r&YpgHQXY)JvZGP)k`#G%8se#An--A%`6(-4JIrJ7o( z4Q^>75L>!;*d8Dt&U0Q!F7TnC+8JGQMuDXZUd^LL8g2%+G1}*l*lpg2d&bMo1-Gku zU_UdT^GUHPTTN=KRMfquDe7}vtan&`5d$gm=>|eUe)qUli=z65kBD3uI6rWak4UE#tF`O#*D!~MiX_LhOXi^d zB75L-NCVF3d%!C`=8-s|0P1IS?Rtq^RWay1I&wY1P+?H`QSV(!XNJdCBy}=l<&}0} z4`G_*or^!nYZO&z0_Sp$8BY77rF+{5Fp{9oFIehONI|6)N6}2qCs9Jm=(?$BUAij| zMrxVTz45rX?*o)rg#jlymf}S1H`#oB>Ikc=d8L-As?KEjtLkdUpdS-BmDef*`4F;I zkTW^Ms43wTjrQ4SCiw$V8?Vw-a#X4Fp6}36Mht|(Bu3gM;Jx+2Rn)rG9H3urqAe# z#Nnt%6UawnV=4zHAO?3C`99q)^3O7x37VRz{ze&Bua~9waCcR$wE&ua8njYyf<@sa z$?4A?v1RUOGyS#!v+QWBG2`ksvA$B=*8HfYB6BrRqBfjTq508v*&3S;(0DZ#fkNUr};HQQS0OvMN6G5akh1yXUK$F zj=ECbS-@yJ8)(&tcZ=OE?#Kb{E0avJ8wl`DG# z3puu}+qTvAITQxAkNHx*)6U6EO{}5S#C4@4ci*40s_5$!a4L3V3jf%Dz<_Z#%lmxv z_%nQ!7*i>1aM-j-$4AjYh;6s+$m$AI^y&0zaW4b~@z^Qd`)GNp_p+i(6%Iw)TD|&4 zxol^6fKCo`ZEf#@isx;oAL7_H=Ol=CZM!0~WcW-d`$fs=77mT|%I1T+E8uK7JV(B*nBZ|Zbv2=QxLILJR9U0s#BR-v zG5EqqlsQf$C6`pkdhFhRTdbdTGjefRWt#sSi;qJG4@!CLm9}AU$b@ZKEs{{ZOnZKI zX%{4I6zs9=jC&X|=0>JpJwZXU?xWAl+$uQ$97}Z`e!TRdfU?(`OOpLgA3aOCblhl0 z=k)x&kawXkrSeu5)#OAKD-j0w<_$HC7=UkE9riB+@UeO=Pr)EDZr1w}6L-_U9CdEs zr@b<>eVc_X%3yBKcQ*D9Dd@+0To|3=8@l;`scmFT-i8B{>{M0;Gn=o>N;|@1BjVk0 zx}|B}7`wzdYOE;U{&eGiTt?E@yqX@9R)*i-n|Go3{5#yp)lT$K7K#053av}*(f=59 z;B`s5z3#J4VNH2obr`%}p5rrFkVn0}y-W8lx%?i>^~TtEe4ICo<lLMlz--a8_7o~W2RS3Iaa9W+iWWHyK~$_&80KehuYIJ zNMT+cz~?Js#G&Cs!@u0N;lsmg6l{$jxRqSL%G56E^N_F*1g_74q01f*E-#~Gi>TE2kTz^?mP$dDLKa2%$WDNNI0N6PogXncFS5bnm))%CRg z!Y!%F&3(>x4zIn(^j!1!@gHDezW@}Pxj!B@jrvEX8Bb#L@Rr}*^vEungyij!@wLWtqgQ;zt> z4=wEO?zXVDaGeA^hdf%u_|f+fSrUV9qT|XO?djB({-P(u2hdSX4zi;Y-F6^JK|uGAHqIXPV{?Ju8|bOZ#!MAmgz7sx*24E%->r604ZLh`((dB zlrUa7rsEluT>O5$ufPD<-cP#F2eudi{xFEp={`q zHFt+m2ht06<;ujTORibrc^)=ME(qTrOFmyfS1u_#+Q!VF*Ob098L)|Z^JUVad6zdV zL#U;|=~j=9!_5~8%AowNm^WS{z=#dnzN>%#-pfesU@P!dx zq(P!`m=QAPm$~Et7A9flIT}WWm5g+>mauap?8Gu-QljnBkRbJvnWHy|7#~*%9>XX4 zqwL<9UHFVUc>3ua29X8p&?W<+z>73K;Z?EDX2Klb@zNV18F7d%@f~3`0gB7bf8|1M zr3?-3?x2CmxE-x&+tNAr3cp)=O--Bz#F-q(S%+MbBFfLW&F9ts}|C2`#s zmA8j@t*kv<4)KZt-$#8m>8&bv>%`onbRwh-0pnS4Zf+l*h&Ge(fn^Ck^gZ=5H8Qc^ zG#cH(ZP1+Y|NN5i_1yzqM*e_&ywuT@jV@k&9A!OV?VHW=KF265*MAA0iLv<_nIZ<} zb8tPA~c!A#Tl0Ml~95nmxH3-u)({*l};Ex!eEHpP`yZs_Jq{N|C9%t-OF-6(W0RQ!}i-SgV7_wl7xIY_*sq}sq8uQS^U?`*O zB5j(Gia0sVQ=m5pR$vkXMEyL&r)>LV#=vD#R(_D;j%EjD@B#RqPNJDYX1Yr5C8O8! 
zR=pjSN5H`_P888nFp7=Pp=T?7Yo}?R9R|;x#EjT{{=qPCWiD<|%4ME33V1(o{E_zv z42dlTkAKmrAAS|18%E5NoMr_WLZ*;~(^5oUKE^bF_}%cp&vS~GPpnhwdWHp>Ef@N=$Oa6Rm)-$M|?GgB9@`;%bi%+nYDo6Dv2w8SlK!G#A`DmcHl5pSBi@ z71Q6_4VWFcav_@nN4wuweq{dW-j<`7!L$L>#N{)ZK#e5meSv&ngZKAVq+aHz-@lmjN2R;E>_i=`7O= z{Lae`_E(2lUI8PmkrCEpr&*U|+FDwWKYsk_?7+NC!fZIJOt@YnO*Mi|CLQb} zrzUg+Ff|u||H4$z454Onb}Yb@J1}Sa-E(4;%A1aSGjNsf_$|^yJ4Y#p={8R18$I1> z%!$cjA>DDT$)(j8p0n+OnJ!!Se&ySC17^wSh!7lr{lEJ~xyrh} z22)x5_Lma6so->miii{VFrO7A9ztGeGt9V@iyNO!3*35S`80Vp^a?IL|1w=taxs;b z89}Gr`LVIFr4Voa26VF59bmx{7sB7}&Liwv%uZsb@OEpMc@IKu0;GB#@kImy_|Z{5 z%QwOO!AZD$*tS0%A?~o2e=ztaaJ7l_8UgSP!s-YpX3_-Ui~%XmhiV$!T%Xu%%z`F8f6_)K)H2vP3rLtVG<2lTOY@jX{vY{Wjkzb?W#Cw+VDk;9WJ9@z3EM9-iE3S% zaUd#$V+AQuA2}!ZRq35_|HH?<*gl!I$i>yE-3%JJ2~kiQE&q1z4$z^{^fa+UXg~Q zF}qZEs`RQwZ-wA>-004?KY#v>DVRy}(Gm6+pWX68FlsJ&*S3oe7En3$;2iJ=2CK@O?Kkt=`y=jZ==M*%{bJ+1`**Y@I4COUOG?JgcXM^3REhswE z&))~+0Tu|gL2&Su4;y4zgLVzrneTYN)i>i$ism7_@#x*1G`*f>kKmAjqBoH4H0Su( z+u`3_^KwXEpa>%ISc@OyQ-WpA7WIsA4ykO=g{T7l4Nxy4d+MC28$^DLeVE>SiRN|LnB<5xCb1g2oeJ)i-gzNlIP2>kbspr7a_jBVIz zD*3xGYK8JdXz}c~s~6RMIgAo9BFaQqbC=aO?vBdrY|b4ygq>ij1#Wok?CUp!nt-l#K`fg4*^@o|LOH4 zidIP(DN;it;M=aOwK+V*WEPXw87xYF^o4_G9O~n9J89h^7Lk_x03YEcsfNdoi9?r$rTp-kz|7 zW&V``&)fTNhCP8toyl0XbMf7-IClO(Kl^Uh&fhRWNuqkIgPwl3phj!FWFh&8I?7qK};o)`#9lwe3rP znDq-sTzT~q<77@v4O?94;l#e3@e5N>`J^uX424ti?R*pfvl z3yB7@WADkjqIyT;uCmz69xbLzSyzbLLRtliF6G-qPEIroaw8sUjVH!Ki}gg!B#_cS zh;w_$+>?_$6RkjrGGSne02PjdnHb6lXv8gS(2A|@aamr7dNImwnRFespudXy2UJQ4 zMZl{vg3OU1wyX4#1#9E@4kK?!x^ZK&cvVB@klYLEqW9H~f%Aa@z(i)t%O^>vix~=6 za4`1ua4N(I0i5$NSSHU2m+R$27DYZ!EwLmm65^4irDcHq&7kM$epV_CDCu$Y`+_Bl zCJoU+EZBjP>2b|`Z=G?Gg*C(??jmOmWtBg9<2fd0Pb@e%lQT^Ou0;@!0R(pN_tK9A zzqJpmd-(97+j&VHYCW}3Em-^BR2hccZHEG;$k=Pb+guyabZO+vU_S(-DO^7ve;8H~ zkowS4zl2$vHf<7NVgebdz!727Fl@T3)pw(U2w|e@CK^vDo=SSbfuiE3ibWT@dZ?C` z2|Qsri4oBIND=}*=swBqc?f}Jho<)&={nSCK!TRrv$=HozVN~93F0ITFwZyX8`f{N zLGvS5-Tly)JY_RFl9^0gidq&F08Z`t)RG;<1ULO!=30`;p`qp_)a4=TBnn*`_61jV zR(<9^S{dZrZ^(9)sa9cc20kAm+SA`YO#~D3h|)( z*Pm-^zk5Ls?!>oBfcIbjqBiVz<#GP+zx)4d8Q#0I(Bc(Gx;Pvdsb-(m9(SsntpZ*F z*9r)BNbVGSH><;&xu6mlP;-B?F>>_3uU-DzK#98k`>)*xjBFWZ`S91*Jd`Ethri%I zmd^a(V*TN-FQ~{uAO3>>SlI4=@hCm9R&hyjTOF0t<}klw{C|C2r2DBQmq#g$#{VdY z`h4lv|L^a1kAM8v9JM6Zu_oOwe5OoB|DTto;aA%7Gn3NTAbtco$7(-_nQatEoc!a# z2_HVJcjwoyUdak;skqhDDhGcU(r)=AWPZN+UpxYS{QvK2`fz>!t2ZQnIQcou@}#sJLg?r&6MLjpcHTbVbLEqa$E$@itCbT4TQFc zE8D8myZY+6SEw2gioPLnMG7teM9JF7TWAw$7Xa2GQ z!tX_#c16@%dVQP{&=H_$oTv;RLFx(sVd6ilqtLyi0tK*exSXpcd=xsRlGq6;kl_Um zzH#|hO8|+tY~6|?lL53uVV5mHN$0bZQN0;ufIM%ubm`+%EhaMlc@$O$5w;+*n4Yfh zjZEb`b$>Pm!(Zr^q(ZLZ-}t@jX@f!DfrLP zZp}LgFETJovpl{iyEx^j@O0%5MQj*#sf_bTujNOFl1&h@nB~Lyb-kR9Otdgdk=+)!8GRU41Y=`wE_I(-3TUubdhw*W;)^G*m%94_V|omr{Qgm%o=iwn2|Y(k znu{K)t*w0{u@<;T4o+d@qXIdUvq#EaA&b(UuAW&RKbDVW4t0c^n_DA@XV)fID8Ql` z`nIYtoL=`V57D@V*d~lSIakkkSo{Dnox^z$ze9TW^pbaA={N>1L=uYhV-1=F$>dsu zYmvw2dqUVFZHIlXW^r6iCDOo@k|kYc$CS{g!;ie|=2 z1iw6>)&>dO&6{oWW5HDV{;;?E$Q4*6HeK5FT|+nxaWsV%z!fDy4G~HX0M1;=1hMqt zuZ9}!xVneou@r`zphfUl7VLB@6cPl}6)#bKhB1HSO}bYV6Bu!us?xj?_drsUI#EI4 zhqT(J`6{?l!E0+nNjV^_JFB!C7mQNtd`LCnRdV$3VbO{Hp8QOPmOv7Y27O=hMn=I= ztb_j%Fze{>^FoJ*=YG*R!e+*d8DcyZLP1c*us}9DdbHMy6NvIGfVUuW*OFzCQ|pWD zgwf5gXCyR&HOs^bXv%lSKc~ewlIjYAptbJ7-{N~nFNZ0GoKsoOaG-6NLy(Bg3^${k(Z`g-6*W5pXi|ml(eBxI)=Q5v1 z6gUTjF6Il9!`j0bX2Kzv>>7&RGGmkb{NDq-AcWaYBEEoyKRGOla3?HemQFT%SKZ-j zYsjxsYZL0fXR>I+!_Rw>YQI5h;0_anX~l?h}q2=L7++&tY7l{XnImI(~%uJl7T0|yQ)<27Zq zpEF*DP>KW!Aiox-Fth(b(CmvJ{+@#9!(6ylGQa{;a{w@gmW-wWJJe88_q zKQ}uo+z>#0o+oLkI1zh)?H^zoOWC;Vi8~OF6sb6B{F2~|o#+&!-?wh9(DbvG5NXj>KgGZC 
zlADtMDQ|UQpbFw#h#;nfC9$IwioZ`k;<0=adcAPK=fcqwD^_{u25mOg*ViK(A#CRZOULoDt#4 z&=BSzi@={&u9wlq%N+GVtUz&n-rMF_a||bfO!k82q5S+4Xyyfxb7DKN@<#bMIKI;v zJU9tobU9#1wPODIyc_1rL9|;3X7~7{Rc}|-t~kgCB|v}m8oTjy>0vh!MIoTB`2B0k z>Cl-)I6zb9aU5hxB0~B@jZWkGpl!>1bGC5B0`ss#4wY9ZuN*&c-lTL`$H2LRHXlha zkw?a{rRAfnYuwFObLhy=46C`7z>0*bF5Mn&epzG#cvgE8ie9^jz%hSqsi4w5{Z2Ib z1>Z3E+Sv+*lSC7O_DXPbkhxISu>}hSlwZ`AS@Z#HOfRF4mUdAF&PtRDOa%7?+As_} zTMp4tPA}nH1wNFF759h`a+_mDGqdTgG5680$HX0Z^ciG|QS=^pg8ZTl#t0%+O9$Qn zs<*HaatxU+ogo<@Y`^b5+~jiNi&%TXZsj5MCBdT$lZo|#^b&;JPLjD6P%XgtzR26M zoE1)TyYuCk15q|(^_3o80->#xK#Q_fWdi1 zT%T{eC*Q&8Tn_w6^XJdiQ9%&Sg=|@RSWiTD#o!=_9stYALPpp%ogKd4ij;y)>>pWC z{WLWV$%tf7Rel^2860ZlEl2Q76>HUf&k{x<1;D@aSc7>N@Fp@S{CWDROkRgGjCL&TGF}NCBp0LOqx~g+<(02wQV6a}##SsC7t;=iPB zAVkl$et468U4!MrqSyFWIIl?Uh4>jRo4O64i)+dpaP3U62<*#CuP zH)x+G69BYe)x)W72Z%*VBHU-rE#*u`WYMiVf2KJtui95kBk7pY9Z%>xYgl;ndztH= zB8mwumFeY^Y#<(4PEgyozftoDp$tIeGi0;kN-V&wK2QUr|jaP z?qv9-54x{_4f{a$Dc?;m_4$Ipwaqc)6*6~Wf8MX~n-`{L6oioOoc{(}FZZXUY`C^H5QWOIb zs?+tW88c_j(H`|6-3_f_D2tIa{E? zLp0J;cbiP^q@eNq>$0)MdD(8z5s=h*3YP7mWwKhK((|Yvkpm-y&PCmz^a&I(B0d+m z&q$1;VCkdKA|li*Q(nP|jBip@gt#kk8JLD=bzLIU)6M)nf}^+F=+%3P!d>WQmi{su z9l8wSqkIc7Po~0!go9p(lDv9JTw7?03bjNvJk(&o@Q~sJxiOs5Rtq(8e#L>AH3da{ zrv)|Z%^;(5J^3DEzr(K!ucDF4sJXQDDV)Y;elx!EIlUnP#@UTY7a4HT=->Z&vDDKl z4qG${P)5S}S4OPDRl0p{2<4$&s~C@qlw=?Z_Xu5Q0|Me6c=bYCsPvaM*n(#%#x5SF zGNoiVNG++4@{E3Wls3?i0RwKrliJRBqtV7$HPrDIPk(vT9Ve%!9cmq!iw1_z;|FB! zfp!m1v=1xa>3AQY%PNrz4%(2b#@gE}cJ8W|T_L`nRrjBBBFA(Sc@ z!P-7c&C*6CA*Vm2k;)Cq#6ScmfI)<5&smT~%`HrOX2KOVpIy}))Rf1q-K?22?{P%K zt_y2ws<$U6wU@w8p@fX-NO)A&-&qmsQ0-^yQwi!{E{y)F$lq$57)D9;T~V`Mg0^w;7^{C z=E4itUfWwb-PBLuz*39>-0C5;YZ%YMFeEG>lA-0ZaS7olg9=b+go%k0?JYlX_(&O2 z^H$chKN!Ubp|fX!xkF}26H#u_R!33IA6~a^G^}}UixaV*nME~4xwemT_o@x*K(PuJ zUM2z<&r-17Z!s<4>zi4-6t zFyZ}^0Aw(S3h&>PQF$oJmuaQ|T-oYAyJYU1V33ul!)4ImF(K%%*5A2R-C*|4L97Z8Zz{^ z-o2mBZxiY1{-3j7{1!idz{vmfzEky$+pBMlR}VaX$947V?dF}ci|*!?T(v4OpFTXH zM88V$>R*inv(@Qw-6r&k*%n4fkQ&A?yQ8?J(UP*p4)bzJ>Z_9S$^$(@J~H3G+*%%% z$62U#oWd3XD>kPB9L=6SIE$wD8_o^y{ZF(^6DmE#;0C{WE%~6*F9t}uP5HN6cW3s( zI$tI5?yj|H%f#?2q1){UNejOJ{>RcTmWRB(ZAu4zCqom8Ynon>o}HiUmaX=x&29zZ zX@-Y1aX}clN-a~@?MII2_1gB6NqBsJaF`oTpM=)|MTz=9#<(~2TLwD(e){R-M7t@= ziy9}n74-8AjW;{)=U0_ICn>7T85@e4@?t$MT5bHUmiv?SzyGm`ss6-};@2I9`W4DM zoc?M^h+U^8b-u4Yaho_%xoS3xM*t=nn7n2)895~=0 zhXASB%U_nLGQ{R*RrRZg+n)N<L>kiW^oyWhV9uDZ&Atku>m%w9V z6Um{rJ@;*bb?U@?>^J>YRZa-=61x&+jb>>+j@u^QWd6k;HFRzHd`U6;Z8l-QDO6_$ z|5+FwZ<0P@{P;XB9jB);Jx(n&O&B!(=3jN9&=U7#XZPfqqMMMif`s=<&#~uhRSU@a ze|*zzk+~QB3UkL{AR+b%HJ-cLeblr^wQ54+ON+$Vn;Y0uZnv|ePB;D3;F2W$sx9Pl z_XJr)vwc*@@orQEqSH*YPdKQ`mi;5jCio6oRHL55C`fSa&x02Ilk1qD&<`z^ZzjB3 zdi*Qjqnb+s@OCyrMOW}2!=oUG`;gzJ_i_RMxA;@z$Z(mIu_rZhqcM@#s;-d`?YWZ?j$Fxe2Itj2&sIw z#ls~15Kmi{3)WR^c6p%WcA)Y@w&#O2!q_}HTk+XF+U|lkQR@GTd-KVbC4(?6&z)0i zB+uP^(`a7Gk%I@9>h3h7dHhhWIepvhn?J1D)Y{i=$qTgOol&^SeKhi;g?CBj#Vf|Z z4S5;E>GJm7XAPpd_FF0syBzI4tPND={HVEh{S zTK~zH1NZoh*zx`2S=X!Wzw#N;(>^B3@DoJ@n0pWW-z0H|YsSbcj5GXbnpXU!sb=w| z*GB=6zPVq1pQmT_Vpzm+CGBwO ze99FfR=1&5-_}<<$2@u|Ra0q~0k2DJ3L$c4d^_te=i2=|;Gtv3zWeX5PyFWcZq;(` z&q;|dC>G=$+wPjoaXK2CU;c~5JhI1)p{TkHOE{E|t!c)$+}o{tJ)M6u&7{SNWH3PdBy7&P-#lP@PD0M{1yBQ&-GXP z)m1qk`0cmv3g*RMZmgqqZt-?2{j{mRx5h&G4=?_>LAC6Lcb%~P-KqnNUUw!vCn_y) zdv~K3R8jWE?M01odg^V#7W16i8~~g6v8dNnDa&(SHpL8B_TIOO{P_2Y6=3|nEWjg2 zkD5In-Sdz#053HKwDR}!FcU>GV<}s@_>W2mEYZxi*yO{d0(;JftTX%etIHu_akDVP z<{`;k1~k@)(k?wW@m#w%?})m1x`@*~*)AsJU=hZ;fdMKXb_WqC3FM2wsYGq?jn#LzI zyo{qps*$jFEZ6%|X)R;H|y%AEUg& z4Vko?tezUjZ=c5nYH|G;BTqPtRi6+QSQad}!~VzZLMno`&Vqjyl<#`}X@IxgsDe*( zP>%K(X8c`VqMKXHEdWysc8le#RxJVDJix8FiPqqG-UDrnG4z@;Mw&bDM(Pdkt%p4b 
z_^=YyE>dtsjS9CLCshhb?BNGUUxmt%U;P%sF$}Omr#3SaJL$z|Btl4ja;&e zD;=0Ia{Ow>(iA@3lRfHFXKXI(`?%-TREg4ae+lpKh2Cgz7~$2Sr}^-ui=W1STvnqV zgcbwOd=)GH&7RXscHKBdLxL}hq!9H>Ug9Oa3z_0tG2?yow%_z7o>)4%qvD#c=~mK- z6^)1en84FO>9HwqneNUMm_5#q*2f&*|KwXCz+h=*czZ(*sc4n)7K8bq{(f=0cxKPr z|C*E5AGukmd;u%W@58%^iCY~-yi+B8Q`^9QlDUH(XUy4AHGj3A!)1APx4Zm3_{=E& z7TiFbJ^7|iTsExXdro^D;(2VYVQcDdb+=z2A@}y+qd+bx`PE@|2S5Ad6H%hDG?L_t zKV~p}N>AfrS8o6)+Hm@#bfA<{kk4z^;2v@~@dBU6f26m0_AFh+?)zSMCmw1ct zE3bE}x7}4bc-33!uTf41thfyNx7aYgf=lbFI`ZOeKhL8v7vhs8;VBA z>n`nbbYt|xSO8Es2sI9>X2RuXP~41xl)11dNJgcyzrp*!t|vGNs8``q zIBdFrA?eyTy8t67(41+<-@VrFCHkjsI={rX+ggGTJj;{`tHv-sUs?z` zybq_f_03N{Zuml(HqCO{xFX$5dk|-UbQC~%Gv+1Vw2Kkv+d+eo1*ouc z_RkY?A89m)z)|T2(A-F1r2?go@he-?#8}!o0n%6eAM)Nis>-wL`rXDPCPq!M7c_~Q zsMrfl5MzzKfTBpTqKG2W1O*{QO)MA{1w_FHh$yJ2NKr^ouz)Bah=8C7ND&YOK?J_v z+Cbjt{k}8K8Rv{|oN>lD{P8@7viH63`?{{R)|_+A<$iCe2gV?g7+#+LHCfU4TXRz6 zcaHoL^W9Z-lNNJZDH)a;7~35#8q`;&h4FBL*wVvTGR zjOCBSKIdWd7YYhALK&6Z7qa)jKV+V)x{9AZ7M*?*v_|RZWUuY_WBV8zT(O{g!e+T3 z@0_hnh7}*0hO6~|vm?M}Umwln!tq};EhenLg^^#@`1wJ?y4ay6jh(wWj>}IVG-L-&{~8r~$-wx?11= z%<~wd;qB3a43r5Y0;p8 z%KnNZ-|{WG=gZ-Lo^7w+s0>~U&y&!JAK3PHZ?Eln=Oi0y(7*`BnX-hV7JARAJxWp_ zQd;kBLrwwrRH&vhb&_H!tEGb zeAMklSJ^?Z_mFcNwC=4D`OoUR-!#10?=n{9wkmO1tR96@aa%Zr3t_dKCXl8qh&lzs zD7q!iGY0j|M(-pPD1%MoY`b_IzOtehF`KR?eXEuO)_p44md5H3m!gQ0U~c;#ltQw0 zq&22!kjW^*D`)sK>ur}W9@kuJ=GVfd)6C0gxpi8*<2epx zsGi)$qCrzcP?yK+dijOITFHkAkJp9P#avFZIHry zSWv1Q$*QP4BQA#A24qNF%)#xiZ$>gad6uPv<5OwUBJs!J_MUAQy!d3FNqd2q&BVVs z$2)Ky-yNOZQOqg$BV)VAip|lU@86)kxc}VB(@mU2gl(li zpvLP(>}4@dW!J9unl@7SE+h)r8_wb`7r)SHB1$iP;qzt3j*nT1)ztX))jBfu1>&-p z1NYi3$7$21iPJo@ZbF@c582PCuR?usHkYU~vMKBvS$n=>H01Ijub<2ngv{KRf4$(S zg*Z3UQvGPj&fQ{V43}a`bIujA4t`EtdyB<=ZT9ImGsi%nplvY*{T+GaB+FeMiu4fv zxi5S7d}IS6Zx>PAqv=!Km~D#~EHuHRXW1*%0Aj5hwdwHp)^Z&R#q|fx27EHv3hm(D ze9>zG#fvG|s-8m_OmkVp=WwgNRTUlK(k6eYa|Q<`Y2KJNT7N9t9Ee*0QBiPR>LcqH z_BVK$W%zCR94tE-t}a`ciuY0UtAtx z-X-!YC@cdjE2|wBdZRqVUREXpZ_inDU4xMIUM5fimm5(*hdjJWbUY_Uv@Uh`O^3`G zvo}Xvb0JfgK7Z%LZlhy1n2%pX^?mlD-Gx)+=OU<=Z)BqLe+JbPIn*=|&f%7J0{llehgBr()11&TK9EC4+Rq5ai4fK9*HfRcyHUUFNpN zYnhf+#X9|vOj2?vJ0$QpA9Cek`g)<-Xio-JE@rZtSS_ZfQFNvK_T_}L+%6gYEgezm zY3Bw-HsX+;?pmi6M&uTQ8eFq#&JI$5~Bjc)^H-tc<24K8FWH>{dMp#%cPfc>YV zwuL<+ey(8uv*bgQ+-j?Z>Aqs=s4@0Y8cg6xL*pa%N^OoAw4iE4bU&=s9&akE16Iq} zaP7W{_tw-On%AhdD#20GZ^k_Y$?R>mKwt0iu=McHspz??k(`|4CHi`2rhfvBFP%o* zDhcAu-B{5#b6yAfe<(FWIOJb3%RHTNhQl*+g2a1P`ti5s`)opP;lt=oX}6oFQ|%5) z$7P4$Je(|(n7PsGkPS@L>gF*Wfbj^JWe;ox z#B%rJ!aa_)h>fAKh!iC{eA)T=c!7Ljhie_<_r6DION_z$c zMgCM}ou}CUMW;?_T!(bp&p;IH1F;|-k;gSQPeLB5KXOHS_ zi@Wc4lXptX^T9K$!a=I4vl#CoIz|Ep!()F$iH=$wJZRjpXJMkgA%Z`m`A&6)mQ(FT z-p4jKrj2;#Gt)$zKt;jKjBJMbAaoNVvQxVz3zNl=!UFLCA^lL)R8~h?D}osj6$9s@ zA9T#n@jZ6TC{Vd?I3i@Y2@iUdN;8V3FhLtBW0A!W6S3+=#_!t68)MXte!-(=$VRnH zzlO_n?F{B}Sth5yo~(%?ZxQEXGTqw)X6Rz=>v!WG@j-R``1)j%HF%(4Tx-o!`~LfX zthVOB)QinEmb^ca%jiKu-DFtmYow9X@(a0z41HT2mMM3UhbgWb_)y_VIovq(J|fvb zv0X<_EAau2e^z<}0y&T1$k^xCOCti#oY6n`!*ps%QTU~yIwbQUXn4DQTs!vW0?BZ5 z?LZ7gECthTrZv;=8y*e8@+_F4|7m?Kma%w)shm!J?lZ!K>20#frIS{q7J1DF%!ywS z|48>#bHR0ZE+0`4PL323&_I9}nO6WtVCz0}AUBvQll}rSRcl=Cg`VaZi?QtzQ~2i4 z(B@avO(U$5DOUZ_Dm+yTd3f8|v8M2s%4Qx2N2>h@*ky`hYu!nG`wLP8sq&RgvyEtrn#8? 
zukR@6TWT)%{_}fBrZn%n|NP#pQ%Z>9KfmQVu=R8Q*LPzM4zh0{l>YtZ@)^w$?%!|l zkBb9#H8<1T8^O*JU7t#9Ikchwooz`EmVFT@niq|Q3HristIleoJ8K~>N{oFMt zv}@(2{#-z=9*lZ`hsoBvOrM|liFkW9Lw$-`FZ9gC%vK72yOmHWQ)2#}BGkyy3KHGUhJZK?HHOuq`m|G@vxI=Nn4tRSwZ4C@htdLqXiKC1 ze4uuEvF9WRW2$zmIy4T8n9$a0XUl$@Z?4hut(M=(?fIa+&xlhc zV_aX%Z#hqfbcHFDZ?(Q@e%^X$xkuGKU0b*4+28LitoX1@-u+AIjHd20*{ZXp%!uQ+ zck6Uc7lR|vfv?_o&^Kw`idGp&R}UXPyhiI&g~OEX+iwb9A!UP6)k9u9MD8?0i@auq zgl)|9r}*sA{8EGNGA2!_0pRa$d5u~7wy80vDTT#*f83YGzQ5dS0gr|=O3@=N#B>v{ zzoeJ3Z=2fo&9^3I3ms8Y309#+JrubM0C>f}b9w=vF*oPmUF1o5q^(AYT>qA|uNa_A zW-s5*_G)Iuxf?EB5anMAXKPCt`bzxo#nVZK7LejpKlz|YcoEP%x#vW3)vez`z=Xwk zoMkW=Q7Yh4&AP8!ENc~t#+O9vzFt2L9XcB!nKISb@MNS2mHStHbg@(OrC;9w%ysX< z1DPqNy%JV6f>Q2&b#2o1>l!k;80+qZvTjvWUjO}j^8qcaX1_CQepOGa z+g%Mj-4whc5+nMY9>1QnHbTAa;odFB`-z2h^p3Dx{ppI29P3k-*t&k)bCC7g(mhmy z`e69JfNf)3_p}~Uhj*&(cwoEf-z6JrG}zXkUpV;hYTS|B{&>-|786>9>lXLL%8Lqa z=D!t++8a6DwNh?057*%>%j=y_j>-0JdCoh z@d5KEwS=CEWzq^R%}uEAl&*ArT7NxNOpQV4i#QTwh7GwZIH7bmi?GhE-Iwokni>O2 zOE*B08IW#{Q)%bAza^Z{H>Fm%gDiQc2-K206HY|H;veIJuO^@hh|Aj3I3! zFq4weV$~bOXP8BdFgS3ft}jYDVc4qsGy;gul}d~K@Zsa;`{X)PjDWaFS408%!jl-c zO(!1j2a&4ca2I}S6eVeEf+DeuY}OAModw9}KsA-R3sBI-{hDfbH3HR1JDwCk=ir;R z4a*TYgqJEI0U78P{wA*XjcXH_Pj26mW}KB~@S~Vvu`u=5pR}1;3oIqT^ujMtB~=`e zI0Y`|(Fktn>GrYmK76=^J=!TziBqv`4t!YmPHavYb^YpGP;fGcMCltR>F?lG7Oy`! zMiHa+s`-FBJ60^`;lKp_m}b+ZTQ{+!*joF1m&j$9ymg|I3{IcU3+|45#Uu^oXcz8} z*1+&WsZ)ZZDrZX3Mn_)0&j{iNssa(n-_z-?IPv0P^Nig2gGG}_nK#bll~Zx*VCi^( zQTcXcj2y z+9-y!Z`mNxj5%8>L}UaHMw}r|FBKUrVgn0OziBqjFU%9~$j(l8X{t93Nfc}L5^w|S z63-7GoJ2ApR2M5E-*2@-v8iR5$(W=4nMo}-irPVr^M8k&DV|-#m;VM# zpQTD?Dlu1AMVMPSer`OF^5iGb+~*)|#f@pvk|nRwv=j=9X6{OnyG5pvQB4;!RdfHN zJ~GV!*|vCC^-gTv>aObII_>xC3-C0-|8+l9{3iXHRJg_Bc(H1~jz85$K>S)Wvj~0t z8!Q+ZMuAE3rCD`t6lv3%PluRIqW=QkOyfYeMYDQ(sg#xgbK)-WAGc`^mKY*OI_73U z8Yj~?TM7y3+d@k|1PHfC8>oL$pl>v^Vlh@dh@M|oYT{|0xPlKVPqbcU z%t###xR%T=LW~dqz9b_rpfZMtu&3X`zPwEc4x zux22^d_AeQ--GIMm!@TyI1fFj&LRoRw)e)r=G9gQh3mYQgWf5D+x`|WG56X!S%Xk` z44N^Ofg|G34r-t}Vnj%) zmfwSAm?_ET#(*=7N1=1%9U`_=9zw9Vut@h$${PU2$83Ti!m9XS3=gRS=+jiO!PZc; z5p(0Sna?cEZ`JsC9@esiY@v0mbWOR~WN>`iM|`S?dG};abM5Ys#_4^U&IHjhJ)|fm z9~=^}>Mb@4&X>Cz@**6@MQCzVd-gw1i zBf~Frr=JHJ1)10Sx$!c#mej=F{Mlts`hzKGkGt)m-A|CetF4jIb@yiat);(LoHAuH z$oR7BlR<#&3||eMq1*8GkhDG-Z@%3%f^G+9VEt*mhNba{up}2Gw7B1-wKHT0g|xk~ zM+LZ%1TAG1BkJ4_9C&V<#ff2bZ+TeeIC_Tz^5`#K@KOhUkUogW)WMOEL6X(X@=_=Y zTB^hC1;LSJ$v}C4yTPNAF3E5X8BYV0>E+hMVhbYAQ|5Rwifwe_W>m-0NQO>ZhNQhp z9>s!6hm>Ma-TW8gT4(_PLMz=6z2OEF{V2qK22dX8X3q8-J$kgXqK4$RZXW$(=;F+2 zd!aej_nxg2wU-d492MdE6OE3wOnOOyCOquzz06+|6hvTL>E8ibSG6nUM6<=HWTDIm zpA4sF=h#5gyp~ERh?=rstp}`rBm*5TUcC6tesPZ!rbmFn{MiacXiIvftLO85VGikI zngE=9qDZ?e>2U>;5p8vUO=-x0y}tM9 z1u+#q)bGJNB(7S=1Ndy0GmS^+$rLyAjx$vNCVGl7(CzJMQI0m!zc}?-SjGlP4&)Yc zPS+H+iP_bX9u9g$BcGVrNE3kHn-(u3Thze&3#5M@Y^)&JKX8Eb<oPoplkmW-i+XX%0u5<(Cd~xRrLMaZLJxwVTnk zN~et?W=zYYa))qJIu)gC=5|J~6a|Y5d8t81*v>Eel_gKR09!!{l)v#yJo`QQB zSZmsFyGkGg4Jg88QU>XMe)6;-t&4G>O;2(2YXu4ER_0@wSTP9a}PyW70i3tMsYWqnySX zb?Az}gER$!CnZbvyCiFqwIWW4QqBa0A9H-ZqdPnKBr5h^tdy8dLN#(cN4||eJfhhR zZ{lrf&DCGBWJ$3n@~(&vzsFscREtm4@O^pYWlM07ERzs6*4%gvRr^kG#?r8C? 
z0^AvK;{nB*XLGLJB-9_9J`i3~gbmgEK-IE(V=ET^2Wojy))8q2&mStR8w1XPJZmZGK-N*Ct@{YUJQ8yfO3q(tl zY0*}3zD06Xu?nsA(UB{DEkjAPQvFjhqjT;0I!q)4Z_!F8;LjXtznz&S^pl$h*cfp{ zi_2~*mErfka?ikdBhJ4%r-8-ER?0V!>Hz^It(#)`BvJtZW=XkG;QHSw+*VWi2+sE$ zgl%e(5fssoOHb-hJXjB&2ev9W0qK30I+Egzy+*D4oz&2@xQ{SoM9cFK^FJRH1e(Mu z0B&{BeZRfY`7;k(R?V3ObopDh(&vTxk1;|r%Lg7t|4)^##QlR%97ujRw=k0v<5Icr z_$?3oRu-X2lV-l#-Lz)OxT7m{%y>kMiYCRK%L8lvT0V&_17Z-uN{W0!6hgU~4AX~f zD%bYr!J5K%r5UFok#7~f;q*u&(cUx6_Q)B@H-$69oFpZj3S>)>FJt1}eSJ-fYst5W z*qX=Ry`wxl=0>F3yv;=6x~T=p{E#Y107=|yk2Y)+aL{d>LL!JUw?MC4H!Zv|oX`z338^z7m)uRXd z9({k+c)Q}4ws|9Ub)%n5|3`+G6@5TDBM9ZJr8_(HJcR;VLnVxY*`q)S26BZbPW`@f zrzCd6_lu4sWfX+T(3%F`SwqJjE7yVc9*sAI(<}0%9)Qi(OAeX0-8Sy|cX zxLrFBb4!J5c}r#XVLpaGJI=4VocrW(d{yi9%s((>y-Op8xm_>M-8{T3solpP4;nDw zOzI_$LOl?{OpEIaOZxz3)kJY~Tj0jplO#Fqmlg^t^_N}i)l_@}|JC)f+oGDn{{8!N z1d^;2)1{Ib{juuS9P z(%fKDa0jQkt(WZdi)!M})EdI%?GG_V&o!%wV!zb(14_7TlT~Qw=f*QD`HAucTR4)5 zUf~fl-jj_j^}i4DQTrGGiVp0*W`*c2T-d?C`OcPEt#O$^jmK&Fc6}!Ut?`>OuO+^A zr1hlOwgYAUe8<*=X^g?{QvjH_v6=SSEQ4;LW}!FKpu|szqS9lb@xro z|Lf?{V%yoI!E(q|_aT*5wM&qeHin8_*DmB>3P8$0x*MoBowRy*(&vhbjV-yxJwJG` zx$%rx@D^vFPtLS1rkEya%JKWPkn-r6`Ex5rk#!V#BDuS+okPlc+^q=l$$b`|Q@<9B z%esghH)Ry6;54H7qOz+p#g}qv9`3&*-Wjz%sM^s6HHBdm?m)Z2Sr($vSQWQ-!Q#b> z4fsmFF_?U~^GeueO zo$G|mL8C`sP7N3HN2x23(A<)vRw+5T(%FtB%K`o3=?ntsN3D0@Plk4J*GrS12yRX1 zeyT|CaQ*!zUVLhsJ{0OoIVdBlfV>_8pG7sEZ5a@&P&iEmGi5+)_kcWMPjnlK=Fw`E z!og`vfL|F;VzZ9JNW#uI<_J)bOeYAZh{r|o2WWiJCLb$skTjSnf#^U=F2DQYi!X#I z6>$J5#AdA|3@Y*0EVJ#Sh;h+q;j3zI3PH9{{n>nb`)eteyu7@KJMK7%VoWeuOn^8I zD0CJxE?g*P6b=y|Eo@zYxtYz5Dq}t3NNyHW5-$4r=Rc%YML8%1HtX(QV&Y?JLMgjO z=U>MOcN}~M9+@XoH`5!NildXri-e+<&R80IWFnYH+GdSDbSPVUjs8gC)V=i{2Q3Q= z772np^Wpst`TB+szv0y#NBs*^HwF=`1#K0aqjHqs&Umb9AC*=s^=#WiU>YUKwUVTR znOcU>X5NF*K;h2Zldy~y6rgrjWLd;4r9ieBsbu}?4^h_iICsQ9M0_W+naq;E?}eS_d)!m?kwBl;B!d>~FU>xk_9wlz(zvqx z?~lNC9_QKcLi?LzhHLzkY17uzNTTdD`i~c)o=X_2RA^VHgTyoDEP8dr^P(P zl+ALtq%@>^$vUJbqC!z2f}l?hFZ@VRJG|vQ?<-bWOLFj6LYn>#=siVlmEt7h~AK5+=c(VYnEkBufE#e_WY@%L7MBQ}V z_B@{(X9F4V&T^CyxwJalS~MhIw^qs4$+`U5=bvvr+JEfW)Dl3#<}bfJNWMrnW~%5N zDd_j0=8xZg`z_lil*k1dc6Fz61d#N;6<6KNMAB!ST9!;9yZ1FTRkB zkYF54xX33lg&9=s34kZwEz&yyOb|5sLdiL#JOSy?4oPNy@4;e9wsl&BCi1>Y4OA}T zog(5h?w3eB&Y14mPlU!ER>snyV445&3x#WUsQ|Gkx$$-@43{*mcs6tF4k;iV;xZsa zig0!$d4A-nm`mX@GmXV0HaqT~p05snp9A(RV1dm06|)FZI(D;=)#8W8V!Hk8W5tOj zEh~(iYoHiY=u{<)@h~Am&f#Dm)$+DHtW#Momj4F)mRqaPSF|jk)^G6t|4$?hP5riQ z>AS|JA%63Rap@h#B>dB3_1$M3_LW}l9A`OX*20YX9g_xET2y^;e7F0;R{@C!^;dtf zbYaGQ!#jrSdc^+GGxX>Rv$FF2Iiq$ZWR<^rkhL+PYO|Y7!S?9#^7<t+9c>U#%09#Ff65j1Yv-gRrVnU#whDTzt%hb^nWs zAosw~vf-^C?BD;lo)r2oW!?JC{Zsyb@x3mSfUTuB;j)WZKV=VWO_8j=*tDg4>&>vb zL65{b;&(;}O>$XF{&gAFT=l=&@cwVVb1z?)u|9pIT9Ek(gxS;ha14Qnq?#lnU5F>W zxN1{e58-qRv>SkX9V!1K>e5ppqaz_9HRLeVCuqTiY!sV5NDYrL^`?JwLdy{hPB`*- zec~GIROt6bnr?WNIEJs&I?{OMkx!SLyPL~&YKT7Qwx95-9Q=l z2HoLZ#==0zC?rjvuq8w9B_!CsI!Y81a7a$=sNTM@tX7ffeyt ziU5|<;59tTwZses?U|lXj$GvIaR4hAW=Up6Jizg8DS3M0772XQ&@he#Eh758eA>*W zxZaR+9Pu6BDD5EXKLD@Ce9)AAEvyCn7d=G6q8>B&R-qVlMVxksQGp^7zhZu9#p%$u zO(QlpRK$KM<7&wL$A~w}@UvYMxwFTPZ#(fjFwC*S+!43#*tm&dAOr?$KB0Q^)@c%z zaKWF>xSBgQDy%tiQhbbm7DQ6MCQcTv2hdbZC6$qRP-=^nTWDk+$dDd5nAlxLY)BMo zJJ{kiiyrrMjiVeEAiqc^7qET(bX>c|RQBydA$TWB0|2%amB&_5%8O7IL4JA6J%u88 z8plzWl@R4=RBICgXMN0 zc7F6-9XHrg<`YPJ|9A7Q@t;Jt_f09kNdiGdun=A2@MT*FtHO$~e)=3N2iIPV+_i4y zNaoXp!3`F}OxWM)9!T5mIRv;={YyKY?V!l*q`Z?-WC*FyDQP@twKLqff8OcSr^S@A zBJL$%O9{B%%E@$&YKY%dDn_Hgv-h(7low;!8M<#F#phe7?Qq+9+o}o@#W}Li%>rr- zDP}as{>T$HVmX}YZsPFwb4Optf}VmcJ*Sj@FkSAFh-_0K8IB)up(#QpMyxO*Q^U6H z1kWiT^%98F4LIVtcR9z}dyvS`%xxrk7_z6x`^v|c^@G%RK>p^Wr^AiaIYPt)^v$ZEM7tlr#JXv`qU{rRs1M+)gk2#gKFq-j~ 
zRwq-`$D2ClcZFQlu;U7>wvC5x5Kl<@xbC#xi2nVY-yT47ae>9?5u(GE5SJ43y3os) z-@ZIFZwQBixdlrvGi6MYc3$fCozYE=%OrtMscfq_7JGmFRl{uKk5Nj@VAlA4 z1V@Wl{*jm6$F5RpS<+t_;;d%@{56;2@WGob`cnjEK&|}G$o<$lNpr7on2ehf`>?CX z3n=%V=&PWsNRa{Nw>h1&Wa<$)qsvX7DVJ!vlIbtMENY|3?ZLy)hqbLKfl-mk;rqI_ zQOwuCXKJbl;=w>Y3j1{1GcZZdi)CU58j0X3apX1Zg@s z?M9!JgHJJ$en%1-0ol-cm*j||e~o^ZA$XaxsAdYR42{q+YiG6XZ~D$&;YyQLKoF6aKp=^QtZ5pjKzoM`}&wuFv0SzM$H zib$oWcKhPXK2-g2 zeVr@TTc@582bRhKWOOC(jzvs5Qu3uc6m7;_aNI3WZvG+LtYTuXQhD(-*jS{;RTkm2guNh zG4tqb6$Q#p|27lb{1iIIH8Yt7_ZIjdf9rM{N0~#ruBJrN7|z(IL{}j&1y2E*m&|~e zukSy*C(C6X4DEcLyNpu`NOguwBYe*kmWGpAHVd-tvkq@w&|g0vI7(`JQK`d6UJbwN zc?eEH!noQh*Z_Z4Mj(fXS{|K+vZ-P_J`!S|efnWeJXFmY-n|4-xLSzto#Qa1^&WL_ zEZ|RUd;<;oMtt0n37;q?uGq|Fz^V;RuM%#JG$z!PW&jHi5(#kIn;NzN7Yzmhz^IlP zAZNe|B_$HcIu9cv&u#p%h3_bDuQ26HrZ-`LlyuXy^39tCfZ{B*GZHR|&+Bt;Hq3eF zWM&j;KSySdCF=Ix_DHFIIx$ivuESc{Xm^Kag8R+3-2f0JoT%hUGSUN4lL(;XUNRBo z@CZ{Aq0>0SWq>FU-@WJK)~;D&P9iBDn?n1^931WeVtNBU6d2GMG#~<%(DS$ey2oZ? z@ZCfbJ85`|{aeK>|7wOK;a`f-6J(DN*XKHOI%cqy zAo6zfNrHiud;npoluSTs18Yd; z+uSc(i|w=ljTI_1 zhdk!2e(StDEG(>A8Z*fWQdU$qlHsP200!YYUZlmGEpuliV&};<>@Uc``4o%V_wRef z4PAQq)(RI%Q5CsGv~?UTsXxuYf~B+xu1~yk=OO@L4zbNRvgYEYJ-=cOUPNM}P3srkKomsvEK`i;C;&zm_{mJD@#uLI$Fe`krh zdGamWo6NZCQ4qwV^I89iBG+Ez3f@ij)22>cdLgZ;e9yb$$M=Js-vFbO&?k9;mY{&x zVq=;luYxJrb9OVjNJff9mU-~t)_4j&<6ng&?&(Nhod%NQcb7j?v_HwBJ_iyAmvjiu zXL{rhgIAlGt;cd*05KDX=bx-uz4~Ri8_7-(0$#z4fEZS%Uxm`!it31=U~`m~pTQZp ztgc8P?yry738sB-<)wyscgfZ^IS$C-|2o>}f`&^$5U54RkhE|p*Sc)EKqu~P)1j-< zd(Y=B;zjP+4KbKY;i1Hr;DDt^tV2J|wfC{%lr~2>P{3(MJB_}zPlo0I56k*IsaT#p#>)th+Ab=8V&k`^#)cAiioblFJV|f|RsK z6f@!qT>EjX3q}+|G{~q4?$gnTIOaZ}-~ju(x*?7ck8?!}B~!Bv=9nQ(dpt7EC+Tx8 z!;h!pJxYvDSsb*8P_sx> zkG56yd@6BEx8GFIs7L=U8`2-)OXh~C3GiMtQ*9`?yksc!`p6shoM;@fmScV(N{cPg zoOt`vCo4Q7c{Tr%zs^9-^p4wr3?TTJLea$x?M?*Dxmo%T#kxx`EEcVm9Aj7|m)>lo zT$PgG%IdoZopcJ+>6+pg)5nm>-N(nW_$caM=}*-3sj9A)kyvA49<|ezNXL%HGPH}> zT%bCg{I##kht0wZ*qeGI7?ArQ-Yc9ym5Zj+fF#PU!PvU&X2Re@JS`Y8>YG<5d9D;3 zRGl|Jz3@^#4~oIjdqdZ{+++@kc${>|qYqyZ6D9p=oYZyq4xrZt8J>wd$la92_q4K; z5Oo&YHXaH@r?l@%S@C3VJ^gbcrtlucE?b0VVNOUkOIfDRn$70QZo2FzH#tFWQduPO zH=gM0hyqqwYII}u2>Ki}&$~(aB#uK6DQ<=P@hkM~KXhn<)km?qqTv`>;Hwvun7VRo zV4bhbz}-Tl#+Us@m_VOMw%@B=JS|+8xaU ztwyhuW&MF+Q;%3%VoNa7`ayjn3!>-t;m^yBt0YnYWs8^_tiAogcP;{iPze7uA+)TPHG7*%^UP zm<_QG51&q%Ki?3q5}WM#Iyi|wuLerLylu`HZ%?Z9`;B#U-4mg&^z4Vegh;z9eKTCd zuA8G+N(F+4`uFn^$r3q3;)>pZ>Jp|CJ3j6$s!Y)X4?WTpZFiHqaTVIZTW$LB&GVG3 z^vF$O@KF=3wC%ncr1`Y3*cYI%H6Kz#^myPibK|8YZ$Y{x7yS#lgZCTr48&6#q~5=PNk7w?~*VleD>p&;aG~Nz^G*PN%9Ieu$rpdRuN+(DM_c51*7b+ z5fk~ap7%;Zo=isL?vq`Ojk?BxU>{<@Q5)+CFGeh3x0XJe-4XDgr@b)_587hV1p9od zXty!YgR$`yCR8QT&q%g*Y;in46?RGQ61m7D8HC!V?WgNVW_Iq6Qqn`$)5sP@lNEcN zldc%xXN#c|z+ID>V}emAB= z&(XI6+ujm;f7VI1^#>ZyM@FS3=)~)K^}HPC++NY8gFNitWG)05%hjHs&`E$=&jRgM z({YJJJ2JnP%B*7u(EF+E5=2yYM;34fXLNiot1K zZtN^`-x|i@eg4G3Ia_W#6b+j}D_ki6n`kl=?JI)Sgz67`&f{t)E2fd(L0Mf6Tmmgb z&25NiZOi&O>egw6AIr$oKFh61kGWfS%d)^t$XpNl0!qH2giA+;T z1~bk}x=dRO2x78iL-`4N;E~M1I@DG@@X_%O;4@4_CFCP#COFmM;l_sD_DtoDzGq28 zY>y`8PaGLf2_mymUOEhiWI!hB7HBB_@H%g9brz|M40ORDP3qAcr>cEikC{WXEy7o5 zq5|^W&*~EhOe$qDa(`ZpHu4dz*{87T&?^Z)=#}(&z(t~aR`JF+yEmFOJ#^VkRByN8t-h2&PyJ+*uN&MxN3Nn-XJI))%Yr* zvU$#{&iOF#sbgR5B^(YR+(+{10Z1!>R?J4Rc9KjW%gLyJD_KfjW=G z2!F|j4nL|V9UXUHA5ej>7)=>JDEXac*=s74#exoibV(G7)p_IV(x@44RtUzbXL`Px zXikXUKFbXdR+RBd;{$O4prqlC@L~>1zx9PfnP|q7{J=_b;UXEUKuZPC73y9skLPxLi~Y%9~&$ekO*7 zhBYPZi&%@w_v`G(q#JKCvNltpdfhonI+vIC+jILA9u8?0mm+uz_3GzuI32B>k|$#~ z0Vul3!5R3SdGRln^*X*O6)1%x_`yh7YIL-1?4gWGR#B4Jym@hZ=NwhqG!yJrR_(LS z^vxlt1-{!(A#^j*jzO1Vi6hz7&TuM^-myKlj11M?Wag_Al0IrLTcZB=-+zBaEthol 
zQC$RU)SNK8vyO*#IPVSMoLnt3hqmw-{MD^%isiS&W6eLcYTWXHFUs0fq@LjUtz$fe*ucJICtM|Y14)%8v3ZsFIY`$P86 zf?Q*?*CVd$;DuA98)`+CC>OBbj@2+2Q>FQn$-6>x__dffGIfGd|CxR%P_bnX<}G>W z{hmk7p{lBSzu+o;^rU1*0}W*iXso9|E_fqkge1QGTEoVn_v92sOcmk$#6wOV@t`QG z1rG=kv}ZGDbXXtE=ng%%1ALv2xl*7ZJhDGewPzeC#*g)D=b4YTFYxa211`$B4(zmX zDhQDm03hUyEaP>;`i!xOMwU9BlQCU?7*x8Xr7N-k4d>+ZC-ySoN%ZgaG5{sp_#4HE zzs?dwq3)M!Jxkc`Fn3$k!SHw9g54U=LIvC5-6;@Q!O&AQjd%R;J?K;?&_+Ge+KAy0 zJ$&l0o;Jj(KG9B?%jotj=tp*f)@1f$Ybrmayz6&vRxe8$AL{$dh&63uUjOaTr<)?n zA7paV`(=NnOBlnG)+_-y5izl!x0|v=668kdH+hvpvsWCGKN4&w%-ET7%H~k!s`Zva zA_SzbDA3wg43mEG3r;*3z0wGOqyjrfX*w%z(8Lg1BCv%(%(lbm|J^xiELy+zp&(*e zSJobIcYhhT9yoOdO0xX?{54t=1`N10@`r#CY15`phrwtZ9aV#z#Qt?sJ5>F|G__Ua z(vnXfeNZ&Gw65H`|8$qR1k4r8<*l9UWl9rEMYIra(U_!yI1n1peW)=!pft*e}96i2k#k6TTzq_B=yLYd*Njz-J z``W19(m=u9za06)GVGW_UDT?2Y#Fov;{89prkyIuxjeRN?YbXKx0i4f$5AxDex^OP zYLoNaewKYTt#4ICkY7m8g8k!Fx7Ot;3C>)HnzE$&+S)tu@$qJ%rMmA523EEGNgT`~ zi$~>=1a?aDGJWvjSj^eFWh-5Pb+>ss?tY=`RTtLi`%Vwd3kmhuC`*E%efp}GaSe6M z^6l|#o<^weNkDAJdmR;4$D{T=_}DdbBC_9EX_^G|S!s9g?(6K85OwLU#i2_&-kxQa z5f4iG*FtUVMb1`w^M?_gPv9u^mKr6~`FQ`4e=D7@jFJ}BR|#st-~ajJ&79R-j_a@e zM=%-k`Gb1D-rnbrXC%dtF=q9J`<}m!co)9O9VO?%X-=pq=$WG=jz4-bhYSmt-*2Lf+&a0v7S+7Nk0x4&?C;MNs56UsvF+TGMm7R%0puaK4gUE zJ>vOn4ikhzQ1hiq8QXe z6bnDvJkHD6=b3S9tj+1Bd15oaHxNyVu^n7XT$vUCk65lpuIDNF{Jm&@p0XC{GTi5~ zZuwwRp^mFa?KTzI{L1k+W)zs!moEo%AItQPJuaR7WgCi#XCi^H+`?hFa};q&a0#MT zp9p(~7S<%0J?ryl;8GA9| z6AFHIA;B{EYy9knSb~B2mt81*{yH&prQCw+#$JwJE+z36EbX~PI^9OkHLLC?j64WP zmU%q=8L~}o2!Xe}urL7jf?qNSTsW|P?b_^s4Ce53+B`6vq#~udc}RHO8kkv_Pk306`^zu&Aq@~a@Z*uE$e2dsgWqo z$m?I8IwzN>_~f|PU}vx;0fpkV|H<}}d3->;K(+FFaa(Voq{(4VGxtAkVMq%=-B zo`S~xK6&S}7ZwMReHNr@L5iKEu`bpuf$UBi zHc~Y87)_(|%PBZxf18gKS-X^rnuLokiym=_fU`wc*|xVwnvpyCSApE!EBN@c8ABGI ze=^M5)2z6b2Qnv}m~n>Fv}YXgGtQQ`&NT#L8@pH8%zyZB{;$LnzQTT^cB-qd?;RJu z(&d>RYd1p)`3Wxx`rkXw%ltAaOfU%b3FT&x(ISC%4vvnRA115;k3l6G+A|OAq`2yH zg=@W({==6SY%U>?q*JWt{#DJEVk&&yGBVR3Pz6ohD-NbspQJLZkLsdx z?ujLhAFAf1A6x)q8*MPF$9OfY z?V*mUQ#jb~EaGn8?%C<{%>gJvd&QmS?!4V*HpaKJez4I}DrNPHBrG1@)YJiMdy4m|Jv67k2HdlCeN&%~}9(r?oxaA$7>HPRPt$MkO;g z${vyc^~aT-osn~rjrS-JsdwPb82d;c1S2{1DVIjM@>a$TX`%#gC<{djT@sdQWH9QMKFo+B9S_; zFq-Qt2V&AA`p{!Nsep9A)MU_Om6kXYdj8{lRhPNqDG*FOFe;3sMFxg1rCw%C(t)Sm>(5>hU&ok(bAUpid+9rBGY_=e& z*2t3k$MF(-9e_jZ+_dO=EXsn~Nf+fU%*fRZ84D>D!=Il{*7=2{B&;HjKip^k?%iU7 zW}O`!{fJjFGjQgVxXNfdF_ zhb%btyLilS2B5H#5j#S#3o=1*cQkmF)JXdeM>2R#6}RWZ)SI>&;kQH~VDwV(YZl^L zfP3%VIY?xYKowPJ!DNYby{Yu2Cu#r}P?iXE?r0d#Nh703tD|nOd<2xJq_B;c5Bre* z>q`Ay1EhO=;7s>BRn-#IxcvdrPAH?Jx<=#Rz%5L?i(G`S_7O5n4>n&!cPlo4JcvOX zu`=Ss=y!umZ$=3tgH|eLWDVDnPJ--Pc$HP@UTDzWbM+|4LiE{oRRZ{MUKZ$D%YU8yOeN$@GiRP@a-4R7LOh=VuUmt(`Pl;cr7%X_UrbF_C3gV zV|Ktco~8{|t06AyXZ%os(q3KpK=Xbw-EeDq7@Ya}YYE)u`_j?D7@{)T-F-xv`v)%K zjks_+(}h`CQ}ELixh_S#Biz}z?H?Srcsw5^03}Dg)-V?~RW(slYZxPxtfJs7U(q#T zwWX&73%zznMFa#wZD+V$;c6pcb2tr^~}xp#j2l*`a_hv*Z&qMOd1o3{+2<1 zue>B$`3Rnc%8^h63hx10?Jy>PVrLBCI}XpgU20=xig4dIKXAamOeAh!rSX%*B{H&$ zTw%<{e^y?4#cA^x$I0vof8ElXjq@vwE>=q>n3|f34hX!{YKbxt63q@K`dM3}Uy{#) zV=ZRsZC)bnfTBAf+q2^rT}vv4_f=$_*_3lr0s!5ZeU7HDymXeV+W&`M_3%C53e(y+Cw&{ANh`irDYc$UmjO2E<~Jx_{z32Cf)4mf`qf*i&51#nSU z(B6sE1x9Px&6_=!qOlRb4N>gzJ^^6Jdg+2VNl_vSJhTRp7S}1Y#5o@?p`}9i%__4X5&23*9}4{xe)Ed-2qB4NTRA?E`SqBW2ttyK zy9&UNdb`E#<;D#9S&T&l{*^XbK4jJ5AR?C-2*6SbI|5St^ruA;52_ogG(KaPTzXWDZN1xfUgKm%7x3~6x6XX~d)sDh|n&ky|Td$K{5$SnfOyD7O> zsjo_qYNWDeS8km{f^#pY$2>*Fd@2}yGWtZ%sM#BOfR6#_Ta1yC);|AjW#vNfan_AE zUUO%paUxk%O3(}NrrFkWIGxGShjAljarYPU&J;{$=@Cs5*`!rfNiWa?BMcU2sln{o z33E0RqS`ZM2^<}Dn`R03y+8V83XwO3)`(nXOd(Qn;GJ$NzNZX=Qtg1oS&B_aMu#5i!Cza`C!wX@z 
zmJRJDydylm7~e?DU=PT+!?LRz%~4vAz?hBsVMMg4^5LCDzc%~fc|22O#n(ix^wYT|i07(+D@HG^G7AkoC*chO)0704 zlX7MHQ+2kDw=8|q#`Tm&GDXggXKhSw;;%vQkmroPO^@tEJOaz~KU1A z4tGQ<^fEtxAO{wf#@B5dxbH(4+l7FN1$rbbY}?HrS6wx zX@9n-0a&i!rxb#($_^q2evF<&sfZgmP%Ld48ykgfA>$&;D=4eXZ6gwN^fVkSOGVIy zoF@y6l~VsBnGKB|hL^$y>O-H#PoDk?i8YxTeC9my48OR$+pyxYgOLJWF$`Q=p2Ql#^dgY&BFD3uVfqvF0N^e%l>sxFnkK2d8{#vNIli37()-ip z>#v_zZtt!L>rO~j#0&#rlR3z*v%>W#+zh~X@#zaBXR0Xp3dx3saE4;TC(1|U?28%u z#b!&rwsD#NR#|ik?R$7Vo#b&B;jeKH`(XJoXD{h{^5D)lIB$;$p@>{$J_hchKh56u zkha6Jjfy8bsJaz@o$lS+Cq$KOTGowNF+(&tZgt`&xsK0zQsKl1&1l|w0`hXW} z`fvt+9jvbHtFuk5ehE~r7`v}g|7=usPcFJkdT%Ugqdc9@lctMAiQ7sJ{i9%kmnJDb zX(O`CZw)lZY3KYAm;})G?cKoVb<<`yn?E}2KGuutS$O6%J>NEpiN74wi>0- zzC%nmxoNI*rgcn8Z+@$f|NSRdWO=QZ&!$_ce5UzdJz4K?^J`w<y>*jd<{zm9pmj`9|rc*tc_7vx{)_w80q<&)L^7~1;!4x?NC8DmFftWfAxCRaGA z$aT2}+?~aBam`kdGX4cK37>|oockgG?ybr#nmD&~WAioe%a_8MKVbV2N00xaJ#)s4 zaP!KJiY}iDAx!`bLZVxs`lX)lFTYv<-S7VMPjV1N?pLfuS$RzFJszUXd(EFOu=EM6 ze54f3HvUv`U7scqITLa6E7WW!udorZAuD)`@)aTt9K$j+>iJ($vbYsLFJwALto5h{HsSSUY@AqYf|!u9LI)!2TFS(ycHBhF*|5m8eW(L%dLpT%SdVbu}Qh4Svn;3?o@ zEqEXw8?t_yizi~YQ~c=kF7nRr_Uve9&L`E-BOS=_Z8!hbF(xkk$R5KU#Z{^y*xWMDISVL1vY2>UGgPBAZUja}+1 zC?u<8kurcBnE|Cirt`2Aye3@({@k}u#8P%t=#e37wxVdhjnNN!leeYmDBGV;{*GJq zr7if77*E4P*qh#eYsOAA7agp$k1|oM%);wghb|_w!Ic#|E0bR(D1Rfcbfc}=)@Hie zxb@!^t83J>#@y)HvfI29l>fzv^08U(Ef{Hr}=C=O!qSk)_&z?HgtNDG}pKJbCj38nP z*JW8xny-8QL*q@h6X@q4&$=Zagk>Bb@$6L}vL8}|(PHJ3yg$GXh8YWM3tNlGb*VkxAP8ySY?0Q z%cbmk-p?O=%ShziJQHhVnd< zUz+%xEq2g&dasSPd~)%-l(sB3y&=Hq&?OU$AXQ*UzCkj zD2D5k?jjP3i0lwE7?n&A-7|8IS)D}l5lHS*HoWbiw^8sZkF%#L9@L;0E@>_z6^iak z4=dWy`2&sHdh>MrfNviv-*BC@9BDgTlzpSPxQuZHGO{?5E$#XR*uWehEnB%r87;=; z@NZsD3q>jt*S@@5m#s>~{ofnV{145Opc()D%XPXS9RK~xb$X@!mNu@D7?A{_zvL4r zautH_ARFO=vM{?{LTXbsJnvd}@Kfk7Z#TqbGnsX2dG4&$AA9y76s?>SU+eO$4tAzX z=gu}Mm%g96`z(?@exc$zT@jqba=0s8Q`}L-QjZVXjs` znO`)gYu8TmwBP(b0nwtjo7nlve|ZlX(nqvl9OzlL=i%CkY1VpEcfGw{2C8A{MY-lI}SCE9S`O9^gLu6#@OSzUv?E$-!xcztwf8G3}J3SBm-+jl{*Hrvp zT-pE2zb8GoJ-qPjppnC!j*Bfhix84!EjdsndNOn;X_T;(wv1UAI2gnoVqcKiUw{2o z*~~&UfUkH2b{#KEN=nKue-Q=ZC;nXG1B_K#7e=Eo+RM-1{~U;-2+ffy=;KK#bp+!P zq;o&pxMt&XFhlhQU1ma@mqz@6fVEA!rq?zsm0zPgW9&k(GUIP~1Aqs40~8gT>XZUv zrrjG*mV`F>hR_TUdH>V-3etBAGUE)F9HR)^8GxWd1T@zFfX}C-;^bkXYy$R)j4b_V z1P%jqtfX_v67R^iWOxgI#0w`i+Jr0(^@lYVJfhKOivnO0e7gut8Wo!hQ`_~sroW$1 zvFn`ewo;Xm>-X+{();Y7BRvj12^naxYVyDu``3RCy!~X~s7Fs$?KygO$xk2u{i%JI zTU$GiGdXSRt`@CZxx@F91EUK|b!>c+3_l#W`h8Wxc@O7;QpdIBKWE+8)*V&k&$DMU zQLNX7jT;wSS@-Dtw%!H?29(b2($dmSowhJBGU@|q>U!tSo%7qgX@g2%|12XzGpOMH zVbveC)W`}Bh(>WINc!rAcT|Y8*P9GiQ|rbDIk;L-`%br=@njOUvEd+?*Px zPdT@s%P0KeQk%o{k}R!S)2Fbbs-~u9NyJ*Ws!p9cS=rjYys@v%nlz^k8-`LVF1NJo zv~1b3w5YEy7W{c<{*%(#$v8&DvFJh@$3uNA?-lD%Q`!>;V0E~few>K4IAnSUxR_qI zT0!O4g+|Gr$jcXiJ+~eB*cW7L1v!m|eQ6KQpMCD`?%ECYRL%dSq;v+W+8Gw6?yYZ) zFtaDM&TVv2x#i4ZYWgr{UemLma22W)s9$~cl`CsHzbGg3>C;oNG3zp~4MSplyL)lvw?l_^ zASflAW6IE5?S^e@*RFNgy7dKz+1ZV=Ug^xLnLdC1S8RPNfNexPZH)?(QRET;re=gyYZQ{ zl-AL{(&fh=1JWz=-^If)GW;|AGLR$37-LhrNP z_))+hDSie^A8;QPl|>ioNb!b%5-vo1GS&B#J2KS6lp%vHEiKQbIZ3w~S$#fG*Qt36 z>ZpKB?~@DW{OLAz^+=FOmkR|3=a+n^b>tuv*?BBc0#OoM>aGH6_3OO};K5xKazZqn zOTPhpagT9g?yREEbabaKaEJDZK2nZo?<Ka0CoE|*) z4xb!q2Gjcnq-7_)x=dYw)ao#t@?gr1g;xvsKo|D*R-YTbrQGM0o<5{Sh2c%mXRdOJ zmcYdxaYv~L|8$!&<)AsOf3xy#!S_{u7!TU^?&*z0n&UeDrB=?0bEjy9L)RZ%(Xg6b zbr|R2!RaBU-JK1SXU(0vSkPXm{iT<#T^s(;F6`R1#X?DLz}|Vr+_~MH*C#`G9EG{+ z;zv?+-myQY{X)T?9ky*#J#ys8ClO`r+2qGgEDw|WJ^S<-s@3hl-wY6ab@iw&zI3ma z>^?Q<9#qa@$e}^JziNesDJAq8D9f49aIaDhnvktsrr7mEmA#vl^GooylaqG1v%!Y& z-uKdPgaw8pzZq?rrQrv9`blsdXT@NS2KN_B-Am=Rp)*zGyjtK_5DCO4YJ11V586tK8T<#)p%Kt&zd&l*>|NsB? 
zUKth2C@M3hL^drE%3c*E6heto#>r@jl+i1*a5Ri$YglQJgd!s&DqG6v2*2z7bjIA%gUJ zDIXXmYbwbRK(HGOoYHS#cx>3w!U0s#rqS&AOnXJ$H(BLp+a9hp?jU^lHx^(*k_V$= zxZ=Z1S?OGaLCyw!xgqtj){G<%PUbt)w7u)^a~Cdjryr|UP}!5;l5ZQ54Id&R;2U_b zZ0eKi`~8(v7#R@q3hQF+=;&B5j*pdDnRRK4boR~5ohmivR=&(?HJ7xw-j5D5zz@s+lw+UF_r-Y%f{-rBMq_v9EggqW7Cng8y{1pR zZ=CBg&(PSg-@qSLRo#I5^?A_$DNMeudU{r61G2#=SZ3o~C(U~NA$_0daY&qs+!U9b zNc)8-wAOr%kG@XZS^{a?w1HrY@Gwv!JXD9v)2 zr{x{#Z?!DD$D@9Ht)aEu61?xOLEeTGKGw3u=3Q(Wr65c32Pk|wC#NYL3!}0`XHY9^ zaRalFUO2brck1vh+xPAbrG^|sO%OJ?Im17Ei*6FFXNZwko;=aSuNi;uw-bqjD=6j4 zu?3ybEZLv2jY8j0Ow%S7_%pU$y}>}F+aV7ZKtcV={rlfG&u~-;8$v%S21~ljXhBPa ze)au3^yt-V^S;F2rIcwb37QO`qYPSI{a}BFwT(?W32F#&`)rT{xamlF5}y0g{rhSp zRM${TYO*?+4oegDJ~aRV$N?!$Ru77tTTl(^v79TmubfIw_?^nsCu)#$<&j1wbX>J+RbE(tm7$MfU;VX|#V=jGS{umF75WfOymr%@cGfJVJ5+CA=wu*> zb7;c>ATVUIXP@|($h^~s?K_?Ve9xKZT7Yw|Hz0J!%yUa>GVjB%zDtPr{55rxN+ix# z)c;gn<#BV*Ks5@_*TKVms(d}cU!yc#@taEG)Cx15GMEgU)Tff1lA_YOweyK$jX^70 z$fdBC#96aX6wfzbwoLB64kO}Jl{?3fMo{8N=?~x6H*={>4QIGEl&55j zw!-1tTHGScE3q*0;Ga9Q?$NAb&&}cU7HJJrJYTn@)U*znfzDLcj%29z>ej6xCFhNQ z{;7-ka=GX9C*ww8f@AtR5a)*YENuIob$~YT)C-=mdF~_h2`<@~PGCHAz8xZ3RsrsA z#^{CoO7D?W!rtCOOH2LB12bn|oTs#rK9%bX8uVChMxY7J|5E*qzH9M6e2B9AE=tRf zA3t6KOgE;(jCyH5zqZHKpYVKb=bv`!yQp|caEGhtcRnkH#qCRN1*&RuiA!QS3`{C% zZ#3Vr@?%SsJk3ttzENGxNBhjaQU@eWN37((NDQP}iM zg6p(pz_d9>I9DSMOVBZh-Jl9~k+R9-4sf>HBag^us^osff|~p0|%0LpFb8 zQoJe?Fz?ili)=7Q$Z$(mtT4&4Z?EOk7gJq%YDi?9UZYfwTfyGd69n&V4KU0jzy&V- zSTuO{fRNXK3FcD)5=ox}0{}Jy!00P+!~AAW#O#dUMiyO%K`T4;w6w6OC(Z6tuKA6G zHGM+kI*pB*#54)?mgk2OPeSUbLu*G>;UCbX6syA1WKg7BU&3hB11A+kAk#2sc2D9( z8AA;5`Vzv_VN@nR(Ku_&pGYMst@o6Zmi&+F?GC5Vb5mj5m@4$j%Ca!-DJax?@l%=; zA;;g=pq9Sw;>Cd&EIp(8lF>AnzHeXsU%zMO)7RH_&c6#-(w6{9=i7@T(Rx!)ItOmu zU?mKyru4F|n39VFA)?w10(W+Mae0Hxw5d@PAtEgE5gd{GI8%9BNJuf-tOo3Xy*c7W_xq$_pyihq%54N_}R-VRCKi# zIVSZTHSR`uEK)$~rk;Aja54_@n&;}^hOA(oI=i-@+3X5C9eXYAk@@Rrom zFZ_BDhJzrybHbS3x#w$T)qtAh!5>E}B!?m5v`%{SOg0P1rziIZg>^Ia`^yLu-ZkgU ztlWJ=GZ47+99o3_B35>!BJdP_E|KoQQJpN)tM>)#SotQuv7EhX)M^1mtND-587zK$ zuCr;7R5zOtq+ijob=baZR{*5ux2Yw8%)0%dS5@9M9;B@qim&UG?)#n1a2qbKz<{G; z)Mw6_qeUQmj^y0=^P|d}3`(l`1$w7ke$1FARC@dg%^R<22CH}@{|m-u z2k}_9dtN^QJ7Yn+w^y%Rfht-X>cbc=zv}bLh6o7svdI~@cTiheB!-1;UImu@c9NNi z504eE1z-0BG_^ci`-*3mYfDB((7+AF?>!?=!VU$8epV3H%>ngy#l1n(=PZh{ho^f0 zIO<`V?e^hmTJv$^#>FgN;53E;7s1-5+xa?Aow3B<$um5sRhS*l%*`G5rrbqT4mI4R z?(SK3`||upKTFbyEuI$_Yujj_P!R1ud-ZAqi#?DVlCug&>%y$gavJiduU{=Bl+rs+ z9M{>`&(A!g6D!=X-O*dQwSOB~c)l&LiTjaF(!XF|`y;PdW1l{MzCvbfX6cq|&_O@@m>oRxY$(ch#^C*eC-E|~G}y)1 zh|kCHwbp#qmDiJ*Z_&Pe57R~}4X?S(+qPF#cS0NU%fvf6Um7(97+}r^B-hNdohX_u z!eX{dJDN)LPH`k4^gtYf!If|CH>c*Kruk~K=!cOgj81A9J6*O3y6Sas8qqPydE_Yu zuJ`ov3<8d<#Y=CX*FJi1Q8nq9I$ED)>Ty(vETPeF-L_53G`l@&IW2)Owb2|oiBO{q zM4e}=2l_tWKXpcjO+P0)!*zg9r5+MC`o3Sz?(RA3LTk#YEi|?7OW{RAnV*>_J~zj_ zE!d=5TDopcYR7CJhsg8VFR5gAG7WkIP6XXsHyC39q1%qKkE798TlrUIY?)r%j1(VZ zWN-p7gsjQ%X-euOzyE>pB9a=tKuwJ5q}ohWhH+I$uCu>IlQ0GJZ=w^4Di zd3=+u_&2QOUPiYqC!>)?b~e2}1G>37*S-YYw<{lK)zpI+#mA2`=KS(>nYvlTI1o%D zxrMPoQNr}ojR;CwYmRIqbr>P2<;T@^KMBl64$bmxlHWw5e+$-DYt7=LJi`{jqm>N* z2?ow+LIp`{I)2SOBXc{Z;_{6fJ>JeEvW(Ui*Ryf{Qnj75RG%m14hc8C7_Fy1#J4{X z#%`uTu4M7$P&%z1sfWD=a-)bW&OD01ignuCZF~;0R>Vc!S5pY;1jtA9!aR!F;1vLl zac$B9_1wu?|G*mrhK9wkN~;;gjr;cPV;%$9M7$dE6TM8WL+OnKGX}16#u}ex;@0Dn zUVi&0) zz?uhgHMXN`sRgE5Fu<`J{eh>By6B@Oqdna|IXA*@afk1JhKVYG-Lb_#;%?S!IBiO! 
zA4M9+l9*wyz(32yQkx30n2$W!LY53$9R`YG$s}`@xC~z36R4LiQ`|W=)rW@ah~<+R z&Mif+wt-sy-@0+pIF8Ea2f;*MU0F65e&vjcB=!A~OyS&s2!noAovVLIM7H8 z*nxlFNG!!S;F6+qUu?g(p<+9h>1k{CrRT#I5=-YQCHKF2lO3Tv-Us+|psjEFtzR$4 zfLRLpAk7?CmY2*%aQy&cdRY)s5b;`d4kLa~it+DP%=lGJUol3w436X2P#*B_2hdIy zo)D|jh0+WW$p##?|Ld)VF9+r;mPTLKG18Z&=u5c6kI@e9ObqBPCwGLNehd?eNF;Bg z0!K9*>p2IM_tw@AgM{a@MV&@99p{cQETe+^d z#q|o1ul?kgK0pt#ET}7={Ma6?JNga+#Qi3#C$HSAwM*-2KQaTln4%*PWp?yNl|NlMCa zx?rf}(IdZ6NGJGqBq9~m)1#p&*Iir)m5MCdFF#`wyf*+q{>qM8p9&Il1MjcIJ-@V_ zf3v_5uKC{-$!pmb25jP7;kR`gl>l!gtZx&#Jg4^&3D|b(CyGc>N8nQ41Lt?`yz{f1 z`XZ_v2nRgz1FWlfy-pQiF1`6@jZK(qJQSrJqIl7=4h%3!Z~iNInMcU2X5JOqU)g-o z){mq&-ci{9{F@5zqgAPh6`Yx8Ze}L*!J<98;JerYQ%7=L(rf5DUv*LaZ{2Gv+a3b- zjZgxLZLi)rRLZTnw$p&|Yb5OK?AmW$qM4%4N)nn8)4>$|1n2r%mEAwa^BLqn+{2O` zvzKmSEQHQ|M$tS#)P@0bCt)wdy7zy-73H(YN1OIq=iHbVz2%VNRq^zs&r{Vf^2P(z z?In7tPSG25?`Kgr;lQGj0)U)uviZ^x=9g#nk4jR;@?k7uvIb-zhgocwDGu?Po5n{F zj|}yQmQ4+{7RSY-%;O;%nsR4AafXf^yV&#$0PSG$vn?O{bUSuTa?DBzC~9>^-%SSh z55;t6xac7wx)eUh?Q<*|5WNR@m7@J1O*y4u9pG*eA`QzDQ6G>&LJWpVDp`4Nj}vX> zorPHqra+4Eo@yfaJPcU9Bz_Jy+wDY_)#O-(!Q{Y6#{QOTdB#{9u8caE9>_s?5LLiKWKm%) zkPu;w0xcJ>?qzMrv|D5ioVYTxm*}m=!m7-(NX6GI}!F@Wvhn0(jAQy(soWoXuc@O^*}U4T;w( z;{YZc>4itw?h)*R+DOjbqg!`ZyqnP8ye)FhIe~s{%p{?wCw7QxG&yeSk^;8b>))WR z{{TCN#?hue4-%>S**@Q1&M4eK_YC;RtDK2!Yj*B@QmOj)Zo~~jwJ=Vb<@CJ+avEU! z{kY{35LaSbE%pG~8LZR-jDO2lp{N&DGXhEj&^iQE)_K?lBu1?Bi$v0EjBR<>dJ_Tj zI;=gc?k`%lOz`DHFt4I5SlA7+)$f>}XQJHS82j6DL=tWhr}UXVeL9foTqdNgWav8c zoeJJs3^H1}v^(Dh2XN@*r1c^k;fHk?o{m82h3@DAj@OeF72hbh>gx6vITC3_5n%Wt zEvm0ny@rHQWM?Fi*F4>51RtWdM~YpO_CA}>7Sr{bxVh8;WT^dNj5G+N zJ?fVho;-GJA1Qnh=k;wGvxQ!q-iZ;|OAvoyY4?hiWn&}cTjDmN;$+DT5c_}uGE$>f zXgHFX`^wGnr2*g*IXw!1KFCaO7PK<$n_fAccXs%yE`$QId$~B9bW%8%pkwTd+xk3O z=P_sZ#iRZbm4F~Z@cOqYpEmZSpm;*wGpVmVXV&!X)3*hT)#*sVlLFo>nFpG^a#C_S z`|c+Qoe-3sMZv&U@XTY9b?>cPw|=Ev2{PD=w=O?>_}-HzPsZGH{qp6D5Kg7Lz(IW? 
zjtU1}&e}t;9pkD8a@CsP@xmsHhh=H>6L)*3lAI;+6e4kyBn|*j zwiNh;i^c;n$VRU?Q@t~7MbgB|WI$z+&nJ=;R;y!ClU1`h_ax)QoMifByX5_S999p{u<__W`KjNXMVM##T-R%!y6L&7^b95aAyHS>=2U$co(fR3krnrovAeha zoKLuYdjo1#CRY0B=M8>A3>1`%m9-t`8v{T3sCULKVvrD9(vqatxM}NFUm|QV#>|dU zjw(7PCc{Oa1v-PIT~pM3$e};NOCLh-t)I7fG-n)#Rx)d(_VcrTNl#EJPK62FC(6y) zIaTH5o_yKDx(<`U3rJKAlRNEYofbotCv(t+bh~>K%Ef~xx`CGG8W}4KjjG?j$mT&) zjutH&*_!VwGQpnUWhsC)l6e#Gl7vuKR9`Tc+&UWYDA@sR#SUS+9DmEBjR<5S zo6f4mM^1RWhSoiVhRq#0{~}L6SqyQBeo1s+c}cU2#J2Nq_?m}s+a?@jw%{^o%3={v z2#JE7H7ZSSp;BOwRaoTTXrV7s)Ch+)LAfv5HhSFb*)nK|lKJqqbgdd;{WWVq)HcLXjjuaduNzGwWVDqw_U9h-S3y1xpWlfEd{F4b^ z!gbTu*G5p`h~Tap#&+eF=}F#aNa;_cQu8O6%ChjXdinX z%1L9`*hfZYf-o`?*>&hWe_1ZBKg+eBsO(!<|3(Vc?DbpDuHu&HHk1CjlmBMb_UqgC z0JNgH&TW00FFdSU&Uqagt?O~`LkJGziY98mMR8M@|?n%dMmwF%;VLr3@1o>+M<1ll1k+l zQKEZ-#dT3sR19iePg*F%A~hwo;McE1mJCd)>c@lQ{F|R;r9y9Y^6kJW#JHhaC&q`K zT=z09O)Sd!CMK7{_V`#3?mRPu^FPVXK;i4{>|{j5A5Jd*(^wYJp}&a1OT-=T%sG>k zsCK{Q$C)wu*H0DvD0EY!tQyG$M(PPK0UhsFF^@)4m2}lxcDCWEGiNFqei_oxkF&23 z97KHYtN!7)6N}^#4eBa$V$V+?RrCK%NwB1RCPC)!1urbXt+bVyGOk;pR$X|1l2BE~ z7vJIIdGQIo$oE435O1TT-e8jndvi>FVIsx8=MZ?7k_zVk4YUPD_Fq;~Ha{RRn` zWF3d}x!&sTq)z3tmKza%u!g&5ZHL%v?==R?Ew2@K1nE+68wKobVy`xRTq=YUE<+7OJ{T}Fy2p7Y3cox zadw%=NkS3LS7(*~QnJojsc?@qu3wxEyrz9V3R7?(uVJ{j*Y883ZofM7jb_rvk0!` zq>|R(a;=`U{$Brrf`Uiz@Y^!(KzNJ6SmN@E5Fo6sEG1;CyT_L*0#+XV)BOb}&CNWUy*kI0)NympB!*9>l6De}JcAn$IUO=8@E{X|h9FgZ*& z4+wzH<*@2vG91qr3GKIS?*2A4=Ix>Wr~}AmHgG8_$q+>(QoTTt3eIfYMP|B%f!SfX zXYFE`xF8@kTLbG+%mI|bZ5aJSXqI=1mcW%s5J3D_Saz{BO`wxWG#Dd(1xY9$WsW3D zG@O#`XnlBgy%(H|Fu<(;A-%v!XmEnx_mES||GH!xrU*vG-Dd?-&Fa4V)?Nbr*+Ldd zG`*cc=631JL2S_c{6%YQ{ymqqQPEw%MJU{4ZkQx@Ez=t~4dK~5{%?$Vxf3c}ASfYN z3Nqcy=afc86dvzbgHNpZ3Bd1*Cmq#03!(#g1DHtGlBGJr!MB#_(qoV(1mbesSINpu3dy3kAJmtA9#@G8BF6wXDGDZ~8 zoKzLl&e%C{LBbahap5PbTW6n^6__NdBi`>LC%5gDD@la=+o|rzTx9f$QfK;gjF(XgXSL}3uN#}h;po`?{Do36XfO^Zy%8G<$Ub?Cf zu92;3MO?77W=E6bDT(ta)N1KeV4BJJZHq~V;f>b~fer-sL+%yTXs11V>a1K_DVrm9 zkX`NYIwVnEJE=JLL0*j_r$kxBQLXT?=9yP|4pUPzwjC9}lsewfNNx^OKi`&$X`LWZ z7gX*|5flG}uXVYVVXHrd5a*Lf%M8Zt?mTR=X-ap2(1HzsFP{?0|@B6%>`%ZNoS-s-O?`}Bx2NLs-q^e45_B1(; zl9w+6qt&+AUH^6CN9#W?^Ye$$^C9dy!hVXj_MQ2dq?LIEB?z35sR-_=H4FG()8M^*sXGLJq4-`m@XL zNHXrnIFRwHl@>LN_9s#bXPJ7T2gw|$;gfL{#}HJuQp!7m3ywr78d3hUE+0|LD8(s$ zHXDg!W2O!Huq~#HSuRg~Gibw{XK!t4nCLh~3r2Q?3=-PhvFr1Df@r`D~yHvGVYU4<@(Njg8WqdJxeEu=t)Xw=wx z^yh;-lbZq|hYvaVuxaS<$H#*@qc>cvm(Ter9h{$8nw?d-{E>no4BZ%?W2nn?28;qN z=(?ilD+|`(vW?j5sKiPTv2kMuKv#6uTVDyk=!~h)mLu=b@D0<@IEVFZJ{ol_C z)cAlDZm8|?Y~}A~?Ag~y`jUDsx<3wp8{7Y|_`2HK*LmaL#eg=wM)>NNe!>>tVdgcr zqw#vWmKz2CT-HsGsaAMZCiU1M+XnXcIwW;|1*Sk`S+#sr%QGi#A)}!8p$Fjf%jjiP z!K+~RlGnWN-!4?<{OaA-=jmi72^kxAGbTnI`d2r3dYm{PT4t^8yiY76!7UPe>j$Z} z)oo;i;RV&WU5Y*PI^AM#eE5o&XO-VqlY|+N8&rL2*5RW%M96m z$2LAK$f+(W8ZM(qV=PBk=I_q~l?^aqqlfNX@n%bA?X5%H`~UIdqKqe=jzp~&qn*7 zQnr&vscG#mDJNh^-eU0Q)5$_=O#y09W9#ws8)7jtbuy96UAJEJ@3z^H&my|7Y~Ex( zXNO&_?YSdR{Uh?_to=cjE2g3p)RPe{hs8{^_pG8Y_=ox_?0Jdlumh8S?#&itkYz`W zVI4a5lC}O;a{DIdfvhN&7u3vngReA1_@=j`N>E1%^QELzX2)K9)-AQ{@u<7ED8XNL zKkH#jK_H6e1^2^M#pb%}pcni6FPF2JMnuzYos_KjWZAUVdPtfZ5^tXNlv}v+=wJWQ z0;pZLywZ(>Ewn_T$0zj4p5t<)w9w5%Dh!|f;=(-=<{_dc`rX6s(3(ed7rQT#1Kt!# zM^1WLT6Yr26TYf69|`L4s*zuMmLcCU*Kf+v?U{ta+Z|V7}!n>y|qje{m~R4 zVJ2?lg(ih--Tyw3jGhc2zQwBaFY|VY%m(nrUU3=%2 z%F=n7O*m(dw)YtyQ1StR*|V zP+-+jKpjabBZe2FVmsDFZ11|Y{xJsmQq;9cLKR5a7s@e@sg0Y>G<#u$R)ZD znfYquuAP67zB;e}a`0e!uop2?D;6)(8QbMkgfCWpg8hq;E`@jOI#D1@U0mqogDQ%7 zuL8&2KV%n-&t=AHk69Zx9?V*A+kMv-=cLl$#TV5Nu4MAO|3srfM3U7>EikF{>-B7J zW^RZeYA!QfMGu+ONLRiy=7$DWr&)O~szg2}bjB`_bV!IqC zhn>-mSKwa3Ru&|*un9ucteno8u7Oiob8E}7oxg!np#`4qmA3eCzqKV}r0F|?cMAF( 
z?}AC+e%xI6Bj3ke&9yvgY2RbUqQWeeaj*GaU+=FlQy4k}9tC^awT4ozqHsIJkOwwq z?ax}%EX5DPLX7%VjKG0iZ!9PjcL-u3Ib;?Itip#v_SY@Lzu=TjwG|D_;sR=ioi|$p zzQqj|)RqV*u%u{biYe)x?~ZtlfQRZ>kTqoLj9Y6jsB$vRQ0ai?`2kzTTP?4Vg$L>lg!kszy61=d0YpbA|RZeY03 zTWTL&$`{~T!{SF}&d?a2abhhV!q+BFov`Xs2VpP@^zk4!R~pvSpsO*h*#J~0i&IV-wO8jWE26k7|TJ>q&K+`7lkmM&$VRz z(-X7r(O4-*!KhmT=e}8-F(jE9?3WGjj|dPM1X9Gd7E&KBXM)ku+WImb7FkDls(^Hj z)mQ;#-|pb#AjARa(JlkQ8Y7t)AyODi^`TX7wpeIEWVunLk58q#i>QCb^rsbn5-sfTvF*zOh6Bd$`EeRVyn?2Uuc+z5djhaT&*?NS#FKWIqwn(KYRm=l4^tt26A+#T+RPOvYfEagjzj z)oep2ARwut=O-;agW9)mAKN6WhT0kMb5z#!NkdBg#~buzoZ?(=JyKG4EKb;BR6#N- zM%hozP$Uqj9zTBEp2(m{k}4XO06fD^OVqGVwX5*roEGVD&QJ8{ll)|aX$Zk90DpD< zo&7M0*Hna;!~@tPAC*8IFa-|(>rJyaZryrLX`>h)EQEhTMgadGrf=Fw0-cSh7zv^} zu`+#J4s=*08^d;PH@EgR_6z&J*fv%h3G409b>k;YNT$*XE^?*4NPCK+9OA?QjKJ)V zjwv?JbjS|rgP zC@g&YXPo>uyF1?M|Uy+A6q=*lYtKPTo&9- zNYE;�FgozxUOq!qrY81uyaAt};pn91AAv>8jE8zKNPJ>%1Ms@9J48qDaSZ^;Nmu zEsQ>k$an`8Z#FX%e!WXlIjO^otfm04X5GPL-xH2-Ov(UtcOH&ABNerhC#<`DLwBiZ zASNPV*~y@l!cI%fJMbRdx9@P8P;R|nVHm$vn4L^ZIpX2b2WHIQ_ZOsUa<&OWB{QS; z;r9lR82_dF0&M8{dxu);wKTdxs+t`bm*&PVgOWb%sMi0;g9Ir-Y&FNECO~v3^ljH;` zD1@qV(G+=$WkxfJ?7;=6vuZAe>?Cc4fg~f~Lz5(#q!bzEdl(RT#DIzGPE|QNvRH6E z?M-IiSL8!Q;K(!cn$3KbpzP%-=Yr4GpxusD={bZ|r`rsOA8P9X;jERc7);3(<|;#& zeu7DfCXqO93eLG>R2(X=HPpso8zwb*!(89&wM3gIWC#cO7u{J47p7XL9&5D#V9Zqh z^5WzQ(LIcW7JeYux|jo5zzOaE6+RFHl?a$!2C zt)LcSTL3Xa&PBaX@Jd@K5}l%M!!$XDB#R}-uUW|nq?J1D0H`nbiHVHj9d;ykJbC)G zf*3#AG;>DXMsss{)U^sgO~*_N5G(uNTP`v801j671`t?J`fj$dnXt#jC7JS5DaG5% zs~_!hmq_|DqX~!Mr^0*hNUcCWw$l41UgvtP=ZSfm*tX9}ZC$ZvPofZT3LH1=Hap~` za{;}$N&3ID?jE5$_Y}KRm_`e~N)s3~F$k%Mps20f``9OJ$iMg38K`>r&>=wwTvL!` z3~I1$B`Q!aGTOzdHh_S-$M*GBM_Hz+S4)eOX$kJDa3xw9?!b zqUlBpi%_W*%W7|3yf$Iy1l?;#h5NK_CBMz8L1hK@8PV3~V-&3*utC8^VA%os?}_Oy+i z;E;ZD(s|J&NoQ2L?cM`prcTwibPHQ|Cni)UE0^yMDV%eD&vy?2h=u*+pwm;58d>)N zN3=Vs5Wf?Ryyd8}%Kwp*SQV;0O0d46Kj9k$=b~*3dm1LS!p1h3tM;WQ|1;}5Gj$+G zbePHDwNGX3x+|iGt?9b(UNKo};~7*{RV5-y@S5Odnkf&FAx@R&>71Xv zTi?;iDO4d0rBH-!B}%n$75PDm=1YC&N4DZ48*EuQ>FV$^04$4b+^}K^O?C^GlFXh#xbfOu+$?;ckMzck9(_wY}eNkG`XJj3455 z5@!XB_rwyzxMiht;@&Y)lQ%`%F&rL&d6bY^O`b91r0WU{OBO2pp&SyT#GUIH&sLTp z5&*p!D-EV`tgIgT)9Bo(i@YP$K3{_Q^ zxTQ|Q7n7@j3p85Mw}1bUv)esk_xCRrJ(Jqe=w~93?zoy_nvMZXTd7x(AOgO#jb$Zt6ddz_UsLgGg@)}CO2Je zO>e#M{fC-sN-A{wk-Hg*-O6>xzffujvLI!;dS{;(pDoHqlVqc|S+!1Ge(2D-mR;(5 z*_26<^9hj&X@Q^R9NY$A0;HKPaiY4IBO`@0IQYF7M3gL$LDFl5I2Q!=CTGBv7B}48 z__(9+eh;&`0%}qW^81LKreN)t8LBZaE>}DzL)}i^5E@6B{uq0SkdxnT-n{u3ETt=R zysxi`R*!apVG+I<6iSPeR|`~@3Q~mB2H7)e6GcH}xf*V-{bmzK%EAE4)z?-I6Z7`i zH@cFAR8cU`^Ps*twxV)e(H{zGFHG%gAsC@>L|7sh9x!H=fQE%{mF{l3QAEE8?HAfr zFOd>sFcP|{fl4pL#6PYFq*fauX8Nam@b@CoR{r^HFZM<3RDVT+hH8(ZVaDt#$aV^P;{G)UPetv!*D!o(80C0=$Oc zpBCrk?*2;Q-gL7$ZwUO`KR#$jn~8zm9tfw({5$=i7Xu9Cw~NaXoS2%wZkTQt$fTkj z`yp6^g)?Gs$|j}72y79_8sB&I;p{Jh$a8FIiVc$zpP%9|`2BO=;`X9rC&c%RBD#jb zVrDlgFlLsR{09I2$d_P2YIom4egtatH!N@AX^cSw z?&~&C{Ud<4r!7fLtjg7~sMv(^n+G=TT)1@Ix~<$rl;8es1%q||?LO!#SoA?bwzxOl z+EyrJ%X-$WmAQ)Ii#%EF+AJ|55VM*tn3N}_%0FKsoWx;d=ySN)a~SRN@CCw7s;HsS zO^h?dMrCN00}?W5xByF4*Jek()cL5+L|^>g`0T9#^gW323OIGR)HH%3?IWKqn(~?Z z(tpkf9c}I2Qc|ai3zcL_+3#Y|lR!*U)YRS;4`E?91C&wLcVR zmVyC(z-`0s_h>@eAy7|}-Ec%cY6h!b0y_>)9?B>ma5i5!#+15SVhYV`X z)$CnudVIJ7F>bP(|7M5UX{69z#I?l@rdVjrRP=ct7=-cj*x7kK z+7|YYa7u#HDtbWztFhGAmM|fVs5HdVp%v;XG6r(3X(>f%>CcadGcdglTq(1biu<~X zE4lMq?;Efo`raYt@;Th^&~&9YtcZ{%gxYTtdb#)o<=!DyXe%CQ_B&lNuo1rzl|KZy zU=s$E&o@2GB3qoCi{P((NZ}|&yG7&`Atq%iH|scZLN*t?Zg6@bo0pf9Gp2g8WkCku ziMHayKF4fr&BckE8yIBw_%i2!c6j@)wGR5Lb8~wh?lJgEL&YwdE6cJ@#+*@_s;q2a zY5!DERbOK8D+)3$BqLf9-78RfPd7=0;D&~tF4~GW>jEp+TyEOBqUJ-M~Q59>lvVxmJsk7szz%fh4GhODkiLYq8 
zENcjwX?v-tF5r%uH7A_V%w$!BuCO$B_yd=!m%5%XyR(_PJKR@p&>#iLwdPms0t0$b zO<(78+Q2hMA89M2GE>viS=s*1Ev(ehu-vh(1ygIf%-x!1Se2Ger{ApfVNH9VewePM zxj-+;*&al;E4WjDr5xe`k!r+=v6lR-5VA&mkBN>J zU*0%<^yr101?CH$f&MHh3FsGichx5I7jF(zOB6vlov^%%OS~8)ZrEUoSdc|@Y2`(A zPa7sPiEhq_rf191R|~Nxc>C80@-F#io3)|nrfio#6pOYpG%%9OI1FuD{x(~jvy{eT z;+8C4{E!%6Up_8MIGzEZ>uqa1-`cz|EQmcX#04t%@bb}>Q}3J=ZKn-CRKkh+m!m1DxISW@jryBf~>WW&T+@7;ypcyW!P!-k~INsOhwm zj8KsUzo$&Kus^FK;CYJY=%CE#{k_#KLr#H(E-5`B6IiR08~Dy%bm;%9q%@b=hur6# zOAAhIG#k2`+~w{HKBmI;&oeR^#@+TiV&|?lRJ2=e5)w_0=4&e316roak#y)@JT6ZY z0=4p9-Mq^0$CaN+6igg;*&}kaG&xB2%*g|>SR$iVNcr{Zgl{e8Z()-c#ErQKY$+_! zg#wXgi!HGq?y|vq4;*+kClcsIrNQq^$5+T+CfTctL36@b^y)n%WgZmJi(|nsPLL>pSZK* z=hV{Dw7~Xc^fqtWO9+D`ROFsjI9=VGu;|2{&?%5t;%?=nyqZ53E+{}jKOm{#=-Qt@ ze{P8ls{EMhJpcStC6!L2t?l@d*SzM-bt3tAOd)=q-qbHiraE^%;rCOdG9&#p3O zoownM?5NHZ7lKj>a4>nMoT1DkkltHO_m0XGV{_FGRS!G;wAwTeAD@5(`SlToN` z?{3%B`lqJ#*5V}fBA!fPvryKEUP{XXqgr@2kl#x}+6 zn*t41pfZ7*Q)w8X=oc0}CTaCl3?j8BwGhacmXBQW#n(*m2xMbNj7?S*9OGEYs@9MV z9;m)3V(!PUU$4b?b3-F?Vo--H`=OJgtT?jt^8(MDSz=WAX1bch<%}Cq6_v4bs}m%` zCJB0r13&h?{*R*A(8hk9I&~5N#2`7~lQ+K>^b9nx@E!&i&4I6Ljem0Ro7OKzZRmyD z*9yfCFJ5@MJOL5_*lvy+ds9mddV%>!X#@?)(;*)Wb?O%GS6_hk6 zE_-xfg&i3jZlni{o8FW(ot-Y4Gm^CW_bJ)RetyE;)>}XD*srr<476ZwmQ~Fbn1$uZ z5B7_QHKCXEm!&ET1s%PLx;;;uY*A73S59xImZ;LfX0)bXgwN~gYD3+|hj>(W9gy&D z*i+-;X&rM_M&>p~90v!MwogmoR8x_e)9;wY(<3>(k3D>e4jMoL#xdMo73QREPdB=?tS*xWc&J0eiObZzT0u)(6_IzM}+PR zAj$X|aXT#~rK1oSapXou8#y}86jj@aGnd>H>zBsLJzsiTd}zv~&^J#5NLTAt)n8l; z(=3#b`+ta0?Ifbow|W_f>eetQ7@3#z-Bqat1+sk8W9q(XakMUI0EX#@KTfRBIAhDS z8Taq9!#W`Eq`NvKY|UsCaFz!{NlVMC%<<6PEzmGe#TT1qZ2gMy5Fp?PzN0t}`Q9^I zbEQC!MgBrUCtRyQ6a%YRKBXLop77Y3)PN${YMM%<_f)JXN{Yz-(8m$oD>^ii}h8D7&#?AXno(q z?n}3E`ScS~MUM+!{TOn`KSBOIkXVw-=MlzJ{bL6#+H%@S|I0|B%ahaXV@~wzac&!BSdT&L z7o)EeOw9ftwx1sTy%ZVC1(&Dz!9J#Dmdvw9&lROu=H%?GHKCUs;+!C_a<=iMH$VJm zJe#O=#^2xUWpBe%Oz#z-&Vk6W{!mcrtpJNtJ&uu`K-(5rbvcea~1tDnCSLz-uQ15o};OE9p-CT!$KIc+1d&Z1? 
zjaC!hb-$C5ySty=^7P!&+>8?~r7Jl|$NL3^ERj4dx38bm?GbFH%dB-5&q*-LB|5L`BGzE5gL1iF5!3=frKj79Zag zRi^(e9VdI%hZk9B>5e({{Y~69rHh2b(UUy)Nc`tk5{?Fe>aP0xGdi=&aaCu}PlA?1 zU2AS&F(LkU8gTy$vr_!qJ|}#AY3W+P_&V-mHt=ZmxQxq@@&k9c~1|JuA~`Ep$Bbc;9+^q|bw6EgC!cmAxn@1{pOT#a&(%5`K;TIl;hfJymJlaFnr2 zg2*$FSB)~bxqwnD0qs3C0jewc!>L-n;eH=n8b8Es^FrrimBd`NiE=g7@3#L6Ig;of z7#PYgu_6=DTDmx~VSJ-%z^x zN?!82P@U9wuFF=Z1N;P;Z65eJ`SYfEfkP({VB!y7pcke*@@{oZ(g%PKOIY2Bav2#O zv4BFBQ~|615dAb53`iHv?tAa6f5tSsE-Sw++)&wmt8C8e3Yquqpe9Ix#vw}OZEWFoZf8O9_2GZke z|K8*~qnfbzSbIdGSjW&S!$ zcXC0-9X}~d%`YOAJ9(tj(K%SWXt6S~C_%@UN-T@AbI33VjEkq-sQBI+$+A%47*tpC z$eRbttFE0;?&<40UCY53)Eu8Crw=t>gxj$jvke}bj!Jdv%;1vvmKJ2>Nz>r#PafH| zYxV8s%!S6v;PFTpE{ey(0~3#hXbd?#Wm~T*PZ&X|z`o!mh*=JQ-MRKX$`GHf)jaufxB9!QN@cUFkVE#677+Y?~cS4LJ9Qap*V2-$lksH(0d z_SieFLyD<)WvMiGG5OQ%*TlM5q(QU> zujV{^<<20u)1NP_m47p<%bU`^I#--JUvcVt$HUVzp<21a+=p{x6t}k*J=)w~8;fxx zMre@RvAk@XM%BX7T|-%-dVQrura5=bW`)0xmzQE#`ks#u_6>HOJ$CG~@8)XYPOt!X zz7&eCo;3xx*F}!ZC8Qh{aqP*L`|_%1W|=)nO|6K_+j`Smd1i!@@N zY`woeY5@ITZf?7`HBt`t zmzK6LPKU%WWGf$z6}3yij|gSH*Y-peA)o4Kd;7%#E04P!PHVJ-gb?r z(|>iBkT^@@{s;t`d71Cn==QXhM_|V>dimMOYSk3|&UkU(0*5(lD_^(XA>-xZlgE26 z%l~Se4kAk8kE9C(5z zu;&TgI+bj`Vpl_4h>I=c5$RoD^ExJGn%%nx5ng7SKXw!ur&~+K`nY&}AU7M^f=d+c=gWtojjZQ8RcC<*+@!}Pkceql zVx_M~X5tr`8FJVgMnc6q>2>>q&osmtHs16oMmec+gJwJ50%z8}FnUGn^II_oM@P1s z@7X`v+qQA{OO1}}BnJ%&Ly7gm?lVjwu4m_=+kzI3xLbg%$y~Wi-l!?biMiwJfBl9C zo^T}k_x$IZ;A7&JI;h!~`!8%ti4b%1@-*hE)zRZy?lYs{70Gzg?EZO9zfW%ssI073 z`k;e8M`8wn+PRD=&#|#DD=V#u!%Igj@P37Gc*D*#D7;wi2kG&IY^iN8GX_pDd^=`h z{I5GK1VfHxY~VqKlden(|K&u~p#efGVcMLUF}Vf_(+0ZP$J9|V`c(h=@F8Z9lTfBY zuX>BD29fc$Ua8XyR9g4{^H2XlgDig~nYqe7Jx?!HC>d;CK{X4yA;ayDj__44S$AY- z*{*8Rm4=V|dwm&X$C>fMVU}$K-JLqfe>N4_Zq5Eg#x=X8{!Py#vq|`HQGnICJ8u2j zM91iooXB(yG3|Ls3N3Tr5~2b>58HlT=LTAjxoA%_VPgQl#*0rRUy4-XewCNZ$eR_O zce9?}$!OPg;)&VqovwlWk|>=OpPuz|+4Rctpz?2aeupJj+>?sm0q{TAHHq(0Sbs0E zq}DA$b|d*q|Nc8SSv>?1G}NE7mTn`Zymd8s@d8(j9Wt<&^xqnQ^7x%yfM@W5s=T7L z^F&&DE4b{Dk=}vpR_{=4t*UJQblK9S-mBKp(sp=ZQBLq;KdeQjy_n+`bW2gWb(!mm zyN^ESPm`9DIE5|TedL3es15V|J<>B@x-Gk(px$$Db1BO$o8vvQ*~81L?#EGy2P4o$ z1e`i`jic2H{Y^@0?vyz-9=ny_Ot(98zJewj%G@EcXM%!UjMROzn)i?+MCO~Gt~~cE zd*lTXE95=Ia8h5@>KoRB3*td++0lzd-xIx3_Bx|fmi*n=a(3*O(tZDoLOj6nlqH1F ze!U2@W#8mfl+z@jJd`D4)v-QJ(bW`fR1bo)7BC77{Oz8aWsH%Y;4= zxG;ov261BS+pOuEZ}0ZoczkkYrN#nOJ5c;17vpGiibYp8`40gz+j{<%nA0)T;g--n zn}#G>Q4)o%x_syl-iZ}lK!L39!u(2)|Es+>kLx*Y|NcMBV1`jMBbTyd#*%H6Jz5PT zgGAb-ZAemSrF|Wj;Tj@}vJ_?_lFHUzGGi2?NQrhTZ7S{4Lih7*uIu{#=JELbbwBR= zzq>zXNPRx<&-;Cz$9WvD*Xwv4^FMspwR}hsGCtXRr>+)sUjXZoy+=rCJ)LP{U!m~8 zW9x~V+QHAmUn)vVTtb)@fT*;dmcs6?jcNz&R3wfEfBspQBQd_e^}AK_lKYP4;5c?y zK2^OzG(B+c%%eQ-ZGMXMQ7rgP>!GioqKe8EkyWXEXz1x7MP#bf)2;nAwg-ZbpFFwK zb23KW&+QWLFFAG2kpg(l$zgZUbL*2xRjq9pru(g*ruE()&+|Xnf&`!-x0112Ex-5D z*{|DGq{RT6)8Ri(Sfj0-?lNF-YvKt?(`D@NJ_K&d4Iaiu*L4Rwee?_jgbx3_*e`+3ijR6Icf7$ zm>*%C=$5%G9XkcD>SL=ZCF+}g@zCc!j;MUob>}1L%X{kcL!E&%C|c57-j{BF`p-2h zF3izB_|ejBE}4UqHZEOS7mU-!o-(O3{xxgm_ulMYrtg|N0Hzj4FHLgBKUer!yL|Ar z4Ks4j^OqpI+nbIQxFBW~B|Aao$0b1ltq1e>A>&T6(f@ z)UzD6z1do~u7V!Ne8pN#hvA&i2M->+@jmE5k9SPYqeuF*(zp^`ONu*MTXo?o3MpjN z+aE3LrC?cvq`0KmeMHe7QNoOyFv0GYw*?XG?oZa`sbQ*~Z5MQ>L`Hfq3-N4*rMF?M zL-5twT3=sZz~P&Ax8FSK_u;33+P|qZ&H#bhpufcEg;t~o_2;&kzwQlk?k^A)D?^g=`#NnVuLg(nhIe_;0twy zT7PO2CJP_l7V+)2&)U+U)uQoc3jO&J`vq0RY0&5oNq{o6xlaD6Ur9x!cdeClbu4L*1ss-3{bo*M($FisUr{NNrF&-X znv3WKR^8vY#L?{8ZWoup=6Z)HVv*!dA)ScgUKnSQ6>cDjxc$mnVYkEC?}yH_fA+NB zlw!>QAB>!`R?%rn7^`scyGFb}hD5qj37lGg5SBW>+RCp;TtT3mro6EIrD zpYOr`FVeEB`!%Uqsg@l3asw20zjCY9kjz{vPMOPmd@W~M$?tdGBnjRk&%~jvX6YWf zAVpc0H(dbGBF_YJq^S0*QDTq}-H4kAxR%lAU<*fc9Qu^)T;nB%s89)Qi`*W{?TPvR 
z*pyJ`mHR)t9{A&XTD|J?4QoY0jl`A4ID<&7(frbSv|QIZ!&^TNX}3t-GnxYUdP0k+ z|FSkm3YzLWhL#-LCx|ozf z_i8j~;jF1s?=HyQekgtK@3{|eNUl<)ZfZ$yKcbf{G#H2(Hj%&R^9hC;sh3o%WZ)77 z>%9VRu*A+j8erw@qkR^dJsTpC#2G?OM=<%l~ic+O`!I2)%%I7;0 zmews;-O{HWw;PRElDhOOhzG5fuh$-*`RZLe*<<8(>j$Yfb_3tnwD{Y583<|!JT!Lm z^(jSFj6hFdqUnLLx0LKeFW`z34E!ouFuco~5%ht6it%nKn=L3nxLb26%T_{WC(an* zz1K$mcQ|qb$OMxwya@tD?;InWsVz7pJTH6{=_P?e2=&jWpu$gGV z9aryovNE-=In4kJL;A?0-P&VhQRwVi??xea3p8S@-0E$grN@#gX+6j#8x;YXIig8w zvW3$L9}D8PP{llJyuG9McWZWkqJruGc0^jvEXM1x8yr;;pe6y&Y!b;ZvNySVVzd|& zK7OPCs(<;d=g9|))o09D4ZoJxc`AD>@3Z(|+NrpDi9}+wFyfs1as8bO1^}k6P;i)x zGd7tnZ?VHwDGUtx)Z2ar+U=wUlycv2+(AtfX~Sh&d>)@#nEmQHHRn83JAN7i#f%-d z+Il7iKw(voM(<@Yv=yYp%W3;r%PoOFpmvxIU}yh6>cR!td-FA=ON+JL8(fYb_`zk; zgGY}Fz!mTaHK@97Y|KDj-S9Q?dr$EBZCQ(<4^Aj=DVT2daHZ4nx8LwT;rKQ;`*&ZR z58`CJQ^Db6XeBWuC&RNf@x9deUojMZ#)x3<_AwNjR*v<(f5n6{M`|Ni==LUDj)r+3C z|306amVV_I?kvZA8wr|&n&KF@$9ag~br8r&S#lB9DJM&_v$JD+dU*6(6h}&r8gcJUOvr<<39g<v~S+6oJC%8W!h{`-PicMleBdzik7upViBmk(V0+#0;)>0<1_z5~v>5o9JF>;JRb z$CZYL@8Yj5AF^Z1xA{-ISl9El#?E8_)((?<4`glMl!I$(TFVPQ4m|c!6 zPo4t5BvE6Ck$V5TyID#bnA7wtg_wuOclm^7>t)wnjp=-M__Gtw1xmHe@|hxxmZ!gr zZmrn?FQ8SjU18d^>O)~cZqw$AIqMH|a}&2Nl4yr(`kN!3xE0t4X+L*V5fD(B!(Ay7 zF0Ddz3n7PZYbpry0ev%63Hy~-eMwL9ox?BYYdWe7SZ(Qi{s$lX7Tu~j$#5dF3D(*D z>5&I>@Ec{+t&@jZd;2gG@tpIvgI5;ff0NXgqtxCm_yH-Y#K1E&(b%`I6T&jmSZg`T z*kyvq3Dp=PqaD?~48y#7pzHor0>MeVi zx%Fa!^l#tM!C2<-(OJtjhpUJ+ya2b#nj1MV9Fn$z9msGV>5H(+&HQ% z*ZDUy3@RVAsm%7S?HO}Pmx?Gyf>#P$zTT1To_Nc0M2MXiH(t~oCcO(YqLS=Oo`-(m zOlq5|`lc3!c9;G;ONi9O$FqcIix#ly{@~h>i1+>dffZw`*(e8kdVIbCU*_T52)x;t zxz+E~IqA#Pb%-E!W9rM7MMXt*?o2>T)!lnTNQmU-+S-WW zS>B|X^Vc49+o$$1ov!y6M3LNOsb(*21A|U3+i#|wdow(IV&unGV+7Pn6Tgj|n8M14 z_Tu~*0ecT@x4v^oj4zb2UhKTJcW5tN@_W)AZl$ikZDOm8ok|=yRg%T8Ch9v+X}qge zU@?S_80&Fl%?cB=cf0Cya_9b0X64oF^X|71QWlR+4x%QnzYqM@uZ_cgg3=y(Lob0m zwjA46ixMz~JV7eyr?f4cECzJ8QRAD{55)VwUxz$>(ZR=$ffpAe8;*R@um7Vk`B0op z&mM~9ilLP+C@KP9yTn|09f$^Te3M~_5kS>88D)$C=n%dedz&mL3aBq@<`T`pB=YT! z)n6Xno>+(H;vuib*rdST-PJTG3ItB~YJI;cCzt^n!+pQisgJ9h&#&};^dBuizYn#T zJ`T-E#R0V^6_+Gg`e{Hz3^44w)FE3js`gZUdg@ddBW1;3@ARulRR7_+M&!1E#hy(G zMWy9dDm-A3WAZ-9bc_aC?zTK}pwdA~bo<&zOO4K(Z(Lwy^u@eqo+ar&{B`?OygDwM z_|nQ!2THNzplf&d@Qv37caE8~9Nu`xkqdV4)Pu!>f?!V7E0yytNy9^MK_Ac`nTHd~)Z z$ZRJ1l_xt};uXI+F$6+m^<>|I8vANw;*sKyJC7OSq(f$1w@EX1-g{OW$2h9dH0ct_7>mBt z!f^q64~wk{t#$bIRl|cP=9)bBn2<~-$QFoUt?T{?V0<}$#N^;#bRPMnTv8c+JqO0D zIHG}bUQ(%_0@InduDDgWT_z;D#{zA}T+7n+rlue{`2EL`F((yY*JIr{Z)?R*$<%K- z8!t@WqjE?j{h*r-znWShhqvk7*$@1I;&c`xITtK5SVLJuABDCNM_&Ts>MBby2{q*? z$N%cV?8X}|E4B8lci@brN@-57_JUbnq(+}`Pg-`~olyJKtH)>R{ywuAqQ;P5re}}9 ztPZ0EeKM*!D?h5-Pe0b6;J|^SUa=wwWrcv@V8YYZWqThyckbK=x@IQa%SAaWt@bY9 z*{B7%XVnFwC^y&qgyP_?ci0i7x_>kIrej6p+!Lex9`0F==b@}5a)r&_6KD>J;KZ;rS|OZn9*s1ZjhK3OT9Qn{&ht<<)DOvWUGplfSoD!-mOu+Z2*44qHyKi>+Q6V1I( zy!!A~2_Ju}e%X7_>xQzlq-SGB3#OhWW4&i?u%cN|+mx4IIB5}9I;qBGBj$vWb(zQC z?!7|&bk)7wQ|yEV8cT?vpb7@W{z9CxB5gkoBSF}S;#2E-kb2iiq)UglSwA}Yi+G#^ zzz#2dp~M=m6wCn1{HZ}|+hiK!o;=wY>F>_cL68|qN+_E^qmr>YS+|4Hc}Zm|Jvt^? 
z^shYiSeKJ?19A@V0!zRr(cjgkUw!uh`ewY*cV|%Adv!P4r=OonyB+Z)pO({YG8TBE zR{i84!pezyod5`9w?94EBl5l-x{-;P9Q{f?4VLg|=7$?335~xTkYA9hIeO&Cd?6~( z@1yo}^H6HITtS9!k_hu1Nx_j);K%MW$n^8g(Wq1hD%D!E9R0_0K&=P|%{g<{!5gJF zvrK#p{#Cu;S@&-EYeVi(M0xB^pwou@i-%hKLvOa!N=n1ETXuiwZ{Lt}k~mO&IabJ+ zIZt&m4@C&9uPZ52ZmeBvYwLXZJS)*;vSP(vy=>DRrw>%tX=Hx^>X^WexM+FIUE8f1 z6})qFoj6iEN_Xc)BU8yz&8xnMY3TT|&W|2Ec#3TYjV9AB71LUjLWcFtxRd5xj~3=F zEz}4&BMo3b?v#RROGTyyjQ<{4F)^FWUZuV3BOD?g2*#ndBRmy zJk0R8^9@%JJ3%cwXkABtO1c>#He*g~G|)K4dC^nYIE{RCOtT( zW+J*O|I^(|U;ntCbQcV|A+V!4DW2H}u&fK>6c6K;XSRV9uXJ^liFWkPmZPRFWR z+~^88BoB;I7Yh>Y5?gp3W#3J^e@RS*&st=%=utSfTK*|{DS9jhnTxIocIe>ompSqF zE^u(qt_2z2{rsiZ>z`)?R*7xGr8iYvaO|EOhOquapJCt z(&sCCS@#88#vK1}vx(A@VW;ZdsuYT{#M92}jz_u!8dM1E>nL~T?AbU<po!`Kd z540>3m5GR;GrL&kVnK=-@HyQhkcbzOJ#bCCbKLiZCG2{cUza&7;LJIjW5-!R#{ck2 ztvaF_81fSg#xLC0dg4j}Z0_8?W@34BYNMj4>n^_+t-Tr=_mx!WW0&01(_=Q~ zCelL?uGJO3-4T$RO`BF_B3KeASXRjUW-pppI*C)V>tCB=MaALqjBdCBe9~BEYe7_I zW4|EIQl!AMk+Lu=_rYRk1!ZXbujUW$DW+R70pC#JTUKw0@!2ihB%%2qeKa*~+Wh&t z0>5uKNl>$+!#K-et57&14H;zK5okrRmuBH6M_X<&R>=%edEJpr`gyuGCR+F$n*Oo~ zH3qMz&)z^pHVTb}4gzKsTxUhI@q5$}itp2gyod)W^b%m{w(P?bF0mqTf72!H+DMQU z(w3>KkJNFgThNuUY-*kly~T18*=8!UnPooz{5+*YgLpu+^O`w4J7#3=xvIjaT^O2J z%P_G`H>0;9#tSsb00fk@b|x?i5w3s(IDGAt9;8u?Add=@g?-pfzL+_ChZ!B>w6lxP*fG&Rf%672!Umm5!XCM5#n*8PL*{E*%$qiSx((-F zs51?^dLl~jGjyn!D{FtLga4^`EmDtI)0F&+t%%lFfmFMzv{sA-2J<4FaIKASAAfH$ zQAS$5<7l~0`})rMXRF0B#ow1@dg9ozUqDIhsA5lpBj~jo2NpT^(wX=Pa+|$B$p2YM-s3@aDAIK9ODH0Fz2u z*qRwlE0~o;-6MS{E{-S}<`5E55ma(eu7+ zgsikX1K`|Z=6xY4#vIR2#9D)<5o>Mwj2WUYC#%Vi8tY}E(^OZpDR&XZs)+&5> zeMsq>q zAk`K-mYa%gs#o>;>&PF3rWm;J7XD*7F9gg*jDjyeF1&l7%$Ni%mb41E@UyW4F3&hH zT$@XcxkCcdU~GiYYz660iAD*5kJs+^t`G}*tVg_J~ootu=jbdmpu z_Q`_!tZ$eI?e&E7r?f$n9fsWahkRLyr1VSS2LRNTQ~MCu!WRGqP1Vu6$>@~~E&Sgn z;HSDv4Fka?I|(>ht%l`(efB_@r57zjkWg{TskjE(oKUUL@3#WNog0&GHkX!s_fqEr zdX{_LF;;j`=M#K^v$}!V@=r!7rJu(p>oQ6OK+h*9eu{}IRG+jAdV|CwRvi5V`nBuV z8{oZz1hm1eL^+>cPVW9o8oaH5_X_X=Sd2i`Bx{~i<#KFNvO^Okr4yMu7x-pipflbd z*|9QXCYb}^WNZBg#`<<3O*%D-OWX3-Da~R=_eD$64#(sDz6rF9%8v8E)>w~i9iey& zQu?=zfLekG0v)*(_&7KBIsJnoPOJ&@TSB@Vp?KfA1{zbGtk8}kkpbmjn&5Fqj#yMU z!7=?8kt+Uq5@FI{AW@2Xw4VO)Z`v-jB4S7X}Y{x;<`uu-(5gR+G85%g^Z_Ryb zoO)beo$$l<)Zw6WiN0}mA$gKj)!JG&O7fG!wjO@bb@N!*>0QJ^C#^_8ABNd|L!xCpFn)AeIDi&@u%8T7Ya#n;2@dS;ECMC*kqUJFQ zOfumii1irXG5V_k!Px7T3IU-&7#B2i@^%!b>w;)4-i}*NP&W!+ilT6ptW&GZg7IRW zbLn*s(*e2ZOI<%pw`RP9;v{!$g-XUi(v4bBbk#T#we7NmlTQpp4VUWH)X?hZZ~>%K zq}oQk!;cOe1!X;DGsRvc(NsssXjHGh8!IOl4J|Nx_UwmoUyq+ihdCutfTm^Kc*7{=0`dazYm- z8Xw$Ld+hznU$)*!6IeZ~(ZgHTe9WXtv3$xaXrVoeTR45<2EYl})Em!sjrwX}chvbJ zAcweYd2ekoW@|Ax=4Jx0*;%ExMBgHkvC{Ho%9&-g8w*o@s4q>q`E1~@qXN~M2#0vP zwcVfU&9f{r7#s0de@(8ifxBaoQfG~@$DDu2E0JtPhe|rQMCsH*h@BF-jXb(St6}<6u>9hcd z!ouFq#q*H)9ELHSo^QE7d(4#~rY-1MjbMJ&p5F#kA)-dHUveyclkGUjMS`VfUZVnK z1!A0X)~jJwP!}4gqm!2{)byuqEppO+!IhGJ)xP{amDacSSac>%lPnGSSRW%Z@}{MQ zw{MP>CPKc|S4*FR{XR#G2&1d}W|3#|snv@Q#IT1h0f5vP|K4t&R2IehuE+T`FXV$n z<{N^5&b=UD4`6Z^Sv`gx(r>4n7pIDT{W+wby|MQ0>`CsjO5czAq48NlJWr5i=1`Q!Or&f z-Cn8_JS-4diq!jhp)VopIThC;R7F7>ic_zEr*2^q=CcslTv6~QAkpTNrr+B=TU?$lCbksp{=zSsIi9(e+bzDZg(<^v$JoCMi#t9e6((vB(wI68fEh=)K3?~%8Z`SJQba{LmEN(EjRNi0u zlk=70#hCj#qKO%i^xMm$`P{G$>oUuv$)xb4VUUy60)qt=E28jlO6mx; z8)i(LN0=T9wp{!?3d=dsEhCcrS0e+FXI+5hiS@@4+^OWxs@S8#Fdx$=`q-E5oS2k z6bGcbj`J#ZBteO#dYq+|nTwTI|3*w{C3$bB+0|zqlo7_ z7Fip)d2L!xbc{%C1nMHdy*>wZdSLQJ&H}BcYVfVho!ELbp55Q40cI5d<9716if`}U zK>^ZHXk;NuAe|W#OXXe!-lHG0fjW2ISb7VapVC*4jk^Ft#ykMMy1j zH*}rGwQ#yUWu?TjYG0Ojk;IAB7Btp%=*DX`kL9;4*I>X-ES15QEYTeyt<^q&cHbhi zzwMB7`ohBPfc_m&`1K%w`Rvm;alopomYmqSHuZS5Po8>r-Ni8Twio}HMDbC?l^=d> zipoCZJvQXz5IUo+feSU6JnD?Gc8W+Rn6a!S 
zjusnJmXKh)&2i3Z{L-86MYH1SI1}BX?KiY{cNd9#7VOQ2Oz-LO+8QFxx=SLQ@+?1--;G>4^MDC)d#sqOlfbKG2|CA+GtU(mL$ez ze;GY68xgiRs7X{WVBoBcbE6QHL^xa`OKYge%#`=4`+6+m{w8N%TsuTDG$|`NxImUZ zG0|!0#Wxo-6iq1OqV3F)I5LGqNxi#tcad4k0A^UrcI9keAbQ4nIqxQBCHLLDj^8|t z*wR4fORvRr-$ht$ADFMuJ=871avMZ(sz>XucV$&AZl^Y$vOatL>b1t)oPP?f@rQ|P zUF_1DSV?m*INut0vv4zwEb^|{Fo+{fD@Z;TphLyghZk`r6A%!Aa{tSQ;)EE16djQF z`kuV##;&~PafJ%Zlf5DY3kW=ZKVjEPqjd8gRjQg)zw##QRPTFxGf^$F;y(CU z44n-*OP^h+%WQag^U~9dtCwQ#y@YOE8>(4Cx$lzlo^J8T-PsY=Cr~xzEZbx47H&gj z%)VX3q&-nKiDGtLm5_4C`@H*Bv^5ruEkC8=dWBOi3Xw-jLEnw~!i5B9Ri;3+kD0T|q8hpWQ;0S_5FI!lNfVu?NC18c7d!}$B z*ZWV=q`W&uT2fvWQkAPWHejeVkZUhqu^nVeVtj@Q-KsV739UlbPwbBZI4n*y`0S_= ztk~kMzke+D4+YLf`it&*P-pWy>o~UbpiIY(853gnmc>XeJ2Qh?T!r~!an9rjhOqt& zg)6XAh?vao@db61%5G=#+R{-WWvPupfE|ovFL}`kq)8q5zmuB-C7{F$7Kp#6Pow%* znnF7vjwI4?JES(fg!crfAi=`W$hDDHLa>Cfh~UVNuAmSWvNm2dl8-T)2Io0HJ1YlM zJ92?`H0>LGmYfKn$q42@=V_bzeVEJevtTM43DLP^}kVWhg?TBXio^WiU3RT(V-&Xyg0nts) zKvxX0V<*)#9)-uC!7tHc+VpVodu-V_xD0mgtUF=nMMWY2$L1P|X)OdoA?&7qO>FXq zLWyKl*P8FX)o(%B=G-uBvgFP(%J2d(qfYLxID54Y#YF!4s{paNHR~`;zLpTY@aQ-p zY@uJv77$6`kTpYJf}RLLDwmj#2U3&MvEQh;2e&HD8&b7tRj*$cAOy_ld~Q_U z&uzrrR%W#e(X5NCJKz|d;ulh6+?Mi^Zdqt~uuA4y)k;=>Kux<+A|gYN%H-rPLeVR|}_HxgIn{!O*2+1Aa(O4S zNK?w=@qLcr)L4rcMe$sv2n+P))LeVde+`7d>aOe<3aSDi#|{954J?mM^o`fd2YxW% z#-9M%!?#DTX=1<^ba0!z;G`6d*KF&RT-kFHUBT`Q8!^J7eOo%mh)(S5kK1kv+?r6? zD{8iBS%CG1WG1t>+`3GFfM%L2DB@^0rYgmQ6Wj6xRS=Uxu8`Ws(rp%l_94aa!UKBe zW2^wA2owT!7IA^dYPDm>Y$_XUO5uVYooGK-EtCY=Z7MJ2ByU&TSSazxBfE)zlsB2- zot&JT3=N;37?w)5wvkL)Y#r;g?}nTW1tsO(&6AuKPnog;{4RBYX3N_+(Vdr%i1ZdG z*TqYhaw;kY9{ye^q*SNpMT9-bHKIWxz4<*AOsxza6j(~ymFJOjz9?3fRG+|E&6ZVO zeA7Az^WV6X+%i_Im?#Q5k*=oSwZ?0zkV<i38|9C`qC*d(pwmEky}V~GKl|yZ zQF@4fcR4B(mjRKDpfc?pOU8&W0tZ^isQZ$YLnJ}C6J8ekn4ER+_hw;rg*y^ufLJ+P zqrGF19tf8KrjY_xq_3mvbMaIAMVmSg;aXsOr&Y$XzuMxAkeJ2ws}o5NMe<_zRT;rqjKdRZO@8GI zK%aJg`;Ob-+fRqse!?mIO%=e@@e$$|0&a@@YcvXpnM<&pxXbry1^1w!kX3S*cfG$` zfW?qAc5>rnWd8b{Ya1v~K)Scdjr;P;VDU12;6IXp19Qz%^=V(ABWKh1-~S7yilzbmBR0Eibxs6B|ibKY&l`!g9k|?9_kSbAvR)(~0IBE^ERm&~yN`u_XKm zGbXVgs7{N-1| zB+IvdXH<}YfWy~1-M$J0OTpcF=dyTX$q_X&3q{AOX3!Br*Gsi2eA64ZB{STH z03_dB9U`;nb~#rdxo^fHMSOqNw3WZz8I1|S3C@VYTQ89~tspo+K;jNSO?zi>1{hs? 
z>FS?JoJKy#%L6!a`Xf8-5}-jO3aDi6ZTySKyVsy=O`)7t!q{ zxo^$E-ic`MG7LT&n3po$NS;ZGy#^|*u5-ke02^l@{OcS$4Lg#np1O1?()t7W55ywLg@=<23KYIYCLZvk@<=UD$sc`;}5dZ zQJOyBJ4;~RpTxx9z5+@i!f?5~lqC64MG1Gmat9fKfrw4SgtRw}kV-sq1B`)$g)nco z4yPIO@zZ(jiDCebXrd8psmNfQoURcJ#uDB|j}zwvD%Y;Ad0ZXM01P^O0@#VL{<__5 zbBt_hxG*DV>65`1AWUFkde|K2C&jSo1U#+JQ#Z|2S2shAj+=%a^`z*vVoy@~7~0a9 z!3R!PHN4cca+A=W$-KDOu4v#hJsnch7A+x z4_rtWEwbWE2XJZd5C8L@<+rOFn+C7_f4;}PJ1q9f-@m@HK%xHcZ#jG`?6R?D@Tg(dC`CVrS#-~@9clR_3!`t z*ZgnXn*Y4U^4s5We*gXJ-$%sXzuEJp*!+M0`agdw{^!~K=birF1&TZI|Ff9>^lvJ@ zF8=M#SN@GQZ)Y9}nEu~|&v!Tfe^=@M?SC4%PfO*!*Ee}0WJuPm()liK#SeS`7YPph Axc~qF literal 0 HcmV?d00001 diff --git a/media/matmul.svg b/media/matmul.svg new file mode 100644 index 0000000000000..1d6cb4bb78a22 --- /dev/null +++ b/media/matmul.svg @@ -0,0 +1,1238 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ARow-major + BTColumn-major + CT=ABTColumn-major + + ne00 + + ne01 + + ne1 + + ne0 + + ne10 + + ne11 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BRow-major + ATColumn-major + C=BATRow-major + + ne10 + + ne11 + + ne0 + + ne1 + + ne00 + + ne01 + + + From 1966eb2615242f224bf9ca939db8905ab6a174a0 Mon Sep 17 00:00:00 2001 From: jiez <373447296@qq.com> Date: Thu, 25 Apr 2024 18:29:35 +0800 Subject: [PATCH 034/101] quantize : add '--keep-split' to quantize model into shards (#6688) * Implement '--keep-split' to quantize model into several shards * Add test script * Update examples/quantize/quantize.cpp Co-authored-by: Georgi Gerganov * Split model correctly even if tensor id is out-of-order * Update llama_model_quantize_params * Fix preci failures --------- Co-authored-by: z5269887 Co-authored-by: Georgi Gerganov --- examples/quantize/quantize.cpp | 15 +++++- examples/quantize/test.sh | 65 ++++++++++++++++++++++++ llama.cpp | 93 +++++++++++++++++++++++++--------- llama.h | 1 + 4 files changed, 148 insertions(+), 26 deletions(-) create mode 100644 examples/quantize/test.sh diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index 64cb6db19d004..da1850dfdf517 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -97,6 +97,7 @@ static void usage(const char * executable) { printf(" --exclude-weights tensor_name: use importance matrix for this/these tensor(s)\n"); printf(" --output-tensor-type ggml_type: use this ggml_type for the output.weight tensor\n"); printf(" --token-embedding-type ggml_type: use this ggml_type for the token embeddings tensor\n"); + printf(" --keep-split: will generate quatized model in the same shards as input"); printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" Advanced option to override model metadata by key in the quantized model. 
     printf("Note: --include-weights and --exclude-weights cannot be used together\n");
@@ -300,6 +301,8 @@ int main(int argc, char ** argv) {
             } else {
                 usage(argv[0]);
             }
+        } else if (strcmp(argv[arg_idx], "--keep-split") == 0) {
+            params.keep_split = true;
         } else {
             usage(argv[0]);
         }
@@ -332,20 +335,28 @@ int main(int argc, char ** argv) {
     std::string fname_out;
     std::string ftype_str;
+    std::string suffix = ".gguf";
     if (try_parse_ftype(argv[arg_idx], params.ftype, ftype_str)) {
         std::string fpath;
         const size_t pos = fname_inp.find_last_of("/\\");
         if (pos != std::string::npos) {
             fpath = fname_inp.substr(0, pos + 1);
         }
-        // export as [inp path]/ggml-model-[ftype].gguf
-        fname_out = fpath + "ggml-model-" + ftype_str + ".gguf";
+
+        // export as [inp path]/ggml-model-[ftype]. Only add extension if there is no splitting
+        fname_out = fpath + "ggml-model-" + ftype_str;
+        if (!params.keep_split) {
+            fname_out += suffix;
+        }
         arg_idx++;
         if (ftype_str == "COPY") {
             params.only_copy = true;
         }
     } else {
         fname_out = argv[arg_idx];
+        if (params.keep_split && fname_out.find(suffix) != std::string::npos) {
+            fname_out = fname_out.substr(0, fname_out.length() - suffix.length());
+        }
         arg_idx++;

         if (argc <= arg_idx) {
diff --git a/examples/quantize/test.sh b/examples/quantize/test.sh
new file mode 100644
index 0000000000000..840f712accfda
--- /dev/null
+++ b/examples/quantize/test.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+set -eu
+
+if [ $# -lt 1 ]
+then
+  echo "usage: $0 path_to_build_binary [path_to_temp_folder]"
+  echo "example: $0 ../../build/bin ../../tmp"
+  exit 1
+fi
+
+if [ $# -gt 1 ]
+then
+  TMP_DIR=$2
+else
+  TMP_DIR=/tmp
+fi
+
+set -x
+
+SPLIT=$1/gguf-split
+QUANTIZE=$1/quantize
+MAIN=$1/main
+WORK_PATH=$TMP_DIR/quantize
+CUR_DIR=$(pwd)
+
+mkdir -p "$WORK_PATH"
+
+# Clean up in case of previously failed test
+rm -f $WORK_PATH/ggml-model-split*.gguf $WORK_PATH/ggml-model-requant*.gguf
+
+# 1. Get a model
+(
+  cd $WORK_PATH
+  "$CUR_DIR"/../../scripts/hf.sh --repo ggml-org/gemma-1.1-2b-it-Q8_0-GGUF --file gemma-1.1-2b-it.Q8_0.gguf
+)
+echo PASS
+
+# 2. Split model
+$SPLIT --split-max-tensors 28 $WORK_PATH/gemma-1.1-2b-it.Q8_0.gguf $WORK_PATH/ggml-model-split
+echo PASS
+echo
+
+# 3. Requant model with '--keep-split'
+$QUANTIZE --allow-requantize --keep-split $WORK_PATH/ggml-model-split-00001-of-00006.gguf $WORK_PATH/ggml-model-requant.gguf Q4_K
+echo PASS
+echo
+
+# 3a. Test the requanted model is loading properly
+$MAIN --model $WORK_PATH/ggml-model-requant-00001-of-00006.gguf --random-prompt --n-predict 32
+echo PASS
+echo
+
+# 4. Requant model without '--keep-split'
+$QUANTIZE --allow-requantize $WORK_PATH/ggml-model-split-00001-of-00006.gguf $WORK_PATH/ggml-model-requant-merge.gguf Q4_K
+echo PASS
+echo
+
+# 4a. Test the requanted model is loading properly
+$MAIN --model $WORK_PATH/ggml-model-requant-merge.gguf --random-prompt --n-predict 32
+echo PASS
+echo
+
+# Clean up
+rm -f $WORK_PATH/ggml-model-split*.gguf $WORK_PATH/ggml-model-requant*.gguf
diff --git a/llama.cpp b/llama.cpp
index 3a84b4916bd30..0f74cb7aad5f3 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -3297,6 +3297,10 @@ struct llama_model_loader {
         return nullptr;
     }

+    const llama_tensor_weight * get_weight(int i) const {
+        return get_weight(get_tensor_name(i));
+    }
+
     const llama_tensor_weight & require_weight(const char * name) const {
         const llama_tensor_weight * weight = get_weight(name);
         if (!weight) {
@@ -14528,26 +14532,74 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
     std::vector<no_init<uint8_t>> work;
     std::vector<no_init<float>> f32_conv_buf;

+    uint16_t n_split = 1;
+    // Assume split index is continuous
+    if (params->keep_split) {
+        for (int i = 0; i < ml.n_tensors; ++i) {
+            n_split = std::max(uint16_t(ml.get_weight(i)->idx+1), n_split);
+        }
+    }
+    std::vector<struct gguf_context *> ctx_outs(n_split, NULL);
+    ctx_outs[0] = ctx_out;
+
     // populate the original tensors so we get an initial meta data
     for (int i = 0; i < ml.n_tensors; ++i) {
-        const struct ggml_tensor * meta = ml.get_tensor_meta(i);
-        gguf_add_tensor(ctx_out, meta);
+        auto weight = ml.get_weight(i);
+        uint16_t i_split = params->keep_split ? weight->idx : 0;
+        struct ggml_tensor * tensor = weight->tensor;
+        if (ctx_outs[i_split] == NULL) {
+            ctx_outs[i_split] = gguf_init_empty();
+        }
+        gguf_add_tensor(ctx_outs[i_split], tensor);
     }

-    std::ofstream fout(fname_out, std::ios::binary);
-    fout.exceptions(std::ofstream::failbit); // fail fast on write errors
-
-    const size_t meta_size = gguf_get_meta_size(ctx_out);
+    // Set split info if needed
+    if (n_split > 1) {
+        for (size_t i = 0; i < ctx_outs.size(); ++i) {
+            gguf_set_val_u16(ctx_outs[i], ml.llm_kv(LLM_KV_SPLIT_NO).c_str(), i);
+            gguf_set_val_u16(ctx_outs[i], ml.llm_kv(LLM_KV_SPLIT_COUNT).c_str(), n_split);
+            gguf_set_val_i32(ctx_outs[i], ml.llm_kv(LLM_KV_SPLIT_TENSORS_COUNT).c_str(), ml.n_tensors);
+        }
+    }

-    LLAMA_LOG_INFO("%s: meta size = %zu bytes\n", __func__, meta_size);
+    int cur_split = -1;
+    std::ofstream fout;
+    auto close_ofstream = [&]() {
+        // Write metadata and close file handler
+        if (fout.is_open()) {
+            fout.seekp(0);
+            std::vector<uint8_t> data(gguf_get_meta_size(ctx_outs[cur_split]));
+            gguf_get_meta_data(ctx_outs[cur_split], data.data());
+            fout.write((const char *) data.data(), data.size());
+            fout.close();
+        }
+    };
+    auto new_ofstream = [&](int index = 0) {
+        cur_split = index;
+        GGML_ASSERT(ctx_outs[cur_split] && "Find uninitialized gguf_context");
+        std::string fname = fname_out;
+        if (params->keep_split) {
+            char split_path[PATH_MAX] = {0};
+            llama_split_path(split_path, sizeof(split_path), fname_out.c_str(), cur_split, n_split);
+            fname = std::string(split_path);
+        }

-    // placeholder for the meta data
-    ::zeros(fout, meta_size);
+        fout = std::ofstream(fname, std::ios::binary);
+        fout.exceptions(std::ofstream::failbit); // fail fast on write errors
+        const size_t meta_size = gguf_get_meta_size(ctx_outs[cur_split]);
+        // placeholder for the meta data
+        ::zeros(fout, meta_size);
+    };

     const auto tn = LLM_TN(model.arch);
-
+    new_ofstream();
     for (int i = 0; i < ml.n_tensors; ++i) {
-        struct ggml_tensor * tensor = ml.get_tensor_meta(i);
+        auto weight = ml.get_weight(i);
+        struct ggml_tensor * tensor = weight->tensor;
+        if (weight->idx != cur_split && params->keep_split) {
+            close_ofstream();
+            new_ofstream(weight->idx);
+        }
         const std::string name = ggml_get_name(tensor);
@@ -14702,26 +14754,18 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
         total_size_new += new_size;

         // update the gguf meta data as we go
-        gguf_set_tensor_type(ctx_out, name.c_str(), new_type);
-        gguf_set_tensor_data(ctx_out, name.c_str(), new_data, new_size);
+        gguf_set_tensor_type(ctx_outs[cur_split], name.c_str(), new_type);
+        gguf_set_tensor_data(ctx_outs[cur_split], name.c_str(), new_data, new_size);

         // write tensor data + padding
         fout.write((const char *) new_data, new_size);
         zeros(fout, GGML_PAD(new_size, align) - new_size);
     }
-
-    // go back to beginning of file and write the updated meta data
-    {
-        fout.seekp(0);
-        std::vector<uint8_t> data(gguf_get_meta_size(ctx_out));
-        gguf_get_meta_data(ctx_out, data.data());
-        fout.write((const char *) data.data(), data.size());
+    close_ofstream();
+    for (auto & c:ctx_outs) {
+        gguf_free(c);
     }
-    fout.close();
-
-    gguf_free(ctx_out);
-
     LLAMA_LOG_INFO("%s: model size  = %8.2f MB\n", __func__, total_size_org/1024.0/1024.0);
     LLAMA_LOG_INFO("%s: quant size  = %8.2f MB\n", __func__, total_size_new/1024.0/1024.0);
@@ -15077,6 +15121,7 @@ struct llama_model_quantize_params llama_model_quantize_default_params() {
         /*.quantize_output_tensor      =*/ true,
         /*.only_copy                   =*/ false,
         /*.pure                        =*/ false,
+        /*.keep_split                  =*/ false,
         /*.imatrix                     =*/ nullptr,
         /*.kv_overrides                =*/ nullptr,
     };
diff --git a/llama.h b/llama.h
index 0eb2a1e9ab0a2..8aa763672be1b 100644
--- a/llama.h
+++ b/llama.h
@@ -288,6 +288,7 @@ extern "C" {
         bool quantize_output_tensor; // quantize output.weight
         bool only_copy;              // only copy tensors - ftype, allow_requantize and quantize_output_tensor are ignored
         bool pure;                   // quantize all tensors to the default type
+        bool keep_split;             // quantize to the same number of shards
         void * imatrix;              // pointer to importance matrix data
         void * kv_overrides;         // pointer to vector containing overrides
     } llama_model_quantize_params;
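A note on the shard naming used by the patch above and its test script: `llama_split_path` renders a 0-based shard index as a 1-based, zero-padded suffix. A minimal sketch of that convention, assuming the `%s-%05d-of-%05d.gguf` format string; the helper name here is illustrative, not the llama.cpp call site:

```cpp
#include <cstddef>
#include <cstdio>

// Mirrors the naming convention behind llama_split_path: the shard index is
// 0-based internally but rendered 1-based and zero-padded in the file name.
static void example_split_path(char * out, size_t n, const char * prefix, int i_split, int n_split) {
    std::snprintf(out, n, "%s-%05d-of-%05d.gguf", prefix, i_split + 1, n_split);
}

int main() {
    char path[512];
    for (int i = 0; i < 6; ++i) {
        example_split_path(path, sizeof(path), "ggml-model-requant", i, 6);
        std::printf("%s\n", path); // ggml-model-requant-00001-of-00006.gguf, ...
    }
    return 0;
}
```

This is why the test script can predict the first shard name (`ggml-model-requant-00001-of-00006.gguf`) from the split count alone.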
From aa750c1ede6232c91de890a14a7731d6daa2bc8e Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Thu, 25 Apr 2024 14:27:20 +0300
Subject: [PATCH 035/101] tests : minor bash stuff (#6902)

* tests : minor bash stuff

ggml-ci

* llama : fix build

ggml-ci

* tests : fix CUR_DIR -> ROOT_DIR

ggml-ci

* tests : fix fname

ggml-ci
---
 ci/run.sh                               |  2 ++
 examples/gguf-split/tests.sh            | 16 ++++++++--------
 examples/quantize/{test.sh => tests.sh} | 16 ++++++++--------
 examples/server/tests/tests.sh          |  7 +++----
 llama.cpp                               |  4 ++--
 5 files changed, 23 insertions(+), 22 deletions(-)
 mode change 100644 => 100755 examples/gguf-split/tests.sh
 rename examples/quantize/{test.sh => tests.sh} (79%)

diff --git a/ci/run.sh b/ci/run.sh
index 085dfd42faf32..a75d0f5e367fa 100755
--- a/ci/run.sh
+++ b/ci/run.sh
@@ -161,6 +161,7 @@ function gg_run_test_scripts_debug {
     set -e

     (cd ./examples/gguf-split && time bash tests.sh "$SRC/build-ci-debug/bin" "$MNT/models") 2>&1 | tee -a $OUT/${ci}-scripts.log
+    (cd ./examples/quantize   && time bash tests.sh "$SRC/build-ci-debug/bin" "$MNT/models") 2>&1 | tee -a $OUT/${ci}-scripts.log

     set +e
 }
@@ -184,6 +185,7 @@ function gg_run_test_scripts_release {
     set -e

     (cd ./examples/gguf-split && time bash tests.sh "$SRC/build-ci-release/bin" "$MNT/models") 2>&1 | tee -a $OUT/${ci}-scripts.log
+    (cd ./examples/quantize   && time bash tests.sh "$SRC/build-ci-release/bin" "$MNT/models") 2>&1 | tee -a $OUT/${ci}-scripts.log

     set +e
 }
diff --git a/examples/gguf-split/tests.sh b/examples/gguf-split/tests.sh
old mode 100644
new mode 100755
index 879522f7e2512..57588204dd629
--- a/examples/gguf-split/tests.sh
+++ b/examples/gguf-split/tests.sh
@@ -4,16 +4,16 @@ set -eu

 if [ $# -lt 1 ]
 then
-  echo "usage: $0 path_to_build_binary [path_to_temp_folder]"
-  echo "example: $0 ../../build/bin ../../tmp"
-  exit 1
+    echo "usage: $0 path_to_build_binary [path_to_temp_folder]"
+    echo "example: $0 ../../build/bin ../../tmp"
+    exit 1
 fi

 if [ $# -gt 1 ]
 then
-  TMP_DIR=$2
+    TMP_DIR=$2
 else
-  TMP_DIR=/tmp
+    TMP_DIR=/tmp
 fi

 set -x
@@ -21,7 +21,7 @@ set -x
 SPLIT=$1/gguf-split
 MAIN=$1/main
 WORK_PATH=$TMP_DIR/gguf-split
-CUR_DIR=$(pwd)
+ROOT_DIR=$(realpath $(dirname $0)/../../)

 mkdir -p "$WORK_PATH"

@@ -30,8 +30,8 @@ rm -f $WORK_PATH/ggml-model-split*.gguf $WORK_PATH/ggml-model-merge*.gguf

 # 1. Get a model
 (
-  cd $WORK_PATH
-  "$CUR_DIR"/../../scripts/hf.sh --repo ggml-org/gemma-1.1-2b-it-Q8_0-GGUF --file gemma-1.1-2b-it.Q8_0.gguf
+cd $WORK_PATH
+"$ROOT_DIR"/scripts/hf.sh --repo ggml-org/gemma-1.1-2b-it-Q8_0-GGUF --file gemma-1.1-2b-it.Q8_0.gguf
 )
 echo PASS

diff --git a/examples/quantize/test.sh b/examples/quantize/tests.sh
similarity index 79%
rename from examples/quantize/test.sh
rename to examples/quantize/tests.sh
index 840f712accfda..160c12bee22e8 100644
--- a/examples/quantize/test.sh
+++ b/examples/quantize/tests.sh
@@ -4,16 +4,16 @@ set -eu

 if [ $# -lt 1 ]
 then
-  echo "usage: $0 path_to_build_binary [path_to_temp_folder]"
-  echo "example: $0 ../../build/bin ../../tmp"
-  exit 1
+    echo "usage: $0 path_to_build_binary [path_to_temp_folder]"
+    echo "example: $0 ../../build/bin ../../tmp"
+    exit 1
 fi

 if [ $# -gt 1 ]
 then
-  TMP_DIR=$2
+    TMP_DIR=$2
 else
-  TMP_DIR=/tmp
+    TMP_DIR=/tmp
 fi

 set -x
@@ -22,7 +22,7 @@ SPLIT=$1/gguf-split
 QUANTIZE=$1/quantize
 MAIN=$1/main
 WORK_PATH=$TMP_DIR/quantize
-CUR_DIR=$(pwd)
+ROOT_DIR=$(realpath $(dirname $0)/../../)

 mkdir -p "$WORK_PATH"

@@ -31,8 +31,8 @@ rm -f $WORK_PATH/ggml-model-split*.gguf $WORK_PATH/ggml-model-requant*.gguf

 # 1. Get a model
 (
-  cd $WORK_PATH
-  "$CUR_DIR"/../../scripts/hf.sh --repo ggml-org/gemma-1.1-2b-it-Q8_0-GGUF --file gemma-1.1-2b-it.Q8_0.gguf
+cd $WORK_PATH
+"$ROOT_DIR"/scripts/hf.sh --repo ggml-org/gemma-1.1-2b-it-Q8_0-GGUF --file gemma-1.1-2b-it.Q8_0.gguf
 )
 echo PASS

diff --git a/examples/server/tests/tests.sh b/examples/server/tests/tests.sh
index 1c6c5695fcf65..72a0fbad827db 100755
--- a/examples/server/tests/tests.sh
+++ b/examples/server/tests/tests.sh
@@ -4,9 +4,8 @@ set -eu

 if [ $# -lt 1 ]
 then
-  # Start @llama.cpp scenario
-  behave --summary --stop --no-capture --exclude 'issues|wrong_usages|passkey' --tags llama.cpp
+    # Start @llama.cpp scenario
+    behave --summary --stop --no-capture --exclude 'issues|wrong_usages|passkey' --tags llama.cpp
 else
-  behave "$@"
+    behave "$@"
 fi
-
diff --git a/llama.cpp b/llama.cpp
index 0f74cb7aad5f3..51aae62af06b4 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -14574,7 +14574,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
             fout.close();
         }
     };
-    auto new_ofstream = [&](int index = 0) {
+    auto new_ofstream = [&](int index) {
         cur_split = index;
         GGML_ASSERT(ctx_outs[cur_split] && "Find uninitialized gguf_context");
         std::string fname = fname_out;
@@ -14592,7 +14592,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
     };

     const auto tn = LLM_TN(model.arch);
-    new_ofstream();
+    new_ofstream(0);
     for (int i = 0; i < ml.n_tensors; ++i) {
         auto weight = ml.get_weight(i);
         struct ggml_tensor * tensor = weight->tensor;

From 54770413c484660d021dd51b5dbacab7880b8827 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Thu, 25 Apr 2024 15:12:28 +0300
Subject: [PATCH 036/101] ggml : fix MIN / MAX macros (#6904)

ggml-ci
---
 ggml-impl.h   | 6 ++++++
 ggml-quants.c | 6 ------
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/ggml-impl.h b/ggml-impl.h
index 2ffacc299e4f1..2087f7ded7b99 100644
--- a/ggml-impl.h
+++ b/ggml-impl.h
@@ -11,6 +11,12 @@
 #include <string.h> // memcpy
 #include <math.h>   // fabsf

+#undef MIN
+#undef MAX
+
+#define MIN(a, b) ((a) < (b) ? (a) : (b))
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+
 #ifdef __cplusplus
 extern "C" {
 #endif
diff --git a/ggml-quants.c b/ggml-quants.c
index 11e11c2196c2c..c147531df1cd5 100644
--- a/ggml-quants.c
+++ b/ggml-quants.c
@@ -14,12 +14,6 @@
 #include <stdlib.h> // for qsort
 #include <stdio.h>  // for GGML_ASSERT

-#undef MIN
-#undef MAX
-
-#define MIN(a, b) ((a) < (b) ? (a) : (b))
-#define MAX(a, b) ((a) > (b) ? (a) : (b))
-
 #define UNUSED GGML_UNUSED

 // some compilers don't provide _mm256_set_m128i, e.g. gcc 7
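Hoisting MIN/MAX into ggml-impl.h with a preceding `#undef` means every translation unit that includes the header agrees on one expansion, even when some other header defined the macro first. A minimal self-contained sketch of the pattern; the "pre-existing" definition below is a made-up stand-in for whatever a platform header might provide:

```cpp
#include <cstdio>

// pretend some earlier header already defined MIN with a slightly different spelling
#define MIN(a, b) (((a) < (b)) ? (a) : (b))

// what ggml-impl.h now does: drop any prior definition before installing its own,
// so there is exactly one definition in play and no redefinition warnings/errors
#undef MIN
#undef MAX
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define MAX(a, b) ((a) > (b) ? (a) : (b))

int main() {
    std::printf("%d %d\n", MIN(3, 5), MAX(3, 5)); // prints: 3 5
    return 0;
}
```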
From 4ab99d8d4762869506bb675b36501fd7c2af00b2 Mon Sep 17 00:00:00 2001
From: Daniel Bevenius
Date: Thu, 25 Apr 2024 14:38:14 +0200
Subject: [PATCH 037/101] clip : rename lerp function to avoid conflict (#6894)

This commit renames the lerp (linear interpolation) function in clip.cpp
to avoid a conflict with the lerp function in the standard C++ library
when using c++20.

The motivation for this change is to enable projects that use c++20 to
be able to compile clip.cpp without having to resort to patching it.

The lerp function was added to cmath in version C++20 (202002L), which
is why this is not causing any issue at the moment, as C++11/C++17 is
currently used by llama.cpp.

I realize that llama.cpp uses either C++11 (or C++17 in the case for
SYCL) but wanted to ask if this would be an acceptable change just the
same.

Refs: https://en.cppreference.com/w/cpp/numeric/lerp
Signed-off-by: Daniel Bevenius
---
 examples/llava/clip.cpp | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp
index e431c7f70fb0e..f568f470c8f5c 100644
--- a/examples/llava/clip.cpp
+++ b/examples/llava/clip.cpp
@@ -1325,7 +1325,7 @@ bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length
 }

 // Linear interpolation between two points
-inline float lerp(float s, float e, float t) {
+inline float clip_lerp(float s, float e, float t) {
     return s + (e - s) * t;
 }
 // Bilinear resize function
@@ -1347,17 +1347,17 @@ static void bilinear_resize(const clip_image_u8& src, clip_image_u8& dst, int ta
             float y_lerp = py - y_floor;

             for (int c = 0; c < 3; c++) {
-                float top = lerp(
+                float top = clip_lerp(
                     static_cast<float>(src.buf[3 * (y_floor * src.nx + x_floor) + c]),
                     static_cast<float>(src.buf[3 * (y_floor * src.nx + (x_floor + 1)) + c]),
                     x_lerp
                 );
-                float bottom = lerp(
+                float bottom = clip_lerp(
                     static_cast<float>(src.buf[3 * ((y_floor + 1) * src.nx + x_floor) + c]),
                     static_cast<float>(src.buf[3 * ((y_floor + 1) * src.nx + (x_floor + 1)) + c]),
                     x_lerp
                 );
-                dst.buf[3 * (y * target_width + x) + c] = static_cast<uint8_t>(lerp(top, bottom, y_lerp));
+                dst.buf[3 * (y * target_width + x) + c] = static_cast<uint8_t>(clip_lerp(top, bottom, y_lerp));
             }
         }
     }
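The clash this rename avoids is easy to reproduce: C++20's `<cmath>` adds `std::lerp`, and in any translation unit where the std namespace is visible unqualified, a global `lerp(float, float, float)` makes unqualified calls ambiguous. A hedged sketch, not taken from clip.cpp itself, showing the renamed function coexisting with `std::lerp` (compile with -std=c++20):

```cpp
#include <cmath>
#include <cstdio>
using namespace std; // makes std::lerp (C++20) visible unqualified

// renamed as in the patch: no name clash with std::lerp;
// keeping the old name 'lerp' would make the calls below ambiguous
inline float clip_lerp(float s, float e, float t) {
    return s + (e - s) * t;
}

int main() {
    // both compute 2.5; with a global 'lerp' the second call would be ambiguous
    std::printf("%.2f %.2f\n", clip_lerp(0.0f, 10.0f, 0.25f), (float) lerp(0.0f, 10.0f, 0.25f));
    return 0;
}
```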
From 51543729ff5b1361ebfb164875c93dff0850d5fe Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Thu, 25 Apr 2024 15:48:25 +0300
Subject: [PATCH 038/101] ggml : fix redefinition of vaddvq_f32 for 32-bit ARM (#6906)

---
 ggml.c | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/ggml.c b/ggml.c
index 086db96af7fcd..f574351f408d6 100644
--- a/ggml.c
+++ b/ggml.c
@@ -858,18 +858,6 @@ ggml_type_traits_t ggml_internal_get_type_traits(enum ggml_type type) {
 // simd mappings
 //

-#if defined(__ARM_NEON)
-#if !defined(__aarch64__)
-
-// 64-bit compatibility
-
-inline static float vaddvq_f32(float32x4_t v) {
-    return vgetq_lane_f32(v, 0) + vgetq_lane_f32(v, 1) + vgetq_lane_f32(v, 2) + vgetq_lane_f32(v, 3);
-}
-
-#endif
-#endif
-
 // we define a common set of C macros which map to specific intrinsics based on the current architecture
 // we then implement the fundamental computation operations below using only these macros
 // adding support for new architectures requires to define the corresponding SIMD macros

From 0ead1f1072fdc70720f92e008a294dc74a826b1d Mon Sep 17 00:00:00 2001
From: slaren
Date: Thu, 25 Apr 2024 15:23:47 +0200
Subject: [PATCH 039/101] llama : check that all the tensor data is in the
 model file (#6885)

* llama : check that all the tensor data is in the model file

* also check for unsigned overflow
---
 llama.cpp | 21 +++++++++++++--------
 1 file changed, 13 insertions(+), 8 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index 51aae62af06b4..a43cde109ef31 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -2999,9 +2999,13 @@ struct llama_model_loader {

         ggml_tensor * tensor;

-        llama_tensor_weight(uint16_t idx, const char * name, const struct gguf_context * gguf_ctx, ggml_tensor * tensor) : idx(idx), tensor(tensor) {
+        llama_tensor_weight(const llama_file * file, uint16_t idx, const char * name, const struct gguf_context * gguf_ctx, ggml_tensor * tensor) : idx(idx), tensor(tensor) {
             const int tensor_idx = gguf_find_tensor(gguf_ctx, name);
             offs = gguf_get_data_offset(gguf_ctx) + gguf_get_tensor_offset(gguf_ctx, tensor_idx);
+
+            if (offs + ggml_nbytes(tensor) < offs || offs + ggml_nbytes(tensor) > file->size) {
+                throw std::runtime_error(format("tensor '%s' data is not within the file bounds, model is corrupted or incomplete", name));
+            }
         }
     };
     std::vector<llama_tensor_weight> weights;
@@ -3040,15 +3044,15 @@ struct llama_model_loader {
         get_key(llm_kv(LLM_KV_GENERAL_ARCHITECTURE), arch_name, false);
         llm_kv = LLM_KV(llm_arch_from_string(arch_name));

+        files.emplace_back(new llama_file(fname.c_str(), "rb"));
+        contexts.emplace_back(ctx);
+
         // Save tensors data offset of the main file.
         // For subsidiary files, `meta` tensor data offset must not be used,
         // so we build a unified tensors index for weights.
         for (ggml_tensor * cur = ggml_get_first_tensor(ctx); cur; cur = ggml_get_next_tensor(ctx, cur)) {
-            weights.emplace_back(0, cur->name, meta, cur);
+            weights.emplace_back(files.back().get(), 0, cur->name, meta, cur);
         }
-        files.emplace_back(new llama_file(fname.c_str(), "rb"));
-        contexts.emplace_back(ctx);
-
         uint16_t n_split = 0;
         get_key(llm_kv(LLM_KV_SPLIT_COUNT), n_split, false);

@@ -3082,12 +3086,13 @@ struct llama_model_loader {
                 throw std::runtime_error(format("%s: failed to load GGUF split from %s\n", __func__, split_path));
             }

+            files.emplace_back(new llama_file(split_path, "rb"));
+            contexts.emplace_back(ctx);
+
             // Save tensors data offset info of the shard.
             for (ggml_tensor * cur = ggml_get_first_tensor(ctx); cur; cur = ggml_get_next_tensor(ctx, cur)) {
-                weights.emplace_back(idx, cur->name, ctx_gguf, cur);
+                weights.emplace_back(files.back().get(), idx, cur->name, ctx_gguf, cur);
             }
-            files.emplace_back(new llama_file(split_path, "rb"));
-            contexts.emplace_back(ctx);

             gguf_free(ctx_gguf);
         }
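The two-sided condition in the patch above is the standard overflow-safe bounds test: in unsigned arithmetic `offs + ggml_nbytes(tensor)` can wrap around, so the `< offs` comparison detects the wrap before the size comparison is trusted. A standalone sketch of the same idiom with illustrative names, not the llama.cpp types:

```cpp
#include <cstdint>
#include <cstdio>

// returns true when [offs, offs + size) lies inside a file of file_size bytes,
// guarding against the unsigned wrap-around that offs + size can produce
static bool range_in_file(uint64_t offs, uint64_t size, uint64_t file_size) {
    uint64_t end = offs + size;
    if (end < offs) {
        return false; // overflow: offs + size wrapped past UINT64_MAX
    }
    return end <= file_size;
}

int main() {
    std::printf("%d\n", range_in_file(100, 50, 200));              // 1: fits
    std::printf("%d\n", range_in_file(100, 200, 200));             // 0: past EOF
    std::printf("%d\n", range_in_file(UINT64_MAX - 10, 100, 200)); // 0: would wrap
    return 0;
}
```

Without the wrap check, a corrupted GGUF header could place `end` back near zero and sail past the size comparison.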
From 3fe0596c1817a6114ffffb6dbfd6c36ca7815dc7 Mon Sep 17 00:00:00 2001
From: BarfingLemurs <128182951+BarfingLemurs@users.noreply.github.com>
Date: Thu, 25 Apr 2024 09:52:28 -0400
Subject: [PATCH 040/101] readme : update model list (#6908)

* Update README.md

* missing space

* llama3 !
---
 README.md | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 72b5a2c881961..bb2ca9dbd6978 100644
--- a/README.md
+++ b/README.md
@@ -93,6 +93,7 @@ Typically finetunes of the base models below are supported as well.

 - [X] LLaMA 🦙
 - [x] LLaMA 2 🦙🦙
+- [x] LLaMA 3 🦙🦙🦙
 - [X] [Mistral 7B](https://huggingface.co/mistralai/Mistral-7B-v0.1)
 - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral)
 - [x] [DBRX](https://huggingface.co/databricks/dbrx-instruct)
@@ -119,8 +120,9 @@ Typically finetunes of the base models below are supported as well.
 - [x] [CodeShell](https://github.com/WisdomShell/codeshell)
 - [x] [Gemma](https://ai.google.dev/gemma)
 - [x] [Mamba](https://github.com/state-spaces/mamba)
+- [x] [Grok-1](https://huggingface.co/keyfan/grok-1-hf)
 - [x] [Xverse](https://huggingface.co/models?search=xverse)
-- [x] [Command-R](https://huggingface.co/CohereForAI/c4ai-command-r-v01)
+- [x] [Command-R models](https://huggingface.co/models?search=CohereForAI/c4ai-command-r)
 - [x] [SEA-LION](https://huggingface.co/models?search=sea-lion)
 - [x] [GritLM-7B](https://huggingface.co/GritLM/GritLM-7B) + [GritLM-8x7B](https://huggingface.co/GritLM/GritLM-8x7B)
 - [x] [OLMo](https://allenai.org/olmo)
@@ -135,6 +137,7 @@ Typically finetunes of the base models below are supported as well.
 - [x] [ShareGPT4V](https://huggingface.co/models?search=Lin-Chen/ShareGPT4V)
 - [x] [MobileVLM 1.7B/3B models](https://huggingface.co/models?search=mobileVLM)
 - [x] [Yi-VL](https://huggingface.co/models?search=Yi-VL)
+- [x] [Mini CPM](https://huggingface.co/models?search=MiniCPM)

 **HTTP server**

From 853d06ffe26621176d60909a6e3f7c4cd067b305 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Thu, 25 Apr 2024 17:06:27 +0300
Subject: [PATCH 041/101] ci : tmp disable slow tests

---
 ci/run.sh | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/ci/run.sh b/ci/run.sh
index a75d0f5e367fa..da05f0d48802a 100755
--- a/ci/run.sh
+++ b/ci/run.sh
@@ -160,8 +160,9 @@ function gg_run_test_scripts_debug {

     set -e

+    # TODO: too slow, run on dedicated node
     (cd ./examples/gguf-split && time bash tests.sh "$SRC/build-ci-debug/bin" "$MNT/models") 2>&1 | tee -a $OUT/${ci}-scripts.log
-    (cd ./examples/quantize && time bash tests.sh "$SRC/build-ci-debug/bin" "$MNT/models") 2>&1 | tee -a $OUT/${ci}-scripts.log
+    #(cd ./examples/quantize && time bash tests.sh "$SRC/build-ci-debug/bin" "$MNT/models") 2>&1 | tee -a $OUT/${ci}-scripts.log

     set +e
 }

From 0871c7cbd1145353ca7c86ca8f54b72e47b6bd63 Mon Sep 17 00:00:00 2001
From: Concedo <39025047+LostRuins@users.noreply.github.com>
Date: Thu, 25 Apr 2024 23:07:37 +0800
Subject: [PATCH 042/101] Add additional debug info and increased ctx sizes,
 fixed a bug loading vulkan config

---
 gpttype_adapter.cpp | 10 +++++++++-
 koboldcpp.py        |  6 +++---
 2 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/gpttype_adapter.cpp b/gpttype_adapter.cpp
index 87da30928d525..1eae5e8c137c1 100644
--- a/gpttype_adapter.cpp
+++ b/gpttype_adapter.cpp
@@ -1876,6 +1876,14 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
     std::fill(last_n_tokens.begin(), last_n_tokens.end(), 0);
     n_past = 0;

+    if (debugmode==1)
+    {
+        std::string outstr = "";
+        printf("\n\n[Debug: Dump Raw Input Tokens, format: %d]\n", file_format);
+        outstr += get_tok_vec_str(embd_inp);
+        printf("%s\n", RemoveBell(outstr).c_str());
+    }
+
     bool is_mamba = (file_format == FileFormat::GGUF_GENERIC && file_format_meta.model_architecture==GGUFArch::ARCH_MAMBA);

     if (file_format == FileFormat::RWKV_1 || file_format==FileFormat::RWKV_2 || is_mamba)
@@ -2011,7 +2019,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
     if (debugmode==1)
     {
         std::string outstr = "";
-        printf("\n[Debug: Dump Input Tokens, format: %d]\n", file_format);
+        printf("\n[Debug: Dump Forwarded Input Tokens, format: %d]\n", file_format);
         outstr += get_tok_vec_str(embd_inp);
         outstr += "\n\n[Debug: n_past="+std::to_string(n_past)+" Context Size = " + std::to_string(current_context_tokens.size()) + "]\n";
         outstr += get_tok_vec_str(current_context_tokens);
diff --git a/koboldcpp.py b/koboldcpp.py
index 13f6f04dd00ca..e2754362e31a4 100644
--- a/koboldcpp.py
+++ b/koboldcpp.py
@@ -1607,7 +1607,7 @@ def hide_tooltip(event):
     # slider data
     blasbatchsize_values = ["-1", "32", "64", "128", "256", "512", "1024", "2048"]
     blasbatchsize_text = ["Don't Batch BLAS","32","64","128","256","512","1024","2048"]
-    contextsize_text = ["256", "512", "1024", "2048", "3072", "4096", "6144", "8192", "12288", "16384", "24576", "32768", "49152", "65536"]
+    contextsize_text = ["256", "512", "1024", "2048", "3072", "4096", "6144", "8192", "12288", "16384", "24576", "32768", "49152", "65536", "98304", "131072"]
     runopts = [opt for lib, opt in lib_option_pairs if file_exists(lib)]
lib_option_pairs if not (opt in runopts)] @@ -2290,7 +2290,7 @@ def import_vars(dict): if str(g) in dict["usecublas"]: gpu_choice_var.set(str(g+1)) break - elif "usevulkan" in dict: + elif "usevulkan" in dict and dict['usevulkan'] is not None: if "noavx2" in dict and dict["noavx2"]: if vulkan_noavx2_option is not None: runopts_var.set(vulkan_noavx2_option) @@ -3260,7 +3260,7 @@ def range_checker(arg: str): compatgroup.add_argument("--noblas", help="Do not use OpenBLAS for accelerated prompt ingestion", action='store_true') parser.add_argument("--gpulayers", help="Set number of layers to offload to GPU when using GPU. Requires GPU.",metavar=('[GPU layers]'), nargs='?', const=1, type=int, default=0) parser.add_argument("--tensor_split", help="For CUDA and Vulkan only, ratio to split tensors across multiple GPUs, space-separated list of proportions, e.g. 7 3", metavar=('[Ratios]'), type=float, nargs='+') - parser.add_argument("--contextsize", help="Controls the memory allocated for maximum context size, only change if you need more RAM for big contexts. (default 2048). Supported values are [256,512,1024,2048,3072,4096,6144,8192,12288,16384,24576,32768,49152,65536]. IF YOU USE ANYTHING ELSE YOU ARE ON YOUR OWN.",metavar=('[256,512,1024,2048,3072,4096,6144,8192,12288,16384,24576,32768,49152,65536]'), type=check_range(int,256,262144), default=2048) + parser.add_argument("--contextsize", help="Controls the memory allocated for maximum context size, only change if you need more RAM for big contexts. (default 2048). Supported values are [256,512,1024,2048,3072,4096,6144,8192,12288,16384,24576,32768,49152,65536,98304,131072]. IF YOU USE ANYTHING ELSE YOU ARE ON YOUR OWN.",metavar=('[256,512,1024,2048,3072,4096,6144,8192,12288,16384,24576,32768,49152,65536,98304,131072]'), type=check_range(int,256,262144), default=2048) parser.add_argument("--ropeconfig", help="If set, uses customized RoPE scaling from configured frequency scale and frequency base (e.g. --ropeconfig 0.25 10000). Otherwise, uses NTK-Aware scaling set automatically based on context size. For linear rope, simply set the freq-scale and ignore the freq-base",metavar=('[rope-freq-scale]', '[rope-freq-base]'), default=[0.0, 10000.0], type=float, nargs='+') #more advanced params parser.add_argument("--blasbatchsize", help="Sets the batch size used in BLAS processing (default 512). 
Setting it to -1 disables BLAS mode, but keeps other benefits like GPU offload.", type=int,choices=[-1,32,64,128,256,512,1024,2048], default=512) From d6e1d44f16e5580cf47f13325ff49960bf13ca37 Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 25 Apr 2024 17:59:03 +0200 Subject: [PATCH 043/101] llama : synchronize before get/set session data (#6911) --- llama.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/llama.cpp b/llama.cpp index a43cde109ef31..0c01a353999b3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -16131,6 +16131,8 @@ struct llama_data_file_context : llama_data_context { * */ static void llama_state_get_data_internal(struct llama_context * ctx, llama_data_context * data_ctx) { + llama_synchronize(ctx); + // copy rng { std::ostringstream rng_ss; @@ -16283,6 +16285,8 @@ size_t llama_state_get_data(struct llama_context * ctx, uint8_t * dst) { // Sets the state reading from the specified source address size_t llama_state_set_data(struct llama_context * ctx, const uint8_t * src) { + llama_synchronize(ctx); + const uint8_t * inp = src; // set rng @@ -16587,6 +16591,8 @@ size_t llama_state_seq_get_size(struct llama_context* ctx, llama_seq_id seq_id) } static size_t llama_state_seq_get_data_internal(struct llama_context * ctx, llama_data_context & data_ctx, llama_seq_id seq_id) { + llama_synchronize(ctx); + const auto & kv_self = ctx->kv_self; GGML_ASSERT(!kv_self.recurrent); // not implemented @@ -16704,6 +16710,8 @@ size_t llama_state_seq_get_data(struct llama_context* ctx, uint8_t* dst, llama_s } size_t llama_state_seq_set_data(struct llama_context * ctx, const uint8_t * src, llama_seq_id dest_seq_id) { + llama_synchronize(ctx); + auto & kv_self = ctx->kv_self; GGML_ASSERT(!kv_self.recurrent); // not implemented From fa0b4ad25208d5ec2df6b998ccb29b49b249e82c Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 25 Apr 2024 18:59:51 +0300 Subject: [PATCH 044/101] cmake : remove obsolete ANDROID check --- CMakeLists.txt | 6 ------ 1 file changed, 6 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 58a1805ba10fd..425100ff8035a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -43,12 +43,6 @@ else() set(LLAMA_METAL_DEFAULT OFF) endif() -if (CMAKE_SYSTEM_NAME MATCHES "ANDROID") - set(LLAMA_LLAMAFILE_DEFAULT OFF) -else() - set(LLAMA_LLAMAFILE_DEFAULT ON) -endif() - # general option(BUILD_SHARED_LIBS "build shared libraries" OFF) option(LLAMA_STATIC "llama: static link libraries" OFF) From dba497e0c1eff27cf0e85d1d73c86fa08e1b5557 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 25 Apr 2024 21:31:17 +0300 Subject: [PATCH 045/101] cmake : restore LLAMA_LLAMAFILE_DEFAULT --- CMakeLists.txt | 2 ++ llama.cpp | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 425100ff8035a..477c5b57c20e7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -43,6 +43,8 @@ else() set(LLAMA_METAL_DEFAULT OFF) endif() +set(LLAMA_LLAMAFILE_DEFAULT ON) + # general option(BUILD_SHARED_LIBS "build shared libraries" OFF) option(LLAMA_STATIC "llama: static link libraries" OFF) diff --git a/llama.cpp b/llama.cpp index 0c01a353999b3..d728bd499d38f 100644 --- a/llama.cpp +++ b/llama.cpp @@ -17653,6 +17653,11 @@ const char * llama_print_system_info(void) { s += "SSSE3 = " + std::to_string(ggml_cpu_has_ssse3()) + " | "; s += "VSX = " + std::to_string(ggml_cpu_has_vsx()) + " | "; s += "MATMUL_INT8 = " + std::to_string(ggml_cpu_has_matmul_int8()) + " | "; +#ifdef GGML_USE_LLAMAFILE + s += "LAMMAFILE = 1 | "; +#else + s += "LAMMAFILE = 0 | "; +#endif 
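// Re: PATCH 043 above ("llama : synchronize before get/set session data").
// With an asynchronous backend, decode work may still be in flight on the
// device when the host starts copying KV-cache memory, so each state
// get/set path now drains the queue first via llama_synchronize(). A
// reduced, self-contained sketch of the hazard and the fix -- std::async
// stands in for a device queue here, and every name below is hypothetical
// rather than real llama.cpp API:

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <future>
#include <vector>

// Toy "backend" whose work completes asynchronously, like a CUDA stream.
struct AsyncBackend {
    std::future<void> pending;

    void submit(std::vector<float> & kv) {
        pending = std::async(std::launch::async, [&kv] {
            for (float & x : kv) x += 1.0f; // pretend this is decode work
        });
    }

    void synchronize() {
        if (pending.valid()) {
            pending.get(); // block until queued work has finished
        }
    }
};

// Analogue of llama_state_get_data(): without the synchronize() call the
// memcpy could observe half-written results from the in-flight job.
size_t state_get_data(AsyncBackend & be, const std::vector<float> & kv, uint8_t * dst) {
    be.synchronize(); // <- the one-line fix the patch applies everywhere
    const size_t n = kv.size() * sizeof(float);
    std::memcpy(dst, kv.data(), n);
    return n;
}

int main() {
    AsyncBackend be;
    std::vector<float> kv(4, 0.0f);
    be.submit(kv);

    std::vector<uint8_t> buf(kv.size() * sizeof(float));
    state_get_data(be, kv, buf.data()); // safe: queue drained before copy
    std::printf("serialized %zu bytes\n", buf.size());
    return 0;
}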
return s.c_str(); } From 46e12c4692a37bdd31a0432fc5153d7d22bc7f72 Mon Sep 17 00:00:00 2001 From: vik Date: Thu, 25 Apr 2024 12:38:31 -0700 Subject: [PATCH 046/101] llava : add support for moondream vision language model (#6899) * add support for moondream vision language model This required making the following changes to the CLIP model: 1. Support for patch embedding bias. 2. Make class embedding and pre-layernorm optional. 3. Add support for post-layernorm. * Update examples/llava/clip.cpp --------- Co-authored-by: Georgi Gerganov --- README.md | 1 + examples/llava/clip.cpp | 71 ++++++++++++++++++++++++++++++++++------- 2 files changed, 61 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index bb2ca9dbd6978..cc667f592f859 100644 --- a/README.md +++ b/README.md @@ -138,6 +138,7 @@ Typically finetunes of the base models below are supported as well. - [x] [MobileVLM 1.7B/3B models](https://huggingface.co/models?search=mobileVLM) - [x] [Yi-VL](https://huggingface.co/models?search=Yi-VL) - [x] [Mini CPM](https://huggingface.co/models?search=MiniCPM) +- [x] [Moondream](https://huggingface.co/vikhyatk/moondream2) **HTTP server** diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index f568f470c8f5c..e3c9bcd4364aa 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -104,6 +104,7 @@ static std::string format(const char * fmt, ...) { #define TN_POS_EMBD "%s.position_embd.weight" #define TN_CLASS_EMBD "v.class_embd" #define TN_PATCH_EMBD "v.patch_embd.weight" +#define TN_PATCH_BIAS "v.patch_embd.bias" #define TN_ATTN_K "%s.blk.%d.attn_k.%s" #define TN_ATTN_Q "%s.blk.%d.attn_q.%s" #define TN_ATTN_V "%s.blk.%d.attn_v.%s" @@ -425,6 +426,7 @@ struct clip_vision_model { // embeddings struct ggml_tensor * class_embedding; struct ggml_tensor * patch_embeddings; + struct ggml_tensor * patch_bias; struct ggml_tensor * position_embeddings; struct ggml_tensor * pre_ln_w; @@ -501,6 +503,11 @@ struct clip_ctx { bool use_gelu = false; int32_t ftype = 1; + bool has_class_embedding = true; + bool has_pre_norm = true; + bool has_post_norm = false; + bool has_patch_bias = false; + struct gguf_context * ctx_gguf; struct ggml_context * ctx_data; @@ -526,7 +533,7 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 const int patch_size = hparams.patch_size; const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); const int num_patches_per_side = image_size / patch_size; GGML_UNUSED(num_patches_per_side); - const int num_positions = num_patches + 1; + const int num_positions = num_patches + (ctx->has_class_embedding ? 
1 : 0); const int hidden_size = hparams.hidden_size; const int n_head = hparams.n_head; const int d_head = hidden_size / n_head; @@ -557,16 +564,23 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 inp = ggml_reshape_3d(ctx0, inp, num_patches, hidden_size, batch_size); inp = ggml_cont(ctx0, ggml_permute(ctx0, inp, 1, 0, 2, 3)); + if (ctx->has_patch_bias) { + // inp = ggml_add(ctx0, inp, ggml_repeat(ctx0, model.patch_bias, inp)); + inp = ggml_add(ctx0, inp, model.patch_bias); + } + // concat class_embeddings and patch_embeddings - struct ggml_tensor * embeddings = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, hidden_size, num_positions, batch_size); + struct ggml_tensor * embeddings = inp; + if (ctx->has_class_embedding) { + embeddings = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, hidden_size, num_positions, batch_size); + embeddings = ggml_acc(ctx0, embeddings, model.class_embedding, + embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], 0); + embeddings = ggml_acc(ctx0, embeddings, inp, + embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], model.class_embedding->nb[1]); + } ggml_set_name(embeddings, "embeddings"); ggml_set_input(embeddings); - embeddings = ggml_acc(ctx0, embeddings, model.class_embedding, - embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], 0); - - embeddings = ggml_acc(ctx0, embeddings, inp, - embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], model.class_embedding->nb[1]); struct ggml_tensor * positions = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, num_positions); ggml_set_name(positions, "positions"); @@ -576,7 +590,7 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 ggml_add(ctx0, embeddings, ggml_get_rows(ctx0, model.position_embeddings, positions)); // pre-layernorm - { + if (ctx->has_pre_norm) { embeddings = ggml_norm(ctx0, embeddings, eps); ggml_set_name(embeddings, "pre_ln"); @@ -664,6 +678,14 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 embeddings = cur; } + // post-layernorm + if (ctx->has_post_norm) { + embeddings = ggml_norm(ctx0, embeddings, eps); + ggml_set_name(embeddings, "post_ln"); + + embeddings = ggml_add(ctx0, ggml_mul(ctx0, embeddings, model.post_ln_w), model.post_ln_b); + } + // llava projector { embeddings = ggml_reshape_2d(ctx0, embeddings, embeddings->ne[0], embeddings->ne[1]); @@ -1148,12 +1170,39 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } + try { + vision_model.class_embedding = get_tensor(new_clip->ctx_data, TN_CLASS_EMBD); + new_clip->has_class_embedding = true; + } catch (const std::exception& e) { + new_clip->has_class_embedding = false; + } + + try { + vision_model.pre_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "weight")); + vision_model.pre_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "bias")); + new_clip->has_pre_norm = true; + } catch (std::exception & e) { + new_clip->has_pre_norm = false; + } + + try { + vision_model.post_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_POST, "v", "weight")); + vision_model.post_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_POST, "v", "bias")); + new_clip->has_post_norm = true; + } catch (std::exception & e) { + new_clip->has_post_norm = false; + } + + try { + vision_model.patch_bias = get_tensor(new_clip->ctx_data, TN_PATCH_BIAS); + new_clip->has_patch_bias = true; + } catch (std::exception & e) { + new_clip->has_patch_bias = false; + } + try { vision_model.patch_embeddings = 
get_tensor(new_clip->ctx_data, TN_PATCH_EMBD); - vision_model.class_embedding = get_tensor(new_clip->ctx_data, TN_CLASS_EMBD); vision_model.position_embeddings = get_tensor(new_clip->ctx_data, format(TN_POS_EMBD, "v")); - vision_model.pre_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "weight")); - vision_model.pre_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "bias")); } catch(const std::exception& e) { LOG_TEE("%s: failed to load vision model tensors\n", __func__); } From 5790c8dac1a4f0aa80b4efee3b962d8c04c829e8 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 26 Apr 2024 09:26:16 +0200 Subject: [PATCH 047/101] bench: server add stop word for PHI-2 (#6916) --- examples/server/bench/script.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/server/bench/script.js b/examples/server/bench/script.js index c4c486cdf93d9..bdf4f5abc87f7 100644 --- a/examples/server/bench/script.js +++ b/examples/server/bench/script.js @@ -90,7 +90,8 @@ export default function () { "model": model, "stream": true, "seed": 42, - "max_tokens": max_tokens + "max_tokens": max_tokens, + "stop": ["<|im_end|>"] // This is temporary for phi-2 base (i.e. not instructed) since the server expects that the model always to emit BOS } const params = {method: 'POST', body: JSON.stringify(payload)}; From 7d641c26ac73956a54b204cabefe85c764823678 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 26 Apr 2024 09:26:59 +0200 Subject: [PATCH 048/101] ci: fix concurrency for pull_request_target (#6917) --- .github/workflows/bench.yml | 2 +- .github/workflows/server.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index d50af0b70e17c..6dc91d27b0b46 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -32,7 +32,7 @@ on: - cron: '04 2 * * *' concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }}-${{ github.event.inputs.sha }} + group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || github.run_id }}-${{ github.event.inputs.sha }} cancel-in-progress: true jobs: diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 3e68a3c8c433f..499611b5a85c6 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -23,7 +23,7 @@ on: - cron: '2 4 * * *' concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} + group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true jobs: From d4a9afc1009f0da88d04b2c5f672d81d5ae94675 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 26 Apr 2024 09:27:49 +0200 Subject: [PATCH 049/101] ci: server: fix python installation (#6918) --- .github/workflows/server.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 499611b5a85c6..955fd9a5a924b 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -100,6 +100,12 @@ jobs: -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON ; cmake --build . 
--config ${{ matrix.build_type }} -j $(nproc) --target server + - name: Python setup + id: setup_python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: Tests dependencies id: test_dependencies run: | From 83b72cb086ce46a33dececc86bfe4648b6120aa8 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 26 Apr 2024 10:41:53 +0300 Subject: [PATCH 050/101] Merge pull request from GHSA-p5mv-gjc5-mwqv * always use calloc clamp n_kv on failure to read a kv * ggml : alternative ctx->header.n_kv update --------- Co-authored-by: slaren --- ggml.c | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/ggml.c b/ggml.c index f574351f408d6..9f99a69d1158b 100644 --- a/ggml.c +++ b/ggml.c @@ -20614,7 +20614,7 @@ static void gguf_free_kv(struct gguf_kv * kv) { } struct gguf_context * gguf_init_empty(void) { - struct gguf_context * ctx = GGML_ALIGNED_MALLOC(sizeof(struct gguf_context)); + struct gguf_context * ctx = GGML_CALLOC(1, sizeof(struct gguf_context)); memcpy(ctx->header.magic, GGUF_MAGIC, sizeof(ctx->header.magic)); ctx->header.version = GGUF_VERSION; @@ -20659,7 +20659,7 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p bool ok = true; - struct gguf_context * ctx = GGML_ALIGNED_MALLOC(sizeof(struct gguf_context)); + struct gguf_context * ctx = GGML_CALLOC(1, sizeof(struct gguf_context)); // read the header { @@ -20696,9 +20696,13 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p // read the kv pairs { - ctx->kv = GGML_MALLOC(ctx->header.n_kv * sizeof(struct gguf_kv)); + const uint64_t n_kv = ctx->header.n_kv; - for (uint64_t i = 0; i < ctx->header.n_kv; ++i) { + // header.n_kv will hold the actual value of pairs that were successfully read in the loop below + ctx->header.n_kv = 0; + ctx->kv = GGML_CALLOC(n_kv, sizeof(struct gguf_kv)); + + for (uint64_t i = 0; i < n_kv; ++i) { struct gguf_kv * kv = &ctx->kv[i]; //fprintf(stderr, "%s: reading kv %d\n", __func__, i); @@ -20747,7 +20751,7 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p return NULL; } - kv->value.arr.data = GGML_MALLOC(kv->value.arr.n * gguf_type_size(kv->value.arr.type)); + kv->value.arr.data = GGML_CALLOC(kv->value.arr.n, gguf_type_size(kv->value.arr.type)); ok = ok && gguf_fread_el(file, kv->value.arr.data, kv->value.arr.n * gguf_type_size(kv->value.arr.type), &offset); } break; @@ -20761,7 +20765,7 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p return NULL; } - kv->value.arr.data = GGML_MALLOC(kv->value.arr.n * sizeof(struct gguf_str)); + kv->value.arr.data = GGML_CALLOC(kv->value.arr.n, sizeof(struct gguf_str)); for (uint64_t j = 0; j < kv->value.arr.n; ++j) { ok = ok && gguf_fread_str(file, &((struct gguf_str *) kv->value.arr.data)[j], &offset); @@ -20777,6 +20781,8 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p if (!ok) { break; } + + ctx->header.n_kv++; } if (!ok) { @@ -20789,7 +20795,7 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p // read the tensor infos { - ctx->infos = GGML_MALLOC(ctx->header.n_tensors * sizeof(struct gguf_tensor_info)); + ctx->infos = GGML_CALLOC(ctx->header.n_tensors, sizeof(struct gguf_tensor_info)); for (uint64_t i = 0; i < ctx->header.n_tensors; ++i) { struct gguf_tensor_info * info = &ctx->infos[i]; @@ -20810,6 +20816,7 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p ok = ok && 
gguf_fread_el (file, &info->type, sizeof(info->type), &offset); ok = ok && gguf_fread_el (file, &info->offset, sizeof(info->offset), &offset); + // TODO: return an error instead of crashing with GGML_ASSERT gguf_tensor_info_sanitize(info); if (!ok) { @@ -21291,7 +21298,7 @@ void gguf_set_arr_data(struct gguf_context * ctx, const char * key, enum gguf_ty ctx->kv[idx].type = GGUF_TYPE_ARRAY; ctx->kv[idx].value.arr.type = type; ctx->kv[idx].value.arr.n = n; - ctx->kv[idx].value.arr.data = GGML_MALLOC(n*gguf_type_size(type)); + ctx->kv[idx].value.arr.data = GGML_CALLOC(n, gguf_type_size(type)); memcpy(ctx->kv[idx].value.arr.data, data, n*gguf_type_size(type)); } @@ -21301,7 +21308,7 @@ void gguf_set_arr_str(struct gguf_context * ctx, const char * key, const char ** ctx->kv[idx].type = GGUF_TYPE_ARRAY; ctx->kv[idx].value.arr.type = GGUF_TYPE_STRING; ctx->kv[idx].value.arr.n = n; - ctx->kv[idx].value.arr.data = GGML_MALLOC(n*sizeof(struct gguf_str)); + ctx->kv[idx].value.arr.data = GGML_CALLOC(n, sizeof(struct gguf_str)); for (int i = 0; i < n; i++) { struct gguf_str * str = &((struct gguf_str *)ctx->kv[idx].value.arr.data)[i]; str->n = strlen(data[i]); @@ -21328,7 +21335,7 @@ void gguf_set_kv(struct gguf_context * ctx, struct gguf_context * src) { case GGUF_TYPE_ARRAY: { if (src->kv[i].value.arr.type == GGUF_TYPE_STRING) { - const char ** data = GGML_MALLOC(src->kv[i].value.arr.n*sizeof(char *)); + const char ** data = GGML_CALLOC(src->kv[i].value.arr.n, sizeof(char *)); for (uint32_t j = 0; j < src->kv[i].value.arr.n; j++) { data[j] = ((struct gguf_str *)src->kv[i].value.arr.data)[j].data; } @@ -21416,7 +21423,7 @@ struct gguf_buf { static struct gguf_buf gguf_buf_init(size_t size) { struct gguf_buf buf = { - /*buf.data =*/ size == 0 ? NULL : GGML_MALLOC(size), + /*buf.data =*/ size == 0 ? NULL : GGML_CALLOC(1, size), /*buf.size =*/ size, /*buf.offset =*/ 0, }; From 9e4e077ec50fde6049b128662c72d37a3c28e34b Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 26 Apr 2024 11:11:51 +0200 Subject: [PATCH 051/101] ci: server: fix python installation (#6922) --- .github/workflows/server.yml | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 955fd9a5a924b..67bbe6cb73fb3 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -58,6 +58,7 @@ jobs: git \ cmake \ python3-pip \ + python3-venv \ curl \ wget \ language-pack-en \ @@ -100,16 +101,13 @@ jobs: -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON ; cmake --build . 
--config ${{ matrix.build_type }} -j $(nproc) --target server - - name: Python setup - id: setup_python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - - name: Tests dependencies - id: test_dependencies + - name: Setup python env + id: pipenv run: | - pip install -r examples/server/tests/requirements.txt + cd examples/server/tests + python3 -m venv venv + source venv/bin/activate + pip install -r requirements.txt - name: Tests id: server_integration_tests From 7f5ff558eed0f732af8f25c2ab0645610bdec80c Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 26 Apr 2024 12:15:30 +0200 Subject: [PATCH 052/101] server: stop generation at `n_ctx_train` if `n_predict` is not set (#6638) * server: cap n_predict if not set to n_ctx_train * server: fix infinite loop * server: infinite loop, move in process_token server: infinite loop: set stop limit to true * minor: spaces * minor: spaces * server: include prompt tokens in the EOS limit --- examples/server/server.cpp | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 3acbd17df2ac7..48ef8ff2a237b 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1207,6 +1207,27 @@ struct server_context { LOG_VERBOSE("eos token found", {}); } + auto n_ctx_train = llama_n_ctx_train(model); + if (slot.params.n_predict < 1 && slot.ga_n == 1 + && slot.n_prompt_tokens + slot.n_decoded >= n_ctx_train) { + LOG_WARNING("n_predict is not set and self-context extend is disabled." + " Limiting generated tokens to n_ctx_train to avoid EOS-less generation infinite loop", { + { "id_slot", slot.id }, + { "params.n_predict", slot.params.n_predict }, + { "slot.n_prompt_tokens", slot.n_prompt_tokens }, + { "slot.n_decoded", slot.n_decoded }, + { "slot.n_predict", slot.n_predict }, + { "n_slots", params.n_parallel }, + { "slot.n_ctx", slot.n_ctx }, + { "n_ctx", n_ctx }, + { "n_ctx_train", n_ctx_train }, + { "ga_n", slot.ga_n }, + }); + slot.truncated = true; + slot.stopped_limit = true; + slot.has_next_token = false; // stop prediction + } + LOG_VERBOSE("next token", { {"id_slot", slot.id}, {"id_task", slot.id_task}, @@ -2141,7 +2162,7 @@ struct server_context { }); // process the created batch of tokens - for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += n_batch) { + for (int32_t i = 0; i < batch.n_tokens; i += n_batch) { const int32_t n_tokens = std::min(n_batch, batch.n_tokens - i); for (auto & slot : slots) { From bbe3c6e76157a5d806fdc155451f0ca8936248ee Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 26 Apr 2024 12:27:25 +0200 Subject: [PATCH 053/101] ci: server: fix python installation (#6925) --- .github/workflows/server.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 67bbe6cb73fb3..a044d6044ce86 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -106,7 +106,7 @@ jobs: run: | cd examples/server/tests python3 -m venv venv - source venv/bin/activate + . 
venv/bin/activate pip install -r requirements.txt - name: Tests From 327682fb97cadc41811b93334ea00bab9c5d4421 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Fri, 26 Apr 2024 20:21:29 +0800 Subject: [PATCH 054/101] undeprecate openblas, since it seems like there are still some valid cases for it --- README.md | 21 +++++++++++---------- koboldcpp.py | 18 +++++++++--------- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index 530b7ddfd6906..7517e40cef4a6 100644 --- a/README.md +++ b/README.md @@ -53,14 +53,15 @@ when you can't use the precompiled binary directly, we provide an automated buil ## OSX and Linux Manual Compiling - Otherwise, you will have to compile your binaries from source. A makefile is provided, simply run `make`. +- If you want you can also link your own install of OpenBLAS manually with `make LLAMA_OPENBLAS=1` - If you want you can also link your own install of CLBlast manually with `make LLAMA_CLBLAST=1`, for this you will need to obtain and link OpenCL and CLBlast libraries. - - For Arch Linux: Install `cblas` and `clblast`. - - For Debian: Install `libclblast-dev`. + - For Arch Linux: Install `cblas` `openblas` and `clblast`. + - For Debian: Install `libclblast-dev` and `libopenblas-dev`. - You can attempt a CuBLAS build with `LLAMA_CUBLAS=1`. You will need CUDA Toolkit installed. Some have also reported success with the CMake file, though that is more for windows. -- For a full featured build (all backends), do `make LLAMA_CLBLAST=1 LLAMA_CUBLAS=1 LLAMA_VULKAN=1` +- For a full featured build (all backends), do `make LLAMA_OPENBLAS=1 LLAMA_CLBLAST=1 LLAMA_CUBLAS=1 LLAMA_VULKAN=1` - After all binaries are built, you can run the python script with the command `koboldcpp.py [ggml_model.bin] [port]` -- Note: OpenBLAS backend is now deprecated and will be removed, as pure CPU is now almost always faster. +- Note: Many OSX users have found that the using Accelerate is actually faster than OpenBLAS. To try, you may wish to run with `--noblas` and compare speeds. ### Arch Linux Packages There are 4 community made AUR packages (Maintained by @AlpinDale) available: [CPU-only](https://aur.archlinux.org/packages/koboldcpp-cpu), [CLBlast](https://aur.archlinux.org/packages/koboldcpp-clblast), [CUBLAS](https://aur.archlinux.org/packages/koboldcpp-cuda), and [HIPBLAS](https://aur.archlinux.org/packages/koboldcpp-hipblas). They are, respectively, for users with no GPU, users with a GPU (vendor-agnostic), users with NVIDIA GPUs, and users with a supported AMD GPU. @@ -88,12 +89,12 @@ You can then run koboldcpp anywhere from the terminal by running `koboldcpp` to - If you want to generate the .exe file, make sure you have the python module PyInstaller installed with pip ('pip install PyInstaller'). - Run the script make_pyinstaller.bat at a regular terminal (or Windows Explorer). - The koboldcpp.exe file will be at your dist folder. -- If you wish to use your own version of the additional Windows libraries (OpenCL, CLBlast), you can do it with: +- If you wish to use your own version of the additional Windows libraries (OpenCL, CLBlast and OpenBLAS), you can do it with: - OpenCL - tested with https://github.com/KhronosGroup/OpenCL-SDK . If you wish to compile it, follow the repository instructions. You will need vcpkg. - CLBlast - tested with https://github.com/CNugteren/CLBlast . If you wish to compile it you will need to reference the OpenCL files. 
It will only generate the ".lib" file if you compile using MSVC. - - Move the respective .lib files to the /lib folder of your project, overwriting the older files. - - Also, replace the existing versions of the corresponding .dll files located in the project directory root - - Make the KoboldCPP project using the instructions above. + - OpenBLAS - tested with https://github.com/xianyi/OpenBLAS . + - Move the respectives .lib files to the /lib folder of your project, overwriting the older files. + - Also, replace the existing versions of the corresponding .dll files located in the project directory root (e.g. libopenblas.dll). - You can attempt a CuBLAS build with using the provided CMake file with visual studio. If you use the CMake file to build, copy the `koboldcpp_cublas.dll` generated into the same directory as the `koboldcpp.py` file. If you are bundling executables, you may need to include CUDA dynamic libraries (such as `cublasLt64_11.dll` and `cublas64_11.dll`) in order for the executable to work correctly on a different PC. - Make the KoboldCPP project using the instructions above. @@ -127,7 +128,7 @@ You can then run koboldcpp anywhere from the terminal by running `koboldcpp` to ## Considerations - For Windows: No installation, single file executable, (It Just Works) -- Since v1.0.6, required libopenblas, however, it was later removed. +- Since v1.0.6, requires libopenblas, the prebuilt windows binaries are included in this repo. If not found, it will fall back to a mode without BLAS. - Since v1.15, requires CLBlast if enabled, the prebuilt windows binaries are included in this repo. If not found, it will fall back to a mode without CLBlast. - Since v1.33, you can set the context size to be above what the model supports officially. It does increases perplexity but should still work well below 4096 even on untuned models. (For GPT-NeoX, GPT-J, and LLAMA models) Customize this with `--ropeconfig`. - Since v1.42, supports GGUF models for LLAMA and Falcon @@ -141,7 +142,7 @@ You can then run koboldcpp anywhere from the terminal by running `koboldcpp` to - The other files are also under the AGPL v3.0 License unless otherwise stated ## Notes -- Generation delay scales linearly with original prompt length. If CLBlast is enabled then prompt ingestion becomes a few times faster. This is automatic on windows, but will require linking on OSX and Linux. Set `--gpulayers` + `--useclblast` or `--usecublas`. +- Generation delay scales linearly with original prompt length. If OpenBLAS is enabled then prompt ingestion becomes about 2-3x faster. This is automatic on windows, but will require linking on OSX and Linux. CLBlast speeds this up even further, and `--gpulayers` + `--useclblast` or `--usecublas` more so. - I have heard of someone claiming a false AV positive report. The exe is a simple pyinstaller bundle that includes the necessary python scripts and dlls to run. 
If this still concerns you, you might wish to rebuild everything from source code using the makefile, and you can rebuild the exe yourself with pyinstaller by using `make_pyinstaller.bat` - API documentation available at `/api` and https://lite.koboldai.net/koboldcpp_api - Supported GGML models (Includes backward compatibility for older versions/legacy GGML models, though some newer features might be unavailable): diff --git a/koboldcpp.py b/koboldcpp.py index e8a779040a4e8..e2754362e31a4 100644 --- a/koboldcpp.py +++ b/koboldcpp.py @@ -1593,15 +1593,15 @@ def hide_tooltip(event): tabcontent = {} lib_option_pairs = [ - (lib_default, "Use CPU"), + (lib_openblas, "Use OpenBLAS"), (lib_clblast, "Use CLBlast"), (lib_cublas, "Use CuBLAS"), (lib_hipblas, "Use hipBLAS (ROCm)"), (lib_vulkan, "Use Vulkan"), - (lib_openblas, "Use OpenBLAS (Deprecated)"), + (lib_default, "Use No BLAS"), (lib_clblast_noavx2, "CLBlast NoAVX2 (Old CPU)"), (lib_vulkan_noavx2, "Vulkan NoAVX2 (Old CPU)"), - (lib_noavx2, "CPU NoAVX2 (Old CPU)"), + (lib_noavx2, "NoAVX2 Mode (Old CPU)"), (lib_failsafe, "Failsafe Mode (Old CPU)")] openblas_option, clblast_option, cublas_option, hipblas_option, vulkan_option, default_option, clblast_noavx2_option, vulkan_noavx2_option, noavx2_option, failsafe_option = (opt if file_exists(lib) or (os.name == 'nt' and file_exists(opt + ".dll")) else None for lib, opt in lib_option_pairs) # slider data @@ -1613,7 +1613,7 @@ def hide_tooltip(event): if not any(runopts): exitcounter = 999 - show_gui_msgbox("No Backends Available!","KoboldCPP couldn't locate any backends to use (i.e CPU, CLBlast, CuBLAS).\n\nTo use the program, please run the 'make' command from the directory.") + show_gui_msgbox("No Backends Available!","KoboldCPP couldn't locate any backends to use (i.e Default, OpenBLAS, CLBlast, CuBLAS).\n\nTo use the program, please run the 'make' command from the directory.") time.sleep(3) sys.exit(2) @@ -1990,7 +1990,7 @@ def changerunmode(a,b,c): # presets selector - makelabel(quick_tab, "Presets:", 1,0,"Select a backend to use.\nCPU runs purely on CPU only.\nCuBLAS runs on Nvidia GPUs, and is much faster.\nCLBlast works on all GPUs but is somewhat slower.\nNoAVX2 and Failsafe modes support older PCs.") + makelabel(quick_tab, "Presets:", 1,0,"Select a backend to use.\nOpenBLAS and NoBLAS runs purely on CPU only.\nCuBLAS runs on Nvidia GPUs, and is much faster.\nCLBlast works on all GPUs but is somewhat slower.\nNoAVX2 and Failsafe modes support older PCs.") runoptbox = ctk.CTkComboBox(quick_tab, values=runopts, width=180,variable=runopts_var, state="readonly") runoptbox.grid(row=1, column=1,padx=8, stick="nw") @@ -2029,7 +2029,7 @@ def changerunmode(a,b,c): hardware_tab = tabcontent["Hardware"] # presets selector - makelabel(hardware_tab, "Presets:", 1,0,"Select a backend to use.\nCPU runs purely on CPU only.\nCuBLAS runs on Nvidia GPUs, and is much faster.\nCLBlast works on all GPUs but is somewhat slower.\nNoAVX2 and Failsafe modes support older PCs.") + makelabel(hardware_tab, "Presets:", 1,0,"Select a backend to use.\nOpenBLAS and NoBLAS runs purely on CPU only.\nCuBLAS runs on Nvidia GPUs, and is much faster.\nCLBlast works on all GPUs but is somewhat slower.\nNoAVX2 and Failsafe modes support older PCs.") runoptbox = ctk.CTkComboBox(hardware_tab, values=runopts, width=180,variable=runopts_var, state="readonly") runoptbox.grid(row=1, column=1,padx=8, stick="nw") runoptbox.set(runopts[0]) # Set to first available option @@ -2206,9 +2206,9 @@ def export_vars(): args.noavx2 = True if 
gpulayers_var.get(): args.gpulayers = int(gpulayers_var.get()) - if runopts_var.get()=="Use CPU": + if runopts_var.get()=="Use No BLAS": args.noblas = True - if runopts_var.get()=="CPU NoAVX2 (Old CPU)": + if runopts_var.get()=="NoAVX2 Mode (Old CPU)": args.noavx2 = True if runopts_var.get()=="Failsafe Mode (Old CPU)": args.noavx2 = True @@ -3257,7 +3257,7 @@ def range_checker(arg: str): compatgroup.add_argument("--usecublas", help="Use CuBLAS for GPU Acceleration. Requires CUDA. Select lowvram to not allocate VRAM scratch buffer. Enter a number afterwards to select and use 1 GPU. Leaving no number will use all GPUs. For hipBLAS binaries, please check YellowRoseCx rocm fork.", nargs='*',metavar=('[lowvram|normal] [main GPU ID] [mmq] [rowsplit]'), choices=['normal', 'lowvram', '0', '1', '2', '3', 'mmq', 'rowsplit']) compatgroup.add_argument("--usevulkan", help="Use Vulkan for GPU Acceleration. Can optionally specify GPU Device ID (e.g. --usevulkan 0).", metavar=('[Device ID]'), nargs='*', type=int, default=None) compatgroup.add_argument("--useclblast", help="Use CLBlast for GPU Acceleration. Must specify exactly 2 arguments, platform ID and device ID (e.g. --useclblast 1 0).", type=int, choices=range(0,9), nargs=2) - compatgroup.add_argument("--noblas", help="((THIS COMMAND IS DEPRECATED AND WILL BE REMOVED SOON))", action='store_true') + compatgroup.add_argument("--noblas", help="Do not use OpenBLAS for accelerated prompt ingestion", action='store_true') parser.add_argument("--gpulayers", help="Set number of layers to offload to GPU when using GPU. Requires GPU.",metavar=('[GPU layers]'), nargs='?', const=1, type=int, default=0) parser.add_argument("--tensor_split", help="For CUDA and Vulkan only, ratio to split tensors across multiple GPUs, space-separated list of proportions, e.g. 7 3", metavar=('[Ratios]'), type=float, nargs='+') parser.add_argument("--contextsize", help="Controls the memory allocated for maximum context size, only change if you need more RAM for big contexts. (default 2048). Supported values are [256,512,1024,2048,3072,4096,6144,8192,12288,16384,24576,32768,49152,65536,98304,131072]. 
IF YOU USE ANYTHING ELSE YOU ARE ON YOUR OWN.",metavar=('[256,512,1024,2048,3072,4096,6144,8192,12288,16384,24576,32768,49152,65536,98304,131072]'), type=check_range(int,256,262144), default=2048) From 4b1c3c98b442a4c84a788fff6323f6fa7e678a5b Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Fri, 26 Apr 2024 10:05:33 -0400 Subject: [PATCH 055/101] llamafile : use 64-bit integers in sgemm (#6928) --- sgemm.cpp | 170 ++++++++++++++++++++++++++---------------------------- sgemm.h | 6 +- 2 files changed, 87 insertions(+), 89 deletions(-) diff --git a/sgemm.cpp b/sgemm.cpp index 531e12af361cc..4e0159804e816 100644 --- a/sgemm.cpp +++ b/sgemm.cpp @@ -50,7 +50,6 @@ #pragma GCC diagnostic ignored "-Wignored-attributes" #include "sgemm.h" -#include #include "ggml-impl.h" #include "ggml-quants.h" @@ -243,23 +242,23 @@ template <> inline __m512 load(const ggml_fp16_t *p) { template class tinyBLAS { public: - tinyBLAS(int k, - const TA *A, int lda, - const TB *B, int ldb, - TC *C, int ldc, + tinyBLAS(int64_t k, + const TA *A, int64_t lda, + const TB *B, int64_t ldb, + TC *C, int64_t ldc, int ith, int nth) : A(A), B(B), C(C), k(k), lda(lda), ldb(ldb), ldc(ldc), ith(ith), nth(nth) { } - void matmul(int m, int n, int task) { + void matmul(int64_t m, int64_t n, int task) { if (task == GGML_TASK_TYPE_COMPUTE) mnpack(0, m, 0, n); } private: - NOINLINE void mnpack(int m0, int m, int n0, int n) { - int mc, nc, mp, np; - switch ((std::min(m - m0, 5) << 4) | std::min(n - n0, 5)) { + NOINLINE void mnpack(int64_t m0, int64_t m, int64_t n0, int64_t n) { + int64_t mc, nc, mp, np; + switch ((MIN(m - m0, 5) << 4) | MIN(n - n0, 5)) { #if VECTOR_REGISTERS == 32 case 0x55: mc = 5; @@ -409,27 +408,27 @@ class tinyBLAS { } template - NOINLINE void gemm(int m0, int m, int n0, int n) { - int ytiles = (m - m0) / RM; - int xtiles = (n - n0) / RN; - int tiles = xtiles * ytiles; - int duty = (tiles + nth - 1) / nth; - int start = duty * ith; - int end = start + duty; + NOINLINE void gemm(int64_t m0, int64_t m, int64_t n0, int64_t n) { + int64_t ytiles = (m - m0) / RM; + int64_t xtiles = (n - n0) / RN; + int64_t tiles = xtiles * ytiles; + int64_t duty = (tiles + nth - 1) / nth; + int64_t start = duty * ith; + int64_t end = start + duty; if (end > tiles) end = tiles; - for (int job = start; job < end; ++job) { - int ii = m0 + job / xtiles * RM; - int jj = n0 + job % xtiles * RN; + for (int64_t job = start; job < end; ++job) { + int64_t ii = m0 + job / xtiles * RM; + int64_t jj = n0 + job % xtiles * RN; D Cv[RN][RM] = {}; - for (int l = 0; l < k; l += KN) - for (int j = 0; j < RN; ++j) - for (int i = 0; i < RM; ++i) + for (int64_t l = 0; l < k; l += KN) + for (int64_t j = 0; j < RN; ++j) + for (int64_t i = 0; i < RM; ++i) Cv[j][i] = madd(load(A + lda * (ii + i) + l), load(B + ldb * (jj + j) + l), Cv[j][i]); - for (int j = 0; j < RN; ++j) - for (int i = 0; i < RM; ++i) + for (int64_t j = 0; j < RN; ++j) + for (int64_t i = 0; i < RM; ++i) C[ldc * (jj + j) + (ii + i)] = hsum(Cv[j][i]); } } @@ -437,10 +436,10 @@ class tinyBLAS { const TA *const A; const TB *const B; TC *const C; - const int k; - const int lda; - const int ldb; - const int ldc; + const int64_t k; + const int64_t lda; + const int64_t ldb; + const int64_t ldc; const int ith; const int nth; }; @@ -452,23 +451,23 @@ class tinyBLAS { template class tinyBLAS_Q0_ARM { public: - tinyBLAS_Q0_ARM(int k, - const TA *A, int lda, - const block_q8_0 *B, int ldb, - float *C, int ldc, + tinyBLAS_Q0_ARM(int64_t k, + const TA *A, int64_t lda, + const block_q8_0 *B, int64_t ldb, + 
float *C, int64_t ldc, int ith, int nth) : A(A), B(B), C(C), k(k), lda(lda), ldb(ldb), ldc(ldc), ith(ith), nth(nth) { } - void matmul(int m, int n, int task) { + void matmul(int64_t m, int64_t n, int task) { if (task == GGML_TASK_TYPE_COMPUTE) mnpack(0, m, 0, n); } private: - NOINLINE void mnpack(int m0, int m, int n0, int n) { - int mc, nc, mp, np; - switch ((std::min(m - m0, 3) << 4) | std::min(n - n0, 3)) { + NOINLINE void mnpack(int64_t m0, int64_t m, int64_t n0, int64_t n) { + int64_t mc, nc, mp, np; + switch ((MIN(m - m0, 3) << 4) | MIN(n - n0, 3ll)) { case 0x33: mc = 3; nc = 3; @@ -524,22 +523,22 @@ class tinyBLAS_Q0_ARM { } template - NOINLINE void gemm(int m0, int m, int n0, int n) { - int ytiles = (m - m0) / RM; - int xtiles = (n - n0) / RN; - int tiles = xtiles * ytiles; - int duty = (tiles + nth - 1) / nth; - int start = duty * ith; - int end = start + duty; + NOINLINE void gemm(int64_t m0, int64_t m, int64_t n0, int64_t n) { + int64_t ytiles = (m - m0) / RM; + int64_t xtiles = (n - n0) / RN; + int64_t tiles = xtiles * ytiles; + int64_t duty = (tiles + nth - 1) / nth; + int64_t start = duty * ith; + int64_t end = start + duty; if (end > tiles) end = tiles; - for (int job = start; job < end; ++job) { - int ii = m0 + job / xtiles * RM; - int jj = n0 + job % xtiles * RN; + for (int64_t job = start; job < end; ++job) { + int64_t ii = m0 + job / xtiles * RM; + int64_t jj = n0 + job % xtiles * RN; float32x4_t Cv[RN][RM] = {}; - for (int l = 0; l < k; ++l) - for (int j = 0; j < RN; ++j) - for (int i = 0; i < RM; ++i) + for (int64_t l = 0; l < k; ++l) + for (int64_t j = 0; j < RN; ++j) + for (int64_t i = 0; i < RM; ++i) Cv[j][i] = vmlaq_n_f32(Cv[j][i], vcvtq_f32_s32(vdotq_s32( vdotq_s32(vdupq_n_s32(0), @@ -549,8 +548,8 @@ class tinyBLAS_Q0_ARM { load_hi(B + ldb * (jj + j) + l))), unhalf(A[lda * (ii + i) + l].d) * unhalf(B[ldb * (jj + j) + l].d)); - for (int j = 0; j < RN; ++j) - for (int i = 0; i < RM; ++i) + for (int64_t j = 0; j < RN; ++j) + for (int64_t i = 0; i < RM; ++i) C[ldc * (jj + j) + (ii + i)] = hsum(Cv[j][i]); } } @@ -577,10 +576,10 @@ class tinyBLAS_Q0_ARM { const TA *const A; const block_q8_0 *const B; float *const C; - const int k; - const int lda; - const int ldb; - const int ldc; + const int64_t k; + const int64_t lda; + const int64_t ldb; + const int64_t ldc; const int ith; const int nth; }; @@ -590,23 +589,23 @@ class tinyBLAS_Q0_ARM { template class tinyBLAS_Q0_AVX2 { public: - tinyBLAS_Q0_AVX2(int k, - const TA *A, int lda, - const TB *B, int ldb, - TC *C, int ldc, + tinyBLAS_Q0_AVX2(int64_t k, + const TA *A, int64_t lda, + const TB *B, int64_t ldb, + TC *C, int64_t ldc, int ith, int nth) : A(A), B(B), C(C), k(k), lda(lda), ldb(ldb), ldc(ldc), ith(ith), nth(nth) { } - void matmul(int m, int n, int task) { + void matmul(int64_t m, int64_t n, int task) { if (task == GGML_TASK_TYPE_COMPUTE) mnpack(0, m, 0, n); } private: - void mnpack(int m0, int m, int n0, int n) { - int mc, nc, mp, np; - switch ((std::min(m - m0, 4) << 4) | std::min(n - n0, 4)) { + void mnpack(int64_t m0, int64_t m, int64_t n0, int64_t n) { + int64_t mc, nc, mp, np; + switch ((MIN(m - m0, 4) << 4) | MIN(n - n0, 4)) { #if VECTOR_REGISTERS == 32 case 0x44: mc = 4; @@ -714,22 +713,22 @@ class tinyBLAS_Q0_AVX2 { } template - NOINLINE void gemm(int m0, int m, int n0, int n) { - int ytiles = (m - m0) / RM; - int xtiles = (n - n0) / RN; - int tiles = xtiles * ytiles; - int duty = (tiles + nth - 1) / nth; - int start = duty * ith; - int end = start + duty; + NOINLINE void gemm(int64_t m0, int64_t m, int64_t 
n0, int64_t n) { + int64_t ytiles = (m - m0) / RM; + int64_t xtiles = (n - n0) / RN; + int64_t tiles = xtiles * ytiles; + int64_t duty = (tiles + nth - 1) / nth; + int64_t start = duty * ith; + int64_t end = start + duty; if (end > tiles) end = tiles; - for (int job = start; job < end; ++job) { - int ii = m0 + job / xtiles * RM; - int jj = n0 + job % xtiles * RN; + for (int64_t job = start; job < end; ++job) { + int64_t ii = m0 + job / xtiles * RM; + int64_t jj = n0 + job % xtiles * RN; __m256 Cv[RN][RM] = {}; - for (int l = 0; l < k; ++l) - for (int j = 0; j < RN; ++j) - for (int i = 0; i < RM; ++i) + for (int64_t l = 0; l < k; ++l) + for (int64_t j = 0; j < RN; ++j) + for (int64_t i = 0; i < RM; ++i) Cv[j][i] = madd(_mm256_set1_ps(unhalf(A[lda * (ii + i) + l].d) * unhalf(B[ldb * (jj + j) + l].d)), updot(_mm256_sign_epi8(load(A + lda * (ii + i) + l), @@ -737,8 +736,8 @@ class tinyBLAS_Q0_AVX2 { _mm256_sign_epi8(load(B + ldb * (jj + j) + l), load(A + lda * (ii + i) + l))), Cv[j][i]); - for (int j = 0; j < RN; ++j) - for (int i = 0; i < RM; ++i) + for (int64_t j = 0; j < RN; ++j) + for (int64_t i = 0; i < RM; ++i) C[ldc * (jj + j) + (ii + i)] = hsum(Cv[j][i]); } } @@ -771,10 +770,10 @@ class tinyBLAS_Q0_AVX2 { const TA *const A; const TB *const B; TC *const C; - const int k; - const int lda; - const int ldb; - const int ldc; + const int64_t k; + const int64_t lda; + const int64_t ldb; + const int64_t ldc; const int ith; const int nth; }; @@ -813,8 +812,8 @@ class tinyBLAS_Q0_AVX2 { * @param Ctype is GGML data type of `C` * @return true if this function was able to service the matmul request */ -bool llamafile_sgemm(int m, int n, int k, const void *A, int lda, const void *B, int ldb, void *C, - int ldc, int ith, int nth, int task, int Atype, int Btype, int Ctype) { +bool llamafile_sgemm(int64_t m, int64_t n, int64_t k, const void *A, int64_t lda, const void *B, int64_t ldb, void *C, + int64_t ldc, int ith, int nth, int task, int Atype, int Btype, int Ctype) { assert(m >= 0); assert(n >= 0); @@ -824,9 +823,6 @@ bool llamafile_sgemm(int m, int n, int k, const void *A, int lda, const void *B, assert(ldc >= m); assert(nth > 0); assert(ith < nth); - assert(1ll * lda * m <= 0x7fffffff); - assert(1ll * ldb * n <= 0x7fffffff); - assert(1ll * ldc * n <= 0x7fffffff); if (Ctype != GGML_TYPE_F32) return false; diff --git a/sgemm.h b/sgemm.h index da23b209c4dd5..f29747d0a477a 100644 --- a/sgemm.h +++ b/sgemm.h @@ -1,11 +1,13 @@ #pragma once +#include #include #ifdef __cplusplus extern "C" { #endif -bool llamafile_sgemm(int, int, int, const void *, int, const void *, int, - void *, int, int, int, int, int, int, int); +bool llamafile_sgemm(int64_t, int64_t, int64_t, const void *, int64_t, + const void *, int64_t, void *, int64_t, int, int, + int, int, int, int); #ifdef __cplusplus } From e2764cd7ca1112d9303eba9e81c9935ee67352ff Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 26 Apr 2024 17:07:42 +0200 Subject: [PATCH 056/101] gguf : fix mismatch between alloc and free functions (#6929) --- ggml.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml.c b/ggml.c index 9f99a69d1158b..34eef23fcf90f 100644 --- a/ggml.c +++ b/ggml.c @@ -20987,7 +20987,7 @@ void gguf_free(struct gguf_context * ctx) { GGML_FREE(ctx->infos); } - GGML_ALIGNED_FREE(ctx); + GGML_FREE(ctx); } const char * gguf_type_name(enum gguf_type type) { From 017e6999b5184234370b22a2f868e1be911e8d88 Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 26 Apr 2024 18:39:58 +0200 Subject: [PATCH 057/101] add basic tensor data 
validation function (#6884) * add basic tensor data validation function * add --check-tensors command line argument tensor validation is disabled by default and can be enabled by adding `--check-tensors` to the command line arguments. quantize always validates tensors. --- common/common.cpp | 6 + common/common.h | 1 + ggml-quants.c | 284 ++++++++++++++++++++++++++++++++++++++++++++++ ggml.h | 2 + llama.cpp | 90 ++++++++++++--- llama.h | 7 +- 6 files changed, 371 insertions(+), 19 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 97f55b053eee1..9ab8aef7ec832 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1089,6 +1089,10 @@ bool gpt_params_find_arg(int argc, char ** argv, const std::string & arg, gpt_pa params.n_print = std::stoi(argv[i]); return true; } + if (arg == "--check-tensors") { + params.check_tensors = true; + return true; + } if (arg == "--ppl-output-type") { if (++i >= argc) { invalid_param = true; @@ -1554,6 +1558,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf(" -ptc N, --print-token-count N\n"); printf(" print token count every N tokens (default: %d)\n", params.n_print); + printf(" --check-tensors check model tensor data for invalid values\n"); printf("\n"); #ifndef LOG_DISABLE_LOGS log_print_usage(); @@ -1774,6 +1779,7 @@ struct llama_model_params llama_model_params_from_gpt_params(const gpt_params & mparams.tensor_split = params.tensor_split; mparams.use_mmap = params.use_mmap; mparams.use_mlock = params.use_mlock; + mparams.check_tensors = params.check_tensors; if (params.kv_overrides.empty()) { mparams.kv_overrides = NULL; } else { diff --git a/common/common.h b/common/common.h index 87361e8e91500..0005f143ef44d 100644 --- a/common/common.h +++ b/common/common.h @@ -161,6 +161,7 @@ struct gpt_params { bool dump_kv_cache = false; // dump the KV cache contents for debugging purposes bool no_kv_offload = false; // disable KV offloading bool warmup = true; // warmup run + bool check_tensors = false; // validate tensor data std::string cache_type_k = "f16"; // KV cache data type for the K std::string cache_type_v = "f16"; // KV cache data type for the V diff --git a/ggml-quants.c b/ggml-quants.c index c147531df1cd5..444d1e55ebd54 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -12383,3 +12383,287 @@ void quantize_row_iq2_s(const float * restrict x, void * restrict vy, int64_t k) block_iq2_s * restrict y = vy; quantize_row_iq2_s_reference(x, y, k); } + +static bool validate_float(float f, size_t i) { + if (isinf(f)) { + fprintf(stderr, "ggml_validate_row_data: found inf value at block %zu\n", i); + return false; + } + + if (isnan(f)) { + fprintf(stderr, "ggml_validate_row_data: found nan value at block %zu\n", i); + return false; + } + + return true; +} + +static bool isinf_fp16(ggml_fp16_t f) { + return (f & 0x7c00) == 0x7c00 && (f & 0x03ff) == 0; +} + +static bool isnan_fp16(ggml_fp16_t f) { + return (f & 0x7c00) == 0x7c00 && (f & 0x03ff) != 0; +} + +static bool validate_fp16(ggml_fp16_t f, size_t i) { + if (isinf_fp16(f)) { + fprintf(stderr, "ggml_validate_row_data: found inf value at block %zu\n", i); + return false; + } + + if (isnan_fp16(f)) { + fprintf(stderr, "ggml_validate_row_data: found nan value at block %zu\n", i); + return false; + } + + return true; +} + +#define VALIDATE_ROW_DATA_D_F16_IMPL(type, data, nb) \ + const type * q = (const type *) (data); \ + for (size_t i = 0; i < (nb); ++i) { 
\ + if (!validate_fp16(q[i].d, i)) { \ + return false; \ + } \ + } + +#define VALIDATE_ROW_DATA_DM_F16_IMPL(type, data, nb, d, m) \ + const type * q = (const type *) (data); \ + for (size_t i = 0; i < (nb); ++i) { \ + if (!validate_fp16(q[i].d, i) || !validate_fp16(q[i].m, i)) { \ + return false; \ + } \ + } + +bool ggml_validate_row_data(enum ggml_type type, const void * data, size_t nbytes) { + if (type < 0 || type >= GGML_TYPE_COUNT) { + fprintf(stderr, "%s: invalid type %d\n", __func__, type); + return false; + } + + if (nbytes % ggml_type_size(type) != 0) { + fprintf(stderr, "%s: invalid size %zu for type %d\n", __func__, nbytes, type); + return false; + } + + const size_t nb = nbytes/ggml_type_size(type); + + switch (type) { + case GGML_TYPE_F16: + { + const ggml_fp16_t * f = (const ggml_fp16_t *) data; + size_t i = 0; +#if defined(__AVX2__) + for (; i + 15 < nb; i += 16) { + __m256i v = _mm256_loadu_si256((const __m256i *)(f + i)); + __m256i vexp = _mm256_and_si256(v, _mm256_set1_epi16(0x7c00)); + __m256i cmp = _mm256_cmpeq_epi16(vexp, _mm256_set1_epi16(0x7c00)); + int mask = _mm256_movemask_epi8(cmp); + if (mask) { + for (size_t j = 0; j < 16; ++j) { + if (!validate_fp16(f[i + j], i + j)) { + return false; + } + } + GGML_UNREACHABLE(); + } + } +#elif defined(__ARM_NEON) + for (; i + 7 < nb; i += 8) { + uint16x8_t v = vld1q_u16(f + i); + uint16x8_t vexp = vandq_u16(v, vdupq_n_u16(0x7c00)); + uint16x8_t cmp = vceqq_u16(vexp, vdupq_n_u16(0x7c00)); + uint64_t mask = vget_lane_u64(vreinterpret_u64_u8(vshrn_n_u16(cmp, 4)), 0); + if (mask) { + for (size_t j = 0; j < 8; ++j) { + if (!validate_fp16(f[i + j], i + j)) { + return false; + } + } + GGML_UNREACHABLE(); + } + } +#endif + for (; i < nb; ++i) { + if (!validate_fp16(f[i], i)) { + return false; + } + } + } break; + case GGML_TYPE_F32: + { + const float * f = (const float *) data; + size_t i = 0; +#if defined(__AVX2__) + for (; i + 7 < nb; i += 8) { + __m256i v = _mm256_loadu_si256((const __m256i *)(f + i)); + __m256i vexp = _mm256_and_si256(v, _mm256_set1_epi32(0x7f800000)); + __m256i cmp = _mm256_cmpeq_epi32(vexp, _mm256_set1_epi32(0x7f800000)); + int mask = _mm256_movemask_epi8(cmp); + if (mask) { + for (size_t j = 0; j < 8; ++j) { + if (!validate_float(f[i + j], i + j)) { + return false; + } + } + GGML_UNREACHABLE(); + } + } +#elif defined(__ARM_NEON) + for (; i + 3 < nb; i += 4) { + uint32x4_t v = vld1q_u32((const uint32_t *)f + i); + uint32x4_t vexp = vandq_u32(v, vdupq_n_u32(0x7f800000)); + uint32x4_t cmp = vceqq_u32(vexp, vdupq_n_u32(0x7f800000)); + uint64_t mask = vget_lane_u64(vreinterpret_u64_u16(vshrn_n_u32(cmp, 8)), 0); + if (mask) { + for (size_t j = 0; j < 4; ++j) { + if (!validate_float(f[i + j], i + j)) { + return false; + } + } + GGML_UNREACHABLE(); + } + } +#endif + for (; i < nb; ++i) { + if (!validate_float(f[i], i)) { + return false; + } + } + } break; + case GGML_TYPE_F64: + { + const double * f = (const double *) data; + for (size_t i = 0; i < nb; ++i) { + if (!validate_float(f[i], i)) { + return false; + } + } + } break; + case GGML_TYPE_Q4_0: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q4_0, data, nb); + } break; + case GGML_TYPE_Q4_1: + { + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q4_1, data, nb, d, m); + } break; + case GGML_TYPE_Q5_0: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q5_0, data, nb); + } break; + case GGML_TYPE_Q5_1: + { + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q5_1, data, nb, d, m); + } break; + case GGML_TYPE_Q8_0: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q8_0, data, nb); + } break; + case GGML_TYPE_Q2_K: + 
{ + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q2_K, data, nb, d, dmin); + } break; + case GGML_TYPE_Q3_K: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q3_K, data, nb); + } break; + case GGML_TYPE_Q4_K: + { + #ifdef GGML_QKK_64 + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q4_K, data, nb, d[0], d[1]); + #else + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q4_K, data, nb, d, dmin); + #endif + } break; + case GGML_TYPE_Q5_K: + { + #ifdef GGML_QKK_64 + VALIDATE_ROW_DATA_D_F16_IMPL(block_q5_K, data, nb); + #else + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q5_K, data, nb, d, dmin); + #endif + } break; + case GGML_TYPE_Q6_K: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q6_K, data, nb); + } break; + case GGML_TYPE_Q8_K: + { + const block_q8_K * q = (const block_q8_K *) data; + for (size_t i = 0; i < nb; ++i) { + if (!validate_float(q[i].d, i)) { + return false; + } + } + } break; + case GGML_TYPE_IQ1_S: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq1_s, data, nb); + } break; + case GGML_TYPE_IQ1_M: + { + const block_iq1_m * q = (const block_iq1_m *) data; + for (size_t i = 0; i < nb; ++i) { + #if QK_K == 64 + if (!validate_fp16(q[i].d, i)) { + return false; + } + #else + iq1m_scale_t scale; + const uint16_t * sc = (const uint16_t *)q[i].scales; + scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000); + if (!validate_fp16(scale.f16, i)) { + return false; + } + #endif + } + } break; + case GGML_TYPE_IQ2_XXS: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq2_xxs, data, nb); + } break; + case GGML_TYPE_IQ2_XS: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq2_xs, data, nb); + } break; + case GGML_TYPE_IQ2_S: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq2_s, data, nb); + } break; + case GGML_TYPE_IQ3_XXS: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq3_xxs, data, nb); + } break; + + case GGML_TYPE_IQ3_S: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq3_s, data, nb); + } break; + case GGML_TYPE_IQ4_XS: + #if QK_K != 64 + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq4_xs, data, nb); + } break; + #endif + // with QK_K == 64, iq4_xs is iq4_nl + case GGML_TYPE_IQ4_NL: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq4_nl, data, nb); + } break; + case GGML_TYPE_I8: + case GGML_TYPE_I16: + case GGML_TYPE_I32: + case GGML_TYPE_I64: + // nothing to validate + break; + default: + { + fprintf(stderr, "%s: invalid type %d\n", __func__, type); + return false; + } + } + + return true; +} diff --git a/ggml.h b/ggml.h index 4d1d77fe9330c..86e5a8dc57ad1 100644 --- a/ggml.h +++ b/ggml.h @@ -762,6 +762,8 @@ extern "C" { // use this to compute the memory overhead of a tensor GGML_API size_t ggml_tensor_overhead(void); + GGML_API bool ggml_validate_row_data(enum ggml_type type, const void * data, size_t nbytes); + // main GGML_API struct ggml_context * ggml_init(struct ggml_init_params params); diff --git a/llama.cpp b/llama.cpp index d728bd499d38f..e5e640016d9a8 100644 --- a/llama.cpp +++ b/llama.cpp @@ -75,6 +75,7 @@ #include #include #include +#include #include #include #include @@ -2985,6 +2986,7 @@ struct llama_model_loader { size_t n_bytes = 0; bool use_mmap = false; + bool check_tensors; llama_files files; llama_ftype ftype; @@ -3018,7 +3020,7 @@ struct llama_model_loader { std::string arch_name; LLM_KV llm_kv = LLM_KV(LLM_ARCH_UNKNOWN); - llama_model_loader(const std::string & fname, bool use_mmap, const struct llama_model_kv_override * param_overrides_p) { + llama_model_loader(const std::string & fname, bool use_mmap, bool check_tensors, const struct llama_model_kv_override * param_overrides_p) { int trace = 0; if (getenv("LLAMA_TRACE")) { 
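// Re: PATCH 057 above (basic tensor data validation). Everything keys off
// the IEEE fp16 bit layout: a half-precision value is non-finite exactly
// when all five exponent bits (mask 0x7c00) are set, and the mantissa then
// separates inf (zero) from nan (non-zero). Doing this once at load or
// quantize time catches corrupted files before they silently poison
// inference. A self-contained scalar sketch of the check -- the patch
// itself adds AVX2/NEON fast paths that test 8-16 lanes per iteration and
// only fall back to the scalar loop when a lane trips. Names below are
// illustrative, not the actual ggml symbols:

#include <cstddef>
#include <cstdint>
#include <cstdio>

static bool fp16_is_inf(uint16_t f) { return (f & 0x7c00) == 0x7c00 && (f & 0x03ff) == 0; }
static bool fp16_is_nan(uint16_t f) { return (f & 0x7c00) == 0x7c00 && (f & 0x03ff) != 0; }

// Scalar equivalent of the patch's validate_fp16() loop over a tensor row.
static bool validate_fp16_row(const uint16_t * data, size_t n) {
    for (size_t i = 0; i < n; ++i) {
        if (fp16_is_inf(data[i]) || fp16_is_nan(data[i])) {
            std::fprintf(stderr, "bad fp16 value at index %zu\n", i);
            return false;
        }
    }
    return true;
}

int main() {
    // 0x3c00 = 1.0, 0x0000 = 0.0, 0x7e00 = nan (exponent all-ones, mantissa != 0)
    const uint16_t row[] = { 0x3c00, 0x0000, 0x7e00 };
    std::printf("valid: %d\n", validate_fp16_row(row, 3) ? 1 : 0); // prints 0
    return 0;
}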
trace = atoi(getenv("LLAMA_TRACE")); @@ -3223,6 +3225,7 @@ struct llama_model_loader { } this->use_mmap = use_mmap; + this->check_tensors = check_tensors; } ~llama_model_loader() { @@ -3481,6 +3484,10 @@ struct llama_model_loader { file->seek(w.offs, SEEK_SET); file->read_raw(cur->data, ggml_nbytes(cur)); } + + if (check_tensors && !ggml_validate_row_data(cur->type, cur->data, ggml_nbytes(cur))) { + throw std::runtime_error(format("tensor '%s' has invalid data", ggml_get_name(cur))); + } } size_t size_done = 0; @@ -3497,6 +3504,8 @@ struct llama_model_loader { GGML_ASSERT(size_data != 0 && "call init_mappings() first"); std::vector> read_buf; + std::vector>> validation_result; + for (struct ggml_tensor * cur = ggml_get_first_tensor(ctx); cur != NULL; cur = ggml_get_next_tensor(ctx, cur)) { const auto * weight = get_weight(ggml_get_name(cur)); if (weight == nullptr) { @@ -3518,37 +3527,66 @@ struct llama_model_loader { if (bufs_mmap.count(weight->idx)) { buf_mmap = bufs_mmap.at(weight->idx); } + uint8_t * data = (uint8_t *) mapping->addr + weight->offs; + + if (check_tensors) { + validation_result.emplace_back(std::async(std::launch::async, [cur, data, n_size] { + return std::make_pair(cur, ggml_validate_row_data(cur->type, data, n_size)); + })); + } + GGML_ASSERT(buf_mmap || cur->data); // either we have a buffer to allocate the tensor in, or it is already allocated if (buf_mmap && cur->data == nullptr) { - ggml_backend_tensor_alloc(buf_mmap, cur, (uint8_t *) mapping->addr + weight->offs); + ggml_backend_tensor_alloc(buf_mmap, cur, data); if (lmlocks) { const auto & lmlock = lmlocks->at(weight->idx); - lmlock->grow_to(weight->offs + ggml_nbytes(cur)); + lmlock->grow_to(weight->offs + n_size); } auto & mmap_used = mmaps_used[weight->idx]; mmap_used.first = std::min(mmap_used.first, weight->offs); mmap_used.second = std::max(mmap_used.second, weight->offs + n_size); } else { - ggml_backend_tensor_set(cur, (uint8_t *) mapping->addr + weight->offs, 0, n_size); + ggml_backend_tensor_set(cur, data, 0, n_size); } } else { GGML_ASSERT(weight->idx < files.size()); const auto & file = files.at(weight->idx); if (ggml_backend_buffer_is_host(cur->buffer)) { file->seek(weight->offs, SEEK_SET); - file->read_raw(cur->data, ggml_nbytes(cur)); + file->read_raw(cur->data, n_size); + if (check_tensors) { + validation_result.emplace_back(std::async(std::launch::async, [cur, n_size] { + return std::make_pair(cur, ggml_validate_row_data(cur->type, cur->data, n_size)); + })); + } } else { - read_buf.resize(ggml_nbytes(cur)); + read_buf.resize(n_size); file->seek(weight->offs, SEEK_SET); - file->read_raw(read_buf.data(), ggml_nbytes(cur)); + file->read_raw(read_buf.data(), n_size); ggml_backend_tensor_set(cur, read_buf.data(), 0, n_size); + if (check_tensors && !ggml_validate_row_data(cur->type, read_buf.data(), n_size)) { + throw std::runtime_error(format("tensor '%s' has invalid data", ggml_get_name(cur))); + } } } size_done += n_size; } + // check validation results + bool validation_failed = false; + for (auto & future : validation_result) { + auto result = future.get(); + if (!result.second) { + LLAMA_LOG_ERROR("%s: tensor '%s' has invalid data\n", __func__, ggml_get_name(result.first)); + validation_failed = true; + } + } + if (validation_failed) { + throw std::runtime_error("found tensors with invalid data"); + } + // check if this is the last call and do final cleanup if (size_done >= size_data) { // unmap offloaded tensors and metadata @@ -5975,7 +6013,7 @@ static bool llm_load_tensors( // Returns 0 on 
success, -1 on error, and -2 on cancellation via llama_progress_callback static int llama_model_load(const std::string & fname, llama_model & model, llama_model_params & params) { try { - llama_model_loader ml(fname, params.use_mmap, params.kv_overrides); + llama_model_loader ml(fname, params.use_mmap, params.check_tensors, params.kv_overrides); model.hparams.vocab_only = params.vocab_only; @@ -14360,14 +14398,20 @@ static ggml_type llama_tensor_get_type(quantize_state_internal & qs, ggml_type n } static size_t llama_tensor_quantize_internal(enum ggml_type new_type, const float * f32_data, void * new_data, const int64_t chunk_size, int64_t nrows, int64_t n_per_row, const float * imatrix, std::vector & workers, const int nthread) { - std::mutex mutex; - int64_t counter = 0; - size_t new_size = 0; if (nthread < 2) { // single-thread - return ggml_quantize_chunk(new_type, f32_data, new_data, 0, nrows, n_per_row, imatrix); + size_t new_size = ggml_quantize_chunk(new_type, f32_data, new_data, 0, nrows, n_per_row, imatrix); + if (!ggml_validate_row_data(new_type, new_data, new_size)) { + throw std::runtime_error("quantized data validation failed"); + } + return new_size; } - auto compute = [&mutex, &counter, &new_size, new_type, f32_data, new_data, chunk_size, + + std::mutex mutex; + int64_t counter = 0; + size_t new_size = 0; + bool valid = true; + auto compute = [&mutex, &counter, &new_size, &valid, new_type, f32_data, new_data, chunk_size, nrows, n_per_row, imatrix]() { const int64_t nrows_per_chunk = chunk_size / n_per_row; size_t local_size = 0; @@ -14382,7 +14426,17 @@ static size_t llama_tensor_quantize_internal(enum ggml_type new_type, const floa } lock.unlock(); const int64_t this_nrow = std::min(nrows - first_row, nrows_per_chunk); - local_size += ggml_quantize_chunk(new_type, f32_data, new_data, first_row * n_per_row, this_nrow, n_per_row, imatrix); + size_t this_size = ggml_quantize_chunk(new_type, f32_data, new_data, first_row * n_per_row, this_nrow, n_per_row, imatrix); + local_size += this_size; + + // validate the quantized data + const size_t row_size = ggml_row_size(new_type, n_per_row); + void * this_data = (char *) new_data + first_row * row_size; + if (!ggml_validate_row_data(new_type, this_data, this_size)) { + std::unique_lock lock(mutex); + valid = false; + break; + } } }; for (int it = 0; it < nthread - 1; ++it) { @@ -14391,6 +14445,9 @@ static size_t llama_tensor_quantize_internal(enum ggml_type new_type, const floa compute(); for (auto & w : workers) { w.join(); } workers.clear(); + if (!valid) { + throw std::runtime_error("quantized data validation failed"); + } return new_size; } @@ -14453,7 +14510,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s auto v = (std::vector*)params->kv_overrides; kv_overrides = v->data(); } - llama_model_loader ml(fname_inp, use_mmap, kv_overrides); + llama_model_loader ml(fname_inp, use_mmap, /*check_tensors*/ true, kv_overrides); ml.init_mappings(false); // no prefetching llama_model model; @@ -14814,7 +14871,7 @@ static int llama_apply_lora_from_file_internal( std::unique_ptr ml; if (path_base_model) { LLAMA_LOG_INFO("%s: loading base model from '%s'\n", __func__, path_base_model); - ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true, /*kv_overrides*/ nullptr)); + ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true, /*check_tensors*/ false, /*kv_overrides*/ nullptr)); ml->init_mappings(/*prefetch*/ false); // no prefetching } @@ -15073,6 +15130,7 @@ struct 
llama_model_params llama_model_default_params() { /*.vocab_only =*/ false, /*.use_mmap =*/ true, /*.use_mlock =*/ false, + /*.check_tensors =*/ false, }; #ifdef GGML_USE_METAL diff --git a/llama.h b/llama.h index 8aa763672be1b..0a79fa765566a 100644 --- a/llama.h +++ b/llama.h @@ -232,9 +232,10 @@ extern "C" { const struct llama_model_kv_override * kv_overrides; // Keep the booleans together to avoid misalignment during copy-by-value. - bool vocab_only; // only load the vocabulary, no weights - bool use_mmap; // use mmap if possible - bool use_mlock; // force system to keep model in RAM + bool vocab_only; // only load the vocabulary, no weights + bool use_mmap; // use mmap if possible + bool use_mlock; // force system to keep model in RAM + bool check_tensors; // validate model tensor data }; struct llama_context_params { From 4ec8a9c57b923dcbc9f4c9ea7ba4091f300924fa Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Sat, 27 Apr 2024 01:12:12 +0800 Subject: [PATCH 058/101] expose stop reason in generation --- expose.h | 1 + gpttype_adapter.cpp | 5 ++ klite.embd | 198 +++++++++++++++++++++++++++++++------------- koboldcpp.py | 63 ++++++++------ 4 files changed, 183 insertions(+), 84 deletions(-) diff --git a/expose.h b/expose.h index 35d9d73361715..fa064b96e071d 100644 --- a/expose.h +++ b/expose.h @@ -97,6 +97,7 @@ struct generation_inputs struct generation_outputs { int status = -1; + int stopreason = stop_reason::INVALID; const char * text; //response will now be stored in c++ allocated memory }; struct token_count_outputs diff --git a/gpttype_adapter.cpp b/gpttype_adapter.cpp index 1eae5e8c137c1..1adf603a1ceee 100644 --- a/gpttype_adapter.cpp +++ b/gpttype_adapter.cpp @@ -1584,6 +1584,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs) printf("\nWarning: KCPP text generation not initialized!\n"); output.text = nullptr; output.status = 0; + output.stopreason = stop_reason::INVALID; generation_finished = true; return output; } @@ -2125,6 +2126,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs) fprintf(stderr, "\nFailed to predict at %d! Check your context buffer sizes!\n",n_past); output.text = nullptr; output.status = 0; + output.stopreason = stop_reason::INVALID; generation_finished = true; return output; } @@ -2334,6 +2336,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs) fprintf(stderr, "\nFailed to eval llava image at %d!\n",n_past); output.text = nullptr; output.status = 0; + output.stopreason = stop_reason::INVALID; generation_finished = true; return output; } @@ -2344,6 +2347,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs) fprintf(stderr, "\nLLAVA image tokens mismatch at %d! 
(%d vs %d tokens)\n",n_past,llavatokenscounted,llavatokensevaled); output.text = nullptr; output.status = 0; + output.stopreason = stop_reason::INVALID; generation_finished = true; return output; } @@ -2381,6 +2385,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs) printf("\nCtxLimit: %d/%d, Process:%.2fs (%.1fms/T = %.2fT/s), Generate:%.2fs (%.1fms/T = %.2fT/s), Total:%.2fs (%.2fT/s)",(int)current_context_tokens.size(),(int)nctx, time1, pt1, ts1, time2, pt2, ts2, (time1 + time2), tokens_per_second); fflush(stdout); output.status = 1; + output.stopreason = last_stop_reason; generation_finished = true; last_eval_time = pt2; last_process_time = pt1; diff --git a/klite.embd b/klite.embd index 356460670a839..02c279ff90fb2 100644 --- a/klite.embd +++ b/klite.embd @@ -7,7 +7,7 @@ Just copy this single static HTML file anywhere and open it in a browser, or fro Please go to https://github.com/LostRuins/lite.koboldai.net for updates on Kobold Lite. If you are submitting a pull request for Lite, PLEASE use the above repo, not the KoboldCpp one. Kobold Lite is under the AGPL v3.0 License unless otherwise exempted. Please do not remove this line. -Current version: 134 +Current version: 135 -Concedo --> @@ -1396,11 +1396,19 @@ Current version: 134 .widelbtn { - font-size: 12px; - height: 24px; - padding: 5px; - margin: 2px; - font-weight: bolder; + font-size: 12px; + height: 24px; + padding: 5px; + margin: 2px; + font-weight: bolder; + } + .wiarrowbtn + { + font-size: 12px; + height: 18px; + padding: 2px; + margin: 0px 1px 0px 1px; + font-weight: bolder; } .wiinputkeycol { @@ -3065,6 +3073,9 @@ Current version: 134 if (custom_kobold_endpoint != "" && data && data.results != null && data.results.length > 0) { synchro_streaming_response += data.results[0].text; synchro_streaming_tokens_left -= tokens_per_tick; + if (data.results[0].finish_reason == "stop") { + last_stop_reason = "stop"; + } //handle some early stopping criterias if (localsettings.opmode == 3) //stop on selfname found @@ -3213,6 +3224,10 @@ Current version: 134 for (let event of chunk) { if (event.event === 'message') { synchro_pending_stream += event.data.token; + if(event.data.finish_reason=="stop") + { + last_stop_reason = "stop"; + } } } } @@ -3614,6 +3629,7 @@ Current version: 134 var custom_claude_model = ""; var uses_cors_proxy = false; //we start off attempting a direct connection. 
switch to proxy if that fails var synchro_polled_response = null; + var last_stop_reason = ""; //update stop reason if known var synchro_pending_stream = ""; //used for token pseduo streaming for kobold api only var waiting_for_autosummary = false; var italics_regex = new RegExp(/\*(\S[^*]+\S)\*/g); //the fallback regex @@ -3633,8 +3649,8 @@ Current version: 134 var welcome = ""; var personal_notes = ""; var logitbiasdict = {}; - var regexreplace_pattern = []; - var regexreplace_replacement = []; + var regexreplace_data = []; + const num_regex_rows = 4; var localsettings = { my_api_key: "0000000000", //put here so it can be saved and loaded in persistent mode @@ -5084,8 +5100,7 @@ Current version: 134 new_save_storyobj.wiinsertlocation = wi_insertlocation; new_save_storyobj.personal_notes = personal_notes; new_save_storyobj.logitbiasdict = JSON.parse(JSON.stringify(logitbiasdict)); - new_save_storyobj.regexreplace_pattern = JSON.parse(JSON.stringify(regexreplace_pattern)); - new_save_storyobj.regexreplace_replacement = JSON.parse(JSON.stringify(regexreplace_replacement)); + new_save_storyobj.regexreplace_data = JSON.parse(JSON.stringify(regexreplace_data)); if (export_settings) { new_save_storyobj.savedsettings = JSON.parse(JSON.stringify(localsettings)); @@ -5257,8 +5272,7 @@ Current version: 134 let old_current_wi = current_wi; let old_extrastopseq = extrastopseq; let old_notes = personal_notes; - let old_regexreplace_pattern = regexreplace_pattern; - let old_regexreplace_replacement = regexreplace_replacement; + let old_regexreplace_data = regexreplace_data; //determine if oldui file or newui file format restart_new_game(false); @@ -5329,11 +5343,18 @@ Current version: 134 if (storyobj.personal_notes) { personal_notes = storyobj.personal_notes; } - if (storyobj.regexreplace_pattern) { - regexreplace_pattern = storyobj.regexreplace_pattern; + //todo: remove temporary backwards compatibility for regex + if (storyobj.regexreplace_pattern && storyobj.regexreplace_replacement) { + let pat = storyobj.regexreplace_pattern; + let rep = storyobj.regexreplace_replacement; + let ll = Math.min(pat.length,rep.length) + for(let i=0;i0) + { + for(let i=0;i0) + if(regexreplace_data && regexreplace_data.length>0) { - for(let i=0;i + Pattern ?The regex pattern to match against any incoming text. Leave blank to disable. + Replacement ?The string to replace matches with. Capture groups are allowed (e.g. $1). To remove all matches, leave this blank. + Both Ways ?If enabled, regex applies for both inputs and outputs, otherwise output only. + `; + let regextable = document.getElementById("regex_replace_table"); + + for(let i=0;i + + + + + `; + } + + regextable.innerHTML = regextablehtml; + + for(let i=0;i 0 && idx < current_wi.length) { + const temp = current_wi[idx - 1]; + current_wi[idx - 1] = current_wi[idx]; + current_wi[idx] = temp; + } + update_wi(); + } + + function down_wi(idx) { + save_wi(); + var ce = current_wi[idx]; + if (idx >= 0 && idx+1 < current_wi.length) { + const temp = current_wi[idx + 1]; + current_wi[idx + 1] = current_wi[idx]; + current_wi[idx] = temp; + } + update_wi(); + } + function add_wi() { save_wi(); var ne = { @@ -13263,8 +13356,10 @@ Current version: 134 let probarr = [100,90,75,50,25,10,5,1]; - selectionhtml += `` + - `` + + selectionhtml += `` + +`` + +`` + +`` + ` ` + ` @@ -15256,25 +15351,10 @@ Current version: 134


    Custom Regex Replace ?Allows transforming incoming text with up to 3 regex patterns, modifying all matches. Replacements will be applied in sequence.
    + class="helptext">Allows transforming incoming text with regex patterns, modifying all matches. Replacements will be applied in sequence.
    - - - - - - - - - - - - - - - - - +
Pattern ?The regex pattern to match against any incoming text. Leave blank to disable. Replacement ?The string to replace matches with. Capture groups are allowed (e.g. $1). To remove all matches, leave this blank.
    +
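For reference, the new stopreason field that this patch adds to generation_outputs in expose.h can be mapped to an OpenAI-style finish_reason string. A minimal C++ sketch, assuming (as the Python mapping in the koboldcpp.py diff below does) that a stopreason of 1 denotes a stop-sequence/EOS hit and any other value is a length cutoff; the helper name is illustrative, not part of the patch:

    // illustrative helper, not part of the patch
    static const char * finish_reason_str(const generation_outputs & out) {
        if (out.status != 1) {
            return "null"; // generation failed or was aborted
        }
        return out.stopreason == 1 ? "stop" : "length";
    }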
    diff --git a/koboldcpp.py b/koboldcpp.py index e2754362e31a4..aadf9a8a84e05 100644 --- a/koboldcpp.py +++ b/koboldcpp.py @@ -95,6 +95,7 @@ class generation_inputs(ctypes.Structure): class generation_outputs(ctypes.Structure): _fields_ = [("status", ctypes.c_int), + ("stopreason", ctypes.c_int), ("text", ctypes.c_char_p)] class sd_load_model_inputs(ctypes.Structure): @@ -493,7 +494,7 @@ def generate(prompt, memory="", images=[], max_length=32, max_context_length=512 if pendingabortkey!="" and pendingabortkey==genkey: print(f"\nDeferred Abort for GenKey: {pendingabortkey}") pendingabortkey = "" - return "" + return {"text":"","status":-1,"stopreason":-1} else: ret = handle.generate(inputs) outstr = "" @@ -504,7 +505,7 @@ def generate(prompt, memory="", images=[], max_length=32, max_context_length=512 sindex = outstr.find(trim_str) if sindex != -1 and trim_str!="": outstr = outstr[:sindex] - return outstr + return {"text":outstr,"status":ret.status,"stopreason":ret.stopreason} def sd_load_model(model_filename): @@ -656,6 +657,7 @@ def string_contains_sequence_substring(inputstr,sequences): start_time = time.time() last_req_time = time.time() last_non_horde_req_time = time.time() +currfinishreason = "null" def transform_genparams(genparams, api_format): #alias all nonstandard alternative names for rep pen. @@ -765,8 +767,9 @@ def log_message(self, format, *args): async def generate_text(self, genparams, api_format, stream_flag): from datetime import datetime - global friendlymodelname, chatcompl_adapter + global friendlymodelname, chatcompl_adapter, currfinishreason is_quiet = args.quiet + currfinishreason = "null" def run_blocking(): #api format 1=basic,2=kai,3=oai,4=oai-chat @@ -812,13 +815,16 @@ def run_blocking(): #api format 1=basic,2=kai,3=oai,4=oai-chat render_special=genparams.get('render_special', False), ) - recvtxt = "" + genout = {"text":"","status":-1,"stopreason":-1} if stream_flag: loop = asyncio.get_event_loop() executor = ThreadPoolExecutor() - recvtxt = await loop.run_in_executor(executor, run_blocking) + genout = await loop.run_in_executor(executor, run_blocking) else: - recvtxt = run_blocking() + genout = run_blocking() + + recvtxt = genout['text'] + currfinishreason = ("length" if (genout['stopreason']!=1) else "stop") #flag instance as non-idle for a while washordereq = genparams.get('genkey', '').startswith('HORDEREQ_') @@ -834,15 +840,15 @@ def run_blocking(): #api format 1=basic,2=kai,3=oai,4=oai-chat elif api_format==3: res = {"id": "cmpl-1", "object": "text_completion", "created": 1, "model": friendlymodelname, "usage": {"prompt_tokens": 100,"completion_tokens": 100,"total_tokens": 200}, - "choices": [{"text": recvtxt, "index": 0, "finish_reason": "length"}]} + "choices": [{"text": recvtxt, "index": 0, "finish_reason": currfinishreason}]} elif api_format==4: res = {"id": "chatcmpl-1", "object": "chat.completion", "created": 1, "model": friendlymodelname, "usage": {"prompt_tokens": 100,"completion_tokens": 100,"total_tokens": 200}, - "choices": [{"index": 0, "message":{"role": "assistant", "content": recvtxt,}, "finish_reason": "length"}]} + "choices": [{"index": 0, "message":{"role": "assistant", "content": recvtxt,}, "finish_reason": currfinishreason}]} elif api_format==5: res = {"caption": end_trim_to_sentence(recvtxt)} else: - res = {"results": [{"text": recvtxt}]} + res = {"results": [{"text": recvtxt, "finish_reason":currfinishreason}]} try: return res @@ -863,7 +869,7 @@ async def send_kai_sse_event(self, data): self.wfile.flush() async def 
handle_sse_stream(self, genparams, api_format): - global friendlymodelname + global friendlymodelname, currfinishreason self.send_response(200) self.send_header("cache-control", "no-cache") self.send_header("connection", "keep-alive") @@ -877,6 +883,9 @@ async def handle_sse_stream(self, genparams, api_format): tokenReserve = "" #keeps fully formed tokens that we cannot send out yet while True: streamDone = handle.has_finished() #exit next loop on done + if streamDone: + sr = handle.get_last_stop_reason() + currfinishreason = ("length" if (sr!=1) else "stop") tokenStr = "" streamcount = handle.get_stream_count() while current_token < streamcount: @@ -893,32 +902,33 @@ async def handle_sse_stream(self, genparams, api_format): incomplete_token_buffer.clear() tokenStr += tokenSeg - if tokenStr!="": + if tokenStr!="" or streamDone: sseq = genparams.get('stop_sequence', []) trimstop = genparams.get('trim_stop', False) if trimstop and not streamDone and string_contains_sequence_substring(tokenStr,sseq): tokenReserve += tokenStr await asyncio.sleep(async_sleep_short) #if a stop sequence could trigger soon, do not send output else: - tokenStr = tokenReserve + tokenStr - tokenReserve = "" + if tokenStr!="": + tokenStr = tokenReserve + tokenStr + tokenReserve = "" - #apply trimming if needed - if trimstop: - for trim_str in sseq: - sindex = tokenStr.find(trim_str) - if sindex != -1 and trim_str!="": - tokenStr = tokenStr[:sindex] + #apply trimming if needed + if trimstop: + for trim_str in sseq: + sindex = tokenStr.find(trim_str) + if sindex != -1 and trim_str!="": + tokenStr = tokenStr[:sindex] - if tokenStr!="": + if tokenStr!="" or streamDone: if api_format == 4: # if oai chat, set format to expected openai streaming response - event_str = json.dumps({"id":"koboldcpp","object":"chat.completion.chunk","created":1,"model":friendlymodelname,"choices":[{"index":0,"finish_reason":"length","delta":{'role':'assistant','content':tokenStr}}]}) + event_str = json.dumps({"id":"koboldcpp","object":"chat.completion.chunk","created":1,"model":friendlymodelname,"choices":[{"index":0,"finish_reason":currfinishreason,"delta":{'role':'assistant','content':tokenStr}}]}) await self.send_oai_sse_event(event_str) elif api_format == 3: # non chat completions - event_str = json.dumps({"id":"koboldcpp","object":"text_completion","created":1,"model":friendlymodelname,"choices":[{"index":0,"finish_reason":"length","text":tokenStr}]}) + event_str = json.dumps({"id":"koboldcpp","object":"text_completion","created":1,"model":friendlymodelname,"choices":[{"index":0,"finish_reason":currfinishreason,"text":tokenStr}]}) await self.send_oai_sse_event(event_str) else: - event_str = json.dumps({"token": tokenStr}) + event_str = json.dumps({"token": tokenStr, "finish_reason":currfinishreason}) await self.send_kai_sse_event(event_str) tokenStr = "" else: @@ -3159,7 +3169,8 @@ def onready_subprocess(): benchprompt = "11111111" for i in range(0,10): #generate massive prompt benchprompt += benchprompt - result = generate(benchprompt,memory="",images=[],max_length=benchlen,max_context_length=benchmaxctx,temperature=0.1,top_k=1,rep_pen=1,use_default_badwordsids=True) + genout = generate(benchprompt,memory="",images=[],max_length=benchlen,max_context_length=benchmaxctx,temperature=0.1,top_k=1,rep_pen=1,use_default_badwordsids=True) + result = genout['text'] result = (result[:5] if len(result)>5 else "") resultok = (result=="11111") t_pp = float(handle.get_last_process_time())*float(benchmaxctx-benchlen)*0.001 @@ -3212,7 +3223,9 @@ def 
run_in_queue(launch_args, input_queue, output_queue): data = input_queue.get() if data['command'] == 'generate': (args, kwargs) = data['data'] - output_queue.put({'command': 'generated text', 'data': generate(*args, **kwargs)}) + genout = generate(*args, **kwargs) + result = genout['text'] + output_queue.put({'command': 'generated text', 'data': result}) time.sleep(0.2) def start_in_seperate_process(launch_args): From 0c4d489e29e53589bf13a801fe7c94b7b546d8f6 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 26 Apr 2024 20:06:33 +0200 Subject: [PATCH 059/101] quantize: add imatrix and dataset metadata in GGUF (#6658) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * imatrix: save the dataset file used in the output file * llama: support kv overrides type string string * common: factorize KV Overrides parsing between common and server * quantize: add imatrix n entries and dataset KV metadata quantize: factorize KV Overrides parsing between common #6656 * llama: remove kv override str_value initialization as it does not compile on some toolchain * quantize: add imatrix m_last_call as `quantize.imatrix.chunks_count` * quantize: add imatrix filename in KV * llama: add llama_model_kv_override_free * common: add llama_model_kv_override_free common: free kv override if used after model loading * llama: finally move the string KV override value to the stack * llama : minor * no need to add a NUL to the std::vector, std::string can be initialized from a pair of iterators. Co-authored-by: slaren * kv override: ensure string termination --------- Co-authored-by: Georgi Gerganov Co-authored-by: slaren --- Makefile | 2 +- common/common.cpp | 88 ++++++++++++++------------ common/common.h | 2 + examples/imatrix/imatrix.cpp | 77 +++++++++++++---------- examples/quantize/CMakeLists.txt | 2 +- examples/quantize/quantize.cpp | 103 ++++++++++++++++++------------- examples/server/server.cpp | 36 +---------- llama.cpp | 35 ++++++----- llama.h | 12 ++-- 9 files changed, 186 insertions(+), 171 deletions(-) diff --git a/Makefile b/Makefile index 24acb80136516..9dc35410a2945 100644 --- a/Makefile +++ b/Makefile @@ -768,7 +768,7 @@ batched-bench: examples/batched-bench/batched-bench.cpp build-info.o ggml. 
$(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -quantize: examples/quantize/quantize.cpp build-info.o ggml.o llama.o $(OBJS) +quantize: examples/quantize/quantize.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) diff --git a/common/common.cpp b/common/common.cpp index 9ab8aef7ec832..d42fa131de3f6 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -234,8 +234,54 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { return result; } +bool parse_kv_override(const char * data, std::vector & overrides) { + const char * sep = strchr(data, '='); + if (sep == nullptr || sep - data >= 128) { + fprintf(stderr, "%s: malformed KV override '%s'\n", __func__, data); + return false; + } + llama_model_kv_override kvo; + std::strncpy(kvo.key, data, sep - data); + kvo.key[sep - data] = 0; + sep++; + if (strncmp(sep, "int:", 4) == 0) { + sep += 4; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; + kvo.val_i64 = std::atol(sep); + } else if (strncmp(sep, "float:", 6) == 0) { + sep += 6; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_FLOAT; + kvo.val_f64 = std::atof(sep); + } else if (strncmp(sep, "bool:", 5) == 0) { + sep += 5; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_BOOL; + if (std::strcmp(sep, "true") == 0) { + kvo.val_bool = true; + } else if (std::strcmp(sep, "false") == 0) { + kvo.val_bool = false; + } else { + fprintf(stderr, "%s: invalid boolean value for KV override '%s'\n", __func__, data); + return false; + } + } else if (strncmp(sep, "str:", 4) == 0) { + sep += 4; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_STR; + if (strlen(sep) > 127) { + fprintf(stderr, "%s: malformed KV override '%s', value cannot exceed 127 chars\n", __func__, data); + return false; + } + strncpy(kvo.val_str, sep, 127); + kvo.val_str[127] = '\0'; + } else { + fprintf(stderr, "%s: invalid type for KV override '%s'\n", __func__, data); + return false; + } + overrides.emplace_back(std::move(kvo)); + return true; +} + bool gpt_params_find_arg(int argc, char ** argv, const std::string & arg, gpt_params & params, int & i, bool & invalid_param) { - llama_sampling_params& sparams = params.sparams; + llama_sampling_params & sparams = params.sparams; if (arg == "-s" || arg == "--seed") { if (++i >= argc) { @@ -1244,47 +1290,11 @@ bool gpt_params_find_arg(int argc, char ** argv, const std::string & arg, gpt_pa invalid_param = true; return true; } - char* sep = strchr(argv[i], '='); - if (sep == nullptr || sep - argv[i] >= 128) { - fprintf(stderr, "error: Malformed KV override: %s\n", argv[i]); - invalid_param = true; - return true; - } - struct llama_model_kv_override kvo; - std::strncpy(kvo.key, argv[i], sep - argv[i]); - kvo.key[sep - argv[i]] = 0; - sep++; - if (strncmp(sep, "int:", 4) == 0) { - sep += 4; - kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; - kvo.int_value = std::atol(sep); - } - else if (strncmp(sep, "float:", 6) == 0) { - sep += 6; - kvo.tag = LLAMA_KV_OVERRIDE_TYPE_FLOAT; - kvo.float_value = std::atof(sep); - } - else if (strncmp(sep, "bool:", 5) == 0) { - sep += 5; - kvo.tag = LLAMA_KV_OVERRIDE_TYPE_BOOL; - if (std::strcmp(sep, "true") == 0) { - kvo.bool_value = true; - } - else if (std::strcmp(sep, "false") == 0) { - kvo.bool_value = false; - } - else { - fprintf(stderr, "error: Invalid boolean value for KV override: %s\n", argv[i]); - invalid_param = true; - return true; - } - } - else { + if 
(!parse_kv_override(argv[i], params.kv_overrides)) { fprintf(stderr, "error: Invalid type for KV override: %s\n", argv[i]); invalid_param = true; return true; } - params.kv_overrides.push_back(kvo); return true; } #ifndef LOG_DISABLE_LOGS @@ -1555,7 +1565,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" path to dynamic lookup cache to use for lookup decoding (updated by generation)\n"); printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); - printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); + printf(" types: int, float, bool, str. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf(" -ptc N, --print-token-count N\n"); printf(" print token count every N tokens (default: %d)\n", params.n_print); printf(" --check-tensors check model tensor data for invalid values\n"); diff --git a/common/common.h b/common/common.h index 0005f143ef44d..96a28a6ce1b7f 100644 --- a/common/common.h +++ b/common/common.h @@ -171,6 +171,8 @@ struct gpt_params { std::string image = ""; // path to an image file }; +bool parse_kv_override(const char * data, std::vector & overrides); + bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params); bool gpt_params_parse(int argc, char ** argv, gpt_params & params); diff --git a/examples/imatrix/imatrix.cpp b/examples/imatrix/imatrix.cpp index 98c0e93e4647f..71e7a727f1943 100644 --- a/examples/imatrix/imatrix.cpp +++ b/examples/imatrix/imatrix.cpp @@ -23,6 +23,7 @@ struct Stats { }; struct StatParams { + std::string dataset; std::string ofile = "imatrix.dat"; int n_output_frequency = 10; int verbosity = 1; @@ -46,7 +47,7 @@ class IMatrixCollector { std::vector m_src1_data; std::vector m_ids; // the expert ids from ggml_mul_mat_id // - void save_imatrix(const char * file_name) const; + void save_imatrix(const char * file_name, const char * dataset) const; void keep_imatrix(int ncall) const; }; @@ -199,7 +200,7 @@ bool IMatrixCollector::collect_imatrix(struct ggml_tensor * t, bool ask, void * } void IMatrixCollector::save_imatrix() const { - save_imatrix(m_params.ofile.empty() ? "imatrix.dat" : m_params.ofile.c_str()); + save_imatrix(m_params.ofile.empty() ? 
"imatrix.dat" : m_params.ofile.c_str(), m_params.dataset.c_str()); } void IMatrixCollector::keep_imatrix(int ncall) const { @@ -207,24 +208,33 @@ void IMatrixCollector::keep_imatrix(int ncall) const { if (file_name.empty()) file_name = "imatrix.dat"; file_name += ".at_"; file_name += std::to_string(ncall); - save_imatrix(file_name.c_str()); + save_imatrix(file_name.c_str(), m_params.dataset.c_str()); } -void IMatrixCollector::save_imatrix(const char * fname) const { +void IMatrixCollector::save_imatrix(const char * fname, const char * dataset) const { std::ofstream out(fname, std::ios::binary); int n_entries = m_stats.size(); - out.write((const char*)&n_entries, sizeof(n_entries)); - for (auto& p : m_stats) { + out.write((const char *) &n_entries, sizeof(n_entries)); + for (const auto & p : m_stats) { int len = p.first.size(); - out.write((const char*)&len, sizeof(len)); + out.write((const char *) &len, sizeof(len)); out.write(p.first.c_str(), len); - out.write((const char*)&p.second.ncall, sizeof(p.second.ncall)); + out.write((const char *) &p.second.ncall, sizeof(p.second.ncall)); int nval = p.second.values.size(); - out.write((const char*)&nval, sizeof(nval)); - if (nval > 0) out.write((const char*)p.second.values.data(), nval*sizeof(float)); + out.write((const char *) &nval, sizeof(nval)); + if (nval > 0) out.write((const char *) p.second.values.data(), nval * sizeof(float)); } + + // Write the number of call the matrix was computed with + out.write((const char *) &m_last_call, sizeof(m_last_call)); + + // Write the dataset name at the end of the file to later on specify it in quantize + int n_dataset = strlen(dataset); + out.write((const char *) &n_dataset, sizeof(n_dataset)); + out.write(dataset, n_dataset); + if (m_params.verbosity > 0) { - fprintf(stderr, "\n%s: stored collected data after %d chunks in %s\n",__func__,m_last_call,fname); + fprintf(stderr, "\n%s: stored collected data after %d chunks in %s\n", __func__, m_last_call, fname); } } @@ -547,6 +557,29 @@ int main(int argc, char ** argv) { } } + gpt_params params; + params.n_batch = 512; + if (!gpt_params_parse(args.size(), args.data(), params)) { + return 1; + } + + params.logits_all = true; + params.n_batch = std::min(params.n_batch, params.n_ctx); + + print_build_info(); + + if (params.seed == LLAMA_DEFAULT_SEED) { + params.seed = time(NULL); + } + + fprintf(stderr, "%s: seed = %u\n", __func__, params.seed); + + std::mt19937 rng(params.seed); + if (params.random_prompt) { + params.prompt = gpt_random_prompt(rng); + } + + sparams.dataset = params.prompt_file; g_collector.set_parameters(std::move(sparams)); if (!combine_files.empty()) { @@ -585,28 +618,6 @@ int main(int argc, char ** argv) { } } - gpt_params params; - params.n_batch = 512; - if (!gpt_params_parse(args.size(), args.data(), params)) { - return 1; - } - - params.logits_all = true; - params.n_batch = std::min(params.n_batch, params.n_ctx); - - print_build_info(); - - if (params.seed == LLAMA_DEFAULT_SEED) { - params.seed = time(NULL); - } - - fprintf(stderr, "%s: seed = %u\n", __func__, params.seed); - - std::mt19937 rng(params.seed); - if (params.random_prompt) { - params.prompt = gpt_random_prompt(rng); - } - llama_backend_init(); llama_numa_init(params.numa); diff --git a/examples/quantize/CMakeLists.txt b/examples/quantize/CMakeLists.txt index 6f374a2bd3b46..6b977fde86ab2 100644 --- a/examples/quantize/CMakeLists.txt +++ b/examples/quantize/CMakeLists.txt @@ -1,6 +1,6 @@ set(TARGET quantize) add_executable(${TARGET} quantize.cpp) install(TARGETS ${TARGET} 
RUNTIME) -target_link_libraries(${TARGET} PRIVATE llama build_info ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE llama common ${CMAKE_THREAD_LIBS_INIT}) target_include_directories(${TARGET} PRIVATE ../../common) target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index da1850dfdf517..432cc2b4feadf 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -8,7 +8,6 @@ #include #include #include -#include struct quant_option { std::string name; @@ -53,6 +52,10 @@ static const std::vector QUANT_OPTIONS = { { "COPY", LLAMA_FTYPE_ALL_F32, "only copy tensors, no quantizing", }, }; +static const char * const LLM_KV_QUANTIZE_IMATRIX_FILE = "quantize.imatrix.file"; +static const char * const LLM_KV_QUANTIZE_IMATRIX_DATASET = "quantize.imatrix.dataset"; +static const char * const LLM_KV_QUANTIZE_IMATRIX_N_ENTRIES = "quantize.imatrix.entries_count"; +static const char * const LLM_KV_QUANTIZE_IMATRIX_N_CHUNKS = "quantize.imatrix.chunks_count"; static bool try_parse_ftype(const std::string & ftype_str_in, llama_ftype & ftype, std::string & ftype_str_out) { std::string ftype_str; @@ -113,7 +116,7 @@ static void usage(const char * executable) { exit(1); } -static void load_imatrix(const std::string & imatrix_file, std::unordered_map> & imatrix_data) { +static int load_imatrix(const std::string & imatrix_file, std::string & imatrix_dataset, std::unordered_map> & imatrix_data) { std::ifstream in(imatrix_file.c_str(), std::ios::binary); if (!in) { printf("%s: failed to open %s\n",__func__, imatrix_file.c_str()); @@ -160,18 +163,33 @@ static void load_imatrix(const std::string & imatrix_file, std::unordered_map dataset_as_vec(dataset_len); + in.read(dataset_as_vec.data(), dataset_len); + imatrix_dataset.assign(dataset_as_vec.begin(), dataset_as_vec.end()); + printf("%s: imatrix dataset='%s'\n", __func__, imatrix_dataset.c_str()); + } + printf("%s: loaded %d importance matrix entries from %s computed on %d chunks\n", __func__, int(imatrix_data.size()), imatrix_file.c_str(), m_last_call); + return m_last_call; } -static void prepare_imatrix(const std::string & imatrix_file, +static int prepare_imatrix(const std::string & imatrix_file, + std::string & imatrix_dataset, const std::vector & included_weights, const std::vector & excluded_weights, std::unordered_map> & imatrix_data) { + int m_last_call = -1; if (!imatrix_file.empty()) { - load_imatrix(imatrix_file, imatrix_data); + m_last_call = load_imatrix(imatrix_file, imatrix_dataset, imatrix_data); } if (imatrix_data.empty()) { - return; + return m_last_call; } if (!excluded_weights.empty()) { for (auto& name : excluded_weights) { @@ -197,6 +215,7 @@ static void prepare_imatrix(const std::string & imatrix_file, if (!imatrix_data.empty()) { printf("%s: have %d importance matrix entries\n", __func__, int(imatrix_data.size())); } + return m_last_call; } static ggml_type parse_ggml_type(const char * arg) { @@ -211,43 +230,6 @@ static ggml_type parse_ggml_type(const char * arg) { return result; } -static bool parse_kv_override(const char * data, std::vector & overrides) { - const char* sep = strchr(data, '='); - if (sep == nullptr || sep - data >= 128) { - fprintf(stderr, "%s: malformed KV override '%s'\n", __func__, data); - return false; - } - llama_model_kv_override kvo; - std::strncpy(kvo.key, data, sep - data); - kvo.key[sep - data] = 0; - sep++; - if (strncmp(sep, "int:", 4) == 0) { - sep += 4; - kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; - 
kvo.int_value = std::atol(sep); - } else if (strncmp(sep, "float:", 6) == 0) { - sep += 6; - kvo.tag = LLAMA_KV_OVERRIDE_TYPE_FLOAT; - kvo.float_value = std::atof(sep); - } else if (strncmp(sep, "bool:", 5) == 0) { - sep += 5; - kvo.tag = LLAMA_KV_OVERRIDE_TYPE_BOOL; - if (std::strcmp(sep, "true") == 0) { - kvo.bool_value = true; - } else if (std::strcmp(sep, "false") == 0) { - kvo.bool_value = false; - } else { - fprintf(stderr, "%s: invalid boolean value for KV override '%s'\n", __func__, data); - return false; - } - } else { - fprintf(stderr, "%s: invalid type for KV override '%s'\n", __func__, data); - return false; - } - overrides.emplace_back(std::move(kvo)); - return true; -} - int main(int argc, char ** argv) { if (argc < 3) { usage(argv[0]); @@ -316,10 +298,43 @@ int main(int argc, char ** argv) { usage(argv[0]); } + std::string imatrix_dataset; std::unordered_map> imatrix_data; - prepare_imatrix(imatrix_file, included_weights, excluded_weights, imatrix_data); + int m_last_call = prepare_imatrix(imatrix_file, imatrix_dataset, included_weights, excluded_weights, imatrix_data); if (!imatrix_data.empty()) { params.imatrix = &imatrix_data; + { + llama_model_kv_override kvo; + std::strcpy(kvo.key, LLM_KV_QUANTIZE_IMATRIX_FILE); + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_STR; + strncpy(kvo.val_str, imatrix_file.c_str(), 127); + kvo.val_str[127] = '\0'; + kv_overrides.emplace_back(std::move(kvo)); + } + if (!imatrix_dataset.empty()) { + llama_model_kv_override kvo; + std::strcpy(kvo.key, LLM_KV_QUANTIZE_IMATRIX_DATASET); + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_STR; + strncpy(kvo.val_str, imatrix_dataset.c_str(), 127); + kvo.val_str[127] = '\0'; + kv_overrides.emplace_back(std::move(kvo)); + } + + { + llama_model_kv_override kvo; + std::strcpy(kvo.key, LLM_KV_QUANTIZE_IMATRIX_N_ENTRIES); + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; + kvo.val_i64 = imatrix_data.size(); + kv_overrides.emplace_back(std::move(kvo)); + } + + if (m_last_call > 0) { + llama_model_kv_override kvo; + std::strcpy(kvo.key, LLM_KV_QUANTIZE_IMATRIX_N_CHUNKS); + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; + kvo.val_i64 = m_last_call; + kv_overrides.emplace_back(std::move(kvo)); + } } if (!kv_overrides.empty()) { kv_overrides.emplace_back(); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 48ef8ff2a237b..6f8ba3fc65d9f 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2392,7 +2392,7 @@ static void server_print_usage(const char * argv0, const gpt_params & params, co printf(" -n, --n-predict maximum tokens to predict (default: %d)\n", params.n_predict); printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); - printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); + printf(" types: int, float, bool, str. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`\n"); printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`\n"); printf(" --chat-template JINJA_TEMPLATE\n"); @@ -2823,43 +2823,11 @@ static void server_params_parse(int argc, char ** argv, server_params & sparams, invalid_param = true; break; } - char * sep = strchr(argv[i], '='); - if (sep == nullptr || sep - argv[i] >= 128) { - fprintf(stderr, "error: Malformed KV override: %s\n", argv[i]); - invalid_param = true; - break; - } - - struct llama_model_kv_override kvo; - std::strncpy(kvo.key, argv[i], sep - argv[i]); - kvo.key[sep - argv[i]] = 0; - sep++; - if (strncmp(sep, "int:", 4) == 0) { - sep += 4; - kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; - kvo.int_value = std::atol(sep); - } else if (strncmp(sep, "float:", 6) == 0) { - sep += 6; - kvo.tag = LLAMA_KV_OVERRIDE_TYPE_FLOAT; - kvo.float_value = std::atof(sep); - } else if (strncmp(sep, "bool:", 5) == 0) { - sep += 5; - kvo.tag = LLAMA_KV_OVERRIDE_TYPE_BOOL; - if (std::strcmp(sep, "true") == 0) { - kvo.bool_value = true; - } else if (std::strcmp(sep, "false") == 0) { - kvo.bool_value = false; - } else { - fprintf(stderr, "error: Invalid boolean value for KV override: %s\n", argv[i]); - invalid_param = true; - break; - } - } else { + if (!parse_kv_override(argv[i], params.kv_overrides)) { fprintf(stderr, "error: Invalid type for KV override: %s\n", argv[i]); invalid_param = true; break; } - params.kv_overrides.push_back(kvo); } else { fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); server_print_usage(argv[0], default_params, default_sparams); diff --git a/llama.cpp b/llama.cpp index e5e640016d9a8..dd8b1f2640e4c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2883,6 +2883,7 @@ namespace GGUFMeta { case LLAMA_KV_OVERRIDE_TYPE_BOOL: return "bool"; case LLAMA_KV_OVERRIDE_TYPE_INT: return "int"; case LLAMA_KV_OVERRIDE_TYPE_FLOAT: return "float"; + case LLAMA_KV_OVERRIDE_TYPE_STR: return "str"; } return "unknown"; } @@ -2894,13 +2895,16 @@ namespace GGUFMeta { __func__, override_type_to_str(ovrd->tag), ovrd->key); switch (ovrd->tag) { case LLAMA_KV_OVERRIDE_TYPE_BOOL: { - LLAMA_LOG_INFO("%s\n", ovrd->bool_value ? "true" : "false"); + LLAMA_LOG_INFO("%s\n", ovrd->val_bool ? "true" : "false"); } break; case LLAMA_KV_OVERRIDE_TYPE_INT: { - LLAMA_LOG_INFO("%" PRId64 "\n", ovrd->int_value); + LLAMA_LOG_INFO("%" PRId64 "\n", ovrd->val_i64); } break; case LLAMA_KV_OVERRIDE_TYPE_FLOAT: { - LLAMA_LOG_INFO("%.6f\n", ovrd->float_value); + LLAMA_LOG_INFO("%.6f\n", ovrd->val_f64); + } break; + case LLAMA_KV_OVERRIDE_TYPE_STR: { + LLAMA_LOG_INFO("%s\n", ovrd->val_str); } break; default: // Shouldn't be possible to end up here, but just in case... 
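The string override handled here is filled in the same way as the other tags. A minimal sketch of registering one from C++, mirroring the pattern examples/quantize/quantize.cpp uses for its quantize.imatrix.file key (the file name is an illustrative placeholder; val_str is the char[128] member shown in the llama.h hunk below):

    llama_model_kv_override kvo;
    std::strcpy(kvo.key, "quantize.imatrix.file");
    kvo.tag = LLAMA_KV_OVERRIDE_TYPE_STR;
    std::strncpy(kvo.val_str, "imatrix.dat", 127); // val_str is char[128]
    kvo.val_str[127] = '\0';
    kv_overrides.emplace_back(std::move(kvo));

From the command line the same type is reachable through --override-kv, e.g. --override-kv quantize.imatrix.dataset=str:wiki.train.raw (dataset name hypothetical).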
@@ -2919,7 +2923,7 @@ namespace GGUFMeta { static typename std::enable_if::value, bool>::type try_override(OT & target, const struct llama_model_kv_override * ovrd) { if (validate_override(LLAMA_KV_OVERRIDE_TYPE_BOOL, ovrd)) { - target = ovrd->bool_value; + target = ovrd->val_bool; return true; } return false; @@ -2929,7 +2933,7 @@ namespace GGUFMeta { static typename std::enable_if::value && std::is_integral::value, bool>::type try_override(OT & target, const struct llama_model_kv_override * ovrd) { if (validate_override(LLAMA_KV_OVERRIDE_TYPE_INT, ovrd)) { - target = ovrd->int_value; + target = ovrd->val_i64; return true; } return false; @@ -2939,7 +2943,7 @@ namespace GGUFMeta { static typename std::enable_if::value, bool>::type try_override(T & target, const struct llama_model_kv_override * ovrd) { if (validate_override(LLAMA_KV_OVERRIDE_TYPE_FLOAT, ovrd)) { - target = ovrd->float_value; + target = ovrd->val_f64; return true; } return false; @@ -2948,12 +2952,11 @@ namespace GGUFMeta { template static typename std::enable_if::value, bool>::type try_override(T & target, const struct llama_model_kv_override * ovrd) { - (void)target; - (void)ovrd; - if (!ovrd) { return false; } - // Currently, we should never end up here so it would be a bug if we do. - throw std::runtime_error(format("Unsupported attempt to override string type for metadata key %s\n", - ovrd ? ovrd->key : "NULL")); + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_STR, ovrd)) { + target = ovrd->val_str; + return true; + } + return false; } static bool set(const gguf_context * ctx, const int k, T & target, const struct llama_model_kv_override * ovrd = nullptr) { @@ -14548,11 +14551,13 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s for (auto & o : overrides) { if (o.key[0] == 0) break; if (o.tag == LLAMA_KV_OVERRIDE_TYPE_FLOAT) { - gguf_set_val_f32(ctx_out, o.key, o.float_value); + gguf_set_val_f32(ctx_out, o.key, o.val_f64); } else if (o.tag == LLAMA_KV_OVERRIDE_TYPE_INT) { - gguf_set_val_i32(ctx_out, o.key, o.int_value); + gguf_set_val_i32(ctx_out, o.key, o.val_i64); } else if (o.tag == LLAMA_KV_OVERRIDE_TYPE_BOOL) { - gguf_set_val_bool(ctx_out, o.key, o.bool_value); + gguf_set_val_bool(ctx_out, o.key, o.val_bool); + } else if (o.tag == LLAMA_KV_OVERRIDE_TYPE_STR) { + gguf_set_val_str(ctx_out, o.key, o.val_str); } else { LLAMA_LOG_WARN("%s: unknown KV override type for key %s\n", __func__, o.key); } diff --git a/llama.h b/llama.h index 0a79fa765566a..8b1b15ed4ad55 100644 --- a/llama.h +++ b/llama.h @@ -195,15 +195,19 @@ extern "C" { LLAMA_KV_OVERRIDE_TYPE_INT, LLAMA_KV_OVERRIDE_TYPE_FLOAT, LLAMA_KV_OVERRIDE_TYPE_BOOL, + LLAMA_KV_OVERRIDE_TYPE_STR, }; struct llama_model_kv_override { - char key[128]; enum llama_model_kv_override_type tag; + + char key[128]; + union { - int64_t int_value; - double float_value; - bool bool_value; + int64_t val_i64; + double val_f64; + bool val_bool; + char val_str[128]; }; }; From 928e0b7013c862cf10701957b3d654aa70f11bd8 Mon Sep 17 00:00:00 2001 From: agray3 Date: Fri, 26 Apr 2024 19:08:30 +0100 Subject: [PATCH 060/101] Reset schedule earlier to allow overlap with ggml graph computation on device (#6933) * Reset schedule earlier to allow overlap with graph computation on device --- ggml-backend.c | 12 +++++++----- llama.cpp | 4 ++++ 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/ggml-backend.c b/ggml-backend.c index e91d97cd9dcfc..f5bdcf07838aa 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -1784,12 +1784,14 @@ void 
ggml_backend_sched_free(ggml_backend_sched_t sched) { void ggml_backend_sched_reset(ggml_backend_sched_t sched) { // reset state for the next run - size_t hash_size = sched->hash_set.size; - memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); // NOLINT - memset(sched->tensor_backend_id, -1, sizeof(sched->tensor_backend_id[0]) * hash_size); - memset(sched->tensor_copies, 0, sizeof(sched->tensor_copies[0]) * hash_size); + if (!sched->is_reset) { + size_t hash_size = sched->hash_set.size; + memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); // NOLINT + memset(sched->tensor_backend_id, -1, sizeof(sched->tensor_backend_id[0]) * hash_size); + memset(sched->tensor_copies, 0, sizeof(sched->tensor_copies[0]) * hash_size); - sched->is_reset = true; + sched->is_reset = true; + } sched->is_alloc = false; } diff --git a/llama.cpp b/llama.cpp index dd8b1f2640e4c..49f2b559e965e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -11473,6 +11473,10 @@ static int llama_decode_internal( } } + // Reset state for the next token before backend sync, to allow the CPU activities in the reset to + // overlap with device computation. + ggml_backend_sched_reset(lctx.sched); + return 0; } From c230b789062062828eeb659cf4a98dc7ab43ede3 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Sat, 27 Apr 2024 17:57:13 +0800 Subject: [PATCH 061/101] refactored a lot of code, remove bantokens, move it to api --- class.py | 2 +- expose.h | 3 +- gpttype_adapter.cpp | 65 ++++++++------- kcpp_docs.embd | 2 +- klite.embd | 190 +++++++++++++++++++++++++++++++++++++------- koboldcpp.py | 24 +++--- 6 files changed, 212 insertions(+), 74 deletions(-) diff --git a/class.py b/class.py index 0478bef5b91c9..531c1ddaf175c 100644 --- a/class.py +++ b/class.py @@ -270,7 +270,7 @@ def _load(self, save_model: bool, initial_load: bool) -> None: port=5001, port_param=5001, host='', launch=False, lora=None, threads=self.kcpp_threads, blasthreads=self.kcpp_threads, psutil_set_threads=False, highpriority=False, contextsize=self.kcpp_ctxsize, blasbatchsize=self.kcpp_blasbatchsize, ropeconfig=[self.kcpp_ropescale, self.kcpp_ropebase], stream=False, smartcontext=self.kcpp_smartcontext, - unbantokens=False, bantokens=None, usemirostat=None, forceversion=0, nommap=self.kcpp_nommap, + usemirostat=None, forceversion=0, nommap=self.kcpp_nommap, usemlock=False, noavx2=self.kcpp_noavx2, debugmode=self.kcpp_debugmode, skiplauncher=True, hordeconfig=None, noblas=self.kcpp_noblas, useclblast=self.kcpp_useclblast, usecublas=self.kcpp_usecublas, usevulkan=self.kcpp_usevulkan, gpulayers=self.kcpp_gpulayers, tensor_split=self.kcpp_tensor_split, config=None, onready='', multiuser=False, foreground=False, preloadstory=None, noshift=False, remotetunnel=False, ssl=False, benchmark=None, nocertify=False, sdconfig=None, mmproj=None, diff --git a/expose.h b/expose.h index fa064b96e071d..1ef865e730931 100644 --- a/expose.h +++ b/expose.h @@ -55,7 +55,6 @@ struct load_model_inputs const int gpulayers = 0; const float rope_freq_scale = 1.0f; const float rope_freq_base = 10000.0f; - const char * banned_tokens[ban_token_max]; const float tensor_split[tensor_split_max]; }; struct generation_inputs @@ -92,7 +91,7 @@ struct generation_inputs const float dynatemp_exponent = 1.0f; const float smoothing_factor = 0.0f; const logit_bias logit_biases[logit_bias_max]; - + const char * banned_tokens[ban_token_max]; }; struct generation_outputs { diff --git a/gpttype_adapter.cpp b/gpttype_adapter.cpp index 
1adf603a1ceee..3eeeec5dc6c25 100644 --- a/gpttype_adapter.cpp +++ b/gpttype_adapter.cpp @@ -837,17 +837,6 @@ ModelLoadResult gpttype_load_model(const load_model_inputs inputs, FileFormat in gptj_ctx_v3.hparams.rope_freq_scale = neox_ctx_v3.hparams.rope_freq_scale = rope_freq_scale; gptj_ctx_v3.hparams.rope_freq_base = neox_ctx_v3.hparams.rope_freq_base = rope_freq_base; - //handle custom token bans - banned_tokens.clear(); - for(int x=0;x0) + { + if(debugmode==1) + { + printf("\nBanning %zu token sequences...",banned_tokens.size()); + } + for(int v=0;v0) - { - printf("\n[First Run] Banning %zu token sequences...",banned_tokens.size()); - for(int v=0;v @@ -299,7 +299,7 @@ Current version: 135 padding-right: 10px; } - #extrastopseq, #anotetemplate { + .inlineinput { background-color: #404040; color: #ffffff; resize: none; @@ -3603,6 +3603,7 @@ Current version: 135 var current_anote = ""; //stored author note var current_anotetemplate = "[Author\'s note: <|>]"; var extrastopseq = ""; + var tokenbans = ""; var anote_strength = 320; //distance from end var newlineaftermemory = true; var current_wi = []; //each item stores a wi object. @@ -3738,8 +3739,8 @@ Current version: 135 passed_ai_warning: false, //used to store AI safety panel acknowledgement state entersubmit: true, //enter sends the prompt - max_context_length: 1600, - max_length: 120, + max_context_length: 1800, + max_length: 140, auto_ctxlen: true, auto_genamt: true, rep_pen: 1.1, @@ -5095,6 +5096,7 @@ Current version: 135 //extra unofficial fields for the story new_save_storyobj.extrastopseq = extrastopseq; + new_save_storyobj.tokenbans = tokenbans; new_save_storyobj.anotestr = anote_strength; new_save_storyobj.wisearchdepth = wi_searchdepth; new_save_storyobj.wiinsertlocation = wi_insertlocation; @@ -5271,6 +5273,7 @@ Current version: 135 let old_current_memory = current_memory; let old_current_wi = current_wi; let old_extrastopseq = extrastopseq; + let old_tokenbans = tokenbans; let old_notes = personal_notes; let old_regexreplace_data = regexreplace_data; @@ -5325,6 +5328,10 @@ Current version: 135 if (storyobj.extrastopseq) { extrastopseq = storyobj.extrastopseq; } + if(storyobj.tokenbans) + { + tokenbans = storyobj.tokenbans; + } if (storyobj.anotestr) { anote_strength = storyobj.anotestr; } @@ -5416,6 +5423,7 @@ Current version: 135 { extrastopseq = old_extrastopseq; regexreplace_data = old_regexreplace_data; + tokenbans = old_tokenbans; } if (storyobj.savedsettings && storyobj.savedsettings != "") @@ -6746,25 +6754,53 @@ Current version: 135 },false,true); } - var pendinglogitbias = {}; - function set_logit_bias() + function expand_tokens_section(targetid) { - inputBox("Enter OpenAI-formatted logit bias dictionary. Each key is the integer token IDs and their values are the biases (-100.0 to 100.0)
    Input is a JSON object, reference here.
    Leave blank to disable.
    ","Set Logit Biases",JSON.stringify(pendinglogitbias),"Enter JSON Object",()=>{ - let userinput = getInputBoxValue().trim(); - if(userinput=="") + let tablist = ["expandregexreplace","expandtokenbans","expandlogitbias"]; + + for(let i=0;i{ + let userinput = getInputBoxValue(); + if(userinput.trim()!="") + { + let ov = document.getElementById("tokenbans").value; + if(ov!="") + { + ov += "||$||"; + } + ov += userinput.trim(); + document.getElementById("tokenbans").value = ov; + } + },false); + } + var msgboxOnDone = hide_msgbox; function hide_msgbox() { //hide msgbox ONLY @@ -8919,8 +8972,20 @@ Current version: 135 current_anotetemplate = document.getElementById("anotetemplate").value; anote_strength = document.getElementById("anote_strength").value; extrastopseq = document.getElementById("extrastopseq").value; + tokenbans = document.getElementById("tokenbans").value; newlineaftermemory = (document.getElementById("newlineaftermemory").checked?true:false); - logitbiasdict = pendinglogitbias; + try + { + let lb = document.getElementById("logitbiastxtarea").value; + let dict = {}; + if(lb!="") + { + dict = JSON.parse(lb); + } + logitbiasdict = dict; + } catch (e) { + console.log("Your logit bias JSON dictionary was not correctly formatted!"); + } regexreplace_data = []; for(let i=0;i 0 && !seqs) { + seqs = []; + } + for (let i = 0; i < srep.length; ++i) { + if (srep[i] && srep[i] != "") { + seqs.push(srep[i]); + } + } + } + return seqs; + } + function dispatch_submit_generation(submit_payload, input_was_empty) //if input is not empty, always unban eos { console.log(submit_payload); @@ -10367,6 +10451,7 @@ Current version: 135 submit_payload.params.dynatemp_range = localsettings.dynatemp_range; submit_payload.params.dynatemp_exponent = localsettings.dynatemp_exponent; submit_payload.params.smoothing_factor = localsettings.smoothing_factor; + submit_payload.params.banned_tokens = get_token_bans(); } //presence pen and logit bias for OAI and newer kcpp if((custom_kobold_endpoint != "" && is_using_kcpp_with_mirostat()) || custom_oai_endpoint!="") @@ -13165,8 +13250,10 @@ Current version: 135 document.getElementById("anotetemplate").value = current_anotetemplate; document.getElementById("anote_strength").value = anote_strength; document.getElementById("extrastopseq").value = extrastopseq; + document.getElementById("tokenbans").value = tokenbans; document.getElementById("newlineaftermemory").checked = (newlineaftermemory?true:false); - pendinglogitbias = logitbiasdict; + document.getElementById("logitbiastxtarea").value = JSON.stringify(logitbiasdict,null,2); + if(custom_kobold_endpoint!="" || !is_using_custom_ep() ) { document.getElementById("noextrastopseq").classList.add("hidden"); @@ -13183,7 +13270,16 @@ Current version: 135 //setup regex replacers populate_regex_replacers(); - document.getElementById("btnlogitbias").disabled = !is_using_custom_ep(); + if(is_using_custom_ep()) + { + document.getElementById("nologitbias").classList.add("hidden"); + document.getElementById("notokenbans").classList.add("hidden"); + } + else + { + document.getElementById("nologitbias").classList.remove("hidden"); + document.getElementById("notokenbans").classList.remove("hidden"); + } } @@ -15291,7 +15387,7 @@ Current version: 135
    - +
    -
    -

Logit Biases ?Specify a dictionary of token IDs to modify the probability of them occurring.
    + +
Logit Biases ?Specify a dictionary of token IDs to modify the probability of them occurring. +
    -
    -
    -

    Custom Regex Replace ?Allows transforming incoming text with regex patterns, modifying all matches. Replacements will be applied in sequence.
    + - -
    +
Token Bans ?Outright removal of ANY tokens containing a specific substring from model vocab. If you want multiple sequences, separate them with the following delimiter: ||$|| + +
    + + +
    Regex Replace ?Allows transforming incoming text with regex patterns, modifying all matches. Replacements will be applied in sequence. + +
    + + +
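To make the token-ban flow behind the UI fields above concrete, here is a minimal Python sketch. It is illustrative only: KoboldCpp performs this scan in C++ against the loaded model's vocabulary, and the plain string-list vocab plus the function names below are assumptions. The `||$||` delimiter and the contains-substring rule are taken from the patch itself.

```python
# Illustrative sketch only; function names and the plain-list vocab are assumed.

def split_ban_sequences(raw: str) -> list[str]:
    # The UI stores several sequences in one string, joined by "||$||".
    return [s for s in raw.split("||$||") if s]

def find_banned_token_ids(vocab: list[str], sequences: list[str]) -> list[int]:
    # A token is banned when its text contains ANY of the given substrings.
    return [tid for tid, piece in enumerate(vocab)
            if any(seq in piece for seq in sequences)]

vocab = ["Hello", " world", "wor", "ld!", "!"]
print(find_banned_token_ids(vocab, split_ban_sequences("wor||$||!")))
# -> [1, 2, 3, 4]  (every piece containing "wor" or "!")
```

Because the match is by substring over token text rather than by token ID, a single short sequence can ban a large slice of the vocabulary, which is exactly the "outright removal" behaviour the tooltip describes.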
    diff --git a/koboldcpp.py b/koboldcpp.py index aadf9a8a84e05..7557a3fd8ddd1 100644 --- a/koboldcpp.py +++ b/koboldcpp.py @@ -56,7 +56,6 @@ class load_model_inputs(ctypes.Structure): ("gpulayers", ctypes.c_int), ("rope_freq_scale", ctypes.c_float), ("rope_freq_base", ctypes.c_float), - ("banned_tokens", ctypes.c_char_p * ban_token_max), ("tensor_split", ctypes.c_float * tensor_split_max)] class generation_inputs(ctypes.Structure): @@ -91,7 +90,8 @@ class generation_inputs(ctypes.Structure): ("dynatemp_range", ctypes.c_float), ("dynatemp_exponent", ctypes.c_float), ("smoothing_factor", ctypes.c_float), - ("logit_biases", logit_bias * logit_bias_max)] + ("logit_biases", logit_bias * logit_bias_max), + ("banned_tokens", ctypes.c_char_p * ban_token_max)] class generation_outputs(ctypes.Structure): _fields_ = [("status", ctypes.c_int), @@ -391,16 +391,10 @@ def load_model(model_filename): inputs.executable_path = (getdirpath()+"/").encode("UTF-8") inputs.debugmode = args.debugmode - banned_tokens = args.bantokens - for n in range(ban_token_max): - if not banned_tokens or n >= len(banned_tokens): - inputs.banned_tokens[n] = "".encode("UTF-8") - else: - inputs.banned_tokens[n] = banned_tokens[n].encode("UTF-8") ret = handle.load_model(inputs) return ret -def generate(prompt, memory="", images=[], max_length=32, max_context_length=512, temperature=0.7, top_k=100, top_a=0.0, top_p=0.92, min_p=0.0, typical_p=1.0, tfs=1.0, rep_pen=1.0, rep_pen_range=128, presence_penalty=0.0, mirostat=0, mirostat_tau=5.0, mirostat_eta=0.1, sampler_order=[6,0,1,3,4,2,5], seed=-1, stop_sequence=[], use_default_badwordsids=False, stream_sse=False, grammar='', grammar_retain_state=False, genkey='', trimstop=False, quiet=False, dynatemp_range=0.0, dynatemp_exponent=1.0, smoothing_factor=0.0, logit_biases={}, render_special=False): +def generate(prompt, memory="", images=[], max_length=32, max_context_length=512, temperature=0.7, top_k=100, top_a=0.0, top_p=0.92, min_p=0.0, typical_p=1.0, tfs=1.0, rep_pen=1.0, rep_pen_range=128, presence_penalty=0.0, mirostat=0, mirostat_tau=5.0, mirostat_eta=0.1, sampler_order=[6,0,1,3,4,2,5], seed=-1, stop_sequence=[], use_default_badwordsids=False, stream_sse=False, grammar='', grammar_retain_state=False, genkey='', trimstop=False, quiet=False, dynatemp_range=0.0, dynatemp_exponent=1.0, smoothing_factor=0.0, logit_biases={}, render_special=False, banned_tokens=[]): global maxctx, args, currentusergenkey, totalgens, pendingabortkey inputs = generation_inputs() inputs.prompt = prompt.encode("UTF-8") @@ -487,6 +481,12 @@ def generate(prompt, memory="", images=[], max_length=32, max_context_length=512 inputs.logit_biases[n] = logit_bias(-1, 0.0) print(f"Skipped unparsable logit bias:{ex}") + for n in range(ban_token_max): + if not banned_tokens or n >= len(banned_tokens): + inputs.banned_tokens[n] = "".encode("UTF-8") + else: + inputs.banned_tokens[n] = banned_tokens[n].encode("UTF-8") + currentusergenkey = genkey totalgens += 1 #early exit if aborted @@ -672,6 +672,10 @@ def transform_genparams(genparams, api_format): genparams["top_k"] = int(genparams.get('top_k', 120)) genparams["max_length"] = genparams.get('max', 100) + elif api_format==2: + if "ignore_eos" in genparams and not ("use_default_badwordsids" in genparams): + genparams["use_default_badwordsids"] = genparams.get('ignore_eos', False) + elif api_format==3 or api_format==4: genparams["max_length"] = genparams.get('max_tokens', 100) presence_penalty = genparams.get('presence_penalty', genparams.get('frequency_penalty', 0.0)) 
@@ -813,6 +817,7 @@ def run_blocking(): #api format 1=basic,2=kai,3=oai,4=oai-chat smoothing_factor=genparams.get('smoothing_factor', 0.0), logit_biases=genparams.get('logit_bias', {}), render_special=genparams.get('render_special', False), + banned_tokens=genparams.get('banned_tokens', []), ) genout = {"text":"","status":-1,"stopreason":-1} @@ -3281,7 +3286,6 @@ def range_checker(arg: str): parser.add_argument("--lora", help="LLAMA models only, applies a lora file on top of model. Experimental.", metavar=('[lora_filename]', '[lora_base]'), nargs='+') parser.add_argument("--smartcontext", help="Reserving a portion of context to try processing less frequently.", action='store_true') parser.add_argument("--noshift", help="If set, do not attempt to Trim and Shift the GGUF context.", action='store_true') - parser.add_argument("--bantokens", help="You can manually specify a list of token SUBSTRINGS that the AI cannot use. This bans ALL instances of that substring.", metavar=('[token_substrings]'), nargs='+') parser.add_argument("--forceversion", help="If the model file format detection fails (e.g. rogue modified model) you can set this to override the detected format (enter desired version, e.g. 401 for GPTNeoX-Type2).",metavar=('[version]'), type=int, default=0) parser.add_argument("--nommap", help="If set, do not use mmap to load newer models", action='store_true') parser.add_argument("--usemlock", help="For Apple Systems. Force system to keep model in RAM rather than swapping or compressing", action='store_true') From b7368332e24c5b2c8038bf8267f43632783fcc35 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sat, 27 Apr 2024 17:50:48 +0200 Subject: [PATCH 062/101] ci: server: tests python env on github container ubuntu latest / fix n_predict (#6935) * ci: server: fix python env * ci: server: fix server tests after #6638 * ci: server: fix windows is not building PR branch --- .github/workflows/server.yml | 31 ++++++++++++++----------------- examples/server/server.cpp | 2 +- 2 files changed, 15 insertions(+), 18 deletions(-) diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index a044d6044ce86..79cd7d643d20e 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -41,24 +41,16 @@ jobs: sanitizer: "" fail-fast: false # While -DLLAMA_SANITIZE_THREAD=ON is broken - container: - image: ubuntu:latest - ports: - - 8888 - options: --cpus 4 - steps: - name: Dependencies id: depends run: | - apt-get update - apt-get -y install \ + sudo apt-get update + sudo apt-get -y install \ build-essential \ xxd \ git \ cmake \ - python3-pip \ - python3-venv \ curl \ wget \ language-pack-en \ @@ -71,6 +63,17 @@ jobs: fetch-depth: 0 ref: ${{ github.event.inputs.sha || github.event.pull_request.head.sha || github.sha || github.head_ref || github.ref_name }} + - name: Python setup + id: setup_python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Tests dependencies + id: test_dependencies + run: | + pip install -r examples/server/tests/requirements.txt + - name: Verify server deps id: verify_server_deps run: | @@ -101,13 +104,6 @@ jobs: -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON ; cmake --build . --config ${{ matrix.build_type }} -j $(nproc) --target server - - name: Setup python env - id: pipenv - run: | - cd examples/server/tests - python3 -m venv venv - . 
venv/bin/activate - pip install -r requirements.txt - name: Tests id: server_integration_tests @@ -133,6 +129,7 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 + ref: ${{ github.event.inputs.sha || github.event.pull_request.head.sha || github.sha || github.head_ref || github.ref_name }} - name: libCURL id: get_libcurl diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 6f8ba3fc65d9f..2760aea8fd3e9 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1208,7 +1208,7 @@ struct server_context { } auto n_ctx_train = llama_n_ctx_train(model); - if (slot.params.n_predict < 1 && slot.ga_n == 1 + if (slot.params.n_predict < 1 && slot.n_predict < 1 && slot.ga_n == 1 && slot.n_prompt_tokens + slot.n_decoded >= n_ctx_train) { LOG_WARNING("n_predict is not set and self-context extend is disabled." " Limiting generated tokens to n_ctx_train to avoid EOS-less generation infinite loop", { From 4dba7e8114d84241c842b986e008af8b88d1a019 Mon Sep 17 00:00:00 2001 From: mgroeber9110 <45620825+mgroeber9110@users.noreply.github.com> Date: Sat, 27 Apr 2024 21:02:06 +0200 Subject: [PATCH 063/101] Replace "alternative" boolean operator in conditional compilation directive (#6949) --- common/log.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/common/log.h b/common/log.h index e4edcac7d7735..2b2f0e4551d7a 100644 --- a/common/log.h +++ b/common/log.h @@ -234,7 +234,7 @@ inline std::string log_filename_generator_impl(LogTriState multilog, const std:: // INTERNAL, DO NOT USE // USE LOG() INSTEAD // -#if !defined(_MSC_VER) or defined(__INTEL_LLVM_COMPILER) +#if !defined(_MSC_VER) || defined(__INTEL_LLVM_COMPILER) #define LOG_IMPL(str, ...) \ do { \ if (LOG_TARGET != nullptr) \ @@ -257,7 +257,7 @@ inline std::string log_filename_generator_impl(LogTriState multilog, const std:: // INTERNAL, DO NOT USE // USE LOG_TEE() INSTEAD // -#if !defined(_MSC_VER) or defined(__INTEL_LLVM_COMPILER) +#if !defined(_MSC_VER) || defined(__INTEL_LLVM_COMPILER) #define LOG_TEE_IMPL(str, ...) \ do { \ if (LOG_TARGET != nullptr) \ From 6c3fd5b685427df0d4272aae32d4daf5bb743b55 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Sat, 27 Apr 2024 18:01:13 +0800 Subject: [PATCH 064/101] updated lite (+2 squashed commit) Squashed commit: [d10a731e] update lite [2554b8e6] update docs --- kcpp_docs.embd | 7 ++ klite.embd | 179 ++++++++++++++++++++++++++++++++++++++++--------- 2 files changed, 153 insertions(+), 33 deletions(-) diff --git a/kcpp_docs.embd b/kcpp_docs.embd index 08a5b85f04bc5..83a6708d676c0 100644 --- a/kcpp_docs.embd +++ b/kcpp_docs.embd @@ -203,6 +203,13 @@ "description": "KoboldCpp ONLY. If true, prints special tokens as text for GGUF models", "type": "boolean" }, + "banned_tokens": { + "description": "An array of string sequences to remove from model vocab. All matching tokens with matching substrings are removed.", + "items": { + "type": "string" + }, + "type": "array" + }, "logit_bias": { "default": {}, "description": "KoboldCpp ONLY. An dictionary of key-value pairs, which indicate the token IDs (int) and logit bias (float) to apply for that token. 
Up to 16 value can be provided.", diff --git a/klite.embd b/klite.embd index 3f33826efb323..6edb9e3ad1c17 100644 --- a/klite.embd +++ b/klite.embd @@ -3651,6 +3651,7 @@ Current version: 136 var personal_notes = ""; var logitbiasdict = {}; var regexreplace_data = []; + var placeholder_tags_data = []; const num_regex_rows = 4; var localsettings = { @@ -5103,6 +5104,7 @@ Current version: 136 new_save_storyobj.personal_notes = personal_notes; new_save_storyobj.logitbiasdict = JSON.parse(JSON.stringify(logitbiasdict)); new_save_storyobj.regexreplace_data = JSON.parse(JSON.stringify(regexreplace_data)); + new_save_storyobj.placeholder_tags_data = JSON.parse(JSON.stringify(placeholder_tags_data)); if (export_settings) { new_save_storyobj.savedsettings = JSON.parse(JSON.stringify(localsettings)); @@ -5276,6 +5278,7 @@ Current version: 136 let old_tokenbans = tokenbans; let old_notes = personal_notes; let old_regexreplace_data = regexreplace_data; + let old_placeholder_tags_data = placeholder_tags_data; //determine if oldui file or newui file format restart_new_game(false); @@ -5363,6 +5366,10 @@ Current version: 136 if (storyobj.regexreplace_data) { regexreplace_data = storyobj.regexreplace_data; } + if(storyobj.placeholder_tags_data) + { + placeholder_tags_data = storyobj.placeholder_tags_data; + } } else { //v2 load if(storyobj.prompt!="") @@ -5424,6 +5431,7 @@ Current version: 136 extrastopseq = old_extrastopseq; regexreplace_data = old_regexreplace_data; tokenbans = old_tokenbans; + placeholder_tags_data = old_placeholder_tags_data; } if (storyobj.savedsettings && storyobj.savedsettings != "") @@ -6756,7 +6764,7 @@ Current version: 136 function expand_tokens_section(targetid) { - let tablist = ["expandregexreplace","expandtokenbans","expandlogitbias"]; + let tablist = ["expandregexreplace","expandtokenbans","expandlogitbias","expandplaceholdertags"]; for(let i=0;i + Placeholder ?The placeholder to match against + Replacement ?The text to substitude on display. Actual context is unchanged. + `; + let regextable = document.getElementById("placeholder_replace_table"); + + let hardcoded1 = ["{{user}}","{{char}}","\\n{{[INPUT]}}\\n","\\n{{[OUTPUT]}}\\n"]; + let hardcoded2 = [localsettings.chatname,localsettings.chatopponent,localsettings.instruct_starttag,localsettings.instruct_endtag]; + + for(let i=0;i + ${hardcoded1[i]} + + + `; + } + + for(let i=0;i + + + + `; + } + + regextable.innerHTML = regextablehtml; + + document.getElementById("placeholder_tags2").checked = localsettings.placeholder_tags; + + for(let i=0;i" + match.substring(0,match.length-2).trim() + "

    "; }); - // input = input.replaceAll(othernamesregex2, function(match) { - // return "{{botplaceholder}}

    " + match.substring(0,match.length-2).trim() + "

    "; - // }); } else { input = input.replaceAll(othernamesregex, "{{botplaceholder}}"); - //input = input.replaceAll(othernamesregex2, "{{botplaceholder}}"); } you = "{{userplaceholder}}"; @@ -15193,7 +15293,8 @@ Current version: 136 - + Language +
    @@ -15450,8 +15551,8 @@ Current version: 136
Enter an OpenAI-formatted logit bias dictionary. Each key is an integer token ID and its value is the bias (-100.0 to 100.0). Leave blank to disable.
Input is a JSON object; see the reference here.
    - - + +
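For reference, a client-side request combining the two fields wired up in this patch might look like the sketch below. The endpoint path and port are assumptions (KoboldCpp defaults may differ in your setup), and the token IDs in `logit_bias` are placeholders, since real IDs are model-specific.

```python
# Hedged example of supplying logit_bias and banned_tokens in a generate request.
import json
import urllib.request

payload = {
    "prompt": "Once upon a time",
    "max_length": 64,
    "logit_bias": {"2": -100.0, "29871": 5.0},   # token id -> bias (placeholders)
    "banned_tokens": ["http://", "###"],         # substring-based bans
}

req = urllib.request.Request(
    "http://localhost:5001/api/v1/generate",     # assumed local server URL
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))
```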
    @@ -15478,14 +15579,26 @@ Current version: 136 - +
    From f6ab0f873fcf05efff133ec698f55e32a608dcba Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Sun, 28 Apr 2024 11:42:26 +0800 Subject: [PATCH 065/101] gui benchmark --- koboldcpp.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/koboldcpp.py b/koboldcpp.py index 7557a3fd8ddd1..3f82bd3c42480 100644 --- a/koboldcpp.py +++ b/koboldcpp.py @@ -658,6 +658,7 @@ def string_contains_sequence_substring(inputstr,sequences): last_req_time = time.time() last_non_horde_req_time = time.time() currfinishreason = "null" +using_gui_launcher = False def transform_genparams(genparams, api_format): #alias all nonstandard alternative names for rep pen. @@ -1499,6 +1500,9 @@ def show_new_gui(): from tkinter.filedialog import askopenfilename from tkinter.filedialog import asksaveasfile + global using_gui_launcher + using_gui_launcher = True + # if args received, launch if len(sys.argv) != 1: import tkinter as tk @@ -1946,6 +1950,10 @@ def togglectxshift(a,b,c): else: smartcontextbox.grid_forget() + def guibench(): + args.benchmark = "stdout" + launchbrowser.set(0) + guilaunch() def changerunmode(a,b,c): global runmode_untouched @@ -2086,6 +2094,8 @@ def changerunmode(a,b,c): makeslider(hardware_tab, "BLAS Batch Size:", blasbatchsize_text, blas_size_var, 0, 7, 16, set=5,tooltip="How many tokens to process at once per batch.\nLarger values use more memory.") # force version makelabelentry(hardware_tab, "Force Version:" , version_var, 100, 50,"If the autodetected version is wrong, you can change it here.\nLeave as 0 for default.") + ctk.CTkButton(hardware_tab , text = "Run Benchmark", command = guibench ).grid(row=110,column=0, stick="se", padx= 0, pady=2) + runopts_var.trace('w', changerunmode) changerunmode(1,1,1) @@ -2174,7 +2184,6 @@ def togglehorde(a,b,c): makelabelentry(images_tab, "Image threads:" , sd_threads_var, 6, 50,"How many threads to use during image generation.\nIf left blank, uses same value as threads.") makecheckbox(images_tab, "Compress Weights (Saves Memory)", sd_quant_var, 8,tooltiptxt="Quantizes the SD model weights to save memory. May degrade quality.") - # launch def guilaunch(): if model_var.get() == "" and sd_model_var.get() == "": @@ -3218,6 +3227,11 @@ def onready_subprocess(): else: # Flush stdout for previous win32 issue so the client can see output. print(f"Server was not started, main function complete. Idling.", flush=True) + global using_gui_launcher + if using_gui_launcher: + print("===") + print("Press a key to exit", flush=True) + input() def run_in_queue(launch_args, input_queue, output_queue): main(launch_args, start_server=False) From 8faa6ed84d94b8bb2f92120d37bed7aea3e2a0b7 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Sun, 28 Apr 2024 15:16:02 +0800 Subject: [PATCH 066/101] clarify some descriptions --- klite.embd | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/klite.embd b/klite.embd index 6edb9e3ad1c17..bb175f13e3550 100644 --- a/klite.embd +++ b/klite.embd @@ -13386,7 +13386,7 @@ Current version: 136 `; let regextable = document.getElementById("placeholder_replace_table"); - let hardcoded1 = ["{{user}}","{{char}}","\\n{{[INPUT]}}\\n","\\n{{[OUTPUT]}}\\n"]; + let hardcoded1 = ["{{user}}","{{char}}","{{[INPUT]}}","{{[OUTPUT]}}"]; let hardcoded2 = [localsettings.chatname,localsettings.chatopponent,localsettings.instruct_starttag,localsettings.instruct_endtag]; for(let i=0;i
    -
Token Bans ?Token Filter ?Outright removal of ANY tokens containing a specific substring from model vocab. If you want multiple sequences, separate them with the following delimiter: ||$||
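The placeholder table added a few patches above maps the hardcoded tags {{user}}, {{char}}, {{[INPUT]}} and {{[OUTPUT]}} onto the current chat names and instruct tags. A rough sketch of that expansion, assuming plain string replacement: `replace_placeholders` is an invented name and the tag values are examples, not Lite's actual defaults. A later patch in this series reorders the real `replace_placeholders_direct` the same way, expanding the instruct tags before the name placeholders.

```python
# Sketch of ordered placeholder expansion; names and tag values are assumed.
def replace_placeholders(text, user, char, start_tag, end_tag):
    ordered = [
        ("{{[INPUT]}}", start_tag),    # instruct tags are expanded first
        ("{{[OUTPUT]}}", end_tag),
        ("{{user}}", user or "You"),
        ("{{char}}", char or "AI"),
    ]
    for placeholder, value in ordered:
        text = text.replace(placeholder, value)
    return text

print(replace_placeholders(
    "{{[INPUT]}}Hi {{char}}, I am {{user}}.{{[OUTPUT]}}",
    "Alice", "Bob", "### Instruction:\n", "\n### Response:\n"))
# -> ### Instruction:
#    Hi Bob, I am Alice.
#    ### Response:
```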
  • @@ -594,23 +603,23 @@ Building the program with BLAS support may lead to some performance improvements ```cmd set OPENCL_SDK_ROOT="C:/OpenCL-SDK-v2023.04.17-Win-x64" git clone https://github.com/CNugteren/CLBlast.git - mkdir CLBlast\build - cd CLBlast\build - cmake .. -DBUILD_SHARED_LIBS=OFF -DOVERRIDE_MSVC_FLAGS_TO_MT=OFF -DTUNERS=OFF -DOPENCL_ROOT=%OPENCL_SDK_ROOT% -G "Visual Studio 17 2022" -A x64 - cmake --build . --config Release - cmake --install . --prefix C:/CLBlast + cd CLBlast + cmake -B build -DBUILD_SHARED_LIBS=OFF -DOVERRIDE_MSVC_FLAGS_TO_MT=OFF -DTUNERS=OFF -DOPENCL_ROOT=%OPENCL_SDK_ROOT% -G "Visual Studio 17 2022" -A x64 + cmake --build build --config Release + cmake --install build --prefix C:/CLBlast ``` + (note: `--config Release` at build time is the default and only relevant for Visual Studio builds - or multi-config Ninja builds) + -
    Unix: ```sh git clone https://github.com/CNugteren/CLBlast.git - mkdir CLBlast/build - cd CLBlast/build - cmake .. -DBUILD_SHARED_LIBS=OFF -DTUNERS=OFF - cmake --build . --config Release - cmake --install . --prefix /some/path + cd CLBlast + cmake -B build -DBUILD_SHARED_LIBS=OFF -DTUNERS=OFF + cmake --build build --config Release + cmake --install build --prefix /some/path ``` Where `/some/path` is where the built library will be installed (default is `/usr/local`). @@ -624,21 +633,17 @@ Building the program with BLAS support may lead to some performance improvements ``` - CMake (Unix): ```sh - mkdir build - cd build - cmake .. -DLLAMA_CLBLAST=ON -DCLBlast_DIR=/some/path - cmake --build . --config Release + cmake -B build -DLLAMA_CLBLAST=ON -DCLBlast_DIR=/some/path + cmake --build build --config Release ``` - CMake (Windows): ```cmd set CL_BLAST_CMAKE_PKG="C:/CLBlast/lib/cmake/CLBlast" git clone https://github.com/ggerganov/llama.cpp cd llama.cpp - mkdir build - cd build - cmake .. -DBUILD_SHARED_LIBS=OFF -DLLAMA_CLBLAST=ON -DCMAKE_PREFIX_PATH=%CL_BLAST_CMAKE_PKG% -G "Visual Studio 17 2022" -A x64 - cmake --build . --config Release - cmake --install . --prefix C:/LlamaCPP + cmake -B build -DBUILD_SHARED_LIBS=OFF -DLLAMA_CLBLAST=ON -DCMAKE_PREFIX_PATH=%CL_BLAST_CMAKE_PKG% -G "Visual Studio 17 2022" -A x64 + cmake --build build --config Release + cmake --install build --prefix C:/LlamaCPP ``` ##### Running Llama with CLBlast @@ -694,10 +699,8 @@ Building the program with BLAS support may lead to some performance improvements Then, build llama.cpp using the cmake command below: ```bash - mkdir -p build - cd build - cmake .. -DLLAMA_VULKAN=1 - cmake --build . --config Release + cmake -B build -DLLAMA_VULKAN=1 + cmake --build build --config Release # Test the output binary (with "-ngl 33" to offload all layers to GPU) ./bin/main -m "PATH_TO_MODEL" -p "Hi you how are you" -n 50 -e -ngl 33 -t 4 diff --git a/examples/main-cmake-pkg/README.md b/examples/main-cmake-pkg/README.md index f599fbaec46b5..edf20d8db6616 100644 --- a/examples/main-cmake-pkg/README.md +++ b/examples/main-cmake-pkg/README.md @@ -17,11 +17,9 @@ In this case, CLBlast was already installed so the CMake package is referenced i ```cmd git clone https://github.com/ggerganov/llama.cpp cd llama.cpp -mkdir build -cd build -cmake .. -DBUILD_SHARED_LIBS=OFF -DLLAMA_CLBLAST=ON -DCMAKE_PREFIX_PATH=C:/CLBlast/lib/cmake/CLBlast -G "Visual Studio 17 2022" -A x64 -cmake --build . --config Release -cmake --install . --prefix C:/LlamaCPP +cmake -B build -DBUILD_SHARED_LIBS=OFF -DLLAMA_CLBLAST=ON -DCMAKE_PREFIX_PATH=C:/CLBlast/lib/cmake/CLBlast -G "Visual Studio 17 2022" -A x64 +cmake --build build --config Release +cmake --install build --prefix C:/LlamaCPP ``` ### Build main-cmake-pkg @@ -29,9 +27,7 @@ cmake --install . --prefix C:/LlamaCPP ```cmd cd ..\examples\main-cmake-pkg -mkdir build -cd build -cmake .. -DBUILD_SHARED_LIBS=OFF -DCMAKE_PREFIX_PATH="C:/CLBlast/lib/cmake/CLBlast;C:/LlamaCPP/lib/cmake/Llama" -G "Visual Studio 17 2022" -A x64 -cmake --build . --config Release -cmake --install . 
--prefix C:/MyLlamaApp +cmake -B build -DBUILD_SHARED_LIBS=OFF -DCMAKE_PREFIX_PATH="C:/CLBlast/lib/cmake/CLBlast;C:/LlamaCPP/lib/cmake/Llama" -G "Visual Studio 17 2022" -A x64 +cmake --build build --config Release +cmake --install build --prefix C:/MyLlamaApp ``` diff --git a/examples/server/README.md b/examples/server/README.md index 918ac1295d660..b96a4444a2bd3 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -74,15 +74,18 @@ page cache before using this. See https://github.com/ggerganov/llama.cpp/issues/ - Using `make`: ```bash - make + make server ``` - Using `CMake`: ```bash - cmake --build . --config Release + cmake -B build + cmake --build build --config Release -t server ``` + Binary is at `./build/bin/server` + ## Build with SSL `server` can also be built with SSL support using OpenSSL 3 @@ -99,10 +102,8 @@ page cache before using this. See https://github.com/ggerganov/llama.cpp/issues/ - Using `CMake`: ```bash - mkdir build - cd build - cmake .. -DLLAMA_SERVER_SSL=ON - make server + cmake -B build -DLLAMA_SERVER_SSL=ON + cmake --build build --config Release -t server ``` ## Quick Start From 4025efb4976052d566c582511ddadfa322e0bd3f Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Tue, 30 Apr 2024 00:25:37 +0800 Subject: [PATCH 085/101] anti wi keys --- klite.embd | 62 +++++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 50 insertions(+), 12 deletions(-) diff --git a/klite.embd b/klite.embd index 5a5bb8a13c193..e146ec7ba2963 100644 --- a/klite.embd +++ b/klite.embd @@ -5396,6 +5396,7 @@ Current version: 136 let nwi = { "key": itm.key[0], "keysecondary": (itm.keysecondary.length > 0 ? itm.keysecondary[0] : ""), + "keyanti": (itm.keyanti.length > 0 ? itm.keyanti[0] : ""), "content": itm.content, "comment": itm.comment, "folder": null, @@ -5537,6 +5538,7 @@ Current version: 136 let nwi = { "key": karr.join(","), "keysecondary": "", + "keyanti": "", "content": itm.entry, "comment": "", "folder": null, @@ -5567,6 +5569,7 @@ Current version: 136 let nwi = { "key": karr.join(","), "keysecondary": ((ksarr && ksarr.length) > 0 ? ksarr.join(",") : ""), + "keyanti": "", "content": itm.content, "comment": itm.comment, "folder": null, @@ -5704,6 +5707,7 @@ Current version: 136 let nwi = { "key": key, "keysecondary": "", + "keyanti": "", "content": itm.text, "comment": "", "folder": null, @@ -5763,6 +5767,7 @@ Current version: 136 let nwi = { "key": (keys ? keys : ""), "keysecondary": "", + "keyanti": "", "content": (entry ? 
entry : ""), "comment": "", "folder": null, @@ -9317,14 +9322,15 @@ Current version: 136 function replace_placeholders_direct(inputtxt) { - inputtxt = replaceAll(inputtxt,"{{user}}",localsettings.chatname?localsettings.chatname:"You",true); - inputtxt = replaceAll(inputtxt,"{{char}}",localsettings.chatopponent?localsettings.chatopponent:defaultchatopponent,true); inputtxt = replaceAll(inputtxt,instructstartplaceholder,get_instruct_starttag(false)); inputtxt = replaceAll(inputtxt,instructendplaceholder,get_instruct_endtag(false)); //failsafe to handle removing newline tags inputtxt = replaceAll(inputtxt,instructstartplaceholder.trim(),get_instruct_starttag(false)); inputtxt = replaceAll(inputtxt,instructendplaceholder.trim(),get_instruct_endtag(false)); + inputtxt = replaceAll(inputtxt,"{{user}}",localsettings.chatname?localsettings.chatname:"You",true); + inputtxt = replaceAll(inputtxt,"{{char}}",localsettings.chatopponent?localsettings.chatopponent:defaultchatopponent,true); + for(let i=0;i wimatch_context.includes(k.trim())); } else { shoulduse = wiks.some(k => wimatch_context.includes(k.trim().toLowerCase())); } - } else { - let wiks2 = wi.keysecondary.split(","); + } + else + { + let wikanti = []; + let wiks2 = []; + if(!invalidantikey) + { + wikanti = wi.keyanti.split(","); + } + if(!invalidseckey) + { + wiks2 = wi.keysecondary.split(","); + } + let t1=false, t2=false, t3=false; if (localsettings.case_sensitive_wi) { - let t1 = wiks.some(k => wimatch_context.includes(k.trim())); - let t2 = wiks2.some(k => wimatch_context.includes(k.trim())); - shoulduse = (t1 && t2); + t1 = wiks.some(k => wimatch_context.includes(k.trim())); + t2 = wiks2.some(k => wimatch_context.includes(k.trim())); + t3 = wikanti.some(k => wimatch_context.includes(k.trim())); } else { - let t1 = wiks.some(k => wimatch_context.includes(k.trim().toLowerCase())); - let t2 = wiks2.some(k => wimatch_context.includes(k.trim().toLowerCase())); + t1 = wiks.some(k => wimatch_context.includes(k.trim().toLowerCase())); + t2 = wiks2.some(k => wimatch_context.includes(k.trim().toLowerCase())); + t3 = wikanti.some(k => wimatch_context.includes(k.trim().toLowerCase())); + } + if(!invalidantikey && !invalidseckey) //all keys valid + { + shoulduse = (t1 && t2 && !t3); + } + else if(invalidantikey) + { shoulduse = (t1 && t2); } + else + { + shoulduse = (t1 && !t3); + } } } @@ -13441,14 +13472,17 @@ Current version: 136 ce.selective = !ce.selective; var tgt = document.getElementById("wiskt" + idx); var tgt2 = document.getElementById("wikeysec" + idx); + var tgt3 = document.getElementById("wikeyanti" + idx); if (ce.selective) { tgt.classList.add("witoggleron"); tgt.classList.remove("witoggleroff"); tgt2.classList.remove("hidden"); + tgt3.classList.remove("hidden"); } else { tgt.classList.remove("witoggleron"); tgt.classList.add("witoggleroff"); tgt2.classList.add("hidden"); + tgt3.classList.add("hidden"); } } @@ -13499,6 +13533,7 @@ Current version: 136 var ne = { "key": "", "keysecondary": "", + "keyanti": "", "content": "", "comment": "", "folder": null, @@ -13514,6 +13549,7 @@ Current version: 136 for (var i = 0; i < current_wi.length; ++i) { current_wi[i].key = document.getElementById("wikey" + i).value; current_wi[i].keysecondary = document.getElementById("wikeysec" + i).value; + current_wi[i].keyanti = document.getElementById("wikeyanti" + i).value; current_wi[i].content = document.getElementById("wival" + i).value; let prb = document.getElementById("wirng" + i).value; current_wi[i].probability = (prb?prb:100); @@ -13550,6 
+13586,7 @@ Current version: 136 var winame = escapeHtml(curr.key); var witxt = escapeHtml(curr.content); var wisec = curr.keysecondary; + var wianti = curr.keyanti; var wirngval = (curr.probability?curr.probability:100); var ishidden = false; @@ -13566,9 +13603,10 @@ Current version: 136 +`` + ` - ` + ` + ` + ` + ` + ` - + 📑 From 5539e6fdd1b97e0c37c54d34df67f334fc344a26 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Mon, 29 Apr 2024 19:56:59 +0200 Subject: [PATCH 086/101] main : fix typo in comment in main.cpp (#6985) Signed-off-by: Daniel Bevenius --- examples/main/main.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/main/main.cpp b/examples/main/main.cpp index a74d4d9c72364..5c693657c8993 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -324,7 +324,7 @@ int main(int argc, char ** argv) { log_tostr(embd_inp.empty()), n_matching_session_tokens, embd_inp.size(), session_tokens.size(), embd_inp.size()); // if we will use the cache for the full prompt without reaching the end of the cache, force - // reevaluation of the last token token to recalculate the cached logits + // reevaluation of the last token to recalculate the cached logits if (!embd_inp.empty() && n_matching_session_tokens == embd_inp.size() && session_tokens.size() > embd_inp.size()) { LOGLN("recalculate the cached logits (do): session_tokens.resize( %zu )", embd_inp.size() - 1); From b8c1476e44cc1f3a1811613f65251cf779067636 Mon Sep 17 00:00:00 2001 From: Clint Herron Date: Mon, 29 Apr 2024 14:40:14 -0400 Subject: [PATCH 087/101] Extending grammar integration tests (#6644) * Cleaning up integration tests to share code between tests and make it simpler to add new tests. * Add tests around quantifiers to ensure both matching and non-matching compliance. * Add slightly more complex grammar with quantifiers to test references with quantifiers. * Fixing build when C++17 is not present. * Separating test calls to give more helpful stack traces on failure. Adding verbose messages to give visibility for what is being tested. * Adding quotes around strings to explicitly show whitespace * Removing trailing whitespace. * Implementing suggestions from @ochafik -- grammars and test strings now print and flush before tests to aid in debugging segfaults and whatnot. * Cleaning up forgotten symbols. Modifying simple test to use test harness. Added comments for more verbose descriptions of what each test is accomplishing. * Unicode symbol modifications to hopefully make log easier to parse visually. 
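The shape of the refactored harness, reduced to a sketch before the diff itself: one table of passing strings and one of failing strings per grammar. Python's `re` module stands in for the real `llama_grammar` matcher here, so this is purely illustrative of the data-driven pattern; the actual tests build grammar stacks and reset them between strings.

```python
# Purely illustrative: re stands in for llama_grammar to show the harness shape.
import re

def test_grammar(desc, pattern, passing, failing):
    print(f"Testing {desc}: {pattern}")
    matcher = re.compile(pattern)
    for s in passing:
        assert matcher.fullmatch(s), f"expected match: {s!r}"
    for s in failing:
        assert not matcher.fullmatch(s), f"unexpected match: {s!r}"

test_grammar("+ quantifier", r"a+",
             passing=["a", "aaaaa"],
             failing=["", "b", "ab", "ba"])
print("All tests passed.")
```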
--- tests/test-grammar-integration.cpp | 361 ++++++++++++++++++----------- 1 file changed, 225 insertions(+), 136 deletions(-) diff --git a/tests/test-grammar-integration.cpp b/tests/test-grammar-integration.cpp index 2d8f228e3769d..1a4004e2ab175 100644 --- a/tests/test-grammar-integration.cpp +++ b/tests/test-grammar-integration.cpp @@ -10,15 +10,10 @@ #include "unicode.h" #include #include +#include -static void test_simple_grammar() { - // Test case for a simple grammar - const std::string grammar_str = R"""(root ::= expr -expr ::= term ("+" term)* -term ::= number -number ::= [0-9]+)"""; - - grammar_parser::parse_state parsed_grammar = grammar_parser::parse(grammar_str.c_str()); +static llama_grammar* build_grammar(const std::string & grammar_str) { + auto parsed_grammar = grammar_parser::parse(grammar_str.c_str()); // Ensure we parsed correctly assert(!parsed_grammar.rules.empty()); @@ -30,8 +25,10 @@ number ::= [0-9]+)"""; llama_grammar* grammar = llama_grammar_init( grammar_rules.data(), grammar_rules.size(), parsed_grammar.symbol_ids.at("root")); - std::string input = "123+456"; + return grammar; +} +static bool match_string(const std::string & input, llama_grammar* grammar) { auto decoded = decode_utf8(input, {}); const auto & code_points = decoded.first; @@ -39,159 +36,67 @@ number ::= [0-9]+)"""; for (auto it = code_points.begin(), end = code_points.end() - 1; it != end; ++it) { auto prev_stacks = grammar->stacks; llama_grammar_accept(grammar->rules, prev_stacks, *it, grammar->stacks); - assert(!grammar->stacks.empty()); + if (grammar->stacks.empty()) { + // no stacks means that the grammar failed to match at this point + return false; + } } - bool completed_grammar = false; - for (const auto & stack : grammar->stacks) { if (stack.empty()) { - completed_grammar = true; - break; + // An empty stack means that the grammar has been completed + return true; } } - assert(completed_grammar); - - // Clean up allocated memory - llama_grammar_free(grammar); + return false; } -static void test_complex_grammar() { - // Test case for a more complex grammar, with both failure strings and success strings - const std::string grammar_str = R"""(root ::= expression -expression ::= term ws (("+"|"-") ws term)* -term ::= factor ws (("*"|"/") ws factor)* -factor ::= number | variable | "(" expression ")" | function-call -number ::= [0-9]+ -variable ::= [a-zA-Z_][a-zA-Z0-9_]* -function-call ::= variable ws "(" (expression ("," ws expression)*)? ")" -ws ::= [ \t\n\r]?)"""; - - grammar_parser::parse_state parsed_grammar = grammar_parser::parse(grammar_str.c_str()); - - // Ensure we parsed correctly - assert(!parsed_grammar.rules.empty()); +static void test_grammar(const std::string & test_desc, const std::string & grammar_str, const std::vector & passing_strings, const std::vector & failing_strings) { + fprintf(stderr, "⚫ Testing %s. 
Grammar: %s\n", test_desc.c_str(), grammar_str.c_str()); + fflush(stderr); - // Ensure we have a root node - assert(!(parsed_grammar.symbol_ids.find("root") == parsed_grammar.symbol_ids.end())); - - std::vector grammar_rules(parsed_grammar.c_rules()); - llama_grammar* grammar = llama_grammar_init( - grammar_rules.data(), grammar_rules.size(), parsed_grammar.symbol_ids.at("root")); + auto grammar = build_grammar(grammar_str); // Save the original grammar stacks so that we can reset after every new string we want to test auto original_stacks = grammar->stacks; - // Test a few strings - std::vector test_strings_pass = { - "42", - "1*2*3*4*5", - "x", - "x+10", - "x1+y2", - "(a+b)*(c-d)", - "func()", - "func(x,y+2)", - "a*(b+c)-d/e", - "f(g(x),h(y,z))", - "x + 10", - "x1 + y2", - "(a + b) * (c - d)", - "func()", - "func(x, y + 2)", - "a * (b + c) - d / e", - "f(g(x), h(y, z))", - "123+456", - "123*456*789-123/456+789*123", - "123+456*789-123/456+789*123-456/789+123*456-789/123+456*789-123/456+789*123-456" - }; - - std::vector test_strings_fail = { - "+", - "/ 3x", - "x + + y", - "a * / b", - "func(,)", - "func(x y)", - "(a + b", - "x + y)", - "a + b * (c - d", - "42 +", - "x +", - "x + 10 +", - "(a + b) * (c - d", - "func(", - "func(x, y + 2", - "a * (b + c) - d /", - "f(g(x), h(y, z)", - "123+456*789-123/456+789*123-456/789+123*456-789/123+456*789-123/456+789*123-456/", - }; + fprintf(stderr, " 🔵 Valid strings:\n"); // Passing strings - for (const auto & test_string : test_strings_pass) { - auto decoded = decode_utf8(test_string, {}); - - const auto & code_points = decoded.first; - - int pos = 0; - for (auto it = code_points.begin(), end = code_points.end() - 1; it != end; ++it) { - ++pos; - auto prev_stacks = grammar->stacks; - llama_grammar_accept(grammar->rules, prev_stacks, *it, grammar->stacks); - - // Expect that each code point will not cause the grammar to fail - if (grammar->stacks.empty()) { - fprintf(stdout, "Error at position %d\n", pos); - fprintf(stderr, "Unexpected character '%s'\n", unicode_cpt_to_utf8(*it).c_str()); - fprintf(stderr, "Input string is %s:\n", test_string.c_str()); - } - assert(!grammar->stacks.empty()); - } + for (const auto & test_string : passing_strings) { + fprintf(stderr, " \"%s\" ", test_string.c_str()); + fflush(stderr); - bool completed_grammar = false; + bool matched = match_string(test_string, grammar); - for (const auto & stack : grammar->stacks) { - if (stack.empty()) { - completed_grammar = true; - break; - } + if (!matched) { + fprintf(stderr, "❌ (failed to match)\n"); + } else { + fprintf(stdout, "✅︎\n"); } - assert(completed_grammar); + assert(matched); // Reset the grammar stacks grammar->stacks = original_stacks; } + fprintf(stderr, " 🟠 Invalid strings:\n"); + // Failing strings - for (const auto & test_string : test_strings_fail) { - auto decoded = decode_utf8(test_string, {}); - - const auto & code_points = decoded.first; - bool parse_failed = false; - - for (auto it = code_points.begin(), end = code_points.end() - 1; it != end; ++it) { - auto prev_stacks = grammar->stacks; - llama_grammar_accept(grammar->rules, prev_stacks, *it, grammar->stacks); - if (grammar->stacks.empty()) { - parse_failed = true; - break; - } - assert(!grammar->stacks.empty()); - } + for (const auto & test_string : failing_strings) { + fprintf(stderr, " \"%s\" ", test_string.c_str()); + fflush(stderr); - bool completed_grammar = false; + bool matched = match_string(test_string, grammar); - for (const auto & stack : grammar->stacks) { - if (stack.empty()) { - 
completed_grammar = true; - break; - } + if (matched) { + fprintf(stderr, "❌ (incorrectly matched)\n"); + } else { + fprintf(stdout, "✅︎\n"); } - - // Ensure that the grammar is not completed, or that each string failed to match as-expected - assert((!completed_grammar) || parse_failed); + assert(!matched); // Reset the grammar stacks grammar->stacks = original_stacks; @@ -201,7 +106,183 @@ ws ::= [ \t\n\r]?)"""; llama_grammar_free(grammar); } +static void test_simple_grammar() { + // Test case for a simple grammar + test_grammar( + "simple grammar", + R"""( + root ::= expr + expr ::= term ("+" term)* + term ::= number + number ::= [0-9]+)""", + // Passing strings + { + "42", + "1+2+3+4+5", + "123+456", + }, + // Failing strings + { + "+", + "/ 3", + "1+2+3+4+5+", + "12a45", + } + ); +} + +static void test_complex_grammar() { + // Test case for a more complex grammar, with both failure strings and success strings + test_grammar( + "medium complexity grammar", + // Grammar + R"""( + root ::= expression + expression ::= term ws (("+"|"-") ws term)* + term ::= factor ws (("*"|"/") ws factor)* + factor ::= number | variable | "(" expression ")" | function-call + number ::= [0-9]+ + variable ::= [a-zA-Z_][a-zA-Z0-9_]* + function-call ::= variable ws "(" (expression ("," ws expression)*)? ")" + ws ::= [ \t\n\r]?)""", + // Passing strings + { + "42", + "1*2*3*4*5", + "x", + "x+10", + "x1+y2", + "(a+b)*(c-d)", + "func()", + "func(x,y+2)", + "a*(b+c)-d/e", + "f(g(x),h(y,z))", + "x + 10", + "x1 + y2", + "(a + b) * (c - d)", + "func()", + "func(x, y + 2)", + "a * (b + c) - d / e", + "f(g(x), h(y, z))", + "123+456", + "123*456*789-123/456+789*123", + "123+456*789-123/456+789*123-456/789+123*456-789/123+456*789-123/456+789*123-456" + }, + // Failing strings + { + "+", + "/ 3x", + "x + + y", + "a * / b", + "func(,)", + "func(x y)", + "(a + b", + "x + y)", + "a + b * (c - d", + "42 +", + "x +", + "x + 10 +", + "(a + b) * (c - d", + "func(", + "func(x, y + 2", + "a * (b + c) - d /", + "f(g(x), h(y, z)", + "123+456*789-123/456+789*123-456/789+123*456-789/123+456*789-123/456+789*123-456/", + } + ); +} + +static void test_quantifiers() { + // A collection of tests to exercise * + and ? quantifiers + + test_grammar( + "* quantifier", + // Grammar + R"""(root ::= "a"*)""", + // Passing strings + { + "", + "a", + "aaaaa", + "aaaaaaaaaaaaaaaaaa", + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + }, + // Failing strings + { + "b", + "ab", + "aab", + "ba", + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab" + } + ); + test_grammar( + "+ quantifier", + // Grammar + R"""(root ::= "a"+)""", + // Passing strings + { + "a", + "aaaaa", + "aaaaaaaaaaaaaaaaaa", + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + }, + // Failing strings + { + "", + "b", + "ab", + "aab", + "ba", + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab" + } + ); + test_grammar( + "? quantifier", + // Grammar + R"""(root ::= "a"?)""", + // Passing strings + { + "", + "a" + }, + // Failing strings + { + "b", + "ab", + "aa", + "ba", + } + ); + test_grammar( + "mixed quantifiers", + // Grammar + R"""( + root ::= cons+ vowel* cons? 
(vowel cons)* + vowel ::= [aeiouy] + cons ::= [bcdfghjklmnpqrstvwxyz] + )""", + // Passing strings + { + "yes", + "no", + "noyes", + "crwth", + "four", + "bryyyy", + }, + // Failing strings + { + "yess", + "yesno", + "forty", + "catyyy", + } + ); +} + static void test_failure_missing_root() { + fprintf(stderr, "⚫ Testing missing root node:\n"); // Test case for a grammar that is missing a root rule const std::string grammar_str = R"""(rot ::= expr expr ::= term ("+" term)* @@ -215,29 +296,37 @@ number ::= [0-9]+)"""; // Ensure we do NOT have a root node assert(parsed_grammar.symbol_ids.find("root") == parsed_grammar.symbol_ids.end()); + fprintf(stderr, " ✅︎ Passed\n"); } static void test_failure_missing_reference() { + fprintf(stderr, "⚫ Testing missing reference node:\n"); + // Test case for a grammar that is missing a referenced rule - const std::string grammar_str = R"""(root ::= expr + const std::string grammar_str = +R"""(root ::= expr expr ::= term ("+" term)* term ::= numero number ::= [0-9]+)"""; - fprintf(stderr, "Expected error: "); + fprintf(stderr, " Expected error: "); grammar_parser::parse_state parsed_grammar = grammar_parser::parse(grammar_str.c_str()); // Ensure we did NOT parsed correctly assert(parsed_grammar.rules.empty()); - fprintf(stderr, "End of expected error. Test successful.\n"); + fprintf(stderr, " End of expected error.\n"); + fprintf(stderr, " ✅︎ Passed\n"); } int main() { + fprintf(stdout, "Running grammar integration tests...\n"); test_simple_grammar(); test_complex_grammar(); + test_quantifiers(); test_failure_missing_root(); test_failure_missing_reference(); + fprintf(stdout, "All tests passed.\n"); return 0; } From 8843a98c2ba97a25e93319a104f9ddfaf83ce4c4 Mon Sep 17 00:00:00 2001 From: Olivier Chafik Date: Tue, 30 Apr 2024 00:52:50 +0100 Subject: [PATCH 088/101] Improve usability of --model-url & related flags (#6930) * args: default --model to models/ + filename from --model-url or --hf-file (or else legacy models/7B/ggml-model-f16.gguf) * args: main & server now call gpt_params_handle_model_default * args: define DEFAULT_MODEL_PATH + update cli docs * curl: check url of previous download (.json metadata w/ url, etag & lastModified) * args: fix update to quantize-stats.cpp * curl: support legacy .etag / .lastModified companion files * curl: rm legacy .etag file support * curl: reuse regex across headers callback calls * curl: unique_ptr to manage lifecycle of curl & outfile * curl: nit: no need for multiline regex flag * curl: update failed test (model file collision) + gitignore *.gguf.json --- .gitignore | 1 + common/common.cpp | 260 +++++++++--------- common/common.h | 6 +- examples/main/README.md | 2 +- examples/quantize-stats/quantize-stats.cpp | 2 +- examples/server/server.cpp | 4 +- .../server/tests/features/embeddings.feature | 2 +- 7 files changed, 144 insertions(+), 133 deletions(-) diff --git a/.gitignore b/.gitignore index 60f9d1f8d04b9..50ae0973ae3b3 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ *.a *.so *.gguf +*.gguf.json *.bin *.exe *.dll diff --git a/common/common.cpp b/common/common.cpp index fe84039f76e55..099d0356fa4b8 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -67,7 +67,6 @@ #include #endif #define LLAMA_CURL_MAX_URL_LENGTH 2084 // Maximum URL Length in Chrome: 2083 -#define LLAMA_CURL_MAX_HEADER_LENGTH 256 #endif // LLAMA_USE_CURL using json = nlohmann::ordered_json; @@ -1324,6 +1323,29 @@ bool gpt_params_find_arg(int argc, char ** argv, const std::string & arg, gpt_pa return false; } +void 
gpt_params_handle_model_default(gpt_params & params) { + if (!params.hf_repo.empty()) { + // short-hand to avoid specifying --hf-file -> default it to --model + if (params.hf_file.empty()) { + if (params.model.empty()) { + throw std::invalid_argument("error: --hf-repo requires either --hf-file or --model\n"); + } + params.hf_file = params.model; + } else if (params.model.empty()) { + params.model = "models/" + string_split(params.hf_file, '/').back(); + } + } else if (!params.model_url.empty()) { + if (params.model.empty()) { + auto f = string_split(params.model_url, '#').front(); + f = string_split(f, '?').front(); + f = string_split(f, '/').back(); + params.model = "models/" + f; + } + } else if (params.model.empty()) { + params.model = DEFAULT_MODEL_PATH; + } +} + bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { bool invalid_param = false; std::string arg; @@ -1352,10 +1374,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { throw std::invalid_argument("error: --prompt-cache-all not supported in interactive mode yet\n"); } - // short-hand to avoid specifying --hf-file -> default it to --model - if (!params.hf_repo.empty() && params.hf_file.empty()) { - params.hf_file = params.model; - } + gpt_params_handle_model_default(params); if (params.escape) { process_escapes(params.prompt); @@ -1548,7 +1567,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --control-vector-layer-range START END\n"); printf(" layer range to apply the control vector(s) to, start and end inclusive\n"); printf(" -m FNAME, --model FNAME\n"); - printf(" model path (default: %s)\n", params.model.c_str()); + printf(" model path (default: models/$filename with filename from --hf-file or --model-url if set, otherwise %s)\n", DEFAULT_MODEL_PATH); printf(" -md FNAME, --model-draft FNAME\n"); printf(" draft model for speculative decoding (default: unused)\n"); printf(" -mu MODEL_URL, --model-url MODEL_URL\n"); @@ -1896,59 +1915,75 @@ void llama_batch_add( #ifdef LLAMA_USE_CURL -static bool llama_download_file(CURL * curl, const char * url, const char * path) { +static bool starts_with(const std::string & str, const std::string & prefix) { + // While we wait for C++20's std::string::starts_with... + return str.rfind(prefix, 0) == 0; +} + +static bool llama_download_file(const std::string & url, const std::string & path) { + + // Initialize libcurl + std::unique_ptr curl(curl_easy_init(), &curl_easy_cleanup); + if (!curl) { + fprintf(stderr, "%s: error initializing libcurl\n", __func__); + return false; + } + bool force_download = false; // Set the URL, allow to follow http redirection - curl_easy_setopt(curl, CURLOPT_URL, url); - curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L); + curl_easy_setopt(curl.get(), CURLOPT_URL, url.c_str()); + curl_easy_setopt(curl.get(), CURLOPT_FOLLOWLOCATION, 1L); #if defined(_WIN32) // CURLSSLOPT_NATIVE_CA tells libcurl to use standard certificate store of // operating system. Currently implemented under MS-Windows. 
- curl_easy_setopt(curl, CURLOPT_SSL_OPTIONS, CURLSSLOPT_NATIVE_CA); + curl_easy_setopt(curl.get(), CURLOPT_SSL_OPTIONS, CURLSSLOPT_NATIVE_CA); #endif // Check if the file already exists locally struct stat model_file_info; - auto file_exists = (stat(path, &model_file_info) == 0); - - // If the file exists, check for ${path_model}.etag or ${path_model}.lastModified files - char etag[LLAMA_CURL_MAX_HEADER_LENGTH] = {0}; - char etag_path[PATH_MAX] = {0}; - snprintf(etag_path, sizeof(etag_path), "%s.etag", path); + auto file_exists = (stat(path.c_str(), &model_file_info) == 0); - char last_modified[LLAMA_CURL_MAX_HEADER_LENGTH] = {0}; - char last_modified_path[PATH_MAX] = {0}; - snprintf(last_modified_path, sizeof(last_modified_path), "%s.lastModified", path); + // If the file exists, check its JSON metadata companion file. + std::string metadata_path = path + ".json"; + nlohmann::json metadata; + std::string etag; + std::string last_modified; if (file_exists) { - auto * f_etag = fopen(etag_path, "r"); - if (f_etag) { - if (!fgets(etag, sizeof(etag), f_etag)) { - fprintf(stderr, "%s: unable to read file %s\n", __func__, etag_path); - } else { - fprintf(stderr, "%s: previous file found %s: %s\n", __func__, etag_path, etag); - } - fclose(f_etag); - } - - auto * f_last_modified = fopen(last_modified_path, "r"); - if (f_last_modified) { - if (!fgets(last_modified, sizeof(last_modified), f_last_modified)) { - fprintf(stderr, "%s: unable to read file %s\n", __func__, last_modified_path); - } else { - fprintf(stderr, "%s: previous file found %s: %s\n", __func__, last_modified_path, - last_modified); + // Try and read the JSON metadata file (note: stream autoclosed upon exiting this block). + std::ifstream metadata_in(metadata_path); + if (metadata_in.good()) { + try { + metadata_in >> metadata; + fprintf(stderr, "%s: previous metadata file found %s: %s\n", __func__, metadata_path.c_str(), metadata.dump().c_str()); + if (metadata.contains("url") && metadata["url"].is_string()) { + auto previous_url = metadata["url"].get(); + if (previous_url != url) { + fprintf(stderr, "%s: Model URL mismatch: %s != %s\n", __func__, url.c_str(), previous_url.c_str()); + return false; + } + } + if (metadata.contains("etag") && metadata["etag"].is_string()) { + etag = metadata["etag"]; + } + if (metadata.contains("lastModified") && metadata["lastModified"].is_string()) { + last_modified = metadata["lastModified"]; + } + } catch (const nlohmann::json::exception & e) { + fprintf(stderr, "%s: error reading metadata file %s: %s\n", __func__, metadata_path.c_str(), e.what()); + return false; } - fclose(f_last_modified); } + } else { + fprintf(stderr, "%s: no previous model file found %s\n", __func__, path.c_str()); } // Send a HEAD request to retrieve the etag and last-modified headers struct llama_load_model_from_url_headers { - char etag[LLAMA_CURL_MAX_HEADER_LENGTH] = {0}; - char last_modified[LLAMA_CURL_MAX_HEADER_LENGTH] = {0}; + std::string etag; + std::string last_modified; }; llama_load_model_from_url_headers headers; { @@ -1956,38 +1991,37 @@ static bool llama_download_file(CURL * curl, const char * url, const char * path auto header_callback = [](char * buffer, size_t /*size*/, size_t n_items, void * userdata) -> size_t { llama_load_model_from_url_headers *headers = (llama_load_model_from_url_headers *) userdata; - // Convert header field name to lowercase - for (size_t i = 0; i < n_items && buffer[i] != ':'; ++i) { - buffer[i] = tolower(buffer[i]); - } - - const char * etag_prefix = "etag: "; - if 
(strncmp(buffer, etag_prefix, strlen(etag_prefix)) == 0) { - strncpy(headers->etag, buffer + strlen(etag_prefix), n_items - strlen(etag_prefix) - 2); // Remove CRLF - } - - const char * last_modified_prefix = "last-modified: "; - if (strncmp(buffer, last_modified_prefix, strlen(last_modified_prefix)) == 0) { - strncpy(headers->last_modified, buffer + strlen(last_modified_prefix), - n_items - strlen(last_modified_prefix) - 2); // Remove CRLF + static std::regex header_regex("([^:]+): (.*)\r\n"); + static std::regex etag_regex("ETag", std::regex_constants::icase); + static std::regex last_modified_regex("Last-Modified", std::regex_constants::icase); + + std::string header(buffer, n_items); + std::smatch match; + if (std::regex_match(header, match, header_regex)) { + const std::string & key = match[1]; + const std::string & value = match[2]; + if (std::regex_match(key, match, etag_regex)) { + headers->etag = value; + } else if (std::regex_match(key, match, last_modified_regex)) { + headers->last_modified = value; + } } return n_items; }; - curl_easy_setopt(curl, CURLOPT_NOBODY, 1L); // will trigger the HEAD verb - curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 1L); // hide head request progress - curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, static_cast(header_callback)); - curl_easy_setopt(curl, CURLOPT_HEADERDATA, &headers); + curl_easy_setopt(curl.get(), CURLOPT_NOBODY, 1L); // will trigger the HEAD verb + curl_easy_setopt(curl.get(), CURLOPT_NOPROGRESS, 1L); // hide head request progress + curl_easy_setopt(curl.get(), CURLOPT_HEADERFUNCTION, static_cast(header_callback)); + curl_easy_setopt(curl.get(), CURLOPT_HEADERDATA, &headers); - CURLcode res = curl_easy_perform(curl); + CURLcode res = curl_easy_perform(curl.get()); if (res != CURLE_OK) { - curl_easy_cleanup(curl); fprintf(stderr, "%s: curl_easy_perform() failed: %s\n", __func__, curl_easy_strerror(res)); return false; } long http_code = 0; - curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &http_code); + curl_easy_getinfo(curl.get(), CURLINFO_RESPONSE_CODE, &http_code); if (http_code != 200) { // HEAD not supported, we don't know if the file has changed // force trigger downloading @@ -1996,28 +2030,30 @@ static bool llama_download_file(CURL * curl, const char * url, const char * path } } - // If the ETag or the Last-Modified headers are different: trigger a new download - bool should_download = !file_exists - || force_download - || (strlen(headers.etag) > 0 && strcmp(etag, headers.etag) != 0) - || (strlen(headers.last_modified) > 0 && strcmp(last_modified, headers.last_modified) != 0); + bool should_download = !file_exists || force_download; + if (!should_download) { + if (!etag.empty() && etag != headers.etag) { + fprintf(stderr, "%s: ETag header is different (%s != %s): triggering a new download\n", __func__, etag.c_str(), headers.etag.c_str()); + should_download = true; + } else if (!last_modified.empty() && last_modified != headers.last_modified) { + fprintf(stderr, "%s: Last-Modified header is different (%s != %s): triggering a new download\n", __func__, last_modified.c_str(), headers.last_modified.c_str()); + should_download = true; + } + } if (should_download) { - char path_temporary[PATH_MAX] = {0}; - snprintf(path_temporary, sizeof(path_temporary), "%s.downloadInProgress", path); + std::string path_temporary = path + ".downloadInProgress"; if (file_exists) { - fprintf(stderr, "%s: deleting previous downloaded file: %s\n", __func__, path); - if (remove(path) != 0) { - curl_easy_cleanup(curl); - fprintf(stderr, "%s: unable to 
delete file: %s\n", __func__, path); + fprintf(stderr, "%s: deleting previous downloaded file: %s\n", __func__, path.c_str()); + if (remove(path.c_str()) != 0) { + fprintf(stderr, "%s: unable to delete file: %s\n", __func__, path.c_str()); return false; } } // Set the output file - auto * outfile = fopen(path_temporary, "wb"); + std::unique_ptr outfile(fopen(path_temporary.c_str(), "wb"), fclose); if (!outfile) { - curl_easy_cleanup(curl); - fprintf(stderr, "%s: error opening local file for writing: %s\n", __func__, path); + fprintf(stderr, "%s: error opening local file for writing: %s\n", __func__, path.c_str()); return false; } @@ -2025,12 +2061,12 @@ static bool llama_download_file(CURL * curl, const char * url, const char * path auto write_callback = [](void * data, size_t size, size_t nmemb, void * fd) -> size_t { return fwrite(data, size, nmemb, (FILE *)fd); }; - curl_easy_setopt(curl, CURLOPT_NOBODY, 0L); - curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, static_cast(write_callback)); - curl_easy_setopt(curl, CURLOPT_WRITEDATA, outfile); + curl_easy_setopt(curl.get(), CURLOPT_NOBODY, 0L); + curl_easy_setopt(curl.get(), CURLOPT_WRITEFUNCTION, static_cast(write_callback)); + curl_easy_setopt(curl.get(), CURLOPT_WRITEDATA, outfile.get()); // display download progress - curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0L); + curl_easy_setopt(curl.get(), CURLOPT_NOPROGRESS, 0L); // helper function to hide password in URL auto llama_download_hide_password_in_url = [](const std::string & url) -> std::string { @@ -2049,51 +2085,34 @@ static bool llama_download_file(CURL * curl, const char * url, const char * path // start the download fprintf(stderr, "%s: downloading from %s to %s (server_etag:%s, server_last_modified:%s)...\n", __func__, - llama_download_hide_password_in_url(url).c_str(), path, headers.etag, headers.last_modified); - auto res = curl_easy_perform(curl); + llama_download_hide_password_in_url(url).c_str(), path.c_str(), headers.etag.c_str(), headers.last_modified.c_str()); + auto res = curl_easy_perform(curl.get()); if (res != CURLE_OK) { - fclose(outfile); - curl_easy_cleanup(curl); fprintf(stderr, "%s: curl_easy_perform() failed: %s\n", __func__, curl_easy_strerror(res)); return false; } long http_code = 0; - curl_easy_getinfo (curl, CURLINFO_RESPONSE_CODE, &http_code); + curl_easy_getinfo (curl.get(), CURLINFO_RESPONSE_CODE, &http_code); if (http_code < 200 || http_code >= 400) { - fclose(outfile); - curl_easy_cleanup(curl); fprintf(stderr, "%s: invalid http status code received: %ld\n", __func__, http_code); return false; } - // Clean up - fclose(outfile); + // Causes file to be closed explicitly here before we rename it. + outfile.reset(); - // Write the new ETag to the .etag file - if (strlen(headers.etag) > 0) { - auto * etag_file = fopen(etag_path, "w"); - if (etag_file) { - fputs(headers.etag, etag_file); - fclose(etag_file); - fprintf(stderr, "%s: file etag saved %s: %s\n", __func__, etag_path, headers.etag); - } - } + // Write the updated JSON metadata file. 
+    metadata.update({
+        {"url", url},
+        {"etag", headers.etag},
+        {"lastModified", headers.last_modified}
+    });
+    std::ofstream(metadata_path) << metadata.dump(4);
+    fprintf(stderr, "%s: file metadata saved: %s\n", __func__, metadata_path.c_str());
 
-    // Write the new lastModified to the .etag file
-    if (strlen(headers.last_modified) > 0) {
-        auto * last_modified_file = fopen(last_modified_path, "w");
-        if (last_modified_file) {
-            fputs(headers.last_modified, last_modified_file);
-            fclose(last_modified_file);
-            fprintf(stderr, "%s: file last modified saved %s: %s\n", __func__, last_modified_path,
-                    headers.last_modified);
-        }
-    }
-
-    if (rename(path_temporary, path) != 0) {
-        curl_easy_cleanup(curl);
-        fprintf(stderr, "%s: unable to rename file: %s to %s\n", __func__, path_temporary, path);
+    if (rename(path_temporary.c_str(), path.c_str()) != 0) {
+        fprintf(stderr, "%s: unable to rename file: %s to %s\n", __func__, path_temporary.c_str(), path.c_str());
         return false;
     }
 }
@@ -2111,15 +2130,7 @@ struct llama_model * llama_load_model_from_url(
         return NULL;
     }
 
-    // Initialize libcurl
-    auto * curl = curl_easy_init();
-
-    if (!curl) {
-        fprintf(stderr, "%s: error initializing libcurl\n", __func__);
-        return NULL;
-    }
-
-    if (!llama_download_file(curl, model_url, path_model)) {
+    if (!llama_download_file(model_url, path_model)) {
         return NULL;
     }
 
@@ -2133,7 +2144,6 @@ struct llama_model * llama_load_model_from_url(
     auto * ctx_gguf = gguf_init_from_file(path_model, gguf_params);
     if (!ctx_gguf) {
         fprintf(stderr, "\n%s: failed to load input GGUF from %s\n", __func__, path_model);
-        curl_easy_cleanup(curl);
         return NULL;
     }
 
@@ -2145,8 +2155,6 @@ struct llama_model * llama_load_model_from_url(
         gguf_free(ctx_gguf);
     }
 
-    curl_easy_cleanup(curl);
-
     if (n_split > 1) {
         char split_prefix[PATH_MAX] = {0};
         char split_url_prefix[LLAMA_CURL_MAX_URL_LENGTH] = {0};
@@ -2177,11 +2185,7 @@ struct llama_model * llama_load_model_from_url(
                 char split_url[LLAMA_CURL_MAX_URL_LENGTH] = {0};
                 llama_split_path(split_url, sizeof(split_url), split_url_prefix, download_idx, n_split);
 
-                auto * curl = curl_easy_init();
-                bool res = llama_download_file(curl, split_url, split_path);
-                curl_easy_cleanup(curl);
-
-                return res;
+                return llama_download_file(split_url, split_path);
             }, idx));
     }
 
@@ -2668,7 +2672,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l
     fprintf(stream, "mirostat_ent: %f # default: 5.0\n", sparams.mirostat_tau);
     fprintf(stream, "mirostat_lr: %f # default: 0.1\n", sparams.mirostat_eta);
     fprintf(stream, "mlock: %s # default: false\n", params.use_mlock ? "true" : "false");
-    fprintf(stream, "model: %s # default: models/7B/ggml-model.bin\n", params.model.c_str());
+    fprintf(stream, "model: %s # default: %s\n", params.model.c_str(), DEFAULT_MODEL_PATH);
     fprintf(stream, "model_draft: %s # default:\n", params.model_draft.c_str());
     fprintf(stream, "multiline_input: %s # default: false\n", params.multiline_input ? "true" : "false");
     fprintf(stream, "n_gpu_layers: %d # default: -1\n", params.n_gpu_layers);
diff --git a/common/common.h b/common/common.h
index 3233d90e69eb5..8afdf2bdf189b 100644
--- a/common/common.h
+++ b/common/common.h
@@ -31,6 +31,8 @@
     fprintf(stderr, "%s: built with %s for %s\n", __func__, LLAMA_COMPILER, LLAMA_BUILD_TARGET);    \
 } while(0)
 
+#define DEFAULT_MODEL_PATH "models/7B/ggml-model-f16.gguf"
+
 // build info
 extern int LLAMA_BUILD_NUMBER;
 extern char const *LLAMA_COMMIT;
@@ -92,7 +94,7 @@ struct gpt_params {
     // // sampling parameters
     struct llama_sampling_params sparams;
 
-    std::string model = "models/7B/ggml-model-f16.gguf"; // model path
+    std::string model = ""; // model path
     std::string model_draft = ""; // draft model for speculative decoding
     std::string model_alias = "unknown"; // model alias
     std::string model_url = ""; // model url to download
@@ -171,6 +173,8 @@ struct gpt_params {
     std::vector<std::string> image; // path to image file(s)
 };
 
+void gpt_params_handle_model_default(gpt_params & params);
+
 bool parse_kv_override(const char * data, std::vector<llama_model_kv_override> & overrides);
 
 bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params);
diff --git a/examples/main/README.md b/examples/main/README.md
index 649f4e0f35820..e7a38743c240a 100644
--- a/examples/main/README.md
+++ b/examples/main/README.md
@@ -66,7 +66,7 @@ main.exe -m models\7B\ggml-model.bin --ignore-eos -n -1 --random-prompt
 
 In this section, we cover the most commonly used options for running the `main` program with the LLaMA models:
 
-- `-m FNAME, --model FNAME`: Specify the path to the LLaMA model file (e.g., `models/7B/ggml-model.bin`).
+- `-m FNAME, --model FNAME`: Specify the path to the LLaMA model file (e.g., `models/7B/ggml-model.gguf`; inferred from `--model-url` if set).
 - `-mu MODEL_URL --model-url MODEL_URL`: Specify a remote http url to download the file (e.g https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf).
 - `-i, --interactive`: Run the program in interactive mode, allowing you to provide input directly and receive real-time responses.
 - `-ins, --instruct`: Run the program in instruction mode, which is particularly useful when working with Alpaca models.
diff --git a/examples/quantize-stats/quantize-stats.cpp b/examples/quantize-stats/quantize-stats.cpp index 1d05f13911fc2..746df8446b85e 100644 --- a/examples/quantize-stats/quantize-stats.cpp +++ b/examples/quantize-stats/quantize-stats.cpp @@ -23,7 +23,7 @@ #endif struct quantize_stats_params { - std::string model = "models/7B/ggml-model-f16.gguf"; + std::string model = DEFAULT_MODEL_PATH; bool verbose = false; bool per_layer_stats = false; bool print_histogram = false; diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 2760aea8fd3e9..01453af2c97e3 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2353,7 +2353,7 @@ static void server_print_usage(const char * argv0, const gpt_params & params, co printf(" disable KV offload\n"); } printf(" -m FNAME, --model FNAME\n"); - printf(" model path (default: %s)\n", params.model.c_str()); + printf(" model path (default: models/$filename with filename from --hf-file or --model-url if set, otherwise %s)\n", DEFAULT_MODEL_PATH); printf(" -mu MODEL_URL, --model-url MODEL_URL\n"); printf(" model download url (default: unused)\n"); printf(" -hfr REPO, --hf-repo REPO\n"); @@ -2835,6 +2835,8 @@ static void server_params_parse(int argc, char ** argv, server_params & sparams, } } + gpt_params_handle_model_default(params); + if (!params.kv_overrides.empty()) { params.kv_overrides.emplace_back(); params.kv_overrides.back().key[0] = 0; diff --git a/examples/server/tests/features/embeddings.feature b/examples/server/tests/features/embeddings.feature index dcf1434f97124..6f163ce04b3f6 100644 --- a/examples/server/tests/features/embeddings.feature +++ b/examples/server/tests/features/embeddings.feature @@ -5,7 +5,7 @@ Feature: llama.cpp server Background: Server startup Given a server listening on localhost:8080 And a model url https://huggingface.co/ggml-org/models/resolve/main/bert-bge-small/ggml-model-f16.gguf - And a model file ggml-model-f16.gguf + And a model file bert-bge-small.gguf And a model alias bert-bge-small And 42 as server seed And 2 slots From 952d03dbead16e4dbdd1d3458486340673cc2465 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 30 Apr 2024 11:05:25 +0300 Subject: [PATCH 089/101] convert : use utf8 encoding (#7000) * convert : use utf8 encoding * convert : update instructions and warning message --- convert-hf-to-gguf-update.py | 16 ++++++++++------ convert-hf-to-gguf.py | 12 ++++++++---- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/convert-hf-to-gguf-update.py b/convert-hf-to-gguf-update.py index 1c559c3f693be..b019c1e3dc59f 100644 --- a/convert-hf-to-gguf-update.py +++ b/convert-hf-to-gguf-update.py @@ -128,7 +128,7 @@ def download_file_with_auth(url, token, save_path): print(f"chkhsh: {chkhsh}") # print the "pre_tokenizer" content from the tokenizer.json - with open(f"models/tokenizers/{name}/tokenizer.json", "r") as f: + with open(f"models/tokenizers/{name}/tokenizer.json", "r", encoding="utf-8") as f: cfg = json.load(f) pre_tokenizer = cfg["pre_tokenizer"] print("pre_tokenizer: " + json.dumps(pre_tokenizer, indent=4)) @@ -156,15 +156,19 @@ def download_file_with_auth(url, token, save_path): src_func += "\n" src_func += " res = None\n" src_func += "\n" -src_func += " # NOTE: if you get an error here, you need to add the model to the if-elif chain below\n" -src_func += " # don't do this manually - use the convert-hf-to-gguf-update.py script!\n" +src_func += " # NOTE: if you get an error here, you need to update the convert-hf-to-gguf-update.py script\n" +src_func 
+= " # or pull the latest version of the model from Huggingface\n" +src_func += " # don't edit the hashes manually!\n" src_func += f"{src_ifs}\n" src_func += " if res is None:\n" src_func += " print(\"\\n\")\n" src_func += " print(\"**************************************************************************************\")\n" src_func += " print(\"** WARNING: The BPE pre-tokenizer was not recognized!\")\n" -src_func += " print(\"** This means that it was not added yet or you are using an older version.\")\n" -src_func += " print(\"** Check convert-hf-to-gguf-update.py and update it accordingly.\")\n" +src_func += " print(\"** There are 2 possible reasons for this:\")\n" +src_func += " print(\"** - the model has not been added to convert-hf-to-gguf-update.py yet\")\n" +src_func += " print(\"** - the pre-tokenization config has changed upstream\")\n" +src_func += " print(\"** Check your model files and convert-hf-to-gguf-update.py and update them accordingly.\")\n" +src_func += " print(\"** ref: https://github.com/ggerganov/llama.cpp/pull/6920\")\n" src_func += " print(\"**\")\n" src_func += " print(f\"** chkhsh: {chkhsh}\")\n" src_func += " print(\"**************************************************************************************\")\n" @@ -249,7 +253,7 @@ def download_file_with_auth(url, token, save_path): from transformers import AutoTokenizer tokenizer = AutoTokenizer.from_pretrained(f"models/tokenizers/{name}") - with open(f"models/ggml-vocab-{name}.gguf.inp", "w") as f: + with open(f"models/ggml-vocab-{name}.gguf.inp", "w", encoding="utf-8") as f: for text in tests: f.write(f"{text}") f.write("\n__ggml_vocab_test__\n") diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index d1b8cef11277d..2f146d7302a78 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -279,8 +279,9 @@ def get_vocab_base_pre(self, tokenizer) -> str: res = None - # NOTE: if you get an error here, you need to add the model to the if-elif chain below - # don't do this manually - use the convert-hf-to-gguf-update.py script! + # NOTE: if you get an error here, you need to update the convert-hf-to-gguf-update.py script + # or pull the latest version of the model from Huggingface + # don't edit the hashes manually! 
if chkhsh == "0ef9807a4087ebef797fc749390439009c3b9eda9ad1a097abbe738f486c01e5": # ref: https://huggingface.co/meta-llama/Meta-Llama-3-8B res = "llama-bpe" @@ -310,8 +311,11 @@ def get_vocab_base_pre(self, tokenizer) -> str: print("\n") print("**************************************************************************************") print("** WARNING: The BPE pre-tokenizer was not recognized!") - print("** This means that it was not added yet or you are using an older version.") - print("** Check convert-hf-to-gguf-update.py and update it accordingly.") + print("** There are 2 possible reasons for this:") + print("** - the model has not been added to convert-hf-to-gguf-update.py yet") + print("** - the pre-tokenization config has changed upstream") + print("** Check your model files and convert-hf-to-gguf-update.py and update them accordingly.") + print("** ref: https://github.com/ggerganov/llama.cpp/pull/6920") print("**") print(f"** chkhsh: {chkhsh}") print("**************************************************************************************") From 9c67c2773d4b706cf71d70ecf4aa180b62501960 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 30 Apr 2024 12:16:08 +0300 Subject: [PATCH 090/101] ggml : add Flash Attention (#5021) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ggml : add ggml_flash_attn_ext API * ggml : fix GQA support in ggml_flash_attn_ext * ggml : online attention (CPU) * metal : initial implementation * metal : f16 precision * metal : reduce branches * metal : specialize for head size * wip : 8 rows per simd group * wip : 4 rows per simd group * wip : template for rows per warp * metal : parallelize across KV size * metal : parallel reduce across heads * metal : efficient flash_attn_f16 implementation * metal : avoid redundant loads of the attention * metal : scale and mask in matrix form * metal : fix comment * llama : avoid ggml_cast, use F32 query * metal : add parallel reduce version (disabled) * metal : move output into local memory + optimize - the result from each simdgroup now stays in the registers - significantly reduced SRAM usage - more efficient skipping of -INF blocks - avoid simdgroup barrier in hot loop - add comments * metal : add tests, fix scaling, support C > 32 * metal : improve precision * ggml : fix f16 mad * metal : minor * metal : support Q > 8 * tests : add ATTN tests * metal : disable buffer allocation logs * tests : more * metal : faster inner loop for C == 32 * metal : fix array initialization * tests : ifdef * ggml : switch to padded F16 mask for ggml_soft_max, ggml_flash_attn_ext * ggml : fix ggml_soft_max mask requirement * cuda : fix soft_max to use correct mask size * cuda : add flash_attn kernel (wip) * metal : optimize softmax for C > 32 * metal : optimize softmax * tests : minor fix * cuda : avoid zeroing fragments * tests : update dims * cuda : fix __hisinf() result check * cuda : avoid warp_reduce for smax * cuda : use int instead of int64_t Noticeably improves performance (thanks to Johannes) * cuda : make loops use the same loop values Thanks Johannes again for the tip * cuda : unroll some of the loops * cuda : avoid __hisinf branches * cuda : use half2 in softmax * cuda : switch to 1 warp for bs > 16 * cuda : speed-up reduce part of the kernel * cuda : unroll Q*K^T loop * cuda : fix -INF block check * cuda : simplify softmax * cuda : fix matrix names * cuda : minor * llama : adapt to F16 KQ_pos * llama : adapt new models to F16 KQ_mask * ggml : fix F16 store (ARM NEON) * llama : 
fix type of KQ_mask and KQ_pos * ggml : fix CPU soft_max * tests : add hs=256 * cuda : fix build * metal : improve perf via smaller int registers * cuda : adapt soft_max to F16 mask and pos * CUDA: faster FlashAttention, kernel for bs == 1 * 16 cols for Phi-2 * no vec for hs, no hs==256 ncols==32 for Volta * adjust kernel selection logic * 4 warps, 256 stride for all D * no ncols == 64 * Multiple parallel blocks for batch size 1 * fix compile warnings * fix excessive KQ_b loads * fix cmake build * fix KV cache padding, NaN from INFINITY (#6438) * llama : flash_attn cparam + fix defrag * server: support flash_attn param * server: bench: enable flash_attn param * CUDA: refactor host code, dyn. par. blocks * fix flash_attn_vec_f16 race condition * flush softmax exp below threshold to 0 * store temp KQ in registers * Calculate KQ as FP32 if KQV has GGML_PREC_F32 * Add __hgt2_mask implementation for CUDA 11 * fix KQ FP32 precision fpr parallel_blocks > 1 * llama-bench : add -fa,--flash-attn arg * metal : add BS=1 kernel for flash attention (#6508) * metal : add BS=1 kernel for flash attention (wip) * metal : support more than 1 warps * metal : opts * metal : opt * metal : switch to parallel reduce * metal : reduce registers * metal : simplify * metal : initial FA vec kernel * metal : use F32 attention accumulators * batched-bench : add fattn arg * llama : simplify llama_build_kv_store ggml-ci * llama : adapt build_olmo to changes * ggml : fix arm fp16 store on windows * metal : clean-up * metal : clean-up kernel code * metal : minor * tests : remove benchmarks ggml-ci * ggml : fix avx512 const correctness ggml-ci * ggml : fix soft_max with bias on CPU ggml-ci * common : print --flash-attn in help * ggml : fix num dimensions in ggml_flash_attn_ext * llama : force disable flash attention for incompatible models * ggml : ggml_soft_max support F16/F32 mask/pos ggml-ci * cuda : uint -> uint32_t * cuda : "constexpr dim3" -> "const dim3" ggml-ci * cuda : try to fix __hgt2_mask ggml-ci * ggml : add TODO's for F16/F32 mask/pos support in other backends * llama : replace bool need_kq_pos with use_alibi * llama : prep ALiBi support for BERT models ggml-ci * llama : fix n_batch requirements ggml-ci * cont * server : add help for --flash-attn arg * llama : disable FA for AMD * tests : remove TMP_ATTN_BENCH ggml-ci * llama : support save/load state with FA enabled ggml-ci * ci : add CUDA save-load-state tests ggml-ci * llama : llama_kv_cache_clear zeroes data + fix save-load seq ggml-ci * llama : fix copy-paste errors, add TODO * llama : disallow incompatible states * llama : update llama_state_get_size after v_trans field * metal : remove tmp log * llama : add static reminder for llama_state_get_size * metal : fix max nsg ggml-ci * ci : fix arg order ggml-ci --------- Co-authored-by: Johannes Gäßler Co-authored-by: Pierrick HYMBERT --- ci/run.sh | 8 +- common/common.cpp | 7 + common/common.h | 1 + examples/batched-bench/batched-bench.cpp | 28 +- examples/llama-bench/llama-bench.cpp | 30 +- examples/server/bench/bench.py | 1 + examples/server/server.cpp | 3 + ggml-cuda.cu | 6 + ggml-cuda/common.cuh | 40 +- ggml-cuda/fattn.cu | 944 +++++++++++++++++++++++ ggml-cuda/fattn.cuh | 3 + ggml-cuda/softmax.cu | 46 +- ggml-kompute.cpp | 7 + ggml-metal.m | 547 ++++++++----- ggml-metal.metal | 672 +++++++++++++++- ggml-sycl.cpp | 6 +- ggml-vulkan.cpp | 5 + ggml.c | 375 ++++++++- ggml.h | 20 + llama.cpp | 564 +++++++++----- llama.h | 5 +- tests/test-backend-ops.cpp | 52 +- 22 files changed, 2917 insertions(+), 453 
deletions(-) create mode 100644 ggml-cuda/fattn.cu create mode 100644 ggml-cuda/fattn.cuh diff --git a/ci/run.sh b/ci/run.sh index a1cd9908f65fa..bf21b6b31c52d 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -336,7 +336,8 @@ function gg_run_open_llama_3b_v2 { (time ./bin/imatrix --model ${model_f16} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log - (time ./bin/save-load-state --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/save-load-state --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/save-load-state -fa --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log function check_ppl { qnt="$1" @@ -517,7 +518,10 @@ function gg_run_open_llama_7b_v2 { (time ./bin/imatrix --model ${model_f16} -f ${wiki_test} -t 1 -ngl 999 -c 2048 -b 512 --chunks 4 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log - (time ./bin/save-load-state --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/save-load-state -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/save-load-state -fa -ngl 10 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/save-load-state -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log + (time ./bin/save-load-state -fa -ngl 99 --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log function check_ppl { qnt="$1" diff --git a/common/common.cpp b/common/common.cpp index 099d0356fa4b8..243b88abf1aab 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -947,6 +947,10 @@ bool gpt_params_find_arg(int argc, char ** argv, const std::string & arg, gpt_pa params.cont_batching = true; return true; } + if (arg == "-fa" || arg == "--flash-attn") { + params.flash_attn = true; + return true; + } if (arg == "--color") { params.use_color = true; return true; @@ -1513,6 +1517,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -ns N, --sequences N number of sequences to decode (default: %d)\n", params.n_sequences); printf(" -ps N, --p-split N speculative decoding split probability (default: %.1f)\n", (double)params.p_split); printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); + printf(" -fa, --flash-attn enable Flash Attention (default: %s)\n", params.flash_attn ? "enabled" : "disabled"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA. see examples/llava/README.md\n"); printf(" --image IMAGE_FILE path to an image file. use with multimodal models. Specify multiple times for batching\n"); if (llama_supports_mlock()) { @@ -1885,6 +1890,7 @@ struct llama_context_params llama_context_params_from_gpt_params(const gpt_param cparams.cb_eval = params.cb_eval; cparams.cb_eval_user_data = params.cb_eval_user_data; cparams.offload_kqv = !params.no_kv_offload; + cparams.flash_attn = params.flash_attn; cparams.type_k = kv_cache_type_from_str(params.cache_type_k); cparams.type_v = kv_cache_type_from_str(params.cache_type_v); @@ -2707,6 +2713,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "seed: %u # default: -1 (random seed)\n", params.seed); fprintf(stream, "simple_io: %s # default: false\n", params.simple_io ? "true" : "false"); fprintf(stream, "cont_batching: %s # default: false\n", params.cont_batching ? "true" : "false"); + fprintf(stream, "flash_attn: %s # default: false\n", params.flash_attn ? 
"true" : "false"); fprintf(stream, "temp: %f # default: 0.8\n", sparams.temp); const std::vector tensor_split_vector(params.tensor_split, params.tensor_split + llama_max_devices()); diff --git a/common/common.h b/common/common.h index 8afdf2bdf189b..0618eea7429b1 100644 --- a/common/common.h +++ b/common/common.h @@ -150,6 +150,7 @@ struct gpt_params { bool multiline_input = false; // reverse the usage of `\` bool simple_io = false; // improves compatibility with subprocesses and limited consoles bool cont_batching = true; // insert new sequences for decoding on-the-fly + bool flash_attn = false; // flash attention bool input_prefix_bos = false; // prefix BOS to user inputs, preceding input_prefix bool ignore_eos = false; // ignore generated EOS tokens diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 1e34de620a41b..2924d8116f44f 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -32,7 +32,7 @@ int main(int argc, char ** argv) { gpt_params params; if (argc == 1 || argv[1][0] == '-') { - printf("usage: %s MODEL_PATH [N_KV_MAX] [N_BATCH] [N_UBATCH] [IS_PP_SHARED] [NGL] \n" , argv[0]); + printf("usage: %s MODEL_PATH [N_KV_MAX] [N_BATCH] [N_UBATCH] [FATTN] [IS_PP_SHARED] [NGL] \n" , argv[0]); printf(" , and PL are comma-separated lists of numbers without spaces\n\n"); printf(" example: %s ggml-model-f16.gguf 2048 2048 512 0 999 128,256,512 128,256 1,2,4,8,16,32\n\n", argv[0]); return 1 ; @@ -41,6 +41,7 @@ int main(int argc, char ** argv) { int n_kv_max = 2048; int n_batch = 2048; int n_ubatch = 512; + bool flash_attn = false; int is_pp_shared = 0; int n_gpu_layers = 0; @@ -66,23 +67,27 @@ int main(int argc, char ** argv) { } if (argc >= 6) { - is_pp_shared = std::atoi(argv[5]); + flash_attn = std::atoi(argv[5]); } if (argc >= 7) { - n_gpu_layers = std::atoi(argv[6]); + is_pp_shared = std::atoi(argv[6]); } if (argc >= 8) { - n_pp = parse_list(argv[7]); + n_gpu_layers = std::atoi(argv[7]); } if (argc >= 9) { - n_tg = parse_list(argv[8]); + n_pp = parse_list(argv[8]); } if (argc >= 10) { - n_pl = parse_list(argv[9]); + n_tg = parse_list(argv[9]); + } + + if (argc >= 11) { + n_pl = parse_list(argv[10]); } // init LLM @@ -108,10 +113,11 @@ int main(int argc, char ** argv) { llama_context_params ctx_params = llama_context_default_params(); - ctx_params.seed = 1234; - ctx_params.n_ctx = n_kv_max; - ctx_params.n_batch = n_batch; - ctx_params.n_ubatch = n_ubatch; + ctx_params.seed = 1234; + ctx_params.n_ctx = n_kv_max; + ctx_params.n_batch = n_batch; + ctx_params.n_ubatch = n_ubatch; + ctx_params.flash_attn = flash_attn; ctx_params.n_threads = params.n_threads; ctx_params.n_threads_batch = params.n_threads_batch == -1 ? 
params.n_threads : params.n_threads_batch; @@ -169,7 +175,7 @@ int main(int argc, char ** argv) { } LOG_TEE("\n"); - LOG_TEE("%s: n_kv_max = %d, n_batch = %d, n_ubatch = %d, is_pp_shared = %d, n_gpu_layers = %d, n_threads = %u, n_threads_batch = %u\n", __func__, n_kv_max, n_batch, n_ubatch, is_pp_shared, n_gpu_layers, ctx_params.n_threads, ctx_params.n_threads_batch); + LOG_TEE("%s: n_kv_max = %d, n_batch = %d, n_ubatch = %d, flash_attn = %d, is_pp_shared = %d, n_gpu_layers = %d, n_threads = %u, n_threads_batch = %u\n", __func__, n_kv_max, n_batch, n_ubatch, flash_attn, is_pp_shared, n_gpu_layers, ctx_params.n_threads, ctx_params.n_threads_batch); LOG_TEE("\n"); LOG_TEE("|%6s | %6s | %4s | %6s | %8s | %8s | %8s | %8s | %8s | %8s |\n", "PP", "TG", "B", "N_KV", "T_PP s", "S_PP t/s", "T_TG s", "S_TG t/s", "T s", "S t/s"); diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 8b532c8b6a98a..95c3095dd04da 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -174,6 +174,7 @@ struct cmd_params { std::vector split_mode; std::vector main_gpu; std::vector no_kv_offload; + std::vector flash_attn; std::vector> tensor_split; std::vector use_mmap; std::vector embeddings; @@ -195,6 +196,7 @@ static const cmd_params cmd_params_defaults = { /* split_mode */ {LLAMA_SPLIT_MODE_LAYER}, /* main_gpu */ {0}, /* no_kv_offload */ {false}, + /* flash_attn */ {false}, /* tensor_split */ {std::vector(llama_max_devices(), 0.0f)}, /* use_mmap */ {true}, /* embeddings */ {false}, @@ -220,6 +222,7 @@ static void print_usage(int /* argc */, char ** argv) { printf(" -sm, --split-mode (default: %s)\n", join(transform_to_str(cmd_params_defaults.split_mode, split_mode_str), ",").c_str()); printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); + printf(" -fa, --flash-attn <0|1> (default: %s)\n", join(cmd_params_defaults.flash_attn, ",").c_str()); printf(" -mmp, --mmap <0|1> (default: %s)\n", join(cmd_params_defaults.use_mmap, ",").c_str()); printf(" -embd, --embeddings <0|1> (default: %s)\n", join(cmd_params_defaults.embeddings, ",").c_str()); printf(" -ts, --tensor-split (default: 0)\n"); @@ -393,6 +396,13 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { } auto p = split(argv[i], split_delim); params.no_kv_offload.insert(params.no_kv_offload.end(), p.begin(), p.end()); + } else if (arg == "-fa" || arg == "--flash-attn") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.flash_attn.insert(params.flash_attn.end(), p.begin(), p.end()); } else if (arg == "-mmp" || arg == "--mmap") { if (++i >= argc) { invalid_param = true; @@ -477,6 +487,7 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { if (params.split_mode.empty()) { params.split_mode = cmd_params_defaults.split_mode; } if (params.main_gpu.empty()) { params.main_gpu = cmd_params_defaults.main_gpu; } if (params.no_kv_offload.empty()){ params.no_kv_offload = cmd_params_defaults.no_kv_offload; } + if (params.flash_attn.empty()) { params.flash_attn = cmd_params_defaults.flash_attn; } if (params.tensor_split.empty()) { params.tensor_split = cmd_params_defaults.tensor_split; } if (params.use_mmap.empty()) { params.use_mmap = cmd_params_defaults.use_mmap; } if (params.embeddings.empty()) { params.embeddings = cmd_params_defaults.embeddings; } @@ -498,6 +509,7 @@ struct 
cmd_params_instance { llama_split_mode split_mode; int main_gpu; bool no_kv_offload; + bool flash_attn; std::vector tensor_split; bool use_mmap; bool embeddings; @@ -532,6 +544,7 @@ struct cmd_params_instance { cparams.type_k = type_k; cparams.type_v = type_v; cparams.offload_kqv = !no_kv_offload; + cparams.flash_attn = flash_attn; cparams.embeddings = embeddings; return cparams; @@ -554,6 +567,7 @@ static std::vector get_cmd_params_instances(const cmd_param for (const auto & tk : params.type_k) for (const auto & tv : params.type_v) for (const auto & nkvo : params.no_kv_offload) + for (const auto & fa : params.flash_attn) for (const auto & nt : params.n_threads) { for (const auto & n_prompt : params.n_prompt) { if (n_prompt == 0) { @@ -572,6 +586,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .split_mode = */ sm, /* .main_gpu = */ mg, /* .no_kv_offload= */ nkvo, + /* .flash_attn = */ fa, /* .tensor_split = */ ts, /* .use_mmap = */ mmp, /* .embeddings = */ embd, @@ -596,6 +611,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .split_mode = */ sm, /* .main_gpu = */ mg, /* .no_kv_offload= */ nkvo, + /* .flash_attn = */ fa, /* .tensor_split = */ ts, /* .use_mmap = */ mmp, /* .embeddings = */ embd, @@ -633,6 +649,7 @@ struct test { llama_split_mode split_mode; int main_gpu; bool no_kv_offload; + bool flash_attn; std::vector tensor_split; bool use_mmap; bool embeddings; @@ -657,6 +674,7 @@ struct test { split_mode = inst.split_mode; main_gpu = inst.main_gpu; no_kv_offload = inst.no_kv_offload; + flash_attn = inst.flash_attn; tensor_split = inst.tensor_split; use_mmap = inst.use_mmap; embeddings = inst.embeddings; @@ -731,7 +749,7 @@ struct test { "n_batch", "n_ubatch", "n_threads", "type_k", "type_v", "n_gpu_layers", "split_mode", - "main_gpu", "no_kv_offload", + "main_gpu", "no_kv_offload", "flash_attn", "tensor_split", "use_mmap", "embeddings", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", @@ -753,7 +771,7 @@ struct test { } if (field == "cuda" || field == "opencl" || field == "vulkan" || field == "kompute" || field == "metal" || field == "gpu_blas" || field == "blas" || field == "sycl" ||field == "f16_kv" || field == "no_kv_offload" || - field == "use_mmap" || field == "embeddings") { + field == "flash_attn" || field == "use_mmap" || field == "embeddings") { return BOOL; } if (field == "avg_ts" || field == "stddev_ts") { @@ -787,7 +805,7 @@ struct test { std::to_string(n_batch), std::to_string(n_ubatch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), std::to_string(n_gpu_layers), split_mode_str(split_mode), - std::to_string(main_gpu), std::to_string(no_kv_offload), + std::to_string(main_gpu), std::to_string(no_kv_offload), std::to_string(flash_attn), tensor_split_str, std::to_string(use_mmap), std::to_string(embeddings), std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), @@ -955,6 +973,9 @@ struct markdown_printer : public printer { if (field == "no_kv_offload") { return "nkvo"; } + if (field == "flash_attn") { + return "fa"; + } if (field == "use_mmap") { return "mmap"; } @@ -1001,6 +1022,9 @@ struct markdown_printer : public printer { if (params.no_kv_offload.size() > 1 || params.no_kv_offload != cmd_params_defaults.no_kv_offload) { fields.emplace_back("no_kv_offload"); } + if (params.flash_attn.size() > 1 || params.flash_attn != cmd_params_defaults.flash_attn) { + fields.emplace_back("flash_attn"); + } if (params.tensor_split.size() > 1 || 
params.tensor_split != cmd_params_defaults.tensor_split) { fields.emplace_back("tensor_split"); } diff --git a/examples/server/bench/bench.py b/examples/server/bench/bench.py index 6ca637bddc3a1..86c5de101445c 100644 --- a/examples/server/bench/bench.py +++ b/examples/server/bench/bench.py @@ -268,6 +268,7 @@ def start_server_background(args): server_args.extend(['--defrag-thold', "0.1"]) server_args.append('--cont-batching') server_args.append('--metrics') + server_args.append('--flash-attn') server_args.extend(['--log-format', "text"]) args = [str(arg) for arg in [server_path, *server_args]] print(f"bench: starting server with: {' '.join(args)}") diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 01453af2c97e3..f60530cf3db56 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2377,6 +2377,7 @@ static void server_print_usage(const char * argv0, const gpt_params & params, co printf(" --embeddings enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: enabled)\n"); + printf(" -fa, --flash-attn enable Flash Attention (default: %s)\n", params.flash_attn ? "enabled" : "disabled"); printf(" -spf FNAME, --system-prompt-file FNAME\n"); printf(" set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); printf(" -ctk TYPE, --cache-type-k TYPE\n"); @@ -2742,6 +2743,8 @@ static void server_params_parse(int argc, char ** argv, server_params & sparams, params.embedding = true; } else if (arg == "-cb" || arg == "--cont-batching") { params.cont_batching = true; + } else if (arg == "-fa" || arg == "--flash-attn") { + params.flash_attn = true; } else if (arg == "-np" || arg == "--parallel") { if (++i >= argc) { invalid_param = true; diff --git a/ggml-cuda.cu b/ggml-cuda.cu index d277104d12177..c30554f0cbe37 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -14,6 +14,7 @@ #include "ggml-cuda/cpy.cuh" #include "ggml-cuda/diagmask.cuh" #include "ggml-cuda/dmmv.cuh" +#include "ggml-cuda/fattn.cuh" #include "ggml-cuda/getrows.cuh" #include "ggml-cuda/im2col.cuh" #include "ggml-cuda/mmq.cuh" @@ -140,6 +141,7 @@ static ggml_cuda_device_info ggml_cuda_init() { info.devices[id].cc = 100*prop.major + 10*prop.minor; #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) info.devices[id].smpb = prop.sharedMemPerBlock; + info.devices[id].nsm = prop.multiProcessorCount; } for (int id = 0; id < info.device_count; ++id) { @@ -2290,6 +2292,9 @@ static bool ggml_cuda_compute_forward(ggml_backend_cuda_context & ctx, struct gg case GGML_OP_ARGSORT: ggml_cuda_op_argsort(ctx, dst); break; + case GGML_OP_FLASH_ATTN_EXT: + ggml_cuda_flash_attn_ext(ctx, dst); + break; default: return false; } @@ -2564,6 +2569,7 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons case GGML_OP_ARANGE: case GGML_OP_TIMESTEP_EMBEDDING: case GGML_OP_LEAKY_RELU: + case GGML_OP_FLASH_ATTN_EXT: return true; default: return false; diff --git a/ggml-cuda/common.cuh b/ggml-cuda/common.cuh index 481065b2a3484..156eba6d1ef74 100644 --- a/ggml-cuda/common.cuh +++ b/ggml-cuda/common.cuh @@ -142,6 +142,7 @@ #define CC_PASCAL 600 #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products #define CC_VOLTA 700 +#define CC_AMPERE 800 #define 
CC_OFFSET_AMD 1000000 #define CC_RDNA1 (CC_OFFSET_AMD + 1010) #define CC_RDNA2 (CC_OFFSET_AMD + 1030) @@ -271,7 +272,6 @@ static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { return a; } -#ifdef GGML_CUDA_F16 static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { #if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL #pragma unroll @@ -284,7 +284,6 @@ static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { NO_DEVICE_CODE; #endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL } -#endif // GGML_CUDA_F16 static __device__ __forceinline__ float warp_reduce_max(float x) { #pragma unroll @@ -294,19 +293,26 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { return x; } -//static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { -//#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX -//#pragma unroll -// for (int mask = 16; mask > 0; mask >>= 1) { -// x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); -// } -// return x; -//#else -// GGML_UNUSED(x); -// NO_DEVICE_CODE; -//#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX -//} +static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); + } + return x; +#else + GGML_UNUSED(x); + NO_DEVICE_CODE; +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX +} +#if CUDART_VERSION < 12000 +static __device__ __forceinline__ uint32_t __hgt2_mask(const half2 a, const half2 b) { + const uint32_t mask_low = 0x0000FFFF * (float( __low2half(a)) > float( __low2half(b))); + const uint32_t mask_high = 0xFFFF0000 * (float(__high2half(a)) > float(__high2half(b))); + return mask_low | mask_high; +} +#endif // CUDART_VERSION < 12000 #if defined(GGML_USE_HIPBLAS) #define __CUDA_ARCH__ 1300 @@ -391,6 +397,11 @@ static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { } #endif // defined(GGML_USE_HIPBLAS) +#define FP16_AVAILABLE defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) ? \ + defined(RDNA1) || defined(RDNA2) || defined(RDNA3) : __CUDA_ARCH__ >= CC_PASCAL + +#define FP16_MMA_AVAILABLE !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_VOLTA + // TODO: move to ggml-common.h static const __device__ int8_t kvalues_iq4nl[16] = {-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113}; @@ -404,6 +415,7 @@ struct ggml_cuda_device_info { struct cuda_device_info { int cc; // compute capability + int nsm; // number of streaming multiprocessors size_t smpb; // max. shared memory per block bool vmm; // virtual memory support size_t vmm_granularity; // granularity of virtual memory diff --git a/ggml-cuda/fattn.cu b/ggml-cuda/fattn.cu new file mode 100644 index 0000000000000..df1e80068b334 --- /dev/null +++ b/ggml-cuda/fattn.cu @@ -0,0 +1,944 @@ +#include "common.cuh" +#include "fattn.cuh" + +#include + +#if FP16_MMA_AVAILABLE +#include +#endif + +#define FATTN_KQ_STRIDE 256 +#define HALF_MAX_HALF __float2half(65504.0f/2) // Use neg. 
of this instead of -INFINITY to initialize KQ max vals to avoid NaN upon subtraction. +#define SOFTMAX_FTZ_THRESHOLD -20.0f // Softmax exp. of values smaller than this are flushed to zero to avoid NaNs. + +template // D == head size +__launch_bounds__(((D + WARP_SIZE - 1) / WARP_SIZE)*WARP_SIZE, 1) +static __global__ void flash_attn_vec_ext_f16( + const char * __restrict__ Q, + const char * __restrict__ K, + const char * __restrict__ V, + const char * __restrict__ mask, + float * __restrict__ dst, + float2 * __restrict__ dst_meta, + const float scale, + const int ne00, + const int ne01, + const int ne02, + const int ne03, + const int ne10, + const int ne11, + const int ne12, + const int ne13, + const int ne31, + const int nb31, + const int nb01, + const int nb02, + const int nb03, + const int nb11, + const int nb12, + const int nb13, + const int ne0, + const int ne1, + const int ne2, + const int ne3) { +#if FP16_AVAILABLE + //In this kernel Q, K, V are matrices while i, j, k are matrix indices. + + const int ic = blockIdx.x / parallel_blocks; // Index of the Q/QKV column to work on. + const int ip = blockIdx.x % parallel_blocks; // Index in group of blocks running for the same column in parallel. + + const int gqa_ratio = ne02 / ne12; // With grouped query attention there are > 1 Q matrices per K, V matrix. + const float2 * Q_f2 = (const float2 *) (Q + nb02* blockIdx.y + nb01*ic); + const half2 * K_h2 = (const half2 *) (K + nb12*(blockIdx.y / gqa_ratio)); + const half * V_h = (const half *) (V + nb12*(blockIdx.y / gqa_ratio)); // K and V have same shape + const half * maskh = (const half *) mask + ne11*ic; + + const int stride_KV = nb11 / sizeof(half); + const int stride_KV2 = nb11 / sizeof(half2); + + constexpr int nwarps = (D + WARP_SIZE - 1) / WARP_SIZE; + const int tid = WARP_SIZE*threadIdx.y + threadIdx.x; + __builtin_assume(tid < nwarps*WARP_SIZE); + + __shared__ half KQ[nwarps*WARP_SIZE]; + KQ[tid] = -INFINITY; + half2 * KQ2 = (half2 *) KQ; + + half kqmax = -HALF_MAX_HALF; + half kqsum = 0.0f; + + __shared__ half kqmax_shared[WARP_SIZE]; + __shared__ half kqsum_shared[WARP_SIZE]; + if (threadIdx.y == 0) { + kqmax_shared[threadIdx.x] = -HALF_MAX_HALF; + kqsum_shared[threadIdx.x] = 0.0f; + } + __syncthreads(); + + // Convert Q to half2 and store in registers: + half2 Q_h2[(D/2 + WARP_SIZE - 1) / WARP_SIZE]; +#pragma unroll + for (int i0 = 0; i0 < D/2; i0 += WARP_SIZE) { + const int i = i0 + threadIdx.x; + if (i0 + WARP_SIZE > D/2 && i >= D/2) { + break; + } + + Q_h2[i0/WARP_SIZE] = make_half2(scale, scale) * make_half2(Q_f2[i].x, Q_f2[i].y); + } + + half2 VKQ = make_half2(0.0f, 0.0f); // Each thread calculates a single VKQ value. + + const int k_start = parallel_blocks == 1 ? 
0 : ip*D; + for (int k_VKQ_0 = k_start; k_VKQ_0 < ne11; k_VKQ_0 += parallel_blocks*D) { + // Calculate KQ tile and keep track of new maximum KQ values: + half kqmax_new = kqmax; +#pragma unroll + for (int i_KQ_0 = 0; i_KQ_0 < D; i_KQ_0 += nwarps) { + const int i_KQ = i_KQ_0 + threadIdx.y; + + if ((i_KQ_0 + nwarps > D && i_KQ >= D) || (FATTN_KQ_STRIDE % D != 0 && k_VKQ_0 + i_KQ >= ne11)) { + break; + } + + half2 sum2 = make_half2(0.0f, 0.0f); +#pragma unroll + for (int k_KQ_0 = 0; k_KQ_0 < D/2; k_KQ_0 += WARP_SIZE) { + const int k_KQ = k_KQ_0 + threadIdx.x; + if (k_KQ_0 + WARP_SIZE > D/2 && k_KQ >= D/2) { + break; + } + + const half2 K_ik = K_h2[(k_VKQ_0 + i_KQ)*stride_KV2 + k_KQ]; + sum2 += K_ik * Q_h2[k_KQ_0/WARP_SIZE]; + } + + sum2 = warp_reduce_sum(sum2); + half sum = __low2half(sum2) + __high2half(sum2); + sum += mask ? maskh[k_VKQ_0 + i_KQ] : __float2half(0.0f); + kqmax_new = __hmax(kqmax_new, sum); + if (threadIdx.x == 0) { + KQ[i_KQ] = sum; + } + } + + kqmax_new = warp_reduce_max(kqmax_new); + if (threadIdx.x == 0) { + kqmax_shared[threadIdx.y] = kqmax_new; + } + __syncthreads(); + kqmax_new = kqmax_shared[threadIdx.x]; + kqmax_new = warp_reduce_max(kqmax_new); + + const half KQ_max_scale = hexp(kqmax - kqmax_new); + kqmax = kqmax_new; + + const half val = hexp(KQ[tid] - kqmax); + kqsum = kqsum*KQ_max_scale + val; + KQ[tid] = val; + + VKQ *= __half2half2(KQ_max_scale); + + __syncthreads(); + + if (tid < D) { +#pragma unroll + for (int k0 = 0; k0 < D; k0 += 2) { + if (FATTN_KQ_STRIDE % D != 0 && k_VKQ_0 + k0 >= ne11) { + break; + } + + half2 V_k; + reinterpret_cast(V_k.x) = V_h[(k_VKQ_0 + k0 + 0)*stride_KV + tid]; + reinterpret_cast(V_k.y) = V_h[(k_VKQ_0 + k0 + 1)*stride_KV + tid]; + VKQ += V_k*KQ2[k0/2]; + } + } + + __syncthreads(); + } + + if (tid >= D) { + kqsum = 0.0f; + } + + kqsum = warp_reduce_sum(kqsum); + if (threadIdx.x == 0) { + kqsum_shared[threadIdx.y] = kqsum; + } + __syncthreads(); + kqsum = kqsum_shared[threadIdx.x]; + kqsum = warp_reduce_sum(kqsum); + + if (tid >= D) { + return; + } + + half dst_val = (__low2half(VKQ) + __high2half(VKQ)); + if (parallel_blocks == 1) { + dst_val /= kqsum; + } + dst[D*gridDim.y*blockIdx.x + D*blockIdx.y + tid] = dst_val; + + if (parallel_blocks == 1 || tid != 0) { + return; + } + dst_meta[ic*gridDim.y*parallel_blocks + blockIdx.y*parallel_blocks + ip] = make_float2(kqmax, kqsum); +#else + NO_DEVICE_CODE; +#endif // FP16_AVAILABLE +} + +// D == head size, VKQ_stride == num VKQ rows calculated in parallel: +template +__launch_bounds__(nwarps*WARP_SIZE, 1) +static __global__ void flash_attn_ext_f16( + const char * __restrict__ Q, + const char * __restrict__ K, + const char * __restrict__ V, + const char * __restrict__ mask, + float * __restrict__ dst, + float2 * __restrict__ dst_meta, + const float scale, + const int ne00, + const int ne01, + const int ne02, + const int ne03, + const int ne10, + const int ne11, + const int ne12, + const int ne13, + const int ne31, + const int nb31, + const int nb01, + const int nb02, + const int nb03, + const int nb11, + const int nb12, + const int nb13, + const int ne0, + const int ne1, + const int ne2, + const int ne3) { +#if FP16_MMA_AVAILABLE + //In this kernel Q, K, V are matrices while i, j, k are matrix indices. + + const int ic0 = ncols*(blockIdx.x / parallel_blocks); // Index of the first Q/QKV column to work on. + const int ip = blockIdx.x % parallel_blocks; // Index in group of blocks running for the same column in parallel. 
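    // For context (sketch, not part of this diff): with parallel_blocks > 1, several
    // blocks process disjoint FATTN_KQ_STRIDE-sized slices of the KV sequence for the
    // same Q columns. Each block writes its partial VKQ accumulator to dst and its
    // softmax statistics (running max, running sum) to dst_meta, and
    // flash_attn_combine_results then merges the partials with the usual rescaling:
    //
    //     kqmax = max_l meta[l].x
    //     out   = sum_l exp(meta[l].x - kqmax) * VKQ_parts[l]
    //           / sum_l exp(meta[l].x - kqmax) * meta[l].y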
+ + static_assert(D <= FATTN_KQ_STRIDE, "D must be <= FATTN_KQ_STRIDE."); + static_assert(ncols == 8 || ncols % 16 == 0, "ncols must be 8 or a multiple of 16."); + constexpr int frag_m = ncols == 8 ? 32 : 16; + constexpr int frag_n = ncols == 8 ? 8 : 16; + static_assert(D % frag_m == 0, "If ncols == 8 then D % frag_m must be 0."); + typedef nvcuda::wmma::fragment frag_a_K; + typedef nvcuda::wmma::fragment frag_a_V; + typedef nvcuda::wmma::fragment frag_b; + typedef nvcuda::wmma::fragment frag_c_KQ; + typedef nvcuda::wmma::fragment frag_c_VKQ; + + constexpr int KQ_stride_tc = nwarps*frag_m; // Number of KQ rows calculated in parallel. + constexpr int VKQ_ratio = KQ_stride_tc/VKQ_stride; // Number of parallel VKQ accumulators needed to keep all warps busy. + static_assert(VKQ_ratio <= nwarps, "VKQ_ratio must be <= nwarps."); + + // Pad internal representation of KQ, KQV to reduce shared memory bank conflicts: + constexpr int D_padded = D + 8; + constexpr int kqs_padded = FATTN_KQ_STRIDE + 8; + constexpr int kqar = sizeof(KQ_acc_t)/sizeof(half); + + const int gqa_ratio = ne02 / ne12; // With grouped query attention there are > 1 Q matrices per K, V matrix. + const float * Q_f = (const float *) (Q + nb02* blockIdx.y + nb01*ic0); + const half * K_h = (const half *) (K + nb12*(blockIdx.y / gqa_ratio)); + const half * V_h = (const half *) (V + nb12*(blockIdx.y / gqa_ratio)); // K and V have same shape + const half * maskh = (const half *) mask + (nb31/sizeof(half))* ic0; + const half2 * mask2 = (const half2 *) mask + (nb31/sizeof(half))*(ic0/2); + + const int stride_Q = nb01 / sizeof(float); + const int stride_KV = nb11 / sizeof(half); + + frag_b Q_b[D/16][ncols/frag_n]; + + // A single buffer for temporarily holding tiles of KQ and VKQ parts: + constexpr int mem_KQ = ncols*kqs_padded*kqar; + constexpr int mem_VKQ_parts = VKQ_ratio*ncols*D_padded; + __shared__ half KQ[mem_KQ >= mem_VKQ_parts ? mem_KQ : mem_VKQ_parts]; + float * KQ_f = (float *) KQ; + half2 * KQ2 = (half2 *) KQ; + + float KQ_rowsum_f[ncols/nwarps] = {0.0f}; + float KQ_max_f[ncols/nwarps]; + float KQ_max_scale_f[ncols/nwarps] = {0.0f}; + +#pragma unroll + for (int j = 0; j < ncols/nwarps; ++j) { + KQ_max_f[j] = -FLT_MAX/2.0f; + } + + half2 KQ_rowsum_h2[ncols/nwarps] = {{0.0f, 0.0f}}; + half2 KQ_max_h2[ncols/nwarps]; + half2 KQ_max_scale_h2[ncols/nwarps] = {{0.0f, 0.0f}}; + +#pragma unroll + for (int j = 0; j < ncols/nwarps; ++j) { + KQ_max_h2[j] = make_half2(-HALF_MAX_HALF, -HALF_MAX_HALF); + } + + __shared__ half VKQ[ncols*D_padded]; // Accumulator for final VKQ slice. + half2 * VKQ2 = (half2 *) VKQ; +#pragma unroll + for (int j0 = 0; j0 < ncols; j0 += nwarps) { + const int j = j0 + threadIdx.y; +#pragma unroll + for (int i0 = 0; i0 < D/2; i0 += WARP_SIZE) { + const int i = i0 + threadIdx.x; + if (i0 + WARP_SIZE > D/2 && i >= D/2) { + break; + } + VKQ2[j*(D_padded/2) + i] = make_half2(0.0f, 0.0f); + } + } + + // Convert Q to half and apply scale, temporarily store in KQ: +#pragma unroll + for (int j0 = 0; j0 < ncols; j0 += nwarps) { + const int j = j0 + threadIdx.y; +#pragma unroll + for (int i0 = 0; i0 < D; i0 += WARP_SIZE) { + const int i = i0 + threadIdx.x; + if (i0 + WARP_SIZE > D && i >= D) { + break; + } + KQ[j*D_padded + i] = ic0 + j < ne01 ? 
Q_f[j*stride_Q + i] * scale : 0.0f; + } + } + + __syncthreads(); + + // Load Q into tensor core fragments/registers since it will be used frequently: +#pragma unroll + for (int i0 = 0; i0 < D; i0 += 16) { +#pragma unroll + for (int j0 = 0; j0 < ncols; j0 += frag_n) { + nvcuda::wmma::load_matrix_sync(Q_b[i0/16][j0/frag_n], KQ + j0*D_padded + i0, D_padded); + } + } + + __syncthreads(); + + // Iterate over ne11 == previous tokens: + for (int k_VKQ_0 = ip*FATTN_KQ_STRIDE; k_VKQ_0 < ne11; k_VKQ_0 += parallel_blocks*FATTN_KQ_STRIDE) { + // Calculate tile of KQ: +#pragma unroll + for (int i_KQ_0 = 0; i_KQ_0 < FATTN_KQ_STRIDE; i_KQ_0 += KQ_stride_tc) { + frag_c_KQ KQ_c[ncols/frag_n]; +#pragma unroll + for (int j = 0; j < ncols/frag_n; ++j) { + nvcuda::wmma::fill_fragment(KQ_c[j], 0.0f); + } +#pragma unroll + for (int k_KQ_0 = 0; k_KQ_0 < D; k_KQ_0 += 16) { + frag_a_K K_a; + nvcuda::wmma::load_matrix_sync(K_a, K_h + (k_VKQ_0 + i_KQ_0 + frag_m*threadIdx.y)*stride_KV + k_KQ_0, stride_KV); +#pragma unroll + for (int j = 0; j < ncols/frag_n; ++j) { + nvcuda::wmma::mma_sync(KQ_c[j], K_a, Q_b[k_KQ_0/16][j], KQ_c[j]); + } + } +#pragma unroll + for (int j0 = 0; j0 < ncols; j0 += frag_n) { + nvcuda::wmma::store_matrix_sync((KQ_acc_t *) KQ + j0*kqs_padded + i_KQ_0 + frag_m*threadIdx.y, KQ_c[j0/frag_n], kqs_padded, nvcuda::wmma::mem_col_major); + } + } + + __syncthreads(); + + // Calculate softmax for each KQ column using the current max. value. + // The divisor is stored in KQ_rowsum and will be applied at the end. +#pragma unroll + for (int j0 = 0; j0 < ncols; j0 += nwarps) { + const int j = j0 + threadIdx.y; + + if (std::is_same::value) { + float KQ_f_tmp[FATTN_KQ_STRIDE / WARP_SIZE]; +#pragma unroll + for (int k0 = 0; k0 < FATTN_KQ_STRIDE; k0 += WARP_SIZE) { + const int k = k0 + threadIdx.x; + + KQ_f_tmp[k0/WARP_SIZE] = KQ_f[j*kqs_padded + k]; + } + + float KQ_max_new = KQ_max_f[j0/nwarps]; +#pragma unroll + for (int k0 = 0; k0 < FATTN_KQ_STRIDE; k0 += WARP_SIZE) { + const int k = k0 + threadIdx.x; + + KQ_f_tmp[k0/WARP_SIZE] += mask ? __half2float(maskh[j*(nb31/sizeof(half)) + k_VKQ_0 + k]) : 0.0f; + KQ_max_new = max(KQ_max_new, KQ_f_tmp[k0/WARP_SIZE]); + } + KQ_max_new = warp_reduce_max(KQ_max_new); + + const float diff = KQ_max_f[j0/nwarps] - KQ_max_new; + KQ_max_scale_f[j0/nwarps] = expf(diff); + if (diff <= SOFTMAX_FTZ_THRESHOLD) { + KQ_max_scale_f[j0/nwarps] = 0.0f; + } + KQ_max_f[j0/nwarps] = KQ_max_new; + + float KQ_rowsum_add = 0.0f; +#pragma unroll + for (int k0 = 0; k0 < FATTN_KQ_STRIDE; k0 += WARP_SIZE) { + const int k = k0 + threadIdx.x; + + const float diff = KQ_f_tmp[k0/WARP_SIZE] - KQ_max_f[j0/nwarps]; + KQ_f_tmp[k0/WARP_SIZE] = expf(diff); + if (diff <= SOFTMAX_FTZ_THRESHOLD) { + KQ_f_tmp[k0/WARP_SIZE] = 0.0f; + } + KQ_rowsum_add += KQ_f_tmp[k0/WARP_SIZE]; + KQ[j*(kqar*kqs_padded) + k] = KQ_f_tmp[k0/WARP_SIZE]; + } + KQ_rowsum_add = warp_reduce_sum(KQ_rowsum_add); + + // Scale previous KQ_rowsum to account for a potential increase in KQ_max: + KQ_rowsum_f[j0/nwarps] = KQ_max_scale_f[j0/nwarps]*KQ_rowsum_f[j0/nwarps] + KQ_rowsum_add; + } else { + half2 KQ2_tmp[FATTN_KQ_STRIDE/(2*WARP_SIZE)]; +#pragma unroll + for (int k0 = 0; k0 < FATTN_KQ_STRIDE/2; k0 += WARP_SIZE) { + const int k = k0 + threadIdx.x; + + KQ2_tmp[k0/WARP_SIZE] = KQ2[j*(kqs_padded/2) + k]; + } + + half2 KQ_max_new = KQ_max_h2[j0/nwarps]; +#pragma unroll + for (int k0 = 0; k0 < FATTN_KQ_STRIDE/2; k0 += WARP_SIZE) { + const int k = k0 + threadIdx.x; + + KQ2_tmp[k0/WARP_SIZE] += mask ? 
mask2[(j*ne11 + k_VKQ_0)/2 + k] : make_half2(0.0f, 0.0f); + KQ_max_new = __hmax2(KQ_max_new, KQ2_tmp[k0/WARP_SIZE]); + } + KQ_max_new = __half2half2(warp_reduce_max(__hmax(__low2half(KQ_max_new), __high2half(KQ_max_new)))); + const half2 diff = KQ_max_h2[j0/nwarps] - KQ_max_new; + KQ_max_scale_h2[j0/nwarps] = h2exp(diff); + const uint32_t ftz_mask = __hgt2_mask(diff, make_half2(SOFTMAX_FTZ_THRESHOLD, SOFTMAX_FTZ_THRESHOLD)); + *((uint32_t *) &KQ_max_scale_h2[j0/nwarps]) &= ftz_mask; + KQ_max_h2[j0/nwarps] = KQ_max_new; + + half2 KQ_rowsum_add = make_half2(0.0f, 0.0f); +#pragma unroll + for (int k0 = 0; k0 < FATTN_KQ_STRIDE/2; k0 += WARP_SIZE) { + const int k = k0 + threadIdx.x; + + const half2 diff = KQ2_tmp[k0/WARP_SIZE] - KQ_max_h2[j0/nwarps]; + KQ2_tmp[k0/WARP_SIZE] = h2exp(diff); + const uint32_t ftz_mask = __hgt2_mask(diff, make_half2(SOFTMAX_FTZ_THRESHOLD, SOFTMAX_FTZ_THRESHOLD)); + *((uint32_t *) &KQ2_tmp[k0/WARP_SIZE]) &= ftz_mask; + KQ_rowsum_add += KQ2_tmp[k0/WARP_SIZE]; + KQ2[j*(kqs_padded/2) + k] = KQ2_tmp[k0/WARP_SIZE]; + } + KQ_rowsum_add = warp_reduce_sum(KQ_rowsum_add); + + // Scale previous KQ_rowsum to account for a potential increase in KQ_max: + KQ_rowsum_h2[j0/nwarps] = KQ_max_scale_h2[j0/nwarps]*KQ_rowsum_h2[j0/nwarps] + KQ_rowsum_add; + } + } + + __syncthreads(); + + frag_b KQ_b[FATTN_KQ_STRIDE/(VKQ_ratio*16)][ncols/frag_n]; +#pragma unroll + for (int j0 = 0; j0 < ncols; j0 += frag_n) { +#pragma unroll + for (int k0 = 0; k0 < FATTN_KQ_STRIDE; k0 += VKQ_ratio*16) { + const int k = k0 + (threadIdx.y % VKQ_ratio)*16; + nvcuda::wmma::load_matrix_sync( + KQ_b[k0/(VKQ_ratio*16)][j0/frag_n], + KQ + j0*(kqar*kqs_padded) + k, + kqar*kqs_padded); + } + } + + frag_c_VKQ VKQ_c[D/VKQ_stride][ncols/frag_n]; +#pragma unroll + for (int i_VKQ_0 = 0; i_VKQ_0 < D; i_VKQ_0 += VKQ_stride) { +#pragma unroll + for (int j = 0; j < ncols/frag_n; ++j) { + nvcuda::wmma::fill_fragment(VKQ_c[i_VKQ_0/VKQ_stride][j], 0.0f); + } + +#pragma unroll + for (int k0 = 0; k0 < FATTN_KQ_STRIDE; k0 += VKQ_ratio*16) { + const int k = k0 + (threadIdx.y % VKQ_ratio)*16; + + frag_a_V v_a; + nvcuda::wmma::load_matrix_sync(v_a, V_h + (k_VKQ_0 + k)*stride_KV + i_VKQ_0 + frag_m*(threadIdx.y/VKQ_ratio), stride_KV); +#pragma unroll + for (int j = 0; j < ncols/frag_n; ++j) { + nvcuda::wmma::mma_sync(VKQ_c[i_VKQ_0/VKQ_stride][j], v_a, KQ_b[k0/(VKQ_ratio*16)][j], VKQ_c[i_VKQ_0/VKQ_stride][j]); + } + } + } + + __syncthreads(); + + const int offset_k = (threadIdx.y % VKQ_ratio) * (ncols*D_padded); +#pragma unroll + for (int i_KQ_0 = 0; i_KQ_0 < D; i_KQ_0 += VKQ_stride) { +#pragma unroll + for (int j0 = 0; j0 < ncols; j0 += frag_n) { + nvcuda::wmma::store_matrix_sync( + KQ + offset_k + j0*D_padded + i_KQ_0 + frag_m*(threadIdx.y/VKQ_ratio), + VKQ_c[i_KQ_0/VKQ_stride][j0/frag_n], + D_padded, nvcuda::wmma::mem_col_major); + } + } + + __syncthreads(); + +#pragma unroll + for (int j0 = 0; j0 < ncols; j0 += nwarps) { + const int j = j0 + threadIdx.y; + + half2 VKQ_scale; + if (std::is_same::value) { + VKQ_scale = make_half2(KQ_max_scale_f[j0/nwarps], KQ_max_scale_f[j0/nwarps]); + } else { + VKQ_scale = KQ_max_scale_h2[j0/nwarps]; + } + +#pragma unroll + for (int i0 = 0; i0 < D/2; i0 += WARP_SIZE) { + const int i = i0 + threadIdx.x; + if (i0 + WARP_SIZE > D/2 && i >= D/2) { + break; + } + + half2 VKQ_add = make_half2(0.0f, 0.0f); +#pragma unroll + for (int l = 0; l < VKQ_ratio; ++l) { + VKQ_add += KQ2[l*(ncols*D_padded/2) + j*(D_padded/2) + i]; + } + VKQ2[j*(D_padded/2) + i] = VKQ_scale*VKQ2[j*(D_padded/2) + i] + VKQ_add; + } + } 
+ + __syncthreads(); + } + +#pragma unroll + for (int j0 = 0; j0 < ncols; j0 += nwarps) { + const int j_VKQ = j0 + threadIdx.y; + if (ic0 + j_VKQ >= ne01) { + return; + } + const int j_dst = (ic0 + j_VKQ)*parallel_blocks + ip; + + float KQ_rowsum_j; + if (std::is_same::value) { + KQ_rowsum_j = KQ_rowsum_f[j0/nwarps]; + } else { + KQ_rowsum_j = __low2float(KQ_rowsum_h2[j0/nwarps]) + __high2float(KQ_rowsum_h2[j0/nwarps]); + } + +#pragma unroll + for (int i0 = 0; i0 < D; i0 += WARP_SIZE) { + const int i = i0 + threadIdx.x; + if (i0 + WARP_SIZE > D && i >= D) { + break; + } + float dst_val = VKQ[j_VKQ*D_padded + i]; + if (parallel_blocks == 1) { + dst_val /= KQ_rowsum_j; + } + dst[j_dst*gridDim.y*D + blockIdx.y*D + i] = dst_val; + } + + if (parallel_blocks == 1 || threadIdx.x != 0) { + continue; + } + + float2 dst_meta_val; + if (std::is_same::value) { + dst_meta_val.x = KQ_max_f[j0/nwarps]; + } else { + dst_meta_val.x = __low2float(KQ_max_h2[j0/nwarps]); + } + dst_meta_val.y = KQ_rowsum_j; + dst_meta[(ic0 + j_VKQ)*gridDim.y*parallel_blocks + blockIdx.y*parallel_blocks + ip] = dst_meta_val; + } +#else + NO_DEVICE_CODE; +#endif // FP16_MMA_AVAILABLE +} + +template // D == head size +__launch_bounds__(D, 1) +static __global__ void flash_attn_combine_results( + const float * __restrict__ VKQ_parts, + const float2 * __restrict__ VKQ_meta, + float * __restrict__ dst) { +#if FP16_AVAILABLE + VKQ_parts += parallel_blocks*D * gridDim.y*blockIdx.x; + VKQ_meta += parallel_blocks * gridDim.y*blockIdx.x; + dst += D * gridDim.y*blockIdx.x; + + const int tid = threadIdx.x; + __builtin_assume(tid < D); + + __shared__ float2 meta[parallel_blocks]; + if (tid < 2*parallel_blocks) { + ((float *) meta)[threadIdx.x] = ((const float *)VKQ_meta) [blockIdx.y*(2*parallel_blocks) + tid]; + } + + __syncthreads(); + + float kqmax = meta[0].x; +#pragma unroll + for (int l = 1; l < parallel_blocks; ++l) { + kqmax = max(kqmax, meta[l].x); + } + + float VKQ_numerator = 0.0f; + float VKQ_denominator = 0.0f; +#pragma unroll + for (int l = 0; l < parallel_blocks; ++l) { + const float diff = meta[l].x - kqmax; + const float KQ_max_scale = expf(diff); + const uint32_t ftz_mask = 0xFFFFFFFF * (diff > SOFTMAX_FTZ_THRESHOLD); + *((uint32_t *) &KQ_max_scale) &= ftz_mask; + + VKQ_numerator += KQ_max_scale * VKQ_parts[l*gridDim.y*D + blockIdx.y*D + tid]; + VKQ_denominator += KQ_max_scale * meta[l].y; + } + + dst[blockIdx.y*D + tid] = VKQ_numerator / VKQ_denominator; +#else + NO_DEVICE_CODE; +#endif // FP16_AVAILABLE +} + +constexpr int get_max_power_of_2(int x) { + return x % 2 == 0 ? 2*get_max_power_of_2(x/2) : 1; +} + +static_assert(get_max_power_of_2(1) == 1, "Test failed."); +static_assert(get_max_power_of_2(2) == 2, "Test failed."); +static_assert(get_max_power_of_2(4) == 4, "Test failed."); +static_assert(get_max_power_of_2(6) == 2, "Test failed."); + +// Number of VKQ rows calculated in parallel: +constexpr int get_VKQ_stride(int D, int nwarps, int frag_m) { + return (get_max_power_of_2(D/frag_m) < nwarps ? 
get_max_power_of_2(D/frag_m) : nwarps)*frag_m; +} + +static_assert(get_VKQ_stride(128, 1, 32) == 32, "Test failed."); +static_assert(get_VKQ_stride(128, 2, 32) == 64, "Test failed."); +static_assert(get_VKQ_stride(128, 4, 32) == 128, "Test failed."); +static_assert(get_VKQ_stride( 64, 1, 32) == 32, "Test failed."); +static_assert(get_VKQ_stride( 64, 2, 32) == 64, "Test failed."); +static_assert(get_VKQ_stride( 64, 4, 32) == 64, "Test failed."); +static_assert(get_VKQ_stride( 80, 1, 16) == 16, "Test failed."); +static_assert(get_VKQ_stride( 80, 2, 16) == 16, "Test failed."); +static_assert(get_VKQ_stride( 80, 4, 16) == 16, "Test failed."); + +template void launch_fattn_vec_f16( + const ggml_tensor * Q, const ggml_tensor * K, const ggml_tensor * V, ggml_tensor * KQV, const ggml_tensor * mask, + ggml_cuda_pool & pool, cudaStream_t main_stream +) { + ggml_cuda_pool_alloc dst_tmp(pool); + ggml_cuda_pool_alloc dst_tmp_meta(pool); + + if (parallel_blocks > 1) { + dst_tmp.alloc(parallel_blocks*ggml_nelements(KQV)); + dst_tmp_meta.alloc(parallel_blocks*ggml_nrows(KQV)); + } + + constexpr int nwarps = (D + WARP_SIZE - 1) / WARP_SIZE; + const dim3 block_dim(WARP_SIZE, nwarps, 1); + const dim3 blocks_num(parallel_blocks*Q->ne[1], Q->ne[2], Q->ne[3]); + const int shmem = 0; + + float scale; + memcpy(&scale, KQV->op_params, sizeof(float)); + + flash_attn_vec_ext_f16 + <<>> ( + (const char *) Q->data, + (const char *) K->data, + (const char *) V->data, + mask ? ((const char *) mask->data) : nullptr, + parallel_blocks == 1 ? (float *) KQV->data : dst_tmp.ptr, dst_tmp_meta.ptr, + scale, + Q->ne[0], Q->ne[1], Q->ne[2], Q->ne[3], + K->ne[0], K->ne[1], K->ne[2], K->ne[3], + mask ? mask->ne[1] : 0, mask ? mask->nb[1] : 0, + Q->nb[1], Q->nb[2], Q->nb[3], + K->nb[1], K->nb[2], K->nb[3], + KQV->ne[0], KQV->ne[1], KQV->ne[2], KQV->ne[3] + ); + CUDA_CHECK(cudaGetLastError()); + + if (parallel_blocks == 1) { + return; + } + + const dim3 block_dim_combine(D, 1, 1); + const dim3 blocks_num_combine(Q->ne[1], blocks_num.y, blocks_num.z); + const int shmem_combine = 0; + + flash_attn_combine_results + <<>> + (dst_tmp.ptr, dst_tmp_meta.ptr, (float *) KQV->data); + CUDA_CHECK(cudaGetLastError()); +} + +template void launch_fattn_f16_impl( + const ggml_tensor * Q, const ggml_tensor * K, const ggml_tensor * V, ggml_tensor * KQV, const ggml_tensor * mask, + ggml_cuda_pool & pool, cudaStream_t main_stream +) { + ggml_cuda_pool_alloc dst_tmp(pool); + ggml_cuda_pool_alloc dst_tmp_meta(pool); + + if (parallel_blocks > 1) { + dst_tmp.alloc(parallel_blocks*ggml_nelements(KQV)); + dst_tmp_meta.alloc(parallel_blocks*ggml_nrows(KQV)); + } + + constexpr int frag_m = (cols_per_block) == 8 && (D) % 32 == 0 ? 32 : 16; + const dim3 block_dim(WARP_SIZE, nwarps, 1); + const dim3 blocks_num(parallel_blocks*(Q->ne[1] + cols_per_block - 1) / cols_per_block, Q->ne[2], Q->ne[3]); + const int shmem = 0; + + float scale; + memcpy(&scale, KQV->op_params, sizeof(float)); + + flash_attn_ext_f16 + <<>> ( + (const char *) Q->data, + (const char *) K->data, + (const char *) V->data, + mask ? ((const char *) mask->data) : nullptr, + (parallel_blocks) == 1 ? (float *) KQV->data : dst_tmp.ptr, dst_tmp_meta.ptr, + scale, + Q->ne[0], Q->ne[1], Q->ne[2], Q->ne[3], + K->ne[0], K->ne[1], K->ne[2], K->ne[3], + mask ? mask->ne[1] : 0, mask ? 
+template <int D, int cols_per_block, int nwarps, int parallel_blocks, typename KQ_acc_t> void launch_fattn_f16_impl(
+        const ggml_tensor * Q, const ggml_tensor * K, const ggml_tensor * V, ggml_tensor * KQV, const ggml_tensor * mask,
+        ggml_cuda_pool & pool, cudaStream_t main_stream
+) {
+    ggml_cuda_pool_alloc<float>  dst_tmp(pool);
+    ggml_cuda_pool_alloc<float2> dst_tmp_meta(pool);
+
+    if (parallel_blocks > 1) {
+        dst_tmp.alloc(parallel_blocks*ggml_nelements(KQV));
+        dst_tmp_meta.alloc(parallel_blocks*ggml_nrows(KQV));
+    }
+
+    constexpr int frag_m = (cols_per_block) == 8 && (D) % 32 == 0 ? 32 : 16;
+    const dim3 block_dim(WARP_SIZE, nwarps, 1);
+    const dim3 blocks_num(parallel_blocks*(Q->ne[1] + cols_per_block - 1) / cols_per_block, Q->ne[2], Q->ne[3]);
+    const int  shmem = 0;
+
+    float scale;
+    memcpy(&scale, KQV->op_params, sizeof(float));
+
+    flash_attn_ext_f16<D, cols_per_block, nwarps, get_VKQ_stride(D, nwarps, frag_m), parallel_blocks, KQ_acc_t>
+        <<<blocks_num, block_dim, shmem, main_stream>>> (
+                (const char *) Q->data,
+                (const char *) K->data,
+                (const char *) V->data,
+                mask ? ((const char *) mask->data) : nullptr,
+                (parallel_blocks) == 1 ? (float *) KQV->data : dst_tmp.ptr, dst_tmp_meta.ptr,
+                scale,
+                Q->ne[0], Q->ne[1], Q->ne[2], Q->ne[3],
+                K->ne[0], K->ne[1], K->ne[2], K->ne[3],
+                mask ? mask->ne[1] : 0, mask ? mask->nb[1] : 0,
+                Q->nb[1], Q->nb[2], Q->nb[3],
+                K->nb[1], K->nb[2], K->nb[3],
+                KQV->ne[0], KQV->ne[1], KQV->ne[2], KQV->ne[3]
+        );
+    CUDA_CHECK(cudaGetLastError());
+
+    if ((parallel_blocks) == 1) {
+        return;
+    }
+
+    const dim3 block_dim_combine(D, 1, 1);
+    const dim3 blocks_num_combine(Q->ne[1], blocks_num.y, blocks_num.z);
+    const int  shmem_combine = 0;
+
+    flash_attn_combine_results<D, parallel_blocks>
+        <<<blocks_num_combine, block_dim_combine, shmem_combine, main_stream>>>
+        (dst_tmp.ptr, dst_tmp_meta.ptr, (float *) KQV->data);
+    CUDA_CHECK(cudaGetLastError());
+}
+
+template <int D, int cols_per_block, int nwarps, typename KQ_acc_t> void launch_fattn_f16(
+        const ggml_tensor * Q, const ggml_tensor * K, const ggml_tensor * V, ggml_tensor * KQV, const ggml_tensor * mask,
+        const int nsm, ggml_cuda_pool & pool, cudaStream_t main_stream
+) {
+    const int blocks_num_pb1 = ((Q->ne[1] + cols_per_block - 1) / cols_per_block)*Q->ne[2]*Q->ne[3];
+
+    if (4*blocks_num_pb1 < 2*nsm) {
+        launch_fattn_f16_impl<D, cols_per_block, nwarps, 4, KQ_acc_t>(Q, K, V, KQV, mask, pool, main_stream);
+        return;
+    }
+    if (2*blocks_num_pb1 < 2*nsm) {
+        launch_fattn_f16_impl<D, cols_per_block, nwarps, 2, KQ_acc_t>(Q, K, V, KQV, mask, pool, main_stream);
+        return;
+    }
+    launch_fattn_f16_impl<D, cols_per_block, nwarps, 1, KQ_acc_t>(Q, K, V, KQV, mask, pool, main_stream);
+}
+
+void ggml_cuda_flash_attn_ext(ggml_backend_cuda_context & ctx, ggml_tensor * dst) {
+    const ggml_tensor * Q = dst->src[0];
+    const ggml_tensor * K = dst->src[1];
+    const ggml_tensor * V = dst->src[2];
+
+    const ggml_tensor * mask = dst->src[3];
+
+    ggml_tensor * KQV = dst;
+
+    GGML_ASSERT(Q->type == GGML_TYPE_F32);
+    GGML_ASSERT(K->type == GGML_TYPE_F16);
+    GGML_ASSERT(V->type == GGML_TYPE_F16);
+    GGML_ASSERT(KQV->type == GGML_TYPE_F32);
+
+    GGML_ASSERT(!mask || mask->type == GGML_TYPE_F16);
+    GGML_ASSERT(!mask || mask->ne[1] >= GGML_PAD(Q->ne[1], 16) &&
+            "the Flash-Attention CUDA kernel requires the mask to be padded to 16 and at least n_queries big");
+
+    GGML_ASSERT(K->ne[1] % FATTN_KQ_STRIDE == 0 && "Incorrect KV cache padding.");
+
+    ggml_cuda_set_device(ctx.device);
+
+    const int nsm = ggml_cuda_info().devices[ggml_cuda_get_device()].nsm;
+
+    const int32_t precision = KQV->op_params[1];
+
+    if (precision != GGML_PREC_DEFAULT) {
+        if (Q->ne[1] <= 32 || Q->ne[0] > 128) {
+            constexpr int cols_per_block = 16;
+            constexpr int nwarps         =  4;
+            switch (Q->ne[0]) {
+                case 64:
+                    launch_fattn_f16< 64, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream());
+                    break;
+                case 80:
+                    launch_fattn_f16< 80, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream());
+                    break;
+                case 96:
+                    launch_fattn_f16< 96, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream());
+                    break;
+                case 112:
+                    launch_fattn_f16<112, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream());
+                    break;
+                case 128:
+                    launch_fattn_f16<128, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream());
+                    break;
+                case 256:
+                    launch_fattn_f16<256, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream());
+                    break;
+                default:
+                    GGML_ASSERT(false);
+                    break;
+            }
+        } else {
+            constexpr int cols_per_block = 32;
+            constexpr int nwarps         =  4;
+            switch (Q->ne[0]) {
+                case 64:
+                    launch_fattn_f16< 64, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream());
+                    break;
+                case 80:
+                    launch_fattn_f16< 80, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream());
+                    break;
+                case 96:
+                    launch_fattn_f16< 96, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream());
+                    break;
+                case 112:
+                    launch_fattn_f16<112, cols_per_block, nwarps, float>(Q, K, V,
KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 128: + launch_fattn_f16<128, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + // case 256: + // launch_fattn_f16<256, cols_per_block, nwarps, float>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + // break; + default: + GGML_ASSERT(false); + break; + } + } + return; + } + + if (Q->ne[1] == 1 && Q->ne[0] % (2*WARP_SIZE) == 0) { + constexpr int parallel_blocks = 4; + switch (Q->ne[0]) { + case 64: + launch_fattn_vec_f16< 64, parallel_blocks>(Q, K, V, KQV, mask, ctx.pool(), ctx.stream()); + break; + case 128: + launch_fattn_vec_f16<128, parallel_blocks>(Q, K, V, KQV, mask, ctx.pool(), ctx.stream()); + break; + case 256: + launch_fattn_vec_f16<256, parallel_blocks>(Q, K, V, KQV, mask, ctx.pool(), ctx.stream()); + break; + default: + GGML_ASSERT(false); + break; + } + return; + } + + if (Q->ne[1] <= 8 && Q->ne[0] % WARP_SIZE == 0) { + constexpr int cols_per_block = 8; + constexpr int nwarps = 4; + switch (Q->ne[0]) { + case 64: + launch_fattn_f16< 64, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 96: + launch_fattn_f16< 96, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 128: + launch_fattn_f16<128, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 256: + launch_fattn_f16<256, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + default: + GGML_ASSERT(false); + break; + } + return; + } + + if (Q->ne[1] <= 32) { + constexpr int cols_per_block = 16; + constexpr int nwarps = 4; + switch (Q->ne[0]) { + case 64: + launch_fattn_f16< 64, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 80: + launch_fattn_f16< 80, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 96: + launch_fattn_f16< 96, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 112: + launch_fattn_f16<112, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 128: + launch_fattn_f16<128, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 256: + launch_fattn_f16<256, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + default: + GGML_ASSERT(false); + break; + } + return; + } + + constexpr int cols_per_block = 32; + constexpr int nwarps = 4; + switch (Q->ne[0]) { + case 64: + launch_fattn_f16< 64, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 80: + launch_fattn_f16< 80, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 96: + launch_fattn_f16< 96, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 112: + launch_fattn_f16<112, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 128: + launch_fattn_f16<128, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + case 256: + launch_fattn_f16<256, cols_per_block, nwarps, half>(Q, K, V, KQV, mask, nsm, ctx.pool(), ctx.stream()); + break; + default: + GGML_ASSERT(false); + break; + } + return; +} diff --git a/ggml-cuda/fattn.cuh b/ggml-cuda/fattn.cuh new file mode 100644 index 0000000000000..ad3ca7a8d8e4d 
--- /dev/null
+++ b/ggml-cuda/fattn.cuh
@@ -0,0 +1,3 @@
+#include "common.cuh"
+
+void ggml_cuda_flash_attn_ext(ggml_backend_cuda_context & ctx, ggml_tensor * dst);
diff --git a/ggml-cuda/softmax.cu b/ggml-cuda/softmax.cu
index fa8f987cf7c1d..6ed225999bddf 100644
--- a/ggml-cuda/softmax.cu
+++ b/ggml-cuda/softmax.cu
@@ -1,7 +1,17 @@
 #include "softmax.cuh"
 
-template <bool vals_smem, int ncols_template, int block_size_template>
-static __global__ void soft_max_f32(const float * x, const float * mask, const float * pos, float * dst, const int ncols_par, const int nrows_y, const float scale, const float max_bias, const float m0, const float m1, uint32_t n_head_log2) {
+template <typename T>
+static __device__ __forceinline__ float t2f32(T val) {
+    return (float) val;
+}
+
+template <>
+__device__ float __forceinline__ t2f32<half>(half val) {
+    return __half2float(val);
+}
+
+template <bool vals_smem, int ncols_template, int block_size_template, typename T>
+static __global__ void soft_max_f32(const float * x, const T * mask, const T * pos, float * dst, const int ncols_par, const int nrows_y, const float scale, const float max_bias, const float m0, const float m1, uint32_t n_head_log2) {
     const int ncols = ncols_template == 0 ? ncols_par : ncols_template;
 
     const int tid = threadIdx.x;
@@ -43,7 +53,7 @@ static __global__ void soft_max_f32(const float * x, const float * mask, const f
         const int64_t ix = (int64_t)rowx*ncols + col;
         const int64_t iy = (int64_t)rowy*ncols + col;
 
-        const float val = x[ix]*scale + (mask ? mask[iy] : 0.0f) + (pos ? slope*pos[col] : 0.0f);
+        const float val = x[ix]*scale + (mask ? t2f32(mask[iy]) : 0.0f) + (pos ? slope*t2f32(pos[col]) : 0.0f);
 
         vals[col] = val;
         max_val = max(max_val, val);
@@ -114,7 +124,8 @@ static __global__ void soft_max_f32(const float * x, const float * mask, const f
     }
 }
 
-static void soft_max_f32_cuda(const float * x, const float * mask, const float * pos, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, const float max_bias, cudaStream_t stream) {
+template<typename T>
+static void soft_max_f32_cuda(const float * x, const T * mask, const T * pos, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, const float max_bias, cudaStream_t stream) {
     int nth = WARP_SIZE;
     while (nth < ncols_x && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2;
     const dim3 block_dims(nth, 1, 1);
@@ -167,15 +178,19 @@ static void soft_max_f32_cuda(const float * x, const float * mask, const float *
 void ggml_cuda_op_soft_max(ggml_backend_cuda_context & ctx, ggml_tensor * dst) {
     const ggml_tensor * src0 = dst->src[0];
     const ggml_tensor * src1 = dst->src[1];
+    const ggml_tensor * src2 = dst->src[2];
+
     const float * src0_d = (const float *)src0->data;
-    const float * src1_d = src1 ? (const float *)src1->data : nullptr;
+    const void  * src1_d = src1 ?
(const void *)src1->data : nullptr; + float * dst_d = (float *)dst->data; cudaStream_t stream = ctx.stream(); GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); - GGML_ASSERT(!src1 || src1->type == GGML_TYPE_F32); // src1 contains mask and it is optional + GGML_ASSERT(!src1 || src1->type == GGML_TYPE_F16 || src1->type == GGML_TYPE_F32); // src1 contains mask and it is optional + GGML_ASSERT(!src2 || src2->type == GGML_TYPE_F16 || src2->type == GGML_TYPE_F32); // src2 contains positions and it is optional const int64_t ne00 = src0->ne[0]; const int64_t nrows_x = ggml_nrows(src0); @@ -188,14 +203,25 @@ void ggml_cuda_op_soft_max(ggml_backend_cuda_context & ctx, ggml_tensor * dst) { memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float)); // positions tensor - float * src2_dd = nullptr; + void * src2_d = nullptr; - ggml_tensor * src2 = dst->src[2]; const bool use_src2 = src2 != nullptr; if (use_src2) { - src2_dd = (float *)src2->data; + src2_d = (void *)src2->data; } - soft_max_f32_cuda(src0_d, src1_d, src2_dd, dst_d, ne00, nrows_x, nrows_y, scale, max_bias, stream); + const bool use_f16 = (src1 && src1->type == GGML_TYPE_F16) || (src2 && src2->type == GGML_TYPE_F16); + + if (use_f16) { + const half * src1_dd = (const half *)src1_d; + const half * src2_dd = (const half *)src2_d; + + soft_max_f32_cuda(src0_d, src1_dd, src2_dd, dst_d, ne00, nrows_x, nrows_y, scale, max_bias, stream); + } else { + const float * src1_dd = (const float *)src1_d; + const float * src2_dd = (const float *)src2_d; + + soft_max_f32_cuda(src0_d, src1_dd, src2_dd, dst_d, ne00, nrows_x, nrows_y, scale, max_bias, stream); + } } diff --git a/ggml-kompute.cpp b/ggml-kompute.cpp index 407062e6fd476..9a469821d8042 100644 --- a/ggml-kompute.cpp +++ b/ggml-kompute.cpp @@ -1427,6 +1427,7 @@ static void ggml_vk_graph_compute(struct ggml_kompute_context * ctx, struct ggml for (int i = node_start; i < node_end; ++i) { struct ggml_tensor * src0 = gf->nodes[i]->src[0]; struct ggml_tensor * src1 = gf->nodes[i]->src[1]; + struct ggml_tensor * src2 = gf->nodes[i]->src[2]; GGML_UNUSED(src2); struct ggml_tensor * dst = gf->nodes[i]; GGML_ASSERT(dst->data != nullptr); @@ -1559,6 +1560,12 @@ static void ggml_vk_graph_compute(struct ggml_kompute_context * ctx, struct ggml { float scale; memcpy(&scale, dst->op_params, sizeof(float)); + +#pragma message("TODO: add ggml_vk_soft_max() F16/F32 src1 and src2 support") +#pragma message("ref: https://github.com/ggerganov/llama.cpp/pull/5021") + GGML_ASSERT(!src1 || src1t == GGML_TYPE_F32); + GGML_ASSERT(src2 == nullptr); + ggml_vk_soft_max(seq, id_src0, id_src1, id_dst, off_src0, off_src1, off_dst, ne00, ne01, ne02, ne03, scale); } break; case GGML_OP_DIAG_MASK_INF: diff --git a/ggml-metal.m b/ggml-metal.m index 9cb4219885cb7..c6d580b8462d0 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -46,8 +46,10 @@ GGML_METAL_KERNEL_TYPE_GELU_QUICK_4, GGML_METAL_KERNEL_TYPE_SILU, GGML_METAL_KERNEL_TYPE_SILU_4, - GGML_METAL_KERNEL_TYPE_SOFT_MAX, - GGML_METAL_KERNEL_TYPE_SOFT_MAX_4, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16_4, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32_4, GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF, GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8, GGML_METAL_KERNEL_TYPE_GET_ROWS_F32, @@ -177,6 +179,14 @@ GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H64, + 
GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H80, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H96, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H112, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H128, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H256, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H128, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H256, GGML_METAL_KERNEL_TYPE_CPY_F32_F16, GGML_METAL_KERNEL_TYPE_CPY_F32_F32, GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0, @@ -443,7 +453,7 @@ static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ } /* - GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) kernel->pipeline, \ + GGML_METAL_LOG_INFO("%s: loaded %-40s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) kernel->pipeline, \ (int) kernel->pipeline.maxTotalThreadsPerThreadgroup, \ (int) kernel->pipeline.threadExecutionWidth); \ */ @@ -459,172 +469,182 @@ static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ return NULL; \ } \ } else { \ - GGML_METAL_LOG_WARN("%s: skipping %-32s (not supported)\n", __func__, "kernel_"#name); \ + GGML_METAL_LOG_WARN("%s: skipping %-40s (not supported)\n", __func__, "kernel_"#name); \ } // simd_sum and simd_max requires MTLGPUFamilyApple7 - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD, add, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD_ROW, add_row, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL, mul, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_ROW, mul_row, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV, div, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV_ROW, div_row, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE, scale, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE_4, scale_4, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CLAMP, clamp, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TANH, tanh, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RELU, relu, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU, gelu, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_4, gelu_4, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_QUICK, gelu_quick, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_QUICK_4, gelu_quick_4, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SILU, silu, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SILU_4, silu_4, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX, soft_max, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_4, soft_max_4, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF, diag_mask_inf, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8, diag_mask_inf_8, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F32, get_rows_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F16, get_rows_f16, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0, get_rows_q4_0, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1, get_rows_q4_1, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0, get_rows_q5_0, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1, get_rows_q5_1, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0, get_rows_q8_0, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K, get_rows_q2_K, true); - 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K, get_rows_q3_K, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K, get_rows_q4_K, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K, get_rows_q5_K, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K, get_rows_q6_K, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, get_rows_iq2_xxs, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, get_rows_iq3_s, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S, get_rows_iq2_s, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_M, get_rows_iq1_m, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, get_rows_iq4_nl, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_XS, get_rows_iq4_xs, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_NORM, norm, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32, mul_mv_f32_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16, mul_mv_f16_f16, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32, mul_mv_f16_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW, mul_mv_f16_f32_1row, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4, mul_mv_f16_f32_l4, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32, mul_mv_q4_0_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32, mul_mv_q4_1_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, mul_mv_q5_0_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, mul_mv_q5_1_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, mul_mv_q8_0_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, mul_mv_q2_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, mul_mv_q3_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, mul_mv_q4_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32, mul_mv_q5_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32, mul_mv_q6_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, mul_mv_iq2_xxs_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, 
ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, mul_mv_iq3_s_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32, mul_mv_iq2_s_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_M_F32, mul_mv_iq1_m_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, mul_mv_iq4_nl_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_XS_F32, mul_mv_iq4_xs_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); - //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); - //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_1ROW, mul_mv_id_f16_f32_1row, ctx->support_simdgroup_reduction); - //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_L4, mul_mv_id_f16_f32_l4, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32, mul_mv_id_q4_0_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32, mul_mv_id_q4_1_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32, mul_mv_id_q5_0_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32, mul_mv_id_q5_1_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32, mul_mv_id_q8_0_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32, mul_mv_id_q2_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32, mul_mv_id_q3_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32, mul_mv_id_q4_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32, mul_mv_id_q5_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32, mul_mv_id_q6_K_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, mul_mv_id_iq2_xxs_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, mul_mv_id_iq3_s_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32, mul_mv_id_iq2_s_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_M_F32, mul_mv_id_iq1_m_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, 
mul_mv_id_iq4_nl_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_XS_F32, mul_mv_id_iq4_xs_f32, ctx->support_simdgroup_reduction); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, mul_mm_f16_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32, mul_mm_q4_1_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32, mul_mm_q5_0_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32, mul_mm_q5_1_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32, mul_mm_q8_0_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32, mul_mm_q2_K_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32, mul_mm_q3_K_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32, mul_mm_q4_K_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32, mul_mm_q5_K_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32, mul_mm_q6_K_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, mul_mm_iq2_xxs_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, mul_mm_iq3_s_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32, mul_mm_iq2_s_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_M_F32, mul_mm_iq1_m_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, mul_mm_iq4_nl_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_XS_F32, mul_mm_iq4_xs_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32, mul_mm_id_q4_1_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32, mul_mm_id_q5_0_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32, mul_mm_id_q5_1_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32, mul_mm_id_q8_0_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32, mul_mm_id_q2_K_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32, mul_mm_id_q3_K_f32, ctx->support_simdgroup_mm); - 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32, mul_mm_id_q4_K_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32, mul_mm_id_q5_K_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32, mul_mm_id_q6_K_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, mul_mm_id_iq2_xxs_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, mul_mm_id_iq3_s_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32, mul_mm_id_iq2_s_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_M_F32, mul_mm_id_iq1_m_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, mul_mm_id_iq4_nl_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_XS_F32, mul_mm_id_iq4_xs_f32, ctx->support_simdgroup_mm); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F16, im2col_f16, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F32, im2col_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UPSCALE_F32, upscale_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_F32, pad_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32, timestep_embedding_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARANGE_F32, arange_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, argsort_f32_i32_asc, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, argsort_f32_i32_desc, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, leaky_relu_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F16, cpy_f32_f16, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F32, cpy_f32_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0, cpy_f32_q8_0, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0, cpy_f32_q4_0, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1, cpy_f32_q4_1, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0, cpy_f32_q5_0, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1, cpy_f32_q5_1, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_IQ4_NL, cpy_f32_iq4_nl, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F16, cpy_f16_f16, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F32, cpy_f16_f32, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CONCAT, concat, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SQR, sqr, true); - GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SUM_ROWS, sum_rows, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD, add, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD_ROW, add_row, 
true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL, mul, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_ROW, mul_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV, div, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV_ROW, div_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE, scale, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE_4, scale_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CLAMP, clamp, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TANH, tanh, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RELU, relu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU, gelu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_4, gelu_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_QUICK, gelu_quick, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_QUICK_4, gelu_quick_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SILU, silu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SILU_4, silu_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16, soft_max_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16_4, soft_max_f16_4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32, soft_max_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32_4, soft_max_f32_4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF, diag_mask_inf, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8, diag_mask_inf_8, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F32, get_rows_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F16, get_rows_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0, get_rows_q4_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1, get_rows_q4_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0, get_rows_q5_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1, get_rows_q5_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0, get_rows_q8_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K, get_rows_q2_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K, get_rows_q3_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K, get_rows_q4_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K, get_rows_q5_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K, get_rows_q6_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, get_rows_iq2_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, get_rows_iq3_s, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S, get_rows_iq2_s, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_M, get_rows_iq1_m, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, get_rows_iq4_nl, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_XS, get_rows_iq4_xs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_NORM, norm, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32, mul_mv_f32_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16, mul_mv_f16_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32, mul_mv_f16_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW, mul_mv_f16_f32_1row, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4, mul_mv_f16_f32_l4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32, mul_mv_q4_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32, mul_mv_q4_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, mul_mv_q5_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, mul_mv_q5_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, mul_mv_q8_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, mul_mv_q2_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, mul_mv_q3_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, mul_mv_q4_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32, mul_mv_q5_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32, mul_mv_q6_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, mul_mv_iq2_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, mul_mv_iq3_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32, mul_mv_iq2_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_M_F32, mul_mv_iq1_m_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, mul_mv_iq4_nl_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_XS_F32, mul_mv_iq4_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_1ROW, mul_mv_id_f16_f32_1row, 
ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_L4, mul_mv_id_f16_f32_l4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32, mul_mv_id_q4_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32, mul_mv_id_q4_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32, mul_mv_id_q5_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32, mul_mv_id_q5_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32, mul_mv_id_q8_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32, mul_mv_id_q2_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32, mul_mv_id_q3_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32, mul_mv_id_q4_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32, mul_mv_id_q5_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32, mul_mv_id_q6_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, mul_mv_id_iq2_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, mul_mv_id_iq3_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32, mul_mv_id_iq2_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_M_F32, mul_mv_id_iq1_m_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, mul_mv_id_iq4_nl_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_XS_F32, mul_mv_id_iq4_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, mul_mm_f16_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32, mul_mm_q4_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32, mul_mm_q5_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32, mul_mm_q5_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32, mul_mm_q8_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32, mul_mm_q2_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32, mul_mm_q3_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32, 
mul_mm_q4_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32, mul_mm_q5_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32, mul_mm_q6_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, mul_mm_iq2_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, mul_mm_iq3_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32, mul_mm_iq2_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_M_F32, mul_mm_iq1_m_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, mul_mm_iq4_nl_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_XS_F32, mul_mm_iq4_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32, mul_mm_id_q4_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32, mul_mm_id_q5_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32, mul_mm_id_q5_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32, mul_mm_id_q8_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32, mul_mm_id_q2_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32, mul_mm_id_q3_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32, mul_mm_id_q4_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32, mul_mm_id_q5_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32, mul_mm_id_q6_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, mul_mm_id_iq2_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, mul_mm_id_iq3_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32, mul_mm_id_iq2_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_M_F32, mul_mm_id_iq1_m_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, 
mul_mm_id_iq4_nl_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_XS_F32, mul_mm_id_iq4_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F16, im2col_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F32, im2col_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UPSCALE_F32, upscale_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_F32, pad_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32, timestep_embedding_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARANGE_F32, arange_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, argsort_f32_i32_asc, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, argsort_f32_i32_desc, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, leaky_relu_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H64, flash_attn_ext_f16_h64, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H80, flash_attn_ext_f16_h80, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H96, flash_attn_ext_f16_h96, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H112, flash_attn_ext_f16_h112, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H128, flash_attn_ext_f16_h128, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H256, flash_attn_ext_f16_h256, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H128, flash_attn_ext_vec_f16_h128, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H256, flash_attn_ext_vec_f16_h256, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F16, cpy_f32_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F32, cpy_f32_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0, cpy_f32_q8_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0, cpy_f32_q4_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1, cpy_f32_q4_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0, cpy_f32_q5_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1, cpy_f32_q5_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_IQ4_NL, cpy_f32_iq4_nl, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F16, cpy_f16_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F32, cpy_f16_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CONCAT, concat, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SQR, sqr, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SUM_ROWS, sum_rows, true); } [metal_library release]; @@ -743,6 +763,7 @@ static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const case GGML_OP_TIMESTEP_EMBEDDING: case GGML_OP_ARGSORT: case GGML_OP_LEAKY_RELU: + case GGML_OP_FLASH_ATTN_EXT: return true; case GGML_OP_MUL_MAT: case GGML_OP_MUL_MAT_ID: @@ -1326,20 +1347,33 @@ static enum ggml_status ggml_metal_graph_compute( } break; case GGML_OP_SOFT_MAX: { + GGML_ASSERT(!src1 || src1->type == GGML_TYPE_F16 || src1->type == GGML_TYPE_F32); + GGML_ASSERT(!src2 || src2->type == 
GGML_TYPE_F16 || src2->type == GGML_TYPE_F32);
+
                 int nth = 32; // SIMD width
 
                 id<MTLComputePipelineState> pipeline = nil;
 
+                const bool use_f16 = (src1 && src1->type == GGML_TYPE_F16) || (src2 && src2->type == GGML_TYPE_F16);
+
                 if (ne00%4 == 0) {
                     while (nth < ne00/4 && nth < 256) {
                         nth *= 2;
                     }
-                    pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_4].pipeline;
+                    if (use_f16) {
+                        pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16_4].pipeline;
+                    } else {
+                        pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32_4].pipeline;
+                    }
                 } else {
                     while (nth < ne00 && nth < 1024) {
                         nth *= 2;
                     }
-                    pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline;
+                    if (use_f16) {
+                        pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16].pipeline;
+                    } else {
+                        pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32].pipeline;
+                    }
                 }
 
                 float scale;
@@ -2503,6 +2537,161 @@ static enum ggml_status ggml_metal_graph_compute(
                     [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)];
                 } break;
+            case GGML_OP_FLASH_ATTN_EXT:
+                {
+                    GGML_ASSERT(ne00 % 4 == 0);
+                    GGML_ASSERT(src0->type == GGML_TYPE_F32);
+
+                    struct ggml_tensor * src3 = gf->nodes[i]->src[3];
+
+                    GGML_ASSERT(ggml_are_same_shape(src1, src2));
+                    GGML_ASSERT(src3);
+
+                    size_t offs_src3 = 0;
+
+                    id<MTLBuffer> id_src3 = src3 ? ggml_metal_get_buffer(src3, &offs_src3) : nil;
+
+                    GGML_ASSERT(!src3 || src3->type == GGML_TYPE_F16);
+                    GGML_ASSERT(!src3 || src3->ne[1] >= GGML_PAD(src0->ne[1], 8) &&
+                            "the Flash-Attention Metal kernel requires the mask to be padded to 8 and at least n_queries big");
+
+                    const int64_t  ne30 = src3 ? src3->ne[0] : 0; GGML_UNUSED(ne30);
+                    const int64_t  ne31 = src3 ? src3->ne[1] : 0;
+                    const int64_t  ne32 = src3 ? src3->ne[2] : 0; GGML_UNUSED(ne32);
+                    const int64_t  ne33 = src3 ? src3->ne[3] : 0; GGML_UNUSED(ne33);
+
+                    const uint64_t nb30 = src3 ? src3->nb[0] : 0; GGML_UNUSED(nb30);
+                    const uint64_t nb31 = src3 ? src3->nb[1] : 0;
+                    const uint64_t nb32 = src3 ? src3->nb[2] : 0; GGML_UNUSED(nb32);
+                    const uint64_t nb33 = src3 ? src3->nb[3] : 0; GGML_UNUSED(nb33);
+
+                    const enum ggml_type src2t = src2 ? src2->type : GGML_TYPE_COUNT; GGML_UNUSED(src2t);
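+
+                    // Two kernel families are instantiated: a simdgroup-matrix kernel for
+                    // batched queries and a vector kernel for single-token decoding with
+                    // head sizes 128 and 256; the dispatch below picks one from ne01/ne00.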
+
+                    float scale;
+                    memcpy(&scale, dst->op_params, sizeof(float));
+
+                    id<MTLComputePipelineState> pipeline = nil;
+
+                    bool use_vec_kernel = false;
+
+                    if (ne01 >= 4 || (ne00%128 != 0)) {
+                        switch (ne00) {
+                            case 64:  pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H64 ].pipeline; break;
+                            case 80:  pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H80 ].pipeline; break;
+                            case 96:  pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H96 ].pipeline; break;
+                            case 112: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H112].pipeline; break;
+                            case 128: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H128].pipeline; break;
+                            case 256: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H256].pipeline; break;
+                            default:
+                                {
+                                    GGML_METAL_LOG_ERROR("unsupported size: %lld\n", ne00);
+                                    GGML_METAL_LOG_ERROR("add template specialization for this size\n");
+                                    GGML_ASSERT(false && "add template specialization for this size");
+                                }
+                        }
+                    } else {
+                        use_vec_kernel = true;
+
+                        switch (ne00) {
+                            case 128: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H128].pipeline; break;
+                            case 256: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H256].pipeline; break;
+                            default:
+                                {
+                                    GGML_METAL_LOG_ERROR("unsupported size: %lld\n", ne00);
+                                    GGML_METAL_LOG_ERROR("add template specialization for this size\n");
+                                    GGML_ASSERT(false && "add template specialization for this size");
+                                }
+                        }
+                    }
+
+                    [encoder setComputePipelineState:pipeline];
+                    [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0];
+                    [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1];
+                    [encoder setBuffer:id_src2 offset:offs_src2 atIndex:2];
+                    [encoder setBuffer:id_src3 offset:offs_src3 atIndex:3];
+                    [encoder setBuffer:id_dst  offset:offs_dst  atIndex:4];
+                    [encoder setBytes:&ne00  length:sizeof( int64_t) atIndex:5];
+                    [encoder setBytes:&ne01  length:sizeof( int64_t) atIndex:6];
+                    [encoder setBytes:&ne02  length:sizeof( int64_t) atIndex:7];
+                    [encoder setBytes:&ne03  length:sizeof( int64_t) atIndex:8];
+                    [encoder setBytes:&nb00  length:sizeof(uint64_t) atIndex:9];
+                    [encoder setBytes:&nb01  length:sizeof(uint64_t) atIndex:10];
+                    [encoder setBytes:&nb02  length:sizeof(uint64_t) atIndex:11];
+                    [encoder setBytes:&nb03  length:sizeof(uint64_t) atIndex:12];
+                    [encoder setBytes:&ne10  length:sizeof( int64_t) atIndex:13];
+                    [encoder setBytes:&ne11  length:sizeof( int64_t) atIndex:14];
+                    [encoder setBytes:&ne12  length:sizeof( int64_t) atIndex:15];
+                    [encoder setBytes:&ne13  length:sizeof( int64_t) atIndex:16];
+                    [encoder setBytes:&nb10  length:sizeof(uint64_t) atIndex:17];
+                    [encoder setBytes:&nb11  length:sizeof(uint64_t) atIndex:18];
+                    [encoder setBytes:&nb12  length:sizeof(uint64_t) atIndex:19];
+                    [encoder setBytes:&nb13  length:sizeof(uint64_t) atIndex:20];
+                    [encoder setBytes:&ne31  length:sizeof( int64_t) atIndex:21];
+                    [encoder setBytes:&nb31  length:sizeof(uint64_t) atIndex:22];
+                    [encoder setBytes:&ne0   length:sizeof( int64_t) atIndex:23];
+                    [encoder setBytes:&ne1   length:sizeof( int64_t) atIndex:24];
+                    [encoder setBytes:&ne2   length:sizeof( int64_t) atIndex:25];
+                    [encoder setBytes:&ne3   length:sizeof( int64_t) atIndex:26];
+                    [encoder setBytes:&scale length:sizeof(   float) atIndex:27];
+
+                    if (!use_vec_kernel) {
+                        // half8x8 kernel
+                        const int64_t nqptg = 8;  // queries per threadgroup    !! sync with kernel template arguments !!
+                        const int64_t ncpsg = 32; // cache values per simdgroup !! sync with kernel template arguments !!
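+
+                        // The number of simdgroups is chosen below as the largest power of
+                        // two whose threadgroup memory requirement still fits the device limit.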
+
+                        GGML_ASSERT(nqptg <= 32);
+                        GGML_ASSERT(nqptg  % 8 == 0);
+                        GGML_ASSERT(ncpsg  % 32 == 0);
+
+                        int64_t nsgmax = 2;
+
+                        while (true) {
+                            const size_t smem = nqptg*(ne00 + 2*nsgmax*(ncpsg + nqptg))*(sizeof(float)/2);
+                            if (smem > ctx->device.maxThreadgroupMemoryLength) {
+                                break;
+                            }
+                            nsgmax *= 2;
+                        }
+                        nsgmax /= 2;
+
+                        // simdgroups per threadgroup (a.k.a. warps)
+                        const int64_t nsg = ne01 <= nqptg ? MAX(4, MIN(nsgmax, MIN(ne11/ncpsg, (int64_t) pipeline.maxTotalThreadsPerThreadgroup/32))) : 4;
+
+                        const size_t smem = nqptg*(ne00 + 2*nsg*(ncpsg + nqptg))*(sizeof(float)/2);
+
+                        //printf("smem: %zu, max: %zu\n", smem, ctx->device.maxThreadgroupMemoryLength);
+                        GGML_ASSERT(smem <= ctx->device.maxThreadgroupMemoryLength);
+
+                        [encoder setThreadgroupMemoryLength:GGML_PAD(smem, 16) atIndex:0];
+
+                        [encoder dispatchThreadgroups:MTLSizeMake((ne01 + nqptg - 1)/nqptg, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(32, nsg, 1)];
+                    } else {
+                        // half1x4 kernel
+                        const int64_t nqptg = 1;  // queries per threadgroup    !! sync with kernel template arguments !!
+                        const int64_t ncpsg = 32; // cache values per simdgroup !! sync with kernel template arguments !!
+
+                        GGML_ASSERT(nqptg <= 32);
+                        GGML_ASSERT(nqptg  % 1 == 0);
+                        GGML_ASSERT(ncpsg  % 32 == 0);
+
+                        // simdgroups per threadgroup (a.k.a. warps)
+                        const int64_t nsgt = MAX(2, MIN(ne11/ncpsg, (int64_t) pipeline.maxTotalThreadsPerThreadgroup/32));
+
+                        int64_t nsg = 1;
+                        while (nsg <= nsgt) {
+                            nsg *= 2;
+                        }
+                        nsg /= 2;
+
+                        const size_t smem = (nqptg*(ne00 + 2*nsg*(ncpsg + nqptg)) + nsg*ne00)*(sizeof(float)/2);
+
+                        //printf("smem: %zu, max: %zu\n", smem, ctx->device.maxThreadgroupMemoryLength);
+                        GGML_ASSERT(smem <= ctx->device.maxThreadgroupMemoryLength);
+                        [encoder setThreadgroupMemoryLength:GGML_PAD(smem, 16) atIndex:0];
+
+                        [encoder dispatchThreadgroups:MTLSizeMake((ne01 + nqptg - 1)/nqptg, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(32, nsg, 1)];
+                    }
+                } break;
             case GGML_OP_DUP:
             case GGML_OP_CPY:
             case GGML_OP_CONT:
@@ -2706,10 +2895,13 @@ GGML_CALL static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buff
     UNUSED(buft);
 }
 
-static void ggml_backend_metal_log_allocated_size(id<MTLDevice> device) {
+static void ggml_backend_metal_log_allocated_size(id<MTLDevice> device, size_t size_aligned) {
+#ifndef GGML_METAL_NDEBUG
 #if TARGET_OS_OSX || (TARGET_OS_IOS && __clang_major__ >= 15)
     if (@available(macOS 10.12, iOS 16.0, *)) {
-        GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)",
+        GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB, (%8.2f / %8.2f)",
+                __func__,
+                size_aligned / 1024.0 / 1024.0,
                 device.currentAllocatedSize / 1024.0 / 1024.0,
                 device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0);
 
@@ -2719,10 +2911,15 @@ static void ggml_backend_metal_log_allocated_size(id<MTLDevice> device) {
             GGML_METAL_LOG_INFO("\n");
         }
     } else {
-        GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0);
+        GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB, (%8.2f)\n",
+                __func__,
+                size_aligned / 1024.0 / 1024.0,
+                device.currentAllocatedSize / 1024.0 / 1024.0);
     }
+#endif
 #endif
     UNUSED(device);
+    UNUSED(size_aligned);
 }
 
 GGML_CALL static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) {
@@ -2756,8 +2953,7 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buff
         return NULL;
     }
 
-    GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB", __func__, size_aligned / 1024.0 / 1024.0);
-    ggml_backend_metal_log_allocated_size(device);
 
     return ggml_backend_buffer_init(buft, ggml_backend_metal_buffer_i, ctx, size);
 }
 
@@ -2844,7 +3040,7 @@ GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data,
             return false;
         }
 
-        GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB", __func__, size_aligned / 1024.0 / 1024.0);
+        ggml_backend_metal_log_allocated_size(device, size_aligned);
 
         ++ctx->n_buffers;
     } else {
@@ -2867,7 +3063,8 @@ GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data,
                 return false;
             }
 
-            GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB, offs = %12ld", __func__, size_step_aligned / 1024.0 / 1024.0, i);
+            ggml_backend_metal_log_allocated_size(device, size_step_aligned);
+
             if (i + size_step < size) {
                 GGML_METAL_LOG_INFO("\n");
            }
@@ -2876,8 +3073,6 @@ GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data,
         }
     }
 
-    ggml_backend_metal_log_allocated_size(device);
-
     return ggml_backend_buffer_init(ggml_backend_metal_buffer_type(), ggml_backend_metal_buffer_i, ctx, size);
 }
 
diff --git a/ggml-metal.metal b/ggml-metal.metal
index 191880af17942..3d4276ae02b9e 100644
--- a/ggml-metal.metal
+++ b/ggml-metal.metal
@@ -352,11 +352,12 @@ kernel void kernel_sum_rows(
     dst_row[0] = row_sum;
 }
 
+template <typename T>
 kernel void kernel_soft_max(
-        device const float * src0,
-        device const float * src1,
-        device const float * src2,
-        device       float * dst,
+        device const  char * src0,
+        device const  char * src1,
+        device const  char * src2,
+        device        char * dst,
         constant   int64_t & ne00,
         constant   int64_t & ne01,
         constant   int64_t & ne02,
@@ -375,10 +376,10 @@ kernel void kernel_soft_max(
     const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01;
     const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01);
 
-    device const float * psrc0 = src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00;
-    device const float * pmask = src1 != src0 ? src1 + i01*ne00 : nullptr;
-    device const float * ppos  = src2 != src0 ? src2            : nullptr;
-    device       float * pdst  = dst  + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00;
+    device const float * psrc0 = (device const float *) src0 + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00);
+    device const     T * pmask = src1 != src0 ? (device const T *) src1 + i01*ne00 : nullptr;
+    device const     T * ppos  = src2 != src0 ? (device const T *) src2            : nullptr;
+    device       float * pdst  = (device float *) dst + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00);
 
     float slope = 0.0f;
 
@@ -456,11 +457,12 @@ kernel void kernel_soft_max(
     }
 }
 
+template <typename T>
 kernel void kernel_soft_max_4(
-        device const float * src0,
-        device const float * src1,
-        device const float * src2,
-        device       float * dst,
+        device const  char * src0,
+        device const  char * src1,
+        device const  char * src2,
+        device        char * dst,
         constant   int64_t & ne00,
         constant   int64_t & ne01,
         constant   int64_t & ne02,
@@ -479,10 +481,10 @@ kernel void kernel_soft_max_4(
     const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01;
     const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01);
 
-    device const float4 * psrc4 = (device const float4 *)(src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00);
-    device const float4 * pmask = src1 != src0 ? (device const float4 *)(src1 + i01*ne00) : nullptr;
-    device const float4 * ppos  = src2 != src0 ? (device const float4 *)(src2) : nullptr;
-    device       float4 * pdst4 = (device float4 *)(dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00);
+    device const float4 * psrc4 = (device const float4 *) src0 + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00)/4;
+    device const      T * pmask = src1 != src0 ? (device const T *) src1 + i01*ne00/4 : nullptr;
+    device const      T * ppos  = src2 != src0 ? (device const T *) src2 : nullptr;
+    device       float4 * pdst4 = (device float4 *) dst + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00)/4;
 
     float slope = 0.0f;
 
@@ -499,7 +501,7 @@ kernel void kernel_soft_max_4(
         float4 lmax4 = -INFINITY;
 
         for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) {
-            lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f));
+            lmax4 = fmax(lmax4, psrc4[i00]*scale + (float4)((pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f)));
         }
 
         const float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3]));
@@ -525,7 +527,7 @@ kernel void kernel_soft_max_4(
         // parallel sum
         float4 lsum4 = 0.0f;
         for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) {
-            const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f)) - max_val);
+            const float4 exp_psrc4 = exp((psrc4[i00]*scale + (float4)((pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f))) - max_val);
             lsum4 += exp_psrc4;
             pdst4[i00] = exp_psrc4;
         }
@@ -562,6 +564,14 @@ kernel void kernel_soft_max_4(
     }
 }
 
+typedef decltype(kernel_soft_max<float>)    kernel_soft_max_t;
+typedef decltype(kernel_soft_max_4<float4>) kernel_soft_max_4_t;
+
+template [[host_name("kernel_soft_max_f16")]]   kernel kernel_soft_max_t   kernel_soft_max<half>;
+template [[host_name("kernel_soft_max_f32")]]   kernel kernel_soft_max_t   kernel_soft_max<float>;
+template [[host_name("kernel_soft_max_f16_4")]] kernel kernel_soft_max_4_t kernel_soft_max_4<half4>;
+template [[host_name("kernel_soft_max_f32_4")]] kernel kernel_soft_max_4_t kernel_soft_max_4<float4>;
+
 kernel void kernel_diag_mask_inf(
         device const float * src0,
         device       float * dst,
@@ -2084,6 +2094,632 @@ kernel void kernel_leaky_relu_f32(
     dst[tpig] = src0[tpig] > 0.0f ? src0[tpig] : src0[tpig] * slope;
 }
 
+typedef void (flash_attn_ext_f16_t)(
+        device const  char * q,
+        device const  char * k,
+        device const  char * v,
+        device const  char * mask,
+        device       float * dst,
+        constant   int64_t & ne00,
+        constant   int64_t & ne01,
+        constant   int64_t & ne02,
+        constant   int64_t & ne03,
+        constant  uint64_t & nb00,
+        constant  uint64_t & nb01,
+        constant  uint64_t & nb02,
+        constant  uint64_t & nb03,
+        constant   int64_t & ne10,
+        constant   int64_t & ne11,
+        constant   int64_t & ne12,
+        constant   int64_t & ne13,
+        constant  uint64_t & nb10,
+        constant  uint64_t & nb11,
+        constant  uint64_t & nb12,
+        constant  uint64_t & nb13,
+        constant   int64_t & ne31,
+        constant  uint64_t & nb31,
+        constant   int64_t & ne0,
+        constant   int64_t & ne1,
+        constant   int64_t & ne2,
+        constant   int64_t & ne3,
+        constant     float & scale,
+        threadgroup   half * shared,
+        uint3  tgpig[[threadgroup_position_in_grid]],
+        uint3  tpitg[[thread_position_in_threadgroup]],
+        uint3    ntg[[threads_per_threadgroup]],
+        ushort tiisg[[thread_index_in_simdgroup]],
+        ushort sgitg[[simdgroup_index_in_threadgroup]]);
+
+// ref: https://arxiv.org/pdf/2307.08691.pdf
+template<int64_t D, int64_t Q = 8, int64_t C = 32> // head size, queries per threadgroup, cache items per threadgroup
+kernel void kernel_flash_attn_ext_f16(
+        device const  char * q,
+        device const  char * k,
+        device const  char * v,
+        device const  char * mask,
+        device       float * dst,
+        constant   int64_t & ne00,
+        constant   int64_t & ne01,
+        constant   int64_t & ne02,
+        constant   int64_t & ne03,
+        constant  uint64_t & nb00,
+        constant  uint64_t & nb01,
+        constant  uint64_t & nb02,
+        constant  uint64_t & nb03,
+        constant   int64_t & ne10,
+        constant   int64_t & ne11,
+        constant   int64_t & ne12,
+        constant   int64_t & ne13,
+        constant  uint64_t & nb10,
+        constant  uint64_t & nb11,
+        constant  uint64_t & nb12,
+        constant  uint64_t & nb13,
+        constant   int64_t & ne31,
+        constant  uint64_t & nb31,
+        constant   int64_t & ne0,
+        constant   int64_t & ne1,
+        constant   int64_t & ne2,
+        constant   int64_t & ne3,
+        constant     float & scale,
+        threadgroup   half * shared [[threadgroup(0)]],
+        uint3  tgpig[[threadgroup_position_in_grid]],
+        uint3  tpitg[[thread_position_in_threadgroup]],
+        uint3    ntg[[threads_per_threadgroup]],
+        ushort tiisg[[thread_index_in_simdgroup]],
+        ushort sgitg[[simdgroup_index_in_threadgroup]]) {
+    const short nsg = ntg.y; // number of simdgroups
+
+    const short iq3 = tgpig[2];
+    const short iq2 = tgpig[1];
+    const short iq1 = tgpig[0]*Q;
+
+    const short D4 = D/4;
+    const short D8 = D/8;
+    const short Q8 = Q/8;
+    const short NW = N_SIMDWIDTH;
+    const short SH = (C + Q); // shared memory per simdgroup in (half)
+
+    const short T  = D + 2*nsg*SH; // shared memory size per query in (half)
+    const short TF = T/2;          // shared memory size per query in (float)
+    const short T4 = T/4;          // shared memory size per query in (half4)
+
+    threadgroup half  * sq  = (threadgroup half  *) (shared +              0*D); // holds the query data
+    threadgroup half4 * sq4 = (threadgroup half4 *) (shared +              0*D); // same as above but in half4
+    threadgroup float * ss  = (threadgroup float *) (shared + 2*sgitg*SH + 1*D); // scratch buffer for attention and diagonal matrix
+
+    // store the result for all queries in local memory in 8x8 matrices (the O matrix from the paper)
+    simdgroup_half8x8 lo[D8];
+
+    // load heads from Q to shared memory
+    for (short j = sgitg; j < Q; j += nsg) {
+        device const float4 * q4 = (device const float4 *) ((device const char *) q + ((iq1 + j)*nb01 + iq2*nb02 + iq3*nb03));
+
+        for (short i = tiisg; i < D4; i += NW) {
+            if (iq1 + j < ne01) {
+                sq4[j*T4 + i] = (half4) q4[i];
+            } else {
+                sq4[j*T4 + i] = 0.0h;
+            }
+        }
+    }
+
+    // zero out lo
+    for (short i = 0; i < D8; ++i) {
+        lo[i] = make_filled_simdgroup_matrix<half, 8, 8>(0.0h);
+    }
+
+    // zero out shared memory SH
+    for (short j = 0; j < Q; ++j) {
+        for (short i = tiisg; i < SH; i += NW) {
+            ss[j*TF + i] = 0.0f;
+        }
+    }
+
+    threadgroup_barrier(mem_flags::mem_threadgroup);
+
+    {
+        float S[Q] = { [0 ... Q-1] = 0.0h };
+        float M[Q] = { [0 ... Q-1] = -FLT_MAX/2 };
+
+        // assume K and V are same shape
+        const short ne22 = ne12;
+        const short ne23 = ne13;
+
+        const uint nb21 = nb11;
+        const uint nb22 = nb12;
+        const uint nb23 = nb13;
+
+        // broadcast
+        const short rk2 = ne02/ne12;
+        const short rk3 = ne03/ne13;
+
+        const short rv2 = ne02/ne22;
+        const short rv3 = ne03/ne23;
+
+        // k indices
+        const short ik2 = iq2/rk2;
+        const short ik3 = iq3/rk3;
+
+        // v indices
+        const short iv2 = iq2/rv2;
+        const short iv3 = iq3/rv3;
+
+        // load the queries from shared memory into local memory
+        simdgroup_half8x8 mq[D8];
+
+        for (short i = 0; i < D8; ++i) {
+            simdgroup_load(mq[i], sq + i*8, T);
+        }
+
+        // pointer to the mask
+        device const half * mp = (device const half *) (mask + iq1*nb31);
+
+        // prepare diagonal scale matrix
+        simdgroup_float8x8 mscale(scale);
+
+        // loop over the KV cache
+        // each simdgroup handles blocks of Q rows and C columns
+        for (int ic0 = 0; ic0 < ne11; ic0 += C*nsg) {
+            const int ic = ic0 + C*sgitg;
+            if (ic >= ne11) {
+                break;
+            }
+
+            // Q*K^T
+            {
+                for (short cc = 0; cc < C/8; ++cc) {
+                    simdgroup_float8x8 mqk = make_filled_simdgroup_matrix<float, 8, 8>(0.h);
+
+                    device const half * pk = (device const half *) ((device const char *) k + ((ic + 8*cc)*nb11 + ik2*nb12 + ik3*nb13));
+
+                    for (short i = 0; i < D8; ++i) {
+                        simdgroup_half8x8 mk;
+                        simdgroup_load(mk, pk + i*8, nb11/sizeof(half), 0, true); // transpose
+
+                        simdgroup_multiply_accumulate(mqk, mq[i], mk, mqk);
+                    }
+
+                    // mqk = mqk*scale + mask
+                    simdgroup_half8x8 mm;
+                    simdgroup_load(mm, mp + ic + 8*cc, nb31/sizeof(half), 0, false);
+                    simdgroup_multiply_accumulate(mqk, mqk, mscale, mm);
+
+                    simdgroup_store(mqk, ss + 8*cc, TF, 0, false);
+                }
+            }
+
+            // used to detect blocks full of -INF
+            float smax = -INFINITY;
+
+            // online softmax
+            {
+                float ms[Q];
+
+                for (short j = 0; j < Q; ++j) {
+                    const short p = tiisg;
+
+                    const float m = M[j];
+                    const float s = ss[j*TF + p];
+
+                    smax = simd_max(max(smax, s));
+                    M[j] = simd_max(max(M[j], s));
+
+                    ms[j] = exp(m - M[j]);
+                    const float vs = exp(s - M[j]);
+
+                    S[j] = S[j]*ms[j] + simd_sum(vs);
+
+                    // the P matrix from the paper (Q rows, C columns)
+                    ss[j*TF + p] = vs;
+                }
+
+                // create a QxQ diagonal matrix for rescaling the output
+                if (tiisg < Q) {
+                    ss[tiisg*TF + C + tiisg] = ms[tiisg];
+                }
+            }
+
+            // skip -INF blocks
+            if (smax == -INFINITY) {
+                continue;
+            }
+
+            // O = diag(ms)*O
+            {
+                simdgroup_float8x8 mm;
+                simdgroup_load(mm, ss + C, TF, 0, false);
+
+                for (short i = 0; i < D8; ++i) {
+                    simdgroup_multiply(lo[i], mm, lo[i]);
+                }
+            }
+
+            // O = O + (Q*K^T)*V
+            {
+                for (short cc = 0; cc < C/8; ++cc) {
+                    device const half * pv = (device const half *) ((device const char *) v + ((ic + 8*cc)*nb21 + iv2*nb22 + iv3*nb23));
+
+                    for (short i = 0; i < D8; ++i) {
+                        simdgroup_half8x8 mk;
+                        simdgroup_load(mk, pv + i*8, nb21/sizeof(half), 0, false);
+
+                        simdgroup_float8x8 mv;
+                        simdgroup_load(mv, ss + 8*cc, TF, 0, false);
+
+                        simdgroup_multiply_accumulate(lo[i], mv, mk, lo[i]);
+                    }
+                }
+            }
+        }
+
+        // these are needed for reducing the results from the simdgroups (reuse the ss buffer)
+        for (short j = 0; j < Q; ++j) {
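+            // only lane 0 of each simdgroup writes below: it publishes the
+            // simdgroup's running softmax sum S[j] and running max M[j] so that
+            // the sequential warp reduction that follows can combine the partial results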
+            if (tiisg == 0) {
+                ss[j*TF + 0] = S[j];
+                ss[j*TF + 1] = M[j];
+            }
+        }
+    }
+
+    // reduce the warps sequentially
+    for (short sg = 1; sg < nsg; ++sg) {
+        float S = { 0.0h };
+        float M = { -FLT_MAX/2 };
+
+        threadgroup_barrier(mem_flags::mem_threadgroup);
+
+        // each simdgroup stores its output to shared memory, reusing sq
+        if (sgitg == sg) {
+            for (short i = 0; i < D8; ++i) {
+                simdgroup_store(lo[i], sq + i*8, T, 0, false);
+            }
+        }
+
+        threadgroup_barrier(mem_flags::mem_threadgroup);
+
+        // the first simdgroup accumulates the results from the other simdgroups
+        if (sgitg == 0) {
+            for (short j = 0; j < Q; ++j) {
+                const float S0 = ss[j*TF +         0];
+                const float S1 = ss[j*TF + sg*SH + 0];
+
+                const float M0 = ss[j*TF +         1];
+                const float M1 = ss[j*TF + sg*SH + 1];
+
+                M = max(M0, M1);
+
+                const float ms0 = exp(M0 - M);
+                const float ms1 = exp(M1 - M);
+
+                S = S0*ms0 + S1*ms1;
+
+                if (tiisg == 0) {
+                    ss[j*TF + 0] = S;
+                    ss[j*TF + 1] = M;
+
+                    ss[j*TF + C + j        ] = ms0;
+                    ss[j*TF + C + j + sg*SH] = ms1;
+                }
+            }
+
+            // O_0 = diag(ms0)*O_0 + diag(ms1)*O_1
+            {
+                simdgroup_half8x8 t;
+                simdgroup_float8x8 ms0;
+                simdgroup_float8x8 ms1;
+
+                simdgroup_load(ms0, ss + C,         TF, 0, false);
+                simdgroup_load(ms1, ss + C + sg*SH, TF, 0, false);
+
+                for (short i = 0; i < D8; ++i) {
+                    simdgroup_load    (t, sq + i*8, T, 0, false);
+                    simdgroup_multiply(t, ms1, t);
+
+                    simdgroup_multiply_accumulate(lo[i], ms0, lo[i], t);
+                }
+            }
+        }
+    }
+
+    // store result to shared memory (reuse sq)
+    if (sgitg == 0) {
+        for (short i = 0; i < D8; ++i) {
+            simdgroup_store(lo[i], sq + i*8, T, 0, false);
+        }
+    }
+
+    device float4 * dst4 = (device float4 *) dst;
+
+    // final rescale with 1/S and store to global memory
+    if (sgitg == 0) {
+        for (short j = 0; j < Q && iq1 + j < ne01; ++j) {
+            const float S = ss[j*TF + 0];
+
+            for (short i = tiisg; i < D4; i += NW) {
+                dst4[(iq3*ne2*ne1 + iq2 + (iq1 + j)*ne1)*D4 + i] = (float4) sq4[j*T4 + i]/S;
+            }
+        }
+    }
+}
+
+template [[host_name("kernel_flash_attn_ext_f16_h64" )]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<64>;
+template [[host_name("kernel_flash_attn_ext_f16_h80" )]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<80>;
+template [[host_name("kernel_flash_attn_ext_f16_h96" )]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<96>;
+template [[host_name("kernel_flash_attn_ext_f16_h112")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<112>;
+template [[host_name("kernel_flash_attn_ext_f16_h128")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<128>;
+template [[host_name("kernel_flash_attn_ext_f16_h256")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<256>;
+
+template<int64_t D, int64_t Q = 1, int64_t C = 32> // head size, queries per threadgroup, cache items per threadgroup
+kernel void kernel_flash_attn_ext_vec_f16(
+        device const  char * q,
+        device const  char * k,
+        device const  char * v,
+        device const  char * mask,
+        device       float * dst,
+        constant   int64_t & ne00,
+        constant   int64_t & ne01,
+        constant   int64_t & ne02,
+        constant   int64_t & ne03,
+        constant  uint64_t & nb00,
+        constant  uint64_t & nb01,
+        constant  uint64_t & nb02,
+        constant  uint64_t & nb03,
+        constant   int64_t & ne10,
+        constant   int64_t & ne11,
+        constant   int64_t & ne12,
+        constant   int64_t & ne13,
+        constant  uint64_t & nb10,
+        constant  uint64_t & nb11,
+        constant  uint64_t & nb12,
+        constant  uint64_t & nb13,
+        constant   int64_t & ne31,
+        constant  uint64_t & nb31,
+        constant   int64_t & ne0,
+        constant   int64_t & ne1,
+        constant   int64_t & ne2,
+        constant   int64_t & ne3,
+        constant     float & scale,
+        threadgroup   half * shared
[[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]], + ushort tiisg[[thread_index_in_simdgroup]], + ushort sgitg[[simdgroup_index_in_threadgroup]]) { + const short nsg = ntg.y; // number of simdgroups + + const short iq3 = tgpig[2]; + const short iq2 = tgpig[1]; + const short iq1 = tgpig[0]; + + const short D4 = D/4; + const short NW = N_SIMDWIDTH; + const short SH = (C + Q); // shared memory per simdgroup in (half) + + const short T = D + 2*nsg*SH; // shared memory size per query in (half) + + //threadgroup half * sq = (threadgroup half *) (shared + 0*D); // holds the query data + threadgroup half4 * sq4 = (threadgroup half4 *) (shared + 0*D); // same as above but in half4 + threadgroup float * ss = (threadgroup float *) (shared + 2*sgitg*SH + 1*D); // scratch buffer for attention and diagonal matrix + threadgroup float4 * ss4 = (threadgroup float4 *) (shared + 2*sgitg*SH + 1*D); // same as above but in half4 + threadgroup half4 * sr4 = (threadgroup half4 *) (shared + sgitg*D + 1*T); // scratch buffer for the results + + // store the result for all queries in local memory in 8x8 matrices (the O matrix from the paper) + half4 lo[D4/NW]; + + // load heads from Q to shared memory + device const float4 * q4 = (device const float4 *) ((device const char *) q + (iq1*nb01 + iq2*nb02 + iq3*nb03)); + + for (short i = tiisg; i < D4; i += NW) { + if (iq1 < ne01) { + sq4[i] = (half4) q4[i]; + } else { + sq4[i] = 0.0h; + } + } + + // zero out lo + for (short i = tiisg; i < D4; i += NW) { + lo[i/NW] = 0.0h; + } + + // zero out shared memory SH + for (short i = tiisg; i < SH/4; i += NW) { + ss4[i] = 0.0h; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + { + float S = { 0.0h }; + float M = { -FLT_MAX/2 }; + + // assume K and V are same shape + const short ne22 = ne12; + const short ne23 = ne13; + + const uint nb21 = nb11; + const uint nb22 = nb12; + const uint nb23 = nb13; + + // broadcast + const short rk2 = ne02/ne12; + const short rk3 = ne03/ne13; + + const short rv2 = ne02/ne22; + const short rv3 = ne03/ne23; + + // k indices + const short ik2 = iq2 / rk2; + const short ik3 = iq3 / rk3; + + // v indices + const short iv2 = iq2 / rv2; + const short iv3 = iq3 / rv3; + + // load the queries from shared memory into local memory + half4 mq[D4]; + + for (short ii = 0; ii < D4; ii += NW) { + short i = ii + tiisg; + mq[i] = sq4[i]; + } + + // pointer to the mask + device const half4 * mp4 = (device const half4 *) (mask + iq1*nb31); + + // loop over the KV cache + // each simdgroup handles blocks of Q rows and C columns + for (int ic0 = 0; ic0 < ne11; ic0 += C*nsg) { + const int ic = ic0 + C*sgitg; + if (ic >= ne11) { + break; + } + + // Q*K^T + { +#pragma unroll + for (short cc = 0; cc < C/4; ++cc) { + float4 mqk = { 0.0h }; + + device const half4 * pk4 = (device const half4 *) ((device const char *) k + ((ic + 4*cc)*nb11 + ik2*nb12 + ik3*nb13)); + +#pragma unroll + for (short ii = 0; ii < D4; ii += NW) { + const short i = ii + tiisg; + + half4x4 mk; + mk[0] = pk4[i + 0*(nb11/8)]; + mk[1] = pk4[i + 1*(nb11/8)]; + mk[2] = pk4[i + 2*(nb11/8)]; + mk[3] = pk4[i + 3*(nb11/8)]; + + mqk += (float4) (mq[i] * mk); + } + + // reduce the results from the threads in the simdgroup + mqk += simd_shuffle_down(mqk, 16); + mqk += simd_shuffle_down(mqk, 8); + mqk += simd_shuffle_down(mqk, 4); + mqk += simd_shuffle_down(mqk, 2); + mqk += simd_shuffle_down(mqk, 1); + + // mqk = mqk*scale + mask + if (tiisg == 0) { 
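+                        // the simd_shuffle_down chain above leaves the fully
+                        // reduced dot products in lane 0 only, so that lane
+                        // alone applies the scale and mask and stores the scores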
+                        float4 mm = (float4) mp4[ic/4 + cc];
+                        mqk = mqk*scale + mm;
+
+                        ss4[cc] = mqk;
+                    }
+                }
+            }
+
+            // online softmax
+            {
+                const short p = tiisg;
+
+                const float m = M;
+                const float s = ss[p];
+
+                M = simd_max(max(M, s));
+
+                const float ms = exp(m - M);
+                const float vs = exp(s - M);
+
+                S = S*ms + simd_sum(vs);
+
+                // the P matrix from the paper (Q rows, C columns)
+                ss[p] = vs;
+
+                // O = diag(ms)*O
+#pragma unroll
+                for (short ii = 0; ii < D4; ii += NW) {
+                    const short i = ii + tiisg;
+                    lo[i/NW] *= ms;
+                }
+            }
+
+            // O = O + (Q*K^T)*V
+            {
+#pragma unroll
+                for (short cc = 0; cc < C/4; ++cc) {
+                    device const half4 * pv4 = (device const half4 *) ((device const char *) v + ((ic + 4*cc)*nb21 + iv2*nb22 + iv3*nb23));
+
+#pragma unroll
+                    for (short ii = 0; ii < D4; ii += NW) {
+                        const short i = ii + tiisg;
+
+                        lo[i/NW] += pv4[i + 0*(nb21/8)] * ss[4*cc + 0];
+                        lo[i/NW] += pv4[i + 1*(nb21/8)] * ss[4*cc + 1];
+                        lo[i/NW] += pv4[i + 2*(nb21/8)] * ss[4*cc + 2];
+                        lo[i/NW] += pv4[i + 3*(nb21/8)] * ss[4*cc + 3];
+                    }
+                }
+            }
+
+        }
+
+        // these are needed for reducing the results from the simdgroups (reuse the ss buffer)
+        if (tiisg == 0) {
+            ss[0] = S;
+            ss[1] = M;
+        }
+    }
+
+    // store results to shared memory
+    for (short ii = 0; ii < D4; ii += NW) {
+        short i = ii + tiisg;
+        sr4[i] = lo[ii/NW];
+    }
+
+    threadgroup_barrier(mem_flags::mem_threadgroup);
+
+    // parallel reduce
+    for (short r = nsg/2; r > 0; r >>= 1) {
+        if (sgitg < r) {
+            const float S0 = ss[       0];
+            const float S1 = ss[r*SH + 0];
+
+            const float M0 = ss[       1];
+            const float M1 = ss[r*SH + 1];
+
+            const float M = max(M0, M1);
+
+            const float ms0 = exp(M0 - M);
+            const float ms1 = exp(M1 - M);
+
+            const float S = S0*ms0 + S1*ms1;
+
+            if (tiisg == 0) {
+                ss[0] = S;
+                ss[1] = M;
+            }
+
+            // O_0 = diag(ms0)*O_0 + diag(ms1)*O_1
+            for (short ii = 0; ii < D4; ii += NW) {
+                short i = ii + tiisg;
+                sr4[i] = sr4[i]*ms0 + sr4[i + r*D4]*ms1;
+            }
+        }
+
+        threadgroup_barrier(mem_flags::mem_threadgroup);
+    }
+
+    device float4 * dst4 = (device float4 *) dst;
+
+    // final rescale with 1/S and store to global memory
+    if (sgitg == 0) {
+        const float S = ss[0];
+
+        for (short ii = 0; ii < D4; ii += NW) {
+            short i = ii + tiisg;
+            dst4[(iq3*ne2*ne1 + iq2 + (iq1)*ne1)*D4 + i] = (float4) sr4[i]/S;
+        }
+    }
+}
+
+template [[host_name("kernel_flash_attn_ext_vec_f16_h128")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_vec_f16<128>;
+template [[host_name("kernel_flash_attn_ext_vec_f16_h256")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_vec_f16<256>;
+
 kernel void kernel_cpy_f16_f16(
         device const half * src0,
         device       half * dst,
diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp
index 2b76b3ebd64f7..57fe4ea3d4ac2 100644
--- a/ggml-sycl.cpp
+++ b/ggml-sycl.cpp
@@ -14744,7 +14744,12 @@ inline void ggml_sycl_op_soft_max(const ggml_tensor *src0,
     GGML_ASSERT(src0->type == GGML_TYPE_F32);
     GGML_ASSERT( dst->type == GGML_TYPE_F32);
 
+    const ggml_tensor * src2 = dst->src[2];
+
+#pragma message("TODO: add ggml_sycl_op_soft_max() F16 src1 and src2 support")
+#pragma message("ref: https://github.com/ggerganov/llama.cpp/pull/5021")
     GGML_ASSERT(!src1 || src1->type == GGML_TYPE_F32); // src1 contains mask and it is optional
+    GGML_ASSERT(!src2 || src2->type == GGML_TYPE_F32); // src2 contains positions and it is optional
 
     const int64_t ne00 = src0->ne[0];
     const int64_t nrows_x = ggml_nrows(src0);
@@ -14760,7 +14765,6 @@ inline void ggml_sycl_op_soft_max(const ggml_tensor *src0,
     float * src2_dd = nullptr;
     sycl_pool_alloc<float> src2_f;
 
-    ggml_tensor * src2 = dst->src[2];
     const bool use_src2 = src2 != nullptr;
 
     if (use_src2) {
diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp
index 1736ab7361c27..f712cdd5a900e 100644
--- a/ggml-vulkan.cpp
+++ b/ggml-vulkan.cpp
@@ -3178,6 +3178,11 @@ static vk_pipeline ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const
         }
         return nullptr;
     case GGML_OP_SOFT_MAX:
+#pragma message("TODO: add ggml_vk_soft_max() F16 src1 and src2 support")
+#pragma message("ref: https://github.com/ggerganov/llama.cpp/pull/5021")
+        GGML_ASSERT(!src1 || src1->type == GGML_TYPE_F32);
+        GGML_ASSERT(!src2 || src2->type == GGML_TYPE_F32);
+
         if (src0->type == GGML_TYPE_F32 && (src1 == nullptr || src1->type == GGML_TYPE_F32) && (src2 == nullptr || src2->type == GGML_TYPE_F32) && dst->type == GGML_TYPE_F32) {
             return ctx->device->pipeline_soft_max_f32;
         }
diff --git a/ggml.c b/ggml.c
index cb273061c5c53..74ecd59279167 100644
--- a/ggml.c
+++ b/ggml.c
@@ -951,7 +951,7 @@ ggml_type_traits_t ggml_internal_get_type_traits(enum ggml_type type) {
     #define GGML_F16_VEC_ZERO           GGML_F16x8_ZERO
     #define GGML_F16_VEC_SET1           GGML_F16x8_SET1
     #define GGML_F16_VEC_LOAD(p, i)     GGML_F16x8_LOAD(p)
-    #define GGML_F16_VEC_STORE(p, r, i) GGML_F16x8_STORE(p, r[i])
+    #define GGML_F16_VEC_STORE(p, r, i) GGML_F16x8_STORE((ggml_fp16_internal_t *)(p), r[i])
     #define GGML_F16_VEC_FMA            GGML_F16x8_FMA
     #define GGML_F16_VEC_ADD            GGML_F16x8_ADD
     #define GGML_F16_VEC_MUL            GGML_F16x8_MUL
@@ -977,7 +977,7 @@ ggml_type_traits_t ggml_internal_get_type_traits(enum ggml_type type) {
     #define GGML_F16_VEC_ZERO           GGML_F32Cx4_ZERO
     #define GGML_F16_VEC_SET1           GGML_F32Cx4_SET1
     #define GGML_F16_VEC_LOAD(p, i)     GGML_F32Cx4_LOAD(p)
-    #define GGML_F16_VEC_STORE(p, r, i) GGML_F32Cx4_STORE(p, r[i])
+    #define GGML_F16_VEC_STORE(p, r, i) GGML_F32Cx4_STORE((ggml_fp16_internal_t *)(p), r[i])
     #define GGML_F16_VEC_FMA            GGML_F32Cx4_FMA
     #define GGML_F16_VEC_ADD            GGML_F32Cx4_ADD
     #define GGML_F16_VEC_MUL            GGML_F32Cx4_MUL
@@ -1046,7 +1046,7 @@ do {                                                              \
 
 // unlike  _mm256_cvt intrinsics that require F16C, _mm512_cvt is defined in AVX512F
 // so F16C guard isn't required
-#define GGML_F32Cx16_LOAD(x)     _mm512_cvtph_ps(_mm256_loadu_si256((__m256i *)(x)))
+#define GGML_F32Cx16_LOAD(x)     _mm512_cvtph_ps(_mm256_loadu_si256((const __m256i *)(x)))
 #define GGML_F32Cx16_STORE(x, y) _mm256_storeu_si256((__m256i *)(x), _mm512_cvtps_ph(y, 0))
 
 #define GGML_F32Cx16_FMA(a, b, c)    _mm512_fmadd_ps(b, c, a)
@@ -1144,7 +1144,7 @@ do {                                                              \
 
 #if defined(__F16C__)
 // the  _mm256_cvt intrinsics require F16C
-#define GGML_F32Cx8_LOAD(x)     _mm256_cvtph_ps(_mm_loadu_si128((__m128i *)(x)))
+#define GGML_F32Cx8_LOAD(x)     _mm256_cvtph_ps(_mm_loadu_si128((const __m128i *)(x)))
 #define GGML_F32Cx8_STORE(x, y) _mm_storeu_si128((__m128i *)(x), _mm256_cvtps_ph(y, 0))
 #else
 static inline __m256 __avx_f32cx8_load(ggml_fp16_t *x) {
@@ -1662,6 +1662,37 @@ inline static void ggml_vec_mad_f32(const int n, float * restrict y, const float
 #endif
 }
 
+inline static void ggml_vec_mad_f16(const int n, ggml_fp16_t * restrict y, const ggml_fp16_t * restrict x, const float v) {
+#if defined(GGML_SIMD)
+    const int np = (n & ~(GGML_F16_STEP - 1));
+
+    GGML_F16_VEC vx = GGML_F16_VEC_SET1(v);
+
+    GGML_F16_VEC ax[GGML_F16_ARR];
+    GGML_F16_VEC ay[GGML_F16_ARR];
+
+    for (int i = 0; i < np; i += GGML_F16_STEP) {
+        for (int j = 0; j < GGML_F16_ARR; j++) {
+            ax[j] = GGML_F16_VEC_LOAD(x + i + j*GGML_F16_EPR, j);
+            ay[j] = GGML_F16_VEC_LOAD(y + i + j*GGML_F16_EPR, j);
+            ay[j] = GGML_F16_VEC_FMA(ay[j], ax[j], vx);
+
+            GGML_F16_VEC_STORE(y + i + j*GGML_F16_EPR, ay, j);
+        }
+    }
+
+    // leftovers
+    for (int i = np; i < n; ++i) {
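+        // scalar tail: np is n rounded down to a multiple of GGML_F16_STEP, so
+        // each remaining element is converted to F32, accumulated, and converted back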
+ y[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(y[i]) + GGML_FP16_TO_FP32(x[i])*v); + } +#else + // scalar + for (int i = 0; i < n; ++i) { + y[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(y[i]) + GGML_FP16_TO_FP32(x[i])*v); + } +#endif +} + // xs and vs are byte strides of x and v inline static void ggml_vec_mad_f32_unroll(const int n, const int xs, const int vs, float * restrict y, const float * restrict xv, const float * restrict vv) { @@ -1746,6 +1777,35 @@ inline static void ggml_vec_scale_f32(const int n, float * y, const float v) { #endif } +inline static void ggml_vec_scale_f16(const int n, ggml_fp16_t * y, const float v) { +#if defined(GGML_SIMD) + const int np = (n & ~(GGML_F16_STEP - 1)); + + GGML_F16_VEC vx = GGML_F16_VEC_SET1(v); + + GGML_F16_VEC ay[GGML_F16_ARR]; + + for (int i = 0; i < np; i += GGML_F16_STEP) { + for (int j = 0; j < GGML_F16_ARR; j++) { + ay[j] = GGML_F16_VEC_LOAD(y + i + j*GGML_F16_EPR, j); + ay[j] = GGML_F16_VEC_MUL(ay[j], vx); + + GGML_F16_VEC_STORE(y + i + j*GGML_F16_EPR, ay, j); + } + } + + // leftovers + for (int i = np; i < n; ++i) { + y[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(y[i])*v); + } +#else + // scalar + for (int i = 0; i < n; ++i) { + y[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(y[i])*v); + } +#endif +} + inline static void ggml_vec_norm_f32 (const int n, float * s, const float * x) { ggml_vec_dot_f32(n, s, 0, x, 0, x, 0, 1); *s = sqrtf(*s); } inline static void ggml_vec_sqr_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = x[i]*x[i]; } inline static void ggml_vec_sqrt_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = sqrtf(x[i]); } @@ -2000,6 +2060,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "LEAKY_RELU", "FLASH_ATTN", + "FLASH_ATTN_EXT", "FLASH_FF", "FLASH_ATTN_BACK", "SSM_CONV", @@ -2026,7 +2087,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "CROSS_ENTROPY_LOSS_BACK", }; -static_assert(GGML_OP_COUNT == 76, "GGML_OP_COUNT != 76"); +static_assert(GGML_OP_COUNT == 77, "GGML_OP_COUNT != 77"); static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "none", @@ -2090,6 +2151,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "leaky_relu(x)", "flash_attn(x)", + "flash_attn_ext(x)", "flash_ff(x)", "flash_attn_back(x)", "ssm_conv(x)", @@ -2116,7 +2178,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "cross_entropy_loss_back(x,y)", }; -static_assert(GGML_OP_COUNT == 76, "GGML_OP_COUNT != 76"); +static_assert(GGML_OP_COUNT == 77, "GGML_OP_COUNT != 77"); static_assert(GGML_OP_POOL_COUNT == 2, "GGML_OP_POOL_COUNT != 2"); @@ -4559,6 +4621,8 @@ struct ggml_tensor * ggml_mul_mat( void ggml_mul_mat_set_prec( struct ggml_tensor * a, enum ggml_prec prec) { + GGML_ASSERT(a->op == GGML_OP_MUL_MAT); + const int32_t prec_i32 = (int32_t) prec; ggml_set_op_params_i32(a, 0, prec_i32); @@ -5397,17 +5461,23 @@ static struct ggml_tensor * ggml_soft_max_impl( GGML_ASSERT(ggml_is_contiguous(a)); if (mask) { + GGML_ASSERT(mask->type == GGML_TYPE_F16 || mask->type == GGML_TYPE_F32); GGML_ASSERT(ggml_is_contiguous(mask)); GGML_ASSERT(ggml_is_matrix(mask)); - GGML_ASSERT(ggml_can_repeat_rows(mask, a)); + GGML_ASSERT(mask->ne[0] == a->ne[0]); + GGML_ASSERT(mask->ne[1] >= a->ne[1]); } if (pos) { GGML_ASSERT(ggml_is_vector(pos)); - GGML_ASSERT(pos->type == GGML_TYPE_F32); + GGML_ASSERT(pos->type == GGML_TYPE_F16 || pos->type == GGML_TYPE_F32); GGML_ASSERT(pos->ne[0] == a->ne[0]); } + if (pos && mask) { + GGML_ASSERT(pos->type == mask->type); + } + if (max_bias > 0.0f) { 
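+        // ALiBi bias is requested, so the per-position tensor must be provided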
GGML_ASSERT(pos); } @@ -6216,6 +6286,59 @@ struct ggml_tensor * ggml_flash_attn( return result; } +// ggml_flash_attn_ext + +struct ggml_tensor * ggml_flash_attn_ext( + struct ggml_context * ctx, + struct ggml_tensor * q, + struct ggml_tensor * k, + struct ggml_tensor * v, + struct ggml_tensor * mask, + float scale) { + GGML_ASSERT(ggml_can_mul_mat(k, q)); + // TODO: check if vT can be multiplied by (k*qT) + if (mask) { + GGML_ASSERT(ggml_is_contiguous(mask)); + GGML_ASSERT(mask->ne[2] == 1); + GGML_ASSERT(mask->ne[3] == 1); + GGML_ASSERT(mask->ne[1] >= GGML_PAD(q->ne[1], GGML_KQ_MASK_PAD) && + "the Flash-Attention kernel requires the mask to be padded to GGML_KQ_MASK_PAD and at least n_queries big"); + //GGML_ASSERT(ggml_can_repeat_rows(mask, qk)); + } + + bool is_node = false; + + if (q->grad || k->grad || v->grad) { + is_node = true; + } + + // permute(0, 2, 1, 3) + int64_t ne[4] = { q->ne[0], q->ne[2], q->ne[1], q->ne[3] }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + float params[] = { scale }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_FLASH_ATTN_EXT; + result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = q; + result->src[1] = k; + result->src[2] = v; + result->src[3] = mask; + + return result; +} + +void ggml_flash_attn_ext_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec) { + GGML_ASSERT(a->op == GGML_OP_FLASH_ATTN_EXT); + + const int32_t prec_i32 = (int32_t) prec; + + ggml_set_op_params_i32(a, 1, prec_i32); // scale is on first pos +} + // ggml_flash_ff struct ggml_tensor * ggml_flash_ff( @@ -12255,7 +12378,7 @@ static void ggml_compute_forward_soft_max_f32( GGML_TENSOR_UNARY_OP_LOCALS - const int64_t ne11 = src1 ? src1->ne[1] : 1; + //const int64_t ne11 = src1 ? src1->ne[1] : 1; // TODO: is this supposed to be ceil instead of floor? // https://huggingface.co/mosaicml/mpt-7b/blob/main/attention.py#L370 @@ -12278,19 +12401,31 @@ static void ggml_compute_forward_soft_max_f32( float * wp = (float *) params->wdata + (nc + CACHE_LINE_SIZE_F32) * ith; // when max_bias <= 0.0f, src2 is not used and we default it to src0 to avoid branching - float * pos = src2 ? (float *) src2->data : src0->data; + ggml_fp16_t * pos_f16 = src2 ? (ggml_fp16_t *) src2->data : src0->data; + float * pos_f32 = src2 ? (float *) src2->data : src0->data; + + const bool use_f16 = (src1 && src1->type == GGML_TYPE_F16) || (src2 && src2->type == GGML_TYPE_F16); for (int i1 = ir0; i1 < ir1; i1++) { float * sp = (float *)((char *) src0->data + i1*src0->nb[1]); float * dp = (float *)((char *) dst->data + i1*dst->nb[1]); // broadcast the mask across rows - float * mp = src1 ? (float *)((char *) src1->data + (i1%ne11)*src1->nb[1]) : NULL; + ggml_fp16_t * mp_f16 = src1 ? (ggml_fp16_t *)((char *) src1->data) + (i1%ne01)*ne00 : NULL; + float * mp_f32 = src1 ? (float *)((char *) src1->data) + (i1%ne01)*ne00 : NULL; ggml_vec_cpy_f32 (nc, wp, sp); ggml_vec_scale_f32(nc, wp, scale); - if (mp) { - ggml_vec_acc_f32(nc, wp, mp); + if (mp_f32) { + if (use_f16) { + for (int i = 0; i < nc; ++i) { + wp[i] += GGML_FP16_TO_FP32(mp_f16[i]); + } + } else { + for (int i = 0; i < nc; ++i) { + wp[i] += mp_f32[i]; + } + } } // ALiBi bias @@ -12298,8 +12433,14 @@ static void ggml_compute_forward_soft_max_f32( const uint32_t h = (i1/ne01)%ne02; // head const float slope = h < n_head_log2 ? 
powf(m0, h + 1) : powf(m1, 2*(h - n_head_log2) + 1); - for (int i = 0; i < nc; i++) { - wp[i] = wp[i] + slope*pos[i]; + if (use_f16) { + for (int i = 0; i < nc; ++i) { + wp[i] += slope*GGML_FP16_TO_FP32(pos_f16[i]); + } + } else { + for (int i = 0; i < nc; ++i) { + wp[i] += slope*pos_f32[i]; + } } } @@ -14569,6 +14710,198 @@ static void ggml_compute_forward_flash_attn( } } +// ggml_compute_forward_flash_attn_ext + +static void ggml_compute_forward_flash_attn_ext_f16( + const struct ggml_compute_params * params, + const struct ggml_tensor * q, + const struct ggml_tensor * k, + const struct ggml_tensor * v, + const struct ggml_tensor * mask, + struct ggml_tensor * dst) { + int64_t t0 = ggml_perf_time_us(); + UNUSED(t0); + + GGML_TENSOR_LOCALS(int64_t, neq, q, ne) + GGML_TENSOR_LOCALS(size_t, nbq, q, nb) + GGML_TENSOR_LOCALS(int64_t, nek, k, ne) + GGML_TENSOR_LOCALS(size_t, nbk, k, nb) + GGML_TENSOR_LOCALS(int64_t, nev, v, ne) + GGML_TENSOR_LOCALS(size_t, nbv, v, nb) + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) + GGML_TENSOR_LOCALS(size_t, nb, dst, nb) + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t D = neq0; + const int64_t N = neq1; + + GGML_ASSERT(ne0 == D); + GGML_ASSERT(ne2 == N); + + GGML_ASSERT(nbq0 == sizeof(float)); + GGML_ASSERT(nbk0 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nbv0 == sizeof(ggml_fp16_t)); + + GGML_ASSERT(neq0 == D); + GGML_ASSERT(nek0 == D); + GGML_ASSERT(nev0 == D); + + GGML_ASSERT(neq1 == N); + GGML_ASSERT(nev0 == D); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + // broadcast factors + const int64_t rk2 = neq2/nek2; + const int64_t rk3 = neq3/nek3; + + const int64_t rv2 = neq2/nev2; + const int64_t rv3 = neq3/nev3; + + if (params->type == GGML_TASK_TYPE_INIT) { + return; + } + + if (params->type == GGML_TASK_TYPE_FINALIZE) { + return; + } + + // parallelize by q rows using ggml_vec_dot_f32 + + // total rows in q + const int nr = neq1*neq2*neq3; + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + float scale = 1.0f; + memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + + // loop over n_batch and n_head + for (int ir = ir0; ir < ir1; ++ir) { + // q indices + const int iq3 = ir/(neq2*neq1); + const int iq2 = (ir - iq3*neq2*neq1)/neq1; + const int iq1 = (ir - iq3*neq2*neq1 - iq2*neq1); + + float S = 0.0f; + float M = -INFINITY; + + float * V32 = (float *) params->wdata + ith*(2*D + CACHE_LINE_SIZE_F32); + ggml_fp16_t * Q16 = (ggml_fp16_t *) (V32); // reuse memory + ggml_fp16_t * V16 = (ggml_fp16_t *) (V32 + D); + + memset(V16, 0, D*sizeof(ggml_fp16_t)); + + const ggml_fp16_t * mp = mask ? (ggml_fp16_t *)((char *) mask->data + iq1*mask->nb[1]) : NULL; + + // k indices + const int ik3 = iq3 / rk3; + const int ik2 = iq2 / rk2; + + // v indices + const int iv3 = iq3 / rv3; + const int iv2 = iq2 / rv2; + + // online softmax / attention + // loop over n_kv and n_head_kv + // ref: https://arxiv.org/pdf/2112.05682.pdf + for (int64_t ic = 0; ic < nek1; ++ic) { + const float mv = mp ? 
GGML_FP16_TO_FP32(mp[ic]) : 0.0f; + if (mv == -INFINITY) { + continue; + } + + float s; + + // convert Q to F16 in V32 + { + const float * pq = (const float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)); + + for (int64_t d = 0; d < D; ++d) { + Q16[d] = GGML_FP32_TO_FP16(pq[d]); + } + } + + ggml_vec_dot_f16(D, + &s, 0, + (ggml_fp16_t *) ((char *) k->data + ( ic*nbk1 + ik2*nbk2 + ik3*nbk3)), 0, + Q16, 0, 1); + + s = s*scale + mv; + + const float Mold = M; + + float ms = 1.0f; + float vs = 1.0f; + + if (s > M) { + M = s; + ms = expf(Mold - M); + + // V = V*expf(Mold - M) + ggml_vec_scale_f16(D, V16, ms); + } else { + vs = expf(s - M); + } + + const ggml_fp16_t * v16 = (const ggml_fp16_t *) ((char *) v->data + (ic*nbv1 + iv2*nbv2 + iv3*nbv3)); + + // V += v*expf(s - M) + ggml_vec_mad_f16(D, V16, v16, vs); + + S = S*ms + vs; + } + + // V /= S + for (int64_t d = 0; d < D; ++d) { + V32[d] = GGML_FP16_TO_FP32(V16[d])/S; + } + + // dst indices + const int i1 = iq1; + const int i2 = iq2; + const int i3 = iq3; + + // original + //memcpy((char *) dst->data + (i1*nb1 + i2*nb2 + i3*nb3), V, nev0*sizeof(float)); + + // permute(0, 2, 1, 3) + memcpy((char *) dst->data + (i3*ne2*ne1 + i2 + i1*ne1)*nb1, V32, nb1); + } +} + +static void ggml_compute_forward_flash_attn_ext( + const struct ggml_compute_params * params, + const struct ggml_tensor * q, + const struct ggml_tensor * k, + const struct ggml_tensor * v, + const struct ggml_tensor * mask, + struct ggml_tensor * dst) { + switch (dst->op_params[1]) { + case GGML_PREC_DEFAULT: + case GGML_PREC_F32: + { + // uses F32 accumulators + ggml_compute_forward_flash_attn_ext_f16(params, q, k, v, mask, dst); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + // ggml_compute_forward_flash_ff static void ggml_compute_forward_flash_ff_f16( @@ -16376,6 +16709,10 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm const bool masked = t != 0; ggml_compute_forward_flash_attn(params, masked, tensor); } break; + case GGML_OP_FLASH_ATTN_EXT: + { + ggml_compute_forward_flash_attn_ext(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor->src[3], tensor); + } break; case GGML_OP_FLASH_FF: { ggml_compute_forward_flash_ff(params, tensor); @@ -17388,6 +17725,7 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor GGML_ASSERT(false); // TODO: not implemented } break; case GGML_OP_FLASH_ATTN: + case GGML_OP_FLASH_ATTN_EXT: { struct ggml_tensor * flash_grad = NULL; if (src0->grad || src1->grad || tensor->src[2]->grad) { @@ -18160,6 +18498,7 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads, int n_cur_ n_tasks = n_threads; } break; case GGML_OP_FLASH_ATTN: + case GGML_OP_FLASH_ATTN_EXT: { n_tasks = n_threads; } break; @@ -18563,6 +18902,12 @@ struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threa cur += sizeof(float)*ne11*n_tasks; // this is overestimated by x2 } } break; + case GGML_OP_FLASH_ATTN_EXT: + { + const int64_t ne00 = node->src[0]->ne[0]; // D + + cur = 2*sizeof(float)*ne00*n_tasks; // 2x head size + } break; case GGML_OP_FLASH_FF: { if (node->src[1]->type == GGML_TYPE_F32) { diff --git a/ggml.h b/ggml.h index 86e5a8dc57ad1..a11795973ca3f 100644 --- a/ggml.h +++ b/ggml.h @@ -475,6 +475,7 @@ extern "C" { GGML_OP_LEAKY_RELU, GGML_OP_FLASH_ATTN, + GGML_OP_FLASH_ATTN_EXT, GGML_OP_FLASH_FF, GGML_OP_FLASH_ATTN_BACK, GGML_OP_SSM_CONV, @@ -1722,6 +1723,25 @@ extern "C" { struct ggml_tensor * v, bool masked); +#define GGML_KQ_MASK_PAD 32 
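+
+// illustrative call, mirroring how llm_build_kqv() in llama.cpp uses this API
+// (kq_mask is built via build_inp_KQ_mask() and cast to F16; scale is 1/sqrt(head size)):
+//
+//   struct ggml_tensor * cur = ggml_flash_attn_ext(ctx, q, k, v, kq_mask, 1.0f/sqrtf(float(n_embd_head)));
+//   ggml_flash_attn_ext_set_prec(cur, GGML_PREC_F32); // e.g. for Phi-2/Phi-3, which need F32 accumulation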
+ + // q: [n_embd, n_batch, n_head, 1] + // k: [n_embd, n_kv, n_head_kv, 1] + // v: [n_embd, n_kv, n_head_kv, 1] !! not transposed !! + // mask: [n_kv, n_batch_pad, 1, 1] !! n_batch_pad = GGML_PAD(n_batch, GGML_KQ_MASK_PAD) !! + // res: [n_embd, n_head, n_batch, 1] !! permuted !! + GGML_API struct ggml_tensor * ggml_flash_attn_ext( + struct ggml_context * ctx, + struct ggml_tensor * q, + struct ggml_tensor * k, + struct ggml_tensor * v, + struct ggml_tensor * mask, + float scale); + + GGML_API void ggml_flash_attn_ext_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec); + GGML_API struct ggml_tensor * ggml_flash_attn_back( struct ggml_context * ctx, struct ggml_tensor * q, diff --git a/llama.cpp b/llama.cpp index 72c10ffc202fc..18d6297ce1dfd 100644 --- a/llama.cpp +++ b/llama.cpp @@ -108,7 +108,6 @@ #define LLAMA_MAX_NODES 8192 #define LLAMA_MAX_EXPERTS 60 - // // logging // @@ -1846,7 +1845,7 @@ struct llama_hparams { float f_logit_scale = 0.0f; bool causal_attn = true; - bool need_kq_pos = false; + bool use_alibi = false; // currently, we need KQ_pos data for ALiBi-based models enum llama_pooling_type pooling_type = LLAMA_POOLING_TYPE_NONE; enum llama_rope_type rope_type = LLAMA_ROPE_TYPE_NONE; @@ -1936,6 +1935,7 @@ struct llama_cparams { bool embeddings; bool causal_attn; bool offload_kqv; + bool flash_attn; enum llama_pooling_type pooling_type; @@ -2039,8 +2039,8 @@ struct llama_kv_cache { bool has_shift = false; bool do_defrag = false; bool do_copy = false; - // with recurrent state models, a cell can hold the state for more than one past token - bool recurrent = false; + bool recurrent = false; // with recurrent state models, a cell can hold the state for more than one past token + bool v_trans = true; // the value tensor is transposed // Note: The value of head isn't only used to optimize searching // for a free KV slot. 
llama_decode_internal also uses it, so it @@ -2339,11 +2339,14 @@ struct llama_context { static bool llama_kv_cache_init( struct llama_kv_cache & cache, - const llama_model & model, + const llama_context * ctx, ggml_type type_k, ggml_type type_v, uint32_t kv_size, bool offload) { + const llama_model & model = ctx->model; + const llama_cparams & cparams = ctx->cparams; + const struct llama_hparams & hparams = model.hparams; const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa() + hparams.n_embd_k_s(); @@ -2354,6 +2357,7 @@ static bool llama_kv_cache_init( // TODO: find a nicer way to add other recurrent model architectures cache.recurrent = model.arch == LLM_ARCH_MAMBA; + cache.v_trans = !cparams.flash_attn; // TODO: support mixed reccurent Transformer architectues // NOTE: (!a || b) is a logical implication (a -> b) @@ -2566,6 +2570,10 @@ static void llama_kv_cache_clear(struct llama_kv_cache & cache) { } cache.head = 0; cache.used = 0; + + for (auto & buf : cache.bufs) { + ggml_backend_buffer_clear(buf, 0); + } } static bool llama_kv_cache_seq_rm( @@ -4194,7 +4202,7 @@ static void llm_load_hparams( model.ftype = ml.ftype; if (hparams.f_max_alibi_bias > 0.0f) { - hparams.need_kq_pos = true; + hparams.use_alibi = true; } hparams.rope_type = llama_rope_type(&model); @@ -6203,37 +6211,47 @@ static struct ggml_tensor * llm_build_inp_embd( static void llm_build_kv_store( struct ggml_context * ctx, const llama_hparams & hparams, + const llama_cparams & cparams, const llama_kv_cache & kv, struct ggml_cgraph * graph, struct ggml_tensor * k_cur, struct ggml_tensor * v_cur, - int64_t n_ctx, int32_t n_tokens, int32_t kv_head, const llm_build_cb & cb, int64_t il) { + const int64_t n_ctx = cparams.n_ctx; + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(kv.size == n_ctx); - // compute the transposed [n_tokens, n_embd] V matrix - assert(v_cur->ne[0] == n_embd_v_gqa && v_cur->ne[1] == n_tokens); - struct ggml_tensor * v_cur_t = ggml_transpose(ctx, v_cur); - cb(v_cur_t, "v_cur_t", il); - struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv.k_l[il], n_tokens*n_embd_k_gqa, (ggml_row_size(kv.k_l[il]->type, n_embd_k_gqa))*kv_head); cb(k_cache_view, "k_cache_view", il); - struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv.v_l[il], n_tokens, n_embd_v_gqa, - ( n_ctx)*ggml_element_size(kv.v_l[il]), - (kv_head)*ggml_element_size(kv.v_l[il])); + // note: storing RoPE-ed version of K in the KV cache + ggml_build_forward_expand(graph, ggml_cpy(ctx, k_cur, k_cache_view)); + + assert(v_cur->ne[0] == n_embd_v_gqa && v_cur->ne[1] == n_tokens); + + struct ggml_tensor * v_cache_view = nullptr; + + if (cparams.flash_attn) { + v_cache_view = ggml_view_1d(ctx, kv.v_l[il], n_tokens*n_embd_v_gqa, + (kv_head)*ggml_row_size(kv.v_l[il]->type, n_embd_v_gqa)); + } else { + // note: the V cache is transposed when not using flash attention + v_cache_view = ggml_view_2d(ctx, kv.v_l[il], n_tokens, n_embd_v_gqa, + ( n_ctx)*ggml_element_size(kv.v_l[il]), + (kv_head)*ggml_element_size(kv.v_l[il])); + + v_cur = ggml_transpose(ctx, v_cur); + } cb(v_cache_view, "v_cache_view", il); - // important: storing RoPE-ed version of K in the KV cache! 
- ggml_build_forward_expand(graph, ggml_cpy(ctx, k_cur, k_cache_view)); - ggml_build_forward_expand(graph, ggml_cpy(ctx, v_cur_t, v_cache_view)); + ggml_build_forward_expand(graph, ggml_cpy(ctx, v_cur, v_cache_view)); } static struct ggml_tensor * llm_build_norm( @@ -6453,11 +6471,11 @@ static struct ggml_tensor * llm_build_moe_ffn( return moe_out; } -// if max_alibi_bias > 0 then apply ALiBi static struct ggml_tensor * llm_build_kqv( struct ggml_context * ctx, const llama_model & model, const llama_hparams & hparams, + const llama_cparams & cparams, const llama_kv_cache & kv, struct ggml_cgraph * graph, struct ggml_tensor * wo, @@ -6465,12 +6483,12 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * q_cur, struct ggml_tensor * kq_mask, struct ggml_tensor * kq_pos, - int64_t n_ctx, int32_t n_tokens, int32_t n_kv, float kq_scale, const llm_build_cb & cb, int il) { + const int64_t n_ctx = cparams.n_ctx; const int64_t n_head = hparams.n_head; const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_head_k = hparams.n_embd_head_k; @@ -6488,71 +6506,99 @@ static struct ggml_tensor * llm_build_kqv( 0); cb(k, "k", il); - struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); - cb(kq, "kq", il); + struct ggml_tensor * cur; - if (model.arch == LLM_ARCH_PHI2 || model.arch == LLM_ARCH_PHI3) { - // for this arch, we need to perform the KQ multiplication with F32 precision, otherwise we get NaNs - // ref: https://github.com/ggerganov/llama.cpp/pull/4490#issuecomment-1859055847 - ggml_mul_mat_set_prec(kq, GGML_PREC_F32); - } + if (cparams.flash_attn) { + GGML_UNUSED(model); + GGML_UNUSED(n_ctx); - if (model.arch == LLM_ARCH_GROK) { - // need to do the following: - // multiply by attn_output_multiplyer of 0.08838834764831845 - // and then : - // kq = 30 * tanh(kq / 30) - // before the softmax below + // note: if this assert triggers, then some check has failed earlier + // the idea is to detect during context creation that ALiBi would be used and disable Flash Attention + GGML_ASSERT(kq_pos == nullptr && "ALiBi is not yet supported with Flash Attention"); - //try from phi2 - //ggml_mul_mat_set_prec(kq, GGML_PREC_F32); + // split cached v into n_head heads (not transposed) + struct ggml_tensor * v = + ggml_view_3d(ctx, kv.v_l[il], + n_embd_head_v, n_kv, n_head_kv, + ggml_row_size(kv.v_l[il]->type, n_embd_k_gqa), + ggml_row_size(kv.v_l[il]->type, n_embd_head_k), + 0); + cb(v, "v", il); - kq = ggml_tanh(ctx, ggml_scale(ctx, kq, 0.08838834764831845f/30.0f)); - kq = ggml_scale(ctx, kq, 30); - } + cur = ggml_flash_attn_ext(ctx, q, k, v, kq_mask, kq_scale); + + if (model.arch == LLM_ARCH_PHI2 || model.arch == LLM_ARCH_PHI3) { + ggml_flash_attn_ext_set_prec(cur, GGML_PREC_F32); + } + + cur = ggml_reshape_2d(ctx, cur, n_embd_head_k*n_head, n_tokens); + } else { + struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); + cb(kq, "kq", il); + + if (model.arch == LLM_ARCH_PHI2 || model.arch == LLM_ARCH_PHI3) { + // for this arch, we need to perform the KQ multiplication with F32 precision, otherwise we get NaNs + // ref: https://github.com/ggerganov/llama.cpp/pull/4490#issuecomment-1859055847 + ggml_mul_mat_set_prec(kq, GGML_PREC_F32); + } + + if (model.arch == LLM_ARCH_GROK) { + // need to do the following: + // multiply by attn_output_multiplyer of 0.08838834764831845 + // and then : + // kq = 30 * tanh(kq / 30) + // before the softmax below + + //try from phi2 + //ggml_mul_mat_set_prec(kq, GGML_PREC_F32); + + kq = ggml_tanh(ctx, ggml_scale(ctx, kq, 0.08838834764831845f/30.0f)); + kq = 
ggml_scale(ctx, kq, 30); + } #if defined(GGML_USE_KOMPUTE) #pragma message("TODO: ALiBi support in ggml_soft_max_ext is not implemented for Kompute") #pragma message(" Falling back to ggml_alibi(). Will become an error in Mar 2024") #pragma message("ref: https://github.com/ggerganov/llama.cpp/pull/5488") - if (hparams.f_max_alibi_bias > 0.0f) { - kq = ggml_scale(ctx, kq, kq_scale); - cb(kq, "kq_scaled", il); + if (hparams.use_alibi) { + kq = ggml_scale(ctx, kq, kq_scale); + cb(kq, "kq_scaled", il); - kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, hparams.f_max_alibi_bias); - cb(kq, "kq_scaled_alibi", il); + kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, hparams.f_max_alibi_bias); + cb(kq, "kq_scaled_alibi", il); - kq = ggml_add(ctx, kq, kq_mask); - cb(kq, "kq_masked", il); + kq = ggml_add(ctx, kq, kq_mask); + cb(kq, "kq_masked", il); - kq = ggml_soft_max(ctx, kq); - cb(kq, "kq_soft_max", il); - } else + kq = ggml_soft_max(ctx, kq); + cb(kq, "kq_soft_max", il); + } else #endif - { - kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_pos, kq_scale, hparams.f_max_alibi_bias); - cb(kq, "kq_soft_max_ext", il); - } + { + kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_pos, kq_scale, hparams.f_max_alibi_bias); + cb(kq, "kq_soft_max_ext", il); + } - GGML_ASSERT(kv.size == n_ctx); + GGML_ASSERT(kv.size == n_ctx); - // split cached v into n_head heads - struct ggml_tensor * v = - ggml_view_3d(ctx, kv.v_l[il], - n_kv, n_embd_head_v, n_head_kv, - ggml_element_size(kv.v_l[il])*n_ctx, - ggml_element_size(kv.v_l[il])*n_ctx*n_embd_head_v, - 0); - cb(v, "v", il); + // split cached v into n_head heads + struct ggml_tensor * v = + ggml_view_3d(ctx, kv.v_l[il], + n_kv, n_embd_head_v, n_head_kv, + ggml_element_size(kv.v_l[il])*n_ctx, + ggml_element_size(kv.v_l[il])*n_ctx*n_embd_head_v, + 0); + cb(v, "v", il); - struct ggml_tensor * kqv = ggml_mul_mat(ctx, v, kq); - cb(kqv, "kqv", il); + struct ggml_tensor * kqv = ggml_mul_mat(ctx, v, kq); + cb(kqv, "kqv", il); - struct ggml_tensor * kqv_merged = ggml_permute(ctx, kqv, 0, 2, 1, 3); - cb(kqv_merged, "kqv_merged", il); + struct ggml_tensor * kqv_merged = ggml_permute(ctx, kqv, 0, 2, 1, 3); + cb(kqv_merged, "kqv_merged", il); - struct ggml_tensor * cur = ggml_cont_2d(ctx, kqv_merged, n_embd_head_k*n_head, n_tokens); - cb(cur, "kqv_merged_cont", il); + cur = ggml_cont_2d(ctx, kqv_merged, n_embd_head_k*n_head, n_tokens); + cb(cur, "kqv_merged_cont", il); + } ggml_build_forward_expand(graph, cur); @@ -6572,6 +6618,7 @@ static struct ggml_tensor * llm_build_kv( struct ggml_context * ctx, const llama_model & model, const llama_hparams & hparams, + const llama_cparams & cparams, const llama_kv_cache & kv, struct ggml_cgraph * graph, struct ggml_tensor * wo, @@ -6581,7 +6628,6 @@ static struct ggml_tensor * llm_build_kv( struct ggml_tensor * q_cur, struct ggml_tensor * kq_mask, struct ggml_tensor * kq_pos, - int64_t n_ctx, int32_t n_tokens, int32_t kv_head, int32_t n_kv, @@ -6595,12 +6641,12 @@ static struct ggml_tensor * llm_build_kv( ggml_build_forward_expand(graph, k_cur); ggml_build_forward_expand(graph, v_cur); - llm_build_kv_store(ctx, hparams, kv, graph, k_cur, v_cur, n_ctx, n_tokens, kv_head, cb, il); + llm_build_kv_store(ctx, hparams, cparams, kv, graph, k_cur, v_cur, n_tokens, kv_head, cb, il); struct ggml_tensor * cur; - cur = llm_build_kqv(ctx, model, hparams, kv, graph, wo, wo_b, - q_cur, kq_mask, kq_pos, n_ctx, n_tokens, n_kv, kq_scale, cb, il); + cur = llm_build_kqv(ctx, model, hparams, cparams, kv, graph, wo, wo_b, + q_cur, kq_mask, kq_pos, n_tokens, n_kv, kq_scale, 
cb, il); cb(cur, "kqv_out", il); return cur; @@ -6642,6 +6688,8 @@ struct llm_build_context { const int32_t kv_head; // index of where we store new KV data in the cache const int32_t n_orig_ctx; + const bool flash_attn; + const enum llama_pooling_type pooling_type; const enum llama_rope_type rope_type; @@ -6688,6 +6736,7 @@ struct llm_build_context { n_outputs (worst_case ? n_tokens : lctx.n_outputs), kv_head (worst_case ? (kv_self.recurrent ? 0 : kv_self.size - n_tokens) : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), + flash_attn (cparams.flash_attn), pooling_type (cparams.pooling_type), rope_type (hparams.rope_type), cb (cb), @@ -6802,15 +6851,31 @@ struct llm_build_context { ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa), ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*id)); - ggml_tensor * view_v_src = ggml_view_2d(ctx0, kv_self.v_l[il], - nm, n_embd_v_gqa, - ggml_row_size(kv_self.v_l[il]->type, kv_self.size), - ggml_row_size(kv_self.v_l[il]->type, i)); + ggml_tensor * view_v_src; + ggml_tensor * view_v_dst; - ggml_tensor * view_v_dst = ggml_view_2d(ctx0, kv_self.v_l[il], - nm, n_embd_v_gqa, - ggml_row_size(kv_self.v_l[il]->type, kv_self.size), - ggml_row_size(kv_self.v_l[il]->type, id)); + if (flash_attn) { + // NOTE: the V cache is not transposed when using flash attention + view_v_src = ggml_view_2d(ctx0, kv_self.v_l[il], + n_embd_v_gqa, nm, + ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa), + ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa*i)); + + view_v_dst = ggml_view_2d(ctx0, kv_self.v_l[il], + n_embd_v_gqa, nm, + ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa), + ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa*id)); + } else { + view_v_src = ggml_view_2d(ctx0, kv_self.v_l[il], + nm, n_embd_v_gqa, + ggml_row_size(kv_self.v_l[il]->type, kv_self.size), + ggml_row_size(kv_self.v_l[il]->type, i)); + + view_v_dst = ggml_view_2d(ctx0, kv_self.v_l[il], + nm, n_embd_v_gqa, + ggml_row_size(kv_self.v_l[il]->type, kv_self.size), + ggml_row_size(kv_self.v_l[il]->type, id)); + } ggml_build_forward_expand(gf, ggml_cpy(ctx0, view_k_src, view_k_dst)); ggml_build_forward_expand(gf, ggml_cpy(ctx0, view_v_src, view_v_dst)); @@ -6840,20 +6905,26 @@ struct llm_build_context { struct ggml_tensor * build_inp_KQ_mask(bool causal = true) { if (causal) { - lctx.inp_KQ_mask = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_kv, n_tokens); + lctx.inp_KQ_mask = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_kv, GGML_PAD(n_tokens, GGML_KQ_MASK_PAD)); } else { - lctx.inp_KQ_mask = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_tokens, n_tokens); + lctx.inp_KQ_mask = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_tokens, GGML_PAD(n_tokens, GGML_KQ_MASK_PAD)); } cb(lctx.inp_KQ_mask, "KQ_mask", -1); ggml_set_input(lctx.inp_KQ_mask); - return lctx.inp_KQ_mask; + return flash_attn ? ggml_cast(ctx0, lctx.inp_KQ_mask, GGML_TYPE_F16) : lctx.inp_KQ_mask; } - struct ggml_tensor * build_inp_KQ_pos() { - lctx.inp_KQ_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, n_kv); + struct ggml_tensor * build_inp_KQ_pos(bool causal = true) { + if (causal) { + lctx.inp_KQ_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, n_kv); + } else { + // TODO: this will be needed for ALiBi-based BERT models + // https://github.com/ggerganov/llama.cpp/pull/6826 + lctx.inp_KQ_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, n_tokens); + } cb(lctx.inp_KQ_pos, "KQ_pos", -1); ggml_set_input(lctx.inp_KQ_pos); - return lctx.inp_KQ_pos; + return flash_attn ? 
ggml_cast(ctx0, lctx.inp_KQ_pos, GGML_TYPE_F16) : lctx.inp_KQ_pos; } struct ggml_tensor * build_inp_mean() { @@ -6959,9 +7030,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -7099,9 +7170,9 @@ struct llm_build_context { cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -7206,9 +7277,9 @@ struct llm_build_context { ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -7326,9 +7397,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -7451,9 +7522,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f, cb, il); } if (il == n_layer - 1) { @@ -7603,9 +7674,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, - model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, + model.layers[il].wo, NULL, + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -7715,9 +7786,9 @@ struct llm_build_context { Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -7919,9 +7990,9 @@ struct llm_build_context { ); cb(Vcur, "Vcur", 
il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Q, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Q, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -8015,9 +8086,9 @@ struct llm_build_context { Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); cb(Qcur, "Qcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -8308,9 +8379,9 @@ struct llm_build_context { Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -8439,14 +8510,15 @@ struct llm_build_context { Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, - model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } else { Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } } @@ -8588,9 +8660,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -8706,9 +8778,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -8819,9 +8891,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, 
hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -8933,9 +9005,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -9088,9 +9160,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f, cb, il); } if (il == n_layer - 1) { @@ -9205,9 +9277,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, - model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f, cb, il); } if (il == n_layer - 1) { @@ -9318,9 +9390,9 @@ struct llm_build_context { ext_factor, attn_factor, beta_fast, beta_slow); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } struct ggml_tensor * sa_out = cur; @@ -9421,9 +9493,9 @@ struct llm_build_context { Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -9528,9 +9600,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -9644,9 +9716,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 
1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -9761,9 +9833,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -9891,9 +9963,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -10012,9 +10084,9 @@ struct llm_build_context { ext_factor, attn_factor, beta_fast, beta_slow); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f, cb, il); } if (il == n_layer - 1) { @@ -10131,9 +10203,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -10421,9 +10493,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -10552,9 +10624,9 @@ struct llm_build_context { ); cb(Kcur, "Kcur", il); - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf, model.layers[il].wo, nullptr, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); } if (il == n_layer - 1) { @@ -10981,7 +11053,9 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (hparams.need_kq_pos) { + // ALiBi requires the KQ_pos tensor to provide the sequence position of each token in the batch + // this allows to process multiple sequences in parallel with ALiBi-based models + if (hparams.use_alibi) { const int64_t n_kv = kv_self.n; GGML_ASSERT(lctx.inp_KQ_pos); @@ -11363,7 +11437,7 @@ static int llama_decode_internal( // a heuristic, to avoid attending the full cache if it is not yet 
utilized // after enough generations, the benefit from this heuristic disappears // if we start defragmenting the cache, the benefit from this will be more important - kv_self.n = std::min(kv_self.size, std::max(32u, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32))); + kv_self.n = std::min(kv_self.size, std::max(256u, GGML_PAD(llama_kv_cache_cell_max(kv_self), 256))); //kv_self.n = llama_kv_cache_cell_max(kv_self); } } @@ -11560,7 +11634,9 @@ static void llama_kv_cache_defrag_internal(struct llama_context & lctx) { // each move requires 6*n_layer tensors (see build_defrag) // - source view, destination view, copy operation // - x2 for keys and values - const uint32_t max_moves = LLAMA_MAX_NODES/(6*n_layer); + //const uint32_t max_moves = LLAMA_MAX_NODES/(6*n_layer); + // TODO: tmp fix https://github.com/ggerganov/llama.cpp/issues/6685#issuecomment-2057579516 + const uint32_t max_moves = (LLAMA_MAX_NODES - 2*n_layer)/(6*n_layer); // determine which KV cells to move where // @@ -15167,6 +15243,7 @@ struct llama_context_params llama_context_default_params() { /*.logits_all =*/ false, /*.embeddings =*/ false, /*.offload_kqv =*/ true, + /*.flash_attn =*/ false, /*.abort_callback =*/ nullptr, /*.abort_callback_data =*/ nullptr, }; @@ -15333,6 +15410,7 @@ struct llama_context * llama_new_context_with_model( cparams.defrag_thold = params.defrag_thold; cparams.embeddings = params.embeddings; cparams.offload_kqv = params.offload_kqv; + cparams.flash_attn = params.flash_attn; cparams.pooling_type = params.pooling_type; cparams.n_ctx = params.n_ctx == 0 ? hparams.n_ctx_train : params.n_ctx; @@ -15340,12 +15418,20 @@ struct llama_context * llama_new_context_with_model( cparams.rope_freq_scale = params.rope_freq_scale == 0.0f ? hparams.rope_freq_scale_train : params.rope_freq_scale; // this is necessary due to kv_self.n being padded later during inference - cparams.n_ctx = GGML_PAD(cparams.n_ctx, 32); + cparams.n_ctx = GGML_PAD(cparams.n_ctx, 256); // with causal attention, the batch size is limited by the context size cparams.n_batch = hparams.causal_attn ? std::min(cparams.n_ctx, params.n_batch) : params.n_batch; - cparams.n_ubatch = std::min(cparams.n_batch, params.n_ubatch == 0 ? params.n_batch : params.n_ubatch); + // the batch has to be at least GGML_KQ_MASK_PAD because we will be padding the KQ_mask + // this is required by GPU kernels in order to avoid out-of-bounds accesses (e.g. ggml_flash_attn_ext) + // ref: https://github.com/ggerganov/llama.cpp/pull/5021 + if (cparams.n_batch < GGML_KQ_MASK_PAD) { + LLAMA_LOG_WARN("%s: n_batch is less than GGML_KQ_MASK_PAD - increasing to %d\n", __func__, GGML_KQ_MASK_PAD); + cparams.n_batch = GGML_KQ_MASK_PAD; + } + + cparams.n_ubatch = std::min(cparams.n_batch, params.n_ubatch == 0 ? params.n_batch : params.n_ubatch); cparams.n_yarn_orig_ctx = params.yarn_orig_ctx != 0 ? params.yarn_orig_ctx : hparams.n_yarn_orig_ctx != 0 ? 
hparams.n_yarn_orig_ctx : @@ -15377,6 +15463,23 @@ struct llama_context * llama_new_context_with_model( } } + if (cparams.flash_attn && hparams.use_alibi) { + LLAMA_LOG_WARN("%s: flash_attn is not yet compatible with ALiBi - forcing off\n", __func__); + cparams.flash_attn = false; + } + + if (cparams.flash_attn && model->arch == LLM_ARCH_GROK) { + LLAMA_LOG_WARN("%s: flash_attn is not compatible with Grok - forcing off\n", __func__); + cparams.flash_attn = false; + } + +#ifdef GGML_USE_HIPBLAS + if (cparams.flash_attn) { + LLAMA_LOG_WARN("%s: flash_attn is not yet compatible with HIPBLAS builds - forcing off\n", __func__); + cparams.flash_attn = false; + } +#endif + if (params.seed == LLAMA_DEFAULT_SEED) { params.seed = time(NULL); } @@ -15384,6 +15487,7 @@ struct llama_context * llama_new_context_with_model( LLAMA_LOG_INFO("%s: n_ctx = %u\n", __func__, cparams.n_ctx); LLAMA_LOG_INFO("%s: n_batch = %u\n", __func__, cparams.n_batch); LLAMA_LOG_INFO("%s: n_ubatch = %u\n", __func__, cparams.n_ubatch); + LLAMA_LOG_INFO("%s: flash_attn = %d\n", __func__, cparams.flash_attn); LLAMA_LOG_INFO("%s: freq_base = %.1f\n", __func__, cparams.rope_freq_base); LLAMA_LOG_INFO("%s: freq_scale = %g\n", __func__, cparams.rope_freq_scale); @@ -15512,7 +15616,7 @@ struct llama_context * llama_new_context_with_model( } ctx->backends.push_back(ctx->backend_cpu); - if (!llama_kv_cache_init(ctx->kv_self, ctx->model, type_k, type_v, kv_size, cparams.offload_kqv)) { + if (!llama_kv_cache_init(ctx->kv_self, ctx, type_k, type_v, kv_size, cparams.offload_kqv)) { LLAMA_LOG_ERROR("%s: llama_kv_cache_init() failed for self-attention cache\n", __func__); llama_free(ctx); return nullptr; @@ -16111,6 +16215,7 @@ size_t llama_state_get_size(const struct llama_context * ctx) { const size_t s_kv_head = sizeof(uint32_t); const size_t s_kv_size = sizeof(uint32_t); const size_t s_kv_used = sizeof(uint32_t); + const size_t s_v_trans = sizeof(uint32_t); const size_t s_kv = ctx->kv_self.total_size(); const size_t s_kv_cell = sizeof(llama_pos) + sizeof(size_t) + cparams.n_seq_max*sizeof(llama_seq_id); const size_t s_kv_cells = ctx->kv_self.size * s_kv_cell; @@ -16128,10 +16233,14 @@ size_t llama_state_get_size(const struct llama_context * ctx) { + s_kv_head + s_kv_size + s_kv_used + + s_v_trans + s_kv + s_kv_cells ); + // on session change it is very likely that the state size has changed - so we need to update this function + static_assert(LLAMA_SESSION_VERSION == 6, "So you just bumped the session version - good. But did you remember to update llama_state_get_size?"); + return s_total; } @@ -16277,11 +16386,13 @@ static void llama_state_get_data_internal(struct llama_context * ctx, llama_data const uint32_t kv_size = kv_self.size; const size_t kv_buf_size = kv_self.total_size() / (kv_size ? kv_size : 1) * kv_head; const uint32_t kv_used = kv_self.used; + const uint32_t v_trans = kv_self.v_trans ? 
1 : 0; data_ctx->write(&kv_buf_size, sizeof(kv_buf_size)); data_ctx->write(&kv_head, sizeof(kv_head)); data_ctx->write(&kv_size, sizeof(kv_size)); data_ctx->write(&kv_used, sizeof(kv_used)); + data_ctx->write(&v_trans, sizeof(v_trans)); if (kv_buf_size) { const size_t pre_kv_buf_size = data_ctx->get_size_written(); @@ -16294,7 +16405,7 @@ static void llama_state_get_data_internal(struct llama_context * ctx, llama_data ggml_backend_tensor_get(kv_self.k_l[il], tmp_buf.data(), 0, tmp_buf.size()); data_ctx->write(tmp_buf.data(), tmp_buf.size()); - if (kv_self.recurrent) { + if (kv_self.recurrent || !kv_self.v_trans) { // v is contiguous for recurrent models // TODO: use other tensors for state models than k and v const size_t v_size = ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa*kv_head); @@ -16427,11 +16538,15 @@ size_t llama_state_set_data(struct llama_context * ctx, const uint8_t * src) { uint32_t kv_head; uint32_t kv_size; uint32_t kv_used; + uint32_t v_trans; memcpy(&kv_buf_size, inp, sizeof(kv_buf_size)); inp += sizeof(kv_buf_size); memcpy(&kv_head, inp, sizeof(kv_head)); inp += sizeof(kv_head); memcpy(&kv_size, inp, sizeof(kv_size)); inp += sizeof(kv_size); memcpy(&kv_used, inp, sizeof(kv_used)); inp += sizeof(kv_used); + memcpy(&v_trans, inp, sizeof(v_trans)); inp += sizeof(v_trans); + + GGML_ASSERT(kv_self.v_trans == (bool) v_trans); // incompatible V transposition if (kv_self.size != kv_size) { // the KV cache needs to be big enough to load all the KV cells from the saved state @@ -16441,6 +16556,8 @@ size_t llama_state_set_data(struct llama_context * ctx, const uint8_t * src) { __func__, kv_head, kv_size, kv_self.size); } + llama_kv_cache_clear(ctx); + if (kv_buf_size) { const size_t pre_kv_buf_size = inp - src; @@ -16452,7 +16569,7 @@ size_t llama_state_set_data(struct llama_context * ctx, const uint8_t * src) { ggml_backend_tensor_set(kv_self.k_l[il], inp, 0, k_size); inp += k_size; - if (kv_self.recurrent) { + if (kv_self.recurrent || !kv_self.v_trans) { // v is contiguous for recurrent models // TODO: use other tensors for state models than k and v const size_t v_size = ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa*kv_head); @@ -16474,8 +16591,6 @@ size_t llama_state_set_data(struct llama_context * ctx, const uint8_t * src) { GGML_ASSERT(kv_buf_size == inp - src - pre_kv_buf_size); } - llama_kv_cache_clear(ctx); - ctx->kv_self.head = kv_head; ctx->kv_self.used = kv_used; @@ -16735,28 +16850,49 @@ static size_t llama_state_seq_get_data_internal(struct llama_context * ctx, llam } } - // For the values, they are transposed, so we also need the element size and get the element ranges from each row - const uint32_t kv_size = kv_self.size; - for (int il = 0; il < (int)n_layer; ++il) { - // Write value type - const int32_t v_type_i = (int32_t)kv_self.v_l[il]->type; - data_ctx.write(&v_type_i, sizeof(v_type_i)); + // TODO: simplify, reduce copy-paste + if (!kv_self.v_trans) { + for (int il = 0; il < (int)n_layer; ++il) { + // Write value type + const int32_t v_type_i = (int32_t)kv_self.v_l[il]->type; + data_ctx.write(&v_type_i, sizeof(v_type_i)); - // Write element size - const size_t v_size_el = ggml_type_size(kv_self.v_l[il]->type); - data_ctx.write(&v_size_el, sizeof(v_size_el)); + // Write row size of value + const size_t v_size_row = ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa); + data_ctx.write(&v_size_row, sizeof(v_size_row)); - // For each row, we get the element values of each cell - for (uint32_t j = 0; j < n_embd_v_gqa; ++j) { - // Read each range of cells of 
v_size_el length each into tmp_buf and write out + // Read each range of cells of v_size length each into tmp_buf and write out for (const auto & range : cell_ranges) { const size_t range_size = range.second - range.first; - const size_t src_offset = (range.first + j * kv_size) * v_size_el; - tmp_buf.resize(range_size * v_size_el); - ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), src_offset, tmp_buf.size()); + tmp_buf.resize(range_size * v_size_row); + ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), range.first * v_size_row, range_size * v_size_row); data_ctx.write(tmp_buf.data(), tmp_buf.size()); } } + } else { + // For the values, they are transposed, so we also need the element size and get the element ranges from each row + const uint32_t kv_size = kv_self.size; + for (int il = 0; il < (int)n_layer; ++il) { + // Write value type + const int32_t v_type_i = (int32_t)kv_self.v_l[il]->type; + data_ctx.write(&v_type_i, sizeof(v_type_i)); + + // Write element size + const size_t v_size_el = ggml_type_size(kv_self.v_l[il]->type); + data_ctx.write(&v_size_el, sizeof(v_size_el)); + + // For each row, we get the element values of each cell + for (uint32_t j = 0; j < n_embd_v_gqa; ++j) { + // Read each range of cells of v_size_el length each into tmp_buf and write out + for (const auto & range : cell_ranges) { + const size_t range_size = range.second - range.first; + const size_t src_offset = (range.first + j * kv_size) * v_size_el; + tmp_buf.resize(range_size * v_size_el); + ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), src_offset, tmp_buf.size()); + data_ctx.write(tmp_buf.data(), tmp_buf.size()); + } + } + } } return data_ctx.get_size_written(); @@ -16881,41 +17017,75 @@ size_t llama_state_seq_set_data(struct llama_context * ctx, const uint8_t * src, } } - // For each layer, read the values for each cell (transposed) - for (int il = 0; il < (int)n_layer; ++il) { - // Read type of value - int32_t v_type_i_ref; - memcpy(&v_type_i_ref, inp, sizeof(v_type_i_ref)); - inp += sizeof(v_type_i_ref); - const int32_t v_type_i = (int32_t)kv_self.v_l[il]->type; - if (v_type_i != v_type_i_ref) { - llama_kv_cache_seq_rm(kv_self, dest_seq_id, -1, -1); - LLAMA_LOG_ERROR("%s: mismatched value type (%d != %d, layer %d)\n", __func__, v_type_i, v_type_i_ref, il); - return 0; - } + // TODO: simplify, reduce copy-paste + if (!kv_self.v_trans) { + for (int il = 0; il < (int)n_layer; ++il) { + // Read type of value + int32_t v_type_i_ref; + memcpy(&v_type_i_ref, inp, sizeof(v_type_i_ref)); + inp += sizeof(v_type_i_ref); + const int32_t v_type_i = (int32_t)kv_self.v_l[il]->type; + if (v_type_i != v_type_i_ref) { + llama_kv_cache_seq_rm(kv_self, dest_seq_id, -1, -1); + LLAMA_LOG_ERROR("%s: mismatched value type (%d != %d, layer %d)\n", __func__, v_type_i, v_type_i_ref, il); + return 0; + } - // Read element size of value - size_t v_size_el_ref; - memcpy(&v_size_el_ref, inp, sizeof(v_size_el_ref)); - inp += sizeof(v_size_el_ref); - const size_t v_size_el = ggml_type_size(kv_self.v_l[il]->type); - if (v_size_el != v_size_el_ref) { - llama_kv_cache_seq_rm(kv_self, dest_seq_id, -1, -1); - LLAMA_LOG_ERROR("%s: mismatched value element size (%zu != %zu, layer %d)\n", __func__, v_size_el, v_size_el_ref, il); - return 0; - } + // Read row size of value + size_t v_size_row_ref; + memcpy(&v_size_row_ref, inp, sizeof(v_size_row_ref)); + inp += sizeof(v_size_row_ref); + const size_t v_size_row = ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa); + if (v_size_row != v_size_row_ref) { + 
llama_kv_cache_seq_rm(kv_self, dest_seq_id, -1, -1); + LLAMA_LOG_ERROR("%s: mismatched value row size (%zu != %zu, layer %d)\n", __func__, v_size_row, v_size_row_ref, il); + return 0; + } - if (cell_count) { - // For each row in the transposed matrix, read the values for the whole cell range - for (uint32_t j = 0; j < n_embd_v_gqa; ++j) { - const size_t dst_offset = (kv_head + j * kv_size) * v_size_el; - ggml_backend_tensor_set(kv_self.v_l[il], inp, dst_offset, cell_count * v_size_el); - inp += cell_count * v_size_el; + if (cell_count) { + // Read and set the values for the whole cell range + ggml_backend_tensor_set(kv_self.v_l[il], inp, kv_head * v_size_row, cell_count * v_size_row); + inp += cell_count * v_size_row; + } + } + } else { + // For each layer, read the values for each cell (transposed) + for (int il = 0; il < (int)n_layer; ++il) { + // Read type of value + int32_t v_type_i_ref; + memcpy(&v_type_i_ref, inp, sizeof(v_type_i_ref)); + inp += sizeof(v_type_i_ref); + const int32_t v_type_i = (int32_t)kv_self.v_l[il]->type; + if (v_type_i != v_type_i_ref) { + llama_kv_cache_seq_rm(kv_self, dest_seq_id, -1, -1); + LLAMA_LOG_ERROR("%s: mismatched value type (%d != %d, layer %d)\n", __func__, v_type_i, v_type_i_ref, il); + return 0; + } + + // Read element size of value + size_t v_size_el_ref; + memcpy(&v_size_el_ref, inp, sizeof(v_size_el_ref)); + inp += sizeof(v_size_el_ref); + const size_t v_size_el = ggml_type_size(kv_self.v_l[il]->type); + if (v_size_el != v_size_el_ref) { + llama_kv_cache_seq_rm(kv_self, dest_seq_id, -1, -1); + LLAMA_LOG_ERROR("%s: mismatched value element size (%zu != %zu, layer %d)\n", __func__, v_size_el, v_size_el_ref, il); + return 0; + } + + if (cell_count) { + // For each row in the transposed matrix, read the values for the whole cell range + for (uint32_t j = 0; j < n_embd_v_gqa; ++j) { + const size_t dst_offset = (kv_head + j * kv_size) * v_size_el; + ggml_backend_tensor_set(kv_self.v_l[il], inp, dst_offset, cell_count * v_size_el); + inp += cell_count * v_size_el; + } } } } const size_t nread = inp - src; + return nread; } diff --git a/llama.h b/llama.h index 30835de5f9433..059d78f115c6d 100644 --- a/llama.h +++ b/llama.h @@ -40,7 +40,7 @@ #define LLAMA_FILE_MAGIC_GGSQ 0x67677371u // 'ggsq' #define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN -#define LLAMA_SESSION_VERSION 5 +#define LLAMA_SESSION_VERSION 6 #define LLAMA_STATE_SEQ_MAGIC LLAMA_FILE_MAGIC_GGSQ #define LLAMA_STATE_SEQ_VERSION 1 @@ -287,6 +287,7 @@ extern "C" { bool logits_all; // the llama_decode() call computes all logits, not just the last one (DEPRECATED - set llama_batch.logits instead) bool embeddings; // if true, extract embeddings (together with logits) bool offload_kqv; // whether to offload the KQV ops (including the KV cache) to GPU + bool flash_attn; // whether to use flash attention // Abort callback // if it returns true, execution of llama_decode() will be aborted @@ -542,7 +543,7 @@ extern "C" { // Returns the number of used KV cells (i.e. 
have at least one sequence assigned to them)
     LLAMA_API int32_t llama_get_kv_cache_used_cells(const struct llama_context * ctx);
 
-    // Clear the KV cache
+    // Clear the KV cache - both cell info is erased and KV data is zeroed
     LLAMA_API void llama_kv_cache_clear(
             struct llama_context * ctx);
 
diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp
index 02daad24b030a..b27c1291e4088 100644
--- a/tests/test-backend-ops.cpp
+++ b/tests/test-backend-ops.cpp
@@ -1090,6 +1090,12 @@ struct test_soft_max : public test_case {
         return VARS_TO_STR5(type, ne, mask, scale, max_bias);
     }
 
+    // the 1024 test with bias occasionally fails:
+    // SOFT_MAX(type=f32,ne=[1024,16,1,1],mask=1,scale=1.000000,max_bias=8.000000): [SOFT_MAX] NMSE = 0.000000103 > 0.000000100 FAIL
+    virtual double max_nmse_err() override {
+        return 1e-6;
+    }
+
     test_soft_max(ggml_type type = GGML_TYPE_F32,
             std::array<int64_t, 4> ne = {10, 10, 10, 10},
             bool mask = false,
@@ -1101,7 +1107,7 @@ struct test_soft_max : public test_case {
         ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data());
         ggml_tensor * mask = nullptr;
         if (this->mask) {
-            mask = ggml_new_tensor_2d(ctx, type, ne[0], ne[1]);
+            mask = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, ne[0], ne[1]);
         }
         ggml_tensor * pos = nullptr;
         if (max_bias > 0.0f) {
@@ -1475,6 +1481,34 @@ struct test_leaky_relu : public test_case {
     }
 };
 
+// GGML_OP_FLASH_ATTN_EXT
+struct test_flash_attn_ext : public test_case {
+    const int64_t hs; // head size
+    const int64_t nh; // num heads
+    const int64_t kv; // kv size
+    const int64_t nb; // batch size
+
+    std::string vars() override {
+        return VARS_TO_STR4(hs, nh, kv, nb);
+    }
+
+    double max_nmse_err() override {
+        return 5e-4;
+    }
+
+    test_flash_attn_ext(int64_t hs = 128, int64_t nh = 32, int64_t kv = 96, int64_t nb = 8)
+        : hs(hs), nh(nh), kv(kv), nb(nb) {}
+
+    ggml_tensor * build_graph(ggml_context * ctx) override {
+        ggml_tensor * q = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, hs, nb, nh, 1);
+        ggml_tensor * k = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, hs, kv, nh, 1);
+        ggml_tensor * v = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, hs, kv, nh, 1);
+        ggml_tensor * mask = ggml_new_tensor_4d(ctx, GGML_TYPE_F16, kv, GGML_PAD(nb, GGML_KQ_MASK_PAD), 1, 1);
+        ggml_tensor * out = ggml_flash_attn_ext(ctx, q, k, v, mask, 1.0f/sqrtf(hs));
+        return out;
+    }
+};
+
 enum llm_norm_type {
     LLM_NORM,
     LLM_NORM_RMS,
@@ -1661,7 +1695,7 @@ struct test_llama : public test_llm {
         struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, hp.n_tokens);
 
         // KQ_mask (mask for 1 head, it will be broadcasted to all heads)
-        struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx, GGML_TYPE_F32, hp.n_kv, hp.n_tokens, 1);
+        struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx, GGML_TYPE_F16, hp.n_kv, hp.n_tokens, 1);
 
         ggml_tensor * k_l = ggml_new_tensor_1d(ctx, GGML_TYPE_F16, 1638400);
         ggml_tensor * v_l = ggml_new_tensor_1d(ctx, GGML_TYPE_F16, 1638400);
@@ -1783,7 +1817,7 @@ struct test_falcon : public test_llm {
         struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, hp.n_tokens);
 
         // KQ_mask (mask for 1 head, it will be broadcasted to all heads)
-        struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx, GGML_TYPE_F32, hp.n_kv, hp.n_tokens, 1);
+        struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx, GGML_TYPE_F16, hp.n_kv, hp.n_tokens, 1);
 
         ggml_tensor * k_l = ggml_new_tensor_1d(ctx, GGML_TYPE_F16, 1638400);
         ggml_tensor * v_l = ggml_new_tensor_1d(ctx, GGML_TYPE_F16, 1638400);
@@ -2095,7 +2129,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op
            for 
(float scale : {1.0f, 0.1f}) { for (int64_t ne0 : {16, 1024}) { for (int64_t ne1 : {16, 1024}) { - test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1}, mask, scale, max_bias)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1}, mask, scale, max_bias)); test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0-1, ne1-1, 1, 1}, mask, scale, max_bias)); } } @@ -2139,6 +2173,16 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_timestep_embedding()); test_cases.emplace_back(new test_leaky_relu()); + for (int hs : { 64, 80, 128, 256, }) { + for (int nh : { 32, }) { + for (int kv : { 512, 1024, }) { + for (int nb : { 1, 2, 4, 8, }) { + test_cases.emplace_back(new test_flash_attn_ext(hs, nh, kv, nb)); + } + } + } + } + // these tests are disabled to save execution time, but they can be handy for debugging #if 0 test_cases.emplace_back(new test_llama(1)); From a68a1e7ed060ee5af2d638585d19e3510ddbf16c Mon Sep 17 00:00:00 2001 From: Kevin Gibbons Date: Tue, 30 Apr 2024 02:34:50 -0700 Subject: [PATCH 091/101] metal : log more info on error (#6987) --- ggml-metal.m | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index c6d580b8462d0..cf3aed488eb74 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -2779,6 +2779,45 @@ static enum ggml_status ggml_metal_graph_compute( MTLCommandBufferStatus status = [command_buffer status]; if (status != MTLCommandBufferStatusCompleted) { GGML_METAL_LOG_INFO("%s: command buffer %d failed with status %lu\n", __func__, i, status); + if (status == MTLCommandBufferStatusError) { + MTLCommandBufferError error_code = [command_buffer error].code; + switch (error_code) { + case MTLCommandBufferErrorNone: + GGML_METAL_LOG_INFO("no error code reported\n"); + break; + case MTLCommandBufferErrorTimeout: + GGML_METAL_LOG_INFO("timeout\n"); + break; + case MTLCommandBufferErrorPageFault: + GGML_METAL_LOG_INFO("unserviceable page fault\n"); + break; + case MTLCommandBufferErrorOutOfMemory: + GGML_METAL_LOG_INFO("out of memory\n"); + break; + case MTLCommandBufferErrorInvalidResource: + GGML_METAL_LOG_INFO("invalid reference to resource\n"); + break; + case MTLCommandBufferErrorMemoryless: + GGML_METAL_LOG_INFO("GPU ran out of one or more of its internal resources that support memoryless render pass attachments\n"); + break; + case MTLCommandBufferErrorDeviceRemoved: + GGML_METAL_LOG_INFO("device removed\n"); + break; + case MTLCommandBufferErrorStackOverflow: + GGML_METAL_LOG_INFO("kernel function of tile shader used too many stack frames\n"); + break; + case MTLCommandBufferErrorAccessRevoked: + GGML_METAL_LOG_INFO("access to device revoked by system\n"); + break; + case MTLCommandBufferErrorInternal: + GGML_METAL_LOG_INFO("internal error\n"); + break; + default: + GGML_METAL_LOG_INFO("unknown error %lu\n", error_code); + break; + } + } + return GGML_STATUS_FAILED; } } From 77e15bec6217a39be59b9cc83d6b9afb6b0d8167 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 30 Apr 2024 15:52:21 +0300 Subject: [PATCH 092/101] metal : remove deprecated error code (#7008) --- ggml-metal.m | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index cf3aed488eb74..0ebe7163e7476 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -2800,9 +2800,9 @@ static enum ggml_status ggml_metal_graph_compute( case MTLCommandBufferErrorMemoryless: GGML_METAL_LOG_INFO("GPU ran out of 
one or more of its internal resources that support memoryless render pass attachments\n"); break; - case MTLCommandBufferErrorDeviceRemoved: - GGML_METAL_LOG_INFO("device removed\n"); - break; + //case MTLCommandBufferErrorDeviceRemoved: + // GGML_METAL_LOG_INFO("device removed\n"); + // break; case MTLCommandBufferErrorStackOverflow: GGML_METAL_LOG_INFO("kernel function of tile shader used too many stack frames\n"); break; From c65448d17a7077903f0183da8bedba90843e4cf0 Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Tue, 30 Apr 2024 21:29:11 +0800 Subject: [PATCH 093/101] add flash attention toggle --- expose.h | 1 + gpttype_adapter.cpp | 3 ++- koboldcpp.py | 8 +++++++- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/expose.h b/expose.h index 1ef865e730931..04565a357cc83 100644 --- a/expose.h +++ b/expose.h @@ -55,6 +55,7 @@ struct load_model_inputs const int gpulayers = 0; const float rope_freq_scale = 1.0f; const float rope_freq_base = 10000.0f; + const bool flash_attention = false; const float tensor_split[tensor_split_max]; }; struct generation_inputs diff --git a/gpttype_adapter.cpp b/gpttype_adapter.cpp index 422abcb42952f..90bd2dd058078 100644 --- a/gpttype_adapter.cpp +++ b/gpttype_adapter.cpp @@ -785,12 +785,12 @@ ModelLoadResult gpttype_load_model(const load_model_inputs inputs, FileFormat in { kcpp_params->n_ubatch = (kcpp_params->n_batch>1024?1024:kcpp_params->n_batch); } + kcpp_params->flash_attn = inputs.flash_attention; modelname = kcpp_params->model = inputs.model_filename; useSmartContext = inputs.use_smartcontext; useContextShift = inputs.use_contextshift; debugmode = inputs.debugmode; - auto clamped_max_context_length = inputs.max_context_length; if(clamped_max_context_length>16384 && @@ -1089,6 +1089,7 @@ ModelLoadResult gpttype_load_model(const load_model_inputs inputs, FileFormat in } } + llama_ctx_params.flash_attn = kcpp_params->flash_attn; llama_ctx_v4 = llama_new_context_with_model(llamamodel, llama_ctx_params); if (llama_ctx_v4 == NULL) diff --git a/koboldcpp.py b/koboldcpp.py index 3f82bd3c42480..0c404a08ff860 100644 --- a/koboldcpp.py +++ b/koboldcpp.py @@ -56,6 +56,7 @@ class load_model_inputs(ctypes.Structure): ("gpulayers", ctypes.c_int), ("rope_freq_scale", ctypes.c_float), ("rope_freq_base", ctypes.c_float), + ("flash_attention", ctypes.c_bool), ("tensor_split", ctypes.c_float * tensor_split_max)] class generation_inputs(ctypes.Structure): @@ -372,6 +373,7 @@ def load_model(model_filename): inputs.mmproj_filename = args.mmproj.encode("UTF-8") if args.mmproj else "".encode("UTF-8") inputs.use_smartcontext = args.smartcontext inputs.use_contextshift = (0 if args.noshift else 1) + inputs.flash_attention = args.flashattention inputs.blasbatchsize = args.blasbatchsize inputs.forceversion = args.forceversion inputs.gpulayers = args.gpulayers @@ -1662,6 +1664,7 @@ def hide_tooltip(event): contextshift = ctk.IntVar(value=1) remotetunnel = ctk.IntVar(value=0) smartcontext = ctk.IntVar() + flashattention = ctk.IntVar(value=0) context_var = ctk.IntVar() customrope_var = ctk.IntVar() customrope_scale = ctk.StringVar(value="1.0") @@ -2112,7 +2115,6 @@ def changerunmode(a,b,c): # context size makeslider(tokens_tab, "Context Size:",contextsize_text, context_var, 0, len(contextsize_text)-1, 20, set=3,tooltip="What is the maximum context size to support. Model specific. 
You cannot exceed it.\nLarger contexts require more memory, and not all models support it.") - customrope_scale_entry, customrope_scale_label = makelabelentry(tokens_tab, "RoPE Scale:", customrope_scale,tooltip="For Linear RoPE scaling. RoPE frequency scale.") customrope_base_entry, customrope_base_label = makelabelentry(tokens_tab, "RoPE Base:", customrope_base,tooltip="For NTK Aware Scaling. RoPE frequency base.") def togglerope(a,b,c): @@ -2124,6 +2126,7 @@ def togglerope(a,b,c): item.grid_forget() makecheckbox(tokens_tab, "Custom RoPE Config", variable=customrope_var, row=22, command=togglerope,tooltiptxt="Override the default RoPE configuration with custom RoPE scaling.") togglerope(1,1,1) + makecheckbox(tokens_tab, "Use FlashAttention", flashattention, 28,tooltiptxt="Enable flash attention for GGUF models.") makefileentry(tokens_tab, "ChatCompletions Adapter:", "Select ChatCompletions Adapter File", chatcompletionsadapter_var, 30,tooltiptxt="Select an optional ChatCompletions Adapter JSON file to force custom instruct tags.") # Model Tab @@ -2202,6 +2205,7 @@ def export_vars(): args.highpriority = highpriority.get()==1 args.nommap = disablemmap.get()==1 args.smartcontext = smartcontext.get()==1 + args.flashattention = flashattention.get()==1 args.noshift = contextshift.get()==0 args.remotetunnel = remotetunnel.get()==1 args.foreground = keepforeground.get()==1 @@ -2286,6 +2290,7 @@ def import_vars(dict): highpriority.set(1 if "highpriority" in dict and dict["highpriority"] else 0) disablemmap.set(1 if "nommap" in dict and dict["nommap"] else 0) smartcontext.set(1 if "smartcontext" in dict and dict["smartcontext"] else 0) + flashattention.set(1 if "flashattention" in dict and dict["flashattention"] else 0) contextshift.set(0 if "noshift" in dict and dict["noshift"] else 1) remotetunnel.set(1 if "remotetunnel" in dict and dict["remotetunnel"] else 0) keepforeground.set(1 if "foreground" in dict and dict["foreground"] else 0) @@ -3322,5 +3327,6 @@ def range_checker(arg: str): parser.add_argument("--password", help="Enter a password required to use this instance. This key will be required for all text endpoints. 
Image endpoints are not secured.", default=None) parser.add_argument("--ignoremissing", help="Ignores all missing non-essential files, just skipping them instead.", action='store_true') parser.add_argument("--chatcompletionsadapter", help="Select an optional ChatCompletions Adapter JSON file to force custom instruct tags.", default="") + parser.add_argument("--flashattention", help="Enables flash attention (Experimental).", action='store_true') main(parser.parse_args(),start_server=True) From f364eb6fb5d46118a76fa045f487318de4c24961 Mon Sep 17 00:00:00 2001 From: Kevin Gibbons Date: Tue, 30 Apr 2024 08:14:02 -0700 Subject: [PATCH 094/101] switch to using localizedDescription (#7010) --- ggml-metal.m | 38 ++------------------------------------ 1 file changed, 2 insertions(+), 36 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index 0ebe7163e7476..017b72ce94383 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -2780,42 +2780,8 @@ static enum ggml_status ggml_metal_graph_compute( if (status != MTLCommandBufferStatusCompleted) { GGML_METAL_LOG_INFO("%s: command buffer %d failed with status %lu\n", __func__, i, status); if (status == MTLCommandBufferStatusError) { - MTLCommandBufferError error_code = [command_buffer error].code; - switch (error_code) { - case MTLCommandBufferErrorNone: - GGML_METAL_LOG_INFO("no error code reported\n"); - break; - case MTLCommandBufferErrorTimeout: - GGML_METAL_LOG_INFO("timeout\n"); - break; - case MTLCommandBufferErrorPageFault: - GGML_METAL_LOG_INFO("unserviceable page fault\n"); - break; - case MTLCommandBufferErrorOutOfMemory: - GGML_METAL_LOG_INFO("out of memory\n"); - break; - case MTLCommandBufferErrorInvalidResource: - GGML_METAL_LOG_INFO("invalid reference to resource\n"); - break; - case MTLCommandBufferErrorMemoryless: - GGML_METAL_LOG_INFO("GPU ran out of one or more of its internal resources that support memoryless render pass attachments\n"); - break; - //case MTLCommandBufferErrorDeviceRemoved: - // GGML_METAL_LOG_INFO("device removed\n"); - // break; - case MTLCommandBufferErrorStackOverflow: - GGML_METAL_LOG_INFO("kernel function of tile shader used too many stack frames\n"); - break; - case MTLCommandBufferErrorAccessRevoked: - GGML_METAL_LOG_INFO("access to device revoked by system\n"); - break; - case MTLCommandBufferErrorInternal: - GGML_METAL_LOG_INFO("internal error\n"); - break; - default: - GGML_METAL_LOG_INFO("unknown error %lu\n", error_code); - break; - } + NSString * error_code = [command_buffer error].localizedDescription; + GGML_METAL_LOG_INFO("error: %s\n", [error_code UTF8String]); } return GGML_STATUS_FAILED; From a8f9b076316e16aadd0791015b3bfd446fe1e904 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 30 Apr 2024 23:36:27 +0200 Subject: [PATCH 095/101] perplexity: more statistics, added documentation (#6936) * perplexity: more statistics, added documentation * add LLaMA 3 8b scoreboard --- common/common.h | 2 +- examples/perplexity/README.md | 118 ++++++++++++++- examples/perplexity/perplexity.cpp | 230 ++++++++++++++++++++++------- 3 files changed, 291 insertions(+), 59 deletions(-) diff --git a/common/common.h b/common/common.h index 0618eea7429b1..9252a4b63889b 100644 --- a/common/common.h +++ b/common/common.h @@ -135,7 +135,7 @@ struct gpt_params { bool multiple_choice = false; // compute TruthfulQA score over random tasks from datafile supplied in prompt size_t multiple_choice_tasks = 0; // number of tasks to use when computing the TruthfulQA score. 
If 0, all tasks will be computed
 
-    bool   kl_divergence     = false; // compute KL-divergence
+    bool   kl_divergence     = false; // compute KL divergence
 
     bool random_prompt        = false; // do not randomize prompt if none provided
     bool use_color            = false; // use color to distinguish generations and inputs
diff --git a/examples/perplexity/README.md b/examples/perplexity/README.md
index 1a8c0dd6436e2..c5e2bc5de9690 100644
--- a/examples/perplexity/README.md
+++ b/examples/perplexity/README.md
@@ -1,8 +1,118 @@
-# perplexity
+# Perplexity
 
-TODO
+The `perplexity` example can be used to calculate the so-called perplexity value of a language model over a given text corpus.
+Perplexity measures how well the model can predict the next token, with lower values being better.
+Note that perplexity is **not** directly comparable between models, especially if they use different tokenizers.
+Also note that finetunes typically result in a higher perplexity value even though the human-rated quality of outputs increases.
+
+Within llama.cpp the perplexity of base models is used primarily to judge the quality loss from e.g. quantized models vs. FP16.
+The convention among contributors is to use the Wikitext-2 test set for testing unless noted otherwise (can be obtained with `scripts/get-wikitext-2.sh`).
+
+By default only the mean perplexity value and the corresponding uncertainty are calculated.
+The uncertainty is determined empirically by assuming a Gaussian distribution of the "correct" logits per token and then applying error propagation.
+
+More statistics can be obtained by recording the logits from the FP16 version of a model.
+To do this, supply `perplexity` with `--kl-divergence-base path/to/logit/binary/file.kld`.
+The program will then record all logits and save them to the provided path in binary format.
+**The logit file will be very large, 11 GiB for LLaMA 2 or 37 GiB for LLaMA 3 when using the Wikitext-2 test set.**
+Once you have the file, supply `perplexity` with the quantized model, the logits file via `--kl-divergence-base`,
+and finally the `--kl-divergence` argument to indicate that the program should calculate the so-called Kullback-Leibler divergence.
+This is a measure of how similar the FP16 and the quantized logit distributions are, with a value of 0 indicating that the distributions are the same.
+The uncertainty on the mean KL divergence is calculated by assuming the KL divergence per token follows a Gaussian distribution.
+
+In addition to the KL divergence the following statistics are calculated with `--kl-divergence`:
+
+* Ratio of mean FP16 PPL and quantized PPL. Uncertainty is estimated on logits, then propagated. The logarithm of this metric is also calculated and printed; it is 0 if the logit distributions are the same.
+* Difference of mean FP16 PPL and quantized PPL. Uncertainty is estimated on logits, then propagated.
+* Mean change in "correct" token probability. Positive values mean the model gets better at prediction, negative values mean it gets worse.
+* Pearson correlation coefficient of the "correct" token probabilities between models.
+* Percentiles of change in "correct" token probability. Positive values mean the model gets better at prediction, negative values mean it gets worse. Can be used to judge noise vs. quality loss from quantization. If the percentiles are symmetric then the quantization is essentially just adding noise. If the negative values are significantly larger than the positive values then this indicates that the model is actually becoming worse from the quantization.
+* The root mean square of the change in token probabilities. If you were to assume that the quantization simply causes Gaussian noise on the token probabilities then this would be the standard deviation of said noise. The uncertainty on the value is calculated under the assumption that the change in token probabilities follows a Gaussian distribution. Related discussion: https://github.com/ggerganov/llama.cpp/discussions/2875 .
+* Same top p: Percentage of how often the token was assigned the highest probabilities by both models. The uncertainty is calculated from the Gaussian approximation of the binomial distribution.
+
+## LLaMA 3 8b Scoreboard
+
+Results are sorted by Kullback-Leibler divergence relative to FP16.
+The "WT" importance matrices were created using varying numbers of Wikitext tokens and can be found [here](https://huggingface.co/JohannesGaessler/llama.cpp_importance_matrices/blob/main/imatrix-llama_3-8b-f16-2.7m_tokens.dat).
+
+| Quantization | imatrix | Model size [GiB] | PPL | ΔPPL | KLD | Mean Δp | RMS Δp |
+|--------------|---------|------------------|------------------------|------------------------|-----------------------|-------------------|------------------|
+| f16 | None | 14.97 | 6.233160 ± 0.037828 | - | - | - | - |
+| q8_0 | None | 7.96 | 6.234284 ± 0.037878 | 0.002650 ± 0.001006 | 0.001355 ± 0.000006 | -0.019 ± 0.003 % | 1.198 ± 0.007 % |
+| q6_K | None | 6.14 | 6.253382 ± 0.038078 | 0.021748 ± 0.001852 | 0.005452 ± 0.000035 | -0.007 ± 0.006 % | 2.295 ± 0.019 % |
+| q5_K_M | None | 5.33 | 6.288607 ± 0.038338 | 0.056974 ± 0.002598 | 0.010762 ± 0.000079 | -0.114 ± 0.008 % | 3.160 ± 0.031 % |
+| q5_K_S | None | 5.21 | 6.336598 ± 0.038755 | 0.104964 ± 0.003331 | 0.016595 ± 0.000122 | -0.223 ± 0.010 % | 3.918 ± 0.036 % |
+| q5_1 | None | 5.65 | 6.337857 ± 0.038677 | 0.106223 ± 0.003476 | 0.018045 ± 0.000139 | -0.287 ± 0.011 % | 4.123 ± 0.039 % |
+| q5_0 | None | 5.21 | 6.363224 ± 0.038861 | 0.131591 ± 0.003894 | 0.022239 ± 0.000166 | -0.416 ± 0.012 % | 4.634 ± 0.043 % |
+| q4_K_M | WT 10m | 4.58 | 6.382937 ± 0.039055 | 0.151303 ± 0.004429 | 0.028152 ± 0.000240 | -0.389 ± 0.014 % | 5.251 ± 0.049 % |
+| q4_K_M | None | 4.58 | 6.407115 ± 0.039119 | 0.175482 ± 0.004620 | 0.031273 ± 0.000238 | -0.596 ± 0.014 % | 5.519 ± 0.050 % |
+| q4_K_S | WT 10m | 4.37 | 6.409697 ± 0.039189 | 0.178064 ± 0.004744 | 0.031951 ± 0.000259 | -0.531 ± 0.015 % | 5.645 ± 0.051 % |
+| iq4_NL | WT 10m | 4.35 | 6.455593 ± 0.039630 | 0.223959 ± 0.005201 | 0.035742 ± 0.000288 | -0.590 ± 0.016 % | 5.998 ± 0.054 % |
+| iq4_XS | WT 10m | 4.14 | 6.459705 ± 0.039595 | 0.228071 ± 0.005207 | 0.036334 ± 0.000284 | -0.668 ± 0.016 % | 6.044 ± 0.054 % |
+| q4_K_S | None | 4.37 | 6.500529 ± 0.039778 | 0.268895 ± 0.005638 | 0.043136 ± 0.000314 | -0.927 ± 0.017 % | 6.562 ± 0.055 % |
+| q4_1 | None | 4.78 | 6.682737 ± 0.041285 | 0.451103 ± 0.008030 | 0.071683 ± 0.000505 | -0.927 ± 0.017 % | 8.512 ± 0.063 % |
+| q4_0 | None | 4.34 | 6.700147 ± 0.041226 | 0.468514 ± 0.007951 | 0.071940 ± 0.000491 | -1.588 ± 0.022 % | 8.434 ± 0.061 % |
+| q3_K_L | WT 10m | 4.03 | 6.671223 ± 0.041427 | 0.439590 ± 0.008154 | 0.073077 ± 0.000529 | -0.940 ± 0.023 % | 8.662 ± 0.064 % |
+| q3_K_M | WT 10m | 3.74 | 6.734255 ± 0.041838 | 0.502622 ± 0.008901 | 0.084358 ± 0.000588 | -1.198 ± 0.024 % | 9.292 ± 0.065 % |
+| q3_K_L | None | 4.03 | 6.787876 ± 0.042104 | 0.556242 ± 0.009171 | 0.087176 ± 0.000614 | -1.532 ± 0.025 % | 9.432 ± 0.067 % |
+| q3_K_M | None | 3.74 | 6.888498 ± 0.042669 | 0.656864 ± 0.010071 | 0.101913 ± 0.000677 | -1.990 ± 0.026 % | 10.203 
± 0.068 % | +| iq3_M | WT 10m | 3.53 | 6.898327 ± 0.041643 | 0.666694 ± 0.009449 | 0.102534 ± 0.000663 | -3.178 ± 0.026 % | 10.513 ± 0.066 % | +| iq3_S | WT 10m | 3.42 | 6.965501 ± 0.042406 | 0.733867 ± 0.010245 | 0.111278 ± 0.000710 | -3.066 ± 0.027 % | 10.845 ± 0.068 % | +| iq3_XS | WT 10m | 3.28 | 7.163043 ± 0.043772 | 0.931409 ± 0.012084 | 0.138693 ± 0.000857 | -3.667 ± 0.031 % | 12.148 ± 0.070 % | +| iq3_XXS | WT 10m | 3.05 | 7.458436 ± 0.046404 | 1.226803 ± 0.015234 | 0.183625 ± 0.001042 | -3.918 ± 0.035 % | 13.836 ± 0.074 % | +| q3_K_S | WT 10m | 3.41 | 7.602878 ± 0.046848 | 1.371244 ± 0.015688 | 0.199821 ± 0.001008 | -5.046 ± 0.037 % | 14.980 ± 0.070 % | +| q3_K_S | None | 3.41 | 7.863786 ± 0.048885 | 1.632152 ± 0.017733 | 0.228217 ± 0.001079 | -5.604 ± 0.038 % | 15.541 ± 0.070 % | +| iq2_M | WT 10m | 2.74 | 8.600799 ± 0.055124 | 2.369166 ± 0.025244 | 0.325989 ± 0.00160 | -6.463 ± 0.046 % | 18.519 ± 0.080 % | +| q2_K | WT 10k | 2.96 | 8.652290 ± 0.055572 | 2.420657 ± 0.025587 | 0.331393 ± 0.001562 | -6.606 ± 0.046 % | 18.790 ± 0.078 % | +| q2_K | WT 100k | 2.96 | 8.641993 ± 0.055406 | 2.410359 ± 0.025495 | 0.331672 ± 0.001569 | -6.628 ± 0.047 % | 18.856 ± 0.078 % | +| q2_K | WT 10m | 2.96 | 8.647825 ± 0.055610 | 2.416191 ± 0.025683 | 0.332223 ± 0.001572 | -6.500 ± 0.047 % | 18.881 ± 0.078 % | +| q2_K | WT 1m | 2.96 | 8.674365 ± 0.055743 | 2.442732 ± 0.025843 | 0.335308 ± 0.001576 | -6.634 ± 0.047 % | 19.009 ± 0.079 % | +| q2_K | WT 1k | 2.96 | 8.682605 ± 0.055916 | 2.450972 ± 0.026069 | 0.337093 ± 0.001596 | -6.596 ± 0.047 % | 18.977 ± 0.079 % | +| q2_K_S | WT 10m | 2.96 | 9.323778 ± 0.061551 | 3.092145 ± 0.031914 | 0.403360 ± 0.001787 | -7.131 ± 0.049 % | 20.050 ± 0.081 % | +| q2_K_S | WT 1m | 2.96 | 9.329321 ± 0.061378 | 3.097688 ± 0.031816 | 0.403590 ± 0.001797 | -7.289 ± 0.049 % | 20.123 ± 0.081 % | +| q2_K_S | WT 100k | 2.96 | 9.362973 ± 0.061740 | 3.131339 ± 0.032169 | 0.408367 ± 0.001802 | -7.198 ± 0.050 % | 20.132 ± 0.081 % | +| q2_K_S | WT 10k | 2.96 | 9.376479 ± 0.062045 | 3.144846 ± 0.032464 | 0.408662 ± 0.001819 | -7.141 ± 0.050 % | 20.120 ± 0.081 % | +| q2_K_S | WT 1k | 2.96 | 9.415200 ± 0.062475 | 3.183567 ± 0.032993 | 0.415865 ± 0.001846 | -7.153 ± 0.050 % | 20.311 ± 0.082 % | +| iq2_S | WT 10m | 2.56 | 9.650781 ± 0.063209 | 3.419148 ± 0.034017 | 0.439197 ± 0.001976 | -8.319 ± 0.052 % | 21.491 ± 0.083 % | +| q2_K | None | 2.96 | 9.751568 ± 0.063312 | 3.519934 ± 0.033863 | 0.445132 ± 0.001835 | -9.123 ± 0.051 % | 21.421 ± 0.079 % | +| iq2_XS | WT 10m | 2.43 | 10.761424 ± 0.071056 | 4.529791 ± 0.042229 | 0.546290 ± 0.002133 | -10.576 ± 0.056 % | 23.872 ± 0.082 % | +| iq2_XXS | WT 10m | 2.24 | 14.091782 ± 0.098396 | 7.860148 ± 0.070752 | 0.812022 ± 0.002741 | -14.363 ± 0.065 % | 28.576 ± 0.084 % | +| iq1_M | WT 10m | 2.01 | 25.493722 ± 0.177903 | 19.262089 ± 0.152396 | 1.393084 ± 0.003529 | -24.672 ± 0.077 % | 38.287 ± 0.084 % | +| iq1_S | WT 1m | 1.88 | 58.097760 ± 0.438604 | 51.866126 ± 0.416604 | 2.211278 ± 0.004688 | -32.471 ± 0.087 % | 46.418 ± 0.085 % | +| iq1_S | WT 1k | 1.88 | 58.267851 ± 0.446208 | 52.036218 ± 0.424373 | 2.214858 ± 0.004778 | -31.880 ± 0.089 % | 46.330 ± 0.086 % | +| iq1_S | WT 100k | 1.88 | 58.581498 ± 0.453145 | 52.349864 ± 0.431360 | 2.220834 ± 0.004818 | -32.261 ± 0.089 % | 46.002 ± 0.086 % | +| iq1_S | WT 10m | 1.88 | 60.694593 ± 0.471290 | 54.462959 ± 0.449644 | 2.254554 ± 0.004868 | -31.973 ± 0.088 % | 46.271 ± 0.086 % | +| iq1_S | WT 10k | 1.88 | 63.221324 ± 0.493077 | 56.989691 ± 0.471423 | 2.293527 ± 0.004885 | -32.261 ± 0.089 % | 
46.562 ± 0.086 % |
+
+There seems to be no consistent improvement from using more Wikitext tokens for the importance matrix.
+K-quants score better on mean Δp relative to the legacy quants than e.g. KL divergence would suggest.
+
+## LLaMA 2 vs. LLaMA 3 Quantization comparison
+
+| Metric | L2 7b q2_K | L3 8b q2_K | L2 7b q4_K_M | L3 8b q4_K_M | L2 7b q6_K | L3 8b q6_K | L2 7b q8_0 | L3 8b q8_0 |
+|-----------------|---------------------|---------------------|---------------------|---------------------|---------------------|---------------------|---------------------|---------------------|
+| Mean PPL | 5.794552 ± 0.032298 | 9.751568 ± 0.063312 | 5.877078 ± 0.032781 | 6.407115 ± 0.039119 | 5.808494 ± 0.032425 | 6.253382 ± 0.038078 | 5.798542 ± 0.032366 | 6.234284 ± 0.037878 |
+| Mean PPL ratio | 1.107955 ± 0.001427 | 1.564849 ± 0.004525 | 1.014242 ± 0.000432 | 1.028160 ± 0.000723 | 1.002406 ± 0.000191 | 1.003490 ± 0.000296 | 1.000689 ± 0.000107 | 1.000425 ± 0.000161 |
+| Mean ΔPPL | 0.625552 ± 0.008725 | 3.519934 ± 0.033863 | 0.082526 ± 0.002530 | 0.175482 ± 0.004620 | 0.013941 ± 0.001110 | 0.021748 ± 0.001852 | 0.003990 ± 0.000624 | 0.002650 ± 0.001006 |
+| PPL correlation | 97.36% | 89.62% | 99.71% | 99.34% | 99.94% | 99.88% | 99.98% | 99.96% |
+| Mean KLD | 0.108903 ± 0.000645 | 0.445132 ± 0.001835 | 0.012686 ± 0.000079 | 0.031273 ± 0.000238 | 0.002098 ± 0.000014 | 0.005452 ± 0.000035 | 0.000369 ± 0.000007 | 0.001355 ± 0.000006 |
+| Mean Δp | -2.710 ± 0.023 % | -9.123 ± 0.051 % | -0.416 ± 0.008 % | -0.596 ± 0.014 % | -0.035 ± 0.003 % | -0.007 ± 0.006 % | -0.005 ± 0.002 % | -0.019 ± 0.003 % |
+| Maximum Δp | 85.136% | 94.268% | 45.209% | 95.054% | 23.593% | 53.601% | 43.925% | 28.734% |
+| 99.9% Δp | 37.184% | 50.003% | 17.461% | 27.084% | 7.798% | 13.613% | 3.387% | 6.402% |
+| 99.0% Δp | 18.131% | 25.875% | 7.798% | 12.084% | 3.838% | 6.407% | 1.867% | 3.544% |
+| Median Δp | -0.391% | -2.476% | -0.026% | -0.024% | -0.001% | 0.000% | -0.000% | -0.000% |
+| 1.0% Δp | -39.762% | -87.173% | -11.433% | -19.567% | -4.222% | -6.767% | -1.862% | -3.698% |
+| 0.1% Δp | -79.002% | -98.897% | -26.433% | -56.054% | -9.091% | -16.584% | -3.252% | -6.579% |
+| Minimum Δp | -99.915% | -99.965% | -83.383% | -98.699% | -43.142% | -68.487% | -9.343% | -24.301% |
+| RMS Δp | 9.762 ± 0.053 % | 21.421 ± 0.079 % | 3.252 ± 0.024 % | 5.519 ± 0.050 % | 1.339 ± 0.010 % | 2.295 ± 0.019 % | 0.618 ± 0.011 % | 1.198 ± 0.007 % |
+| Same top p | 85.584 ± 0.086 % | 71.138 ± 0.119 % | 94.665 ± 0.055 % | 91.901 ± 0.072 % | 97.520 ± 0.038 % | 96.031 ± 0.051 % | 98.846 ± 0.026 % | 97.674 ± 0.040 % |
+
+
+## Old Numbers
+
+<details>
+<summary>Llama 2 70B Scoreboard</summary>
 
-## Llama 2 70B Scorechart
 | Quantization | Model size (GiB) | Perplexity | Delta to fp16 |
 |--------------|------------------|------------|---------------|
 | Q4_0 | 36.20 | 3.5550 | 3.61% |
@@ -18,3 +128,5 @@ TODO
 | Q5_K_M | 45.41 | 3.4451 | 0.40% |
 | Q6_K | 52.70 | 3.4367 | 0.16% |
 | fp16 | 128.5 | 3.4313 | - |
+
+</details>
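[Editor's aside: the statistics documented in the README above reduce to a few arithmetic operations on per-token logits. The standalone Python sketch below is an illustration only, not part of the patch; all names (`logits_q`, `logits_base`, `tok`) are made up for the example.]

```python
# Minimal sketch of the per-token statistics described in the README above.
# Assumes `logits_q`/`logits_base` are raw logit vectors over the vocabulary
# from the quantized and the FP16 base model, and `tok` is the observed token.
import math

def log_softmax(logits):
    # numerically stable log-softmax: x - logsumexp(x)
    m = max(logits)
    lse = m + math.log(sum(math.exp(x - m) for x in logits))
    return [x - lse for x in logits]

def token_stats(logits_q, logits_base, tok):
    lp_q = log_softmax(logits_q)      # quantized model
    lp_b = log_softmax(logits_base)   # FP16 base model
    nll      = -lp_q[tok]             # NLL of the "correct" token (quantized)
    nll_base = -lp_b[tok]             # NLL of the "correct" token (base)
    # KL(base || quantized), summed over the vocabulary
    kld = sum(math.exp(b) * (b - q) for b, q in zip(lp_b, lp_q))
    # change in "correct" token probability (positive = quantized model improved)
    p_diff = math.exp(-nll) - math.exp(-nll_base)
    return nll, nll_base, kld, p_diff

def mean_and_uncertainty(values):
    # empirical mean and its standard error, assuming a Gaussian distribution
    n = len(values)
    mean = sum(values) / n
    var = sum((v - mean) ** 2 for v in values) / (n - 1)
    return mean, math.sqrt(var / n)

# PPL = exp(mean NLL); since d/dx exp(x) = exp(x), error propagation gives:
# ppl, ppl_unc = exp(mean_nll), exp(mean_nll) * unc(mean_nll)
```

The C++ diff below computes the same quantities incrementally (see `kl_divergence_result` and `log_softmax`), with the base logits stored in a compressed 16-bit format so that the recorded logit file stays manageable.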
diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp
index 9a3374fdc13f2..db6e0949d4c47 100644
--- a/examples/perplexity/perplexity.cpp
+++ b/examples/perplexity/perplexity.cpp
@@ -216,17 +216,22 @@ static void process_logits(std::ostream& out, int n_vocab, const float * logits,
 }
 
 struct kl_divergence_result {
-    double sum_nll  = 0;
-    double sum_nll2 = 0;
-    double sum_kld  = 0;
-    double sum_kld2 = 0;
-    double sum_nll_diff  = 0;
-    double sum_nll_diff2 = 0;
-    size_t n_same_top = 0;
-    size_t count = 0;
+    double sum_nll          = 0.0;
+    double sum_nll2         = 0.0;
+    double sum_nll_base     = 0.0;
+    double sum_nll_base2    = 0.0;
+    double sum_nll_nll_base = 0.0;
+    double sum_kld          = 0.0;
+    double sum_kld2         = 0.0;
+    double sum_p_diff       = 0.0;
+    double sum_p_diff2      = 0.0;
+    double sum_p_diff4      = 0.0;
+    float  max_p_diff       = 0.0f;
+    size_t n_same_top       = 0.0;
+    size_t count            = 0.0;
 };
 
-static double log_softmax(int n_vocab, const float * logits, const uint16_t * base_log_prob, int tok, kl_divergence_result & kld) {
+static std::pair<double, float> log_softmax(int n_vocab, const float * logits, const uint16_t * base_log_prob, int tok, kl_divergence_result & kld) {
     float max_logit = logits[0];
     int imax = 0;
     for (int i = 1; i < n_vocab; ++i) {
@@ -244,12 +249,17 @@ static double log_softmax(int n_vocab, const float * logits, const uint16_t * ba
     const float scale = d[0];
     const float min_log_prob = d[1];
     base_log_prob += 4;
-    float nll = max_logit + log_sum_exp - logits[tok];
+
+    const float nll = max_logit + log_sum_exp - logits[tok];
     kld.sum_nll  += nll;
     kld.sum_nll2 += nll*nll;
-    nll += (scale*base_log_prob[tok] + min_log_prob);
-    kld.sum_nll_diff += nll;
-    kld.sum_nll_diff2 += nll*nll;
+
+    const float nll_base = -(scale*base_log_prob[tok] + min_log_prob);
+    kld.sum_nll_base  += nll_base;
+    kld.sum_nll_base2 += nll_base*nll_base;
+
+    kld.sum_nll_nll_base += nll*nll_base;
+
     max_logit += log_sum_exp;
     double sum = 0;
     int imax_base = -1;
@@ -269,34 +279,50 @@ static double log_softmax(int n_vocab, const float * logits, const uint16_t * ba
     kld.sum_kld2 += sum*sum;
     ++kld.count;
     if (imax == imax_base) ++kld.n_same_top;
-    return sum;
+
+    const float p_base = expf(-nll_base);
+    const float p = expf(-nll);
+    const float p_diff = p - p_base;
+    kld.sum_p_diff += p_diff;
+    const double p_diff2 = p_diff*p_diff;
+    kld.sum_p_diff2 += p_diff2;
+    kld.sum_p_diff4 += p_diff2*p_diff2;
+    kld.max_p_diff = std::max(kld.max_p_diff, std::fabs(p_diff));
+
+    return std::make_pair(sum, p_diff);
 }
 
 static void process_logits(int n_vocab, const float * logits, const int * tokens, int n_token,
         std::vector<std::thread> & workers, const std::vector<uint16_t> & base_log_probs, kl_divergence_result & kld,
-        float * kld_values) {
+        float * kld_values, float * p_diff_values) {
     std::mutex mutex;
     const int nv = 2*((n_vocab + 1)/2) + 4;
     int counter = 0;
-    auto compute = [&mutex, &counter, &base_log_probs, &kld, n_vocab, logits, tokens, n_token, nv, kld_values] () {
+    auto compute = [&mutex, &counter, &base_log_probs, &kld, n_vocab, logits, tokens, n_token, nv, kld_values, p_diff_values] () {
        kl_divergence_result local_kld;
        while (true) {
            std::unique_lock<std::mutex> lock(mutex);
            int i = counter++;
            if (i >= n_token) {
-                kld.sum_nll  += local_kld.sum_nll;
-                kld.sum_nll2 += local_kld.sum_nll2;
-                kld.sum_kld  += local_kld.sum_kld;
-                kld.sum_kld2 += local_kld.sum_kld2;
-                kld.sum_nll_diff += local_kld.sum_nll_diff;
-                kld.sum_nll_diff2 += local_kld.sum_nll_diff2;
-                kld.n_same_top += local_kld.n_same_top;
-                kld.count += local_kld.count;
+                kld.sum_nll          += local_kld.sum_nll;
+                kld.sum_nll2         += local_kld.sum_nll2;
local_kld.sum_nll2; + kld.sum_nll_base += local_kld.sum_nll_base; + kld.sum_nll_base2 += local_kld.sum_nll_base2; + kld.sum_nll_nll_base += local_kld.sum_nll_nll_base; + kld.sum_kld += local_kld.sum_kld; + kld.sum_kld2 += local_kld.sum_kld2; + kld.sum_p_diff += local_kld.sum_p_diff; + kld.sum_p_diff2 += local_kld.sum_p_diff2; + kld.sum_p_diff4 += local_kld.sum_p_diff4; + kld.n_same_top += local_kld.n_same_top; + kld.max_p_diff = std::max(kld.max_p_diff, local_kld.max_p_diff); + kld.count += local_kld.count; break; } lock.unlock(); - double v = log_softmax(n_vocab, logits + i*n_vocab, base_log_probs.data() + i*nv, tokens[i+1], local_kld); - kld_values[i] = (float)v; + std::pair v = log_softmax(n_vocab, logits + i*n_vocab, base_log_probs.data() + i*nv, tokens[i+1], local_kld); + kld_values[i] = (float)v.first; + p_diff_values[i] = v.second; } }; for (auto & w : workers) { @@ -1711,7 +1737,8 @@ static void kl_divergence(llama_context * ctx, const gpt_params & params) { GGML_ASSERT(llama_add_eos_token(llama_get_model(ctx)) != 1); std::vector log_probs_uint16(size_t(n_ctx - 1 - n_ctx/2) * nv); - std::vector kld_values(size_t(n_ctx - 1 - n_ctx/2)*n_chunk); + std::vector kld_values(size_t(n_ctx - 1 - n_ctx/2)*n_chunk); + std::vector p_diff_values(size_t(n_ctx - 1 - n_ctx/2)*n_chunk); std::vector logits; if (num_batches > 1) { logits.reserve(n_ctx * n_vocab); @@ -1728,9 +1755,18 @@ static void kl_divergence(llama_context * ctx, const gpt_params & params) { df = df > 0 && count > 10 ? sqrt(df/(count-1)) : 0.; return std::make_pair(f, df); }; + auto covariance = [] (double suma, double sumb, double sumab, size_t count) { + if (count < 10) { + return 0.0; + } + double var = sumab/count - (suma/count)*(sumb/count); + var /= count - 1; + return var; + }; kl_divergence_result kld; - auto kld_ptr = kld_values.data(); + auto kld_ptr = kld_values.data(); + auto p_diff_ptr = p_diff_values.data(); for (int i = 0; i < n_chunk; ++i) { const int start = i * n_ctx; @@ -1785,24 +1821,42 @@ static void kl_divergence(llama_context * ctx, const gpt_params & params) { } fprintf(stderr, "%.2f minutes\n", total_seconds / 60.0); - printf("\nchunk PPL ln(PPL(Q)/PPL(base)) KL-Divergence Same top\n"); + printf("\nchunk PPL ln(PPL(Q)/PPL(base)) KL Divergence Δp RMS Same top p\n"); } const int first = n_ctx/2; const float * all_logits = num_batches > 1 ? 
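The `covariance` lambda above is what makes the new ln(PPL(Q)/PPL(base)) uncertainty meaningful: both log-perplexities are estimated on the same tokens, so they are strongly correlated and their variances do not simply add. The patch uses standard error propagation for a difference of correlated means:

$$\sigma^2_{\bar x - \bar y} = \sigma^2_{\bar x} + \sigma^2_{\bar y} - 2\,\mathrm{Cov}(\bar x, \bar y), \qquad \mathrm{Cov}(\bar x, \bar y) = \frac{1}{n-1}\left(\frac{\sum_i x_i y_i}{n} - \frac{\sum_i x_i}{n}\cdot\frac{\sum_i y_i}{n}\right)$$

with $x_i$, $y_i$ the per-token NLLs of the quantized and base model; this is what `covariance(kld.sum_nll, kld.sum_nll_base, kld.sum_nll_nll_base, kld.count)` computes below.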
@@ -1785,24 +1821,42 @@ static void kl_divergence(llama_context * ctx, const gpt_params & params) {
             }
             fprintf(stderr, "%.2f minutes\n", total_seconds / 60.0);
 
-            printf("\nchunk             PPL               ln(PPL(Q)/PPL(base))          KL-Divergence           Same top\n");
+            printf("\nchunk        PPL          ln(PPL(Q)/PPL(base))    KL Divergence    Δp RMS    Same top p\n");
         }
 
         const int first = n_ctx/2;
         const float * all_logits = num_batches > 1 ? logits.data() : llama_get_logits(ctx);
         process_logits(n_vocab, all_logits + first*n_vocab, tokens.data() + start + first, n_ctx - 1 - first,
-                workers, log_probs_uint16, kld, kld_ptr);
-        kld_ptr += n_ctx - 1 - first;
+                workers, log_probs_uint16, kld, kld_ptr, p_diff_ptr);
+        p_diff_ptr += n_ctx - 1 - first;
+        kld_ptr    += n_ctx - 1 - first;
+
+        printf("%4d", i+1);
+
+        auto log_ppl = mean_and_uncertainty(kld.sum_nll, kld.sum_nll2, kld.count);
+        const double ppl_val = exp(log_ppl.first);
+        const double ppl_unc = ppl_val * log_ppl.second; // ppl_unc = sqrt( (dexp(x) / dx) ** 2 * log_ppl.second ** 2 )
+        printf("   %9.4lf ± %9.4lf", ppl_val, ppl_unc);
+
+        auto log_ppl_base = mean_and_uncertainty(kld.sum_nll_base, kld.sum_nll_base2, kld.count);
+        const double log_ppl_cov = covariance(kld.sum_nll, kld.sum_nll_base, kld.sum_nll_nll_base, kld.count);
+        const double log_ppl_ratio_val = log_ppl.first - log_ppl_base.first;
+        const double log_ppl_ratio_unc = sqrt(log_ppl.second*log_ppl.second + log_ppl_base.second*log_ppl_base.second - 2.0*log_ppl_cov);
+        printf("   %10.5lf ± %10.5lf", log_ppl_ratio_val, log_ppl_ratio_unc);
+
+        auto kl_div = mean_and_uncertainty(kld.sum_kld, kld.sum_kld2, kld.count);
+        printf("   %10.5lf ± %10.5lf", kl_div.first, kl_div.second);
 
-        auto ppl           = mean_and_uncertainty(kld.sum_nll, kld.sum_nll2, kld.count);
-        auto log_ppl_ratio = mean_and_uncertainty(kld.sum_nll_diff, kld.sum_nll_diff2, kld.count);
-        auto kl_div        = mean_and_uncertainty(kld.sum_kld, kld.sum_kld2, kld.count);
-        auto p_top = 1.*kld.n_same_top/kld.count;
-        auto d_p_top = sqrt(p_top*(1 - p_top)/(kld.count - 1));
+        auto p_diff_mse = mean_and_uncertainty(kld.sum_p_diff2, kld.sum_p_diff4, kld.count);
+        const double p_diff_rms_val = sqrt(p_diff_mse.first);
+        const double p_diff_rms_unc = 0.5/p_diff_rms_val * p_diff_mse.second;
+        printf("  %6.3lf ± %6.3lf %%", 100.0*p_diff_rms_val, 100.0*p_diff_rms_unc);
 
-        printf("%4d    %10.4lf    %10.5lf ± %10.5f    %10.5f ± %10.5lf    %.5f ± %.5f\n", i+1, exp(ppl.first),
-                log_ppl_ratio.first, log_ppl_ratio.second, kl_div.first, kl_div.second,
-                p_top, d_p_top);
+        double p_top_val = 1.*kld.n_same_top/kld.count;
+        double p_top_unc = sqrt(p_top_val*(1 - p_top_val)/(kld.count - 1));
+        printf("   %6.3lf ± %6.3lf %%", 100.0*p_top_val, 100.0*p_top_unc);
+
+        printf("\n");
 
         fflush(stdout);
@@ -1813,31 +1867,97 @@ static void kl_divergence(llama_context * ctx, const gpt_params & params) {
     if (kld.count < 100) return; // we do not wish to do statistics on so few values
 
     std::sort(kld_values.begin(), kld_values.end());
+    std::sort(p_diff_values.begin(), p_diff_values.end());
+
+    printf("====== Perplexity statistics ======\n");
+
+    auto log_ppl = mean_and_uncertainty(kld.sum_nll, kld.sum_nll2, kld.count);
+    const double ppl_val = exp(log_ppl.first);
+    const double ppl_unc = ppl_val * log_ppl.second; // ppl_unc = sqrt( (dexp(x) / dx) ** 2 * log_ppl.second ** 2 )
+    printf("Mean PPL(Q)                   : %10.6lf ± %10.6lf\n", ppl_val, ppl_unc);
+
+    auto log_ppl_base = mean_and_uncertainty(kld.sum_nll_base, kld.sum_nll_base2, kld.count);
+    const double ppl_base_val = exp(log_ppl_base.first);
+    const double ppl_base_unc = ppl_base_val * log_ppl_base.second; // ppl_base_unc = sqrt( (dexp(x) / dx) ** 2 * log_ppl_base.second ** 2 )
+    printf("Mean PPL(base)                : %10.6lf ± %10.6lf\n", ppl_base_val, ppl_base_unc);
+
+    const double log_ppl_cov = covariance(kld.sum_nll, kld.sum_nll_base, kld.sum_nll_nll_base, kld.count);
+    // printf("Cov(ln(PPL(Q)), ln(PPL(base))): %10.6lf\n", log_ppl_cov);
+    const double log_ppl_cor = log_ppl_cov / (log_ppl.second*log_ppl_base.second);
+    printf("Cor(ln(PPL(Q)), ln(PPL(base))): %6.2lf%%\n", 100.0*log_ppl_cor);
+
+    const double log_ppl_ratio_val = log_ppl.first - log_ppl_base.first;
+    const double log_ppl_ratio_unc = sqrt(log_ppl.second*log_ppl.second + log_ppl_base.second*log_ppl_base.second - 2.0*log_ppl_cov);
+    printf("Mean ln(PPL(Q)/PPL(base))     : %10.6lf ± %10.6lf\n", log_ppl_ratio_val, log_ppl_ratio_unc);
 
-    printf("===== KL-divergence statistics\n");
+    const double ppl_ratio_val = exp(log_ppl_ratio_val);
+    const double ppl_ratio_unc = ppl_ratio_val * log_ppl_ratio_unc; // ppl_ratio_unc = sqrt( (dexp(x) / dx) ** 2 * log_ppl_ratio.second ** 2 )
+    printf("Mean PPL(Q)/PPL(base)         : %10.6lf ± %10.6lf\n", ppl_ratio_val, ppl_ratio_unc);
+
+    const double ppl_cov = ppl_val * ppl_base_val * log_ppl_cov;
+    const double ppl_diff_val = ppl_val - ppl_base_val;
+    const double ppl_diff_unc = sqrt(ppl_unc*ppl_unc + ppl_base_unc*ppl_base_unc - 2.0*ppl_cov);
+    printf("Mean PPL(Q)-PPL(base)         : %10.6lf ± %10.6lf\n", ppl_diff_val, ppl_diff_unc);
+
+    printf("\n");
+
+    printf("====== KL divergence statistics ======\n");
     auto kl_div = mean_and_uncertainty(kld.sum_kld, kld.sum_kld2, kld.count);
-    printf("Average: %10.6f ±%10.6lf\n", kl_div.first, kl_div.second);
+    printf("Mean    KLD: %10.6lf ± %10.6lf\n", kl_div.first, kl_div.second);
     auto kld_median = kld_values.size()%2 == 0 ? 0.5f*(kld_values[kld_values.size()/2] + kld_values[kld_values.size()/2-1])
                                                : kld_values[kld_values.size()/2];
-    printf("Median : %10.6f\n", kld_median);
 
-    auto percentile = [&kld_values] (float fraction) {
-        if (fraction <= 0) return kld_values.front();
-        if (fraction >= 1) return kld_values.back();
-        float p = fraction*(kld_values.size() - 1);
+    auto percentile = [] (std::vector<float> values, float fraction) {
+        if (fraction <= 0) return values.front();
+        if (fraction >= 1) return values.back();
+        float p = fraction*(values.size() - 1);
         size_t ip = size_t(p);
         p -= ip;
-        return (1 - p)*kld_values[ip] + p*kld_values[std::min(ip+1, kld_values.size()-1)];
+        return (1 - p)*values[ip] + p*values[std::min(ip+1, values.size()-1)];
     };
 
-    printf("Maximum: %10.6f\n", kld_values.back());
-    printf("KLD_99 : %10.6f\n", percentile(0.99f));
-    printf("KLD_95 : %10.6f\n", percentile(0.95f));
-    printf("KLD_90 : %10.6f\n", percentile(0.90f));
+    printf("Maximum KLD: %10.6f\n", kld_values.back());
+    printf("99.9%%   KLD: %10.6f\n", percentile(kld_values, 0.999f));
+    printf("99.0%%   KLD: %10.6f\n", percentile(kld_values, 0.990f));
+    printf("95.0%%   KLD: %10.6f\n", percentile(kld_values, 0.950f));
+    printf("Median  KLD: %10.6f\n", kld_median);
+    printf("10.0%%   KLD: %10.6f\n", percentile(kld_values, 0.100f));
+    printf(" 5.0%%   KLD: %10.6f\n", percentile(kld_values, 0.050f));
+    printf(" 1.0%%   KLD: %10.6f\n", percentile(kld_values, 0.010f));
+    printf("Minimum KLD: %10.6f\n", kld_values.front());
+
+    printf("\n");
 
-    printf("Minimum: %10.6f\n", kld_values.front());
-    printf("KLD_01 : %10.6f\n", percentile(0.01f));
-    printf("KLD_05 : %10.6f\n", percentile(0.05f));
-    printf("KLD_10 : %10.6f\n", percentile(0.10f));
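The `percentile` helper above linearly interpolates between the two nearest order statistics of an already sorted sample, which is why both `kld_values` and `p_diff_values` are sorted first. A standalone toy check of that interpolation (the input values here are made up):

```cpp
// Same interpolation as the lambda in the patch, lifted into a free function.
#include <algorithm>
#include <cstdio>
#include <vector>

static float percentile(std::vector<float> values, float fraction) {
    if (fraction <= 0) return values.front();
    if (fraction >= 1) return values.back();
    float p = fraction*(values.size() - 1); // fractional index into the sorted sample
    size_t ip = size_t(p);
    p -= ip;
    return (1 - p)*values[ip] + p*values[std::min(ip + 1, values.size() - 1)];
}

int main() {
    std::vector<float> v = {0.1f, 0.2f, 0.4f, 0.8f}; // must already be sorted
    std::printf("%.3f\n", percentile(v, 0.5f));      // 0.300: halfway between 0.2 and 0.4
}
```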
+
+    printf("====== Token probability statistics ======\n");
+
+    auto p_diff = mean_and_uncertainty(kld.sum_p_diff, kld.sum_p_diff2, kld.count);
+    printf("Mean    Δp: %6.3lf ± %5.3lf %%\n",  100.0*p_diff.first, 100.0*p_diff.second);
+
+    auto p_diff_median = p_diff_values.size()%2 == 0 ? 0.5f*(p_diff_values[p_diff_values.size()/2] + p_diff_values[p_diff_values.size()/2-1])
+                                                     : p_diff_values[p_diff_values.size()/2];
+
+    printf("Maximum Δp: %6.3lf%%\n",  100.0*p_diff_values.back());
+    printf("99.9%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.999f));
+    printf("99.0%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.990f));
+    printf("95.0%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.950f));
+    printf("90.0%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.900f));
+    printf("75.0%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.750f));
+    printf("Median  Δp: %6.3lf%%\n",  100.0*p_diff_median);
+    printf("25.0%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.250f));
+    printf("10.0%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.100f));
+    printf(" 5.0%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.050f));
+    printf(" 1.0%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.010f));
+    printf(" 0.1%%   Δp: %6.3lf%%\n", 100.0*percentile(p_diff_values, 0.001f));
+    printf("Minimum Δp: %6.3lf%%\n",  100.0*p_diff_values.front());
+
+    auto p_diff_mse = mean_and_uncertainty(kld.sum_p_diff2, kld.sum_p_diff4, kld.count);
+    // printf("MSE Δp    : %10.6lf ± %10.6lf\n", p_diff_mse.first, p_diff_mse.second);
+
+    const double p_diff_rms_val = sqrt(p_diff_mse.first);
+    const double p_diff_rms_unc = 0.5/p_diff_rms_val * p_diff_mse.second;
+    printf("RMS Δp    : %6.3lf ± %5.3lf %%\n", 100.0*p_diff_rms_val, 100.0*p_diff_rms_unc);
+
+    const double same_top_p = 1.0*kld.n_same_top/kld.count;
+    printf("Same top p: %6.3lf ± %5.3lf %%\n", 100.0*same_top_p, 100.0*sqrt(same_top_p*(1.0 - same_top_p)/(kld.count - 1)));
 }

From b48ea96eadcf08cc92cf58b762cba3e5d90cbb6e Mon Sep 17 00:00:00 2001
From: Concedo <39025047+LostRuins@users.noreply.github.com>
Date: Wed, 1 May 2024 11:35:07 +0800
Subject: [PATCH 096/101] removed unwanted debugs

---
 gpttype_adapter.cpp | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/gpttype_adapter.cpp b/gpttype_adapter.cpp
index 90bd2dd058078..fe6dfc1c417f1 100644
--- a/gpttype_adapter.cpp
+++ b/gpttype_adapter.cpp
@@ -1612,16 +1612,13 @@ generation_outputs gpttype_generate(const generation_inputs inputs)
             //if it tokenizes to a single token, AND it's a single non-printable special token, use that
             std::vector<int> tmp;
             TokenizeString(stopper, tmp, file_format, false);
-            printf("\nPRINT TOK VEC:");
-            print_tok_vec_str(tmp);
+
             if(tmp.size()==1) //tokenizes to exactly 1 special token
             {
                 int specialid = tmp[0];
                 std::string tokenizedstr = FileFormatTokenizeID(specialid, file_format);
-                printf("\nTest %s",tokenizedstr.c_str());
                 if(tokenizedstr=="") //must NOT have a text representation
                 {
-                    printf("\nAdded %d",specialid);
                     special_stop_sequence.push_back(specialid);
                 }
             }

From c4ec9c0d3d67e6b33638e6dad86419e6fd5ffe01 Mon Sep 17 00:00:00 2001
From: slaren
Date: Wed, 1 May 2024 07:13:59 +0200
Subject: [PATCH 097/101] ci : exempt confirmed bugs from being tagged as stale
 (#7014)

---
 .github/workflows/close-issue.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/close-issue.yml b/.github/workflows/close-issue.yml
index 7f21daec01d1b..69c9f4f69e53b 100644
--- a/.github/workflows/close-issue.yml
+++ b/.github/workflows/close-issue.yml
@@ -12,7 +12,7 @@ jobs:
     steps:
       - uses: actions/stale@v5
         with:
-          exempt-issue-labels: "refactor,help wanted,good first issue,research"
+          exempt-issue-labels: "refactor,help wanted,good first issue,research,bug"
           days-before-issue-stale: 30
           days-before-issue-close: 14
           stale-issue-label: "stale"
From cea46750b0138e76509b70e0970cd10a8153e410 Mon Sep 17 00:00:00 2001
From: Concedo <39025047+LostRuins@users.noreply.github.com>
Date: Wed, 1 May 2024 14:11:40 +0800
Subject: [PATCH 098/101] try hack in missing hmax2 functions (+1 squashed
 commits)

Squashed commits:

[c98d0ab6] try hack in missing hmax2 functions (+1 squashed commits)

Squashed commits:

[9ba8599f] try hack in missing hmax2 functions (+2 squashed commit)

Squashed commit:

[be497493] try hack in missing hmax2 functions

[159ee4c3] bypass missing hmax functions on old cuda
---
 ggml-cuda/fattn.cu | 29 ++++++++++++++++++++++++++---
 1 file changed, 26 insertions(+), 3 deletions(-)

diff --git a/ggml-cuda/fattn.cu b/ggml-cuda/fattn.cu
index df1e80068b334..a023c9a176c69 100644
--- a/ggml-cuda/fattn.cu
+++ b/ggml-cuda/fattn.cu
@@ -11,6 +11,29 @@
 #define HALF_MAX_HALF         __float2half(65504.0f/2) // Use neg. of this instead of -INFINITY to initialize KQ max vals to avoid NaN upon subtraction.
 #define SOFTMAX_FTZ_THRESHOLD -20.0f                   // Softmax exp. of values smaller than this are flushed to zero to avoid NaNs.
 
+//hack: polyfill hmax and hmax2 for older cuda version
+#if CUDART_VERSION < CUDART_HMAX
+__device__ __inline__ __half hmax(const __half a, const __half b) {
+    const float fa = __half2float(a);
+    const float fb = __half2float(b);
+    return __float2half(fa > fb ? fa : fb);
+}
+__device__ __inline__ __half2 hmax2(const __half2 a, const __half2 b) {
+    __half2 result;
+    result.x = hmax(a.x, b.x);
+    result.y = hmax(a.y, b.y);
+    return result;
+}
+#else
+__device__ __inline__ __half hmax(const __half a, const __half b) {
+    return __hmax(a,b);
+}
+__device__ __inline__ __half2 hmax2(const __half2 a, const __half2 b) {
+    return __hmax2(a,b);
+}
+#endif
+
+
 template <int D, int parallel_blocks> // D == head size
 __launch_bounds__(((D + WARP_SIZE - 1) / WARP_SIZE)*WARP_SIZE, 1)
 static __global__ void flash_attn_vec_ext_f16(
@@ -116,7 +139,7 @@ static __global__ void flash_attn_vec_ext_f16(
                 sum2 = warp_reduce_sum(sum2);
                 half sum = __low2half(sum2) + __high2half(sum2);
                 sum += mask ? maskh[k_VKQ_0 + i_KQ] : __float2half(0.0f);
-                kqmax_new = __hmax(kqmax_new, sum);
+                kqmax_new = hmax(kqmax_new, sum);
                 if (threadIdx.x == 0) {
                     KQ[i_KQ] = sum;
                 }
@@ -416,9 +439,9 @@ static __global__ void flash_attn_ext_f16(
                 const int k = k0 + threadIdx.x;
 
                 KQ2_tmp[k0/WARP_SIZE] += mask ? mask2[(j*ne11 + k_VKQ_0)/2 + k] : make_half2(0.0f, 0.0f);
-                KQ_max_new = __hmax2(KQ_max_new, KQ2_tmp[k0/WARP_SIZE]);
+                KQ_max_new = hmax2(KQ_max_new, KQ2_tmp[k0/WARP_SIZE]);
             }
 
-            KQ_max_new = __half2half2(warp_reduce_max(__hmax(__low2half(KQ_max_new), __high2half(KQ_max_new))));
+            KQ_max_new = __half2half2(warp_reduce_max(hmax(__low2half(KQ_max_new), __high2half(KQ_max_new))));
             const half2 diff = KQ_max_h2[j0/nwarps] - KQ_max_new;
             KQ_max_scale_h2[j0/nwarps] = h2exp(diff);
             const uint32_t ftz_mask = __hgt2_mask(diff, make_half2(SOFTMAX_FTZ_THRESHOLD, SOFTMAX_FTZ_THRESHOLD));
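The fallback branch in this patch is exact rather than approximate: every finite `__half` value converts to `float` without rounding, so comparing the widened values yields the same ordering a native `__hmax` would (ignoring NaN corner cases). A CUDA-free sketch of the two-lane semantics, with plain floats standing in for the hypothetical 16-bit lanes:

```cpp
// Per-lane max over a 2-wide vector, mirroring the hmax2 polyfill above.
// half2_t is a stand-in for __half2; real lanes would be 16-bit halves.
#include <cstdio>

struct half2_t { float x, y; }; // two independent lanes

static float hmax_lane(float a, float b) { return a > b ? a : b; }

static half2_t hmax2_emulated(half2_t a, half2_t b) {
    // strictly lane-wise: no cross-lane mixing
    return { hmax_lane(a.x, b.x), hmax_lane(a.y, b.y) };
}

int main() {
    half2_t a{1.0f, -3.0f}, b{0.5f, 2.0f};
    half2_t m = hmax2_emulated(a, b);
    std::printf("%g %g\n", m.x, m.y); // 1 2
}
```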
From e9978bfac0f4f05c040b1c08774936e56ac807d4 Mon Sep 17 00:00:00 2001
From: Concedo <39025047+LostRuins@users.noreply.github.com>
Date: Wed, 1 May 2024 17:38:49 +0800
Subject: [PATCH 099/101] resize window dimensions

---
 koboldcpp.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/koboldcpp.py b/koboldcpp.py
index 0c404a08ff860..a5ca38809a8f2 100644
--- a/koboldcpp.py
+++ b/koboldcpp.py
@@ -1524,8 +1524,8 @@ def show_new_gui():
     import customtkinter as ctk
     nextstate = 0 #0=exit, 1=launch
-    original_windowwidth = 540
-    original_windowheight = 500
+    original_windowwidth = 550
+    original_windowheight = 550
     windowwidth = original_windowwidth
     windowheight = original_windowheight
     ctk.set_appearance_mode("dark")

From b641d986f7bec7dbfcf5b3f148f758b5d0935f58 Mon Sep 17 00:00:00 2001
From: Concedo <39025047+LostRuins@users.noreply.github.com>
Date: Wed, 1 May 2024 18:29:07 +0800
Subject: [PATCH 100/101] use johannes implementation instead (+1 squashed
 commits)

Squashed commits:

[f5e6709d] use johannes implementation instead

---
 ggml-cuda/common.cuh | 30 +++++++++++++++++++++++++-----
 ggml-cuda/fattn.cu   | 23 -----------------------
 2 files changed, 25 insertions(+), 28 deletions(-)

diff --git a/ggml-cuda/common.cuh b/ggml-cuda/common.cuh
index 276c3fa23799b..1adc6b5b809d1 100644
--- a/ggml-cuda/common.cuh
+++ b/ggml-cuda/common.cuh
@@ -137,7 +137,8 @@
 #define STRINGIZE(...) STRINGIZE_IMPL(__VA_ARGS__)
 
 #define WARP_SIZE 32
-#define CUDART_HMAX 11070 // CUDA 11.7, min. ver. for which __hmax and __hmax2 are known to work (may be higher than needed)
+#define CUDART_HMAX  11070 // CUDA 11.7, min. ver. for which __hmax and __hmax2 are known to work (may be higher than needed)
+#define CUDART_HMASK 12000 // CUDA 12.0, min. ver. for half2 -> uint mask comparisons
 
 #define CC_PASCAL 600
 #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products
@@ -293,20 +294,39 @@ static __device__ __forceinline__ float warp_reduce_max(float x) {
     return x;
 }
 
+#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && CUDART_VERSION < CUDART_HMAX
+static __device__ __forceinline__ half hmax(const half a, const half b) {
+    return __half2float(a) > __half2float(b) ? a : b;
+}
+static __device__ __forceinline__ half2 hmax2(const half2 a, const half2 b) {
+    half2 ret;
+    reinterpret_cast<half&>(ret.x) = __low2float(a)  > __low2float(b)  ? __low2half(a)  : __low2half(b);
+    reinterpret_cast<half&>(ret.y) = __high2float(a) > __high2float(b) ? __high2half(a) : __high2half(b);
+    return ret;
+}
+#else
+static __device__ __inline__ __half hmax(const __half a, const __half b) {
+    return __hmax(a,b);
+}
+static __device__ __inline__ __half2 hmax2(const __half2 a, const __half2 b) {
+    return __hmax2(a,b);
+}
+#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && CUDART_VERSION < CUDART_HMAX
+
 static __device__ __forceinline__ half2 warp_reduce_max(half2 x) {
-#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX
+#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL
 #pragma unroll
     for (int mask = 16; mask > 0; mask >>= 1) {
-        x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32));
+        x = hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32));
     }
     return x;
 #else
     GGML_UNUSED(x);
     NO_DEVICE_CODE;
-#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX
+#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL
 }
 
-#if CUDART_VERSION < 12000
+#if CUDART_VERSION < CUDART_HMASK
 static __device__ __forceinline__ uint32_t __hgt2_mask(const half2 a, const half2 b) {
     const uint32_t mask_low  = 0x0000FFFF * (float( __low2half(a)) > float( __low2half(b)));
     const uint32_t mask_high = 0xFFFF0000 * (float(__high2half(a)) > float(__high2half(b)));
diff --git a/ggml-cuda/fattn.cu b/ggml-cuda/fattn.cu
index a023c9a176c69..f9418495b8ab6 100644
--- a/ggml-cuda/fattn.cu
+++ b/ggml-cuda/fattn.cu
@@ -11,29 +11,6 @@
 #define HALF_MAX_HALF         __float2half(65504.0f/2) // Use neg. of this instead of -INFINITY to initialize KQ max vals to avoid NaN upon subtraction.
 #define SOFTMAX_FTZ_THRESHOLD -20.0f                   // Softmax exp. of values smaller than this are flushed to zero to avoid NaNs.
 
-//hack: polyfill hmax and hmax2 for older cuda version
-#if CUDART_VERSION < CUDART_HMAX
-__device__ __inline__ __half hmax(const __half a, const __half b) {
-    const float fa = __half2float(a);
-    const float fb = __half2float(b);
-    return __float2half(fa > fb ? fa : fb);
-}
-__device__ __inline__ __half2 hmax2(const __half2 a, const __half2 b) {
-    __half2 result;
-    result.x = hmax(a.x, b.x);
-    result.y = hmax(a.y, b.y);
-    return result;
-}
-#else
-__device__ __inline__ __half hmax(const __half a, const __half b) {
-    return __hmax(a,b);
-}
-__device__ __inline__ __half2 hmax2(const __half2 a, const __half2 b) {
-    return __hmax2(a,b);
-}
-#endif
-
-
 template <int D, int parallel_blocks> // D == head size
 __launch_bounds__(((D + WARP_SIZE - 1) / WARP_SIZE)*WARP_SIZE, 1)
 static __global__ void flash_attn_vec_ext_f16(
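Patch 100 (and the upstream version that follows) also keeps a fallback for `__hgt2_mask` on CUDA older than 12.0: a packed greater-than comparison returning a 32-bit mask whose low and high 16 bits are all-ones where the corresponding half lane compares greater. A host-side sketch of the mask construction (the inputs are hypothetical):

```cpp
// Build the 32-bit lane mask from two scalar comparisons, the same way the
// __hgt2_mask fallback in common.cuh does.
#include <cstdint>
#include <cstdio>

static uint32_t hgt2_mask(float a_lo, float a_hi, float b_lo, float b_hi) {
    const uint32_t mask_low  = 0x0000FFFFu * (a_lo > b_lo); // bool -> 0 or 1
    const uint32_t mask_high = 0xFFFF0000u * (a_hi > b_hi);
    return mask_low | mask_high;
}

int main() {
    // low lane: 2 > 1 -> bits set; high lane: -1 > 0 -> bits clear
    std::printf("0x%08X\n", (unsigned) hgt2_mask(2.0f, -1.0f, 1.0f, 0.0f)); // 0x0000FFFF
}
```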
From 1613ef8d8eb2479ba55c4d598e08c8f3f18a0fed Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?=
Date: Wed, 1 May 2024 14:46:37 +0200
Subject: [PATCH 101/101] CUDA: CUDART < 11.7 workaround for __hmax, __hmax2
 (#7019)

---
 ggml-cuda/common.cuh | 45 +++++++++++++++++++++++++++++++++++++++-----
 ggml-cuda/fattn.cu   |  6 +++---
 2 files changed, 43 insertions(+), 8 deletions(-)

diff --git a/ggml-cuda/common.cuh b/ggml-cuda/common.cuh
index 156eba6d1ef74..b2627b7b4b77f 100644
--- a/ggml-cuda/common.cuh
+++ b/ggml-cuda/common.cuh
@@ -137,7 +137,8 @@
 #define STRINGIZE(...) STRINGIZE_IMPL(__VA_ARGS__)
 
 #define WARP_SIZE 32
-#define CUDART_HMAX 11070 // CUDA 11.7, min. ver. for which __hmax and __hmax2 are known to work (may be higher than needed)
+#define CUDART_HMAX  11070 // CUDA 11.7, min. ver. for which __hmax and __hmax2 are known to work (may be higher than needed)
+#define CUDART_HMASK 12000 // CUDA 12.0, min. ver. for half2 -> uint mask comparisons
 
 #define CC_PASCAL 600
 #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products
@@ -293,20 +294,54 @@ static __device__ __forceinline__ float warp_reduce_max(float x) {
     return x;
 }
 
+static __device__ __forceinline__ half ggml_cuda_hmax(const half a, const half b) {
+#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__))
+
+#if CUDART_VERSION >= CUDART_HMAX
+    return __hmax(a, b);
+#else
+    return __half2float(a) > __half2float(b) ? a : b;
+#endif // CUDART_VERSION >= CUDART_HMAX
+
+#else
+    GGML_UNUSED(a);
+    GGML_UNUSED(b);
+    NO_DEVICE_CODE;
+#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && CUDART_VERSION < CUDART_HMAX
+}
+static __device__ __forceinline__ half2 ggml_cuda_hmax2(const half2 a, const half2 b) {
+#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__))
+
+#if CUDART_VERSION >= CUDART_HMAX
+    return __hmax2(a, b);
+#else
+    half2 ret;
+    reinterpret_cast<half&>(ret.x) = __low2float(a)  > __low2float(b)  ? __low2half(a)  : __low2half(b);
+    reinterpret_cast<half&>(ret.y) = __high2float(a) > __high2float(b) ? __high2half(a) : __high2half(b);
+    return ret;
+#endif // CUDART_VERSION >= CUDART_HMAX
+
+#else
+    GGML_UNUSED(a);
+    GGML_UNUSED(b);
+    NO_DEVICE_CODE;
+#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && CUDART_VERSION < CUDART_HMAX
+}
+
 static __device__ __forceinline__ half2 warp_reduce_max(half2 x) {
-#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX
+#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL
 #pragma unroll
     for (int mask = 16; mask > 0; mask >>= 1) {
-        x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32));
+        x = ggml_cuda_hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32));
     }
     return x;
 #else
     GGML_UNUSED(x);
     NO_DEVICE_CODE;
-#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX
+#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL
 }
 
-#if CUDART_VERSION < 12000
+#if CUDART_VERSION < CUDART_HMASK
 static __device__ __forceinline__ uint32_t __hgt2_mask(const half2 a, const half2 b) {
     const uint32_t mask_low  = 0x0000FFFF * (float( __low2half(a)) > float( __low2half(b)));
     const uint32_t mask_high = 0xFFFF0000 * (float(__high2half(a)) > float(__high2half(b)));
diff --git a/ggml-cuda/fattn.cu b/ggml-cuda/fattn.cu
index df1e80068b334..c8a11d1733464 100644
--- a/ggml-cuda/fattn.cu
+++ b/ggml-cuda/fattn.cu
@@ -116,7 +116,7 @@ static __global__ void flash_attn_vec_ext_f16(
                 sum2 = warp_reduce_sum(sum2);
                 half sum = __low2half(sum2) + __high2half(sum2);
                 sum += mask ? maskh[k_VKQ_0 + i_KQ] : __float2half(0.0f);
-                kqmax_new = __hmax(kqmax_new, sum);
+                kqmax_new = ggml_cuda_hmax(kqmax_new, sum);
                 if (threadIdx.x == 0) {
                     KQ[i_KQ] = sum;
                 }
@@ -416,9 +416,9 @@ static __global__ void flash_attn_ext_f16(
                 const int k = k0 + threadIdx.x;
 
                 KQ2_tmp[k0/WARP_SIZE] += mask ? mask2[(j*ne11 + k_VKQ_0)/2 + k] : make_half2(0.0f, 0.0f);
-                KQ_max_new = __hmax2(KQ_max_new, KQ2_tmp[k0/WARP_SIZE]);
+                KQ_max_new = ggml_cuda_hmax2(KQ_max_new, KQ2_tmp[k0/WARP_SIZE]);
             }
 
-            KQ_max_new = __half2half2(warp_reduce_max(__hmax(__low2half(KQ_max_new), __high2half(KQ_max_new))));
+            KQ_max_new = __half2half2(warp_reduce_max(ggml_cuda_hmax(__low2half(KQ_max_new), __high2half(KQ_max_new))));
             const half2 diff = KQ_max_h2[j0/nwarps] - KQ_max_new;
             KQ_max_scale_h2[j0/nwarps] = h2exp(diff);
             const uint32_t ftz_mask = __hgt2_mask(diff, make_half2(SOFTMAX_FTZ_THRESHOLD, SOFTMAX_FTZ_THRESHOLD));
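A closing note on `warp_reduce_max`, which both versions of the workaround route through the new half2 max helper: `__shfl_xor_sync` performs a butterfly reduction, so after log2(32) = 5 XOR steps every lane of the warp holds the maximum over all 32 lanes. A CPU sketch of the access pattern:

```cpp
// Butterfly (XOR-shuffle) reduction over 32 "lanes", mirroring the
// warp_reduce_max loop: for mask = 16,8,4,2,1 each lane combines with
// its partner lane (i ^ mask).
#include <algorithm>
#include <cstdio>

int main() {
    float lane[32];
    for (int i = 0; i < 32; ++i) lane[i] = float((i*7) % 31); // arbitrary test data
    for (int mask = 16; mask > 0; mask >>= 1) {
        float next[32];
        for (int i = 0; i < 32; ++i) next[i] = std::max(lane[i], lane[i ^ mask]); // __shfl_xor_sync analogue
        std::copy(next, next + 32, lane);
    }
    std::printf("%g\n", lane[0]); // 30: every lane now holds the maximum
}
```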