From 84a44815f704aaed8e8edec7a39e846a975c7ba9 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Mon, 13 Jan 2025 20:18:12 +0100 Subject: [PATCH 1/2] cli : auto activate conversation mode if chat template is available (#11214) * cli : auto activate conversation mode if chat template is detected * add warn on bad template * update readme (writing with the help of chatgpt) * update readme (2) * do not activate -cnv for non-instruct models --- README.md | 38 ++++++++++++++++++++---------------- common/arg.cpp | 20 +++++++++++-------- common/common.h | 9 ++++++++- examples/main/main.cpp | 44 ++++++++++++++++++++++++++++++++---------- 4 files changed, 75 insertions(+), 36 deletions(-) diff --git a/README.md b/README.md index 6302ac977..413a16422 100644 --- a/README.md +++ b/README.md @@ -245,6 +245,8 @@ The [Hugging Face](https://huggingface.co) platform hosts a [number of LLMs](htt - [Trending](https://huggingface.co/models?library=gguf&sort=trending) - [LLaMA](https://huggingface.co/models?sort=trending&search=llama+gguf) +You can either manually download the GGUF file or directly use any `llama.cpp`-compatible models from Hugging Face by using this CLI argument: `-hf <user>/<model>[:quant]` + After downloading a model, use the CLI tools to run it locally - see below. `llama.cpp` requires the model to be stored in the [GGUF](https://github.com/ggerganov/ggml/blob/master/docs/gguf.md) file format. Models in other data formats can be converted to GGUF using the `convert_*.py` Python scripts in this repo. @@ -263,21 +265,12 @@ To learn more about model quantization, [read this documentation](examples/quant #### A CLI tool for accessing and experimenting with most of `llama.cpp`'s functionality. -
- Run simple text completion - - ```bash - llama-cli -m model.gguf -p "I believe the meaning of life is" -n 128 - - # I believe the meaning of life is to find your own truth and to live in accordance with it. For me, this means being true to myself and following my passions, even if they don't align with societal expectations. I think that's what I love about yoga – it's not just a physical practice, but a spiritual one too. It's about connecting with yourself, listening to your inner voice, and honoring your own unique journey. - ``` - -
- --
Run in conversation mode + Models with a built-in chat template will automatically activate conversation mode. If this doesn't occur, you can manually enable it by adding `-cnv` and specifying a suitable chat template with `--chat-template NAME` + ```bash - llama-cli -m model.gguf -p "You are a helpful assistant" -cnv + llama-cli -m model.gguf # > hi, who are you? # Hi there! I'm your helpful assistant! I'm an AI-powered chatbot designed to assist and provide information to users like you. I'm here to help answer your questions, provide guidance, and offer support on a wide range of topics. I'm a friendly and knowledgeable AI, and I'm always happy to help with anything you need. What's on your mind, and how can I assist you today? @@ -289,17 +282,28 @@ To learn more about model quantization, [read this documentation](examples/quant
-
- Run with custom chat template + Run in conversation mode with custom chat template ```bash - # use the "chatml" template - llama-cli -m model.gguf -p "You are a helpful assistant" -cnv --chat-template chatml + # use the "chatml" template (use -h to see the list of supported templates) + llama-cli -m model.gguf -cnv --chat-template chatml # use a custom template - llama-cli -m model.gguf -p "You are a helpful assistant" -cnv --in-prefix 'User: ' --reverse-prompt 'User:' + llama-cli -m model.gguf -cnv --in-prefix 'User: ' --reverse-prompt 'User:' ``` - [Supported templates](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) +
+ +-
+ Run simple text completion + + To disable conversation mode explicitly, use `-no-cnv` + + ```bash + llama-cli -m model.gguf -p "I believe the meaning of life is" -n 128 -no-cnv + + # I believe the meaning of life is to find your own truth and to live in accordance with it. For me, this means being true to myself and following my passions, even if they don't align with societal expectations. I think that's what I love about yoga – it's not just a physical practice, but a spiritual one too. It's about connecting with yourself, listening to your inner voice, and honoring your own unique journey. + ```
diff --git a/common/arg.cpp b/common/arg.cpp index 1457a360f..dd10b6352 100644 --- a/common/arg.cpp +++ b/common/arg.cpp @@ -777,15 +777,19 @@ common_params_context common_params_parser_init(common_params & params, llama_ex ).set_examples({LLAMA_EXAMPLE_MAIN, LLAMA_EXAMPLE_SERVER})); add_opt(common_arg( {"-cnv", "--conversation"}, - string_format( - "run in conversation mode:\n" - "- does not print special tokens and suffix/prefix\n" - "- interactive mode is also enabled\n" - "(default: %s)", - params.conversation ? "true" : "false" - ), + "run in conversation mode:\n" + "- does not print special tokens and suffix/prefix\n" + "- interactive mode is also enabled\n" + "(default: auto enabled if chat template is available)", [](common_params & params) { - params.conversation = true; + params.conversation_mode = COMMON_CONVERSATION_MODE_ENABLED; + } + ).set_examples({LLAMA_EXAMPLE_MAIN})); + add_opt(common_arg( + {"-no-cnv", "--no-conversation"}, + "force disable conversation mode (default: false)", + [](common_params & params) { + params.conversation_mode = COMMON_CONVERSATION_MODE_DISABLED; } ).set_examples({LLAMA_EXAMPLE_MAIN})); add_opt(common_arg( diff --git a/common/common.h b/common/common.h index c86a4ef39..4fab1319a 100644 --- a/common/common.h +++ b/common/common.h @@ -103,6 +103,12 @@ enum dimre_method { DIMRE_METHOD_MEAN, }; +enum common_conversation_mode { + COMMON_CONVERSATION_MODE_DISABLED = 0, + COMMON_CONVERSATION_MODE_ENABLED = 1, + COMMON_CONVERSATION_MODE_AUTO = 2, +}; + // sampling parameters struct common_params_sampling { uint32_t seed = LLAMA_DEFAULT_SEED; // the seed used to initialize llama_sampler @@ -275,7 +281,6 @@ struct common_params { bool special = false; // enable special token output bool interactive = false; // interactive mode bool interactive_first = false; // wait for user input immediately - bool conversation = false; // conversation mode (does not print special tokens and suffix/prefix) bool prompt_cache_all = false; // save user input and generations to prompt cache bool prompt_cache_ro = false; // open the prompt cache read-only and do not update it @@ -301,6 +306,8 @@ struct common_params { ggml_type cache_type_k = GGML_TYPE_F16; // KV cache data type for the K ggml_type cache_type_v = GGML_TYPE_F16; // KV cache data type for the V + common_conversation_mode conversation_mode = COMMON_CONVERSATION_MODE_AUTO; + // multimodal models (see examples/llava) std::string mmproj = ""; // path to multimodal projector // NOLINT std::vector image; // path to image file(s) diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 640b35c1d..39666a0e8 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -30,6 +30,8 @@ #pragma warning(disable: 4244 4267) // possible loss of data #endif +static const char * DEFAULT_SYSTEM_MESSAGE = "You are a helpful assistant"; + static llama_context ** g_ctx; static llama_model ** g_model; static common_sampler ** g_smpl; @@ -204,8 +206,24 @@ int main(int argc, char ** argv) { LOG_WRN("%s: model was trained on only %d context tokens (%d specified)\n", __func__, n_ctx_train, n_ctx); } + // auto enable conversation mode if chat template is available + const bool has_chat_template = !common_get_builtin_chat_template(model).empty() || !params.chat_template.empty(); + if (params.conversation_mode == COMMON_CONVERSATION_MODE_AUTO) { + if (has_chat_template) { + LOG_INF("%s: chat template is available, enabling conversation mode (disable it with -no-cnv)\n", __func__); + params.conversation_mode = 
COMMON_CONVERSATION_MODE_ENABLED; + } else { + params.conversation_mode = COMMON_CONVERSATION_MODE_DISABLED; + } + } + + // in case user force-activate conversation mode (via -cnv) without proper chat template, we show a warning + if (params.conversation_mode && !has_chat_template) { + LOG_WRN("%s: chat template is not available or is not supported. This may cause the model to output suboptimal responses\n", __func__); + } + // print chat template example in conversation mode - if (params.conversation) { + if (params.conversation_mode) { if (params.enable_chat_template) { LOG_INF("%s: chat template example:\n%s\n", __func__, common_chat_format_example(model, params.chat_template).c_str()); } else { @@ -252,8 +270,10 @@ int main(int argc, char ** argv) { std::vector embd_inp; { - auto prompt = (params.conversation && params.enable_chat_template && !params.prompt.empty()) - ? chat_add_and_format(model, chat_msgs, "system", params.prompt) // format the system prompt in conversation mode + auto prompt = (params.conversation_mode && params.enable_chat_template) + // format the system prompt in conversation mode (fallback to default if empty) + ? chat_add_and_format(model, chat_msgs, "system", params.prompt.empty() ? DEFAULT_SYSTEM_MESSAGE : params.prompt) + // otherwise use the prompt as is : params.prompt; if (params.interactive_first || !params.prompt.empty() || session_tokens.empty()) { LOG_DBG("tokenize the prompt\n"); @@ -327,7 +347,7 @@ int main(int argc, char ** argv) { params.n_keep += add_bos; // always keep the BOS token } - if (params.conversation) { + if (params.conversation_mode) { params.interactive_first = true; } @@ -451,7 +471,11 @@ int main(int argc, char ** argv) { #if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) || defined (_WIN32) LOG_INF( " - Press Ctrl+C to interject at any time.\n"); #endif - LOG_INF( "%s\n", control_message); + LOG_INF( "%s", control_message); + if (params.conversation_mode && params.enable_chat_template && params.prompt.empty()) { + LOG_INF( " - Using default system message. 
To change it, set a different value via -p PROMPT or -f FILE argument.\n"); + } + LOG_INF("\n"); is_interacting = params.interactive_first; } @@ -763,7 +787,7 @@ int main(int argc, char ** argv) { } // if current token is not EOG, we add it to current assistant message - if (params.conversation) { + if (params.conversation_mode) { const auto id = common_sampler_last(smpl); assistant_ss << common_token_to_piece(ctx, id, false); } @@ -771,7 +795,7 @@ int main(int argc, char ** argv) { if (n_past > 0 && is_interacting) { LOG_DBG("waiting for user input\n"); - if (params.conversation) { + if (params.conversation_mode) { LOG("\n> "); } @@ -781,7 +805,7 @@ int main(int argc, char ** argv) { } std::string buffer; - if (!params.input_prefix.empty() && !params.conversation) { + if (!params.input_prefix.empty() && !params.conversation_mode) { LOG_DBG("appending input prefix: '%s'\n", params.input_prefix.c_str()); LOG("%s", params.input_prefix.c_str()); } @@ -805,7 +829,7 @@ int main(int argc, char ** argv) { // Entering a empty line lets the user pass control back if (buffer.length() > 1) { // append input suffix if any - if (!params.input_suffix.empty() && !params.conversation) { + if (!params.input_suffix.empty() && !params.conversation_mode) { LOG_DBG("appending input suffix: '%s'\n", params.input_suffix.c_str()); LOG("%s", params.input_suffix.c_str()); } @@ -818,7 +842,7 @@ int main(int argc, char ** argv) { string_process_escapes(buffer); } - bool format_chat = params.conversation && params.enable_chat_template; + bool format_chat = params.conversation_mode && params.enable_chat_template; std::string user_inp = format_chat ? chat_add_and_format(model, chat_msgs, "user", std::move(buffer)) : std::move(buffer); From 504af20ee4eae72080a56d59d744f6774f7901ce Mon Sep 17 00:00:00 2001 From: ebraminio Date: Mon, 13 Jan 2025 22:53:31 +0330 Subject: [PATCH 2/2] server : (UI) Improve messages bubble shape in RTL (#11220) I had simply overlooked the message bubble's tail placement for RTL text, since I use dark mode, where it isn't visible; this fixes it.
--- examples/server/public/index.html.gz | Bin 1206472 -> 1206483 bytes examples/server/webui/index.html | 7 +++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/examples/server/public/index.html.gz b/examples/server/public/index.html.gz index 489bff84b98f7bd7ebcdabeb7d0f9b31093eae58..18f7ecac9ef63eb46dcb3b84b1e10e2b79f22c44 100644 GIT binary patch [base85-encoded binary delta for the regenerated index.html.gz omitted; the accompanying text diff for examples/server/webui/index.html is truncated in this copy]