From cf054d41de86e9c4c902acce6c0009396821842d Mon Sep 17 00:00:00 2001
From: Wassim Chakroun
Date: Fri, 12 Jul 2024 17:30:35 +0200
Subject: [PATCH] Add new features to the app

Add an architecture selection feature, class labels input, and
classification scores display and download.
---
 app_model.py          | 138 +++++++++++++++++++++++++++---------------
 fairscope_favicon.png | Bin 0 -> 8183 bytes
 2 files changed, 88 insertions(+), 50 deletions(-)
 create mode 100644 fairscope_favicon.png

diff --git a/app_model.py b/app_model.py
index c988da3..e22e6f2 100644
--- a/app_model.py
+++ b/app_model.py
@@ -14,13 +14,9 @@ import torch
 import torch.nn as nn
+import torch.nn.functional as F
 import torchvision.transforms as transforms
-from torchvision.models import efficientnet_v2_m, efficientnet_v2_s
-
-model_types = {
-    "efficientnet_v2_m": efficientnet_v2_m,
-    "efficientnet_v2_s": efficientnet_v2_s,
-}
+import torchvision.models as models
 
 ############################################################################################
 # Functions/variables to be used in the Streamlit app
@@ -47,13 +43,30 @@ def set_theme(theme):
         st.markdown(light, unsafe_allow_html=True)
 
 # Define the model loading function
-def load_model(model_type, model_path):
+def load_model(architecture, model_path, num_classes=5):
     # Load the model checkpoint (remove map_location if you have a GPU)
     loaded_cpt = torch.load(model_path, map_location=torch.device('cpu'))
-    # Define the EfficientNet_V2_M model (by default, no pre-trained weights are used)
-    model = model_types[model_type]()
-    # Modify the classifier to match the number of classes in the dataset
-    model.classifier[-1] = nn.Linear(model.classifier[-1].in_features, 5)
+    # Define the model according to the architecture and modify the number of output classes (by default, no pre-trained weights are used)
+    if architecture == "EfficientNet_V2_S":
+        model = models.efficientnet_v2_s()
+        model.classifier[-1] = nn.Linear(model.classifier[-1].in_features, num_classes)
+    elif architecture == "EfficientNet_V2_M":
+        model = models.efficientnet_v2_m()
+        model.classifier[-1] = nn.Linear(model.classifier[-1].in_features, num_classes)
+    elif architecture == "EfficientNet_B7":
+        model = models.efficientnet_b7()
+        model.classifier[-1] = nn.Linear(model.classifier[-1].in_features, num_classes)
+    elif architecture == "ResNet50":
+        model = models.resnet50()
+        model.fc = nn.Linear(model.fc.in_features, num_classes)
+    elif architecture == "DenseNet121":
+        model = models.densenet121()
+        model.classifier = nn.Linear(model.classifier.in_features, num_classes)
+    elif architecture == "VGG16":
+        model = models.vgg16()
+        model.classifier[-1] = nn.Linear(model.classifier[-1].in_features, num_classes)
+    else:
+        raise ValueError("Unsupported architecture")
     # Load the state_dict in order to load the trained parameters
     model.load_state_dict(loaded_cpt)
     # Set the model to evaluation mode
@@ -66,8 +79,15 @@ def predict_image(image_path, model, transform):
     image = transform(image).unsqueeze(0)  # Add batch dimension
     with torch.no_grad():
         outputs = model(image)
+        classif_scores = F.softmax(outputs, dim=1)
     _, predicted = torch.max(outputs, 1)
-    return predicted
+    return predicted, classif_scores
+
+# Define the function to save the probabilities to a file
+def save_probabilities(probas, filename='classification_scores.txt'):
+    with open(filename, 'w') as f:
+        for prob in probas:
+            f.write(f'{prob[0]}: {prob[1]}\n')
 
 # Function to generate and display the graph of detected objects
 def display_distribution_plot(class_counts, sns_palette="pastel"):
@@ -76,7 +96,7 @@ def display_distribution_plot(class_counts, sns_palette="pastel"):
     # Convert seaborn colors to Plotly-compatible RGBA format
     plotly_colors = ['rgba' + str(tuple(int(255 * c) for c in color[:3]) + (1,)) for color in seaborn_palette]
     # Create a Plotly figure
-    fig = go.Figure(data=[go.Bar(y=list(class_counts.values()), x=list(class_counts.keys()), orientation='v', marker_color=plotly_colors)])
+    fig = go.Figure(data=[go.Bar(y=list(class_counts.values()), x=list(class_counts.keys()), orientation='v', marker_color=plotly_colors, text=list(class_counts.values()), textposition='auto')])
     fig.update_layout(
         title='Distribution of Detected Objects',
         title_font=dict(size=20),
@@ -103,6 +123,13 @@ def display_distribution_plot(class_counts, sns_palette="pastel"):
 # Body of the Streamlit app
 ############################################################################################
 def main():
+    # Set the page configuration
+    st.set_page_config(
+        page_title="Microorganism Classification",
+        page_icon="fairscope_favicon.png",
+        layout='wide',
+    )
+
     # Set the title of the Streamlit app
     st.title("Microorganism Classification")
 
@@ -119,42 +146,38 @@ def main():
         """, unsafe_allow_html=True
     )
 
-    # Create a toggle button
-    toggle = st.sidebar.button("Toggle theme", key="theme_toggle")
-
-    # Use a global variable to store the current theme
-    if "theme" not in st.session_state:
-        st.session_state.theme = "light"
-
-    # Change the theme based on the button state
-    if toggle:
-        if st.session_state.theme == "light":
-            st.session_state.theme = "dark"
-        else:
-            st.session_state.theme = "light"
-
-    # Apply the theme to the app
-    set_theme(st.session_state.theme)
+    # Initialize an empty list to store probabilities
+    if 'probabilities' not in st.session_state:
+        st.session_state.probabilities = []
+
+    with st.sidebar:
+        # Add text and link to the sidebar
+        st.markdown("""
+        ### :rocket: Try this easy-to-follow [notebook](https://colab.research.google.com/drive/1iyoA4jVSI0dErl7N3N-rPlx2mrBrV1ad?usp=drive_link) to train your task-specific classifier
+        """)
+        # Load the class labels
+        class_labels = st.text_input("Enter class labels (comma-separated in alphabetical order)", value="d_veliger, pedi_veliger, umbo_veliger").split(", ")
+        # Select the model architecture
+        architecture = st.selectbox(
+            "Select model architecture",
+            ("EfficientNet_V2_M", "EfficientNet_V2_S", "EfficientNet_B7", "ResNet50", "DenseNet121", "VGG16")
+        )
+
+        # Select a model to use for image classification
+        selected_model = st.file_uploader("Upload a model", type=["pth", "pt", "pb"])
+
+        # Wait for the user to select a model
+        if selected_model is not None:
+            # Extract the input image dimensions from the model name
+            pattern = r'(\d{3,4})x(\d{3,4})'
+            image_size = int(re.search(pattern, selected_model.name).group().split("x")[0])
+
+            # Load the selected model in pytorch
+            model = load_model(architecture, os.path.join("models", selected_model.name), num_classes=len(class_labels))
+            st.success('Model loaded successfully!')
 
     # File uploader for image selection
-    uploaded_files = st.file_uploader("Upload images", type=["jpg", "jpeg"], accept_multiple_files=True) #"png"
-
-    # List of available AI models
-    available_models = os.listdir("models")
-    selected_model = st.selectbox("Select a model", available_models)
-
-    # Extract the input image dimensions from the model name
-    pattern = r'(\d{3,4})x(\d{3,4})'
-    image_size = int(re.search(pattern, selected_model).group().split("x")[0])
-
-    # Load the selected model in pytorch
-    model = load_model(
-        os.getenv("TORCHVISION_MODEL_TYPE", "efficientnet_v2_m"),
-        os.path.join("models", selected_model),
-    )
-
-    # Load the class labels
-    class_labels = ["Acantharia", "Calanoida", "Neoceratium_petersii", "Ptychodiscus_noctiluca", "Undella"]
+    uploaded_files = st.file_uploader("Upload images", type=["jpg", "jpeg"], accept_multiple_files=True)
 
     # List to store predicted class labels
     predicted_class_labels = []
@@ -168,7 +191,7 @@ def main():
 
     if uploaded_files is not None:
         # Iterate over uploaded images and predict their classes
-        for uploaded_file in uploaded_files:
+        for (i, uploaded_file) in enumerate(uploaded_files):
             # Read the uploaded image
             image = cv2.imdecode(np.fromstring(uploaded_file.read(), np.uint8), 1)
 
@@ -182,12 +205,18 @@ def main():
             ])
 
             # Perform image classification
-            predicted_class_index = predict_image(uploaded_file, model, transform)
+            predicted_class_index, predicted_classif_scores = predict_image(uploaded_file, model, transform)
+            file_name = f"{i}. {uploaded_file.name}"
+            st.session_state.probabilities.append((file_name, dict(zip(class_labels, predicted_classif_scores.tolist()[0]))))
+            print(predicted_classif_scores)
             predicted_class_label = class_labels[predicted_class_index]
             predicted_class_labels.append(predicted_class_label)
 
             # Display the uploaded image with the predicted class
-            next(cols).image(image, width=150, caption=f"{uploaded_file.name} ({predicted_class_label})", use_column_width=True)
+            next(cols).image(image, width=150, caption=f"{i}. {predicted_class_label} ({torch.max(predicted_classif_scores):.4f})", use_column_width=True)
+
+            # Save the updated probabilities to a text file
+            #save_probabilities(st.session_state.probabilities)
 
     # Determine the number of detected objects
     num_objects = len(predicted_class_labels)
@@ -197,9 +226,18 @@ def main():
 
     # Count the occurrences of each class label
     class_counts = {label: predicted_class_labels.count(label) for label in class_labels}
+
+    # Convert probabilities to string format
+    probabilities_str = '\n'.join([f"{name}: {scores}" for name, scores in st.session_state.probabilities])
 
     # Plot the distribution of detected objects
    if num_objects > 0:
+
+        # Download the classification scores file with streamlit
+        with st.sidebar:
+            st.download_button(label="Download classification scores", data=probabilities_str, file_name="classification_scores.txt", mime="text/plain")
+
+        # Display the distribution of detected objects
         display_distribution_plot(class_counts)
 
 ############################################################################################
diff --git a/fairscope_favicon.png b/fairscope_favicon.png
new file mode 100644
index 0000000000000000000000000000000000000000..970867ff01f4d1f6ced24edb1a2f8eb3d89d2155
GIT binary patch
literal 8183
[base85-encoded binary data for fairscope_favicon.png omitted]
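
For reference only (not part of the commit): a minimal sketch of how the classification_scores.txt
file produced by the new sidebar download button could be read back into Python. It assumes each
line keeps the "<index>. <file name>: {<label>: <score>, ...}" layout built by probabilities_str
above, and that file names themselves contain no ": {" sequence; load_scores and its path argument
are hypothetical names used here for illustration.

import ast

def load_scores(path="classification_scores.txt"):
    # Parse each "<name>: {<label>: <score>, ...}" line back into a dict of per-class scores.
    scores = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            # Split at the start of the dict literal, then rebuild and evaluate it safely.
            name, _, dict_repr = line.partition(": {")
            scores[name] = ast.literal_eval("{" + dict_repr)
    return scores

if __name__ == "__main__":
    # Print the top-scoring class per image, mirroring the captions shown in the app.
    for name, per_class in load_scores().items():
        best = max(per_class, key=per_class.get)
        print(f"{name}: {best} ({per_class[best]:.4f})")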