From 526dafd8944bc2b4b4924120a5de0d1c73a2db44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kenan=20G=C3=B6mek?= Date: Mon, 9 May 2022 20:30:26 +0200 Subject: [PATCH] SA Kenan first init --- .../Auswertung_Farbkanaele_Multiprocessing.py | 237 + .../Auswertung_Farbkanaele_V01-00.py | 222 + 01_How-To-Control-LEDs-Manually.txt | 20 + 02_Kameraaufnahme/01_How-To-Start-Scripts.txt | 3 + .../PiCamera_shoot_manual_image.py | 188 + .../mp_raspividyuv_fast_capture_cannyedge.py | 112 + .../Rest/mp_raspividyuv_fast_rgb.py | 120 + .../mp_Vorbereitung/PiCameraVideoPort_mp.pdf | 4559 +++++++++++++++++ .../mp_Vorbereitung/PiCameraVideoPort_mp.py | 594 +++ .../mp_Vorbereitung/shared_memory.xlsx | Bin 0 -> 13472 bytes 03_Aufnahmeserie/01_How-To-Start-Scripts.txt | 3 + 03_Aufnahmeserie/01_Read Me.txt | 5 + 03_Aufnahmeserie/Ansteuerung_LEDs_uC.ino | 148 + 03_Aufnahmeserie/Ledstripe_Alignment.py | 235 + ...e_Shutterspeed_PiCameraVideoPort_V01-02.py | 441 ++ .../01_How-To-Start-Script_Calib.txt | 2 + 04_Spurerkennung/Kalibrierung/object_size.py | 117 + ...edetection_development_V02_findContours.py | 454 ++ .../dev/Lanedetection_development_V01.py | 254 + .../dev/Lanedetection_development_V02.py | 445 ++ .../dev/SimpleBlobDetector_parameters.jpg | Bin 0 -> 15296 bytes 04_Spurerkennung/dev/add_column_to_matrix.py | 12 + 04_Spurerkennung/dev/del_images.py | 25 + 04_Spurerkennung/dev/dev_Fußpunkt.py | 35 + .../dev/dev_Linefitting_and_angle.py | 59 + .../dev/dev_Measurement_Conversion.py | 38 + .../dev/dev_Simple_blob_detector.py | 66 + 04_Spurerkennung/dev/dev_Trafo.py | 37 + .../dev/dev_calc_mean_of_points.py | 20 + .../dev/dev_color_pattern_detection.py | 41 + .../dev/dev_grayscale_conversion.py | 55 + .../dev/dev_indexierung_Bildmatrix.py | 64 + 04_Spurerkennung/images_input/-30_0.png | Bin 0 -> 535664 bytes 04_Spurerkennung/images_input/01_Readme.txt | 2 + 04_Spurerkennung/images_input/0_0.png | Bin 0 -> 7868 bytes 04_Spurerkennung/images_input/0_O.png | Bin 0 -> 10982 bytes 
04_Spurerkennung/images_input/0_U.png | Bin 0 -> 10981 bytes 04_Spurerkennung/images_input/135_0.png | Bin 0 -> 12769 bytes 04_Spurerkennung/images_input/135_L.png | Bin 0 -> 8834 bytes 04_Spurerkennung/images_input/135_R.png | Bin 0 -> 10653 bytes 04_Spurerkennung/images_input/30_0.png | Bin 0 -> 12549 bytes 04_Spurerkennung/images_input/90_0.png | Bin 0 -> 8780 bytes 04_Spurerkennung/images_input/90_L.png | Bin 0 -> 8769 bytes 04_Spurerkennung/images_input/90_R.png | Bin 0 -> 8835 bytes 04_Spurerkennung/images_input/black.png | Bin 0 -> 3728 bytes 90_ZielSW/01_OnlyCapturing_NoProcessing.py | 156 + 90_ZielSW/02_With_Processing_Hough.py | 514 ++ ...With_Processing_Zielsoftware_Simpleblob.py | 514 ++ ...th_Processing_Zielsoftware_findContours.py | 521 ++ ...g_Zielsoftware_findContours_Erkennung.xlsx | Bin 0 -> 12470 bytes 90_ZielSW/Final/PAP.pdf | Bin 0 -> 30829 bytes 90_ZielSW/Final/Spurerkennung.py | 522 ++ .../Lanedetection_Picamera_V01.py | 912 ++++ .../Lanedetection_Picamera_V02.py | 905 ++++ .../Lanedetection_Picamera_V03.py | 925 ++++ Installationsanleitung.docx | Bin 0 -> 56930 bytes Installationsanleitung.pdf | Bin 0 -> 185917 bytes 57 files changed, 13582 insertions(+) create mode 100644 01_Auswertung/Auswertung_Farbkanaele_Multiprocessing.py create mode 100644 01_Auswertung/Auswertung_Farbkanaele_V01-00.py create mode 100644 01_How-To-Control-LEDs-Manually.txt create mode 100644 02_Kameraaufnahme/01_How-To-Start-Scripts.txt create mode 100644 02_Kameraaufnahme/PiCamera_shoot_manual_image.py create mode 100644 02_Kameraaufnahme/Rest/mp_raspividyuv_fast_capture_cannyedge.py create mode 100644 02_Kameraaufnahme/Rest/mp_raspividyuv_fast_rgb.py create mode 100644 02_Kameraaufnahme/mp_Vorbereitung/PiCameraVideoPort_mp.pdf create mode 100644 02_Kameraaufnahme/mp_Vorbereitung/PiCameraVideoPort_mp.py create mode 100644 02_Kameraaufnahme/mp_Vorbereitung/shared_memory.xlsx create mode 100644 03_Aufnahmeserie/01_How-To-Start-Scripts.txt create mode 100644 
03_Aufnahmeserie/01_Read Me.txt create mode 100644 03_Aufnahmeserie/Ansteuerung_LEDs_uC.ino create mode 100644 03_Aufnahmeserie/Ledstripe_Alignment.py create mode 100644 03_Aufnahmeserie/Takeimage_Shutterspeed_PiCameraVideoPort_V01-02.py create mode 100644 04_Spurerkennung/Kalibrierung/01_How-To-Start-Script_Calib.txt create mode 100644 04_Spurerkennung/Kalibrierung/object_size.py create mode 100644 04_Spurerkennung/Lanedetection_development_V02_findContours.py create mode 100644 04_Spurerkennung/dev/Lanedetection_development_V01.py create mode 100644 04_Spurerkennung/dev/Lanedetection_development_V02.py create mode 100644 04_Spurerkennung/dev/SimpleBlobDetector_parameters.jpg create mode 100644 04_Spurerkennung/dev/add_column_to_matrix.py create mode 100644 04_Spurerkennung/dev/del_images.py create mode 100644 04_Spurerkennung/dev/dev_Fußpunkt.py create mode 100644 04_Spurerkennung/dev/dev_Linefitting_and_angle.py create mode 100644 04_Spurerkennung/dev/dev_Measurement_Conversion.py create mode 100644 04_Spurerkennung/dev/dev_Simple_blob_detector.py create mode 100644 04_Spurerkennung/dev/dev_Trafo.py create mode 100644 04_Spurerkennung/dev/dev_calc_mean_of_points.py create mode 100644 04_Spurerkennung/dev/dev_color_pattern_detection.py create mode 100644 04_Spurerkennung/dev/dev_grayscale_conversion.py create mode 100644 04_Spurerkennung/dev/dev_indexierung_Bildmatrix.py create mode 100644 04_Spurerkennung/images_input/-30_0.png create mode 100644 04_Spurerkennung/images_input/01_Readme.txt create mode 100644 04_Spurerkennung/images_input/0_0.png create mode 100644 04_Spurerkennung/images_input/0_O.png create mode 100644 04_Spurerkennung/images_input/0_U.png create mode 100644 04_Spurerkennung/images_input/135_0.png create mode 100644 04_Spurerkennung/images_input/135_L.png create mode 100644 04_Spurerkennung/images_input/135_R.png create mode 100644 04_Spurerkennung/images_input/30_0.png create mode 100644 04_Spurerkennung/images_input/90_0.png create mode 
100644 04_Spurerkennung/images_input/90_L.png create mode 100644 04_Spurerkennung/images_input/90_R.png create mode 100644 04_Spurerkennung/images_input/black.png create mode 100644 90_ZielSW/01_OnlyCapturing_NoProcessing.py create mode 100644 90_ZielSW/02_With_Processing_Hough.py create mode 100644 90_ZielSW/02_With_Processing_Zielsoftware_Simpleblob.py create mode 100644 90_ZielSW/02_With_Processing_Zielsoftware_findContours.py create mode 100644 90_ZielSW/02_With_Processing_Zielsoftware_findContours_Erkennung.xlsx create mode 100644 90_ZielSW/Final/PAP.pdf create mode 100644 90_ZielSW/Final/Spurerkennung.py create mode 100644 90_ZielSW/Multiprocessing/Lanedetection_Picamera_V01.py create mode 100644 90_ZielSW/Multiprocessing/Lanedetection_Picamera_V02.py create mode 100644 90_ZielSW/Multiprocessing/Lanedetection_Picamera_V03.py create mode 100644 Installationsanleitung.docx create mode 100644 Installationsanleitung.pdf diff --git a/01_Auswertung/Auswertung_Farbkanaele_Multiprocessing.py b/01_Auswertung/Auswertung_Farbkanaele_Multiprocessing.py new file mode 100644 index 0000000..7dfe957 --- /dev/null +++ b/01_Auswertung/Auswertung_Farbkanaele_Multiprocessing.py @@ -0,0 +1,237 @@ +# Date: 18.12.2021 +# Author: Kenan Gömek +# Das Skript stellt die unterschiedlichen Farbkanäle der aufgenommenen Bilder dar und erstellt schnitte, um die Pixelwerte anzuzeigen. +# Update: 18.02.2021 +# Update Comment: Multiprocessing + +# Import needed Packages +import cv2 as cv +import numpy as np +import time +from datetime import datetime +import os +from matplotlib import pyplot as plt + +from multiprocessing import Process, cpu_count, Pool + +# define User Input +# No vowels in path name! +PATH_IMAGE_FOLDER = r'U:\bwsyncshare\Auswertung' + +COMMENT = "" # User comment for plot + + +# define functions +def calc_arithmetic_mean_of_brightness_per_pixel(r, g, b): + """Calculate overall brightness per pixel of the image. 
Mittelere Helligkeit pro pixel berechnen.""" + r = r.astype('uint16') # set dtype of one colour to uint16, because the sum of 255+255+255 >255 =765 + # the rest will also be uint16 then + image_heigth = r.shape[0] + image_width = r.shape[1] + + number_of_colour_channels = 3 + arithmetic_mean_of_brightness_image = np.sum((r+g+b)/number_of_colour_channels) + arithmetic_mean_of_brightness_per_pixel = arithmetic_mean_of_brightness_image/(image_width*image_heigth) + + max_possible_brightness = 255 # maximum possible brightness for an uint8 + arithmetic_mean_of_brightness_per_pixel_relative = arithmetic_mean_of_brightness_per_pixel/max_possible_brightness # range: 0-1 + + return arithmetic_mean_of_brightness_per_pixel_relative + +def image_analysis(image_name, image_number, number_of_total_images, comment): + print(f"Current image {image_number+1}/{number_of_total_images}: {image_name}") + + # Split and Merge colour channels + img_bgr = cv.imread(os.path.join(PATH_IMAGE_FOLDER,image_name), cv.IMREAD_COLOR) + b,g,r = cv.split(img_bgr) + img_rgb = img_bgr[:, :, ::-1] + + # Create Cuts and display Pixel Values + image_width = r.shape[1] + if image_width < 1000: + plot_marker_offset = 4 # Thickness of marker-line in plot in pixels + elif image_width >= 1000: + plot_marker_offset = 10 # Thickness of marker-line in plot in pixels + + + # Identify dominating colour channel and set cut row and column + max_pixelvalue_red = r.max(); max_pixelvalue_green = g.max(); max_pixelvalue_blue = b.max() + max_pixelvalue = np.max([max_pixelvalue_red, max_pixelvalue_green, max_pixelvalue_blue]) + + idx_max_pixelvalue_red = np.unravel_index(r.argmax(), r.shape) #row=idx_..[0] column=idx_..[1] + idx_max_pixelvalue_green = np.unravel_index(g.argmax(), g.shape) #row=idx_..[0] column=idx_..[1] + idx_max_pixelvalue_blue = np.unravel_index(b.argmax(), b.shape) #row=idx_..[0] column=idx_..[1] + if max_pixelvalue == max_pixelvalue_red: + idx_max_pixelvalue = idx_max_pixelvalue_red; 
msg_dominating_colourchannel = 'red' + elif max_pixelvalue == max_pixelvalue_green: + idx_max_pixelvalue = idx_max_pixelvalue_green; msg_dominating_colourchannel = 'green' + elif max_pixelvalue == max_pixelvalue_blue: + idx_max_pixelvalue = idx_max_pixelvalue_blue; msg_dominating_colourchannel = 'blue' + + + cut_row=idx_max_pixelvalue[0]; cut_column=idx_max_pixelvalue[1] + + + # Red channel + # Info linspace(start, stop, num): + # stop: The end value of the sequence --> stop is included + # num: Number of samples to generate. + # Because Array stars at index 0: e.g. image with 3280x2464 Pixels: for 3280 Pixels you need 3280 values between 0 and 3279 + # check: x_red_h must have 3280 values?? --> Yes from 0 to 3279 in 1 steps + x_red_h = np.linspace(0, r.shape[1]-1, r.shape[1]); y_red_h = r[cut_row,:] # data horizontal cut ->width (e.g.: 3280) + x_red_v = np.linspace(0, r.shape[0]-1, r.shape[0]); y_red_v = r[:,cut_column] # data vertical cut ->height (e.g.: 2464) + + msg1_red = f"Maximum Pixel value in red channel: {max_pixelvalue_red}"; print(msg1_red) + msg2_red = f"Index of max Value in red channel (row, colum): {idx_max_pixelvalue_red}"; print(msg2_red) + msg3_red = f"Maximum Pixel value in marked-row {cut_row}: {np.max(y_red_h)}"; print(msg3_red) + msg4_red = f"Maximum Pixel value in marked-column {cut_column}: {np.max(y_red_v)}"; print(msg4_red) + r_copy = r.copy(); r_copy[cut_row:cut_row+plot_marker_offset,:]=255; r_copy[:, cut_column:cut_column+plot_marker_offset]=255 # manipulate image for displaying in marked plot + + # Green channel + x_green_h = np.linspace(0, g.shape[1]-1, g.shape[1]); y_green_h = g[cut_row,:] # data horizontal cut + x_green_v = np.linspace(0, g.shape[0]-1, g.shape[0]); y_green_v = g[:,cut_column] # data vertical cut + + msg1_green = f"Maximum Pixel value in green channel: {max_pixelvalue_green}"; print(msg1_green) + msg2_green = f"Index of max Value in green channel (row, colum): {idx_max_pixelvalue_green}"; print(msg2_green) + 
msg3_green = f"Maximum Pixel value in marked-row {cut_row}: {np.max(y_green_h)}"; print(msg3_green) + msg4_green = f"Maximum Pixel value in marked-column {cut_column}: {np.max(y_green_v)}"; print(msg4_green) + g_copy = g.copy(); g_copy[cut_row:cut_row+plot_marker_offset,:]=255; g_copy[:, cut_column:cut_column+plot_marker_offset]=255 # manipulate image for displaying in marked plot + + # Blue channel + x_blue_h = np.linspace(0, b.shape[1]-1, b.shape[1]); y_blue_h = b[cut_row,:] # data horizontal cut + x_blue_v = np.linspace(0, b.shape[0]-1, b.shape[0]); y_blue_v = b[:,cut_column] # data vertical cut + + msg1_blue = f"Maximum Pixel value in blue channel: {max_pixelvalue_blue}"; print(msg1_blue) + msg2_blue = f"Index of max Value in blue channel (row, colum): {idx_max_pixelvalue_blue}"; print(msg2_blue) + msg3_blue = f"Maximum Pixel value in marked-row {cut_row}: {np.max(y_blue_h)}"; print(msg3_blue) + msg4_blue = f"Maximum Pixel value in marked-column {cut_column}: {np.max(y_blue_v)}"; print(msg4_blue) + b_copy = b.copy(); b_copy[cut_row:cut_row+plot_marker_offset,:]=255; b_copy[:, cut_column:cut_column+plot_marker_offset]=255 # manipulate image for displaying in marked plot + + # Create Plots + fig1, ((ax_orig_1,ax01,ax02,ax03),(ax_red_1, ax_red_2, ax_red_3, ax_red_4), + (ax_green_1, ax_green_2, ax_green_3, ax_green_4),(ax_blue_1, ax_blue_2, ax_blue_3, ax_blue_4)) \ + = plt.subplots(4, 4, figsize=(30,25)) + fig1.suptitle(f'Image: {image_name}', y=0.9) + + yticks=np.append(np.arange(0,230,25), 255) # set yticks for cuts + xlim_max_h=r.shape[1] # xlim for horizontal cut. No special reason for choosing red channel. + xlim_max_v=r.shape[0] # xlim for vertical cut. No special reason for choosing red channel. 
+ + ax_orig_1.imshow(img_rgb); ax_orig_1.set_title("Original Image"); + ax_orig_1.set_xlabel('Width=H=Columns'); ax_orig_1.set_ylabel('Heigth=V=Rows') + + # red channel + ax_red_1.imshow(r, cmap = 'gray'); ax_red_1.set_title("Red Channel"); + ax_red_1.set_xlabel('Width=H=Columns'); ax_red_1.set_ylabel('Heigth=V=Rows') + ax_red_2.imshow(r_copy, cmap = 'gray'); ax_red_2.set_title("Red Channel - marked"); + ax_red_2.set_xlabel('Width=H=Columns'); ax_red_2.set_ylabel('Heigth=V=Rows') + ax_red_3.plot(x_red_h,y_red_h, linewidth=2.0); ax_red_3.set_title("Horizontal Cut"); + ax_red_3.grid(True); ax_red_3.set_ylim(ymin=0, ymax=260); ax_red_3.set_yticks(yticks); ax_red_3.set_xlim(0,xlim_max_h) + ax_red_3.set_xlabel('Width=H=Columns'); ax_red_3.set_ylabel('Pixel Value') + ax_red_4.plot(x_red_v,y_red_v, linewidth=2.0); ax_red_4.set_title("Vertical Cut"); + ax_red_4.grid(True); ax_red_4.set_ylim(ymin=0, ymax=260); ax_red_4.set_yticks(yticks); ax_red_4.set_xlim(0, xlim_max_v) + ax_red_4.set_xlabel('Heigth=V=Rows'); ax_red_4.set_ylabel('Pixel Value') + + # green channel + ax_green_1.imshow(g, cmap = 'gray'); ax_green_1.set_title("Green Channel"); + ax_green_1.set_xlabel('Width=H=Columns'); ax_green_1.set_ylabel('Heigth=V=Rows') + ax_green_2.imshow(g_copy, cmap = 'gray'); ax_green_2.set_title("Green Channel - marked"); + ax_green_2.set_xlabel('Width=H=Columns'); ax_green_2.set_ylabel('Heigth=V=Rows') + ax_green_3.plot(x_green_h,y_green_h, linewidth=2.0); ax_green_3.set_title("Horizontal Cut"); + ax_green_3.grid(True); ax_green_3.set_ylim(ymin=0, ymax=260); ax_green_3.set_yticks(yticks); ax_green_3.set_xlim(0,xlim_max_h) + ax_green_3.set_xlabel('Width=H=Columns'); ax_green_3.set_ylabel('Pixel Value') + ax_green_4.plot(x_green_v,y_green_v, linewidth=2.0); ax_green_4.set_title("Vertical Cut"); + ax_green_4.grid(True); ax_green_4.set_ylim(ymin=0, ymax=260); ax_green_4.set_yticks(yticks); ax_green_4.set_xlim(0, xlim_max_v) + ax_green_4.set_xlabel('Heigth=V=Rows'); 
ax_green_4.set_ylabel('Pixel Value') + + # blue channel + ax_blue_1.imshow(b, cmap = 'gray'); ax_blue_1.set_title("Blue Channel"); + ax_blue_1.set_xlabel('Width=H=Columns'); ax_blue_1.set_ylabel('Heigth=V=Rows') + ax_blue_2.imshow(b_copy, cmap = 'gray'); ax_blue_2.set_title("Blue Channel - marked"); + ax_blue_2.set_xlabel('Width=H=Columns'); ax_blue_2.set_ylabel('Heigth=V=Rows') + ax_blue_3.plot(x_blue_h,y_blue_h, linewidth=2.0); ax_blue_3.set_title("Horizontal Cut"); + ax_blue_3.grid(True); ax_blue_3.set_ylim(ymin=0, ymax=260); ax_blue_3.set_yticks(yticks); ax_blue_3.set_xlim(0,xlim_max_h) + ax_blue_3.set_xlabel('Width=H=Columns'); ax_blue_3.set_ylabel('Pixel Value') + ax_blue_4.plot(x_blue_v,y_blue_v, linewidth=2.0); ax_blue_4.set_title("Vertical Cut"); + ax_blue_4.grid(True); ax_blue_4.set_ylim(ymin=0, ymax=260); ax_blue_4.set_yticks(yticks); ax_blue_4.set_xlim(0, xlim_max_v) + ax_blue_4.set_xlabel('Heigth=V=Rows'); ax_blue_4.set_ylabel('Pixel Value') + + + # Calculate overall brightness per pixel of the image. Mittelere Helligkeit pro pixel berechnen. 
+ arithmetic_mean_of_brightness_per_pixel_relative = calc_arithmetic_mean_of_brightness_per_pixel(r,g,b) + if arithmetic_mean_of_brightness_per_pixel_relative >= 0.01: + msg1_brightness = f"Overall brightness per pixel: {round(arithmetic_mean_of_brightness_per_pixel_relative*100,2)} %" #value in percent + elif arithmetic_mean_of_brightness_per_pixel_relative < 0.01: + msg1_brightness = f"Overall brightness per pixel: {round(arithmetic_mean_of_brightness_per_pixel_relative*10e3,2)} per mil" # value in promille + print(msg1_brightness) + + # add pixel stats under Figure + pixelstats_red= '\n'.join([msg1_red, msg2_red, msg3_red, msg4_red]) + pixelstats_green= '\n'.join([msg1_green, msg2_green, msg3_green, msg4_green]) + pixelstats_blue= '\n'.join([msg1_blue, msg2_blue, msg3_blue, msg4_blue]) + pixelstats_overall_brightness = '\n'.join([msg1_brightness]) + pixelstats = '\n\n'.join([f"pixel stats: {msg_dominating_colourchannel} channel dominating", + pixelstats_red, pixelstats_green, pixelstats_blue, pixelstats_overall_brightness]) + text_x_pos = 0.1; text_y_pos = -0.015 # text position: 0,0 is lower-left and 1,1 is upper-right) + fig1.text(text_x_pos, text_y_pos, pixelstats, ha='left') + + # add Comment under Figure + text_x_pos = 0.5; text_y_pos = -0.025 # text position: 0,0 is lower-left and 1,1 is upper-right) + log_filename=None + try: + log_filename=[f for f in os.listdir(PATH_IMAGE_FOLDER) if f.endswith('.txt')][0] + if log_filename: + with open(os.path.join(PATH_IMAGE_FOLDER,log_filename), encoding='utf-8') as f: + log_text = f.readlines() + if comment != "": + comment = '\nPlot Comment: '+comment + log_text.append(comment) + txt=''.join(log_text) + fig1.text(text_x_pos, text_y_pos, txt, ha='left') + except IndexError: + if comment != "": + comment = '\nPlot Comment: '+comment + fig1.text(text_x_pos, text_y_pos, comment, ha='left') + else: + pass + + # handle numpy memmory error on Windows: + switch_overwrite=0 # do not overwrite files, if they exist + if 
switch_overwrite == 1: + fig1.savefig(os.path.join(PATH_IMAGE_FOLDER,f'{image_name}.pdf'), bbox_inches='tight') #save plot + print('Save pdf') + else: + if os.path.isfile(os.path.join(PATH_IMAGE_FOLDER,f'{image_name}.pdf')): + pass # skip this pdf file, because it already exists + else: + fig1.savefig(os.path.join(PATH_IMAGE_FOLDER,f'{image_name}.pdf'), bbox_inches='tight') #save plot + print('Save pdf') + plt.close('all') # close all figures + + print('') # new line for better readability in console + + +# start +def main(): + number_of_CPUS = cpu_count() + + image_filenames=[f for f in os.listdir(PATH_IMAGE_FOLDER) if f.endswith('.png')] + image_filenames.sort() + list_numbers = [x for x in range(len(image_filenames))] + number_of_total_images = len(image_filenames) + list_number_of_total_images = [number_of_total_images]*number_of_total_images # list with n times number of total images + list_COMMENT = [COMMENT]*number_of_total_images + + print(number_of_total_images) + print(len(list_numbers)) + + data_to_pass = list(zip(image_filenames, list_numbers, list_number_of_total_images, list_COMMENT)) + with Pool(number_of_CPUS) as pool: + pool.starmap(image_analysis, iterable=data_to_pass) + + t1 = round(time.perf_counter()/60,2) + print(f'Script finished in {t1} min') + +if __name__ == '__main__': + main() + diff --git a/01_Auswertung/Auswertung_Farbkanaele_V01-00.py b/01_Auswertung/Auswertung_Farbkanaele_V01-00.py new file mode 100644 index 0000000..60df8ca --- /dev/null +++ b/01_Auswertung/Auswertung_Farbkanaele_V01-00.py @@ -0,0 +1,222 @@ +# Date: 18.12.2021 +# Author: Kenan Gömek +# Das Skript stellt die unterschiedlichen Farbkanäle der aufgenommenen Bilder dar und erstellt schnitte, um die Pixelwerte anzuzeigen. 
+# Update:21.12.2021 +# Update Comment: Dominierender Farbkanal wird automatisch erkannt und die Zeilen- und Spaltenmarkierungen werden alle nach dem hellsten +# Punkt im dominierenden Farbkanal gerichtet + +# Import needed Packages +import cv2 as cv +import numpy as np +import time +from datetime import datetime +import os +from matplotlib import pyplot as plt + +# define User Input +# No vowels in path name!' +PATH_IMAGE_FOLDER = r'U:\bwsyncshare\Auswertung' +COMMENT = "" # User Comment for plot + +# select cut mode for determining pixel values +cut_mode = 'auto' # options: 'auto', 'manual' + +ROW = 152 # Row-Number for Cut [Pixels] for cut_mode 'manual' index begins from 0 +COLUMN = 1742 # Column-Number for Cut [Pixels] for cut_mode 'manual' index begins from 0 + + +# define functions +def calc_arithmetic_mean_of_brightness_per_pixel(r, g, b): + """Calculate overall brightness per pixel of the image. Mittelere Helligkeit pro pixel berechnen.""" + r = r.astype('uint16') # set dtype of one colour to uint16, because the sum of 255+255+255 >255 =765 + # the rest will also be uint16 then + image_heigth = r.shape[0] + image_width = r.shape[1] + + number_of_colour_channels = 3 + arithmetic_mean_of_brightness_image = np.sum((r+g+b)/number_of_colour_channels) + arithmetic_mean_of_brightness_per_pixel = arithmetic_mean_of_brightness_image/(image_width*image_heigth) + + max_possible_brightness = 255 # maximum possible brightness for an uint8 + arithmetic_mean_of_brightness_per_pixel_relative = arithmetic_mean_of_brightness_per_pixel/max_possible_brightness # range: 0-1 + + return arithmetic_mean_of_brightness_per_pixel_relative + + +# start +image_filenames = [f for f in os.listdir(PATH_IMAGE_FOLDER) if f.endswith('.png')] +image_filenames.sort() +for image_number, image_name in enumerate(image_filenames): + print(f"Current image {image_number+1}/{len(image_filenames)}: {image_name}") + + # Split and Merge colour channels + img_bgr = 
cv.imread(os.path.join(PATH_IMAGE_FOLDER,image_name), cv.IMREAD_COLOR) + b, g, r = cv.split(img_bgr) + img_rgb = img_bgr[:, :, ::-1] + + # Create Cuts and display Pixel Values + image_width = r.shape[1] + if image_width < 1000: + plot_marker_offset = 4 # Thickness of marker-line in plot in pixels + elif image_width >= 1000: + plot_marker_offset = 10 # Thickness of marker-line in plot in pixels + + + # Identify dominating colour channel and set cut row and column + max_pixelvalue_red = r.max(); max_pixelvalue_green = g.max(); max_pixelvalue_blue = b.max() + max_pixelvalue = np.max([max_pixelvalue_red, max_pixelvalue_green, max_pixelvalue_blue]) + + idx_max_pixelvalue_red = np.unravel_index(r.argmax(), r.shape) # row=idx_..[0] column=idx_..[1] + idx_max_pixelvalue_green = np.unravel_index(g.argmax(), g.shape) # row=idx_..[0] column=idx_..[1] + idx_max_pixelvalue_blue = np.unravel_index(b.argmax(), b.shape) # row=idx_..[0] column=idx_..[1] + if max_pixelvalue == max_pixelvalue_red: + idx_max_pixelvalue = idx_max_pixelvalue_red; msg_dominating_colourchannel = 'red' + elif max_pixelvalue == max_pixelvalue_green: + idx_max_pixelvalue = idx_max_pixelvalue_green; msg_dominating_colourchannel = 'green' + elif max_pixelvalue == max_pixelvalue_blue: + idx_max_pixelvalue = idx_max_pixelvalue_blue; msg_dominating_colourchannel = 'blue' + + if cut_mode == 'auto': + cut_row = idx_max_pixelvalue[0]; cut_column = idx_max_pixelvalue[1] + elif cut_mode == 'manual': + cut_row = ROW; cut_column = COLUMN + else: + print('Wrong cut_mode. End script.'); quit() + + # Red channel + # Info linspace(start, stop, num): + # stop: The end value of the sequence --> stop is included + # num: Number of samples to generate. + # Because Array stars at index 0: e.g. image with 3280x2464 Pixels: for 3280 Pixels you need 3280 values between 0 and 3279 + # check: x_red_h must have 3280 values?? 
--> Yes from 0 to 3279 in 1 steps + x_red_h = np.linspace(0, r.shape[1]-1, r.shape[1]); y_red_h = r[cut_row,:] # data horizontal cut ->width (e.g.: 3280) + x_red_v = np.linspace(0, r.shape[0]-1, r.shape[0]); y_red_v = r[:,cut_column] # data vertical cut ->height (e.g.: 2464) + + msg1_red = f"Maximum Pixel value in red channel: {max_pixelvalue_red}"; print(msg1_red) + msg2_red = f"Index of max Value in red channel (row, colum): {idx_max_pixelvalue_red}"; print(msg2_red) + msg3_red = f"Maximum Pixel value in marked-row {cut_row}: {np.max(y_red_h)}"; print(msg3_red) + msg4_red = f"Maximum Pixel value in marked-column {cut_column}: {np.max(y_red_v)}"; print(msg4_red) + r_copy = r.copy(); r_copy[cut_row:cut_row+plot_marker_offset,:]=255; r_copy[:, cut_column:cut_column+plot_marker_offset]=255 # manipulate image for displaying in marked plot + + # Green channel + x_green_h = np.linspace(0, g.shape[1]-1, g.shape[1]); y_green_h = g[cut_row,:] # data horizontal cut + x_green_v = np.linspace(0, g.shape[0]-1, g.shape[0]); y_green_v = g[:,cut_column] # data vertical cut + + msg1_green = f"Maximum Pixel value in green channel: {max_pixelvalue_green}"; print(msg1_green) + msg2_green = f"Index of max Value in green channel (row, colum): {idx_max_pixelvalue_green}"; print(msg2_green) + msg3_green = f"Maximum Pixel value in marked-row {cut_row}: {np.max(y_green_h)}"; print(msg3_green) + msg4_green = f"Maximum Pixel value in marked-column {cut_column}: {np.max(y_green_v)}"; print(msg4_green) + g_copy = g.copy(); g_copy[cut_row:cut_row+plot_marker_offset,:]=255; g_copy[:, cut_column:cut_column+plot_marker_offset]=255 # manipulate image for displaying in marked plot + + # Blue channel + x_blue_h = np.linspace(0, b.shape[1]-1, b.shape[1]); y_blue_h = b[cut_row,:] # data horizontal cut + x_blue_v = np.linspace(0, b.shape[0]-1, b.shape[0]); y_blue_v = b[:,cut_column] # data vertical cut + + msg1_blue = f"Maximum Pixel value in blue channel: {max_pixelvalue_blue}"; print(msg1_blue) + 
msg2_blue = f"Index of max Value in blue channel (row, colum): {idx_max_pixelvalue_blue}"; print(msg2_blue) + msg3_blue = f"Maximum Pixel value in marked-row {cut_row}: {np.max(y_blue_h)}"; print(msg3_blue) + msg4_blue = f"Maximum Pixel value in marked-column {cut_column}: {np.max(y_blue_v)}"; print(msg4_blue) + b_copy = b.copy(); b_copy[cut_row:cut_row+plot_marker_offset,:]=255; b_copy[:, cut_column:cut_column+plot_marker_offset]=255 # manipulate image for displaying in marked plot + + # Create Plots + fig1, ((ax_orig_1,ax01,ax02,ax03),(ax_red_1, ax_red_2, ax_red_3, ax_red_4), + (ax_green_1, ax_green_2, ax_green_3, ax_green_4),(ax_blue_1, ax_blue_2, ax_blue_3, ax_blue_4)) \ + = plt.subplots(4, 4, figsize=(30,25)) + fig1.suptitle(f'Image: {image_name}', y=0.9) + + yticks=np.append(np.arange(0,230,25), 255) #set yticks for cuts + xlim_max_h=r.shape[1] # xlim for horizontal cut. No special reason for choosing red channel. + xlim_max_v=r.shape[0] # xlim for vertical cut. No special reason for choosing red channel. 
+ + ax_orig_1.imshow(img_rgb); ax_orig_1.set_title("Original Image"); + ax_orig_1.set_xlabel('Width=H=Columns'); ax_orig_1.set_ylabel('Heigth=V=Rows') + + # red channel + ax_red_1.imshow(r, cmap = 'gray'); ax_red_1.set_title("Red Channel"); + ax_red_1.set_xlabel('Width=H=Columns'); ax_red_1.set_ylabel('Heigth=V=Rows') + ax_red_2.imshow(r_copy, cmap = 'gray'); ax_red_2.set_title("Red Channel - marked"); + ax_red_2.set_xlabel('Width=H=Columns'); ax_red_2.set_ylabel('Heigth=V=Rows') + ax_red_3.plot(x_red_h,y_red_h, linewidth=2.0); ax_red_3.set_title("Horizontal Cut"); + ax_red_3.grid(True); ax_red_3.set_ylim(ymin=0, ymax=260); ax_red_3.set_yticks(yticks); ax_red_3.set_xlim(0,xlim_max_h) + ax_red_3.set_xlabel('Width=H=Columns'); ax_red_3.set_ylabel('Pixel Value') + ax_red_4.plot(x_red_v,y_red_v, linewidth=2.0); ax_red_4.set_title("Vertical Cut"); + ax_red_4.grid(True); ax_red_4.set_ylim(ymin=0, ymax=260); ax_red_4.set_yticks(yticks); ax_red_4.set_xlim(0, xlim_max_v) + ax_red_4.set_xlabel('Heigth=V=Rows'); ax_red_4.set_ylabel('Pixel Value') + + # green channel + ax_green_1.imshow(g, cmap = 'gray'); ax_green_1.set_title("Green Channel"); + ax_green_1.set_xlabel('Width=H=Columns'); ax_green_1.set_ylabel('Heigth=V=Rows') + ax_green_2.imshow(g_copy, cmap = 'gray'); ax_green_2.set_title("Green Channel - marked"); + ax_green_2.set_xlabel('Width=H=Columns'); ax_green_2.set_ylabel('Heigth=V=Rows') + ax_green_3.plot(x_green_h,y_green_h, linewidth=2.0); ax_green_3.set_title("Horizontal Cut"); + ax_green_3.grid(True); ax_green_3.set_ylim(ymin=0, ymax=260); ax_green_3.set_yticks(yticks); ax_green_3.set_xlim(0,xlim_max_h) + ax_green_3.set_xlabel('Width=H=Columns'); ax_green_3.set_ylabel('Pixel Value') + ax_green_4.plot(x_green_v,y_green_v, linewidth=2.0); ax_green_4.set_title("Vertical Cut"); + ax_green_4.grid(True); ax_green_4.set_ylim(ymin=0, ymax=260); ax_green_4.set_yticks(yticks); ax_green_4.set_xlim(0, xlim_max_v) + ax_green_4.set_xlabel('Heigth=V=Rows'); 
ax_green_4.set_ylabel('Pixel Value') + + # blue channel + ax_blue_1.imshow(b, cmap = 'gray'); ax_blue_1.set_title("Blue Channel"); + ax_blue_1.set_xlabel('Width=H=Columns'); ax_blue_1.set_ylabel('Heigth=V=Rows') + ax_blue_2.imshow(b_copy, cmap = 'gray'); ax_blue_2.set_title("Blue Channel - marked"); + ax_blue_2.set_xlabel('Width=H=Columns'); ax_blue_2.set_ylabel('Heigth=V=Rows') + ax_blue_3.plot(x_blue_h,y_blue_h, linewidth=2.0); ax_blue_3.set_title("Horizontal Cut"); + ax_blue_3.grid(True); ax_blue_3.set_ylim(ymin=0, ymax=260); ax_blue_3.set_yticks(yticks); ax_blue_3.set_xlim(0,xlim_max_h) + ax_blue_3.set_xlabel('Width=H=Columns'); ax_blue_3.set_ylabel('Pixel Value') + ax_blue_4.plot(x_blue_v,y_blue_v, linewidth=2.0); ax_blue_4.set_title("Vertical Cut"); + ax_blue_4.grid(True); ax_blue_4.set_ylim(ymin=0, ymax=260); ax_blue_4.set_yticks(yticks); ax_blue_4.set_xlim(0, xlim_max_v) + ax_blue_4.set_xlabel('Heigth=V=Rows'); ax_blue_4.set_ylabel('Pixel Value') + + + # Calculate overall brightness per pixel of the image. Mittelere Helligkeit pro pixel berechnen. 
+ arithmetic_mean_of_brightness_per_pixel_relative = calc_arithmetic_mean_of_brightness_per_pixel(r,g,b) + if arithmetic_mean_of_brightness_per_pixel_relative >= 0.01: + msg1_brightness = f"Overall brightness per pixel: {round(arithmetic_mean_of_brightness_per_pixel_relative*100,2)} %" #value in percent + elif arithmetic_mean_of_brightness_per_pixel_relative < 0.01: + msg1_brightness = f"Overall brightness per pixel: {round(arithmetic_mean_of_brightness_per_pixel_relative*10e3,2)} per mil" # value in promille + print(msg1_brightness) + + # add pixel stats under Figure + pixelstats_red= '\n'.join([msg1_red, msg2_red, msg3_red, msg4_red]) + pixelstats_green= '\n'.join([msg1_green, msg2_green, msg3_green, msg4_green]) + pixelstats_blue= '\n'.join([msg1_blue, msg2_blue, msg3_blue, msg4_blue]) + pixelstats_overall_brightness = '\n'.join([msg1_brightness]) + pixelstats = '\n\n'.join([f"pixel stats: {msg_dominating_colourchannel} channel dominating", + pixelstats_red, pixelstats_green, pixelstats_blue, pixelstats_overall_brightness]) + text_x_pos = 0.1; text_y_pos = -0.015 # text position: 0,0 is lower-left and 1,1 is upper-right) + fig1.text(text_x_pos, text_y_pos, pixelstats, ha='left') + + # add Comment under Figure + text_x_pos = 0.5; text_y_pos = -0.025 # text position: 0,0 is lower-left and 1,1 is upper-right) + log_filename=None + try: + log_filename=[f for f in os.listdir(PATH_IMAGE_FOLDER) if f.endswith('.txt')][0] + if log_filename: + with open(os.path.join(PATH_IMAGE_FOLDER,log_filename), encoding='utf-8') as f: + log_text = f.readlines() + if COMMENT != "": + COMMENT = '\nPlot Comment: '+COMMENT + log_text.append(COMMENT) + txt=''.join(log_text) + fig1.text(text_x_pos, text_y_pos, txt, ha='left') + except IndexError: + if COMMENT != "": + COMMENT = '\nPlot Comment: '+COMMENT + fig1.text(text_x_pos, text_y_pos, COMMENT, ha='left') + else: + pass + + # handle numpy memmory error on Windows: + switch_overwrite=0 # do not overwrite files, if they exist + if 
switch_overwrite == 1: + fig1.savefig(os.path.join(PATH_IMAGE_FOLDER,f'{image_name}.pdf'), bbox_inches='tight') #save plot + print('Save pdf') + else: + if os.path.isfile(os.path.join(PATH_IMAGE_FOLDER,f'{image_name}.pdf')): + pass # skip this pdf file, because it already exists + else: + fig1.savefig(os.path.join(PATH_IMAGE_FOLDER,f'{image_name}.pdf'), bbox_inches='tight') #save plot + print('Save pdf') + plt.close('all') # close all figures + + print('') # new line for better readability in console \ No newline at end of file diff --git a/01_How-To-Control-LEDs-Manually.txt b/01_How-To-Control-LEDs-Manually.txt new file mode 100644 index 0000000..1a2e49f --- /dev/null +++ b/01_How-To-Control-LEDs-Manually.txt @@ -0,0 +1,20 @@ +You have to connect the serial bus cable from the uC to the Raspberry. +It was connected to the lower middle USB-Port in this case, + +1. start bash +(2. workon cv-4.5.3.56) +3. python +4. import serial +5. s = serial.Serial('/dev/ttyACM0', 9600) + +Now you can control the LEDs. + +To let the stripe light up in one color: +s.write(str.encode('RRR-GGG-BBB')) +eg: s.write(str.encode('255-000-000')) + +To let the stripe light up in one of the 20 color patterns: +s.write(str.encode('color_pattern_XX')), where x is 01-20 +eg: s.write(str.encode('color_pattern_01')) + +Now you can start a program to detect the leds \ No newline at end of file diff --git a/02_Kameraaufnahme/01_How-To-Start-Scripts.txt b/02_Kameraaufnahme/01_How-To-Start-Scripts.txt new file mode 100644 index 0000000..1dadafc --- /dev/null +++ b/02_Kameraaufnahme/01_How-To-Start-Scripts.txt @@ -0,0 +1,3 @@ +1. start bash +2. workon cv-4.5.3.56 +3. 
python $SCRIPT_NAME$ \ No newline at end of file diff --git a/02_Kameraaufnahme/PiCamera_shoot_manual_image.py b/02_Kameraaufnahme/PiCamera_shoot_manual_image.py new file mode 100644 index 0000000..96af1c9 --- /dev/null +++ b/02_Kameraaufnahme/PiCamera_shoot_manual_image.py @@ -0,0 +1,188 @@ +# Creation Date: 02.03.2022 +# Author: Kenan Gömek +# This script takes pictures with Picameras VideoPort like it will be used to work with OpenCV and saves it with OpenCV to have the real use case pictures. +# This script is designed for shooting manually images with 'i' +# Press 'q' to exit +# Change camera parameters with v,b,n,m,x,c,o. See code for more information. + + +import cv2 as cv +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os + +import numpy as np +import matplotlib.pyplot as plt + + +# Define camera settings + +# divide origin resoluton by a number, to have the origin aspect ratio +# RESOLUTION = (3280, 2464) # Max Photo-Resolution CAM03 and CAM04 # no image with PiCamera Videoport at this Resolution.. Probably GPU Memory and CPU issues. +# RESOLUTION = (1640,1232) # 2nd best Resolution for CAM03 and CAM04 with FUll FOV (2x2 binning) # Mode 4 +SENSOR_MODE = 4 # corresponding sensor mode to resolution 1640x1232 +# OUTPUT_RESOLUTION = (960, 720) +OUTPUT_RESOLUTION = (416, 320) +#OUTPUT_RESOLUTION = (416, 320) # (1640x1232)/4=(410,308) + # (410,308) is being upscaled to (416,320) from ISP (warning in bash), but image will have still (410,308) pixels. +# OUTPUT_RESOLUTION = (820, 616) # (1640x1232)/2=(820,616) + # bash: frame size rounded up from 820x616 to 832x624 + + +AWB_MODE = 'off' # Auto white balance mode +# AWB_GAINS = (Fraction(485, 256), Fraction(397, 256)) # White Balance Gains to have colours read correctly: (red, blue) +AWB_GAINS = (1.395, 1.15) # White Balance Gains to have colours read correctly: (red, blue). Int, float or fraction are valid. 
+BRIGHTNESS = 25 # sets the brightness setting of the camera. default is 50. [0-100] + # the brighter, the brighter the LEDs and the higher the RGB values and vice versa! +CONTRAST = 100 # sets the contrast setting of the camera. The default value is 0. [-100 ... 100] + +# ISO = 100 # ISO value +ISO = 320 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 25 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. My default: 2 s + +# Define Funcions +def take_image_picamera_opencv(shutter_speed): + folder_exists=False + # Initialise Camera + with picamera.PiCamera() as camera: + with PiRGBArray(camera) as output: + # Set camera settings + camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode + camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage! + camera.framerate = FRAMERATE + + camera.awb_mode = AWB_MODE + camera.awb_gains = AWB_GAINS + + camera.iso = ISO + camera.shutter_speed = shutter_speed + # it was found that, you have to set the right shutter speed at the first initalisation of the current runtime of the program. + # The gains (analog, digital) will adjust to this set up. + # After the gains are fixed, they will never change! even if you change the shutter speed during the runtime. + # To get consistent brightness values, set the right shutter speed at initalisation once. + + time.sleep(SLEEP_TIME) # wait for iso gains and digital_gain and analog_gain to settle before fixing the gains with exposure_mode = off + camera.exposure_mode = EXPOSURE_MODE + + time.sleep(1) # wait before applying brightness and contrast + camera.brightness = BRIGHTNESS + camera.contrast = CONTRAST + time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings + + # camera.start_preview() # show camera preview through PiCamera interface + # camera.annotate_frame_num=True # Controls whether the current frame number is drawn as an annotation. 
+ + for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)): + framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1 + image = frame.array # raw NumPy array without JPEG encoding + + camera_exposure_speed = camera.exposure_speed # Retrieves the current shutter speed of the camera. + + cv.imshow("Current Frame", image) # display the image without text + + output.truncate(0) # clear the stream for next frame + + # Only uncomment following code if you display the image. No errors if not commented, but higher fps if commented. + # if q is pressed, break from loop. + pressed_key = cv.waitKey(2) & 0xff + if pressed_key == ord('q'): + break + elif pressed_key == ord('i'): # Take image from manipulated image if i is pressed + if not folder_exists: + path_saveFolder, folder_exists = create_folder_for_captures() + + now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M-%S.%f")[:-3] + cv.imwrite(f"{path_saveFolder}/img_ss{shutter_speed}_iso{ISO}_res{OUTPUT_RESOLUTION[0]}x{OUTPUT_RESOLUTION[1]}_Date {d1}.png", image) + print('took image!') + elif pressed_key == ord('v'): # increase shutterspeed by 5 + shutter_speed = round(shutter_speed+5) + camera.shutter_speed = shutter_speed + time.sleep(2) # wait to shutter speed is applied before querying exposure speed + exposure_speed = camera.exposure_speed + print(f"shutter speed set to: {shutter_speed}") + print(f"retrieved shutter speed: {exposure_speed}") + elif pressed_key == ord('b'): # increase shutterspeed by 50 + shutter_speed = round(shutter_speed+50) + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + time.sleep(2) # wait to shutter speed is applied before querying exposure speed + exposure_speed = camera.exposure_speed + print(f"retrieved shutter speed: {exposure_speed}") + elif pressed_key == ord('n'): # increase shutterspeed by 500 + shutter_speed = round(shutter_speed+500) + print(f"shutter speed set to: 
{shutter_speed}") + time.sleep(2) # wait to shutter speed is applied before querying exposure speed + exposure_speed = camera.exposure_speed + print(f"retrieved shutter speed: {exposure_speed}") + elif pressed_key == ord('m'): # max shutterspeed + shutter_speed = round(1/FRAMERATE*1e6) + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + time.sleep(2) # wait to shutter speed is applied before querying exposure speed + exposure_speed = camera.exposure_speed + print(f"retrieved shutter speed: {exposure_speed}") + elif pressed_key == ord('x'): # decrease shutterspeed by 500 + shutter_speed = round(shutter_speed-500) + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + time.sleep(2) # wait to shutter speed is applied before querying exposure speed + exposure_speed = camera.exposure_speed + print(f"retrieved shutter speed: {exposure_speed}") + elif pressed_key == ord('c'): # decrease shutterspeed by 50 + shutter_speed = round(shutter_speed-50) + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + time.sleep(2) # wait to shutter speed is applied before querying exposure speed + exposure_speed = camera.exposure_speed + print(f"retrieved shutter speed: {exposure_speed}") + elif pressed_key == ord('o'): # set shutterspeed to 0 + shutter_speed = 0 + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + time.sleep(2) # wait to shutter speed is applied before querying exposure speed + exposure_speed = camera.exposure_speed + print(f"retrieved shutter speed: {exposure_speed}") + + +def create_folder_for_captures(): + # Create folder for saving the captured pictures + now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M") + path_cwd = os.getcwd() + + path_saveFolder = path_cwd+r"/Capture_"+d1 + try: + os.mkdir(path_saveFolder) + folder_exists = True + except OSError: + print("Error! 
Ending script.") + quit() + + return path_saveFolder, folder_exists + + +# Start Script + +# start capture series for different shutter speeds +print('start caputure...') + +take_image_picamera_opencv(shutter_speed=50) +# take_image_picamera_opencv(shutter_speed=round(1/FRAMERATE*1e6)) # max shutter-speed depending on fps: 1/2 fps = 500 000 µs + +# End Script + +cv.destroyAllWindows() + +print('Script finished') + + + + + + diff --git a/02_Kameraaufnahme/Rest/mp_raspividyuv_fast_capture_cannyedge.py b/02_Kameraaufnahme/Rest/mp_raspividyuv_fast_capture_cannyedge.py new file mode 100644 index 0000000..b08b57d --- /dev/null +++ b/02_Kameraaufnahme/Rest/mp_raspividyuv_fast_capture_cannyedge.py @@ -0,0 +1,112 @@ +# This script uses multi processing and raspividyuv to capture frames @35 fps with canny edge detection +# To-Do: +# -set parameter for camera +# -typos and uniformity +# -comment things better and make more steps for easier understanding for non-python-people (or maybe proficiency level beginner) +# -capture led strips and save video +# -How about OV-Sensor? because putting camera in sensor mode 4 and requesting image with less resolution drops fps. +# -Ist blitzen auch bei diesem skript da??!?!?!??!? 
+ +import cv2 as cv +import numpy as np +import subprocess as sp +import time +import atexit + +frames = [] # stores the video sequence for the demo +max_frames =500 + +N_frames = 0 + +# Video capture parameters +(w,h) = (416,320) # height must be multiple of 32 and width multiple of 16 +colour_channels = 3 +bytesPerFrame = w * h * colour_channels +fps = 35 # setting to 250 will request the maximum framerate possible + +sensor_mode = 4 + +# "raspividyuv" is the command that provides camera frames in YUV format +# "--output -" specifies stdout as the output +# "--timeout 0" specifies continuous video +# "--luma" discards chroma channels, only luminance is sent through the pipeline +# see "raspividyuv --help" for more information on the parameters +# videoCmd = "raspividyuv -w "+str(w)+" -h "+str(h)+" --output - --timeout 0 --framerate "+str(fps)+" --rgb --nopreview --mode "+str(sensor_mode) +videoCmd = f"raspividyuv -w {w} -h {h} --output - --timeout 0 --framerate {fps} --rgb --nopreview --mode {sensor_mode}" + # with sensor mode 38 fps instead of 72 fps --> How about Camera OV-Sensor? 
+# videoCmd = "raspividyuv -w "+str(w)+" -h "+str(h)+" --output - --timeout 0 --framerate "+str(fps)+" --rgb --nopreview" +videoCmd = videoCmd.split() # Popen requires that each parameter is a separate string + +cameraProcess = sp.Popen(videoCmd, stdout=sp.PIPE) # start the camera +atexit.register(cameraProcess.terminate) # this closes the camera process in case the python scripts exits unexpectedly + +# wait for the first frame and discard it (only done to measure time more accurately) +rawStream = cameraProcess.stdout.read(bytesPerFrame) + +print("Start...") + +start_time = time.time() + +while True: + cameraProcess.stdout.flush() # discard any frames that we were not able to process in time + + frame = np.frombuffer(cameraProcess.stdout.read(bytesPerFrame), dtype=np.uint8) # raw NumPy array without JPEG encoding + + if frame.size != bytesPerFrame: + print("Error: Camera stream closed unexpectedly") + break + frame.shape = (h,w,colour_channels) # set dimensions for numpy array --> from (921600,) to (480,640,3) + + + # do the processing here with OpenCV + + frame = cv.cvtColor(frame,cv.COLOR_BGR2RGB) # convert frame to rgb + + # test + frame_gs=cv.cvtColor(frame,cv.COLOR_RGB2GRAY) + frame = cv.Canny(frame_gs, 50,150) + + + frames.append(frame) # save the frame (for the demo) + + N_frames += 1 + + #test + #put text + font = cv.FONT_HERSHEY_SIMPLEX # font + fontScale = 1 # fontScale + color = (255, 255, 255) # Font colour in BGR + thickness = 1 # Line thickness in px + + # set text position + frame_width = int(frame.shape[1]) + frame_height = int(frame.shape[0]) + text_start_position_Y = int(round(frame_height*0.12)) # start position of text in pixels 12 % of frame height + text_linespacing = 50 # line spacing between two strings in pixels + # text_start_position_X = int(frame_width/4) # start text from 1/4 of image width + text_start_position_X = int(0) # start text from left edge of image + pos_1 = (text_start_position_X, text_start_position_Y) + text_line_1 = 
f"Frame: {N_frames}" + cv.putText(frame, text_line_1, pos_1, font, fontScale, color, thickness, cv.LINE_AA) + + # if N_frames > max_frames: break #if i deactivate cv.imshow i can control end of program with this parameter. + + + cv.imshow("Current Frame", frame) # display the image + pressed_key = cv.waitKey(1) & 0xff + if pressed_key == ord('q'): + break + + + +cv.destroyAllWindows() + + +end_time = time.time() + +cameraProcess.terminate() # stop the camera + + +elapsed_seconds = end_time-start_time +print(f"Finish! Result: {(N_frames/elapsed_seconds)} fps") + diff --git a/02_Kameraaufnahme/Rest/mp_raspividyuv_fast_rgb.py b/02_Kameraaufnahme/Rest/mp_raspividyuv_fast_rgb.py new file mode 100644 index 0000000..9ce6173 --- /dev/null +++ b/02_Kameraaufnahme/Rest/mp_raspividyuv_fast_rgb.py @@ -0,0 +1,120 @@ +# This script uses multi processing and raspividyuv to capture frames @35 fps with canny edge detection +# To-Do: +# -set parameter for camera +# -typos and uniformity +# -comment things better and make more steps for easier understanding for non-python-people (or maybe proficiency level beginner) +# -capture led strips and save video +# -How about OV-Sensor? because putting camera in sensor mode 4 and requesting image with less resolution drops fps. +# -Ist blitzen auch bei diesem skript da??!?!?!??!? 
+ +import cv2 as cv +import numpy as np +import subprocess as sp +import time +import atexit + +from fractions import Fraction + +max_frames =500 + +N_frames = 0 + +# Video capture parameters +(w,h) = (416,320) # height must be multiple of 16 and width multiple of 16 +colour_channels = 3 +bytesPerFrame = w * h * colour_channels +fps = 30 # setting to 250 will request the maximum framerate possible + +sensor_mode = 4 + + +AWB_MODE = 'off' # Auto white balance mode +AWB_GAINS = f"{485/256},{397/256}" # White Balance Gains to have colours read correctly: (red, blue) +ISO = 100 # ISO value +EXPOSURE_MODE = 'off' + +shutterspeed = 20000 + +# "raspividyuv" is the command that provides camera frames in YUV format +# "--output -" specifies stdout as the output +# "--timeout 0" specifies continuous video +# "--luma" discards chroma channels, only luminance is sent through the pipeline +# see "raspividyuv --help" for more information on the parameters +# videoCmd = "raspividyuv -w "+str(w)+" -h "+str(h)+" --output - --timeout 0 --framerate "+str(fps)+" --rgb --nopreview --mode "+str(sensor_mode) +videoCmd = (f"raspividyuv -w {w} -h {h} --output - --timeout 0 --framerate {fps} --rgb --nopreview " + f"--mode {sensor_mode} --awb {AWB_MODE} -awbg {AWB_GAINS} --ISO {ISO} --exposure {EXPOSURE_MODE} -ss {shutterspeed}") + + # with sensor mode 38 fps instead of 72 fps --> How about Camera OV-Sensor? 
+# videoCmd = "raspividyuv -w "+str(w)+" -h "+str(h)+" --output - --timeout 0 --framerate "+str(fps)+" --rgb --nopreview" +videoCmd = videoCmd.split() # Popen requires that each parameter is a separate string + +cameraProcess = sp.Popen(videoCmd, stdout=sp.PIPE) # start the camera +atexit.register(cameraProcess.terminate) # this closes the camera process in case the python scripts exits unexpectedly + +# wait for the first frame and discard it (only done to measure time more accurately) +rawStream = cameraProcess.stdout.read(bytesPerFrame) + +print("Start...") + +start_time = time.time() + +while True: + cameraProcess.stdout.flush() # discard any frames that we were not able to process in time + + frame = np.frombuffer(cameraProcess.stdout.read(bytesPerFrame), dtype=np.uint8) # raw NumPy array without JPEG encoding + + if frame.size != bytesPerFrame: + print("Error: Camera stream closed unexpectedly") + break + frame.shape = (h,w,colour_channels) # set dimensions for numpy array --> from (921600,) to (480,640,3) + + + # do the processing here with OpenCV + + r,g,b = cv.split(frame) # OpenCV works with bgr. Image is also speciefied to be captured in bgr. 
+ + frame = cv.cvtColor(frame,cv.COLOR_BGR2RGB) # convert frame to rgb + + N_frames += 1 + + #put text + font = cv.FONT_HERSHEY_SIMPLEX # font + fontScale = 1 # fontScale + color = (255, 255, 255) # Font colour in BGR + thickness = 1 # Line thickness in px + + # set text position + frame_width = int(frame.shape[1]) + frame_height = int(frame.shape[0]) + text_start_position_Y = int(round(frame_height*0.12)) # start position of text in pixels 12 % of frame height + text_linespacing = 50 # line spacing between two strings in pixels + # text_start_position_X = int(frame_width/4) # start text from 1/4 of image width + text_start_position_X = int(0) # start text from left edge of image + pos_1 = (text_start_position_X, text_start_position_Y) + text_line_1 = f"Frame: {N_frames}" + cv.putText(frame, text_line_1, pos_1, font, fontScale, color, thickness, cv.LINE_AA) + + # if N_frames > max_frames: break #if i deactivate cv.imshow i can control end of program with this parameter. + + + cv.imshow("Current Frame", frame) # display the image + cv.imshow("b", b) + cv.imshow("g", g) + cv.imshow("r", r) + pressed_key = cv.waitKey(1) & 0xff + if pressed_key == ord('q'): + break + + + +cv.destroyAllWindows() + + +end_time = time.time() + +cameraProcess.terminate() # stop the camera + + +elapsed_seconds = end_time-start_time +print(f"Finish! Result: {(N_frames/elapsed_seconds)} fps") + diff --git a/02_Kameraaufnahme/mp_Vorbereitung/PiCameraVideoPort_mp.pdf b/02_Kameraaufnahme/mp_Vorbereitung/PiCameraVideoPort_mp.pdf new file mode 100644 index 0000000..31b0bb6 --- /dev/null +++ b/02_Kameraaufnahme/mp_Vorbereitung/PiCameraVideoPort_mp.pdf @@ -0,0 +1,4559 @@ +%PDF-1.3 +%߬ +3 0 obj +<> +endobj +4 0 obj +<< +/Length 18561 +>> +stream +0.200025 w +0 G +q +1. 0. 0. -1. 0. 1860. cm +q +1. 0. 0. 1. 0. 0. cm +1. w +0. g +q +1. 0. 0. 1. 0. 0. cm +0. 0. 2483. 1860. re +W +n +q +q +1. g +1. 0. 0. 1. 0. 0. cm +0. 0. m +2483. 0. l +2483. 1860. l +0. 1860. l +h +f +Q +q +q +q +1.5 w +0. G +10. M +1. 
0. 0. 1. 502.5 1259.53125 cm +-150. -840.9375 m +-150. -815.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 352.5 448.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-150. -943.46875 m +-150. -905.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 352.5 358.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-150. -1102.5 m +-150. -1068.53125 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 352.5 195.29289 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-150. -750.9375 m +-150. -725.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 352.5 538.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-150. -660.9375 m +-150. -635.53125 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 352.5 628.29289 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-1.25 -570. m +55.25 -570. l +S +Q +q +/GS1 gs +q +-1. 0. 0. -1. 562.04289 689.53125 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-150. -510.46875 m +-150. -485.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 352.5 778.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-150. -240.9375 m +-150. -190. l +-150. -183.33333 -153.33333 -180. -160. -180. c +-350. -180. l +-356.66667 -180. -360. -183.33333 -360. -190. c +-360. -560. l +-360. -566.66667 -356.66667 -570. -350. -570. c +-304.75 -570. l +S +Q +q +/GS1 gs +q +-1. 0. 0. -1. 202.04289 689.53125 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-1.25 -450. m +55.25 -450. l +S +Q +q +/GS1 gs +q +-1. 0. 0. -1. 562.04289 809.53125 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 
502.5 1259.53125 cm +-150. -420.9375 m +-150. -395.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 352.5 868.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-150. -330.9375 m +-150. -305.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 352.5 958.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-1.25 -270. m +55.25 -270. l +S +Q +q +/GS1 gs +q +-1. 0. 0. -1. 562.04289 989.53125 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. -990.9375 m +810. -875.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 388.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +1410. -990.9375 m +1410. -965.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1912.5 298.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +1800. -990.9375 m +1800. -965.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 2302.5 298.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. -810.9375 m +810. -785.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 478.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 -120. m +610. -120. l +603.33333 -120. 600. -123.33333 600. -130. c +600. -830. l +600. -836.66667 603.33333 -840. 610. -840. c +655.25 -840. l +S +Q +q +/GS1 gs +q +-1. 0. 0. -1. 1162.04289 419.53125 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 540. m +580. 540. l +573.33333 540. 570. 536.66667 570. 530. c +570. -830. l +570. -836.66667 573.33333 -840. 580. -840. c +655.25 -840. l +S +Q +q +/GS1 gs +q +-1. 0. 0. -1. 1162.04289 419.53125 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 
0. 1. 502.5 1259.53125 cm +660. 30. m +580. 30. l +573.33333 30. 570. 26.66667 570. 20. c +570. -830. l +570. -836.66667 573.33333 -840. 580. -840. c +655.25 -840. l +S +Q +q +/GS1 gs +q +-1. 0. 0. -1. 1162.04289 419.53125 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0.0000000000000001 0.0000000000000001 -1. 1110.83333 1281.53125 Tm +(no) Tj +ET +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. -660. m +810. -635.53125 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 628.29289 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. -510.46875 m +810. -485.75 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 778.07414 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. -420. m +810. -395.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 868.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1279.41667 856.69467 Tm +(yes) Tj +ET +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +960. -449.875 m +1010. -449.875 l +1016.66667 -449.875 1020. -446.54167 1020. -439.875 c +1020. -40. l +1020. -33.33333 1016.66667 -30. 1010. -30. c +820. -30. l +813.33333 -30. 810. -26.66667 810. -20. c +810. -5.875 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 1257.94914 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1485.83333 825.65625 Tm +(no) Tj +ET +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. -270. m +810. -245.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 1018.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. -180.9375 m +810. -155.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 1108.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 
502.5 1259.53125 cm +810. 59.875 m +810. 84.9375 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 1348.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0.0000000000000001 0.0000000000000001 -1. 1288.16667 1341.7518 Tm +(yes) Tj +ET +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. 210. m +810. 234.9375 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 1498.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. 299.0625 m +810. 324.9375 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 1588.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. 390. m +810. 414.46875 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 1678.29289 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. 479.53125 m +810. 504.46875 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1312.5 1768.29289 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +1410. -900.9375 m +1410. -875.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 1912.5 388.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +1.5 w +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +1800. -900.9375 m +1800. -875.0625 l +S +Q +q +/GS1 gs +q +0. -1. 1. 0. 2302.5 388.76164 cm +q +1. 0. 0. 1. 0. 0. cm +/Xo1 Do +Q +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-298.75 -899.0625 m +-1.25 -899.0625 l +-1.25 -840.9375 l +-298.75 -840.9375 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 339.5 393.60625 Tm +(Main) Tj +ET +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-298.75 -1062.53125 m +-1.25 -1062.53125 l +-1.25 -943.46875 l +-298.75 -943.46875 l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 
282.16666 215.60625 Tm +(create shared memory for:) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 334.5 233.60625 Tm +(- bools) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 291.16667 251.60625 Tm +(- framenumber \(uint64\)) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 321.5 269.60625 Tm +(- red_frame) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 316.5 287.60625 Tm +(- green frame) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 320.5 305.60625 Tm +(- blue frame) Tj +ET +Q +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-298.75 -809.0625 m +-1.25 -809.0625 l +-1.25 -750.9375 l +-298.75 -750.9375 l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 300.16667 474.60625 Tm +(create 3 processes:) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 281.83334 492.60625 Tm +(one for each color channel) Tj +ET +Q +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-298.75 -719.0625 m +-1.25 -719.0625 l +-1.25 -660.9375 l +-298.75 -660.9375 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 263.5 573.60625 Tm +(start activity of created processes) Tj +ET +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-298.75 -629.53125 m +-1.25 -629.53125 l +-1.25 -510.46875 l +-298.75 -510.46875 l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 259.40833 675.60625 Tm +(get frames @25 fps from Picamera) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 298.16667 693.60625 Tm +(as raw NumPy array) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 290.16667 711.60625 Tm +(without JPEG encoding) Tj +ET +Q +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +61.25 -629.53125 m +358.75 -629.53125 l +358.75 -510.46875 l +61.25 -510.46875 l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 
614.5 675.60625 Tm +(always the newest frame is recieved:) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 620.83334 693.60625 Tm +(processing must be faster than fps) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 619.16666 711.60625 Tm +(if every frame should be processed) Tj +ET +Q +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-298.75 -479.0625 m +-1.25 -479.0625 l +-1.25 -420.9375 l +-298.75 -420.9375 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 292.41667 813.60625 Tm +(increase framenumber) Tj +ET +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +61.25 -479.0625 m +358.75 -479.0625 l +358.75 -420.9375 l +61.25 -420.9375 l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 625.89167 804.60625 Tm +(shm_bools[0] = True \(newframe\)) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 612.91666 822.60625 Tm +(shm_framenumber[0] = framenumber) Tj +ET +Q +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-298.75 -389.0625 m +-1.25 -389.0625 l +-1.25 -330.9375 l +-298.75 -330.9375 l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 256.83334 894.60625 Tm +(split frame into: b, g, r - components) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 270.16666 912.60625 Tm +(and allocate to shared memory) Tj +ET +Q +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-298.75 -299.0625 m +-1.25 -299.0625 l +-1.25 -240.9375 l +-298.75 -240.9375 l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 242.125 984.60625 Tm +(set trigger to "True" for each colorchannel) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 284.83334 1002.60625 Tm +(for start of the processing) Tj +ET +Q +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 
502.5 1259.53125 cm +61.25 -299.0625 m +358.75 -299.0625 l +358.75 -240.9375 l +61.25 -240.9375 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 647.89167 993.60625 Tm +(shm_bools[7,8,9] = True) Tj +ET +Q +Q +Q +q +q +0.51 0.95 1. rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 -1049.0625 m +1950. -1049.0625 l +1950. -990.9375 l +661.25 -990.9375 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1766.45833 243.60625 Tm +(Multiprocessing) Tj +ET +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 -869.0625 m +958.75 -869.0625 l +958.75 -810.9375 l +661.25 -810.9375 l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1263.08333 414.60625 Tm +(read framenumber) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1255.5 432.60625 Tm +(shm_framenumber[0]) Tj +ET +Q +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 -779.0625 m +958.75 -779.0625 l +958.75 -660. l +661.25 -660. l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1250.5 508.075 Tm +(conditions for first start:) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1297.33333 526.075 Tm +(- i = 0) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1223.89167 544.075 Tm +(- shm_bools[0]= True \(newframe\)) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1196.66666 562.075 Tm +(- shm_bools[1,2,3] = False \(p_rgb_finished\)) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1193.66666 580.075 Tm +(- shm_bools[7] = TRUE \(p_red_start_trigger\)) Tj +ET +Q +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 -629.53125 m +958.75 -629.53125 l +958.75 -510.46875 l +661.25 -510.46875 l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1231.16666 675.60625 Tm +(conditions for start processing:) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 
1207.33334 693.60625 Tm +(- framenumber > last_processed_frame) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1193.66666 711.60625 Tm +(- shm_bools[7] = TRUE \(p_red_start_trigger\)) Tj +ET +Q +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. -479.75 m +960. -449.875 l +810. -420. l +660. -449.875 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1233.225 813.73125 Tm +(conditions for first start = True) Tj +ET +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +630. -389.0625 m +990. -389.0625 l +990. -270. l +630. -270. l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1302.83333 907.075 Tm +(set:) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1211.55833 925.075 Tm +(- shm_bools[4] = True \(p_red_started\)) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1158.35834 943.075 Tm +(- shm_bools[7] = False \(p_red_start_trigger\) | reset trigger) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1206.66666 961.075 Tm +(- shm_bools[1] = False \(p_red_finished\)) Tj +ET +Q +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 -239.0625 m +958.75 -239.0625 l +958.75 -180.9375 l +661.25 -180.9375 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1275.16667 1053.60625 Tm +(do processing) Tj +ET +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 -149.0625 m +958.75 -149.0625 l +958.75 -60. l +661.25 -60. l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1297.5 1132.075 Tm +(i += 1) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1301.16667 1150.075 Tm +(set: ) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1209.225 1168.075 Tm +(- shm_bools[1] = True \(p_red_finished\)) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1209. 
1186.075 Tm +(- shm_bools[4] = False \(p_red_started\)) Tj +ET +Q +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +810. 0.125 m +960. 30. l +810. 59.875 l +660. 30. l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1215.66666 1293.60625 Tm +(conditions for start processing = true) Tj +ET +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +630. 90.9375 m +990. 90.9375 l +990. 210. l +630. 210. l +h +B +Q +Q +q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1302.83333 1387.075 Tm +(set:) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1211.55833 1405.075 Tm +(- shm_bools[4] = True \(p_red_started\)) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1158.35834 1423.075 Tm +(- shm_bools[7] = False \(p_red_start_trigger\) | reset trigger) Tj +ET +Q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1206.66666 1441.075 Tm +(- shm_bools[1] = False \(p_red_finished\)) Tj +ET +Q +Q +Q +Q +q +q +1. 0.96 0.32 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 240.9375 m +958.75 240.9375 l +958.75 299.0625 l +661.25 299.0625 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1275.16667 1533.60625 Tm +(do processing) Tj +ET +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 330.9375 m +958.75 330.9375 l +958.75 390. l +661.25 390. l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1212.89167 1624.075 Tm +(shm_bools[1] = True \(p_red_finished\)) Tj +ET +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 420.46875 m +958.75 420.46875 l +958.75 479.53125 l +661.25 479.53125 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1210.91666 1713.60625 Tm +(last_processed_frame = framenumber) Tj +ET +Q +Q +Q +q +q +1. 0.64 0.64 rg +0. G +10. M +1. 0. 0. 1. 
502.5 1259.53125 cm +661.25 510.46875 m +958.75 510.46875 l +958.75 569.53125 l +661.25 569.53125 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1212.66666 1803.60625 Tm +(shm_bools[4] = False \(p_red_started\)) Tj +ET +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +661.25 -959.0625 m +958.75 -959.0625 l +958.75 -900.9375 l +661.25 -900.9375 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1271.5 333.60625 Tm +(processing_red) Tj +ET +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +1261.25 -959.0625 m +1558.75 -959.0625 l +1558.75 -900.9375 l +1261.25 -900.9375 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1864.83333 333.60625 Tm +(processing_green) Tj +ET +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +1651.25 -959.0625 m +1948.75 -959.0625 l +1948.75 -900.9375 l +1651.25 -900.9375 l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 2258.83333 333.60625 Tm +(processing_blue) Tj +ET +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +1261.25 -869.0625 m +1558.75 -869.0625 l +1558.75 570. l +1261.25 570. l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 1847.16666 1114.075 Tm +(same as processing_red) Tj +ET +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +1651.25 -869.0625 m +1948.75 -869.0625 l +1948.75 570. l +1651.25 570. l +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 2237.16666 1114.075 Tm +(same as processing_red) Tj +ET +Q +Q +Q +q +q +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-150. -1102.5 m +-223.1665 -1102.5 -282.5 -1119.2925 -282.5 -1140. c +-282.5 -1160.7075 -223.1665 -1177.5 -150. -1177.5 c +-76.8335 -1177.5 -17.5 -1160.7075 -17.5 -1140. c +-17.5 -1119.2925 -76.8335 -1102.5 -150. -1102.5 c +h +B +Q +Q +q +q +q +BT +/F1 12 Tf +13.8 TL +0. g +1. 0. 0. -1. 
339.83333 123.60625 Tm +(Start) Tj +ET +Q +Q +Q +q +/GS3 gs +0.27 1. 0. rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +-472.5 -645.531 m +437.75 -645.531 l +437.75 -150. l +-472.5 -150. l +h +B +Q +q +q +q +BT +/F1 17 Tf +19.55 TL +0. g +0. -1. -1. 0. 45.86667 960.46562 Tm +(Get frames from picamera) Tj +ET +Q +Q +Q +q +q +/GS4 gs +0.82 0.87 0.93 rg +0. G +10. M +1. 0. 0. 1. 502.5 1259.53125 cm +225. -1229.53125 m +615. -1229.53125 l +615. -1110.46875 l +225. -1110.46875 l +h +B +Q +Q +q +q +q +q +BT +/F2 27 Tf +31.05 TL +0. g +1. 0. 0. -1. 812.225 58.19791 Tm +(mp_vorbereitung) Tj +ET +Q +q +BT +/F2 27 Tf +31.05 TL +0. g +1. 0. 0. -1. 875.95 98.69791 Tm +(V06_04) Tj +ET +Q +q +BT +/F2 27 Tf +31.05 TL +0. g +1. 0. 0. -1. 854.93333 139.19791 Tm +(28.02.2023) Tj +ET +Q +Q +Q +Q +Q +Q +Q +Q +Q +endstream +endobj +1 0 obj +<> +endobj +5 0 obj +<< +/Type /Font +/BaseFont /Helvetica +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +6 0 obj +<< +/Type /Font +/BaseFont /Helvetica-Bold +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +7 0 obj +<< +/Type /Font +/BaseFont /Helvetica-Oblique +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +8 0 obj +<< +/Type /Font +/BaseFont /Helvetica-BoldOblique +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +9 0 obj +<< +/Type /Font +/BaseFont /Courier +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +10 0 obj +<< +/Type /Font +/BaseFont /Courier-Bold +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +11 0 obj +<< +/Type /Font +/BaseFont /Courier-Oblique +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +12 0 obj +<< +/Type /Font +/BaseFont /Courier-BoldOblique +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +13 0 obj +<< +/Type /Font 
+/BaseFont /Times-Roman +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +14 0 obj +<< +/Type /Font +/BaseFont /Times-Bold +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +15 0 obj +<< +/Type /Font +/BaseFont /Times-Italic +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +16 0 obj +<< +/Type /Font +/BaseFont /Times-BoldItalic +/Subtype /Type1 +/Encoding /WinAnsiEncoding +/FirstChar 32 +/LastChar 255 +>> +endobj +17 0 obj +<< +/Type /Font +/BaseFont /ZapfDingbats +/Subtype /Type1 +/FirstChar 32 +/LastChar 255 +>> +endobj +18 0 obj +<< +/Type /Font +/BaseFont /Symbol +/Subtype /Type1 +/FirstChar 32 +/LastChar 255 +>> +endobj +19 0 obj +<< +/ca 1. +/CA 0. +>> +endobj +20 0 obj +<< +/ca 1. +/CA 1. +>> +endobj +21 0 obj +<< +/ca 0.04 +>> +endobj +22 0 obj +<< +/ca 0. +>> +endobj +23 0 obj +<< +/Type /XObject +/Subtype /Form +/BBox [-2.5 -10. 7.5 10.] +/Matrix [1. 0. 0. 1. 0. 0.] +/Length 77 +>> +stream +q +/GS2 gs +0. g +0. G +10. M +1. 0. 0. 1. 0. 0. cm +5. -5. m +0. 0. l +5. 5. 
l +h +B +Q +endstream +endobj +2 0 obj +<< +/ProcSet [/PDF /Text /ImageB /ImageC /ImageI] +/Font << +/F1 5 0 R +/F2 6 0 R +/F3 7 0 R +/F4 8 0 R +/F5 9 0 R +/F6 10 0 R +/F7 11 0 R +/F8 12 0 R +/F9 13 0 R +/F10 14 0 R +/F11 15 0 R +/F12 16 0 R +/F13 17 0 R +/F14 18 0 R +>> +/ExtGState << +/GS1 19 0 R +/GS2 20 0 R +/GS3 21 0 R +/GS4 22 0 R +>> +/XObject << +/Xo1 23 0 R +>> +>> +endobj +24 0 obj +<< +/Producer (jsPDF 2.3.1) +/CreationDate (D:20220228182020+01'00') +>> +endobj +25 0 obj +<< +/Type /Catalog +/Pages 1 0 R +/OpenAction [3 0 R /FitH null] +/PageLayout /OneColumn +>> +endobj +xref +0 26 +0000000000 65535 f +0000018736 00000 n +0000020897 00000 n +0000000015 00000 n +0000000122 00000 n +0000018793 00000 n +0000018918 00000 n +0000019048 00000 n +0000019181 00000 n +0000019318 00000 n +0000019441 00000 n +0000019570 00000 n +0000019702 00000 n +0000019838 00000 n +0000019966 00000 n +0000020093 00000 n +0000020222 00000 n +0000020355 00000 n +0000020457 00000 n +0000020553 00000 n +0000020589 00000 n +0000020625 00000 n +0000020656 00000 n +0000020685 00000 n +0000021222 00000 n +0000021308 00000 n +trailer +<< +/Size 26 +/Root 25 0 R +/Info 24 0 R +/ID [ ] +>> +startxref +21412 +%%EOF +3 0 obj <>/Font<>/ProcSet[/PDF/Text]/XObject<>>>/Type/Page>> endobj 23 0 obj <>>>/Subtype/Form/Type/XObject>>stream +0 TL +q +0 g +0 G +10 M +/GS0 gs +q 1 0 0 1 5 -5 cm +0 0 m +-5 5 l +0 10 l +h +B +Q +Q + +endstream endobj 24 0 obj <> endobj 25 0 obj <> endobj 26 0 obj <> endobj 27 0 obj <>stream + + + + + 2022-02-28T18:20:20+01:00 + 2022-03-08T08:18:51+01:00 + 2022-03-08T08:18:51+01:00 + jsPDF 2.3.1 + application/pdf + uuid:3224bb93-0008-4556-8c5d-c39429a4ac86 + uuid:7d80532f-629c-4714-9ee4-e23becec9431 + + + + + + + + + + + + + + + + + + + + + + + + + +endstream endobj 28 0 obj <>stream +q +1 0 0 -1 0 1860 cm +0 0 2483 1860 re +W n +1 g +0 0 m +2483 0 l +2483 1860 l +0 1860 l +h +f +1 0 0 1 502.5 1259.53125 cm +1.5 w +q 1 0 0 1 -150 -840.9375 cm +0 0 m +0 25.875 l +S +Q +q 
+0 -1 1 0 -150 -810.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -150 -943.4687 cm +0 0 m +0 38.406 l +S +Q +q +0 -1 1 0 -150 -900.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -150 -1102.5 cm +0 0 m +0 33.969 l +S +Q +q +0 -1 1 0 -150 -1064.23836 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -150 -750.9375 cm +0 0 m +0 25.875 l +S +Q +q +0 -1 1 0 -150 -720.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -150 -660.9375 cm +0 0 m +0 25.406 l +S +Q +q +0 -1 1 0 -150 -631.23836 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -1.25 -570 cm +0 0 m +56.5 0 l +S +Q +q +-1 0 0 -1 59.54289 -570 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -150 -510.4687 cm +0 0 m +0 25.406 l +S +Q +q +0 -1 1 0 -150 -480.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -150 -240.9375 cm +0 0 m +0 50.938 l +0 57.604 -3.333 60.938 -10 60.938 c +-200 60.938 l +-206.667 60.938 -210 57.604 -210 50.938 c +-210 -319.062 l +-210 -325.729 -206.667 -329.062 -200 -329.062 c +-154.75 -329.062 l +S +Q +q +-1 0 0 -1 -300.45711 -570 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -1.25 -450 cm +0 0 m +56.5 0 l +S +Q +q +-1 0 0 -1 59.54289 -450 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -150 -420.9375 cm +0 0 m +0 25.875 l +S +Q +q +0 -1 1 0 -150 -390.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -150 -330.9375 cm +0 0 m +0 25.875 l +S +Q +q +0 -1 1 0 -150 -300.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 -1.25 -270 cm +0 0 m +56.5 0 l +S +Q +q +-1 0 0 -1 59.54289 -270 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 810 -990.9375 cm +0 0 m +0 115.875 l +S +Q +q +0 -1 1 0 810 -870.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 1410 -990.9375 cm +0 0 m +0 25.875 l +S +Q +q +0 -1 1 0 1410 -960.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 1800 -990.9375 cm +0 0 m +0 25.875 l +S +Q +q +0 -1 1 0 1800 -960.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 810 -810.9375 cm +0 0 m +0 25.875 l +S +Q +q +0 -1 1 0 810 -780.76961 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 661.25 -120 cm 
+0 0 m +-51.25 0 l +-57.917 0 -61.25 -3.333 -61.25 -10 c +-61.25 -710 l +-61.25 -716.667 -57.917 -720 -51.25 -720 c +-6 -720 l +S +Q +q +-1 0 0 -1 659.54289 -840 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 661.25 540 cm +0 0 m +-81.25 0 l +-87.917 0 -91.25 -3.333 -91.25 -10 c +-91.25 -1370 l +-91.25 -1376.667 -87.917 -1380 -81.25 -1380 c +-6 -1380 l +S +Q +q +-1 0 0 -1 659.54289 -840 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +q 1 0 0 1 660 30 cm +0 0 m +-80 0 l +-86.667 0 -90 -3.333 -90 -10 c +-90 -860 l +-90 -866.667 -86.667 -870 -80 -870 c +-4.75 -870 l +S +Q +q +-1 0 0 -1 659.54289 -840 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 -1 -502.5 600.46875 cm +BT +0 g +/T1_0 12 Tf +1 -0 0 1 1110.8333 578.4687 Tm +(no)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 -660 cm +0 0 m +0 24.469 l +S +Q +Q +q +0 1 1 0 1312.5 1231.70711 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 -510.4687 cm +0 0 m +0 24.719 l +S +Q +Q +q +0 1 1 0 1312.5 1081.92586 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 -420 cm +0 0 m +0 24.937 l +S +Q +Q +q +0 1 1 0 1312.5 991.23836 cm +1 w +/GS0 gs +/Fm0 Do +Q +BT +1279.417 1003.305 Td +(yes)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 960 -449.875 cm +0 0 m +50 0 l +56.667 0 60 3.333 60 10 c +60 409.875 l +60 416.542 56.667 419.875 50 419.875 c +-140 419.875 l +-146.667 419.875 -150 423.208 -150 429.875 c +-150 444 l +S +Q +Q +q +0 1 1 0 1312.5 602.05086 cm +1 w +/GS0 gs +/Fm0 Do +Q +BT +1485.833 1034.344 Td +(no)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 -270 cm +0 0 m +0 24.937 l +S +Q +Q +q +0 1 1 0 1312.5 841.23836 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 -180.9375 cm +0 0 m +0 25.875 l +S +Q +Q +q +0 1 1 0 1312.5 751.23836 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 59.875 cm +0 0 m +0 25.062 l +S +Q +Q +q +0 1 1 0 1312.5 511.23836 cm +1 w +/GS0 gs +/Fm0 Do +Q +BT +1 -0 0 1 1288.1667 518.2482 Tm +(yes)Tj 
+ET +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 210 cm +0 0 m +0 24.937 l +S +Q +Q +q +0 1 1 0 1312.5 361.23836 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 299.0625 cm +0 0 m +0 25.875 l +S +Q +Q +q +0 1 1 0 1312.5 271.23836 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 390 cm +0 0 m +0 24.469 l +S +Q +Q +q +0 1 1 0 1312.5 181.70711 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 810 479.5312 cm +0 0 m +0 24.937 l +S +Q +Q +q +0 1 1 0 1312.5 91.70711 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 1410 -900.9375 cm +0 0 m +0 25.875 l +S +Q +Q +q +0 1 1 0 1912.5 1471.23836 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +q 1 0 0 1 1800 -900.9375 cm +0 0 m +0 25.875 l +S +Q +Q +q +0 1 1 0 2302.5 1471.23836 cm +1 w +/GS0 gs +/Fm0 Do +Q +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 -298.75 -899.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +339.5 1466.394 Td +(Main)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 -298.75 -1062.5313 cm +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +Q +BT +282.167 1644.394 Td +(create shared memory for:)Tj +52.333 -18 Td +[(-)-278 (bools)]TJ +-43.333 -18 Td +[(-)-278 (framenumber \(uint64\))]TJ +30.333 -18 Td +[(-)-278 (red_frame)]TJ +-5 -18 Td +[(-)-278 (green frame)]TJ +4 -18 Td +[(-)-278 (blue frame)]TJ +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 -298.75 -809.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +300.167 1385.394 Td +(create 3 processes:)Tj +-18.333 -18 Td +(one for each color channel)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 -298.75 -719.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +263.5 1286.394 Td +(start activity of created processes)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 -298.75 
-629.5312 cm +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +Q +BT +259.408 1184.394 Td +(get frames @25 fps from Picamera)Tj +38.758 -18 Td +(as raw NumPy array)Tj +-8 -18 Td +(without JPEG encoding)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 61.25 -629.5312 cm +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +Q +BT +614.5 1184.394 Td +(always the newest frame is recieved:)Tj +6.333 -18 Td +(processing must be faster than fps)Tj +-1.667 -18 Td +(if every frame should be processed)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 -298.75 -479.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +292.417 1046.394 Td +(increase framenumber)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 61.25 -479.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +625.892 1055.394 Td +(shm_bools[0] = True \(newframe\))Tj +-12.975 -18 Td +(shm_framenumber[0] = framenumber)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 -298.75 -389.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +256.833 965.394 Td +(split frame into: b, g, r - components)Tj +13.333 -18 Td +(and allocate to shared memory)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 -298.75 -299.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +242.125 875.394 Td +(set trigger to "True" for each colorchannel)Tj +42.708 -18 Td +(for start of the processing)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 61.25 -299.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +647.892 866.394 Td +(shm_bools[7,8,9] = True)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.51 0.95 1 rg +1 w +q 1 0 0 1 661.25 -1049.0625 cm +0 0 m +1288.75 0 l +1288.75 58.125 l +0 58.125 l +h +B +Q +Q +BT +1766.458 1616.394 Td +(Multiprocessing)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 
661.25 -869.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +1263.083 1445.394 Td +(read framenumber)Tj +-7.583 -18 Td +(shm_framenumber[0])Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 661.25 -779.0625 cm +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +Q +BT +1250.5 1351.925 Td +(conditions for first start:)Tj +46.833 -18 Td +[(-)-278 (i = 0)]TJ +-73.442 -18 Td +[(-)-278 (shm_bools[0]= True \(newframe\))]TJ +-27.225 -18 Td +[(-)-278 (shm_bools[1,2,3] = False \(p_rgb_finished\))]TJ +-3 -18 Td +[(-)-278 (shm_bools[7] = TRUE \(p_red_start_trigger\))]TJ +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 661.25 -629.5312 cm +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +Q +BT +1231.167 1184.394 Td +(conditions for start processing:)Tj +-23.833 -18 Td +[(-)-278 (framenumber > last_processed_frame)]TJ +-13.667 -18 Td +[(-)-278 (shm_bools[7] = TRUE \(p_red_start_trigger\))]TJ +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 810 -479.75 cm +0 0 m +150 29.875 l +0 59.75 l +-150 29.875 l +h +B +Q +Q +BT +1233.225 1046.269 Td +(conditions for first start = True)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 630 -389.0625 cm +0 0 m +360 0 l +360 119.063 l +0 119.063 l +h +B +Q +Q +BT +1302.833 952.925 Td +(set:)Tj +-91.275 -18 Td +[(-)-278 (shm_bools[4] = True \(p_red_started\))]TJ +-53.2 -18 Td +[(-)-278 (shm_bools[7] = False \(p_red_start_trigger\) | reset trigger)]TJ +48.308 -18 Td +[(-)-278 (shm_bools[1] = False \(p_red_finished\))]TJ +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 661.25 -239.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +1275.167 806.394 Td +(do processing)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 661.25 -149.0625 cm +0 0 m +297.5 0 l +297.5 89.063 l +0 89.063 l +h +B +Q +Q +BT +1297.5 727.925 Td +(i += 1)Tj +3.667 -18 Td +(set: )Tj +-91.942 -18 Td 
+[(-)-278 (shm_bools[1] = True \(p_red_finished\))]TJ +-0.225 -18 Td +[(-)-278 (shm_bools[4] = False \(p_red_started\))]TJ +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 810 0.125 cm +0 0 m +150 29.875 l +0 59.75 l +-150 29.875 l +h +B +Q +Q +BT +1215.667 566.394 Td +(conditions for start processing = true)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 630 90.9375 cm +0 0 m +360 0 l +360 119.063 l +0 119.063 l +h +B +Q +Q +BT +1302.833 472.925 Td +(set:)Tj +-91.275 -18 Td +[(-)-278 (shm_bools[4] = True \(p_red_started\))]TJ +-53.2 -18 Td +[(-)-278 (shm_bools[7] = False \(p_red_start_trigger\) | reset trigger)]TJ +48.308 -18 Td +[(-)-278 (shm_bools[1] = False \(p_red_finished\))]TJ +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.96 0.32 rg +1 w +q 1 0 0 1 661.25 240.9375 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +1275.167 326.394 Td +(do processing)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 661.25 330.9375 cm +0 0 m +297.5 0 l +297.5 59.063 l +0 59.063 l +h +B +Q +Q +BT +1212.892 235.925 Td +(shm_bools[1] = True \(p_red_finished\))Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 661.25 420.4687 cm +0 0 m +297.5 0 l +297.5 59.063 l +0 59.063 l +h +B +Q +Q +BT +1210.917 146.394 Td +(last_processed_frame = framenumber)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +1 0.64 0.64 rg +1 w +q 1 0 0 1 661.25 510.4687 cm +0 0 m +297.5 0 l +297.5 59.063 l +0 59.063 l +h +B +Q +Q +BT +1212.667 56.394 Td +(shm_bools[4] = False \(p_red_started\))Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 661.25 -959.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +1271.5 1526.394 Td +(processing_red)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 1261.25 -959.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +1864.833 1526.394 Td +(processing_green)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 
0.87 0.93 rg +1 w +q 1 0 0 1 1651.25 -959.0625 cm +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +Q +BT +2258.833 1526.394 Td +(processing_blue)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 1261.25 -869.0625 cm +0 0 m +297.5 0 l +297.5 1439.063 l +0 1439.063 l +h +B +Q +Q +BT +1847.167 745.925 Td +(same as processing_red)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 1651.25 -869.0625 cm +0 0 m +297.5 0 l +297.5 1439.063 l +0 1439.063 l +h +B +Q +Q +BT +2237.167 745.925 Td +(same as processing_red)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +q 1 0 0 1 -150 -1102.5 cm +0 0 m +-73.167 0 -132.5 -16.792 -132.5 -37.5 c +-132.5 -58.208 -73.167 -75 0 -75 c +73.166 -75 132.5 -58.208 132.5 -37.5 c +132.5 -16.792 73.166 0 0 0 c +h +B +Q +Q +BT +339.833 1736.394 Td +(Start)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.27 1 0 rg +1 w +/GS1 gs +q 1 0 0 1 -472.5 -645.531 cm +0 0 m +910.25 0 l +910.25 495.531 l +0 495.531 l +h +B +Q +Q +BT +/T1_0 17 Tf +0 1 -1 0 45.8667 899.5344 Tm +(Get frames from picamera)Tj +ET +q +1 0 0 -1 502.5 600.46875 cm +0.82 0.87 0.93 rg +1 w +/GS2 gs +q 1 0 0 1 225 -1229.5313 cm +0 0 m +390 0 l +390 119.063 l +0 119.063 l +h +B +Q +Q +Q +BT +/T1_1 27 Tf +815.976 1801.802 Td +(mp_vorbereitung)Tj +/C2_0 27 Tf +<0003>Tj +/T1_1 27 Tf +63.723 -40.5 Td +(V06_04)Tj +ET + +endstream endobj 29 0 obj [31 0 R] endobj 30 0 obj <>stream +H\j0 ~ +C]C`t rZ[I l琷&AO/- ua縰Cq ޕM6)-ps>xT9Kw-)}ㄔ@ӀAn']SEy= \BUo=:dK#H5PJ5 +wrq?ۘq=xr );(A#o +endstream endobj 31 0 obj <> endobj 32 0 obj <> endobj 33 0 obj <> endobj 34 0 obj <>stream +H` +endstream endobj 35 0 obj <>stream +H|U tW9CDׁܒyn؊k1vMu. 
[e~c{jAq`sYrdF><'A]{W(#~e=& &lJxj@Y2|xa4<'Y(n@{R@e*FZc k "U /Ou{؃ u&{B(C=4a`4VS:-`()RZc[7oa*|&>CFpYzaAnkȱޅU~x<3X#4zZ۠%cYǬ'RLG,Aҭ<jgA rv28SCb,&?*leMC1*')2 uhΌ7 5q>e{NS[q{G>CtLK~۴Zo?Id`=p]QaU`XqjGŰϋ01_̗gхhg09"p7EQ_$ZNwgY+)Rtda>)X~7ʤ)4~GQ0m@BX5i1cku +؋Z|/q +THɤ-|D)JjI#ryLPXV_iKlnf +.OϪ `1`.؄8ҿ׸ߏri"kIS=d+z^o-jm쁧czr9'a}-${sF܀s):1>Ah8H㩀*ޢi ml l=N Y- +"Q%<⼸ yt8(\9NNc|=\ngYy]~/9j!wU絩<7hG:^{=хG])VMe~c;gg/He"k.ڨ +jvВ-8dqU@x[S/=WAЅMk] c_9M;)2qHTԊd1Vk8gc%LFKQ/9Vf"$["Jw@c"~rP\ :B9; m#Ҭ$w#7wz*øgPK:w^5wOpFr')RԷV/++ [ +1+gaxt_%I\ՙE>^㮷2Bk5GO}"|_?m|&RGJzhʵevV;ۋ3 +g/[zslooƞ##Q5ۙxd)콵\χ6p#@BآDNy݁JѴ߹B}oI 3-a}@zQd;L#c8KRfJ)g1IQ6)cF ?_>{x{Rb]b޹SL(gEF n:U`ˀ~>v)) aƸL2$K;|~e|cp5?LaÙ3?h$#9!HwiOyӑceFzh0e.'Ms&ģ׏~3C527"$o2t3"0etw9*;oRZc G2ijL=ʹ51`Q_Wt &٦txucN3d,<(-YZdx*ìm7'e[1UeM,6i14xzj_#{b>TGa_W}pTW?{I)lA6*_!6 !%! iCL : 2LTI7D(‡ u?jlg4" +B[Ӑb[N~9s?9w?/^/g^":Ȍdk(R|*=OoZ4.L]XhΥDf%( $@Lif%Ê<PZG,oC`@񚵶F!.2OT)Gss3g2ER)||Tms[~V kEHf&/p[,H D[*CVO , GzKZ [ZK{}L>Md'&{NYtQT!|e._s[^=f'lv):4dvIP\h̕Ĩ?]z]T)BEK'fx\b߹f{-__8>λryu]$2V() "1!"} Dd˒zgAcf[FlWA<-2- H=Aue]ϋP,ɥ={%(2kY?W\{eh$jVU(D1&yD1yDA4ce :U2hiYӍ\B:PIѵ1Y9#e<Ѵ\w+-703535 $x:WA> 'Afh?\|ǓoW7߅fg+N`<:VA^ykmFŔ_>RX;Ku_i lRA|&}.ǁG@6Xec%}])G+/b.kZA}5sAT=(wBrZzq\ׁAMoh&1,i12qiW[kv! ~h@@Iڬ=IKYMY4 aUzKuyrz|؈;yܠBQ 3As{Rkh{ 30tn+=_XZKoDN 9@̆|r +=7Xm0 +ڧ3MsC;+}b{3_A@:0C?Ikl@[Os36s9HM8NKũ}[}qY}TD7Gr^kyOr+<{gEG:H7c{aP/R*fȽ:ģKɥ'd6byR$M6k$׺2Rܲdm@uohCx0BH{`'WlbF yuf5HEZ)QD}8wwc0ceQ@sp?<>rq.Kt,3|BjwpƟR1|hkK~&I4y,n]-ا݉G>s>}tߏ:{_@8c#J̼=zM9`~<6 9;5nesjٴ>:E[/Zqތ0^}C>1e2[zu%UnCzlO-U;MAUÔM1ecVGEC!^+Cck=QqSg,Ok +6ю~a'ߥ|8K\=yW}apZ +4Me{h'F0^jdeҘ#Z'WhM8x7`6X6|hc }x=4bih>U}Ԋdg>GT_ ?m埈E?yzȲXNEC:`#eIXꮫ;V`;d> `8Ρob8).`? 
+O?:67x{PblZp獷 װ6f?CF) }ʑ/C_~IumT7m)ӇY<1:9cm +LFloHgI9\gXu|C?{oWrΓu/ 2?J.M 1!]u7 FSG?!EqhDXf$*Hh?G,sJawOsv_+J|s=yqg'&cMCL>D/[j: e_r]*Y|Ii-W E*~RJPUh%o]/u h_~߇í=x\-if6]cIGmRg㫅l?Oa { OMh՟s!DyFiq8մI<ʨ%yYvƒate/ckx$aΑI#a%ϻKh~5[N[~hzh 7~6!cU==Ojq&|jNM!>߳ kߕwv֭%[G~xiFSVGeV{,YSY+{{E5F^l k$qO*i2SJ~=G)VdjA*%>][srorYi>]c]>)rŎ)|tG/*\|t?u./g-<)-Ôx ~iK:rW:hiݑWެt-u/irΛ'u5^sz\9\c-Z+GA-pW5@s\ maɿQ8jgzR$A-SÝeszߪ>5÷8(Cao +ȼz 1P!-Xrϛ=ʥKf"+y-gIm9z̶e"q~z6f1ldrN黂w\-ߗe5eF'}K0\Mﲶμ-}}!L!_El9BH Gp6} .3j9)8"2(Y'KS#?bGڬ;#~w_\|+=w8l7=x E!(b%gK#:cIIvf4ʾcN<s}3ak~n}q?wd(]Lr޻3߳KW9Ko{ 4*)[yG$ymu+EcYx +{+`(`>htE%7^=栢Z1/˰3ޑT{Y-׿x'Xs?Mm!<#?̥.f!^=&ALsFLW6vǒDs:_!}G&y9:vvB˻I}_>(a»].e +4O#Jp,FSC= sd8џu/e\eu%Ae)%!"HC ,v (@֘`!tmI)J(F0_累Lozpv5UE5٧I߃4gjC"uħۉqQlQ~_CA֓%QH.i'(_&n[rI+h5X7"]k{LY)˅ 6xZ7ƴ˶ bw PWI"?.u6c&TI^m`)- mIMO޵ SֆEK>+_ZKߒg͐1csއ|9㬒lZP5<߶X7߭n6kV&o kw*÷dκ1~N +no2Jqi :JZIZH&$#$ʼn)SX)7Hy fD[7/V?ilV8>uun{Ժȝ}tpan|bߊH^<4mX}+uR?~Nу?dⲑ2Z|*c9ZT4R A~ƺDE܆uϋ)At3O#>LmzW(  4qXl†&6{F&;HG|.KAڍN}՘t!m^%>Jޛ,4gjk7hwˎ:_<}'utVIk|J[޺[]+;=yu_7qNc1_4ktV;+>n +xNqfRvvY8 +\gqܴxh"7m:R~Bi׋gD') +U+!)0X=X|+!Y)N` pc@"ƏH/b{ w]Pl/|i3}VKCݷ/OHer_V w:cYʝRn-j> fh{3r>*uO@;BJ6R죒\盾j[13Xk$d&Xuun E)%הK٧d10 R!@| dB o__RC; LWXRnִߘKͺ :>+ Z6Xo9f/>ۤemg -;0XeO#N A{hqLɹ-[ cK-lN^TƷ =-wiފf b~(ckQh\7jd'Ghĥpl{ܢCL\X6~:o/)~Ú[si}v?dSfXg1>X:%yI)o 7e]һ5IMv2\N/J٬F5ԡJmܖN +|Ƈ跣lq7;gS^hJu+cy\A7EZ!JDbvwId͈) lF`"3]@M%}OZre"9bm?p[qggk cc 88Q*>N3gc+4jޓ_ An6N6b_)0..n~ lQ}Oz<4 wA-ZmC?%mCK},J?,o~jǹKe)SWvɶJelw7m]r󿳲ɟ2F|Tz- <Mxk;23=%t_k2w;dLxw g&<@z.T'ѾNX`k;ʜuoM'KWX:So76|GW=Mcj_sO2Ax7ӘtOrdߐXnI["?b=JkQQ& ƀĆ51K-FAυg㛇/3b,megp}@r{ͬyRR1F:-ڗљ(uඊqɿ6/&3? 
$1BB^7QHH06t +,(scI,G36 Ц%a, nЎ/Q>[IjJZmڨZUUEeդMTi X'j&m:[y{XS5ހ5B΀=8UvO'y/eM]SwQ)}zy*ؔ=ׄx_~oJC OABLvt9(o]39Kk^E!amCaRo:qi)ls_\e zµ{ҶqEĹ밙>| nOֺԽk)U8_p\Qg|Ϋ_)BKx>@}9j3ϳ^Bȿ.jSGcf\ϹC\w`6vؚ'b~qf15TU#9RWقFIqFAD, +xv=؏IK'Q%&qI\w"i&w>;2bWma1䧜qձ6bnCa=(!cەlmA*[gTscNC=IکhL6J(k1e-A$كRFLHUeF& [yO`ܩq6$|(==qč۩ZR𐂭a#}Dhm{/J9Ę1'G\ZrZdmXeXg*(9F|e7jr O$S*KTSY41e*숦lϠF/ucoCgE =3BPYg45IQƼUͮ_=O {G b}2nKbe2JX2PqfD1хlٗ6EwLdum\Vē$SҲ;E]-'O D$ي{ :ȑB?+b}X$IbPI?;N"iXb31i +sόFͨkEDұ#jfñxfawMt,o]Gׇ>^ehaMP8U$o/Ur+ Ђ[nq2Bjs\;n=GnNg})< ~*n0v3t WNLS)i,&թ[w'i uu>WsIQu(iM+6{|~#XnmQuoCEF@']P?ŏZ x8GA?ApDs9NkXdUϏW^EQb(?by o4; ޿|7_QQi0݋n{|ڡR6_Z~A"Q yX@rH|Ut5b}QQ{k/в 6m˔4A۪BBǕ2wHwt._R.#*Ym~F>ggj >_M%_[/;,l |VqiбF.>iVq6h,7AcL{)]FYʟYQ&kC^Jp{ζw4&xmvNF~i9[(G!!;!_x8!AFNl-hVfJe?.h\r.Kl>"d-$ҫ?]k s-Ξg?Őy\pHqrKqڢbiCqf':>okTLgY #͎?0%%UVt2f8ccڮ@S5p7I.qBln /# FzĶq\a|fEsWeQQY]6%Ha !4<`;ځ$#4$ـ +j"q J% +,@ȱ0<)@摒]TK} goh i@R9*<%1猪nyH + +К!1 T*I D D D%^@x <"<G#@DDDD!p@8D8     "   D$@$@$0A L&& $a0""Dק +) MDDDD66Ti"EH H H{Z fZV!v^UH!AH"$ BDH$FD D D Dm*)npV[S+ 2#?f$c< +Er =[<[}~$=t=ksd`@C؆ֵ|^p#0xl#jfzL(n-S*WP>AZJEYgfoͧ1(F^5;ɒ&]D؃ܙn>#/&ؔcß@uh %(Y]g;6& S`##Az~֫DOҏ&`M?:.ҽ|Eկ"9//,m?zvŏKLz 8o|&.b +fш!)pK)sI_U σ,N;)i`BOqx,,ǿfj7>+Ox<]?mŭMgqZ܍7 4oS +_4*,uqM\o[hUɊl kVpDT5;&jL'1&tbw~E>Rׇ!#d4N}anaCʂUT%ӸSГ ;uS~noc Cy/I& W:wh]v|_5Cݺ38;" 50x-gq+?/U/󪻥 hvRY +%(e{La'Ƣfdp?ɨ0\N\8&!E(.Oq{f.[7Mۣ==6[D(*lrWEq7lĦh !3BnabD'fA)籘yc[ pn>xwT^7rUu']]e5Y={WuOuwŬ?LײF꼛\sUR#Ӫ;rUʕrR+nLohbf$T6ֹ/g<~Hu:n򹉱/nW̓ ;XZrb^N,媭"kG_:{+$|d#QGZ-jB^t3uE>h顶zooNNlnc>uZ-X5Pi8>Wx@xӖ3ԘKqfx]U81 $PVǝ8*kZ|-YjZ2u4u(Ϭ]>XT+E,68%m2~erpA: څJPgηQz(ɪ# +endstream endobj xref +0 1 +0000000000 65535 f +3 1 +0000022091 00000 n +23 13 +0000022324 00000 n +0000022572 00000 n +0000022686 00000 n +0000022797 00000 n +0000022931 00000 n +0000026101 00000 n +0000038669 00000 n +0000038694 00000 n +0000038991 00000 n +0000057273 00000 n +0000057342 00000 n +0000057608 00000 n +0000057688 00000 n +trailer +<]/Prev 21412>> +startxref +71836 +%%EOF +3 0 obj 
<>/Font<>/ProcSet[/PDF/Text]/XObject<>>>/Type/Page>> endobj 23 0 obj <>>>/Subtype/Form/Type/XObject>>stream +0 TL +q +q +1 0 0 1 5 -5 cm +0 g +0 G +10 M +/GS0 gs +0 0 m +-5 5 l +0 10 l +h +B +Q +Q + +endstream endobj 24 0 obj <> endobj 27 0 obj <>stream + + + + + 2022-02-28T18:20:20+01:00 + 2022-05-04T09:25:22+02:00 + 2022-05-04T09:25:22+02:00 + jsPDF 2.3.1 + application/pdf + uuid:3224bb93-0008-4556-8c5d-c39429a4ac86 + uuid:3840d3de-7740-4876-a957-61f878c67dfc + + + + + + + + + + + + + + + + + + + + + + + + + +endstream endobj 36 0 obj <>stream +q +1 0 0 -1 0 1860 cm +0 0 2483 1860 re +W n +1 g +0 0 m +2483 0 l +2483 1860 l +0 1860 l +h +f +1 0 0 1 352.5 418.59375 cm +1.5 w +0 0 m +0 25.875 l +S +q +0 -1 1 0 0 30.16789 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 0 -102.5312 cm +0 0 m +0 38.406 l +S +q +0 -1 1 0 0 42.69909 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 0 -159.0313 cm +0 0 m +0 33.969 l +S +q +0 -1 1 0 0 38.26164 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 0 351.5625 cm +0 0 m +0 25.875 l +S +q +0 -1 1 0 0 30.16789 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 0 90 cm +0 0 m +0 25.406 l +S +q +0 -1 1 0 0 29.69914 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 148.75 90.9375 cm +0 0 m +56.5 0 l +S +q +-1 0 0 -1 60.79289 0 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 -148.75 59.5313 cm +0 0 m +0 25.406 l +S +q +0 -1 1 0 0 29.69909 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 0 269.5312 cm +0 0 m +0 50.938 l +0 57.604 -3.333 60.938 -10 60.938 c +-200 60.938 l +-206.667 60.938 -210 57.604 -210 50.938 c +-210 -319.062 l +-210 -325.729 -206.667 -329.062 -200 -329.062 c +-154.75 -329.062 l +S +q +-1 0 0 -1 -150.45711 -329.0625 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 148.75 -209.0625 cm +0 0 m +56.5 0 l +S +q +-1 0 0 -1 60.79289 0 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 -148.75 29.0625 cm +0 0 m +0 25.875 l +S +q +0 -1 1 0 0 30.16789 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 0 90 cm +0 0 m +0 25.875 l +S +q +0 -1 1 0 0 30.16789 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 
0 1 148.75 60.9375 cm +0 0 m +56.5 0 l +S +q +-1 0 0 -1 60.79289 0 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 811.25 -720.9375 cm +0 0 m +0 115.875 l +S +q +0 -1 1 0 0 120.16789 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 600 0 cm +0 0 m +0 25.875 l +S +q +0 -1 1 0 0 30.16789 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 390 0 cm +0 0 m +0 25.875 l +S +q +0 -1 1 0 0 30.16789 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 -990 180 cm +0 0 m +0 25.875 l +S +q +0 -1 1 0 0 30.16789 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 -148.75 690.9375 cm +0 0 m +-51.25 0 l +-57.917 0 -61.25 -3.333 -61.25 -10 c +-61.25 -710 l +-61.25 -716.667 -57.917 -720 -51.25 -720 c +-6 -720 l +S +q +-1 0 0 -1 -1.70711 -720 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 0 660 cm +0 0 m +-81.25 0 l +-87.917 0 -91.25 -3.333 -91.25 -10 c +-91.25 -1370 l +-91.25 -1376.667 -87.917 -1380 -81.25 -1380 c +-6 -1380 l +S +q +-1 0 0 -1 -1.70711 -1380 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 1 -1.25 -510 cm +0 0 m +-80 0 l +-86.667 0 -90 -3.333 -90 -10 c +-90 -860 l +-90 -866.667 -86.667 -870 -80 -870 c +-4.75 -870 l +S +q +-1 0 0 -1 -0.45711 -870 cm +0 g +1 w +/GS0 gs +/Fm0 Do +Q +1 0 0 -1 -1162.5 570.46875 cm +BT +0 g +/T1_0 12 Tf +1110.833 578.469 Td +(no)Tj +ET +q +1 0 0 -1 1312.5 1260.46875 cm +0 0 m +0 24.469 l +S +Q +q +0 1 1 0 1312.5 1231.70711 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 1312.5 1110.93745 cm +0 0 m +0 24.719 l +S +Q +q +0 1 1 0 1312.5 1081.92586 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 1312.5 1020.46875 cm +0 0 m +0 24.937 l +S +Q +q +0 1 1 0 1312.5 991.23836 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +BT +1279.417 1003.305 Td +(yes)Tj +ET +q +1 0 0 -1 1462.5 1050.34375 cm +0 0 m +50 0 l +56.667 0 60 3.333 60 10 c +60 409.875 l +60 416.542 56.667 419.875 50 419.875 c +-140 419.875 l +-146.667 419.875 -150 423.208 -150 429.875 c +-150 444 l +S +Q +q +0 1 1 0 1312.5 602.05086 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +BT +1485.833 1034.344 Td +(no)Tj +ET +q +1 0 0 -1 
1312.5 870.46875 cm +0 0 m +0 24.937 l +S +Q +q +0 1 1 0 1312.5 841.23836 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 1312.5 781.40625 cm +0 0 m +0 25.875 l +S +Q +q +0 1 1 0 1312.5 751.23836 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 1312.5 540.59375 cm +0 0 m +0 25.062 l +S +Q +q +0 1 1 0 1312.5 511.23836 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +BT +1288.167 518.248 Td +(yes)Tj +ET +q +1 0 0 -1 1312.5 390.46875 cm +0 0 m +0 24.937 l +S +Q +q +0 1 1 0 1312.5 361.23836 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 1312.5 301.40625 cm +0 0 m +0 25.875 l +S +Q +q +0 1 1 0 1312.5 271.23836 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 1312.5 210.46875 cm +0 0 m +0 24.469 l +S +Q +q +0 1 1 0 1312.5 181.70711 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 1312.5 120.93755 cm +0 0 m +0 24.937 l +S +Q +q +0 1 1 0 1312.5 91.70711 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 1912.5 1501.40625 cm +0 0 m +0 25.875 l +S +Q +q +0 1 1 0 1912.5 1471.23836 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 2302.5 1501.40625 cm +0 0 m +0 25.875 l +S +Q +q +0 1 1 0 2302.5 1471.23836 cm +1 w +/GS0 gs +/T1_0 12 Tf +/Fm0 Do +Q +q +1 0 0 -1 203.75 1499.53125 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +339.5 1466.394 Td +(Main)Tj +ET +q +1 0 0 -1 203.75 1663.00005 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +BT +282.167 1644.394 Td +(create shared memory for:)Tj +52.333 -18 Td +[(-)-278 (bools)]TJ +-43.333 -18 Td +[(-)-278 (framenumber \(uint64\))]TJ +30.333 -18 Td +[(-)-278 (red_frame)]TJ +-5 -18 Td +[(-)-278 (green frame)]TJ +4 -18 Td +[(-)-278 (blue frame)]TJ +ET +q +1 0 0 -1 203.75 1409.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +300.167 1385.394 Td +(create 3 processes:)Tj +-18.333 -18 Td +(one for each color channel)Tj +ET +q +1 0 0 -1 203.75 1319.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l 
+297.5 58.125 l +0 58.125 l +h +B +Q +BT +263.5 1286.394 Td +(start activity of created processes)Tj +ET +q +1 0 0 -1 203.75 1229.99995 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +BT +259.408 1184.394 Td +(get frames @25 fps from Picamera)Tj +38.758 -18 Td +(as raw NumPy array)Tj +-8 -18 Td +(without JPEG encoding)Tj +ET +q +1 0 0 -1 563.75 1229.99995 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +BT +614.5 1184.394 Td +(always the newest frame is recieved:)Tj +6.333 -18 Td +(processing must be faster than fps)Tj +-1.667 -18 Td +(if every frame should be processed)Tj +ET +q +1 0 0 -1 203.75 1079.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +292.417 1046.394 Td +(increase framenumber)Tj +ET +q +1 0 0 -1 563.75 1079.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +625.892 1055.394 Td +(shm_bools[0] = True \(newframe\))Tj +-12.975 -18 Td +(shm_framenumber[0] = framenumber)Tj +ET +q +1 0 0 -1 203.75 989.53125 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +256.833 965.394 Td +(split frame into: b, g, r - components)Tj +13.333 -18 Td +(and allocate to shared memory)Tj +ET +q +1 0 0 -1 203.75 899.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +242.125 875.394 Td +(set trigger to "True" for each colorchannel)Tj +42.708 -18 Td +(for start of the processing)Tj +ET +q +1 0 0 -1 563.75 899.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +647.892 866.394 Td +(shm_bools[7,8,9] = True)Tj +ET +q +1 0 0 -1 1163.75 1649.53125 cm +0.51 0.95 1 rg +1 w +0 0 m +1288.75 0 l +1288.75 58.125 l +0 58.125 l +h +B +Q +BT +1766.458 1616.394 Td +(Multiprocessing)Tj +ET +q +1 0 0 -1 1163.75 1469.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +1263.083 1445.394 Td +(read 
framenumber)Tj +-7.583 -18 Td +(shm_framenumber[0])Tj +ET +q +1 0 0 -1 1163.75 1379.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +BT +1250.5 1351.925 Td +(conditions for first start:)Tj +46.833 -18 Td +[(-)-278 (i = 0)]TJ +-73.442 -18 Td +[(-)-278 (shm_bools[0]= True \(newframe\))]TJ +-27.225 -18 Td +[(-)-278 (shm_bools[1,2,3] = False \(p_rgb_finished\))]TJ +-3 -18 Td +[(-)-278 (shm_bools[7] = TRUE \(p_red_start_trigger\))]TJ +ET +q +1 0 0 -1 1163.75 1229.99995 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 119.063 l +0 119.063 l +h +B +Q +BT +1231.167 1184.394 Td +(conditions for start processing:)Tj +-23.833 -18 Td +[(-)-278 (framenumber > last_processed_frame)]TJ +-13.667 -18 Td +[(-)-278 (shm_bools[7] = TRUE \(p_red_start_trigger\))]TJ +ET +q +1 0 0 -1 1312.5 1080.21875 cm +0.82 0.87 0.93 rg +1 w +0 0 m +150 29.875 l +0 59.75 l +-150 29.875 l +h +B +Q +BT +1233.225 1046.269 Td +(conditions for first start = True)Tj +ET +q +1 0 0 -1 1132.5 989.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +360 0 l +360 119.063 l +0 119.063 l +h +B +Q +BT +1302.833 952.925 Td +(set:)Tj +-91.275 -18 Td +[(-)-278 (shm_bools[4] = True \(p_red_started\))]TJ +-53.2 -18 Td +[(-)-278 (shm_bools[7] = False \(p_red_start_trigger\) | reset trigger)]TJ +48.308 -18 Td +[(-)-278 (shm_bools[1] = False \(p_red_finished\))]TJ +ET +q +1 0 0 -1 1163.75 839.53125 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +1275.167 806.394 Td +(do processing)Tj +ET +q +1 0 0 -1 1163.75 749.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 89.063 l +0 89.063 l +h +B +Q +BT +1297.5 727.925 Td +(i += 1)Tj +3.667 -18 Td +(set: )Tj +-91.942 -18 Td +[(-)-278 (shm_bools[1] = True \(p_red_finished\))]TJ +-0.225 -18 Td +[(-)-278 (shm_bools[4] = False \(p_red_started\))]TJ +ET +q +1 0 0 -1 1312.5 600.34375 cm +0.82 0.87 0.93 rg +1 w +0 0 m +150 29.875 l +0 59.75 l +-150 29.875 l +h +B +Q +BT +1215.667 566.394 Td +(conditions for start 
processing = true)Tj +ET +q +1 0 0 -1 1132.5 509.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +360 0 l +360 119.063 l +0 119.063 l +h +B +Q +BT +1302.833 472.925 Td +(set:)Tj +-91.275 -18 Td +[(-)-278 (shm_bools[4] = True \(p_red_started\))]TJ +-53.2 -18 Td +[(-)-278 (shm_bools[7] = False \(p_red_start_trigger\) | reset trigger)]TJ +48.308 -18 Td +[(-)-278 (shm_bools[1] = False \(p_red_finished\))]TJ +ET +q +1 0 0 -1 1163.75 359.53125 cm +1 0.96 0.32 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +1275.167 326.394 Td +(do processing)Tj +ET +q +1 0 0 -1 1163.75 269.53125 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 59.063 l +0 59.063 l +h +B +Q +BT +1212.892 235.925 Td +(shm_bools[1] = True \(p_red_finished\))Tj +ET +q +1 0 0 -1 1163.75 180.00005 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 59.063 l +0 59.063 l +h +B +Q +BT +1210.917 146.394 Td +(last_processed_frame = framenumber)Tj +ET +q +1 0 0 -1 1163.75 90.00005 cm +1 0.64 0.64 rg +1 w +0 0 m +297.5 0 l +297.5 59.063 l +0 59.063 l +h +B +Q +BT +1212.667 56.394 Td +(shm_bools[4] = False \(p_red_started\))Tj +ET +q +1 0 0 -1 1163.75 1559.53125 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +1271.5 1526.394 Td +(processing_red)Tj +ET +q +1 0 0 -1 1763.75 1559.53125 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +1864.833 1526.394 Td +(processing_green)Tj +ET +q +1 0 0 -1 2153.75 1559.53125 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 58.125 l +0 58.125 l +h +B +Q +BT +2258.833 1526.394 Td +(processing_blue)Tj +ET +q +1 0 0 -1 1763.75 1469.53125 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 1439.063 l +0 1439.063 l +h +B +Q +BT +1847.167 745.925 Td +(same as processing_red)Tj +ET +q +1 0 0 -1 2153.75 1469.53125 cm +0.82 0.87 0.93 rg +1 w +0 0 m +297.5 0 l +297.5 1439.063 l +0 1439.063 l +h +B +Q +BT +2237.167 745.925 Td +(same as processing_red)Tj +ET +q +1 0 0 -1 352.5 1702.96875 cm +0.82 0.87 0.93 rg +1 
w +0 0 m +-73.167 0 -132.5 -16.792 -132.5 -37.5 c +-132.5 -58.208 -73.167 -75 0 -75 c +73.166 -75 132.5 -58.208 132.5 -37.5 c +132.5 -16.792 73.166 0 0 0 c +h +B +Q +BT +339.833 1736.394 Td +(Start)Tj +ET +q +1 0 0 -1 30 1245.99975 cm +0.27 1 0 rg +1 w +/GS1 gs +0 0 m +910.25 0 l +910.25 495.531 l +0 495.531 l +h +B +Q +BT +/T1_0 17 Tf +0 1 -1 0 45.8667 899.5344 Tm +(Get frames from picamera)Tj +ET +q +1 0 0 -1 727.5 1830.00005 cm +0.82 0.87 0.93 rg +1 w +/GS2 gs +0 0 m +390 0 l +390 119.063 l +0 119.063 l +h +B +Q +Q +BT +/T1_1 27 Tf +810.74 1766.456 Td +(mp_vorbereitung)Tj +/C2_0 27 Tf +<0003>Tj +ET + +endstream endobj xref +0 1 +0000000000 65535 f +3 1 +0000072315 00000 n +23 2 +0000072548 00000 n +0000072796 00000 n +27 1 +0000072910 00000 n +36 1 +0000076079 00000 n +trailer +<]/Prev 71836>> +startxref +87242 +%%EOF diff --git a/02_Kameraaufnahme/mp_Vorbereitung/PiCameraVideoPort_mp.py b/02_Kameraaufnahme/mp_Vorbereitung/PiCameraVideoPort_mp.py new file mode 100644 index 0000000..359e40c --- /dev/null +++ b/02_Kameraaufnahme/mp_Vorbereitung/PiCameraVideoPort_mp.py @@ -0,0 +1,594 @@ +# Creation Date: 14.01.2022 +# Author: Kenan Gömek +# This script takes pictures with Picameras VideoPort like it will be used to work with OpenCV and saves it with OpenCV to have the real use case pictures. +# This script is designed for capturing a Video with the frame number in it. press "q" to quit. +# You can take images with "i". 
+ +# Update: 28.02.2022 +# This program is a development step for the final program +# This program works with multiprocessing and with Picamera +# create shared memorys once before exectution instead of twice: in main and take_image_picamera_opencv + +import cv2 as cv +import numpy as np + +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os +import sys + +from multiprocessing import Process, shared_memory + + +# Define camera settings + +# divide origin resoluton by a number, to have the origin aspect ratio +# RESOLUTION = (3280, 2464) # Max Photo-Resolution CAM03 and CAM04 # no image with PiCamera Videoport at this Resolution.. Probably GPU Memory and CPU issues. +# RESOLUTION = (1640,1232) # 2nd best Resolution for CAM03 and CAM04 with FUll FOV (2x2 binning) # Mode 4 +SENSOR_MODE = 4 # corresponding sensor mode to resolution +OUTPUT_RESOLUTION = (416, 320) # (width, heigth) +image_width = OUTPUT_RESOLUTION[0] +image_heigth = OUTPUT_RESOLUTION[1] + # (410,308) is being upscaled to (416,320) from ISP (warning in bash), but image will have still (410,308) pixels. +# OUTPUT_RESOLUTION = (820, 616) # (1640x1232)/2=(820,616) + # bash: frame size rounded up from 820x616 to 832x624 +number_of_colorchannels = 3 # r, g, b +size_of_frame=int(image_heigth*image_heigth) +frame_dimension = int(1) + +AWB_MODE = 'off' # Auto white balance mode +AWB_GAINS = (Fraction(485, 256), Fraction(397, 256)) # White Balance Gains to have colours read correctly: (red, blue) +ISO = 100 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 30 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. My default: 2 s + +# miscellaneous parameters +max_value_of_uint64 = int((2**64) - 1) # @30 fps: konservative calculated driving time: 1.95*1e10 years --> Integer overflow not relevant. 
+ +# settings for development +show_opencv_window = False + +# create shared Memorys for main-process +# shared memory for bools +shm_bools_pre=np.array([False, False, False, False, False, False, False, False, False, False], dtype=np.bool8) # create numpy array with bools stored in it + # [0]: newframe [1]: p_red_finished [2]: p_green_finished [3]: p_blue_finished + # [4]: p_red_started [5]: p_green_started [6]: p_blue_started + # [7]: p_red_start_trigger [8]: p_green_start_trigger [9]: p_blue_start_trigger +size_of_buffer = shm_bools_pre.nbytes +print(f"size of buffer: {size_of_buffer}") # size of buffer: 10 +print(f"shm_bools dtype: {shm_bools_pre.dtype}") # dtype: bool +shm_bools_create = shared_memory.SharedMemory(name="shm_bools", create=True, size=shm_bools_pre.nbytes) # create a new shared memory block +shm_bools = np.ndarray(shm_bools_pre.shape, dtype=shm_bools_pre.dtype, buffer=shm_bools_create.buf) # create a NumPy array backed by shared memory +shm_bools[:] = shm_bools_pre[:] # Copy the original data into shared memory + +# print(shm_bool) +# print(shm_bools.name) + +# shared memory for framenumber +shm_framenumber_pre=np.array([0], dtype=np.uint64) +size_of_buffer = shm_framenumber_pre.nbytes +print(f"size of framenumber-buffer: {size_of_buffer}") #8 +print(f"shm_framenumber dtype: {shm_framenumber_pre.dtype}") #uint64 +shm_framenumber_create = shared_memory.SharedMemory(name="shm_framenumber", create=True, size=shm_framenumber_pre.nbytes) # create a new shared memory block +shm_framenumber = np.ndarray(shm_framenumber_pre.shape, dtype=shm_framenumber_pre.dtype, buffer=shm_framenumber_create.buf) # create a NumPy array backed by shared memory +shm_framenumber[:] = shm_framenumber_pre[:] # Copy the original data into shared memory +# print(shm_framenumber) # [0] +# print(shm_framenumber_create.name) # shm_framenumber + +# shared memory for red, green, blue frame +int_black = 0 # integer for black color/ no color +shm_colorframes_pre = np.full(\ + 
(image_heigth,image_width), \ + int_black, dtype=np.uint8) +size_of_buffer = shm_colorframes_pre.nbytes +print(f"size of colorframe-buffer: {size_of_buffer}") #133 120 +print(f"shm_colorframes_pre dtype: {shm_colorframes_pre.dtype}") #uint8 +shm_redframe_create = shared_memory.SharedMemory(name="shm_redframe", create=True, size=shm_colorframes_pre.nbytes) # create a new shared memory block +shm_greenframe_create = shared_memory.SharedMemory(name="shm_greenframe", create=True, size=shm_colorframes_pre.nbytes) # create a new shared memory block +shm_blueframe_create = shared_memory.SharedMemory(name="shm_blueframe", create=True, size=shm_colorframes_pre.nbytes) # create a new shared memory block +shm_redframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_redframe_create.buf) # create a NumPy array backed by shared memory +shm_greenframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_greenframe_create.buf) # create a NumPy array backed by shared memory +shm_blueframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_blueframe_create.buf) # create a NumPy array backed by shared memory +shm_redframe[:] = shm_colorframes_pre[:] # Copy the original data into shared memory +shm_greenframe[:] = shm_colorframes_pre[:] # Copy the original data into shared memory +shm_blueframe[:] = shm_colorframes_pre[:] # Copy the original data into shared memory + + +# ---------------------------------------------------------------------------- +# Define Funcions +def get_frames_from_picamera(shutter_speed): + # newframe= shm_bools[0] # do not use this! 
no updted values in "newframe" + # framenumber = shm_framenumber[0] + + # Initialise Camera + with picamera.PiCamera() as camera: + with PiRGBArray(camera) as output: + # Set camera settings + camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode + camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage! + camera.framerate = FRAMERATE + + camera.awb_mode = AWB_MODE + camera.awb_gains = AWB_GAINS + + camera.iso = ISO + camera.shutter_speed = shutter_speed + camera.exposure_mode = EXPOSURE_MODE + + time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings + + t_start= time.perf_counter() # save time for fps calculation + + for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)): + # General information: + # - always the newest frame is recieved: processing must be faster than fps if every frame should be processed + + shm_bools[0] = True + framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1 + + + shm_framenumber[0] = framenumber + #print('') + #print(f"new frame: {framenumber}") + + #image = frame.array # raw NumPy array without JPEG encoding + + b,g,r = cv.split(frame.array) # split colour channels of raw NumPy array without JPEG encoding + shm_redframe[:] = r + shm_greenframe[:] = g + shm_blueframe[:] = b + # for better performance one can assign directly in funtion line the values to the shm_memorys: shm_red, .. , ... = cv.split(..) 
+ + shm_bools[7:10]=[True] # trigger the start of the processing for each colorchannel + #print(shm_bools[7], shm_bools[8], shm_bools[9]) + + #display_image_with_text(image, shutter_speed, framenumber, camera_exposure_speed, trigger_record_OpenCV, out) # show the frame + + output.truncate(0) # clear the stream for next frame + + if framenumber == 500: # 5 sek @ 30 fps, only for performance measuring + t_stop=time.perf_counter() + print(f"calculated fps: {framenumber/(t_stop-t_start)}") + break + + +def display_image_with_text(img, shutter_speed, framenumber, camera_exposure_speed, trigger_record_OpenCV, out): + img = img.copy() # make copy of image and do not modify the original image + + # please activate only one trigger once + trigger_show_brightness = 0 # trigger for (not) calculating andshowing the brightness of the image+ + if trigger_show_brightness == 1: + arithmetic_mean_of_brightness_per_pixel_relative = calc_arithmetic_mean_of_brightness_per_pixel(img) + + trigger_show_max_brightness_values_of_colour_channels = 0 # trigger for (not) calculating and showing max values of colour chanels + if trigger_show_max_brightness_values_of_colour_channels == 1: + r_max, g_max, b_max = get_max_rgb_values(img) + + font = cv.FONT_HERSHEY_SIMPLEX # font + fontScale = 1 # fontScale + color = (255, 255, 255) # Font colour in BGR + thickness = 1 # Line thickness in px + + # set text position + frame_width = int(img.shape[1]) + frame_height = int(img.shape[0]) + text_start_position_Y = int(round(frame_height*0.12)) # start position of text in pixels 12 % of frame height + text_linespacing = 50 # line spacing between two strings in pixels + # text_start_position_X = int(frame_width/4) # start text from 1/4 of image width + text_start_position_X = int(0) # start text from left edge of image + + # set position in (x,y)-coordinated from top left corner. Bottom-left corner of the text string in the image. 
+ pos_1 = (text_start_position_X, text_start_position_Y) # start text from 1/4 of image width + pos_2 = (text_start_position_X, text_start_position_Y+text_linespacing) # start text from 1/4 of image width + pos_3 = (text_start_position_X, text_start_position_Y+2*text_linespacing) # start text from 1/4 of image width + if trigger_show_brightness==1 or trigger_show_max_brightness_values_of_colour_channels==1: + pos_4 = (text_start_position_X, text_start_position_Y+3*text_linespacing) # start text from 1/4 of image width + + + # define text to display + text_line_1 = f"set ss: {shutter_speed} us" + text_line_3 = f"Frame: {framenumber}" + text_line_2 = f"ret exs: {camera_exposure_speed} us" + if trigger_show_brightness==1: + if arithmetic_mean_of_brightness_per_pixel_relative >= 0.01: + text_line_4 = f"brightness: {round(arithmetic_mean_of_brightness_per_pixel_relative*100,2)} %" + elif arithmetic_mean_of_brightness_per_pixel_relative < 0.01: + text_line_4 = f"brightness: {round(arithmetic_mean_of_brightness_per_pixel_relative*10e3,2)} pm" + if trigger_show_max_brightness_values_of_colour_channels==1: + text_line_4 = f"max: r:{r_max} g:{g_max} b:{b_max}" + + + # put the text into the image + image_text_1 = cv.putText(img, text_line_1, pos_1, font, + fontScale, color, thickness, cv.LINE_AA) + image_text_2 = cv.putText(img, text_line_2, pos_2, font, + fontScale, color, thickness, cv.LINE_AA) + image_text_3 = cv.putText(img, text_line_3, pos_3, font, + fontScale, color, thickness, cv.LINE_AA) + if trigger_show_brightness==1 or trigger_show_max_brightness_values_of_colour_channels==1: + image_text_4 = cv.putText(img, text_line_4, pos_4, font, + fontScale, color, thickness, cv.LINE_AA) + + + cv.imshow("Current Frame", img) # display the image + if trigger_record_OpenCV == 1: + out.write(img) # write frame to Video + + +def calc_arithmetic_mean_of_brightness_per_pixel(image): + """Calculate overall brightness per pixel of the image. 
Mittelere Helligkeit pro pixel berechnen.""" + #Comment: So rechenintensiv, dass man kein Blitzen sieht im Bild. (Oder sehr selten bzw. schwach). Daher anzeige von max-werten + + b,g,r = cv.split(image) # OpenCV works with bgr. Image is also speciefied to be captured in bgr. + r=r.astype('uint16') # set dtype of one colour to uint16, because the sum of 255+255+255 >255 =765 + #the rest will also be uint16 then + image_heigth = r.shape[0] + image_width = r.shape[1] + + number_of_colour_channels = 3 + arithmetic_mean_of_brightness_image = np.sum((r+g+b)/number_of_colour_channels) + arithmetic_mean_of_brightness_per_pixel = arithmetic_mean_of_brightness_image/(image_width*image_heigth) + + max_possible_brightness = 255 # maximum possible brightness + arithmetic_mean_of_brightness_per_pixel_relative = arithmetic_mean_of_brightness_per_pixel/max_possible_brightness + + return arithmetic_mean_of_brightness_per_pixel_relative + +def get_max_rgb_values(image): + """get max values of colour channels""" + b,g,r = cv.split(image) # OpenCV works with bgr. Image is also speciefied to be captured in bgr. + + r_max=r.max() + g_max=g.max() + b_max=b.max() + + return r_max, g_max, b_max + +def create_folder_for_captures(): + # Create folder for saving the captured pictures + now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M") + path_cwd = os.getcwd() + + path_saveFolder = path_cwd+r"/Capture_"+d1 + try: + os.mkdir(path_saveFolder) + folder_exists = True + except OSError: + print("Error! 
Ending script.") + quit() + + return path_saveFolder, folder_exists + +def do_processing(): + time.sleep(0.001) + #print("ohh i am doing high complex image analysis with computer vision") + +def do_processing_frame_r(frame): + print(f"max frame color red: {frame.max()}") +def do_processing_frame_g(frame): + print(f"max frame color green: {frame.max()}") +def do_processing_frame_b(frame): + print(f"max frame color blue: {frame.max()}") + + + +def processing_red(): + shm_bool_init = shared_memory.SharedMemory(name="shm_bools") # Attach to existing shared memory block + shm_bools = np.ndarray((10,), dtype=np.bool8, buffer= shm_bool_init.buf) # do not: newframe = np.array(...)[0] --> then one can not assign new value in main script + newframe = shm_bools[0] + p_red_finished = shm_bools[1] # not used, but for clarity + p_red_started = shm_bools[4] + + shm_framenumber_init = shared_memory.SharedMemory\ + (name="shm_framenumber") # Attach to existing shared memory block + shm_framenumber = np.ndarray((1,), dtype=np.uint64, \ + buffer= shm_framenumber_init.buf) + # framenumer = shm_framenumber[0] + + shm_redframe_init = shared_memory.SharedMemory\ + (name="shm_redframe") # Attach to existing shared memory block + shm_redframe = np.ndarray((image_heigth,image_width), dtype=np.uint8, \ + buffer= shm_redframe_init.buf) + + + i=0 + while True: + try: + framenumber = shm_framenumber[0] + if i==0: + last_processed_frame = framenumber + + conditions_for_first_start = (i==0) and\ + (shm_bools[0] == True) and \ + (shm_bools[1] == False) and (shm_bools[2] == False) and (shm_bools[3] == False) \ + and (shm_bools[7] == True) + + conditions_for_starting_processing = (framenumber>last_processed_frame) and (shm_bools[7] == True) + # newframe and all color-channel-processings have to be finished + + if conditions_for_first_start == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = 
time.perf_counter_ns() + do_processing() + i += 1 + #print(f"first processing red finished. frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + + shm_bools[4] = False # process ended + elif conditions_for_starting_processing == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + #print(f"red: framenumber: {framenumber}, last_processed_frame: {last_processed_frame}") + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = time.perf_counter_ns() + do_processing() + #print(f"max frame color red: {shm_redframe.max()}") + if show_opencv_window: + cv.imshow("red", shm_redframe) + cv.waitKey(1) + #print(f"processing red finished. frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + #t2 = time.perf_counter_ns() + #print(f"processing time for red channel: {round((t2-t1)*1e-6,2)} ms") + + last_processed_frame = framenumber + shm_bools[4] = False # process ended + # elif shm_bools[0] == False: + # pass + #print(f"no new red frame") + + # image processing finished + + + + except KeyboardInterrupt: + try: + shm_bool_init.close() + shm_framenumber_init.close() + shm_redframe_init.close() + + except FileNotFoundError: + # Memory already destroyed + pass + +def processing_green(): + shm_bool_init = shared_memory.SharedMemory(name="shm_bools") + shm_bools = np.ndarray((10,), dtype=np.bool8, buffer= shm_bool_init.buf) # do not: newframe = np.array(...)[0] --> then one can not assign new value in main script + newframe= shm_bools[0] + p_green_finished= shm_bools[2] # not used, but for clarity + p_green_started = shm_bools[5] + + shm_framenumber_init = shared_memory.SharedMemory\ + (name="shm_framenumber") # Attach to existing shared memory block + shm_framenumber = np.ndarray((1,), dtype=np.uint64, \ + buffer= shm_framenumber_init.buf) + # framenumer = shm_framenumber[0] + + shm_greenframe_init = shared_memory.SharedMemory\ + (name="shm_greenframe") # Attach to existing 
shared memory block + shm_greenframe = np.ndarray((image_heigth,image_width), dtype=np.uint8, \ + buffer= shm_greenframe_init.buf) + + i=0 + while True: + try: + framenumber = shm_framenumber[0] + if i==0: + last_processed_frame = framenumber + + conditions_for_first_start = (i==0) and\ + (shm_bools[0] == True) and \ + (shm_bools[1] == False) and (shm_bools[2] == False) and (shm_bools[3] == False) \ + and (shm_bools[8] == True) + + conditions_for_starting_processing = (framenumber>last_processed_frame) and (shm_bools[8] == True) + + if conditions_for_first_start == True: + shm_bools[5] = True # process started + shm_bools[8] = False # reset trigger + + shm_bools[2] = False # set bool for p_green_finished to false + i += 1 + do_processing() + #print(f"first processing green finished. frame: {framenumber}") + shm_bools[2] = True # set bool for p_green_finished to true + + shm_bools[5] = False # process ended + elif conditions_for_starting_processing == True: + shm_bools[5] = True # process started + shm_bools[8] = False # reset trigger + #print(f"green: framenumber: {framenumber}, last_processed_frame: {last_processed_frame}") + + shm_bools[2] = False # set bool for p_green_finished to false + do_processing() + if show_opencv_window: + cv.imshow("green", shm_greenframe) + cv.waitKey(1) + #print(f"max frame color green: {shm_greenframe.max()}") + #print(f"processing green finished. 
frame: {framenumber}") + shm_bools[2] = True # set bool for p_green_finished to true + + last_processed_frame = framenumber + shm_bools[5] = False # process ended + # elif shm_bools[0] == False: + # pass + # # print(f"no new green frame") + + # image processing finished + + + + except KeyboardInterrupt: + try: + shm_bool_init.close() + shm_framenumber_init.close() + shm_greenframe_init.close() + except FileNotFoundError: + # Memory already destroyed + pass + +def processing_blue(): + shm_bools_init = shared_memory.SharedMemory(name="shm_bools") + shm_bools = np.ndarray((10,), dtype=np.bool8, buffer= shm_bools_init.buf) # do not: newframe = np.array(...)[0] --> then one can not assign new value in main script + newframe= shm_bools[0] + p_red_finished= shm_bools[3] # not used, but for clarity + p_blue_started = shm_bools[6] + + shm_framenumber_init = shared_memory.SharedMemory\ + (name="shm_framenumber") # Attach to existing shared memory block + shm_framenumber = np.ndarray((1,), dtype=np.uint64, \ + buffer= shm_framenumber_init.buf) + # framenumer = shm_framenumber[0] + + shm_blueframe_init = shared_memory.SharedMemory\ + (name="shm_blueframe") # Attach to existing shared memory block + shm_blueframe = np.ndarray((image_heigth,image_width), dtype=np.uint8, \ + buffer= shm_blueframe_init.buf) + + i=0 + while True: + try: + framenumber = shm_framenumber[0] + if i==0: + last_processed_frame = framenumber + + + conditions_for_first_start = (i==0) and\ + (shm_bools[0] == True) and \ + (shm_bools[1] == False) and (shm_bools[2] == False) and (shm_bools[3] == False) \ + and (shm_bools[9] == True) + + conditions_for_starting_processing = (framenumber>last_processed_frame) and (shm_bools[9] == True) + # newframe and all color-channel-processings have to be finished + if conditions_for_first_start == True: + shm_bools[6] = True # process started + shm_bools[9] = False # reset trigger + + shm_bools[3] = False # set bool for p_blue_finished to false + i += 1 + do_processing() + 
#print(f"first processing blue finished. frame: {framenumber}") + shm_bools[3] = True # set bool for p_blue_finished to true + + shm_bools[6] = False # process ended + elif conditions_for_starting_processing == True: + shm_bools[6] = True # process started + shm_bools[9] = False # reset trigger + + #print(f"blue: framenumber: {framenumber}, last_processed_frame: {last_processed_frame}") + + shm_bools[3] = False # set bool for p_blue_finished to false + do_processing() + if show_opencv_window: + cv.imshow("blue", shm_blueframe) + cv.waitKey(1) + #print(f"max frame color blue: {shm_blueframe.max()}") + #print(f"processing blue finished. frame: {framenumber}") + shm_bools[3] = True # set bool for p_blue_finished to true + + last_processed_frame = framenumber + shm_bools[6] = False # process ended + # elif shm_bools[0] == False: + # pass + # #print(f"no new blue frame") + + # image processing finished + + + + except KeyboardInterrupt: + try: + shm_bools_init.close() + shm_framenumber_init.close() + shm_blueframe_init.close() + except FileNotFoundError: + # Memory already destroyed + pass + +# ---------------------------------------------------------------------------- +# main +def main(): + start = time.perf_counter() + + try: + # create processes + p_red = Process(target=processing_red) + p_green = Process(target=processing_green) + p_blue = Process(target=processing_blue) + processes = [p_red, p_green, p_blue] + + print(f"waiting 1 second to create processes") + time.sleep(1) # sind prozesse schon vorhanden + + # start acitivity of processes + for process in processes: + process.start() + + # start capturing + get_frames_from_picamera(shutter_speed=33333) #Can see something at normal light conditions + # get_frames_from_picamera(shutter_speed=1000) + + + print('*******************************') + # this below code is only executed if the loop in take_image_picamera_opencv is breaked + # In real use case there will be no end of this program + + # wait for all 
processes to finisch + + # main is blocked as long all processes are not finished + # The processes will never finish by design (better for performance, not to check for several triggers) + # for process in processes: + # process.join() + + for process in processes: + process.terminate() + + # print time measuring + end = time.perf_counter() + print(f'Script finished in {round(end-start, 2)} s') + + # close each SharedMemory instance and unlink to release the shared memory + shm_bools_create.close() + shm_bools_create.unlink() + shm_framenumber_create.close() + shm_framenumber_create.unlink() + shm_redframe_create.close() + shm_redframe_create.unlink() + shm_greenframe_create.close() + shm_greenframe_create.unlink() + shm_blueframe_create.close() + shm_blueframe_create.unlink() + + except KeyboardInterrupt: + # Normally this prgoram never gets keyboard interrupted! But here this case is nevertheless handled + # End Script + try: + # close each SharedMemory instance and unlink to release the shared memory + shm_bools.close() + shm_bools.unlink() + shm_framenumber_create.close() + shm_framenumber_create.unlink() + shm_redframe_create.close() + shm_redframe_create.unlink() + shm_greenframe_create.close() + shm_greenframe_create.unlink() + shm_blueframe_create.close() + shm_blueframe_create.unlink() + + except FileNotFoundError: + # Memory already destroyed + pass + + +if __name__ == "__main__": + main() diff --git a/02_Kameraaufnahme/mp_Vorbereitung/shared_memory.xlsx b/02_Kameraaufnahme/mp_Vorbereitung/shared_memory.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..29f8cac68286b76dcc50f752d9d55950219cccb5 GIT binary patch literal 13472 zcmeHu^;;du)-^7{-95oQ5Znpw7Tn$4-Q9C=x8UyX?(PuW-3fdoGxwc)GxPlg_x2B6 zr=RYzi$wv38J2GCA%otEET< z0U~yNgmE1CC)k%CE9VnTz-YeWIiweN6?h2!NBL4ph!doKL4DvO* zFVfqGa(hoH1Ix%4aAO+6$`k79k#Eb60WoU|HQ9Z&)kh8ddU4+|Crw{NR-ke6y{!Sa z(^!O*a-zU=p;>+YvOl1ArSY>X{`S;N4{BZnqht9KtXE4oBds3}jLDDB8}N8NTo8<& 
z`*bEm^VtN@%fq)snB?(Lz?Sb8ok>WG$_8DUK=|@Pz(-vxR})~mY^!TH8XF};c=)UJVX;5Hq4M<=6iDVjFk7ogPjdYh zh!Ss*gn7fPmaT!g9Ubk@^Z&5=UkuQHd-al7N$GBS*uXQ9=b(YR+0|%7esL#0k!B(V zPjAr`gxZLl_c*KVWVnb5*nXhGUag+bL(8k25r+drx8Ipc!jMrpNa~zQ0}~!?93Ux4 zZ2>|yC7V4+j$HQc+CUy58!x$*(8pqlF$b1bd46v+y~5wZXU1|<&D`) zDsL=UZl7swut z-d!s1+i7M%D5VX$H{qq;|!AnKl`fyacrN%ygeox)IWRAlpI)@%VRK!H%&sj}eG6(4|lj_@fm6~Ti zG+mZmi^~f}69V>V(V6TRrIc6Dp(x1d7!h+>1RRV;mE?ZP=TIuvJ?t8P=)4CL1hn{L8`cqo(vp{F`^t3+0(Bhtz{CxWHJ16STpg`9gJU!!e?7UO z-VcM=@Dfp9BHiIyILO9@a}r1_>rbJ$^S6E`6CWAZWe(5OGUvusSMLT=ON>?IV81js zMLaPbB}2`s^%w*`F`gpi=SpNz&CGB>V5<0BW~#Z&4tYZGF%m+BXYu;zWhR|qM-nIp zo-F_vZ1U$u@J92i7ZlOtw8GC{8(5y&pzbn=dy!T7#=u1hHbEW5uM%qr3yBCfo&e^@ zj|^2l*>M{^*4EzPtha=X(>;#D&g@BzgH$SLo-dth!z@qJ@dNPes?2un@Uw}>C6IdD zn2ujv=RG!06m~qE9-Shc5Isbt?WIno?Ni-b)3q}8-I!D82x8^aa*mv_Po77$4Ev9Z z3;HzecH4F_c;2A-ckudf$0uC9-D!HuaM(alz;EFFBgy?cg#VTBfZrCUZ&Sd3_faAz zDc(bm*b4I!MDLX1h=#OiPe**Lc!U5oSVOT)O7zk5dL4(dNke%~oDS3~(B*8j-|>za z{yQkjO()g2P-HMy6!S9`Wl z@()-+Ir+qb*NszwOr~S>4veOD6L>>!6aH|`w)dmI{BfyoO?^FKGKJyJYooyOEkuT| z3&G?S7PKfQoD0z8{tOY-edp&}sLPgTLrIr0v(B7H*d5$rar`1?uD8jQFla!Sz>6Z* zZYMjbS@CSsp8sixqJ8<2dieUxaC@+ScKvOn{~j{&&gj=+U_d}P@IXMAZ%_O=-`g1* z7}(p<{c0F~PW-8H%aJMch=FHRH+a!!Yy`+M14Sv+hieLo^+9HOf!g4DrW`>hMQ3Y1 z(Z^zqqF+EbKixmQUciPm=NOlAk&hZApsZg6gX6`aoqb|^xM;1|>zN94M6)n}WOIR# zJOvQ&P{YevIDl$Xzqe;6i(v5BZAxY){N|{SfY(--hdi^PV4=QBl{woWT4TB45<*3O zHV_hBsD@cp#^zV6&jFmq#)n!nlj*g3u9;!tyXuRgVByp{n!ArAdkIL{LlUFztm>B} zL!(g}dD<6_R}l{Q;_c)Nz?v_Q2Qv~gRl$KAJRQ7pcq~k5iq{aCe^~dwAontNRgF_DL9QhqxqV_x6h{CF;@XU%M@%bAlkSas+Em#pKIgGUw)`GHie=le7= zu9tjCL}tGYr^HNSoU5pJIC2fez4v?cWfPKK6=#q%MCBMHpDq93u2(@p6p(&sq+BUn za1+LitQa#JCy6bBUqSTi6c5m_^rMIMF88(d_Q`q$$^D=X#*oYHAYZt2a%%@A6)aG* zlO>Pg1+9g9M>EZF?<&ttDZE~oB(&a? 
zZpn4xn4^o8Y4dfw?%S1-+8(H|YC8&_gXv0l+2| zS4xW29lIPbO@K1Pk_u5tm#1c4fm)SGrzsXN(LHiH@1YK<0BoXKb6hn6ql$V+O>Yk# zN{xv>jD8;xq|udRA^og4ty+~Y`V}n$3`KnpbtTM9(`%S@K0>9kM)yq9X_3mYA6a@Q z%1m&a*hnc9Sz^M@aFox2p+vbUpfg{a<5Pp(Wf;l96o}ncBC(k8J5^t{6~1UYU!B@@XzLzXe0ntxJcy%Wvr=V^D-Z47y4ZcyV46?WzQwgNtM>r zrLoJbN5A_u#o*YsU+?hyu1*<}GTYaye7LQw7cPy*)kukOf2Ll>7K4v_7_vFiLSzlFkP_#tu~oUuGH{_ z#P`zxFEdthCUSyug3{P7mi$SRV1<(gU^DF>?DBCh6dM{L9Ohbnu ztIs-Vc)v&&Nd|g9E}q|7B05!`Tq!0=?7gP1u8f-pBN55572WJD#lBRo3@FLa-B-Dm zNIU`n8b7sQg6$d9Bmm)K_~HfPdR&`uO!T?HFx>%9dxJ-1a2+ENCCdP|E$EMgK?9Jj z?$Go&X#}M>2tLf2C8{6TygTe=T4(c*wO_g6cyo|Sm}-0HRLbrBV9mUz%HdaAUCUML z&I4Z#lSfZvJg8eB@*e$|pS35z=~SKJ#4a|mxsRDRxYXJq^6r17`kE^nBqZy0AvaRY>)Iz=00W^$6N@5C(;7Pa z3PrbT2o3_1thijQzGlIbtqF)BDm?}f?^%kj>PI1wGOXIs z0xoR>cDXoSyb6-e;CViI$PpeVc!?b?RaCL%AWurRE%$tWJUeSYe|fwK`V!SHTEHEG zLHe>YuGRMZBWk`d2yL&-`QjulK{})5{8azAE0X?^>UK`Dy5J2w8NjW`2}73tG#{;G;zgKxp#kZS5qP@C z%By(%)a|D+NJhSrxlTdOc!H_4Sh1i~F}7KF$O7u^-_aq51PcYn!HXcMoQ}Pl?^8|WC>WuSjCG${^irSb? z3wQg(busF(0tsUpJ&7U5mED&9-;<^dV+BcXKiWW1gtp*FG;O$ztPvezefN%y*ld%m zKs#kY>7*h2GM&rGH$!K48oXmLa{Qwq6UASHdV06Ax6DE+b#XFDx^!l(H!)j}sc)hA zCaK#OK1GFlJfaO5xcFH6i>?NrV_DH27|iiOlVKXMQ%HrJBZ2fHgxf7TS+J2$Eart| zayj2j=}dt9m}uVo!Tto8CW*{FUmI(ht%f%|N#j#W&027@gJYr5=GZ)V{r9_1Zm_pI ztJA0A(VYbVU|lb>by%cY(g761;LHx|Nt$5g&GFAOjYYk?QjH<4CRg;lI{LF545Dkw z&#B~YU0wypHW>6qGUp9w?b3zSA##pVsH`d0ubox-LA!%+uw;YmMrmJ|_Y@;u+sjQq zo$Oqx?zRBf$_>=5vELrWo;!F71eOen5?8GAqoDjuj3lNC&X!fv^j?DznI5K(98PF-6@i zu%6fhhKq`mnq&x*q7u4hS?vq;L(68bPFKZRPlKc!5*$ptW4R)b`t<9N6oZg%b_fMG z9fbD3riGwM2BE>_d14hApnfR>npTqa`C^HnIiu!@nTw}y+{fp_tz;vXGB~_nw2^9( z%fc3yj0N=JmJ#$)gym}~PC|iFLjIDP;1Xk(2+J}Q2^@(t(pc5Bd1Hn_Ns5aI%~d=V zpqYpzXYC%bQt7>|FOIP;Be-PC7C@^w&^m*WzZ$^f6UpEnVHC~5-|9fb9srXNvwaZg zg<+UvL*ynRSlG>ckZX>J64EFvx^g*t`DKw8Wt3>C&K4YMGl#hhy5_PH3zDziFPsd8 zGuCh+0%_;q8VQqo=iOep>ho%+SnMd#%G2ci0!OBuCkkI^z)h9yMj})LD|x{hE=mKm z8j?!77a4qhxZ`Lsd78ihYj!o10a>W%E`@sB{t)hunoFOH_);v~rqeFJLU!%-rx}kc z*-}oUXhTm!)sk!}NtPtf27$E-N*+O)V!6Y)vl-&Orrp>hLfN!}9vrFFdR9c(>B~Xt 
z>G<9wrV>^ToLWhaZr_e`K?}ciB_sc7lrz^$zm>rfMaZ72vYC)_y5UhZ3rA~=fgH%Y zAUnzc_zx>8c3_{iTtSnjAk1Uj7>kiMQ&{$Lch>0wPBPV?n*-!Xjxl^qWhj}t9BU`w zyGOSNNqzV=&mGlA06?ns!<=P)V*`UAgA3E=b=03gg zMY7O9KyZJgUpspja|64dn}!;tRjX-wL@(j*UKN+=Y=g;DN}`Aw7|_tUqBzLqmfvg8 zU!Wp6Xf62442i$JFG-b-9(?z0g#RfmaF^Nee+q7ldk;@I+rXOpx+$4)`EC>EPK3Xc85l}NX?eyjh7F4 z#$s+j>j>y_LJSI0Qlu6NtJ}3hD^vPM1LK0|p-h*e@mQP+hB-0E2uwyDiP=O~E8Gll z1@U3`5FBNC<;`#f?*t&H(b|j)RPdi0G~1HO%ZA^lejZj~ZOk=cl};$8PgAG)Qf!O= zVRCkS^2M$1*6s*6LKARHb-XcJBjyq#eS zbO<2}{vb?^jMY-5yQFCV>I}Cm^-P++sQx;#Cl8jTViKg>x|UtocoTKfNb^ z0Tvvqefk~IxNeeB&eMAIM5F1J2M_3a;tf7EmU~9ub6my}v=f|Jko=R3D6pd5pqQGW zl$ucPNB3zmPCsFrB#wTAlh5w)nz;l+iu%iS1LCy=Xn-{CYU+m)lvbg|ViRBCctxF> z6v2~tmAz=4AVJ>5?MD0{1R<@?u`1v+Z0Ak?3Ed9o;OToKH76T&;~09@?gBtLAsfBO zxfyO{3#Mlkk2Vxk5Exmx%I3TbuASA7pjR7_1PuPUri6VYEG@WWGub$*WVA$Obj0oN z#O(Jm4X2Nlg|G3cgArf7{&5`n+V8a&-|h^(jU~w+MM;+Z>|Jp!~)Mv`{8rFbOvDEO92;Wz7+v+jB!gvi3>u3K5kl0Ie{7d<{*-v*sJg^EJ&1@hn-9( z8@xAPLY+f^62??=c%L!Q33t#Ud3WvWI_$oZMD-b0yhl5*9qUfhgOww8Am%9xmIE5(Q1sT6|R{Aq+03nEG4H2jQtw?g@{# z82)<-aWqIH8hA?|HC#YIWPkLG^;>A$8`#Pl*xUb%ak}5G8EAD)ERR*CgK_4uUgxA% za~Uel#H^=fBq;LyXGBy@oGc_Nb7N|j_QwUi%s+tT>m8Ibtxk^6e_T4v_o9Cvk1huF zAzsGyoG1w2);bh{+aItdK%NzZ>pGORm(GuncXH-7RkF6Es#3ST#DvW-_0)Pc{?vBn zaW>vENqS%(SyU4d_1OZsCV)dQO^Unmel-*GgLOs@5GsK>$OpgVp7|k^e(C23?$-Dp z(?gYLuA!b!_Lqb3r4vJ9M+*$kb&+`ePRX&cZVXyfB+Soluy>-UFMh=`B?P8IR7%O~ z*(n85=@0t>;u#j{8FQ{_9-SWU5Gb2z+w5)_R{D!+U}Zd^bsZhlilb;gPX}2k1;K|q zedsLC$(E^)41I%?y$n*H$c0tn+k`0|2D9UPpp;4A@?bVZrr5=B$c;J@_$A*N0Hl46 z3MQ28Tlf+yaih%|o0KSJrz_o&tP8j6B-Ld2?mea%4zmvICE5jUChAiyqiZ4g$e%IM zYMZj=BiswW;pCa-MNhwL#1gc^=>-9j+} z=lG34(e72Iv7s^l0Q$*`x~y zb+eyTDmGJ2@T?IJ;fIXB)VR%_obdXiwnux5XaLea}V5R7guSOE86v6ZBL-`&JYy z#v4P$QpbmO!+zv8w9g*Fay%Z!Bv5LI(moZ!%DJVTS3)3#9z-KxA-&ixvuN;hq^llh zqi`!eO-%3C9`r0N*{_vV!2O;(|yg%n9NYuKcL9OK2c+ut4fih z%z&QHz&3*sRr|eI$dN&bRL#ts%7~Let_fQ$eoDbi<51nAmjyi=g$wg`%41Wn zY=&V&9TK!}5v?xFB^^aucUM9LD18LM=?6p;zJ@}{NO1vL)MwRQv3}jwP!}8c0%bl^ zO>i^2KoY-Ag>vkRn_4Aew6MMdP{(Pd%Jih+9P{e6Z-5Jr$;d4GZ^8gdxB 
zuT(}vy652AAM${ zFLTQQTArEhmwjKkW?o@}_@uoZHK5Uk?eGSwxVzlZuSrj#ydZb9$dv^izPvlleEwv~ zZS}Yqd>hH%9&kCINj-Jvv{4O&^lC@WQ@#N^)Gluwb-;=nw!N;~*%9=qRS^s@dM*`Y2j<#f{7tk6QU@suft^jj~L2qD7?Yg^5ED4_!vtBqBM#V<11MO|Nf0y*v!@aat z-2-vZUsX)?bK*Ud%ms98$=r!F1>S-T;O`C(qc}mzsnpa^EmLeY`_)MlZ9O>0Pp5RS6CSo$42JH=6;6qUA;L<6K?W~{rP0* zj#v1>&5O~=)zfRrsbBlkL-%ANsoks1;AFacO5a5leKOt%VJEpM>pEomoF+6Q=M(Pk zHj6E1vh^v4-Lp{~8|6#2C;{HNN_C?!#^wm!e%A|ib?$;WLz6)wKQK2Oc+77XmcS- zcGeHtgw$QwxCIOXTAg4_7C&-cNr$=2fRyjmt#FNW;Vphlxlv5Qs~h>cShDF69&w!9 z3n0!m_Dy-kp@;E0(T1GlrD|zk1Y9jlBGC-N?##YNP>I~PZ|{G0Sm>z>!Md`I-QF}` z@g1Fq7kW3ymeMc7p_q1_kA<*00G`B2T21>JL~a#t z3&Kscs^cJ?@Pr@MtnkAZh}|U}ZU+}_d$J?qQKp*CEqwwS>A*D%JXh_I54rrSVchU$ zE3YYVQOivgxR9I#H2s~nC3GYe;T<_qydb3KSIgKh$&D>-iAWn)C~cQH($}2h4#O*~ zsVKNn+qNat1)&CD03>B0Z7iRkKN69FJ&n5VE>(p#bUtT>vuMJvaa=Tyiq3nzErkER z7$naRr{sAnUn{)TB>&v**z4%N@dy|0>sC{Q-CvUY#jS7a>?AReNYQ-YQ zNk@bY#5iSeoJFY=sCX2mZ`Yj?X=r3!LB@f zsfIf-23uc9L$ur%!2N8(YF*foRDrqF?gP8Rc_O@UsIx6qWda$L|) zfGW&pDf3SBtFDNyuQpQkJ9O}Fq%jjeyK5A%Pz28g;Wl&+y61<88olwT^%ruS$*Nh(@)-3pS9)95>!y10$y4a|sWF;C4Gm2kr=chXH@2mS@H1?8 zLkD%K-AK!VgrVEbcD}YNM%VgEUHKYMgC3owVgX&&L2scmSv|*Ix1I-nBGX*jJQJI1 z(dq?49tGFffb4zoi|eJkaanqu&8gl4%s&O(9Aj&|)LST;yagT7n`T1aN>9et%G!=j z&&t-|PaW_7q!Mr2GXK~~v2J?IfHUy*tnD_*xWo7f$4o{#dqQQ|7MQ{I(h^Z|F^@H1_5l_S^~av@-2*ABfD*`Lm}4UwzQV z?nf!1!y<-09Z6Bw5{|weXaz@Zxq*J)80nO_|3628MP%P;QnS8WydnM4x-b<3D zq1#)U^5j7$7X77JYfW#}t{IyPSAWB=vh9sbmnyRg#Tqh^Fu|OTPDf)h^sn-7sT+Sy zwDP;fFeUh|rHkB`O9)MZaqP}Wj1S}z8pSRA@{iI~`abm2w6`Fnf7>7<{(BJWSX=)u z2H%43&m%Ri)AAS51$qk~cp8!FNFKfn$*q zGJRWvGnn!65E45(+Ge8?#IU#l!;rB%-6+aBCciR$}GfGKy5>*9*$?RmdfB=!k9#yqf+(Vu++^sc8L*AEJ5y73U(EN)u9 z&2a=PUwp=`_oV&?scCHAI%o^&88)jnE}Z2BI=+RFxi??^Ou2WIHWSauCK?pKZ+>PX0~4{k!4sBG$i57a{&n;`A>` z>+i<@DWdz!6bK0L?w9fZM^g8@pWl^Le|Z{&{l7o)H=Wh*UVfK1{N*JS=^w=ozdQK7 zK=YRaXsllj{wmk}Zu&d*{FkXI?jNSV6VbnW_?`6p%L60+?|tx(6zJ~`f9DGSawzd8 zO?nfr{l*mje*KF%{N3sAYtFx%s!;rL`nMJ6?;ieh5%HJ(8{JC<1oXEh#qZ|-IsN~; fxisy+ng5v#WW>SWqWEXI4Gzfo4G6>xKd=5DEYj~c literal 0 HcmV?d00001 diff --git 
a/03_Aufnahmeserie/01_How-To-Start-Scripts.txt b/03_Aufnahmeserie/01_How-To-Start-Scripts.txt new file mode 100644 index 0000000..1dadafc --- /dev/null +++ b/03_Aufnahmeserie/01_How-To-Start-Scripts.txt @@ -0,0 +1,3 @@ +1. start bash +2. workon cv-4.5.3.56 +3. python $SCRIPT_NAME$ \ No newline at end of file diff --git a/03_Aufnahmeserie/01_Read Me.txt b/03_Aufnahmeserie/01_Read Me.txt new file mode 100644 index 0000000..aed5c2a --- /dev/null +++ b/03_Aufnahmeserie/01_Read Me.txt @@ -0,0 +1,5 @@ +#Author: Kenan Gömek +#Date: 07.12.2021 + +Ino-Code muss auf Arduino und der python-code auf den Raspberry. +Dann kann man für alle Farben von 0-255 und Leuchtmuster automatisch fotos aufnehmen. \ No newline at end of file diff --git a/03_Aufnahmeserie/Ansteuerung_LEDs_uC.ino b/03_Aufnahmeserie/Ansteuerung_LEDs_uC.ino new file mode 100644 index 0000000..2b8c82a --- /dev/null +++ b/03_Aufnahmeserie/Ansteuerung_LEDs_uC.ino @@ -0,0 +1,148 @@ +/* This program contains all 20 possible combinations for a lane wich have an orientation. + * The brightness of the color patterns is set here to 255. + * Furthermore colors can be transmitted via rrr-ggg-bbb values. + * It uses serial communication. + */ +#include +#define LED_PIN 9 +#define NUM_LEDS 29 + +String redString; String greenString; String blueString; String inputString; String color_pattern; + +Adafruit_NeoPixel strip = Adafruit_NeoPixel(NUM_LEDS, LED_PIN, NEO_GRB + NEO_KHZ800); + +/* Info: + * set the color of a pixel: + * strip.setPixelColor(n, red, green, blue); + * pixel starts from n=0 + * + * Multiple pixels can be set to the same color using the fill() function, + * which accepts one to three arguments. 
Typically it’s called like this: + * strip.fill(color, first, count) + */ + +int pause = 3000; // pause in ms + + +// Definition of Colors +int led_brightness = 255; +uint32_t red = strip.Color(led_brightness,0,0); +uint32_t green = strip.Color(0,led_brightness,0); +uint32_t blue = strip.Color(0,0,led_brightness); +uint32_t yellow = strip.Color(led_brightness,led_brightness,0); +uint32_t magenta = strip.Color(led_brightness,0,led_brightness); +uint32_t cyan = strip.Color(0,led_brightness,led_brightness); + + +void setup() { + // put your setup code here, to run once: + Serial.begin(9600); + + strip.begin(); + strip.clear(); strip.show(); // turn off all LEDs + + +} + +void loop() { + // put your main code here, to run repeatedly: + if (Serial.available()) { + inputString = Serial.readString(); + color_pattern = inputString; + Serial.print("Folgender Text wurde empfangen: "); + Serial.println(inputString); + + //String aufteilen in R G B + redString=inputString.substring(0,2+1); + greenString=inputString.substring(4,6+1); + blueString=inputString.substring(8,10+1); + } + + // define color patterns + if (color_pattern == "color_pattern_01") { + SetColorCombination(red, green, blue); // color pattern 01 + } + else if (color_pattern == "color_pattern_02") { + SetColorCombination(red, green, yellow); // color pattern 02 + } + else if (color_pattern == "color_pattern_03") { + SetColorCombination(red, green, magenta); // color pattern 03 + } + else if (color_pattern == "color_pattern_04") { + SetColorCombination(red, green, cyan); // color pattern 04 + } + else if (color_pattern == "color_pattern_05") { + SetColorCombination(red, blue, yellow); // color pattern 05 + } + else if (color_pattern == "color_pattern_06") { + SetColorCombination(red, blue, magenta); // color pattern 06 + } + else if (color_pattern == "color_pattern_07") { + SetColorCombination(red, blue, cyan); // color pattern 07 + } + else if (color_pattern == "color_pattern_08") { + SetColorCombination(red, 
yellow, magenta); // color pattern 08 + } + else if (color_pattern == "color_pattern_09") { + SetColorCombination(red, yellow, cyan); // color pattern 09 + } + else if (color_pattern == "color_pattern_10") { + SetColorCombination(red, magenta, cyan); // color pattern 10 + } + + else if (color_pattern == "color_pattern_11") { + SetColorCombination(green, blue, yellow); // color pattern 11 + } + else if (color_pattern == "color_pattern_12") { + SetColorCombination(green, blue, magenta); // color pattern 12 + } + else if (color_pattern == "color_pattern_13") { + SetColorCombination(green, blue, cyan); // color pattern 13 + } + else if (color_pattern == "color_pattern_14") { + SetColorCombination(green, yellow, magenta); // color pattern 14 + } + else if (color_pattern == "color_pattern_15") { + SetColorCombination(green, yellow, cyan); // color pattern 15 + } + else if (color_pattern == "color_pattern_16") { + SetColorCombination(green, magenta, cyan); // color pattern 16 + } + + else if (color_pattern == "color_pattern_17") { + SetColorCombination(blue, yellow, magenta); // color pattern 17 + } + else if (color_pattern == "color_pattern_18") { + SetColorCombination(blue, yellow, cyan); // color pattern 18 + } + else if (color_pattern == "color_pattern_19") { + SetColorCombination(blue, magenta, cyan); // color pattern 19 + } + else if (color_pattern == "color_pattern_20") { + SetColorCombination(yellow, magenta, cyan); // color pattern 20 + } + else{ + strip.clear(); + strip.fill(strip.Color(redString.toInt(),greenString.toInt(),blueString.toInt()),0,0); + strip.show(); + } + +} + +// Define Functions +void SetColorCombination(uint32_t color_1, uint32_t color_2, uint32_t color_3){ + // Set the color combination for the whole strip + + //determine whole numer of pixels, which can be displayed in a 3-row-constellation + int number_of_3rowpixels = NUM_LEDS - (NUM_LEDS % 3); // should be 27 if 29 leds are mounted. 
Modulo = 2: (29 % 3)=(9x3) +2 + int lastpixelindex = number_of_3rowpixels-1; // index starts at 0 + + for(int i=0; i<=lastpixelindex; i=i+3){ + strip.setPixelColor(i, color_1); + strip.setPixelColor(i+1, color_2); + strip.setPixelColor(i+2, color_3); + } + + strip.show(); + +} diff --git a/03_Aufnahmeserie/Ledstripe_Alignment.py b/03_Aufnahmeserie/Ledstripe_Alignment.py new file mode 100644 index 0000000..d92f901 --- /dev/null +++ b/03_Aufnahmeserie/Ledstripe_Alignment.py @@ -0,0 +1,235 @@ +# Creation Date: 14.01.2022 +# Author: Kenan Gömek +# This script takes pictures with Picameras VideoPort like it will be used to work with OpenCV and saves it with OpenCV to have the real use case pictures. +# This script is designed for aligning the LED-Stripe horizontally to the image sensor with LEDs off +# Use: +# 1. turn on the LEDs to e.g. 005-000-000 manually via bash or similar +# 2. execute this script +# 3. adjust shutter speed if desired with x,c,b,n,m (program starts with max shutter speed for selected frame rate) +# 4. align LED strip e.g. to the upper blue line and first led to the left edge of the image +# 5. take image of alignment with 'i' +# 6. end program with 'q' + +import cv2 as cv +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os + +import numpy as np + + + +# Define camera settings + +# divide origin resoluton by a number, to have the origin aspect ratio + +# RESOLUTION = (3280, 2464) # Max Photo-Resolution CAM03 and CAM04 # no image with PiCamera Videoport at this Resolution.. Probably GPU Memory and CPU issues. +# RESOLUTION = (1640,1232) # 2nd best Resolution for CAM03 and CAM04 with FUll FOV (2x2 binning) # Mode 4 +SENSOR_MODE = 4 # corresponding sensor mode to resolution +OUTPUT_RESOLUTION = (416, 320) # (1640x1232)/4=(410,308) + # (410,308) is being upscaled to (416,320) from ISP (warning in bash), but image will have still (410,308) pixels. 
+# OUTPUT_RESOLUTION = (820, 616) # (1640x1232)/2=(820,616) + # bash: frame size rounded up from 820x616 to 832x624 + + +AWB_MODE = 'off' # Auto white balance mode +AWB_GAINS = (Fraction(485, 256), Fraction(397, 256)) # White Balance Gains to have colours read correctly: (red, blue) +ISO = 100 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 10 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. My default: 2 s + +# Define Funcions +def take_image_picamera_opencv(shutter_speed): + + # Initialise Camera + with picamera.PiCamera() as camera: + with PiRGBArray(camera) as output: + # Set camera settings + camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode + camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage! + camera.framerate = FRAMERATE + + camera.awb_mode = AWB_MODE + camera.awb_gains = AWB_GAINS + + camera.iso = ISO + camera.shutter_speed = shutter_speed + camera.exposure_mode = EXPOSURE_MODE + + time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings + + # camera.start_preview() # show camera preview through PiCamera interface + # camera.annotate_frame_num=True # Controls whether the current frame number is drawn as an annotation. + + for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)): + framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1 + image = frame.array # raw NumPy array without JPEG encoding + + camera_exposure_speed = camera.exposure_speed # Retrieves the current shutter speed of the camera. 
+ + # cv.imshow("Current Frame", image) # display the image without text + img = alignment_procedure(image, shutter_speed, framenumber, camera_exposure_speed) # show the frame + + framenumber_to_save = 7000 # frame 15 (no particular reason for frame 15) + trigger_save_frame = 'no' # trigger for saving the frame + if framenumber == framenumber_to_save and trigger_save_frame=='yes': # save desired frame + now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M-%S.%f")[:-3] + print(f"Take picture! framenumber: {framenumber} shutter speed: {shutter_speed} µs") + cv.imwrite(f"{path_saveFolder}/ss{shutter_speed}_Date {d1}.png", image) + break # break from the loop, because we took the image we wanted + + output.truncate(0) # clear the stream for next frame + + # Only uncomment following code if you display the image. No errors if not commented, but higher fps if commented. + # if q is pressed, break from loop. + pressed_key = cv.waitKey(1) & 0xff + if pressed_key == ord('q'): + break + elif pressed_key == ord('i'): # Take image from manipulated image if i is pressed + now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M-%S.%f")[:-3] + cv.imwrite(f"{path_saveFolder}/Date {d1}.png", img) + print('took image!') + elif pressed_key == ord('b'): # increase shutterspeed by 50 + shutter_speed = round(shutter_speed+50) + camera.shutter_speed = shutter_speed + elif pressed_key == ord('n'): # increase shutterspeed by 500 + shutter_speed = round(shutter_speed+500) + elif pressed_key == ord('m'): # max shutterspeed + shutter_speed = round(1/FRAMERATE*1e6) + camera.shutter_speed = shutter_speed + elif pressed_key == ord('x'): # decrease shutterspeed by 500 + shutter_speed = round(shutter_speed-500) + camera.shutter_speed = shutter_speed + elif pressed_key == ord('c'): # decrease shutterspeed by 50 + shutter_speed = round(shutter_speed-50) + camera.shutter_speed = shutter_speed + elif pressed_key == ord('o'): # set shutter speed to 0 + shutter_speed = 0 + camera.shutter_speed = 
shutter_speed + + +def display_image_with_text(img, shutter_speed, framenumber, camera_exposure_speed): + img = img.copy() # make copy of image and do not modify the original image + + font = cv.FONT_HERSHEY_SIMPLEX # font + fontScale = 1 # fontScale + color = (255, 255, 255) # Font colour in BGR + thickness = 1 # Line thickness in px + + # set text position + frame_width = int(img.shape[1]) + frame_height = int(img.shape[0]) + text_start_position_Y = int(round(frame_height*0.12)) # start position of text in pixels 12 % of frame height + text_linespacing = 50 # line spacing between two strings in pixels + # text_start_position_X = int(frame_width/4) # start text from 1/4 of image width + text_start_position_X = int(0) # start text from left edge of image + + # set position in (x,y)-coordinated from top left corner. Bottom-left corner of the text string in the image. + pos_1 = (text_start_position_X, text_start_position_Y) # start text from 1/4 of image width + pos_2 = (text_start_position_X, text_start_position_Y+text_linespacing) # start text from 1/4 of image width + pos_3 = (text_start_position_X, text_start_position_Y+2*text_linespacing) # start text from 1/4 of image width + + # define text to display + text_line_1 = f"set shttr-spd: {shutter_speed} us" + text_line_3 = f"Frame: {framenumber}" + text_line_2 = f"ret exp-spd: {camera_exposure_speed} us" + + # put the text into the image + image_text_1 = cv.putText(img, text_line_1, pos_1, font, + fontScale, color, thickness, cv.LINE_AA) + image_text_2 = cv.putText(img, text_line_2, pos_2, font, + fontScale, color, thickness, cv.LINE_AA) + image_text_3 = cv.putText(img, text_line_3, pos_3, font, + fontScale, color, thickness, cv.LINE_AA) + + cv.imshow("Current Frame", img) # display the image + +def alignment_procedure(img, shutter_speed, framenumber, camera_exposure_speed): + img = img.copy() # make copy of image and do not modify the original image + + font = cv.FONT_HERSHEY_SIMPLEX # font + fontScale = 1 # 
fontScale + color = (255, 255, 255) # Font colour in BGR + thickness = 1 # Line thickness in px + + # set text position + frame_width = int(img.shape[1]) + frame_height = int(img.shape[0]) + text_start_position_Y = int(round(frame_height*0.12)) # start position of text in pixels 12 % of frame height + text_linespacing = 50 # line spacing between two strings in pixels + # text_start_position_X = int(frame_width/4) # start text from 1/4 of image width + text_start_position_X = int(0) # start text from left edge of image + + # set position in (x,y)-coordinated from top left corner. Bottom-left corner of the text string in the image. + pos_1 = (text_start_position_X, text_start_position_Y) # start text from 1/4 of image width + pos_2 = (text_start_position_X, text_start_position_Y+text_linespacing) # start text from 1/4 of image width + pos_3 = (text_start_position_X, text_start_position_Y+2*text_linespacing) # start text from 1/4 of image width + + # define text to display + text_line_1 = f"set shttr-spd: {shutter_speed} us" + text_line_3 = f"Frame: {framenumber}" + text_line_2 = f"ret exp-spd: {camera_exposure_speed} us" + + # put the text into the image + image_text_1 = cv.putText(img, text_line_1, pos_1, font, + fontScale, color, thickness, cv.LINE_AA) + image_text_2 = cv.putText(img, text_line_2, pos_2, font, + fontScale, color, thickness, cv.LINE_AA) + image_text_3 = cv.putText(img, text_line_3, pos_3, font, + fontScale, color, thickness, cv.LINE_AA) + + # draw lines into image + alignment_line_thickness = 1 # thickness of the alignment line in pixels + alignment_line_offset = 21 # offset of the alignment line from the center + frame_center = round(frame_height/2) # center of the frame + offset_center = frame_center + 70 # offset of the center for e.g. 
have the alignment lines more in the bottom part of the image + alignment_row_start = offset_center-alignment_line_offset + alignment_row_end = offset_center+alignment_line_offset + + img[offset_center, :, :] = [255,255,255] # bgr format + img[alignment_row_start-alignment_line_thickness:alignment_row_start, :, :] = [255,0,0] # bgr format + img[alignment_row_end:alignment_row_end+alignment_line_thickness, :, :] = [0,0,255] # bgr format + + cv.imshow("Current Frame", img) # display the image + return img + + +# Start Script + +# Create folder for saving the captured pictures +now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M") +path_cwd = os.getcwd() + +path_saveFolder = path_cwd+r"/Ledstripe_alignment_"+d1 +try: + os.mkdir(path_saveFolder) +except OSError: + print("Error! Ending script. Try it again in 1 minute") + quit() + + + + +# start capture series for different shutter speeds +print('start caputure series...') + +# take_image_picamera_opencv(shutter_speed=2000000) #Can see something at normal light conditions +take_image_picamera_opencv(shutter_speed=round(1/FRAMERATE*1e6)) # max shutter-speed depending on fps: 1/2 fps = 500 000 µs + +# End Script + +cv.destroyAllWindows() + +print('Script finished') + + + + + + diff --git a/03_Aufnahmeserie/Takeimage_Shutterspeed_PiCameraVideoPort_V01-02.py b/03_Aufnahmeserie/Takeimage_Shutterspeed_PiCameraVideoPort_V01-02.py new file mode 100644 index 0000000..ea00265 --- /dev/null +++ b/03_Aufnahmeserie/Takeimage_Shutterspeed_PiCameraVideoPort_V01-02.py @@ -0,0 +1,441 @@ +# Creation Date: 10.01.2022 +# Author: Kenan Gömek +# This script takes pictures with Picameras VideoPort like it will be used to work with OpenCV and saves it with OpenCV to have the real use case pictures. 
+# Update: This script is designed to take image series with different camera parameters +# Update-Comment: +# To-DO: + +import serial +import cv2 as cv +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os + +import numpy as np + +# Comment for Measurement-Log: +CAM_NR = "04" # CAM:01,02,03,04 + +COMMENT = "" + +# Set Parameters for series with variable shutterspeeds +# Uncomment/Comment needed code parts for shutter speed lists: +#START_SHUTTER_SPEED = 50 # microseconds. 0=auto +#END_SHUTTER_SPEED = 100 # microseconds +#STEP_SHUTTER_SPEED = 25 # microseconds +# shutter_speeds_add = [47, 66, 85, 104] # microseconds. shutter speeds to additionally add +shutter_speeds_add = [50] # microseconds. shutter speeds to additionally add + # from investigations: ss < 30 are all darker, but the darkness is the same. e.g: ss=5,10,15,20,25 are all the same brightness=Shutter_speed + # with ss <5 µs (because < 20 µs) one can request the minimum posisble shutter speed + # it was found, that ss is adjustable in 20 µs-steps from 30 µs ongoing: [10 µs, 30 µs, 50 µs] in this range + # retrieved ss: 9, 28, 47, 66, 85, 104 µs + +shutter_speeds = list() +#for ss in range(START_SHUTTER_SPEED, END_SHUTTER_SPEED+STEP_SHUTTER_SPEED, STEP_SHUTTER_SPEED): +# shutter_speeds.append(ss) +#shutter_speeds = shutter_speeds + shutter_speeds_add # concatenate lists +shutter_speeds = shutter_speeds_add +shutter_speeds.sort() # sort list + +# camera_resolutions = [(192, 144), (416,320), (640,480), (960,720), (1280,960), (1648,1232)] +camera_resolutions = [(192, 144)] +iso_values = [320] + +# Define camera settings +if CAM_NR == "03": + SENSOR_MODE = 4 # corresponding sensor mode to resolution + OUTPUT_RESOLUTION = (416, 320) # (1640x1232)/4=(410,308) + # Camera informations for log + CAM_DESCRIPTION = "Raspberry Pi Camera Module V2" + CAM_EAN = "506021437024020" +elif CAM_NR == "04": + SENSOR_MODE = 4 # 
corresponding sensor mode to resolution + # Camera informations for log + CAM_DESCRIPTION = "Raspberry Pi NoIR Camera Module V2" + CAM_EAN = "0640522710898" +elif CAM_NR == ("01"): + SENSOR_MODE = 4 # corresponding sensor mode to resolution --> gibt es sowas für camera 1? Ja gleicher Sensor wie Raspberry Pi V1 sensor! + # Camera informations for log + CAM_DESCRIPTION = "RPI-SPYCAM" + CAM_EAN = "4251266701743 " +elif CAM_NR == ("02"): + SENSOR_MODE = 4 # corresponding sensor mode to resolution --> gibt es sowas für camera 2? --> Ja gleicher Sensor wie Raspberry Pi V1 sensor! + # Camera informations for log + CAM_DESCRIPTION = "RB-Camera_JT" + CAM_EAN = "4250236815909" + + +AWB_MODE = 'off' # Auto white balance mode +# AWB_GAINS = (Fraction(485, 256), Fraction(397, 256)) # White Balance Gains to have colours read correctly: (red, blue) +AWB_GAINS = (1.395, 1.15) # White Balance Gains to have colours read correctly: (red, blue). Int, floar or fraction are valid. +BRIGHTNESS = 25 # sets the brightness setting of the camera. default is 50. [0-100] + #the brighter, the brighter the LEDs and the higher the RGB values and vice versa! +CONTRAST = 100 # sets the contrast setting of the camera. The default value is 0. [-100 ... 100] + +ISO = 320 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 25 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. 
My default: 2 s + + +# Dictionary for color patterns +# r: red, g: green, b: blue, y: yellow, m: magenta, c: cyan +dict_color_pattern = { + "01": "rgb", + "02": "rgy", + "03": "rgm", + "04": "rgc", + "05": "rby", + "06": "rbm", + "07": "rbc", + "08": "rym", + "09": "ryc", + "10": "rmc", + "11": "gby", + "12": "gbm", + "13": "gbc", + "14": "gym", + "15": "gyc", + "16": "gmc", + "17": "bym", + "18": "byc", + "19": "bmc", + "20": "ymc", +} + +# Define Funcions +def take_image_picamera_opencv(shutter_speed, iso, resolution, led_rgb_value): + colour_string = led_rgb_value + color_substring_list = [str(x).zfill(2) for x in range(1,21)] # string list to check for color pattern + + # Initialise Camera + with picamera.PiCamera() as camera: + with PiRGBArray(camera) as output: + # Set camera settings + camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode + camera.resolution = resolution # frame will be resized from GPU to this resolution. No CPU usage! + camera.framerate = FRAMERATE + + camera.awb_mode = AWB_MODE + camera.awb_gains = AWB_GAINS + + camera.iso = iso # Iso must be set prior fixing the gains (prior setting exposure_mode to off) + camera.shutter_speed = shutter_speed + # it was found that, you have to set the right shutter speed at the first initalisation of the current runtime of the program. + # The gains (analog, digital) will adjust to this set up. + # After the gains are fixed, they will never change! even if you change the shutter speed during the runtime. + # To get consistent brightness values, set the right shutter speed at initalisation once. + + time.sleep(SLEEP_TIME) # wait for iso gains and digital_gain and analog_gain to settle before fixing the gains with exposure_mode = off + # digital_gain: The value represents the digital gain the camera applies after conversion of the sensor’s analog output. + # analog_gain: The value represents the analog gain of the sensor prior to digital conversion. 
+ # digital and analog gain can not be set from the user. + + camera.exposure_mode = EXPOSURE_MODE + # exposure_mode value 'off' overrides the ISO setting. + # For example, 'off' fixes analog_gain and digital_gain entirely, + # preventing this property from adjusting them when set. + time.sleep(1) + camera.brightness = BRIGHTNESS + camera.contrast = CONTRAST + # camera.start_preview() + time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings + exposure_speed = camera.exposure_speed # settings have to be applied before retrieving the right exposure_speed + + # cv.namedWindow("Current Frame", cv.WINDOW_NORMAL) + for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)): + framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1 + image = frame.array # raw NumPy array without JPEG encoding + + #display_image_with_text(image, shutter_speed, framenumber) # show the frame + + framenumber_to_save = 15 # save frame 15 (no particular reason for frame 15) + if framenumber == framenumber_to_save: # save desired frame + now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M-%S") + print(f"Take picture! iso: {iso} shutter speed: {shutter_speed} µs") + if "-" in colour_string: + cv.imwrite(f"{path_saveFolder}/RGB {colour_string}_ss{shutter_speed}_es{exposure_speed}_iso{iso}_b{BRIGHTNESS}_c{CONTRAST}_res{resolution[0]}x{resolution[1]}_Date {d1}.png", image) + elif any(substring in colour_string for substring in color_substring_list): + cv.imwrite(f"{path_saveFolder}/cp {colour_string}-{dict_color_pattern[colour_string]}_ss{shutter_speed}_es{exposure_speed}_iso{iso}_b{BRIGHTNESS}_c{CONTRAST}_res{resolution[0]}x{resolution[1]}_Date {d1}.png", image) + break # break from the loop, because we took the image we wanted + + output.truncate(0) # clear the stream for next frame + + # Only uncomment following code if you display the image. No errors if not commented, but higher fps if commented. 
+ # if q is pressed, break from loop. + # if cv.waitKey(1) & 0xff == ord('q'): + # break + + +def take_image_series_onecolourchannel_per_led(led_rgb_value): + """Take image series in one specified colour.""" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for i, resolution in enumerate(camera_resolutions): + print(f'resolution {i+1}/{len(camera_resolutions)}: {resolution}') + for ss in shutter_speeds: + for iso in iso_values: + take_image_picamera_opencv(ss, iso, resolution, led_rgb_value) + + +def take_image_series_rgb(brightness): + """Take image series. One colour per series.""" + t1 = time.perf_counter() + + print(f'switchting to red') + led_rgb_value = f"{brightness}-000-000" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for i, resolution in enumerate(camera_resolutions): + print(f'resolution {i+1}/{len(camera_resolutions)}: {resolution}') + for ss in shutter_speeds: + for iso in iso_values: + take_image_picamera_opencv(ss, iso, resolution, led_rgb_value) + + print('switchting to green') + led_rgb_value = f"000-{brightness}-000" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for i, resolution in enumerate(camera_resolutions): + print(f'resolution {i+1}/{len(camera_resolutions)}: {resolution}') + for ss in shutter_speeds: + for iso in iso_values: + take_image_picamera_opencv(ss, iso, resolution, led_rgb_value) + + print('switchting to blue') + led_rgb_value = f"000-000-{brightness}" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for i, resolution in enumerate(camera_resolutions): + print(f'resolution {i+1}/{len(camera_resolutions)}: {resolution}') + for ss in shutter_speeds: + for iso in iso_values: + take_image_picamera_opencv(ss, iso, resolution, led_rgb_value) + + t2 = time.perf_counter() + elapsed_time = round((t2-t1)/60,2) + print(f'series_rgb finished in {elapsed_time} min') + +def take_image_series_ymc(brightness): + 
"""Take image series. One colour per series.""" + t1 = time.perf_counter() + + print(f'switchting to yellow') + led_rgb_value = f"{brightness}-{brightness}-000" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for i, resolution in enumerate(camera_resolutions): + print(f'resolution {i+1}/{len(camera_resolutions)}: {resolution}') + for ss in shutter_speeds: + for iso in iso_values: + take_image_picamera_opencv(ss, iso, resolution, led_rgb_value) + + print('switchting to magenta') + led_rgb_value = f"{brightness}-000-{brightness}" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for i, resolution in enumerate(camera_resolutions): + print(f'resolution {i+1}/{len(camera_resolutions)}: {resolution}') + for ss in shutter_speeds: + for iso in iso_values: + take_image_picamera_opencv(ss, iso, resolution, led_rgb_value) + + print('switchting to cyan') + led_rgb_value = f"000-{brightness}-{brightness}" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for i, resolution in enumerate(camera_resolutions): + print(f'resolution {i+1}/{len(camera_resolutions)}: {resolution}') + for ss in shutter_speeds: + for iso in iso_values: + take_image_picamera_opencv(ss, iso, resolution, led_rgb_value) + + t2 = time.perf_counter() + elapsed_time = round((t2-t1)/60,2) + print(f'series_rgb finished in {elapsed_time} min') + + +def take_image_series_twocolourchannels_per_led(brightness): + """Take image series. 
Two colours per series.""" + # All posibilities with R G B: RG, RB, GB + + print(f'switchting to red and green') + led_rgb_value = f"{brightness}-{brightness}-000" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for ss in range(START_SHUTTER_SPEED, END_SHUTTER_SPEED+STEP_SHUTTER_SPEED, STEP_SHUTTER_SPEED): + take_image_picamera_opencv(ss, led_rgb_value) + + print('switchting to red and blue') + led_rgb_value = f"{brightness}-000-{brightness}" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for ss in range(START_SHUTTER_SPEED, END_SHUTTER_SPEED+STEP_SHUTTER_SPEED, STEP_SHUTTER_SPEED): + take_image_picamera_opencv(ss, led_rgb_value) + + print('switchting to green and blue') + led_rgb_value = f"000-{brightness}-{brightness}" + s.write(str.encode(led_rgb_value)); response = s.readline(); print(response) + for ss in range(START_SHUTTER_SPEED, END_SHUTTER_SPEED+STEP_SHUTTER_SPEED, STEP_SHUTTER_SPEED): + take_image_picamera_opencv(ss, led_rgb_value) + +def takeimage_all_color_patterns(): + """Take images of all 20 color patterns including orientation """ + "The brightness is hard coded to 255 in the Arduino" + + t1 = time.perf_counter() + + print(f'take images for all 20 color patterns') + for number_of_color_pattern in range(1,21): + number_of_color_pattern = str(number_of_color_pattern).zfill(2) + print(f"color pattern no: {number_of_color_pattern}: {dict_color_pattern[number_of_color_pattern]}") + s.write(str.encode(f"color_pattern_{number_of_color_pattern}")) + + for i, resolution in enumerate(camera_resolutions): + print(f'resolution {i+1}/{len(camera_resolutions)}: {resolution}') + for ss in shutter_speeds: + for iso in iso_values: + take_image_picamera_opencv(ss, iso, resolution, number_of_color_pattern) + + t2 = time.perf_counter() + elapsed_time = round((t2-t1)/60,2) + print(f'series_rgb finished in {elapsed_time} min') + + +def display_image_with_text(img, shutter_speed, framenumber): + img = 
img.copy() # make copy of image and do not modify the original image + + font = cv.FONT_HERSHEY_SIMPLEX # font + fontScale = 1 # fontScale + color = (255, 255, 255) # Font colour in BGR + thickness = 1 # Line thickness in px + + # set text position + frame_width = int(img.shape[1]) + frame_height = int(img.shape[0]) + text_start_position_Y = int(round(frame_height*0.12)) # start position of text in pixels 12 % of frame height + text_linespacing = 50 # line spacing between two strings in pixels + + # set position in (x,y)-coordinated from top left corner. Bottom-left corner of the text string in the image. + pos_1 = (int(frame_width/4), text_start_position_Y) # start text from 1/4 of image width + pos_2 = (int(frame_width/4), text_start_position_Y+text_linespacing) # start text from 1/4 of image width + + # define text to display + text_line_1 = f"Shutterspeed: {shutter_speed} us" + text_line_2 = f"Frame: {framenumber}" + + # put the text into the image + image_text_1 = cv.putText(img, text_line_1, pos_1, font, + fontScale, color, thickness, cv.LINE_AA) + image_text_2 = cv.putText(img, text_line_2, pos_2, font, + fontScale, color, thickness, cv.LINE_AA) + + cv.imshow("Current Frame", img) # display the image + + +# Start Script + +# Create folder for saving the captured pictures +now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M") +path_cwd = os.getcwd() + +# path_saveFolder = path_cwd+r"/Solarradiation_"+d1 +path_saveFolder = path_cwd+r"/series_"+d1 +try: + os.mkdir(path_saveFolder) + lines = [f"CAMERA NR: {CAM_NR}", f"CAMERA DESCRIPTION: {CAM_DESCRIPTION}", f"CAMERA EAN: {CAM_EAN}", + f"\n#Camera settings:", f"SENSOR MODE: {SENSOR_MODE}", + f"FRAMERATE: {FRAMERATE} # frames per second", + f"AWB_MODE: {AWB_MODE}", f"AWB_GAINS (red, blue): {AWB_GAINS}", + f"Brightness: {BRIGHTNESS}", f"Contrast: {CONTRAST}", + f"EXPOSURE_MODE: {EXPOSURE_MODE}", + f"\nCOMMENT: {COMMENT}"] + with open(os.path.join(path_saveFolder, "log.txt"), "w") as f: + 
f.write("\n".join(lines)) +except OSError: + print("Error! Ending script. Try it again in 1 minute") + quit() + +# Turn on LEDs +s = serial.Serial("/dev/ttyACM0", 9600) # Name of USB-Port may vary (here: ACM0) +time.sleep(5) # der Arduino resettet nach einer Seriellen Verbindung, daher muss kurz gewartet werden + + + +# start capture series for different shutter speeds +print('start caputure series...') + + +# s.write(str.encode("000-255-000")); response = s.readline(); print(response) +# take_image_picamera_opencv(shutter_speed=200, led_rgb_value="000-255-000") + + +# take image series for two colour channels per led +# for brgns in range(50,260,50): +# brgns=str(brgns).zfill(3) +# take_image_series_twocolourchannels_per_led(brgns) #Take Image Series for every color in given brightness +# take_image_series_twocolourchannels_per_led(255) #Take Image Series for every color in given brightness + + +# # take image series for white colour +# for brgns in range(50,260,50): +# brgns=f"{brgns}".zfill(3) +# ledcolour=f"{brgns}-{brgns}-{brgns}" +# take_image_series_onecolourchannel_per_led(ledcolour) +# take_image_series_onecolourchannel_per_led("255-255-255") + +# -------------------------------------------------------------------------------------------------------------------------------------------- +# # 03.02.2022: Take images with multiple shutter speeds but in brightness 255 for determining colour falsification to the edges of the image +# take_image_series_rgb(255) +# take_image_series_ymc(255) +# take_image_series_onecolourchannel_per_led("255-255-255") + +# -------------------------------------------------------------------------------------------------------------------------------------------- +# # 26.02.2022: Take images with multiple shutter speeds and brightness for investigating the influence of the solar radiation on the images +# # take image series for rgb +# for brgns in range(50,260,50): +# brgns=str(brgns).zfill(3) +# take_image_series_ymc(brgns) #Take 
Image Series for every color in given brightness +# take_image_series_ymc(255) #Take Image Series for every color in given brightness + +# # take image series for ymc +# for brgns in range(50,260,50): +# brgns=str(brgns).zfill(3) +# take_image_series_rgb(brgns) #Take Image Series for every color in given brightness +# take_image_series_rgb(255) #Take Image Series for every color in given brightness + +# # take image series for white colour +# for brgns in range(50,260,50): +# brgns=f"{brgns}".zfill(3) +# ledcolour=f"{brgns}-{brgns}-{brgns}" +# take_image_series_onecolourchannel_per_led(ledcolour) +# take_image_series_onecolourchannel_per_led("255-255-255") +# -------------------------------------------------------------------------------------------------------------------------------------------- +# 10.04.2022: Take images with multiple shutter speeds for investigating the influence of the solar radiation on the images +# take image series for rgb +take_image_series_rgb(255) #Take Image Series for every color in given brightness + +# take image series for ymc +take_image_series_ymc(255) #Take Image Series for every color in given brightness + +# take image series for white colour +take_image_series_onecolourchannel_per_led("255-255-255") +# -------------------------------------------------------------------------------------------------------------------------------------------- +# 07.03.2022: Take images of all 20 color patterns with birghtness 255 +takeimage_all_color_patterns() +# -------------------------------------------------------------------------------------------------------------------------------------------- + + +# End Script + +# Turn off LEDs +s.write(str.encode('000-000-000')); response = s.readline(); print(response) +s.close() #close serial port + +cv.destroyAllWindows() + +t1 = round(time.perf_counter()/60,2) + +print(f'Script finished in {t1} min') + + + + + + diff --git a/04_Spurerkennung/Kalibrierung/01_How-To-Start-Script_Calib.txt 
b/04_Spurerkennung/Kalibrierung/01_How-To-Start-Script_Calib.txt new file mode 100644 index 0000000..68bdee6 --- /dev/null +++ b/04_Spurerkennung/Kalibrierung/01_How-To-Start-Script_Calib.txt @@ -0,0 +1,2 @@ +#Author: Kenan Gömek +python object_size.py --image $IMAGE_PATH$ --width $OBJECT_WIDTH$ \ No newline at end of file diff --git a/04_Spurerkennung/Kalibrierung/object_size.py b/04_Spurerkennung/Kalibrierung/object_size.py new file mode 100644 index 0000000..85c0eb0 --- /dev/null +++ b/04_Spurerkennung/Kalibrierung/object_size.py @@ -0,0 +1,117 @@ +#modified by Kenan Gömek +# Source: https://pyimagesearch.com/2016/03/28/measuring-size-of-objects-in-an-image-with-opencv/ + +# import the necessary packages +from scipy.spatial import distance as dist +from imutils import perspective +from imutils import contours +import numpy as np +import argparse +import imutils +import cv2 + +def midpoint(ptA, ptB): + return ((ptA[0] + ptB[0]) * 0.5, (ptA[1] + ptB[1]) * 0.5) + +# construct the argument parse and parse the arguments +ap = argparse.ArgumentParser() +ap.add_argument("-i", "--image", required=True, + help="path to the input image") +ap.add_argument("-w", "--width", type=float, required=True, + help="width of the left-most object in the image (in inches)") +args = vars(ap.parse_args()) + +# load the image, convert it to grayscale, and blur it slightly +image = cv2.imread(args["image"]) +gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) +gray = cv2.GaussianBlur(gray, (7, 7), 0) + +# perform edge detection, then perform a dilation + erosion to +# close gaps in between object edges +edged = cv2.Canny(gray, 50, 100) +edged = cv2.dilate(edged, None, iterations=1) +edged = cv2.erode(edged, None, iterations=1) + +# find contours in the edge map +cnts = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL, + cv2.CHAIN_APPROX_SIMPLE) +cnts = imutils.grab_contours(cnts) + +# sort the contours from left-to-right and initialize the +# 'pixels per metric' calibration variable +(cnts, _) 
= contours.sort_contours(cnts) +pixelsPerMetric = None + +# loop over the contours individually +for c in cnts: + # if the contour is not sufficiently large, ignore it + if cv2.contourArea(c) < 100: + continue + + # compute the rotated bounding box of the contour + orig = image.copy() + box = cv2.minAreaRect(c) + box = cv2.cv.BoxPoints(box) if imutils.is_cv2() else cv2.boxPoints(box) + box = np.array(box, dtype="int") + + # order the points in the contour such that they appear + # in top-left, top-right, bottom-right, and bottom-left + # order, then draw the outline of the rotated bounding + # box + box = perspective.order_points(box) + cv2.drawContours(orig, [box.astype("int")], -1, (0, 255, 0), 2) + + # loop over the original points and draw them + for (x, y) in box: + cv2.circle(orig, (int(x), int(y)), 5, (0, 0, 255), -1) + + # unpack the ordered bounding box, then compute the midpoint + # between the top-left and top-right coordinates, followed by + # the midpoint between bottom-left and bottom-right coordinates + (tl, tr, br, bl) = box + (tltrX, tltrY) = midpoint(tl, tr) + (blbrX, blbrY) = midpoint(bl, br) + + # compute the midpoint between the top-left and top-right points, + # followed by the midpoint between the top-righ and bottom-right + (tlblX, tlblY) = midpoint(tl, bl) + (trbrX, trbrY) = midpoint(tr, br) + + # draw the midpoints on the image + cv2.circle(orig, (int(tltrX), int(tltrY)), 5, (255, 0, 0), -1) + cv2.circle(orig, (int(blbrX), int(blbrY)), 5, (255, 0, 0), -1) + cv2.circle(orig, (int(tlblX), int(tlblY)), 5, (255, 0, 0), -1) + cv2.circle(orig, (int(trbrX), int(trbrY)), 5, (255, 0, 0), -1) + + # draw lines between the midpoints + cv2.line(orig, (int(tltrX), int(tltrY)), (int(blbrX), int(blbrY)), + (255, 0, 255), 2) + cv2.line(orig, (int(tlblX), int(tlblY)), (int(trbrX), int(trbrY)), + (255, 0, 255), 2) + + # compute the Euclidean distance between the midpoints + dA = dist.euclidean((tltrX, tltrY), (blbrX, blbrY)) + dB = dist.euclidean((tlblX, 
tlblY), (trbrX, trbrY)) + # if the pixels per metric has not been initialized, then + # compute it as the ratio of pixels to supplied metric + # (in this case, inches) + if pixelsPerMetric is None: + pixelsPerMetric = dB / args["width"] + print(f"dB: {dB}") + w_kg = args["width"] + print(f"width: {w_kg}") + print(f"pixelsPerMetric: {pixelsPerMetric}") + + # compute the size of the object + dimA = dA / pixelsPerMetric + dimB = dB / pixelsPerMetric + # draw the object sizes on the image + cv2.putText(orig, "{:.2f} mm".format(dimA), + (int(tltrX - 15), int(tltrY - 10)), cv2.FONT_HERSHEY_SIMPLEX, + 0.65, (255, 255, 255), 2) + cv2.putText(orig, "{:.2f} mm".format(dimB), + (int(trbrX + 10), int(trbrY)), cv2.FONT_HERSHEY_SIMPLEX, + 0.65, (255, 255, 255), 2) + # show the output image + cv2.imshow("Image", orig) + cv2.waitKey(0) \ No newline at end of file diff --git a/04_Spurerkennung/Lanedetection_development_V02_findContours.py b/04_Spurerkennung/Lanedetection_development_V02_findContours.py new file mode 100644 index 0000000..c54797a --- /dev/null +++ b/04_Spurerkennung/Lanedetection_development_V02_findContours.py @@ -0,0 +1,454 @@ +# Creation Date: 01.03.2022 +# Author: Kenan Gömek +# This program is for the development of the lane detection algorithm on a windows and Linux machine +# It uses images, which were captured previously +# V02 detects the lane by using the grayscale-image of the rgb-image + +from turtle import position +import cv2 as cv +import numpy as np +import os +import time + +import platform + +# input +if platform.system() == "Windows": + folder_path=r"images_input" +if platform.system() == "Linux": + folder_path=r"/home/pi/Desktop/images_input" + +# Parameters +pixels_per_mm = 32/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320 +# pixels_per_mm = 71/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320 +# pixels_per_mm = 107/24.25 #[px/mm] for 120 mm camera height for resolution: 640x480 + +# Offset Camera Sensor in Scooty 
according to Scooty-KS +x_offset_camera_mm = 100 # [mm] +y_offset_camera_mm = 50 # [mm] +x_offset_camera_px = x_offset_camera_mm*pixels_per_mm # [px] +y_offset_camera_px = y_offset_camera_mm*pixels_per_mm # [px] + +# image parameters: width > height +image_height = 320 # shape [0] +image_width = 416 # shape[1] +# calculate center of image +[x_0, y_0] = np.array([image_width/2, image_height/2], dtype=np.uint16) + + +threshold_color_detection = 60 # values under this will not be considered as active leds in each color channel + +# Parameters for LED Detection +minDiameter_mm = 3.75 # [mm] minimum diameter of detected blob/LED. Must be minimum >0 ! +# maxDiameter_mm = 9 # [mm] maximum diameter of detected blob/LED + +# Define color numbers to identify the color channels in the matrix with all detected LEDs. No string, because numpy array should stay uint16 +color_number_off = 0 +color_number_red = 1 +color_number_green = 2 +color_number_blue = 3 +color_number_yellow = 4 +color_number_magenta = 5 +color_number_cyan = 6 +color_number_white = 7 + +# Parameters for grayscale to binary conversion +binary_threshold = 15 # determined by testing and application + # the higher threshold is, the smaller the diameter of the led, because more high values are extracted +binary_maxval = 255 # values ofer threshold will be set to maxval + +# Parameters for line fitting for lane construction +param = 0 # not used for DIST_L2 +reps = 0.001 # Sufficient accuracy for the radius (distance between the coordinate origin and the line). +aeps = 0.001 # Sufficient accuracy for the angle. + +show_opencv_window = True # show opencv window +draw_opencv = True # draw lane and so on + +print_additional_info = True + +# calculations before start +# Filter blobs by Area --> not implemented anymore, because no need, because detection is good and less time for calculation needed +# more than good trade off! 
+minDiameter_px = minDiameter_mm*pixels_per_mm # [px] minimum diameter of detected blob/LED +# maxDiameter_px = maxDiameter_mm*pixels_per_mm # [px] maximum diameter of detected blob/LED +minArea_px2 = np.pi/4*minDiameter_px**2 # min Area of a blob in px^2 +# maxArea_px2 = np.pi/4*maxDiameter_px**2 +# minArea = minArea_px2 # min Area of a blob in px^2 +# params.maxArea = maxArea_px2 # max Area of a blob in px^2. +# reasons for not filtering maxArea: motion blur + rolling shutter --> larger Area + + +def points_trafo(detected_LEDs, alpha_rad, dx, dy): + """Tranfsform points of LED to lane in KS-LED""" + detected_LEDs_trafo = detected_LEDs.copy() # copy, becuase else it is only a pointer + detected_LEDs_trafo = detected_LEDs_trafo.astype(np.int16) # avoid integer overflow + x_pnts = detected_LEDs_trafo[:,0] + y_pnts = detected_LEDs_trafo[:,1] + + # Translation + x1 = x_pnts-dx-x_0 + x_trafo = x1 + y1 = y_pnts-dy-y_0 + y_trafo = y1 + + # Rotation. Winkel Sensor im UZS, also negativ zu mathematischer definiton --> passive Drehung + # cos(-x)=cos(x) + # sin(-x)=-sin(x) --> -sin(-x) = - -sin(x) = sin(x) + # wegen besserer verständlichkeit nicht verinfachung angewendet + x_trafo = np.cos(-alpha_rad)*x1-np.sin(-alpha_rad)*y1 + detected_LEDs_trafo[:,0] = x_trafo + y_trafo = np.sin(-alpha_rad)*x1+np.cos(-alpha_rad)*y1 + detected_LEDs_trafo[:,1] = y_trafo + + #sort points along lane: x_2, y_2 -axis (KS_LED) + detected_LEDs_trafo = detected_LEDs_trafo[detected_LEDs_trafo[:, 0].argsort(kind='quicksort')] + return detected_LEDs_trafo + +def construct_lane(detected_LEDs, img_bgr): + """construct the lane""" + # This function is partially commented in german, because higher math is used + # clearer what is trying to be achieved + + # get points + # xy_pnts = detected_LEDs[:,0:2] + # x_pnts = detected_LEDs[:,0] + # y_pnts = detected_LEDs[:,1] + + # approach 2: + # fit line through centers of LEDs in KS_0 + # DIST_L": the simplest and the fastest least-squares method: the simple 
euclidean distance + [dx, dy, x_2, y_2] = cv.fitLine(detected_LEDs[:,0:2], cv.DIST_L2, param, reps, aeps) + # x_2, y_2: point on the line --> mean of leds --> point in the middle of the leds + # x2, y2: same as: mean_of_leds = np.mean([x_pnts, y_pnts], 1) + + alpha_rad = np.arctan2(dy, dx) # calculate angle of line + alpha = np.arctan2(dy, dx)*180/np.pi # calculate angle of line + # print(f"Lane: dx: {dx}, dy:{dy}, x2:{x_2}, y2:{y_2}, alpha:{alpha}°") + if print_additional_info: + alpha_print = alpha[0] + alpha_print = float("{0:.2f}".format(alpha_print)) + print(f"Alpha: {alpha_print}°") + + # get smallest distance to point an line + # Berechnung nach: Repetitorium Höhere Mathematik, Wirth + # Gerade: x = a+ t*b + # Punkt : OP = p + # d = abs(b x (p-a))/(abs(b)) + # info: np.array()[:,0] --> gets only array with 1 dimensions with desired values + p = np.array([x_0, y_0]) + a = np.array([x_2, y_2])[:,0] + b = np.array([np.cos(alpha_rad), np.sin(alpha_rad)])[:,0] # Richtungsvektor + c = p-a + + # Betrag von Vektor: np.linalg.norm(vec) + cross= np.cross(b, c) + d = np.linalg.norm(cross)/np.linalg.norm(b) # distance [px] + #print(f"d: {round(d,2)}") + + # Fußpunkt x_FP=(X_LED, Y_LED) + t_0_dot = np.dot(c, b) + t_0_norm = (np.linalg.norm(b)**2) + t_0 = t_0_dot/t_0_norm + [x_LED, y_LED] = (a+t_0*b) + if print_additional_info: + # convert float32, round and prepare for printing string + x_LED_print = float("{0:.2f}".format(x_LED)) + y_LED_print = float("{0:.2f}".format(y_LED)) + print(f"x_LED: {x_LED_print} [px], y_LED: {y_LED_print} [px]") + + # Abstand (dx_LED, dy_LED) Fußpunkt x_FP zu KS_0 (P) + dx_LED = x_LED - x_0 + dx_LED_mm = dx_LED*(1/pixels_per_mm) + dy_LED = y_LED - y_0 + dy_LED_mm = dy_LED*(1/pixels_per_mm) + if print_additional_info: + # convert float32, round and prepare for printing string + dx_LED_print = float("{0:.2f}".format(dx_LED)) + dy_LED_print = float("{0:.2f}".format(dy_LED)) + dx_LED_mm_print = float("{0:.2f}".format(dx_LED_mm)) + dy_LED_mm_print 
= float("{0:.2f}".format(dy_LED_mm)) + print(f"dx_LED: {dx_LED_print} [px] , dy_LED: {dy_LED_print} [px]") + print(f"dx_LED: {dx_LED_mm_print} [mm] , dy_LED: {dy_LED_mm_print} [mm]") + + # Abstand (dx, dy) Fußpunkt x_FP von Bildmitte zu KS_Scooty + # Diese Werte zurückgeben + dx_LED_scooty = x_LED - x_0 + x_offset_camera_px + dx_LED_scooty_mm = dx_LED_scooty*(1/pixels_per_mm) + dy_LED_scooty = y_LED - y_0 + y_offset_camera_px + dy_LED_scooty_mm = dy_LED_scooty*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED_scooty: {round(dx_LED_scooty,2)} [px] , dy_LED_scooty: {round(dy_LED_scooty,2)} [px]") + print(f"dx_LED_scooty: {round(dx_LED_scooty_mm,2)} [mm] , dy_LED_scooty: {round(dy_LED_scooty_mm,2)} [mm]") + + # Punkte Trafo, um sortierte position der LEDs entlang Spur zu erhalten + # Bei normal detected kann bei vertikaler LED zb Fehler entstehen und dann muster: 211323233 -> daher mit dieser sortierten weitermachen + detected_LEDs_KS_LED = points_trafo(detected_LEDs, alpha_rad, dx_LED, dy_LED) + if print_additional_info: + print(f"Detected LEDs in KS_LED:(x2, y2):\n {detected_LEDs_KS_LED}") + + #----------------------------------- + # draw useful lines and points + + # draw lane line + if draw_opencv: + pt_0 = (a+b*np.array([-300, -300])).astype(np.int32) + pt_1 = (a+b*np.array([300, 300])).astype(np.int32) + #print(f"pt_0: {pt_0}, pt_1: {pt_1}") + cv.line(img_bgr, pt_0, pt_1, (255,255,255),1) # draw lane + + # draw dx dy + cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # shortest distance from KS_0 to KS_LED --> Lot + # cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_0)), (0,0,255), 2) # only dx + # cv.line(img_bgr, (int(x_LED), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # only dy + + #draw additional points + cv.circle(img_bgr, (int(x_2), int(y_2)), 5,(255,128,255),-1) #pink. 
Center of points + #cv.putText(img_bgr, '(x2, y2)',(int(x_2)+5, int(y_2)-5), cv.FONT_HERSHEY_SIMPLEX, 2, (255,255,255), cv.LINE_AA) + cv.circle(img_bgr, (int(x_LED), int(y_LED)), 5,(170,255,0),-1) # lime green. Fußpunkt + + if show_opencv_window: + cv.imshow("Lane", img_bgr) + + return dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED + +def convert_rgb_to_grayscale_average(image_bgr): + """This function converts the RGB image into an grayscale image. + Algorithm: Average: Y = (R+G+B)/3""" + + # convert dtype to prevent integer overflow while addition + image_bgr = image_bgr.astype(np.uint16, copy=False) + image_gray = (image_bgr[:,:,0]+image_bgr[:,:,1]+image_bgr[:,:,2])/3 # add values / do conversion + image_gray = image_gray.astype(np.uint8, copy=False) # convert back to uint8 + + return image_gray + + +def get_color_of_leds(matrix_of_LEDs, image_bgr): + """Determine color of LEDs at positions and add to matrix""" + # is image_r[y_pos, x_pos] = image_bgr[y_pos,x_pos, 2] ? --> yes. No need to split the color channels. 
+ + offset = 1 # half of length from rectangle which is going to be used to determine the color around the middle point of the blob/led + # offset=0 --> only the value from the middle point of the blob/led + # offset=1 --> 9 values, offset=2-->25 values + + for led in matrix_of_LEDs: + x_pos = led[0] # uint16 + y_pos = led[1] # uint16 + + # get values of color channels in region around middle point of blob/led: + # +1 at stop index, because it is not inclusive + region_around_blue_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 0] # uint8 + region_around_green_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 1] # uint8 + region_around_red_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 2] # uint8 + + # average of the values + # convert dtype to prevent integer overflow while addition + region_around_red_led = region_around_red_led.astype(np.uint16, copy=False) + region_around_green_led = region_around_green_led.astype(np.uint16, copy=False) + region_around_blue_led = region_around_blue_led.astype(np.uint16, copy=False) + # sum all elements in matrix and divide with number of elements + number_of_elements= region_around_blue_led.size + value_of_red_led = region_around_red_led.sum()/number_of_elements # float64, if not integer result + value_of_green_led = region_around_green_led.sum()/number_of_elements # float64, if not integer result + value_of_blue_led = region_around_blue_led.sum()/number_of_elements # float64, if not integer result + + # determine which leds are active: + # if value > threshold --> led is active + status_blue_led = False; status_green_led = False; status_red_led = False + if value_of_blue_led > threshold_color_detection: + status_blue_led = True + if value_of_green_led > threshold_color_detection: + status_green_led = True + if value_of_red_led > threshold_color_detection: + status_red_led = True + + # determine color by checking the cases: + # case 1: red + if 
status_blue_led==False and status_green_led==False and status_red_led==True: + color = color_number_red + # case 2: green + elif status_blue_led==False and status_green_led==True and status_red_led==False: + color = color_number_green + # case 3: blue + elif status_blue_led==True and status_green_led==False and status_red_led==False: + color = color_number_blue + # case 4: yellow = red + green + elif status_blue_led==False and status_green_led==True and status_red_led==True: + color = color_number_yellow + # case 5: magenta = red + blue + elif status_blue_led==True and status_green_led==False and status_red_led==True: + color = color_number_magenta + # case 6: cyan = green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==False: + color = color_number_cyan + # case 7: white = red + green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==True: + color = color_number_white + # case 8: led not active + # this case can not occur, because no inactive led can be detected from the implemented blob-algorithm in detect_LED_positions_in_grayscale + else: + color = color_number_off + + # fill matrix with color + led[2] = color # uint16 + + return matrix_of_LEDs + +def get_position_of_LEDs_contours(image_gray, image_bgr): + # create binary image + ret, image_binary = cv.threshold(image_gray, binary_threshold, binary_maxval, cv.THRESH_BINARY) + + # find contours + contour_retrieval_algorithm = cv.RETR_EXTERNAL # retrieves only the extreme outer contours + contours, hierarchy = cv.findContours(image_binary, contour_retrieval_algorithm, cv.CHAIN_APPROX_SIMPLE) + + # analyse contours + number_of_detected_contours = len(contours) + if number_of_detected_contours != 0: + # centroid of contours + # Pre-allocate matrix for numpy + number_of_rows = 0 + number_of_columns = 3 + position_of_leds = np.zeros((number_of_rows, number_of_columns), dtype=np.uint16) #empty: [] + + if draw_opencv: + image_bgr_contours = 
image_bgr.copy() # create copy of image + # copy is needed to draw on, because else the color of + # the circle is detected als LED-Color + + number_of_detected_LEDs = 0 + for i, cnt in enumerate(contours): + area = cv.contourArea(cnt) + # diameter = 2*np.sqrt(area/np.pi) + # diameter_mm = diameter*(1/pixels_per_mm) + # print(f"area: {area} [px^2], diameter: {diameter} [px], diamter: {diameter_mm} [mm]") + + # Filter contours by area. minimum Area needs to be at least >0 ! + if area > minArea_px2: + M = cv.moments(cnt) + number_of_detected_LEDs += 1 + # prevent zero division + if M['m00']==0: + cx = 0 + cy = 0 + else: + cx = int(M['m10']/M['m00']) + cy = int(M['m01']/M['m00']) + #print(cx, cy) + # add positions to matrix + x_pos = int(cx) # x positon + y_pos = int(cy) # y position + position_of_leds = np.vstack((position_of_leds, \ + np.array([x_pos, y_pos, color_number_off], dtype=np.uint16))) # vstack: row wise + + # draw centroids + if draw_opencv: + radius = 2 + color = (255,255,255) + thickness = -1 # filled + cv.circle(image_bgr_contours,(cx,cy), radius, color, thickness) + + if number_of_detected_LEDs != 0: + if print_additional_info: + print(f"detected LEDs: {number_of_detected_LEDs}") + + if draw_opencv: + # draw contours + contours_to_pass = -1 # pass all contours + color_of_contour = (255,255,255) + line_thickness = 1 + cv.drawContours(image_bgr_contours, contours, contours_to_pass, color_of_contour, line_thickness) + + if show_opencv_window: + cv.imshow("binary", image_binary) + cv.imshow("Contours", image_bgr_contours) + + return position_of_leds + else: + if print_additional_info: + print(f"No LEDs were detected") + return None + + else: + if print_additional_info: + print(f"No contours were detected") + return None + +def detect_position_of_LEDs(image_bgr): + + # convert rgb to grayscale + image_gray = convert_rgb_to_grayscale_average(image_bgr) + if show_opencv_window: + cv.imshow("grayscale", image_gray) + + # get position of leds + 
position_of_LEDs = get_position_of_LEDs_contours(image_gray, image_bgr) + # position_of_LEDs = None + + return position_of_LEDs + + + +def lane_detection(image_bgr): + # Detect LEDs + if print_additional_info: + print(f"Detect LEDs and color:") + position_of_LEDs = detect_position_of_LEDs(image_bgr) + # detected_LEDs = None # only that following code is not being executet for development + + # Get color of leds + if position_of_LEDs is not None: + detected_LEDs = get_color_of_leds(position_of_LEDs, image_bgr) + # print result + if print_additional_info: + print(f"Detected LEDs in KS_Sensor (x,y):\n{detected_LEDs}") + else: + detected_LEDs = None + + + # Contruct lane + if detected_LEDs is not None: + if print_additional_info: + print("\nContruct lane with consideration of camera offset:") + dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED = \ + construct_lane(detected_LEDs, image_bgr) + return detected_LEDs + else: + return None + + +def main(): + filenames_of_images = [f for f in os.listdir(folder_path) if f.endswith('.png')] + + + for i, filename_of_image in enumerate(filenames_of_images): + if print_additional_info: + print(f"image {i+1}/{len(filenames_of_images)}:{filename_of_image}") + full_path_of_image = os.path.join(folder_path, filename_of_image) + image_bgr = cv.imread(full_path_of_image, cv.IMREAD_COLOR) # load original image + + start_processing = time.perf_counter() + + + detected_LEDs = lane_detection(image_bgr) + end_processing = time.perf_counter() + time_processing = end_processing-start_processing + time_processing = time_processing*1000 + time_processing = round(time_processing, 2) + print(f'processing time: {time_processing} ms') + + #print(f"_____________________________________") + # show images: + # cv.imshow("Blue channel", image_b) + # cv.imshow("Green channel", image_g) + # cv.imshow("Red channel", image_r) + + if show_opencv_window: + pressed_key = cv.waitKey(0) & 0xff # display and wait if a key is pressed and then continue + if 
pressed_key == ord('q'): + exit() + cv.destroyAllWindows() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/04_Spurerkennung/dev/Lanedetection_development_V01.py b/04_Spurerkennung/dev/Lanedetection_development_V01.py new file mode 100644 index 0000000..2e7582f --- /dev/null +++ b/04_Spurerkennung/dev/Lanedetection_development_V01.py @@ -0,0 +1,254 @@ +# Creation Date: 01.03.2022 +# Author: Kenan Gömek +# This program is for the development of the lane detection algorithm on a windows machine +# It uses images, which were captured previously + +import cv2 as cv +import numpy as np +import os + +# input +folder_path=r"U:\bwsyncshare\images_input" + +# Parameters +pixels_per_mm = 71/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320 +# pixels_per_mm = 107/24.25 #[px/mm] for 120 mm camera height for resolution: 640x480 + +cut_off_brightness_grayscale = 45 # 15 pixels with a value less than this will bet set to zero (black) + + +# Parameters for HoughCircles +dp = 1 # Inverse ratio of the accumulator resolution to the image resolution. For example, if dp=1 , the accumulator has the same resolution as the input image. If dp=2 , the accumulator has half as big width and height. +minDist_mm = 10 # [mm] minimal distance between two circles +minDist_px = int(minDist_mm*pixels_per_mm) # in [px] Minimum distance in px between the centers of the detected circles. If the parameter is too small, multiple neighbor circles may be falsely detected in addition to a true one. If it is too large, some circles may be missed. +minRadius_mm = 5 # [mm] minimum radius of a circle +minRadius_px = int(minRadius_mm*pixels_per_mm) # [px] Minimum circle radius. +maxRadius_mm = 7 # [mm] maximum radius of a circle +maxRadius_px = int(maxRadius_mm*pixels_per_mm) # [px] Maximum circle radius. If <= 0, uses the maximum image dimension. If < 0, returns centers without finding the radius. + +param1 = 100 # 30 First method-specific parameter. 
In case of HOUGH_GRADIENT , it is the higher threshold of the two passed to the Canny edge detector (the lower one is twice smaller). + # If circles/LEDs should be detected at low shutter speeds, than lower this value + # Upper threshold for the internal Canny edge detector. + # "Gradient value between dark and white" +param2 = 5 # 12 Second method-specific parameter. In case of HOUGH_GRADIENT , it is the accumulator threshold for the circle centers at the detection stage. The smaller it is, the more false circles may be detected. Circles, corresponding to the larger accumulator values, will be returned first. + # By increasing this threshold value, we can ensure that only the best circles, corresponding to larger accumulator values, are returned. + +# Define color numbers to identify the color channels in the matrix with all detected LEDs. No string, because numpy array should stay uint16 +color_number_red = 1 +color_number_green = 2 +color_number_blue = 3 + +def preprocess_grayscale_image_1(image_gray_copy, color_channel): + + # Set all pixels with a value less than "cut_off_brightness_grayscale" to zero + image_gray_cutoff = image_gray_copy.copy() + image_gray_cutoff[image_gray_cutoff < cut_off_brightness_grayscale]=0 + cv.imshow(f"{color_channel} channel cutoff", image_gray_cutoff[150:,:]) + + preprocessed_image = image_gray_cutoff + return preprocessed_image + +def preprocess_grayscale_image_2(image_gray_copy, color_channel): + # Set all pixels with a value less than "cut_off_brightness_grayscale" to zero + # This prevents the false detection of color-channels which are not intended to be detected + image_gray_cutoff = image_gray_copy.copy() + image_gray_cutoff[image_gray_cutoff < cut_off_brightness_grayscale]=0 + # cv.imshow(f"{color_channel} channel grayscale cutoff", image_gray_cutoff[150:,:]) + + # For better accuracy, binary images are used before finding contours --> apply treshold + image_gray_binary = image_gray_cutoff + perc = 
np.percentile(image_gray_binary, 95) # calculate 95 % percentile + ret, tresh=cv.threshold(image_gray_binary, perc, 255, cv.THRESH_BINARY) # set pixels above specified percentile to 255, the resto to 0 + + preprocessed_image = tresh + return preprocessed_image + +def preprocess_grayscale_image_3(image_gray_copy, color_channel): + # Set all pixels with a value less than "cut_off_brightness_grayscale" to zero + # This prevents the false detection of color-channels which are not intended to be detected + image_gray_cutoff = image_gray_copy.copy() + image_gray_cutoff[image_gray_cutoff < cut_off_brightness_grayscale]=0 + perc = np.percentile(image_gray_cutoff, 95) # calculate 95 % percentile + image_gray_cutoff[image_gray_cutoff < perc]=0 + cv.imshow(f"{color_channel} channel grayscale cutoff", image_gray_cutoff[150:,:]) + + # For better accuracy, binary images are used before finding contours --> apply treshold + image_gray_binary = image_gray_cutoff + perc = np.percentile(image_gray_binary, 95) # calculate 95 % percentile + ret, tresh=cv.threshold(image_gray_binary, perc, 255, cv.THRESH_BINARY) # set pixels above specified percentile to 255, the resto to 0 + + preprocessed_image = image_gray_cutoff + return preprocessed_image + +def preprocess_grayscale_image_4_cannyedge(image_gray_copy, color_channel): + # Set all pixels with a value less than "cut_off_brightness_grayscale" to zero + # This prevents the false detection of color-channels which are not intended to be detected + t_lower = 1 + t_upper = 5 + + edge = cv.Canny(image_gray_copy, t_lower, t_upper) + cv.imshow(f"{color_channel} channel edge", edge[150:,:]) + + preprocessed_image = edge + return preprocessed_image + + +def detect_LEDs_in_color_channel(image_gray, color_channel, image_gray_copy, image_bgr_copy): + + # preprocessed_gray_image = preprocess_grayscale_image_1(image_gray_copy=image_gray_copy, color_channel=color_channel) + preprocessed_gray_image = 
preprocess_grayscale_image_2(image_gray_copy=image_gray_copy, color_channel=color_channel) + # preprocessed_gray_image = preprocess_grayscale_image_3(image_gray_copy=image_gray_copy, color_channel=color_channel) + # preprocessed_gray_image = preprocess_grayscale_image_4_cannyedge(image_gray_copy=image_gray_copy, color_channel=color_channel) + # preprocessed_gray_image = image_gray_copy + + detected_LEDs = cv.HoughCircles(preprocessed_gray_image, cv.HOUGH_GRADIENT, dp=dp, minDist = minDist_px + , param1=param1, param2=param2, minRadius=minRadius_px, maxRadius=maxRadius_px) + + # specify color number, for adding to matrix of detected LEDs + if color_channel == "blue": + color_number = color_number_blue + elif color_channel == "green": + color_number = color_number_green + elif color_channel == "red": + color_number = color_number_red + + # check if at least one circle was found in the image + if detected_LEDs is not None: + detected_LEDs = np.uint16(np.round(detected_LEDs)) # convert the (x, y) coordinates and radius of the circles to integers + detected_LEDs = detected_LEDs[0,:] + detected_LEDs=np.hstack((detected_LEDs, np.full((detected_LEDs.shape[0],1), color_number, dtype=np.uint16))) + # matrix with columns: x, y, r + number_of_detected_LEDs = detected_LEDs.shape[0] + print(f"detected {color_channel} LEDs: {number_of_detected_LEDs}") + + # paramters for drawing + line_thickness = 1 + circle_color = (0,255,0) + vertex_offset = 2 + rectangle_color = (0,128,255) # R G B + for (x, y, r, cn) in detected_LEDs: + print(f"x:{x} px, y:{y} px, r:{r} px, r:{round(r*1/(pixels_per_mm),2)} mm, D: {round(2*r*1/(pixels_per_mm),2)} mm, color: {color_channel}") + cv.circle(image_bgr_copy, (x, y), r, circle_color, thickness=line_thickness) # draw detected circumference of the cirle + cv.circle(image_gray_copy, (x, y), r, circle_color, thickness=line_thickness) # draw detected circumference of the cirle + cv.circle(preprocessed_gray_image, (x, y), r, circle_color, thickness=1) # 
draw detected circumference of the cirle + cv.rectangle(img=image_bgr_copy, pt1=(x-vertex_offset, y-vertex_offset), pt2=(x+vertex_offset, y+vertex_offset), \ + color=rectangle_color, thickness=cv.FILLED) + cv.rectangle(img=image_gray_copy, pt1=(x-vertex_offset, y-vertex_offset), pt2=(x+vertex_offset, y+vertex_offset), \ + color=rectangle_color, thickness=cv.FILLED) + cv.rectangle(img=preprocessed_gray_image, pt1=(x-vertex_offset, y-vertex_offset), pt2=(x+vertex_offset, y+vertex_offset), \ + color=rectangle_color, thickness=cv.FILLED) + + cv.imshow(f"{color_channel} channel binary", preprocessed_gray_image[150:,:]) + + return detected_LEDs + else: + print(f"No {color_channel} LEDs were found in the image") + return None + + + +def detect_blue_LEDs(image_colorchannel_gray, image_colorchannel_gray_copy, image_bgr_copy): + color_channel = "blue" + detected_LEDs_blue = detect_LEDs_in_color_channel(image_gray=image_colorchannel_gray, color_channel=color_channel, \ + image_gray_copy=image_colorchannel_gray_copy, image_bgr_copy=image_bgr_copy) + + if detected_LEDs_blue is not None: + return detected_LEDs_blue + else: + return None + +def detect_green_LEDs(image_colorchannel_gray, image_colorchannel_gray_copy, image_bgr_copy): + color_channel = "green" + detected_LEDs_green = detect_LEDs_in_color_channel(image_gray=image_colorchannel_gray, color_channel=color_channel, \ + image_gray_copy=image_colorchannel_gray_copy, image_bgr_copy=image_bgr_copy) + if detected_LEDs_green is not None: + return detected_LEDs_green + else: + return None + +def detect_red_LEDs(image_colorchannel_gray, image_colorchannel_gray_copy, image_bgr_copy): + color_channel = "red" + detected_LEDs_red = detect_LEDs_in_color_channel(image_gray=image_colorchannel_gray, color_channel=color_channel, \ + image_gray_copy=image_colorchannel_gray_copy, image_bgr_copy=image_bgr_copy) + if detected_LEDs_red is not None: + return detected_LEDs_red + else: + return None + +def detect_LEDs(image_b, image_g, image_r, 
image_b_copy, image_g_copy, image_r_copy, image_bgr_copy): + detected_LEDs_blue = detect_blue_LEDs(image_colorchannel_gray=image_b, image_colorchannel_gray_copy=image_b_copy, image_bgr_copy=image_bgr_copy) + detected_LEDs_green = detect_green_LEDs(image_colorchannel_gray=image_g, image_colorchannel_gray_copy=image_g_copy, image_bgr_copy=image_bgr_copy) + detected_LEDs_red = detect_red_LEDs(image_colorchannel_gray=image_r, image_colorchannel_gray_copy=image_r_copy, image_bgr_copy=image_bgr_copy) + + # check the cases: + # case 1: r + if detected_LEDs_blue is None and detected_LEDs_green is None and detected_LEDs_red is not None: + detected_LEDs = detected_LEDs_red + # case 2: g + elif detected_LEDs_blue is None and detected_LEDs_green is not None and detected_LEDs_red is None: + detected_LEDs = detected_LEDs_green + # case 3: b + elif detected_LEDs_blue is not None and detected_LEDs_green is None and detected_LEDs_red is None: + detected_LEDs = detected_LEDs_blue + # case 4: y = r+g + elif detected_LEDs_blue is None and detected_LEDs_green is not None and detected_LEDs_red is not None: + detected_LEDs_all = np.vstack((detected_LEDs_red, detected_LEDs_green)) + detected_LEDs = detected_LEDs_all + # case 5: m = r+b + elif detected_LEDs_blue is not None and detected_LEDs_green is None and detected_LEDs_red is not None: + detected_LEDs_all = np.vstack((detected_LEDs_red, detected_LEDs_blue)) + detected_LEDs = detected_LEDs_all + # case 6: c = g+b + elif detected_LEDs_blue is not None and detected_LEDs_green is not None and detected_LEDs_red is None: + detected_LEDs_all = np.vstack((detected_LEDs_green, detected_LEDs_blue)) + detected_LEDs = detected_LEDs_all + # case 7: w = r+g+b + elif detected_LEDs_blue is not None and detected_LEDs_green is not None and detected_LEDs_red is not None: + detected_LEDs_all = np.vstack((detected_LEDs_red, detected_LEDs_green, detected_LEDs_blue)) + detected_LEDs = detected_LEDs_all + + return detected_LEDs + +def lane_detection(image_b, 
image_g, image_r, image_b_copy, image_g_copy, image_r_copy, image_bgr_copy): + # Detect LEDs + detected_LEDs = detect_LEDs(image_b, image_g, image_r, image_b_copy, image_g_copy, image_r_copy, image_bgr_copy) + return detected_LEDs + + +def main(): + filenames_of_images = [f for f in os.listdir(folder_path) if f.endswith('.png')] + + for i, filename_of_image in enumerate(filenames_of_images): + print(f"image:{filename_of_image}") + full_path_of_image = os.path.join(folder_path, filename_of_image) + image_bgr = cv.imread(full_path_of_image, cv.IMREAD_COLOR) # load original image + image_b,image_g,image_r = cv.split(image_bgr) # Split colour channels and get grayscale images + + # create copy of images, to draw on them + image_bgr_copy = image_bgr.copy() + image_b_copy = image_b.copy() + image_g_copy = image_g.copy() + image_r_copy = image_r.copy() + + detected_LEDs = lane_detection(image_b, image_g, image_r, image_b_copy, image_g_copy, image_r_copy, image_bgr_copy) + print(detected_LEDs) + + + print(f"_____________________________________") + # show images: only region of interest + cv.imshow("Original image", image_bgr[150:,:]) + cv.imshow("All detected LEDs", image_bgr_copy[150:,:]) + + cv.imshow("Blue channel", image_b[150:,:]) + cv.imshow("Blue channel detected", image_b_copy[150:,:]) + + cv.imshow("Green channel", image_g[150:,:]) + cv.imshow("Green channel detected", image_g_copy[150:,:]) + + cv.imshow("Red channel", image_r[150:,:]) + cv.imshow("Red channel detected", image_r_copy[150:,:]) + + cv.waitKey(0) # display and wait if a key is pressed and then continue + cv.destroyAllWindows() +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/04_Spurerkennung/dev/Lanedetection_development_V02.py b/04_Spurerkennung/dev/Lanedetection_development_V02.py new file mode 100644 index 0000000..9487e9b --- /dev/null +++ b/04_Spurerkennung/dev/Lanedetection_development_V02.py @@ -0,0 +1,445 @@ +# Creation Date: 01.03.2022 +# Author: Kenan Gömek +# 
This program is for the development of the lane detection algorithm on a windows machine +# It uses images, which were captured previously +# V02 detects the lane by suing the grayscale-image of the rgb-image + +import cv2 as cv +import numpy as np +import os +import time + +import platform + +# input +if platform.system() == "Windows": + folder_path=r"U:\bwsyncshare\SA\Software\Code\02_SA\04_Spurerkennung\images_input" +if platform.system() == "Linux": + folder_path=r"/home/pi/Desktop/Studienarbeit/04_Spurerkennung/images_input" + +# Parameters +pixels_per_mm = 71/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320 +# pixels_per_mm = 107/24.25 #[px/mm] for 120 mm camera height for resolution: 640x480 + +# Offset Camera Sensor in Scooty according to Scooty-KS +x_offset_camera_mm = 100 # [mm] +y_offset_camera_mm = 50 # [mm] +x_offset_camera_px = x_offset_camera_mm*pixels_per_mm # [px] +y_offset_camera_px = y_offset_camera_mm*pixels_per_mm # [px] + +# image parameters: width > height +image_height = 320 # shape [0] +image_width = 416 # shape[1] +# calculate center of image +[x_0, y_0] = np.array([image_width/2, image_height/2], dtype=np.uint16) + + +threshold_color_detection = 60 # values under this will not be considered as active leds in each color channel + +# Parameters for Blob/LED Detection +minDiameter_mm = 5 # [mm] minimum diameter of detected blob/LED +maxDiameter_mm = 9 # [mm] maximum diameter of detected blob/LED + +# Define color numbers to identify the color channels in the matrix with all detected LEDs. 
No string, because numpy array should stay uint16 +color_number_off = 0 +color_number_red = 1 +color_number_green = 2 +color_number_blue = 3 +color_number_yellow = 4 +color_number_magenta = 5 +color_number_cyan = 6 +color_number_white = 7 + +show_opencv_window = True # show opencv window +draw_opencv = True # draw lane and so on + +print_additional_info = True + +def points_trafo(detected_LEDs, alpha_rad, dx, dy): + """Tranfsform points of LED to lane in KS-LED""" + detected_LEDs_trafo = detected_LEDs.copy() # copy, becuase else it is only a pointer + detected_LEDs_trafo = detected_LEDs_trafo.astype(np.int16) # avoid integer overflow + x_pnts = detected_LEDs_trafo[:,0] + y_pnts = detected_LEDs_trafo[:,1] + + # Translation + x1 = x_pnts-dx-x_0 + x_trafo=x1 + y1 = y_pnts-dy-y_0 + y_trafo = y1 + + # Rotation. Winkel Sensor im UZS, also negativ zu mathematischer definiton + x_trafo = np.cos(-alpha_rad)*x1-np.sin(-alpha_rad)*y1 + detected_LEDs_trafo[:,0] = x_trafo + y_trafo = np.sin(-alpha_rad)*x1+np.cos(-alpha_rad)*y1 + detected_LEDs_trafo[:,1] = y_trafo + + #sort points along lane: x_2, y_2 -axis (KS_LED) + detected_LEDs_trafo = detected_LEDs_trafo[detected_LEDs_trafo[:, 0].argsort(kind='quicksort')] + return detected_LEDs_trafo + +def construct_lane(detected_LEDs, img_bgr): + """construct the lane""" + # This function is partially commented in german, because higher math is used + # clearer what is trying to be achieved + + # get points + # xy_pnts = detected_LEDs[:,0:2] + # x_pnts = detected_LEDs[:,0] + # y_pnts = detected_LEDs[:,1] + + # approach 2: + # fit line through centers of LEDs in KS_0 + # DIST_L": the simplest and the fastest least-squares method: the simple euclidean distance + param = 0 # not used for DIST_L2 + reps = 0.001 # Sufficient accuracy for the radius (distance between the coordinate origin and the line). + aeps = 0.001 # Sufficient accuracy for the angle. 
+ [dx, dy, x_2, y_2] = cv.fitLine(detected_LEDs[:,0:2], cv.DIST_L2, param, reps, aeps) + # x2, y2: same as: mean_of_leds = np.mean([x_pnts, y_pnts], 1) + + alpha_rad = np.arctan2(dy, dx) # calculate angle of line + alpha = np.arctan2(dy, dx)*180/np.pi # calculate angle of line + # print(f"Lane: dx: {dx}, dy:{dy}, x2:{x_2}, y2:{y_2}, alpha:{alpha}°") + if print_additional_info: + print(f"Lane: alpha:{alpha[0]}°") + + # get smallest distance to point an line + # Berechnung nach: Repetitorium Höhere Mathematik, Wirth + # Gerade: x = a+ t*b + # Punkt : OP = p + # d = abs(b x (p-a))/(abs(b)) + # info: np.array()[:,0] --> gets only array with 1 dimensions with desired values + p = np.array([x_0, y_0]) + a = np.array([x_2, y_2])[:,0] + b = np.array([np.cos(alpha_rad), np.sin(alpha_rad)])[:,0] # Richtungsvektor + c = p-a + + # Betrag von Vektor: np.linalg.norm(vec) + cross= np.cross(b, c) + d = np.linalg.norm(cross)/np.linalg.norm(b) # distance [px] + #print(f"d: {round(d,2)}") + + # Fußpunkt (X_LED, Y_LED) + t_0_dot = np.dot(c, b) + t_0_norm = (np.linalg.norm(b)**2) + t_0 = t_0_dot/t_0_norm + [x_LED, y_LED] = (a+t_0*b) + if print_additional_info: + print(f"x_LED: {x_LED}, y_LED: {y_LED}") + + # Abstand (dx, dy) Fußpunkt zu KS_0 + dx_LED = x_LED - x_0 + dx_LED_mm = dx_LED*(1/pixels_per_mm) + dy_LED = y_LED - y_0 + dy_LED_mm = dy_LED*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED:{dx_LED} [px] , dy_LED:{dy_LED} [px]") + print(f"dx_LED:{dx_LED_mm} [mm] , dy_LED:{dy_LED_mm} [mm]") + + # Abstand (dx, dy) Fußpunkt von Bildmitte zu KS_Scooty + # Diese Werte zurückgeben + dx_LED_scooty = x_LED - x_0 + x_offset_camera_px + dx_LED_scooty_mm = dx_LED_scooty*(1/pixels_per_mm) + dy_LED_scooty = y_LED - y_0 + y_offset_camera_px + dy_LED_scooty_mm = dy_LED_scooty*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED_scooty:{dx_LED_scooty} [px] , dy_LED_scooty:{dy_LED_scooty} [px]") + print(f"dx_LED_scooty:{dx_LED_scooty_mm} [mm] , 
dy_LED_scooty:{dy_LED_scooty_mm} [mm]") + + # Punkte Trafo, um sortierte position der LEDs entlang Spur zu erhalten + # Bei normal detected kann bei vertikaler LED zb Fehler entstehen und dann muster: 211323233 -> daher mit dieser sortierten weitermachen + detected_LEDs_KS_LED = points_trafo(detected_LEDs, alpha_rad, dx_LED, dy_LED) + if print_additional_info: + print(f"Detected LEDs in KS_LED:(x2, y2):\n {detected_LEDs_KS_LED}") + + #----------------------------------- + # draw useful lines and points + + # draw lane line + if draw_opencv: + pt_0 = (a+b*np.array([-300, -300])).astype(np.int32) + pt_1 = (a+b*np.array([300, 300])).astype(np.int32) + #print(f"pt_0: {pt_0}, pt_1: {pt_1}") + cv.line(img_bgr, pt_0, pt_1, (255,255,255),1) # draw lane + + # draw dx dy + cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # shortest distance from KS_0 to KS_LED --> Lot + # cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_0)), (0,0,255), 2) # only dx + # cv.line(img_bgr, (int(x_LED), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # only dy + + #draw additional points + cv.circle(img_bgr, (int(x_2), int(y_2)), 5,(255,128,255),-1) #pink. Center of points + #cv.putText(img_bgr, '(x2, y2)',(int(x_2)+5, int(y_2)-5), cv.FONT_HERSHEY_SIMPLEX, 2, (255,255,255), cv.LINE_AA) + cv.circle(img_bgr, (int(x_LED), int(y_LED)), 5,(170,255,0),-1) # lime green. Fußpunkt + + if show_opencv_window: + cv.imshow("Lane", img_bgr) + + return dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED + +def convert_rgb_to_grayscale_average(image_bgr): + """This function converts the RGB image into an grayscale image. 
+ Algorithm: Average: Y = (R+G+B)/3""" + + # convert dtype to prevent integer overflow while addition + image_bgr = image_bgr.astype(np.uint16, copy=False) + image_gray = (image_bgr[:,:,0]+image_bgr[:,:,1]+image_bgr[:,:,2])/3 # add values / do conversion + image_gray = image_gray.astype(np.uint8, copy=False) # convert back to uint8 + + return image_gray + +def create_detector(params_for_blob_detection): + detector = cv.SimpleBlobDetector_create(params_for_blob_detection) # Set up the detector with specified parameters. + return detector + +def define_parameters_for_blob_detection(): + """set parameters for simple blob detector""" + params = cv.SimpleBlobDetector_Params() + + # Threshold for Convert the source image to binary images by applying thresholding + # with several thresholds from minThreshold (inclusive) to maxThreshold (exclusive) + # with distance thresholdStep between neighboring thresholds. + # Since the Grayscale image is dark if only one color channel is active, + # the Threshold values have to be set like this. + # particularly the thresholdStep-Value has to be low + params.minThreshold=20 # reminder: this value is set for grayscale image + params.maxThreshold=255 + params.thresholdStep=1 + + params.filterByColor=False # do not filter blobs by color + + # Filter blobs by Area + params.filterByArea=True + minDiameter_px = minDiameter_mm*pixels_per_mm # [px] minimum diameter of detected blob/LED + maxDiameter_px = maxDiameter_mm*pixels_per_mm # [px] maximum diameter of detected blob/LED + minArea_px2 = np.pi/4*minDiameter_px**2 + maxArea_px2 = np.pi/4*maxDiameter_px**2 + + params.minArea = minArea_px2 # min Area of a blob in px^2 + # params.maxArea = maxArea_px2 # max Area of a blob in px^2. 
+ # reasons for not filtering maxArea: motion blur + rolling shutter --> larger Area + + # Filter by Inertia + params.filterByInertia=False + params.minInertiaRatio = 0.2 # [0-1] + + # Filter by Convexity + params.filterByConvexity=False + params.minConvexity = 0.2 # [0-1] + + # Filter by Circularity + params.filterByCircularity=False + params.minCircularity = 0.4 # [0-1] + + # params.minDistBetweenBlobs = minDist_px # this has no effect + + return params + + +def detect_LED_positions_in_grayscale(image_gray, image_bgr, detector): + + keypoints = detector.detect(image_gray) # Detect blobs --> LEDs + number_of_detected_leds = len(keypoints) + + if number_of_detected_leds != 0: + # print information of keypoints + print(f"detected LEDs: {number_of_detected_leds}") + + #Pre-allocate matrix for numpy + number_of_rows = number_of_detected_leds + number_of_columns = 3 + position_of_leds = np.zeros((number_of_rows, number_of_columns), dtype=np.uint16) + for i, k in enumerate(keypoints): + # x_pos = round(k.pt[0],0) # x position + # y_pos = round(k.pt[1],0) # y position + # print(f"x: {x_pos} y: {y_pos}") + # diameter_px = round(k.size,2) + # diameter_mm = round(diameter_px*1/pixels_per_mm,2) + # print(f"diameter [px]: {diameter_px} diameter [mm]: {diameter_mm}") # diameter + # area_px2 = round(np.pi/4*k.size**2,0) # area in px^2 + # area_mm2 = round(area_px2*(1/pixels_per_mm)**2,0) + # print(f"area [px^2]: {area_px2} area [mm^2]: {area_mm2}") + # print('') + + # calculate parameters to transfer to matrix + # x_pos = int(np.ceil(x_pos)) + # y_pos = int(np.ceil(y_pos)) + # Fill matrix + # position_of_leds[i,:] = [x_pos,y_pos, 0] + position_of_leds[i,0] = int(np.ceil(k.pt[0])) # x positon + position_of_leds[i,1] = int(np.ceil(k.pt[1])) # y position + + + if draw_opencv: + # draw the keypoints on the original image + # cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS ensures the size of the circle corresponds to the size of blob + blobs = cv.drawKeypoints(image=image_bgr, 
keypoints=keypoints, color=(255, 255, 255), \ + outImage=np.array([]), flags= cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS) + + if show_opencv_window: + cv.imshow("grayscale", image_gray) + cv.imshow("Detected", blobs) + return position_of_leds + + else: + print(f"No LEDs were detected") + return None + +def detect_position_of_all_LEDs_grayscale(image_gray, image_bgr, detector): + position_of_LEDs = detect_LED_positions_in_grayscale(image_gray, image_bgr, detector) + + if position_of_LEDs is not None: + return position_of_LEDs + else: + return None + +def get_color_of_leds(matrix_of_LEDs, image_bgr): + # is image_r[y_pos, x_pos] = image_bgr[y_pos,x_pos, 2] ? --> yes. No need to split the color channels. + + offset = 2 # half of length from rectangle which is going to be used to determine the color around the middle point of the blob/led + # offset = 0 --> only the value from the middle point of the blob/led + # offset=1 --> 9 values, offset=2-->25 values + + for led in matrix_of_LEDs: + x_pos = led[0] # uint16 + y_pos = led[1] # uint16 + + # get values of color channels in region around middle point of blob/led: + # +1 at stop index, because it is not inclusive + region_around_blue_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 0] # uint8 + region_around_green_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 1] # uint8 + region_around_red_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 2] # uint8 + + # average of the values + # convert dtype to prevent integer overflow while addition + region_around_red_led = region_around_red_led.astype(np.uint16, copy=False) + region_around_green_led = region_around_green_led.astype(np.uint16, copy=False) + region_around_blue_led = region_around_blue_led.astype(np.uint16, copy=False) + # sum all elements in matrix and divide with number of elements + number_of_elements= region_around_blue_led.size + value_of_red_led = 
region_around_red_led.sum()/number_of_elements # float64, if not integer result + value_of_green_led = region_around_green_led.sum()/number_of_elements # float64, if not integer result + value_of_blue_led = region_around_blue_led.sum()/number_of_elements # float64, if not integer result + + # determine which leds are active: + # if value > threshold --> led is active + status_blue_led = False; status_green_led = False; status_red_led = False + if value_of_blue_led > threshold_color_detection: + status_blue_led = True + if value_of_green_led > threshold_color_detection: + status_green_led = True + if value_of_red_led > threshold_color_detection: + status_red_led = True + + # determine color by checking the cases: + # case 1: red + if status_blue_led==False and status_green_led==False and status_red_led==True: + color = color_number_red + # case 2: green + elif status_blue_led==False and status_green_led==True and status_red_led==False: + color = color_number_green + # case 3: blue + elif status_blue_led==True and status_green_led==False and status_red_led==False: + color = color_number_blue + # case 4: yellow = red + green + elif status_blue_led==False and status_green_led==True and status_red_led==True: + color = color_number_yellow + # case 5: magenta = red + blue + elif status_blue_led==True and status_green_led==False and status_red_led==True: + color = color_number_magenta + # case 6: cyan = green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==False: + color = color_number_cyan + # case 7: white = red + green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==True: + color = color_number_white + # case 8: led not active + # this case can not occur, because no inactive led can be detected from the implemented blob-algorithm in detect_LED_positions_in_grayscale + else: + color = color_number_off + + # fill matrix with color + led[2] = color # uint16 + + return matrix_of_LEDs + + +def 
detect_LEDs_with_grayscale(image_bgr, detector): + # convert rgb to grayscale image + # start_m1 = time.perf_counter() + image_gray = convert_rgb_to_grayscale_average(image_bgr) + # end_m1 = time.perf_counter() + # time_processing = end_m1-start_m1 + # time_processing = time_processing*1000 + # time_processing = round(time_processing, 2) + # print(f'processing time conversion: {time_processing} ms') + + # get position of leds + position_of_LEDs = detect_position_of_all_LEDs_grayscale(image_gray=image_gray, image_bgr=image_bgr, detector=detector) + + #position_of_LEDs = None + + if position_of_LEDs is not None: + # determine color of leds and add to matrix + detected_LEDs = get_color_of_leds(position_of_LEDs, image_bgr) + return detected_LEDs + else: + return None + + +def lane_detection(image_bgr, detector): + # Detect LEDs + print(f"Detect LEDs and color:") + detected_LEDs = detect_LEDs_with_grayscale(image_bgr, detector) + + if detected_LEDs is not None: + # Contruct lane + #print(f"_____________________________________") + # print("Contruct lane") + dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED = \ + construct_lane(detected_LEDs, image_bgr) + + # print result + if print_additional_info: + print(f"Detected LEDs relative to KS_Sensor (x,y):\n{detected_LEDs}") + return detected_LEDs + else: + return None + + +def main(): + filenames_of_images = [f for f in os.listdir(folder_path) if f.endswith('.png')] + + # initialise parameters for blob detectio once befor loop for performane + params_for_blob_detection = define_parameters_for_blob_detection() + detector = create_detector(params_for_blob_detection) + + + for i, filename_of_image in enumerate(filenames_of_images): + print(f"image {i+1}/{len(filenames_of_images)}:{filename_of_image}") + full_path_of_image = os.path.join(folder_path, filename_of_image) + image_bgr = cv.imread(full_path_of_image, cv.IMREAD_COLOR) # load original image + + start_processing = time.perf_counter() + + detected_LEDs = 
lane_detection(image_bgr, detector) + end_processing = time.perf_counter() + time_processing = end_processing-start_processing + time_processing = time_processing*1000 + time_processing = round(time_processing, 2) + print(f'processing time: {time_processing} ms') + + #print(f"_____________________________________") + # show images: + # cv.imshow("Blue channel", image_b) + # cv.imshow("Green channel", image_g) + # cv.imshow("Red channel", image_r) + + if show_opencv_window: + pressed_key = cv.waitKey(0) & 0xff # display and wait if a key is pressed and then continue + if pressed_key == ord('q'): + exit() + cv.destroyAllWindows() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/04_Spurerkennung/dev/SimpleBlobDetector_parameters.jpg b/04_Spurerkennung/dev/SimpleBlobDetector_parameters.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8ecc0a879a30fb253cb380d9a71f37b1c92c8702 GIT binary patch literal 15296 zcmV;xJ3quyNk&GvI{*MzMM6+kP&gp0I{*N1{s5f;Du4pF0X|70k42)Pp^;dfC@2Jk zw6}f`Rs_nTs^5p=HGKnXZ^47-ZT8E{{Q>`urK3(%KyE8bp5CP z@%wMc4gh~J{;U48{BQZM`~T{`H@HCuHSr4t6gq; z&VjBLo~JFpKtlYbzj$YK_6+${ca@l@vdxSdp<(7AdG#np^&BfgXflXm)E!r>P+J?T zhaqUYiQ{KXu;G4GE=4+e)4$lji@1s}lfP4I{WK~5)`SZ6p8V~A&^bGFhN*!o8o3AU_p_|tQbr6)GVEuvp9|gC($ux zSU?7LWN7f zub~+9@gsXYnrxP9EtqF16%CQ$ssfPPA&jo+g!fE0FnompKRDA2&`vZw68Dq>H3#2g z+^iZB?K0QoDf8fC`-Z3qmOxBH7VA81`mV4a+O`-j(+Fq{ zB$f5+UQ}PEZgYu%0mNQ>#Y2OT9gl_Lh#FlYJ^K!`bSu?KZ!%^6&QT1F?ksC|U|GER z6-RnC75W5oG!kagzU&xWocNvD-pP&M(r}e9t$Y-HQG}(uCQGkQdV7o%m@>CVY8JZhH&d5}M9)g-aG0l5(gtA}Jkk(klj@r!BugLj0&UH%tt{7Sd1_ zOG3_oTfR11%A*2cUK6 zt$!{qH+=+n_b`;d6eb^)rH4cm&sx95&=UW7(IKcR_=i)i^?tuHSlqScnK43J;ue7_ zH!)AmEa-KhTrw~K6Q(-$qMbZ!D6hI=S1e>J;?2A~@)Y-XT6aDx>?_5qUE?F{Nv0Pn zpENfMgxuV$bVkrN@t5!=_V$CCra0_>!q@><5w~22PR<7_@6Z4d03|kGLk9Gj7U>`V{b1z=bz#2E5w1QLZ7y1d!$gA# z&%aV46^yH^&EV}r7tSbwnVmgRAG(4$wFc;4mkouobBYFvr{HV@y6Cr@v_>^aq9(M; zu&?{p&XUT9IE~Ov$fr+JmfxTuX&C_v;VO?&=u}Sk?A`0`UD=+y3;UdfRo1YO3ph}Y 
z73L`;K44xexBQDTafem9SjE*c99tkEepD_+I(meHa1qb_At8RNk?Y`glp9wWHS&71 z?nvB=(DxDb=^paeGk-&o!WxIp@a_{oa8SKVw)x;PfcavoJ7#nmdIrF_eB%fBr0w7Y zK!zozfeZ4XXnUgK$818K2BfdA?x*i!k0TzBg9Xuy_S6!dXOHTYG=EG)VFQ
jScCZtb-sM$1G8Oav>k;hlQu9$=RMi3Gb#g;}1O67R8%~ zlSfah<|fpVl(lVO2JM0WWE5x6B}Q^wi;+Xyg?&2Apg(|^U$st`pJ2}aOV`o|S9)eB zv-PBlS@Aj9Mj!igONKuYe?+8%e&+o#lXSu7FNDiNjK+?=$cOf%pjDBGgKF}Y=ZgA3fq>iieUItMg*W;c4n}+w=r4%7w|; zRgfbfEKfGlUQUJiP`MQ8>T=uk1+(Sy*yp=>DpuFVx8J}i2nb)53y{bF{{G}EI>kIO z(zRG=UgT9HpB_7+v;QFiZZhj|P?Lzu^Q`?eC+WSe9I!9}<5%CW+&3mPO+X;SZaJ@9 zGVkolf2yeqQW{trnpI*BqU)Y=O|S>2-?9Q(L?|1Go%_U}zCQ|Y_6Cj}jUiTl+$a4L z6?a-b_;p;|%rSFIHWs!F$pQ)Fq6;~=<8f-!`1sTY$fq)S{iB51m zW~Uf^1h$CV_9;M@D+n&>jYBna@UI{(AB6SzEc<_m$0XpWwyh!f+G>i-?T0z8P$53v z2lu)<%*iWJ3m-Sj2ES3w_5j(>5pxWoKj3)d&tN1%q8tSf6lZ%8vw$-KzE#EcZ-8Js zr)~m(lD26ZgmELNU_6Xf6Y3`7rG4H~RaGCZins6J@@BLU()#{8b<|+A;*>sD2g(zx zH+4=6;s+zLR9n*Cd$chH|48}U zZBdiCbuga2P?6y-8LjOjHWh^%ts*ll3$K>t$bm4JCxHD3Sp@Mtk0O6yR7h-L;!tn(F- z@#GB}2nTE@cBhU%#Nm+iPqzpBr4&}3eDbQ0onlhqx|bp3$E3n8*f8bA(Dk!$sp*Ue ztPdq7kL|{87a!ii*pPs3;_n&k584Ym%zjYynE7A|7I(yiVTfC!L0ZpV$P5ZgqbR zL4_8%`*C-ka>$G1P+~5=+PRaTq06DKR}Tt*V62=CL3wwaKPdp}8TGAxV41LK=wChV zHMCKbf&+J&_F-k-3@qLat6t0as*Ai^(|YV+QrCiCK#C#x%$lj+56{JypNUUI-ZK!Z z&$4rh2y`=jG%CFNp%4mC+LO>k#2+d0HP-;h8@p6e2|LK5%s?`E?ZPW*ngb70&%7-1 zLHPFtW;}U9=+XbGYH{Z3KW->W^jD6dYR)i$RJw^gqNh388L&NZlhMBjCxK_~HeJyWb#Eb=KK?qwT{4pDZNQ-5>DDUwI3ZkAc-Z{_JV z3#knG8D8HYXly)}qvawA7^{LZ*AESwx-#1Gn9JhsV?RuFKK6~`5=H*CxBH{`2seyO zJGK$7FyitC%Z}A+{rXH9zqlLiLjWKJL*$OP)+OS_l3+~l?ngBc))DZxRbJn_Ht|n; zzekWO?;1sw#gJcHLJ(?jo~+u~U>ku*;EmcXKj|@KY(x`c7nED7z<0^6SASThrFIq$^JFuGk)G zNIo;Jym~2~c%UMcW1ppxy}|~L8h-O~ zaV^{L^>n}HKP*wkDt-Vz_U5@tYL4~2ov8=0m8y&SO;ropxx03i!Or1wKxePS9R*J2 z5&#Uasn9J4bjQeGq=m^&)zl;$w6gFAT8{nz;W|!vK)0}&ABXuTyyKvNsDxPK%!|LHYq z2CjycV}U{eLIVqO0dGH*1TgOO#2Yh2LB}P@x^LPQf?+)&-@wPyl;<+9Gx!8@PBo>NLO53Z_^VHFi z3ZLkZb?OMXxDXYJ)~`AG_7uO5#tl|Tj%4D%II%7Aa@lZhQ4c$lY7 zdBaN~rhMS*1}CChIbok*F3tD=KmY(Mk)y`s{zxLkreMQ&;!Il)^M(fj7Z|jF$Y=a0 z>@V(<3=xW*JyYswS0iW8b??NQhaK^jzySpKcz9QCjn~-QQ_lYD;Ag1vbOII&w*m#Z zco%ko{t4QCGF3W6biCr_K=fqA4i?J^vE__!nU+8tqQb2#mBFr94@@q5_^vb&!(={Z z>;_LHV@pc_YXBN3n&Hc!%}jyelwnG1PnWL9c|xAO4>C$s-$Yehd{&(Kuc$Ypx?Tvw 
z;LkJyM+&+&IzgB@tIg>oulN$$3-{UF524eVBv_H8B{?NsXVXsr-&!TRdeXiZlmZ9|rAW9r z^%D>gALE(&tnLm9&2wF-L~IKh*Ge?pipckod?oN&D)Pte^rOQ2$x1ivD>;+%3s0=n z*KJY&Xc%}Z0Sn%+z!IRmUbl4D5Q5R_Yr))eBG`$v+7f&1OWf=^@H)U95x+azyjJtv z*xen6ArKFRCRHDQD!ZmIY_}d-nbQsuS${VNRA?pUW9>O$_tflsj4vKL{etnZlc&_q zI2q9*FM)=UPw)J>X~h}D(l7?p;I|YcK>9Dop*TiADKb5_Inwn6+iCAxokP5_eD!`m zy|qPbDAE>|ZYxj2kI`8y7OLPa?V(5jVo>%fL3D4GjQRGgm5{PrjmbzwP7EHT4v1!5 zoa{aO;d)pa1vHX0sR%K1HHYR4>c)6f1fQ2X@%9mE<_J*I0ZKKa!g-_# z(@L_y#Qhk6ZE$$qXhkb&C;lJYrl;9w2V7mA@<3*n==>FQ0S}${2b)v4h6(w)y;wH; zFnxgDogeWPd)i<(2FTqxYQy&mU;_u6EIH^>jm#Iu8!_tpy_Lr%ZzABX>=@~TvABC9-Fab zrx*5c4BPkdHG071PqER=>sU6B`rL|)iD=lu^>2K0NGjjALP;!^^)HSzeu!OA>^D+d zn>$_atNUubNH8y}SKR&#m;kIv{G=gBXZ_V%`|%33JYw;C$ZXRDwR7zSspKqu(goma zS^99*Ph!8Tv^yxhB(#yfYkSCh%v~KXIB^Iba+F-v{>$XDHe0dfT%<|N=#|#X+aM7& zoDvc6XG4~xh@IKkwO@Zsad%sMo<8!ColrwVvkW9$w=l+(AZ%KzF+bwd(nX6@M{3V;B0z8xwfn`2BYW6HQ9C{Stli8+ zk9a`A(0Xb5#%KB)Mc9~a)1VlXsNN&miwnWY99i!qJ zJ3F#J3=%8ZK1xcir&pncumy7{+eNO`m0%v{vIM|%(v8DBD8h<1ChV>V@&-lcCA`?E z!3}D0@)-WU>Pt?rNF=ld$_@uS-m6c)K*KW3V=gS7k3SO?Mnx^vv35GRLnN61M*Hnz z+iz0{i+G5XyHtdtXyVZ_HY7osAx^VeCtG#NN})t^55Bx5^EtlA_4k*X?oG1>m*)^3 z(nT^YUq+HNh}26^JO>5MD|sk^0h|6V44*SKddZ7H)-*f6*9WMk^SjrbWG2*36u#<3 zO9yk^&RP_X%$1r-JmchHZb@&aEP>5{_Y$8FoNw2fEjgU{k)4A=@a;sMYK}Sy%OOnV zgM{w@%D@csEvqD;jF#}}7?Ol{dCx;C;~R)&Er$VJTAR}2?J<9vu==z! 
zL**-cPP)WP=@D?MO@fIE`V-)no<2+&x(@6E=&c`rzqKz(eVM^Hf2o!8Ts=Gn)oNnq zX;atFUmR^|rpAgqv`_l7YkVUHPMhdSkbI#NjEc)ylSPE5$rWvdE<^4&8vYzS45;Eq zvc*TTmJv)BmH$GK;_YKUo7`;Zi-QVlo`*33STRaIqf}eN}tTcbVddi)y$JH+~ z85_2uhxpp>(J}$REHccT*3*4uhk|kd;6Odw*;^qoSfX8W59^tZP|0&M*qiq3t-u^< zlc4dySMaQil(tta{t@VDoPdGIziPJEQz`MFgT)lUW~cJ;qq6n-&DC_7u!mht6N+}0 zVDce7&PTvydl+wSayQa1O-ol*?Ksb#bS%-q%OtaqkzLOv!HAF#@6_aq!3;_lyXSQT7?<&r&AnBlRW;Y zvo>)U*-75OsWc3|mzZH)We`+#h+=(M#Ho<;fAls(BemiV@s6z4a$t}oa8(Cr*l<9li$^gyy&i7>nvcY8T;0kgS%IMtlOaCRd8C-Tk$C6U=j zZzJj8?ePd$uq3zB*Onsv1$&KQxCdaJc%V4#3ew+UoUp}xb@TYF!c^!9nR=eiaz)O< zd3S4>zur$e@vH7@G|sIq3+!zDo}1GlO*FrgHS%I$jWv6jAMHOT<;L__{8$lO0o=*qCkBT>>vw9eBLsZ4Pv>NcVDAVnxQEwCR50D42? zm6ezCr{g<%<8>flaPBO(KSM)7Ci~LFi;>3y% z9XHWw2^MFW`-3OCNDYt64Y7r$AMJttRW!#|#!)LwhHBY9nqJr^)xxcxY`ujE1DKke z{(Nll`#_aC`_gfiO=Cnczz4YZ?bh|!L$YqUB6o1nlOm>RLLP0K^|%nH{P?f^^5sc6 zfi-zWf!`9Y?0r2wAVF<*bap)@PFeyJ;19yM=99quGczMy(nB%b?;I) zsEZezqwgXfsFdnXL+6{QQn3d?N)-dL&=@o-d=qXvr~QFH(ZU~L z>Mj>}DF_c-K8clm6vD)Bi$cmwrWMmBe)2q=KMFRxTZBqn_j2`IdM&H39oaeG;t&kX zC&xze1yO%-4Wf6=+jNj-Nk!w592T2_rlOkyCQ}z0_)%k;dzQm2LcI{AlIyYVf7k|NBP*q zsPyvG8Fn!qvi3sXv%ow>%?L{ULb+a)4cDR=(wgs&@hBDd72k;3g_)!3M_$d#=i7vz zBc$H_9DfJilmGw#26n5yI1t5cG4>=sKtDc+J=4}%SrU(tQElw2MTNNAqauPxltWPy zEMvcd&CZj-GPs@+(g5GYVjzHBr&QI4S95h+wr*s)R>O7hH$yojwZ{KOHL#P3pY^#M z5k-;!CpI?rosESPxC+_#ww2CCm87b@mo75Z9j-%QKu)n0?WjXmy2X5VMgy)+_W02(Q=ZT$7;;!T=hxjpV*I zO=ox`rn&{M-^O7|NLX<-;xAu6N^neYWpWlsiG*3mN{OS8AH@RgtIKKRVE+}RyKKbQ z#(9GiXyA3E?(@Zb`3hR_Oh*lXVZ6huj-L}^uRMMIplvBU(}NM(QbvVg~W&LSTLJ3DD(>Xjvr@6W}RIQ?vI z;Q%UUZriIjgCxj!Q}4%3=KJsgQQS~^57^l7euZOS5btnlcy(tFEP4<&7C8)LwRl7^ z1dM!mBx&HM~UglroMaNx9vLc-qRD zhVelH&0ze)Kw{@wUbm>Z-AP|n?-1N^t9x!C=|G~C&Qm|uV?n~%Y&-+|8ERUHc zrt@Iijc44re_GSn{UUdG51TLlQmwcDr}{DQ8Psy^wBZF3&VLV&!S;5nNb+@kq9_Jb zs1(JDEeS>BT|?Q&@FMejLV@(_%`^tLrc}Vw9z%1{6XrwiK=Gm5>NWv_d~v#Bantm$ z(sjx7XZ%uoQo-=<&a_oS7-*^nS)FbC^La7)e~(G=!6V$U4x9pO!|WEi*KO7s%J~G1 zDnTr;cMG~5=!OD#wZfSU^B~kH_b77s%PVlL=VWbPlt1-$GTUdawP*;1U`pFaB$=GzTc1HDwN>aFK;pe{uUgr5- 
zb*aCKtSlp&1~-746&J&>DjF?&Ex@bFA)wjK1x`6M_&yYS>TjjMKHY#5KLX%#g;KBpL<_bCo?=Ojk*XN91m- z+1z~~EZhD^zP)B)hXX_ju>ER#Hy9>dL zO4DlUwkIvgX&;CF&6hE5LN0X9yVo~mWy#QHVic>Kd%t+IdfNXXq1})RZxP1&W@;uo zLW=WP)0vG%On>tkR0xCs3xtQJ;+;u|Y&67UF<{m|wn1O>>ZAdl&AiCQ@_@kS`i}_r zE#h|;-L>&-l!{5F1$`wMg&?-YTfi0;e|a%q-k6UV(qskn2u#Voy`7o&85T}3MF^gJ zFdb>5CE(o}S9|x`C`=`Vz$$nlK%-PV(V0F;XDjx4W$M3b(UI}q5AUz?$~*l8BJQ-V zhHLm?_*p%84x9$+M~N?n1p%5LZFH?bt*~wR4=&=>sTl>&Xbo6L7(lNShQ?t_xqWh- zq9->{!-@t2du4AV!GkyPB35ez=^F0wWu?+sUt}cSx#e7)A`kpiPxn0=;3B{<-=9uf z2P-FDvTg$!ZjfcV8T+)42k5mY0nzB(;;H=0_dNm9|NbUj{h3nVoxl`_~1x=vk z0^E%YNa-x*3R_NEf^XlZbGz6VO_#1QH<7%M?BMhm;=; z8vbus;sUUCzHN*6vSFugbhKV* z5b`DA^R#tae=9nvfTLzB!fYCU+X!#UW?+!L*{|^^!jAtYUW);ux@q>=SL;X#6}Wvx zs>miXm-GtXXFij0_(-OZ+_TI+Xyn6&|LHaXJ8M>M3(Mp|-28#&x#3ILG^DNS*MI7+iOuWa%e5?(7kEazrs!v{XQ<~UsVO1&`^;}H{iUWgR{9*{z) zCw3;XwHg(j0Fo@0uKxtVMk>(=1k2APQ~>0S-`u83hlox^U1rMJU_thwN_RnaCrkyb zeYiN9%^hi@JQ!4f6XUOi=DG$r3x=qa!^vuzMa zsg+!WMM#1BwPkz&+YByv>^LCA^#2%H7#d-QgLItV9?rY zq0@>JjaG@f{;FI;TtFR@2Z$U!nvun#UQZG4Rsfifm7s^H&9a#>^BKpXs`Dipz+GAG znay3|)>?VF;P;Qz+9;gwjiXsEANQlikT7E53 zcM2zrcOSpSW>rq_IQs3?OLxslv!K-4S>Y9*76|OWs|kz@=OhhOGYw_X+s}Z-6(rY%%zIO(1|#C(jEpCzXaxBv+dQI8 zv!<4PCrl%ll`&(#tR!tIj|CAy*UCxwqWU(PVbNtSAfpNOL39iqGE!;&nnCO}8m(|6 zMydOlte7~r6M6S$`r5!N*T33gG4bzQJ!2UsTeoq*R5M9qjkUT5Kq+r(0OM=Sg7}bl z+51!?w`;mPV8yEdA-mn&pHaGb;5`bp11t1S?TT@Hlf`zgzBh13rmk?|GG_ReWD2S# zZzr2qTI(oo7)>R$#tPO(rEk!WWJ2T;>so7X=TgCEFH(Gz@Jh(t<$Ole!;iV}`ZPS~ zM-0>Dq#G!22yz&NhITGf9f8KvWZ|)7Ci?S^fM2SkZUM9vO_Mzu_^OCS zVL{G5;pCd4Yu8SQ@h#cr7?fxB%@but7F8_Vmc%w@2gY}!IWn54h5nk}Hvx_YVY&JS zHa#Ab|MR%~qgY0?4L$ddQXo@LYieLnN`y^&11sJKBY>D}CrM@p81WHutkhnmcv@K6 z)0L5xhz%oL4YWhp9siw1Ip7dGYHu-{Ov&yQBWo7*`$pbqzLvYzkCmZJ0lvoy!eqHV z-#y(qltf}h)(&bls`_e2uTjr3QB_8~^ulXI2O3qwLC{(ZRR}Kc)q&k>YLM_}d8Ot@ z4p<%?zXujB8Q$wO;L-3qMK7d3_!VjxskkzuV!-$?=$R0J?#AUdZGJ2c@_$V8{qmf0 zkxLJ7N__h7NRwvibIB~Rha|;?4A$o`lb?8uvjSZ<<8tyKHZ7S=(#FMn!Ruh6`Yv(hovJ7A;(S)uu+UQ<2F*q+W5yr+axvmL zck&5p3$ZoDRY8#Xt~e4M1@VqZ=*eQx`L>1qaMaE1Q} 
zPtcPn!zK0A6T2eH6_5cIUOhGgH+u&xbX!k!1r3aZ;`oV?3r~R% zui=Yf>v5+gH;VA{=Cbc<53T>MQy*YOAth zVyHT@Q7&kbqP$oy-_ME<^3Mj2qGr5gp8rrzY||b0z|RAj%$M#P7coZ3Erc9q!GmZ2 zoq!+k2bMIz%u}o6cpL(l{|__#k?fWVh0J)7SLuuF?bSi37zvli9d$?akq0e`+YDpf zB%Sh-`sZmH+efgdQa6jUz>%{`ADFw4OZ?HGm1VnufGD$yUajeryS(?((!v`Yo&f5v zTu^DZb!JZTo7~|SmvH$Gam4lQu<5|N#C!-V8l`{XmBEM#5dAGo;2Pnn{d-}DRy~!@ z;FI-JMYbxpG_96b-s!$9kWG>}fn!K^?5VTUk9VEMN+ojKiVY#a9ZjUmTRwT4!d;I2 zfeZiv&fUIJ13^L@K_|AF?Ps}}UVtw_H3lpB1x24=CEr6X^oCqw$zzE#Sv>~R`mm&X?AFrpfN(@sAjR83cY28}d%e(eBMHn9|{j?Am&2@X`n}>0YOENv$lM{+ADJ}07)(ekW5R@&qg&el4(j8phM9?v$zCEy~O(EcZk(Y zch|t+7DYw-{lc$*{Z4S{F^h}|bA|7V6(*OkWBO9amL@a@^%bB2oYLzZu;-A}4!4L* zkOy3vdp=Eq;qB)&qgKc{*!l}*p53Bky3*D*Y8`p-^00r*Kh38Fyl2`s&CzL%!qBiPITMYiDF^}|ZLF2#9MBw`k|60k;7fyv!(S)w_0jb9Y++DK zBvwyK0k8)eUUJlC6Zvg6iFtRpc6jxm4A_@rI214ce75qsnl=`_();cGCF2|yR&OS# zUN7C0V>C^Yhz$w3QCElY09luB&%9t*4}wrwl4fy`qe>j|f2Oi8FCrC#n#)%P z;CH_ysq8hFXDuNOpV^y;ebo}?&pYshJ*MHCqEP7qRxY8{bS`Q3FMSaSv#3sl6_E=! zYG7PZ$w$ZYo6o=A9pX3E2RKReF?f%m`S0ObjG)UB{!vJPv@rQcfy?CLHVXIk>?&J)$A=!-|rH$vL_0%Kq7 z3n7x)AoPthU4{-v4tfv+oY@opKwWS#1RdF0#@9Fl$C1I(0HHe+DU^HbAwqs-N<0+K z%`9iV7QYNllDi?S?bz_(QNVIh;49PjsxMd4+-12^0f>E1qSckwUpt{{0i!m zjhDGTGoRB@44Ed)y1zK)O5ecgzYZY2g$%lwo?GzN{zE2^$;-Ap@Mykfeyl5R?YR`P z4i=9kFJpUCUonpmCroq~D-aA+S&}(BM(7S9=$iecRcGiVMHag(Tm5M1Q=fLvqvWC} zK1U{kU@)meF@RgG#q$XCYJ4mjO2)~B3}{|9!^cQ=+BbIx1N7+u@k%G2JKfk{a5@Q{ z{~B;I<${Pm7eisZ9IldX?xH5}# z9!Y77VWbkB>XnP9e>nlUpDiz*b>eOy#*Ek`as_Wpp!Dvm*lp6^PQdPRh6-r3wXTk0 zLMu*`(Q{~qU7S2BBm8T2QO_t;lEpS<+2LQ0p#{B5SO)5OIMpVR!JhrS@jTsb?mJbU zXx1ba?P3PxIbnr`=S;+MHnygqDGC&{vNOryK*>Xr`@Y07rZ@SR_rDP-5P(XJp$t!d zvuD)o#p|!Df^AMKU3~8cRrGw0g^OD86m*X>K&&5M=}W%sPZ$4K6_{EopSanW(Jh8l zpd8$8%=u&y-W|DA^Li0q4K^VZY-UxCZP)omO@{=h9=x9bo3245=HSS&nD&f`^cHoQ zYY*m?gU7cl2No0O7uN0)MNwVFta^sF@UEU)dw{Uftjhyq-U=$r_0|f+@_Y%g5IGNX zqV^PKISsi+{;(4WKp}k#vUM-b?j%e83UWE}=jg0C1U>351SW-+o7!&r;X;5M+AI8R z=UxS;3?IEAHOkzleMH9=&|H5K)lBp}s_FZw52kVy3n%DYPjIp_&(Lh!}RqJ$-5!wglQU0eWA(xJiMRBH{siZ{?L 
zMFr3^JNWGA^a*lR1Y0ic8rb0D3d8c`eRf+iwvaz-K|R3a(FX`D4wHJf+g!-2<(10o zukiR)7l+7R9@XM%nYvaxkDF+-M9uFy?=>vcEgXP&LGR04K&N#xHoc6=cKa;oC@H{vc(tjcrI>OL)1|y z7tG$C{Ei>^?O@PyTG@evgj&9G5DWtLcGBRVmF#`hTBkr_;4j`Y-ycFSO?W@U^O>GO z$+oi&u@WD zGKA}mxq(hKD?v>HZU%9f|DzTpex-|JTeLjN z--4jS4c0lcxPEghpp~#qQCwZNpZ}o;63ywN;DA;;(q;nwkd{nz^ctrZg+L3Iq$Yik zL#E}Y`&A&dh9fBJ^YlHbAZ#qNSlDOv20WqjXu}i*CT|{U;Cv1%pO==|GSDRyqoj`wl8zhe4d+@|c5D{F>h|`Wz^-8&YMw#5|8nZpjN~ z^qER(^(=hfaon9F9KV%UUa=i-e%lOmr?kl7$G;bRCbrEKR}k1;V!yNBIIo`UaKOS| zLpLjeVsfT?P#_pmM%g<0k453 z++%5bqn@Hom{yd4JHB1IO?|!((;D9Hpb7BSKJ%>EJ2i}vnJqe-+kN)|yb;1d@j}~5 z>|8e7b*zeBu)b1K!?=6&WVkjklT()6?_T3(y-odKFd7iE+x@U`#YW4w_t7|`eQFu#n?g{V z7k;YCDqBNy2hMpZ@M*_<7K9tKg3Dj6mLi7;9HS$(XNjZtdWh8UW}SxMJ9LPaHvyMg z9a~37S%q|6rh@#e>Eb!TWylK1eTcmMEH}li2;`(~Q%se!I8_x}Pt&#g3vfqzP@yrR zU*#SZE&gLF6{VSbx1x`_`wp_j%Q}i^C`vX zfcAYN>+wCJ#&=kN34CtF3ph18j4nBt_gFqu9#pfPE)6G^*x9j)O1f*LK8$=-x`0+V zK3UhrGVtmt7HRu03Xf1~BvOEQI#5mjuIwx;h&(E3YoEK7N!%8iYP4ec?l#;21&|p; zQ1d?o+S{geu;PoziJnvJFExVxEJSV}zgkX)94O8W@M7w;|5epL;I!UYzyj{v5({?u zHN5hA$(hIJ*h$1vaIl7Rq4)^!+{b?&>KSa6afNS$GNQ~=g%ZGZTFjN>-dGR-$^)rP zH{R${Cx5-SPpmS6xWCC)b*GYtB8uX4sdT#Yu+xo+lqGKE=-5I(yAe%gO;f44#%X(u zS&GJG`(LN~FPgEw%Y9z0Qox%h^563NFfOM<-xxy3DJ1s_pV;7*_>5i6F?+#vEw(kzTDlg|r|B?4hpL*DYAn|zo zwz`(ZPx65O=1iEkLt$t5FS*lYpc`^1=d6BZP9%TQm37M* SH25 apply treshold + image_gray_binary = image.copy() + perc = np.percentile(image_r, 95) # calculate 95 % percentile + ret, tresh=cv.threshold(image_r, perc, 255, cv.THRESH_BINARY) # set pixels above specified percentile to 255, the resto to 0 + + cv.imshow("grayscale", image_grayscale) + cv.imshow("red", image_r) + + params = cv.SimpleBlobDetector_Params() + + params.minThreshold=1 + params.maxThreshold=255 + params.thresholdStep=1 + + params.filterByColor=False + params.filterByArea=True + params.minArea = 300 + params.filterByInertia=False + params.filterByConvexity=False + params.filterByCircularity=False + + + + detector = cv.SimpleBlobDetector_create(params) + keypoints = 
detector.detect(image_grayscale) + number_of_detected_leds = len(keypoints) + + + for k in keypoints: + print(k.pt[0]) # x + print(k.pt[1]) #y + print(k.size) #diameter + area = M.pi/4*k.size**2 + print(area) + print('') + + blank = np.zeros((1, 1)) + blobs = cv.drawKeypoints(image, keypoints, blank, (0, 0, 255),cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS) + + cv.imshow("Blobs Using Area", blobs) + cv.waitKey(0) + cv.destroyAllWindows() \ No newline at end of file diff --git a/04_Spurerkennung/dev/dev_Trafo.py b/04_Spurerkennung/dev/dev_Trafo.py new file mode 100644 index 0000000..7707837 --- /dev/null +++ b/04_Spurerkennung/dev/dev_Trafo.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +""" +Created on Thu Mar 31 19:36:39 2022 + +@author: Kenan +""" + +import numpy as np +from matplotlib import pyplot as plt + +points = np.array([[-2, -3, -1, 0], + [0, 1, -1, -2]]) + +x = points[0,:] # x coordinates +y = points[1,:] # y coordinates +f1 = plt.plot(x,y, 'ko') +plt.grid(True) + +alpha = -45*np.pi/180 + +dx=-1 +dy=-1 +b = np.array([dx,dy]) + +#Verschiebung +x1 = x-dx +y1 = y-dy +plt.plot(x1,y1, 'bo') +plt.grid(True) +plt.axis('equal') + +#Drehung +x_trafo = np.cos(alpha)*x1-np.sin(alpha)*y1 +y_trafo = np.sin(alpha)*x1+np.cos(alpha)*y1 + +plt.plot(x_trafo,y_trafo, 'o') +plt.grid(True) \ No newline at end of file diff --git a/04_Spurerkennung/dev/dev_calc_mean_of_points.py b/04_Spurerkennung/dev/dev_calc_mean_of_points.py new file mode 100644 index 0000000..de37757 --- /dev/null +++ b/04_Spurerkennung/dev/dev_calc_mean_of_points.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +""" +Created on Sun Mar 27 09:28:09 2022 + +@author: Kenan +""" +import numpy as np + +from matplotlib import pyplot as plt + + +#p1 = (1,2) +#p2=(3,4) + +# dritter wert ist farbe von led + +points = np.array([[1,3,5], + [2,4,3]]) + +mean = np.mean(points[:,0:2],1) diff --git a/04_Spurerkennung/dev/dev_color_pattern_detection.py b/04_Spurerkennung/dev/dev_color_pattern_detection.py new file mode 100644 
index 0000000..01aee6d --- /dev/null +++ b/04_Spurerkennung/dev/dev_color_pattern_detection.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +""" +Created 05.03.2022 + +@author: Kenan +""" + +import numpy as np + +from itertools import permutations + + +def get_color_pattern(color_array): + # define color patterns + color_pattern_01 = np.array([1, 2, 3]) + color_pattern_50 = np.array([4, 5, 6]) + color_pattern_70 = np.array([7, 8, 9]) + + list_color_patterns = np.vstack((color_pattern_01, color_pattern_50, color_pattern_70)) + # check which cp is detected + for cp in list_color_patterns: + # create permutations + perms = np.array(list(permutations(cp, len(cp)))) # 6 permutations for 3 numbers: nPr(3,3) + #print("perm: ", perms) + # check if one of the permutations is included + for permutation in perms: + if np.array_equal(permutation, color_array): + print(cp) + break # end loop if found, for performance + + +# dritter wert ist farbe von led + +points = np.array([[1,3,7], + [2,4,8], + [2,4,9]]) + +color_array = points[:,2] + +get_color_pattern(color_array) + diff --git a/04_Spurerkennung/dev/dev_grayscale_conversion.py b/04_Spurerkennung/dev/dev_grayscale_conversion.py new file mode 100644 index 0000000..0743c2c --- /dev/null +++ b/04_Spurerkennung/dev/dev_grayscale_conversion.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +""" +Created on Sat Mar 26 17:08:00 2022 + +@author: Kenan +""" + +import cv2 as cv +import numpy as np + +from matplotlib import pyplot as plt + +path = r"U:\images_input\image.png" + +def convert_rgb_to_grayscale_average(image_b, image_g, image_r): + """This function converts the RGB image into an grayscale image. + Algorithm: Average: Y = (R+G+B)/3""" + + image_r = image_r.astype(np.uint16) #sporadische lösung. 
umwandlung vor addition, damit keine gehler weil uint8 --> overflow + image_g = image_g.astype(np.uint16) + image_b = image_b.astype(np.uint16) + image_gray = (image_b+image_g+image_r)/3 + image_gray = image_gray.astype(np.uint8) + + return image_gray + +def convert_rgb_to_grayscale_average_v2(image_b, image_g, image_r): + """This function converts the RGB image into an grayscale image. + Algorithm: Average: Y = (R+G+B)/3""" + + image_gray = 1/3*image_r + 1/3*image_g + 1/3*image_b # automatically converts into float64 + image_gray = image_gray.astype(np.uint8) + + return image_gray + + + +image_bgr = cv.imread(path, cv.IMREAD_COLOR) # load original image +image_b,image_g,image_r = cv.split(image_bgr) # Split colour channels and get grayscale images + + +image_grayscale_avg = convert_rgb_to_grayscale_average(image_b, image_g, image_r) +plt.imshow(image_grayscale_avg, cmap = 'gray') + +image_grayscale_avg_v2 = convert_rgb_to_grayscale_average_v2(image_b, image_g, image_r) # new alrorithm, cleaner + +# Test if both are same +res = np.all(image_grayscale_avg_v2==image_grayscale_avg) +print(res) # False + +# Explanation: +# row = 227, col = 7 : r=2, g=10, b=0 +# opt1: (r+g+b)/3 = 12/3 = 4 -->int = 4 +# v2: 1/3*2 +1/3*10 + 1/3*0 = 0.6666+3.3333 = 3.9999 -->int = 3 --> FALSE +# conclusion: opt 1 is better \ No newline at end of file diff --git a/04_Spurerkennung/dev/dev_indexierung_Bildmatrix.py b/04_Spurerkennung/dev/dev_indexierung_Bildmatrix.py new file mode 100644 index 0000000..1d7254c --- /dev/null +++ b/04_Spurerkennung/dev/dev_indexierung_Bildmatrix.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +""" +Created on Mon Mar 21 17:49:58 2022 + +@author: Kenan +""" +import cv2 as cv +import numpy as np + +from matplotlib import pyplot as plt + +path = r"U:\bwsyncshare\image.png" +image_bgr = cv.imread(path, cv.IMREAD_COLOR) # load original image +image_b,image_g,image_r = cv.split(image_bgr) # Split colour channels and get grayscale images + +image_r = 
image_r.astype(np.uint16) +image_g = image_g.astype(np.uint16) +image_b = image_b.astype(np.uint16) + +img_rgb=image_bgr[:,:,::-1] + +image_grayscale = cv.cvtColor(image_bgr, cv.COLOR_BGR2GRAY) # Y = 0.299 R + 0.587 G + 0.114 B +image_grayscale_KG = (image_r+image_g+image_b)/3 +image_grayscale_KG = image_grayscale_KG.astype(int) # quick and dirty way to convert to uint8 array + +image_column = 6 +image_row = 243 +image_bgr_b = image_bgr[image_column, image_row, 0] +image_b_b = image_b[image_column, image_row] + +plt.imshow(img_rgb) + +# get pixel info from specific color channel +print(image_b.shape[0]) # 416 columns +print(image_b.shape[1]) # 320 rows +#image_b[row, column] +print(image_b[230, 20]) + +#image_bgr[row, column, dim] +print(image_bgr[230,20,0]) + +# Test for addition of colors +row = 245 +column = 70 +print('Color addition:') +blue = image_bgr[row,column, 0] +green = image_bgr[row,column, 1] +red = image_bgr[row,column, 2] + +print(f'blue: {blue}') +print(f'green: {green}') +print(f'red: {red}') + +grayscale = image_grayscale[row, column] +grayscale_AVG = image_grayscale_KG[row, column] +print(f'gray Y: {grayscale}') +print(f'gray AVG: {grayscale_AVG}') +image_r = image_r.astype(np.uint16) +image_g = image_g.astype(np.uint16) +image_b = image_b.astype(np.uint16) + +res = image_r[row, column]+image_g[row, column]+image_b[row, column] + +print(f"addition of r g b: {res}") \ No newline at end of file diff --git a/04_Spurerkennung/images_input/-30_0.png b/04_Spurerkennung/images_input/-30_0.png new file mode 100644 index 0000000000000000000000000000000000000000..bdbf466d5c6e8a3521e49883d85af3777e7c6425 GIT binary patch literal 535664 zcmeI52Y_5v`M1xso$cB59@1z55<1c$p;v(*0YoW6P(+F-O-KM~!XHGW8w8bJM37(v zlwKt?sey#_UdZ;H_Px(!4_~N~&F<`-ncvYjx6GZHJI|fH5AQkey>t0@57@J+ykEI< zu4?bSc01U)&}|mGl$O|Bc*+^4 z{QSgIP941Wm@$J-J^hpue*DwpojdEpNr#Ldd&t6GJ2pPkQ%>#(A%$AAN79`S;gOdgS%^Q>B&H@3qB2TTQIIXY`=1pIr3fb1%%C zGjf*;%7>roYDypZ(U6~~hJ{?y`8({e(a@*j|8k+T-mWNhPwjupQ=1OBG}C3#=$pPD 
zn)vI`Q)gWBoqbCu_HetLbJgfO-7b5CCQiC>y+hn>QFrvK#~nM{eRmUg^!gL#z3nDW zYP#v%kUR4CLwbg8yT%Rfe*9&-xf8c{_rLzV-|X(jZR&cQu!1GI(6%-CRhJ(lSTh7$qYUA*xBj1uitm-F3!z7L>OGT>Rb-L_-yZth z-p5WEH=@VvgSwq|)owRmS-bfiLoXlwyA7XT_pQv2X3f|*5!q?RR_>vn>=ZipnoaIL z>bOY5zU$YFJL9QYjn1un{FJBvyFqDW;)xf{eErPK@|{zA-nOorc*5SlJI%Qt>^6Ml z*jFCe`NdM_cDwC@5fAO!zwYTi8$VSt;OW>$Pgf+5-ul<0hd=ks=$g^xKbkn;^rJVt z|H9E5T=LpO14kS^>c6`T-14b|PQ2vWQ>#ZlbLc;+cRaOA%Q2<5KE1&c7nDVJeW|qj zF+)DQEOhK|#tplwwD(OHHw}4uuV}X`h7W%EvK|Kw-S?WU_S)>=-;5o6=w1U??s$55 z+e^Q{$3L&x{w25hg~>~v*mJYrU-0ljJN~!w(J@o*-lpHL_xMxA8$UW@vzxE(b=xg( zoc{NIJ6^fy<@LwC{*hjDL3h__{#{-0;R>Z@s#3 z&jmLuh}4}~S#?Rzi(b5J{04n*=@aiWzVDfR{(9qnclLezioP$7-E^DH7T-AkrY<*~ za_G1blSllj=QXdCU$@V7>s{A>#Hg|N+PVm`wqQ#(svgh(*Kb0_iTUnrT31%`;@Ue|KAhwxL~X2*8AIQAN=Zwka!J;z@?H8yGfAs-y_@m=M2_1b^O?dl&K zdGASg^uPV_-Ol{?&+b}wTzvaDKW;}NE5oaB-@#MQF|Ks?*AD(l>orf=*{KU^LK4RO$?-+OJ5l20;#{-id zSoht>A9(z(u6Ip2;MxQ3yY=1i&p!0&Pd@LM@yGTa|Jd^n zKD+GNC$By4pXdEwYR7ohr5{w^H}Hso=l}fgr}x~lu_ zX-CaG=dJolw>)yoGfVHD_Sa=wFZ=PbOF#PKf42PNhPNKR#n0b(_N6hO95naMoh$x$ z-KHnaxc~eE#~gUU_($gt8+F{MYYuzhy@wvV&z^GY4=lPd_l^#Fv)*_JJXd^KYDb&D0-PU;N$k7rgsM z{Fr&iT=DGrmFr)8>BWa#vh!7^UN)}Zo15LR)tPU8H0I|sc5B(~`rW4AGJdlIxBcrT z`)oC0r+Y{KWQPy8`RA7Z-1&u(CmjBtcg9T`xA#x?9kA`l5f5(tv!DKJMsjKGrL!i@ zs+fIl?;A(GbkwP5-#+!3x{L4XbSh?4T{m=Q`l53vW>g?gqZ*j-QbEo`z{d<1<#C}!# zeSFJyxAwaEmxE8x{^KuxIQ{(3u6S(J zpzGo4_iLBGvhDTv-MZ0hOaJr8E7N|v z%^#NkF|)MWGu>wXZt@+M?0eWI1INvK^8TM}`|u8@ojt95cF)*Gmwd0}{vH1Do5>?i ze&@jZmL7J)gXcfF-Z}f9biogve&xca9}S(fxZ=?Axfh?e_@noh9(CZjQHRfX^q7`Q zCiUN`=jFe7u*bCCH}stF{steNHMjqzr@Zp-n?Cye-|xBaxhX$-{e-tpSU7L`Cbu40 zef_;R%)6obXH_43`1h(A-~0R0`i9-7jk@8r4W>PQ(d02x{`~J9{_(`|cfCCIwe5Ev z`bTc^t(w%p`KPa+n)>SFADp@VvIm|x@QEMR?Xq;&55|0a={q|g znf~bg$x~OzH||PY+xOaQGttP6 zwaMeQ*o6Py9DLkf2k-CPIqNz%@gnD%Gd5l5+%GnDZqd=sZFi${y-)ew51-$|l@7UO z@7;DCdsfJ0Y@?7p*CuKrAOa#F0wR!G0uoSedF$OoKmA5K>8|(fCz|y2y_Yo z38+)B>rF*K1VlgtBp`hUL_h>YKm^P9Y!xbqaR9sR)RG2#A0Lr0;+Th=2%)K&KFpfI0=c 
z-c$rcKm2Abkf!KmYpi>A)K%IhJZz=*JAOa#F0qHv+0wN#+BG4%WB%n^gt~V6{5fA|pkbv|Z z5CIVo0TJjF0uoTCVAq?9fCz|y2uMKs4v2sVh=2%m3IPeIQ?TnzML+~ZKm;TpeFsE9 z1VlgtI)#7))G65YrXnB$A|L`1kiG*VAOa#F0-ZuY0_qg(h5x3ZiRlT2|0(qeML-1B znt&{{)(G_iA|L`HAOdY6AOW?7nw}#9A|L`Hu+{`5ptVM*7Z3pv5CIWr3jqnJE!6ZJ z5fA|p5P`KOAOWp4LcM?ph=2%)KwAh%Ky9I>=ZJs^h=2&JH311|tr6-4L_h>YKm^)C zKmuwDH9bcJL_h>YV66#AKx>UqFCYRUAOa%L76KAbTd3(dA|L`HAOdSmKmuB8gn9uH z5CIVofwmBkfZ9S$&k+F;5CIWbYXTC`S|ij8h=2%)fC#jOfCSVQYI=?ch=2%)z*-ZK zfYutJUO)syKmwoub^L_h>YKm^vBfCRMG2=xLYAOa#F0&O860kwsio+APx zAOa$=)&wM=wMM8H5CIVo0TE~mfp!wm#LR@Yyt|$w0wN#+B2e4}+Q~x24N;$71Vlgt zL}1McNI+}OOwSMj5fA|pXa@okP&+`>ONf97h=2&JIROc1&6(*LA|L`HAOh_`Kmuw9 zhH1SFs}XQpR}fCz|y z2($wM38)<)>Lo-#1Vlgt)|`L@wC2q83=t3k5fFiPAdtTTV$Vw~k``sog=`wJ|D}>? zmq}$-p=q70TF}2DAOa#F0zo2>mjVjKLN3$Ygi{)F>1f79Q&E>FOSo`25 zOu2M(+Br!mNZ&W@iwKB-2z*8$zXTLB>6AyD>uOm}Y1oyOmbz3t?Lw)LYpQ6n2Qx0y zm~oL$Bd%mgiA&X{TuWoiXCGEGA|L`HAOgW5kUs(nI~VH~b?l)J0sZ zE|$%LnnTSlS(kEdr9HR7VwuGvi%%_r{b_9n)kHu9M4&hb1Ybam`>YdkWo6~AVNipM z)>t=ONzBFKQ5UKTTi08?6~mRd=Cp~ZCFx3=N=!si*I3)+k_{#yq_fzf&Z5?0g@u|3 zh=2%)K&KN3wt(0>t9GmEQQ?w(t)QS^iz_QHb&1Nf>rztX>dS2`sLaN9Y|tl^3AvVr zq)SFyT(~~uB8wv~vCJ}~ z!|PnUJnmv$qb?SWxu&kouBoQUl{J;Qa3<{1W|ihdi%TbxR(@?z>3XwnxA>g6YhGb1 z$bJ|HA|pyzsEL3GKVLfi$<1QMG zxaGZ;yL5@ww^?a+EERJl4OaG+O1PF5Q%t>m-bNecT4>`w3&o+AWMNtx?9TA%gWz|*SoT+G8gI=61YKDMKILL2JD7!k)Bo#S827^16o|7i*>VEhO~P2lS$fDs0ove^~yyT zMOS9-!+E`Git8-1mn_Z^H)*~Jsw*rK$fXHaL@0%q{@x?KhG4ZrC znvKYQ=yIEB!93-0bkV7afCz|y2y{AuU;JuFN(p-RhYdc<8zZ|gtnZeu;& z>1VUYxTwc}0`pD8ZiO9N9CghNEiS|Tc#sY9Vf=^gHzZ}N)kHu9L_h>OpMXDocmAHN zz6b0T8IEN=uF9g?qBkNk@03~k(=FtZRVEoSA*90g7LjFUrTUCZaW5V+9?G$2S@?VM zAS^Wz5CIVofj|%lrhq(}a+~R8K_)~-WZ97GVxkFKmz@0=P`Wyu{ZXK7SDbaVg=+1V z*xZE_;V|BVe7uNQrzQd-AOa!~7y`i-5SWloH;bMYR+^m^5#4VlB)js*rTGa|)*~E( zZZ|6382g#sy39Rz)I>l8L_h=rMIiVBqMHrjARsa!y5lH1pbCz?=W#0C{3k&`#6k zR_&(#6#)?tfi@8EXS_B%wG*Dg-V&UiwABw(4 zJ^+^G1FG-PAgGCe2oxxR{1VXD9R$wTc_AJD0VwZ7Hte{!r|&Oh9gXGHKhzhqrR+xf zM+8Jb1oD}3Kmic&XT<{e 
zC_(-RV5TwcpPgGJ7i830Q=SzIgIioZOIaTx@?COt#jP$NFC|;8CITWL0(nY60?NsO zqnnNHG;ZyHfXHH4MlxJsOP3sy;c$@sFu29NgBDvbMudpi-j5gA>}Nt^UQGl;`w)+c=O3aq$vY(6SAJA=^PLh=?w^ z`4-$)hzyCw<77o%Km{qGiJ1v}Ry7e2fr2IAPn8Ax0Xy%5VGB|m(i&of1=he8OsPho zUyif})381B3h-1Vo@v2}nTgeq=$4ObGhf z5XgEK-7M&nV;yM@%WTU!wqbh)gb)fT5h)K+VH-q4pB#-oJJKSih=|u>swM&=AOiVC zKmuy_m~38-BU7MTq4K`aPjpv4di8r^G1Vt@tNj~5K% zMEvjs5U#LuN#V!y!XL z^DPh(>&cdo5)x7q0TB>^{2(9!wd-+0HUvMNH_8H;@Z%9+?qz{&m}eY_DVdVbepp5% zgqIN|*@y-K(GU}{zyk4*@$eWKlHbN=)kHu9L?90caMt5&r`9e6$Y^LASc_t_7m{jcmtfKr?NgZPi3T1VkWz2uMKf zdaS_N+hRitWI?e3!s*jGkD&Z`kk5vY56|5%*Z7D`TK+`PfjzeT@qb33(AOiVAKmuyFV})eMe3&x61KREu zgDqGlkOWC{9(1VkXe2>3H# ze(iN2_ZIPx+4xL{0)rtI$c4w~lH+kQBcwt$#A67A<_i&!3E2+W55i$e;Q^IzG_oI_ z^S*`eqT@MirzQd-AOiVAKmuylV}(?DSdi^_*&rVZ4`^gWWI&W=BMdJM4g(DdAs;dx zy50~C)0wUFUOo$Ld6osw=W&{UtyN3}PPEF&f=;*b%d z;BmrdK&<021VgsN_GC2(X|@H|`*_)~&3moO6d@of$;Tk5iGT>?8375XT@M&IX=Fi+ z^DrjV&w>mHsrYONVbE1aDK^;o( z5y&S35>UGzEkx7J0>K~?vYp6F z&@}F|ii7?Y0TIYo0uoTWA2~7_up$|PZZ^b2ms@`eiV=_pq9SA%P$XXkunU9uoZOUB)c*!C@ebLb~H^19Q@$z7{-= zjd&dE_`Wxu%l7juc-;5DQGGDeg4ZOyQWF6Y5P|$A5F`ubH_0#UJ>_up!I1{hNMpQ+ zNOvfe>urH}xSJ>gV6H3DUFEc-sV`8MxM+;~3Ai!IbdKm83(02H zL_h>Ypg;&nKsi5%1k1kTkPbsGRZE|W zN+*2AL%vpS-7`L+{ z>!DPl%4}3_1{#($;k2$_5e>PzNXV7gx>PEYm5?6~LOw`np@lLbgP1UaM6{e4dn{?uV2>jr3KY1v>fF|O zo2ZF^2#CO!2yjlJ2SrT;vRO}GTIrSYg+ zw>go$BFS8oO@C|%$d#Lr5;mnW&RmqW3lf@Z+cen9gstRP#JR&AK~|LA)~0GAAOa%b z2}nSmM3GPFa_etFr5h3|i$+{u6G^{R+J$XEsLF&CvTP^Tn#+W2fG1^-x0rxzlWg}K z8P9y%Zl-NhYwHmYzaYU>S&%_c69EzU903XFb2y530qN9Oko|PAOr|OlaqA~it4gtL zIc7ql2%*&ELWNt#M1y$J>9kF4{KtfYWT=W`?1z|bLFPksT(GWkvK>+r0TJkQ0uoTC zAA-)`0e*o90a5ivH(aTGD~7Vpq{4MS6&BH0)Rkt^*(``8$ed(yv936iP|{4q6v-eP zL`3E@-(q^})SjXy0wN#+pCcdveGW%)E|A&uus}Y@hBEB+%^2LDf#p&g0Ad=a8_ptQ zU2k3}8IwSBOn2NhN5_9_!1x>3J~~>)=exB5%+zInS81(!zr~sH_LxqBocO3nQc0< zu^<&i-#HQ0qXDZNnfCrr|17QXu-==2*OM;rr%nR!sy% zKmyw8&aS3Y=DOf1qOxKE^hyDuO6CR6ZBah zTca%#h=_FPLkk9kxHk`da%v(V0wVB50-RIqG@@4Q1dz%Q3o;>o508F1`sC=3BLiZY 
z^?YFn8qXoa@%?Z7fGhoN(=Dh(AQM_@L1tu0l9~vJfCzk!fCTh89L2pr7aSQ7rEp|C zeJl_U84zC--%W>*81O+rWI0F&2_d7!79U%%j(N7rUOZ$|H4zX25m=po1hhJhBA@31 zA7q1o23c@{5ML0)#CjfQ837?9#KM>l*$v%nWIFFza5qw9#FWRBVjBcC5fFhd5s-ks zgrs;^@MWNzjqC@>AfbM(%QneMi1mnvAiJSlj;=RE!|yPBY{6r-ty9UzK&Xj;2z(_0 z3Fs@a6z@vD5M(z5QsD|!y5h)&XkX6!BvS1qmS+z9dx0^{`+}2>H^|3yFo`d@_f_-BO&d|4#kEMz-$!v%J{haIN*G;P(A{uKcc2nc~Mo>Ky{ zTiOi~SPKFSV0k-YNb3^}B||Q3KQ5k%TFyqgMjxV@2#7$Z5|Ds86~ErR-3cHZYQwF9 zB4l?{<>y<|-7Ez%@08TXT(~Y`?n$^*qpk2)$+r7W=mkYU1ilRc3FzBU(l+@)fQpIU zCLa48iD+reMN1+sRi1P$6_)Oh_1NH5bVbyattfLDyN7pubG^$nnw(azrpS*^s=X9} zf*>FP6~tlGhxqK?L0q_GSJTELrLH3u4P7E>sqAC8Z@U zQJHYf<;|`vQSOpqlT6Bfz=+>(wF{Nf$+Qc98g{XTQI}ktv@SWb4_$Ih3z-R!HY>5_5fW#!pSsIj`q#X?q(9Z$PR zCgy6(YF(lv;X(}|S8fu@EY7%=`WBb0wJtgCD};pj5%F0T<|Q=|5P^;-AOUqeW4%L1 z5U}hgZsR=Z!5NnroN(a^8_%hUy7KaJ_vw(8E@KsOm8l9>ZWV45GZV^OV|k-lsLo}Y zO)pd&*n|O~@S?D5US?bI%SNO>b6Tf?y}Z`;PfY|0h=43qKnJs!K1$f?txI~uT=TjK zm$CFGWVcJLs;G3y-YM5Gz(#ktD9C=vDBc*i{DdLferqI8_ZZ%2G`tVrXlsQ z0if`#h)XZFl!(C~B&8+-BGBOkB%lsQtT*Tg0uie~NDr}OrhnRns;xh+Cg$SZ;;wdB ztxK0$io>rPaa#oYhI2)#jRj>d2nscaTzq-lrB)*u2 zEc*$ygk1H4s_d1h$>p|^ACs7G!C(--azr;A>(oR*1Uj661k~Y(^#<)ufN$*}3w|KN zenh;)27sbHN?haMMwcP`skHU{qLFPEGW%p0^l8n2xZ*TgA9W%74cFwSDVJGlkI@Z> zd=^_!kw7<`ZKoyzBG9n}B%qGvt5@#`0?5V0Lxcv{Z4SDGT-t<`?rzqou=Qj>i~$i= zv5;*D#fE{h!B`Nr*NEoh)#ecq#RwE7n3vQZu;$~*$%8%{$&1mim^Es)V%i`f>)i0p@{Wk%V@Y@#LtBG9n}B%qGv zt5@#`0(@&*GoWx+OL43#E@R7?-j=rXu%P$=0Wl>5BAcNL4grzO$0lrz+Nqjo32cu!x89u~|~ ze8AKq9JbqJ+r4-)EK`_3W`khJfEec?>!B--E;rV*PE7bDeF(t@$SVlI!3>)c?0ok2<)I>l8a!)`4$~|+vV|x?eTTDR$1qZ8* z2W7>?W0YJot}{Al_}MM;43 zHv;0^&9|GZ2no>zN7fVQZ#+9k3Wt2yaXt)k@NQZJ5J7^EVz z;Q}Tv8YF~_5Eb_hA}HUZzmdlXM5HDH`9nYg%AaGUJuNr_$OUn5Ark^ZAV`MBGJ{ss zjg#H5o?snQp0~(?D{6Tjf66?@dNmQq4*~@zpoy6Y`9WTLDFUkobg&R<%rFZ~LyE)S zCH-&xEEx2`FxZCvHXbKEVxHjn^vNx-pns0XFp(b|vRO3|2rz+yvrvFpYu7|zH3C!< zAe{jgq&z--K{yD>r$aWIt>r^F{w6|5i)Eko@H+I%&9k6yj%76w2tEM`DEP-ndst8e z_>)5*WGR%nZPYr?AP|rBvG7+g`MPXBK;%WkWwi_zks%=*=DDnvDgAalE(sX~wSW_l 
zfC7Giw0ng{fNnDcLk6>+MRyB?giKgJ$^to2_Qpj(WJ**dkRh?2ZE1A7&9`73k8!Cj z84(#2Q?eo6LrnyNOF#k&?y=FH6(j+&8D!JbVyFd~3gY2UahL^nwL>CgKL`ZDAR?bN z@mwz~vK>S-%YyCLo{R_qap4ftK;Cf3_Df9!R(JHbZI}gx2S@?|Asl{Mk8PMIc&`6bC96R~WIuGrF(u7PQ*NI zu?3H*i9j$3_-}PE_fC5y0tG{WKeZ7SUM|R{y9LDvUOr3_6B!WmWKA@LgNRs1AQ=i1 zSWmYd(>E+;w=UD=Mwi^j7HqF30>LC80R{88Xs-&600jm8E$DJXJpAd=9oO3exlm|; zaOiqN8b0%}*=+OjVS%iNZa1-+7|GZU8`d0*kL_h)x($Ubq6d(cq#OQwW*$sji&>GBhAu5lrV?ic_ zFvy0Gm2YG=2*%52^(=_8Yu2-l=OUxGEoiHYYt@VFlrB1`gPK*)ql$p48V7=Nt~*$?!aquTbAR#gy#(WS6)o*<)$cT6h0U;v< z!}G|52p(f>2cfW@DX;gj1=sknjF@UI$bM*ui`P*TfnXDmfP#H|w0C(=0BIl*ri}IY zZZ{-E7BtL)3sKpIMyA9(5FAQQ%Y*dB2)MM8A7G4;g~>thA>EL!i<=Nk0qH8X7|K5+e0MLd<_*0!>L5Ac7G{@#CljHSrFCX5Oy=8CIM4&AMB%roX({pl0fNY3Xco`9& zYjtsO0T7i6bjKkdgu>%qHvV*PtFr6pnw!T$r!@ASjzDrS8hcEhMnjS8eWB;Dd{(u! zBOn2-9X~xgM+6GDTh5D!><7tEmyLvIl)JHxI_ys^$Tv9UQx->eoG(JK*&H>0!e9^4 zrI!rr*)vKZ*i$bdK2`2>GkeoEv`8f4(&=;o{>M%mVC3j2{`-`px6|8*z}gWgToJ9# zg)TViv^k9MRyoEVW=A8fAN?`Oso9xZD0FlgfxX@PJ7`9SB!1-LV7^ z5ka>}VK*`%G9X&+JGFRcq*5LZS*b+E4Kv(PJ9AK@m#}Fp9Cj^s3?i2AgwvU^~w(rBX%k9plsHzDYp2XRdc_X9C5O5g{T3l)J_Wf?@+oBr+vo z*FTZ5E0{7_9hRAekj{`q+SS>HOQT`eVA)VOopSXiqDB)Hqg2RdnZ+UtYWB#4a{DRl z4cDUa{R*38H9oW!5WnWB642Lx(dzsnP&^XK?Qx+ait(OuOM`mb*0-wkq&V-?8>TL=sT@ zGu110A_1Kw+7CV_2MP=R?tZHo=y5*KymSnwj*>NC5G}nSsZ7(dF zRVz{g5>SyI_+t6kh^X)`V$IFb<60l?!%$;gZd;pBD7hvhid%Nm)y^DUt&5GpAoGkB zBZOUZ%!)#+8m^h~A4_lOg+pSC?eST*jC81sBl|%_5|BYC@+JWl`4^xVKE5Q>wxi6w zx9M&hX=jNcCJ^pk*Tv>btn00x*(GXnw*+TZL4rM=HWMwe6-_3gw52>rTgO-rLHFBq zi`OlX4`F^%D{=x7P?3KDis|F4TkhK*ZR+jFfVd0YmaX$t!o^HDy{z)B#=733CLBw8 zECUL;ZYCTAMK#Z=oEYeG z`;JBT)_LyDz&b8M<+`9!lTjqXMW`89Y29u}hI%-j`-w@Z(In){t`W}L7RZRZBye32 z^J>LSKmsc6<6pF&UfIz9KJ?s`ZUc*+7R)1^ein#`qJwPqV+9Dkt*?!)I>vOY*Uhc4 zv&zcW?l&aEEqUIwpc{@cAc7kSv8-0y1SFv1KK@1j>6HzA^;du{xOFYK2$iv*UUsHf z$D~5`Ltz2y8&7{)vXH9@|V5 zw|=)$ONp=#DG{j-ztu3qg8n!LfC&2GiZKP!fAQFX|3Z_6z5qmvA|L{*27mm(4}ENd zECyPT9w8uZ2FE-X0MQ@E)u>?;kGaTd-&fuKz$R{v1?kT+3w}ic@qB2Z6v!Z`Wd{xU zo~Vg{2#CPv2p}Lo@I(I_%gBdYW78K`YSO8&vj>?F(xDWN!6M4v2nK*=S&#*B1uFe- 
z)MFzfH4%V-eRXtO41EM199o=%2!_oc5 z0MJYeDiOFPHd&C*et1kx1Rx**>3bjoBJh<2I608@a6wR|#Q=-$7Ur95wu6M|o?|;N zC!5V?Kiv9&tOp^{4M!G4_M`f1gP`^q0SV|c6q*qM5m=o7CkMuQkPh8z$cEwsYO@g% zkF$=fh$*rmYoZ|^WV762g$3Pih)7KYRwp0--6h^ZHo%B~HD+Ls7OKwmV0S$qJlmM!e0H8AMEZbu}buOp-NBG5(xWJ5@ZX(B-|fCQ9#=6c5>A%K8lu~@cSj*Lhx#{@XLae>|mKOE%aN1tiptbJeW= zTfrdYrb$4#;jDKlasp&SNQh}mi!RCR(60mIJ65U}=@M~~iij(#D0QFqTunxQeE7m$ILZNMxE_sww4?wJDdj-x1H=Gl+|pxOb3RhZB&1IvlayKm`6j0R%)w z#D8%s8~XoT+HHY8~MGy6Ez#OD;;fM6*d~k-ZjULMlq=(8DJIbtqoFxCn?q zP6%)&qv)W@3ft@kb>$W1F5EBdT6!g1d1<*zS6KEFiMfW#MpxEiHx)``tO&ubTxzi_ z)T-c`nwwnOy5OPs)D-4ffQOxM(cuTFP47r`4ajXeR2)8_He0zQiRHN!QfW?9#Pqm#9m) zbe&Zr%r}!QGCR#S3n>li(4#I3btqoFxCn?qP6%*ysr_P6$Zn5XT2<~c157Ag60V|4 zg$r9rT(l(Wmettp>LO+o`yugkDD7g6F;}^=(j}MMkWh1rYpS&pMFxPVLf}5$b1nQG z?rc`A{Rv1w?ax%NC;}poYXa0?_qMwobd9)Jx0q`j*yPG8OI@@k=1NOTU47RE*I3nH zH${k8Qj>O}OxUGbY{18y!yr(2iS^m7w0kHmvkZvasWK46uNg5_>AXScm?i;r3|GCX z2#7!~3D}t|+{g6N-3D?7+J!w`GA?dqaM7+&7b}msPlLldvh} zZVdKV)j?>6UBYVFPv%pT5LwUy3o;_L4kI7|br@Q`un34i&Ini+T)0cvMf=-W&wv)Y z5Gd?Idi_v&Bn~uDYwm5>o5Wm zP=}$_3yXjVaKs_citgV`2Tz;S0%LaoCHNI)HiRxd09B9JozWH(#~)Wf2u1rjm=F%4ISU5VX? 
z$CYMWOG%5}m&Ytb*Bb&_5^^CM_et}+40OGb0WtPN88-4kSZWiU4OcG9R)YD&tC9mk|+_Zv-NuYmII;mYI?b zO|u{qQn#By$W4=ga>H5gA_5|iD*|Ldh==YtB!qB~lCOe8I&`s-F_HN&9<ocAgKzl*SGR-k8}v z{ic7u_$j|>TRkQMA|L|!Lx8gz=W9enV@hQkV?6%c%{n9$xN#rSG@9R8sw*%l%X~0@ z>ZXFie;Aw1v*tfVp6#}FR|G^r1lEv1-kj|ixWbeO$ERxEo;*eu9A@>~k-~8SQ~tHQ zIVAaqR(mW0A|L{5N&vAiP(?#NWH&=C=zinJiO6`65oLa)ZeCQ}MHLAlqx@+SQ2rb% z?WqWeKtU5gKHR>642M&*7tK00k7&qv5D?=()Q$UU3Lf*%;c*OG0oh?u69EzUW&%jaw@fB@|0V7B zE&rx)<)`$J5b{AbOov*~5DmO*Ea-k?9;qN11VckCWJqK~%p;*W7D$OZ+|^o;4Y3@o zCIJQOcxZ1#AkYLPqV_&K6do|n!>O2gro&g8M=D6EuLW{pnLtDo8PM%UV;RY?4Wc1a znr?xJXb6goiS2^ZB%t6N3+;&rVw{KUhwe499;S4=4YoiOh=(cNb7V-I zuKg*S$7p0ibiI-3(8!9A4{g2$-EWI6kQL7jRu<$xjqRA42#A0PtQi5H5vf~l&A6@F znt}qxbr27NK`QIEPFW_?q1&y$1=$TULLgo~WHdCU3oPiGqvDP2kkUL0B*$~9ltV;3 z7Mvyl1?O03PedSZ2}ne1dgzb{zf^=|$bM++SoE<#68`R^18tsdIaSjT5K^JB9bIm8 z!_mb?_QQA&%Va-1?h6vgfDlr!ngkTAW7tiS|eYM4+Gvc%k@iHmcyrevpmNe)`!w<3pUH5f#%fHY7v08(9uT2xLKw z^B^6h#Mlo)deegKg4ZOV;2jg~kqC%D!4g0+WH~;wK`!fBAS1FNZih{GTwe5P>2lfOP0;qx2f-APJ;HcO0i?gu{An!-Jf>h`dC62E_G0 zWIrq;AD%nMVwMGhVjaytZ`D!|{*{1&a1^u;A|L`qMu1Z<84%eCV?Rh}um$UA$ceEc zq{EauBp@RsLU$V(5yD}4z6HewEYk%?d|<(AGmnJ)IFZc;s7XKpIvCm&5fFi5AwYJ+ z>6dJW@f{l3P(O>FEVMSZLpI0+Igt$^C*SpkcotY7A_@<9%^4O*iH3k!Cjmi2O$0f^PE0Ow)M!ypf4VVQC`?1Me96t_v> zanc!*2DW2;qf&n6eem73#%EeAv+&Q$k5hYo?4@B<69Ewrf#M**Y1t0~AsAne&8J61 zWIohy(@jUC1a7(og7PgtPVM=zm!7GnA|L`HP&5R{W~fM@+s)TrBP6O1n35e)Hb*Ts z>+-KjK>2sfw8tVK0!2jt(NHeOI1ky5Z%8P(Wo~{iziBD~eG?r$CITWL0)Zibi2Ox8 zOno)n>RoSv+1YQkBNEWJ!laEvKm(#c=(c#;y3C!whbh=2%)Ku!rrKsm*& zHxmI75CIYJ1SB9&LQ@eC0TB>^oDz_La*A7TCITWL0wUlENI;&1rXnB$A|L`eB_ILi z6t~_?1VlgtM8Fe}fIJCJML+~ZKm>A1Kmy7sZoQcZh=2%)fF~dUc@mn6fCz|y2;`K2 z1e8YAYTYbK>2c{w4Wj%0wPdg1SFsWJD~bF zA|L`HkS_!zpnN$}+D{P>0TC!L0uoSx9Z-E75fA|p$QJ?^d?sK5%4Bw2dDPpdHbF1~r2xTDjA|Rk-s0oM=5X3-ef`nd&i~&W8%YlHD07f7v zy?00u5eOw9y#<+&AT1Orf^v^@?^^fyzTWjc=H?f%xxy0uVRwAMfR=%%HtrV@{|GDWu5d5bK{+~61YE4qn zbp`xeiUr05GSO=NJe!4u6Dg^jgd{7wxHk@x60cS9I1)MwRFY7}!Zv97aq*Zy z{D^#To~(3y$wv0C}1(-oAtrr7(4 
zcC*q2(t&9z#7zmEtnYA&B87WfSebi@z0!6oO+|9XKFZX#zsKn-?9nYX4ZTY!MLr8d z6UdwfD<^Sei|bt8h0{Cqz$i#nq(O7xI*Qu{0OS5gSdX;s(t?*?fq0B-q4kOo|^4l8srjwdn%*6(lF>GqiT>rpKBj}11VRkG0bgS!PFJzLj0z8rbh|Aj|f-9Op z?zusypls=iY|_zkQT7;9Xt4ux);>-SlXdb6{H)2>YNEj!_BEc__N|+VsNKIWE27Np zUOeWYb6LR%Vi#qKw55A-Rxw+{^Kk57jZqHL6449FKu-#V8r?BjQNiM9kBcNkM;-M* zt{__?!hU@Lh|D?oUCK1E;bJt%H9X%zcXjZYgFttAQ733R|0j1P6(jxQdvA;D51cXAFjeYmX+pIz3EBHP1MceV8hC~R?S8y zy72h(EElO=Z-EvI!7)o{HhoaH)|@&EpC|Uc(QE3H8g7XG1v))k!Ka>C%GWu^AHJ!B z?A4X7>JY*vRZr9uv3_JLF@fMvj&Nu#-E#$H!{Y~NQwwEC^Ik(tl9gHwfsu)ah9g>$ zST+I3v+-^tRNo_CE6Mv$e)-mwVX;_4$AG)(m;&)nfjmk;EH#%}yx?3oI47r^{j3^G zMRuC+jBMRLlz&yj{r<=T$}+O5%&Rwj`BC2pZ>h{AM6#B^sN-Lt+YTvznOsJtc&gQu zF}Phvm?s~=`hP!eB&-tlwMQWL9lZnFgW4m#iKBeklSxV{$IsDtyTv-1^hE*1AE_cg zwfAb!{9JDEHEDw4;RoZ6pEGxs^ z+G2XYJ}}hBSL24Rs}^cm_l{{=YqoVQyKT0)b;s5%AcFEkhAlT2yF{_gzRnY@Yot_< zKzOWQQHTk0sHeIdp}|ry?96L=&62uMuk)tScmA&~f2*gTGyE#s-|TFNgilJw#E;Un z7GdeR^nyOp3W+c+uPXK|$A7*_<>pP#=K0x~fU0C(t=xOf-;fMp>1O5QYhZt=E}NQP zfrheB0s-TQFF^A6`VSy<3-Vx*e?t0;3m}4rar5u$QZt#!#Wy{=mm@ld{Dw{uyJKKi z-|WDMW>@)&+}$(zLPGei(MP1D|HW>=XI36-QA(^)|MhoR%Cggm{WEQ0x)y& zgE;)MdhqShcD{%3V?ukb`FuB3_wTOB=%{bwr42}hKe+-Y>*+5n5xd;tXLyY_uHnD4 z+;}W6F1_~j3|%G)S!Df~MoEc<8o{KVWsRC*hsDQNdDCC+yRFHL2b)6ed?jaU7^$*l z0VkJTRTk5enswIRtx#9`{Kb)y^djH9aCE{I<~l`A`*dUJDkJutuKc4BShKeY%S}qa z_t+m8-WM$T%JhJVvAKStC@UU%CeEp+U5p%PzRUWf>3Gh=0$NNMhbbN)YUx0fF-`ex zve*l28?)PKosjFVZlawHU{=#%hmNCoCAnILx^2`ZuG6%plobecc(p^0h@|J?k6$SS){teC7!Y%oOC8 zHo8BgI#c&(C|xl&n=Fh4DvoPgSkq#XsI5l`8SZI6K#cDn9R!rawLSY@h&c5{aH0ir zAlrQZzDV%4YrRj)G!&fOjRnn6n{6a zVPPx$8=a=u%}|s_4_pD@S!vgl8QY8F;pI6>lPdfy8EJ~_@3CEPDOwF;G8Z0mK)++c zTJC8Eo;C(fk2|DvJuRL}?3#wN-Pt@AW}GGR+AOAXM99j>Bf6qm4f3aZbA}l>QEH~A z#OH_1!r9QprlTkRMk_CjTacU8Qy zk9s`}VnV?>}<-bzK=rz`k9 z?y^|s6vg-02z!NR`HJ%|25KOSJ|ZE+3TZYv%GC9t8Bf%QiKIJ=Cg?gpZntg#FdOdj zl(Z6wi!7D?tft`De>Eol9Ymg$nX#Czzwqe%d#7cP0bZVxyXxJFc*r<)$bDmWP`lF> zZMa!=hz}+y+MbC1Ie#Ga<9(&TCg3!IsvRNd+*z_&%T%<}(Om0_yL>C#Xk4b`HcVIe~ 
zBJ(^fSbr3KL6nq*yzdcZ+Zjij^xI0dyaGC~T{%jT!l1{@drs*T1?K6S$=gdMy7(LO zEf!VdYv`9<<9kFi))yOE`*vebr?&qlJ!*wbz5Vi+YN3i4?_0gXVWYOAr-MC91HG$T z;qwJyi(xx#_`N5v_>C!Lh1f!%)OYNig$yn!SYWLz>8XieHscaTe&YGF^vcnS;N&lL zC(o?A0~Y+}X9Ero`@-g(-`#Q@RK0xNtnQQjUE6cmu}`od%~ z$4n~jy#4kjC^w0uVHQwTy%E;g)|Fxzn9v&WUbowWq19>8D~t`YcmAcrq!^MbVv%(n zet2+($MYVZ$;wI-KG)x^!<$EXmv_i*On&{ek-hPkV}g%!rBhiD-Vf!Q87O)9r>yjl z;9A~BK{8^GS8o4fr7ue7(m#a}BwUeZqi~Vn2hgy0JxPi8#sD-kn6quV)%9x-vHHEO zZ)&RPX7ib0l_;o5oHud0F}$;XGl~o`rekbpsO|RX3)e4XFLyRfb$yhn5FF{+{rG~- z^s&o2Q(`7~P;@8ceZRp<;bJ-t$R02lSLWGyG1=TM1QM~Vu9%UVE>A^Uj4qGY@4u{! ze2H95SO-V;RDi1^%oK6OH#-&_58y_x882jaLI$cM2aVe}pf-^-)-m%_33=u%UPJslzww znBWiGVBrQ2hHPi{)TN`f{Z@{*cPDlXz!yrQ#$@E=3{K|CMU|xN%_R;G4gvs9Wvj~& zM6kN%?R9=V&d|)aI~K!okcQYRsU9C;vueTSK$aq{`j_oyPbp3vls8|d3m`}2*r|Bm zX-J^*(W^U)YS#KbO}o}MXA^a))M$`?bQSUJkMy#GvqHXy{m~- zX?^67nbuj3to*ooi@nAWfxc!OAmi%}O1_ZnvmEI+Fo!1Hu}Ks+U+uB!;m3Ci%i)Ey`P0rnkQ_30}$hI-l8hv~(W0VNYtNW$n>_y~VDkLoLE zkC5-)e-PFIIvNUTjXijOg`bzeZ#W&Gd|tDFcxdpq;?ARyR@bIiE7giJ!m(I#Dh&ci zPDw|Q{BJb?j|a5W9@`t~FV}e8y{TWFeBRqrn}%=8j@>L0Xx!N7`W#8dXUyz12Son9 zQwnkFf3^|yP_}QAJ{#Srcl!P%hrbIM*V^VBvjd_sl;Zn1 zjM2OTErgD78F~4FuJ|@1=TUb*#{S85Rl@*=hPCm5*%o2@4L@T|ek9_KzC4m$+^*tA8QdJQOx4Q<&LUxk47XxzTjJF;Y z;f$x1#$m4>-AxK|)DU>t_{iWR7r&X~+u}Lu{uBO;)=%BFbnZ!O-um9~^RyASN2&q> zf~wy)7k-9H@tE}YKwNEf8RGeS&tY<{5HNVTEcjOM5trD-ZF^Me8+%JYh6A~_=vQPr z&q^-f(8L-t(T zP@1shv+s~IYIRHy*;t*NoUW2ZZTI`Nc0}&a&!1|ZwO<~Q1_qn3$7mv-^p(m zMLH${L^pH#eyua2yR-JeZ8gI9M7>w9B;$KpwX8z^f=u*%bURr#W$&4JtEr;|e`kbYv%^!fU9Q;7d6tToelyj6Vz@)30x@C+D z2M9^W!eQZO7RQ9fS^M^L3!h=PeaHHkPVKU*Gj)s?hVmf;W;Q2a>E`N^m1-2Qpm?w) zR3*S5BGhSoeGF);dsH;3$9AD1+c_keV@GXOKw}C3%3*eFHwRo;v)Uif8mceWF??B7 zaY*8Oho3W$e9psA1bT$y`~ycui^s+kJAcCM-pknTU!5g8Z;sg+HveFIxWt4aMn1k9 zSrj3HliBO8r3s*yijX#*wsz$gLD`6Xw91L0c*jn-46AN-AmE%HhlYRl1jfyVs+%vW zJb*b{Ll?~7f5IF8ekm|I%fnUm(^Ib3eh5lGaSC;Lj%On>(x1c-%nMT9`87H zlo;ki`fHt)M|SQtSbMqu`#!VH#s56ChR!{0u!70xypdqzX}w}C<$?ibBpPtjpqx#7 zz?w|KFSm0*x9yHW8tG<=T_1U+I5|Pm;b%#*KAXp)la<`=o+Snf0N?mba;$h((&W1l 
z&{MOMT-@f}FPVLo#A{RjaCw^BNi+5@V3H&k*_8thNL4w4y02n*hp&TS0Y1J4O@E24 zzQd1}0tw^G9mmjpIQbA#GJviBw(FvsTLN7VC)0PoRzjdXT zgFIJ?w+YL0YN+5^y6kKpc|(Xj%MYFglIHw=gN{k;E^E`(Aa@@k1$2w)KZBL@IQ6P# z_W^UGmG+VMKnn}F16*r>Hu}^$h#%z00<)b{dah;xAB+`o=^D&ub8(0Gap{4i56_ao zjLnlY*YIRW#5=vu(8(7#NpMyt6{4@w^7nLp{l4j$Eavr_QZ+=-{R4nBP!Y2ARojq4FdN5kzivA!RRQu@(hb22&*uLG*V<-u& z68TQFyR^+33MkwiJAkJb*>M3T2q;TdR^Gn#HDd>`1gm8qB}0^)Dsm*ZBxmYY6s6_r z2}l3xwDB}yIbyKX6fzCQmQ@BAYDPs~P`XI7*X^MV%H46+A4xbXp9Zhd3jwVjLs(a_ zidM^^w-o0wJ0d)~DKsBFk{r;f0C04n*fbB5-dg)rSPbY6Tnipg0whXz_(a!WDw}u5 zKz^friJHW)M2p9eVCz~jb)?c)cMYoSTHPyR^Mh^|`bA`1gMZy?pm%x-;#iheIz7@64h zdoPAuD59LU{oiy?3;2Mg9|ol0LW3$b1V{wRS=tfQhq4&u&&GBjz)Cem` zLK68Ym4ePn9E3$5!trDE>T_+f7c^sElJkqCz{G%Z1bR4Vy;A||lJ2YX`ZAXb0T9@; z!2`5Dz1M2jb*|-z$f&6a<-=xSQDndu?*!bYF^VuIK5g-s~nq@KJui z^Dm$_9+VN)K^8};Jp9QPShY9!_E#a+^m3|4Lqwi{A|*|VP*hyz1MSZm9ZgwMAa2H1 zNfB_g=cZ;gnxE6cEnGq376tZU6Q))PAua&NReR&`^@vOH+koO@2FOIUB%%91hn@e& d^WN|BxWy0PfL}pW;8gmj+vYYmt4uvG{{@Pi@6-SQ literal 0 HcmV?d00001 diff --git a/04_Spurerkennung/images_input/0_O.png b/04_Spurerkennung/images_input/0_O.png new file mode 100644 index 0000000000000000000000000000000000000000..9b3ae0e5c8384cf43f53b71674527fb3cc3274c1 GIT binary patch literal 10982 zcmeI1XHXMP+wOz<2c%n2q$40bfFMY3N-t7Fk0=nTNUxzt6KPUIF%)SMnzYcnfDk%{ z7K-#55J*A|9G~~h^W~g5@B87LIiKF0xn^f}?t6Ck+L_&*`xo;{SB;v2kpch!P;02a zFaQ8Z^8QMZob>O=xyLum-|qH%bu&)@fb!nIL;}eA!TKO*Jtq2% z&{U3$NF*R>z(PA6q}Pw%yji_kA1!;Y2mma51&?0|3fD4*%8-V-UVY`>zvW;}ndf~H zYhOdFSP#hCo&z1f|Ccu}^FKGu&aSVm&1n8mwy+rlL9Za);7$PLPRX;ALD(hqXZc=`;B?kX?wA%O9-LRLP zwouJ$0^^m>Yg;;Is3uz=Ba43sk$KN z)ZN5;!gPTjRenfv-HlOs%G31uhDLRv(Zt;e)rofooZEt>+2ZPqoGAlyw$3$VQmKOM z0zV_y;A=f=Vrxum97h%u@HdL?g{Ouqyat0#nvd4$*2ULJ?J}M!85d+3EPTs1y7Mg2 zT)842X;7sf{q3H(cb3#}_yY~CqKdC+lZ*}z9Znppw!E6}ETpmO!p1-osBN_^*w#&) zFBNqZeR%&z2=Ak81~LZ7BVUH94DI4ald+HbjYK3xcQZCKX*1o7P5F`hLG($36lt1i zPtut9g^fxP#fU`&rSY*b$|$ka+7Mc)YAj~BT6+6edYLf-{%Y5d$q-U1^DP79C5nazq@M=`1q_tFBq!_9Z2HDVw3OT_Y0^#6Ez6mL^n5==4j@Y#9(`Y*A;nZ8U6D{5$eV|dhTh7a 
z{gk#8XfrgPC`U#eJxf!@V$KrmS?6(I+C+LWYvjwm0<;&GzAX$f9iR&w#_q8WHdva{i514^&QEe9OAq!Pk;ZF?PXJq{=d9pv6;EVzBq&ND;r&Nb#&J>DGvD!f70;y?=P%MzdeZKKDy`2oj~E=&C(5dEIckT{&7adlAl%gNS$9NAbBsT^_f3c zuUL`bdQ${X>E~8$YVCz=>6`~yKJ2bf<)qt0!;_OfyDK9&bW)Iz9ms~g531ZQvgWGy zU2KCeYZ}>rbM5Eyf6Rm^kjlkdM&`WJ{A4u{JCJ?HaLk}odRdOZFm|YR5c65`%R%ln z_HOsxs+)1iN*4-aNu}c4sK$Hn-xoqPnB)=6r?fE!EqcN5Pro>0>@s-p4$I(TB|N&ik!1MY|1l zWX`A7m6rWzvK_Ubu;1QT6w0xnNk_moU^MTjXXomumrUxgc*0AJFl?9?(-MwUX)5TD zZEUqG85kLsQ7|er!Wo?#N$gK7)Z0v-z8l|X3T!2a@1ZdkttUkJfp&~BiLnma9&LA_ zaZ!7sP#J^RK25)HwW`o{b;tJKoVZCO`kQS^g61+tltAQg0iZl!LB=AJF-_PlY0FvUs|#%n=GB1_5<4L1MN?? znQ|mY7I1C;hYzl&&^c0a3vI^%ZYRAb_@xI6!5%!s5md{pFVB8Ot6nP@r-)UWRauBe zD_C63&m7E$ICc8w9gokeosdR`k6tC=6KPfcNz6<{MNvoX<~)~|l3G#lAZ}llsEpc< zKFN7__4Nu>`S5{o@F{wCdSpCfJRf!hOX+Y8DkAQ5?mnu*^mX^Kwh#D;iCW`KF0fs` z^S&$x1xvJgYeDQGyw?tA)p*0Pl8cl_DJj>tZe|=`z7-St*MF1!t-65@01&_p0EC7E z0B6^K`)vTgTLb{uc?|$aX8-^X-IBq*s((Z8XuMD|@|)XRa8AKIPv1D)1Qez|Y(X^J z3s+yf>A2>>#K-I-fSAlu%G+O{O6=Y*Ab8yA;{myAa0DmTYmVa+JL_+{2<>D_Kq#eF z>puX%4RU}YfaE_!&JF zD~zDyB=D5u66)9y(EmPFyy1YTXk&||3)FjuJ;4*e1Ulxp^%`Om1OD4{R%og39c5MFGXAQQo+5Gptnq& zu`2i;sg!To=fn4RX|UwyLt91gLjPx+WLM?Hbvj;u^b#7a{;Nk3(;Z5-#1Sf(Dp$u? 
zR4j5aTOKSCojbgIP>i9@&4IUdw75l2(UzV--lBeaWSeD~|8Y?PP-E@dBWm5rmAn>e zlX3o;6}h)G0$kMod}o9s+K!lYlN&->z~1Z%KjSdLL+yJNH`=P~7aCX;`EWKW-&k6o z-9wa-yvmIio4nE$%e${yQ0h(9Q5Sm+j583rALPOC_T=77OA~ar%WaJ&B2wZxGI(D; z2(FS{hW*@tr7QM%F8Q9E_2^J{qD2JE(mfiFg&bf|!q}ryP_{to7}5ouBN$ZG(4!*r ziSdd^H@71ik+r*ldJl~dDOV+3@4sD$>w8~6{bJ(}+Kqrd5h$pWNLL0+L=>potUWqV zx$0cLCtQ0lMiOKP@Mj9Ux?h>BrqO`}wV=;#+@dv{30{IYaW^gA3UTfjL_j*;yg2zZ z+VVj5U@o6{f5(s;H`Rvl`;5ycHd!AWmD+s#6I*y)U>N3g4+!fhAKBAlP;RXg^L79W z2i{=yLIhqh8nh=;1yisn3dihWVge^hfs6Pcv z+dw;@RV==A7ahg-y<2)sDz16n1jQOgfV)EuY;doXr>Cnj7KibjnHY`vrM|Rh$WDKk zXhJ*jOddBvv^y!=GQ60DHm(q+v6HOkGUrRlS89~m^1d_mS`Oy%tAu3vC}mI*u7M!e zVG4OaJqM!AyPlc@b;K6=DDOA{r`2*tu^?(!(e%KO>G}f?=+xq3gEn>&-C|rN7p-zB z$GqZ2e57yv&xkN~y@S@uhZ>SsCu;6GdtQwv$0K-N-wT)q&kbzZH3TQNT<=vw<4^9d z>MRdC^$~+C3L&+GjfLAR(DbrYXB);+ zMc2X2iyGU)%a!$A{f2;9I6Q!0_Ehq04HYwx%w{>KDUlhp2@<;Lru?y$(qB5t-^sFb zUO=vm>V#qLcS$U3RCFdD>HwjY2FHEB9iIL5nCXd}mvuKeUgdccn_C&xbiZ52j#t4u zDxN-_EMlE9S9FY3t~dDbRTLpPF!FqwuxqtjyBHm&n}&0ln4u{VewkGjz~J5o=O$Xn zeDEt<7Okt6s_T$%S=>Huv8>)X_6oE^>M*>%z7MS6@apmWwvX%Xu5EW(KKkQb)lBx2 zp)%PuqnOYU`!dgLQ!+iU+#W8hK2z>g3L3kp>_1+_EN1VG*lN_c8W041L7A}=9HkqG zBTHL*%|wlMZ*;yjx7LMB6LkvmWH~N=*()oTriGSge=a{TBiypRWgq*x!1nms>PhUlvGIL<6sUZPFKjOf9tp2`)#((QFYsb&-3U*BCwN8nZ18H9d%4 zjHC-7$6}!wlf|!|`-wXqlng8q0;2B}-*8WVYXhhoKa=~3SG*WY_?k!;5Yp(@G1?$` zJX@D?Jf_Jh7aGu#vSv;??gOdPn>#I$_$;9EqEoem`RU7&>|RIjNHkrk zR9xhF?mSgi9<11ky3~5t)@K-S=)r zdoRz9syr7^h_1z-d!@sxUe$=0$V8p(acw8QBI(eENkrWnb8Bs*70Y9G%AYvvKAK;| zZ4wR&C-26!jlj^>)i?i)9bXlp)!DjjXp&vEOFA)0tyr6L|Ag zK=o^;K9>WPXVpwI{+SGXi&8K>UDQiWD4gL#< ziGtkyMX;ZvPe&x)xg{r}a!+bi6b!DS6j^VB_~qgUCGBTU8-g zLtP)_7#1RYkz>8+|AlF*mbf8YvRf|AFT-JC{numQ zi!}*mXh=`s#az@IA8`95#DMJ`XXyE@eJC&SYF;l(=w|NtnnsNi!ZQt;IzeVwU*zLN zHjqoKlRkbbmRCnn(CERH@OWb9AZo0rx7)_@Nw&%LNTt}D?_McibP9d_nO z9IdP$ZSV9COp*siD-B8v&7Q|u*mwnW@HecBBxZYE?aj97q5=&Bm92$C11ff`c*B15 z7F7tBWuHrbClEb8OaM!D+vhz6lc}|Y&{bow-Wy%Q*wO1L8doZnq5B(D;hD(&D(4~V zOYs&7tMhay_}rUq&WMekDxY~mFD{zqYm@~YTTj_lDKVJohQlwNa)xRVJEGR}U)N1I 
z_jdO)=~uMtdAh&49(Wq+RJyajV84MhX8Lbt9rHEvj661TnA_{O&}?cQyM} zf>u6z-V_P1FgP}ww2X1*uBy9!_b$Bk*ON$mpz!C!llLoCN}J9vrbln|@=Q!3+IkJt z_ckgUh4g6F7otUWnOr17Ul#Y-LCWl(Feds0ev3yludHN%FbQ$9I znjjW&gn`>8YMjTSmM(U0j^<&h<$GrHy?GDgv zQ($g|`VBGskuEw{!7<1NA2eYMk!s zA|y#w58;`WlOq#|hG6opdwQnKyXeVpkEzZ4e`fW4t;tN4C=Q53c;`wMiuCL+oD}~v z_}XB^F(#;tamYv~OPPr&+0jeEBEd`QN>w3UnF%LyZ-S|+vho98#oU+fnARHRoj<21)e3%0E$v)7PgHCmWnRblrHA35mX7l;UM&slo$bZZNXUZw@#p6WyI{65 zXK}|>0eg~vUG&*R zf5dD0?yrh_GX)&TLm95u?m9^t?LgD8b5Cnh3k`&&7pq^riT(Tdl|wU9llgd3GHr8g z2gHr0d8g-rM2^gI&G3uaN4Qw#$Y?&t`5MW6NG9ke8;vma39TcVvX^5!)N_<2;VZjnNBl`F3ge%|?UJ)x1pR&Ljf zOlKY|4TM*e-JQ4KIioN%+IBf>M;Vu)q9<({wck}5Y}!SPDfi9W_83@ycPat2{LcST z^!U(94x>3<7_aqYy3Z=y@>!;A$#f=0l`yw%agiu2mT-*Ts%>a;MBB6)BjRsn*JSzk zr?~|$_@wvn9s^tV8rL)Gxv%+fk*lS$kQ%$p4lPD)q@#Bwr8U1JF?kx)p^>gz#aHyJ z9hs?b0pbuVvV{b0UAz=NZAJJOEAaKsB5vGgBm1b*K8$!M2@S5g&?FJh(KU4gJ_(eQ zMNR3>Z{NZ=)VLe;~Z?(h6x`E4ywvi+49-zGLYw zIqLV^mGaY5(3!j0nN2O9o|&U&&1~9}6rE&EeR%Akkufch#mc&HC-_jw2OJkPc2;{W zXLv!;818Z=m6DKdG;G`kw6RFnA9fZkkAIC{5Qh8Z6MUZT2HCAy#DZ6*oB2`rdx4h2 zGY!)M@vYth&nnK5J1B6;R7{B>B&p8p#yf1+rr>&(un+Ig!C3OJTBvU!W}1!X>uz0(QZAP5btSj9xyA10 z;}NF4nYp(Md^?PB%X{&N?tL_s)H1FlgSj zx+AW@IybGp%T~A;$H6yhmfd5rUddac@5m}%^wmazmnC__aYA;uxy%<8=itPhL^&E7 zuod*nF}lvm?HowVNoI91i+H^;mt8uAXwwEx1G%A^l?yKXXqcRd;e_R`uYBrrPU}S3 z*b#+hPU1kEzh41$uUylV7(!rcSl*;MF_}f%e+Yp=IN|HRNaz+IK(kzFt2O*UJ&h(? 
zw2Boe)x%)(#)r=nrNM9Cmd%7Ll*||IphSgw#)>NN`CYb~EqJSUnskz?*0Av@?9p(B zRirWXO)S2XFTtZKuU)?hS#m7QMN3j zuDV>7JFTbOBLi09zcHostI)ss@^HwUwRdmSM%K~H@vQmhtfgMMI2Wv>uDpp?LrsX} z*kz*R3vREnttH3m{Po0QI`Ra%Di$3u`F$s0rJuLlSMiVW4At`WZo#J-%}{~*`kaEv z(zaPOzGlNm_Ss)C>cQpAf_n$kJ4dv+89Tpd5yQYLe1R z#qSd8X?&YBtX=s|8g2LIP<1dzkL}*V0hrQAN;ct$QvM^OmeZ~U`FecKmkLrLg6a~3 zulx3)`O>$?R*t0iPreH#Kh!uj7^rrsd85ha$Qv%+vQf&*Qai!1?U%9MYllu9PW@DR z`qHH94B_)?i8m4#8(@k4?fMMGmmki%yu=C`KKXRs7nov{h*F{OKH!&dmKXeB+Bb2~N29Y~ejz>U2gW){!i&#{hy}!KJn@1<_~Wi{j4^= z|9xdx6wT6(^SC^j|PJb6pmojT~GE=odXw&=~(`4kAI=|SH_XdjBB@pc3kRjiafZ2CDJLNLZK z;LmE`ltIK71uM>+NO4mG9m&t%IAq67<+uCv=UAbwY7v$%HJ)T65?bIpu|~yzD=O24 z9_3no(6Fiuk~EQuASZCohr?-}5DVfC-j*ugLfhQuA!ggFK*}c#6#TmE#4*ImI)Ofg zB^{Q*mMr0K#1b2|>2B8KX<3P=)01X}6g@N~zOvH`uBV5!eL4-4No{B9cF^J*Nh#%; zG>sMHG1h86LBI%auJ;jTI_K!R=Yt%bJXRi zug}vShVN4p+@vfBgADBdt!Q7mo7tPPA@(#FC-Q8~)}ITsap|;}9S5hgS*;hT=N6v% zTdNj=&Sw%C&+8J23odqZGD2lSz7;n|wTeoxUate5I=Z1e74?L?=6rk~Izim)ZT&f) zuaKqTFjN+BAI&;r3v=O&2`26Q))eE;iF39qEe3d`YJX5v=T3w`;-}`e0*m2kk-3>s`YBCNAh)JQ3sJ zd`KMl4ZUb-cbe!C{MqAj!B;m5*JVp&mISqjb$N&Qy8=@CxEWBU;`7v+(2&C9H3R~< zUZUUQY2xpWPPCiFhd{tVHBoNU`mM%UyD0bbh}MWCrAu{8i3qcH*`0y-pp8HC5)01W zVg5NrK_)eYQuLYH8|LZO({{Wd9xzf+!MdX&G7Kq?ubeKrXxhp!KOC%RI|iO`d*!SJ zNCYlC^=Us9INPT8T#Gz0N0A#(nRd1;PIoNS-q*pi$%We zLlK{Z1E~{|uQpJU*Xk%@-;()7dGlKH57T`tJ)HHao{(ms%ytW2Aa%1?q;au=bZs=B zBGv0^wrcud5;w0(iPh2L7RpYLS0}vIM48ZMO$e41<3t@#8^;^VvgDb?6^1osEaX|& zo^m(hf96N=!ZGT2jgsM6Uk<e7Bvc#=1)g>;$cZ?2wF2SXfAihimQI1|F>n-V{2nc0 zfNkifov!NYKtBpPnobM`KcPeoa-!|PU8_E3W{x?w!de&E!MR30FV_v5PE{L19=vE7 z6L1M~1SJ+JQ2ee^IN@@vtW5OKsjn#irO@tt1O@wd{51Ub2*!w6=12Y(e{L|dt_!8C#LL>f#ik|C~|KzY~UGfVeByW zdOHZRtl)LEUBV>9wqe=Bi&Oxdo{ZUU65jo|x8BSL{UuVbuVV3hwYCBBY!6qs;3Xrh zkbpf`<4nXZ98l*!B;EHm*X{`FK6RAZ^Ew2zxw(0P4++t_n4-(6C^vWGZ1cb!*J{|j zAeGO?pEeH}9*2fszwsE4yL)!C5P^6zDZAkwu3-%cEpH!32%^InorpJjcC^c?+_A|p z-Lx1DyaWyzUwm0fys8bRWHVWS@S1Iw30{y5<3DR~81;#i#i+4{^hP5#G209QN*vBj 
z@neKXxVmzJwK=zS^92jxWw*IxFf(|dy5d6!;q>s4M@)4LLoZC#wch7ItnQt?EK)?b6zvGT9rX0__=N)@O%I}F!T6bJ$v*g~^%&+*N z_tcdZ)vBukeRiDbef3R|w(_W_^`0?)Z{ip!aM(~D7u3&>YL=k$O^er~&(6UAR(Myu zn;3F%;>TK4JA6TZV<+#pt>FoLDY7w4NdNuY0I7zC7#X;{ZhqqQ1(Cht1_Bd3-mSkm z`emWBHeS-l&J({(hP)~qXb0qP9S45uRG8%MlbHNlh}k{1`hLOKVSQ-k_EmQK@)76U zW0HuFWnIol-LG3u4#rl=e))DMV1;dFCFFNVT> zbMI71kLp9?8n&+gfeZ*^W%k};40002t$H!t-8 z0HT6_ctJ}1@5zOCIr?9B&-0Cm4*)>^@IOEV$oWbS0FdcAsj9wx>*(R@;p6Dx$*QiZ z%IfLuVejN>2LJ@lK@A)X4YnUEoGu=zXvRmRYkKI@lCtWne2RFK!o|mSk47V&t#FoB z@8^9rwL6Tz3*v7lB}F9C>IqOJko_T9XZw~ERTQ5vKv)SWa+z*F-59!t&nRwI9YPyM zNPdt~WWEvClZ>FKP89=*bY z-@+lI7HLK*=tTko2|eW*R;S~CBxQZeI_HH@S5Wp7u}BH(<-3T9Oy|oqn>f zr+xtk6l~9d4n6-v8@0kOEwi(j_4Pj*zf{bu2S8!h4qaA1Kv%BkVEN0_2x2 z+rv42W4pQ}K)9{ABnkNLtVk4+B3N!=bNfV-o5tAp$Lc8nfN%5oImttMJHjz)WhCGR zr*!qYfD;g5ub%1!09d}{5HcKSQ0gNE0A3bE@_kokJo-s5@PnA?=iT|AW3zz-B51PwES6(!8HM?UDEH zDE}sXXw5bkPxL;)m_3P^?C}E6mZp+a%qO>SKr7-ZGA;^k{=i3j(RsTC?rZ1ZjM$u zQqLtTs`iQe*rF#9|K%zo)Z496_gvs!nhmP%SrtWPFT;1Es@rWJk8MWk0`5GBRQqvH z>P~C?yk?iEl`mw@rHptt$)terJh74U4WJU zcg#9;y?b4JopGHFZ$<`v|H7;I%wUyAf52Je35EtEfg!fb5>o~i<>)Vz7aHD|Og2@i zE=1_p>cy2ml<>=u8H#$Wu31w3En|Ygk@fZDvzvdjO635PYGuDFb8~8W~&} zjC{g|6|hp+B8(h(2t*ksS6CW^)w}|V8?05_o6M{P!k}+=4Hyj^D&)%JU=HOThDya? 
zldF%!Cf!=yx~j!`1$9%%w1rakWAysHs?LdQoiA#A;CbN7L^nly0|2o>k0}YB>9i?M?8_7@#bIlgQ!r$4KQ!vs;9- z;7~SXl9DXi?xcPetDIQSqD-rt{Gs{-=4UmV5C!LeBDY(ybD$ zO1(n%CWBMPP-AshO(q#3z8Wca*UqI&>|xw0bd`K9gf2_?w}od=;c(YcOxfcKp$7u8 znJ={Bb)KjHRi@!9f1JpPe;B}-L>AP=+~x30cuZ<6{*S6T-%}m@8$Rve)V-8;$smp% z@e%<{OEh=Imv${mt%Y3KyvI5ItZrfovK^vPsVQH)RA6k{X$XfMhYfpwRFz#!{dJG~ zrzYX286^EK4PUD6nh261R7$rDO?hVcNa`KzK=#O?j{`E9m3jO_*ujPY^cU%`xcnRJ zgRTcFsfdzTO^_h7@kU3LvrN6bR}h+PjrJ~otPydQd~*VV&uvP#5(|RVfIGN?wOt}D ze-3>32`6&dB?ppWV#0RkXRsE!#=<5`aO;a$D5E0n$Ap?kXgUaO_#dA6Kn!D?#{e>W zKE1J`@>h%Pi2bTD5mLUn z-L9;Ect}pku-Nd(@WN1Pe|!OMJ#*$hw$B*Sej>3qg*IzHy;AJ&L<5O{+EeyZb_8`o z!>LluN7(jRCc(|3TF1=`+jo2X_7w>`6=J4ka*BCe!B9VWnt%5-RUb>|H53;hNGU}L z{QR|dmEH4Y(E9f2y`|Hql(KXFi6K=hBoi$D%nr=L&MGo#wVbOr$yBM-rhK&@iyj`TRTdH$=#$&G3>lwFj=1#&>00 z?dH>_T)W(`IlkE!UysH-1T}+lYGtk5c6@i_uhS-U``W&?<_gENwHnlr5JeD<92*E?rkZz$RS35s;pVXpzyLz8?_6LfKS{@k@us;Ll z1DJ6|OVqmSp{(INH;(6Z#|EQigtRAVX*YLnXB}SJhztHly~%3xMqe8M2<8L;BBB6* z^P7MDHUQu!0s!p10{~>R0026VRI8p>|6Zk0f2nL3IJdXplEx^VxpA-w$hNiMKx1b$ z_d5UJZu-e7<{$JAZx`*C_=mlVmcbVa9 z6W}a~@+R>68DEqB8Q5J3WW*dDXVpCU`x~NR1s58It6E$fw7(i4q-1zTU%6biHHt4= z2}Hc_6G0$zINj#%Dn;K|&ebl8GI0)JLzeXDP2_$prkN4KI<6oJ(UAP^FE4QIgudM8 zj9zcWW2NO|YMni<|A50Cv9(RL;9q^7-$*$wQ}}xqTSl?a!rWRg*8({1?-j|HnCxWr zIfTx#kZ(ZU#FygDzX1WMma3;9_cnvAD`Taq#a<192jOoBawsi$#_~eyE4+SSMCS55 z@c6I#N{0JfIM= z-uV(!A7@lF`N&==cWPwgZ-dNb)3$$inS%okw~sD7R^7Rp{UZjSx6dqnERUvnp&F81 zyW+~ELSu1S#Svy=zss$h`V@U%zUoz(bFF+_+Bz1u4`TiuAhw3UH3)~)zQ9$QoVt;` zB@6pQ<4l4Zir#SsOggL$HX_F;cw)vJote9Jv8zb(B17`K`AofkuI`+;yDtU*Wo)@^ zT}YW!o!>@nwK=-sEVu3^7hSHHVT@- zxyhGyv42OCzQrPyq7lIqg&VEsV&*Jt6qPB;=ipns9pdI#$rPE;QT5a-xv2T3?4F-9Y(1d zKjU9Yh26E&{Sl?(HK182F+}7xvhGRpoF+~+N;_~i*K=D7i(#pi9W^;^CW}fZoFouPeH?2q6q&TCi!9`V z>?Fwp!ss}c+8L^Y9X}Z~8w!c|6QaZm^v091F8R+cA6?(0E{c25+6nGCa<3s7p4VBu zma3_$yE|+n*O-hQIV}v>7jCms%SVswchfc@-SbaI=f-X_kCaUR9JO@USg1@mGc`eu z9+6q7ja#8lFW*CdTDVv#L|mJ>SX~Ka&fTl1Y%ymE>uj`=Z0s@KzXHy5qK?%<2>((^ z7lbqFIANkXnDLCCICgj$MEhI`4!IHaot69=Io|6Cx7#(?y}tw0{lRrqqXbVIL%Cl) 
z4Y--LAu!;0dvm*TrxFPm#4GZ;wBOZ5^j-I~tWyJAxhs~8j^7fs>Huct#f1?g{g7B%ZBtq+c&k13{!W>vcfNZi z#Y~@v(ynijI|X;IcFt9!3P;>Wi+n$;gSW9KsUa~JnEg+lZ2*^Ms#{vU zLrdF$c{-K4Bq#&&Oxz$?3twGHbJ)fh(M;7U<8aq0Uf?77zr)j~JZI)1^= zgG3H0%-ZzVZ}c4RI&%(dE*9QBK=4sp1>J%9AB2-O?D^0Bfx?^E)D1F5gs{a&Z0fHo zV0rGES~J_-!AeJL(W9!49xUBorDD2%7O96QX`diwpPQ1c ziPKdD4P~v2^U%51hc2(hKAb+|kzdIZf&SH6ON6krP**hBhR3(63wOE&W zHK9p{qH1k~Vz*)sn;yzyw&}T~v)EzF!ois8Dv_P*48ueDsZH`MH1y$oSc~h=?!pwv zB~hL`y%t$3^D$KHnUmwr&SMUeac`e)uNCQS+8pphmU}(pi;AghF20+KPKc=ImaJ#$ zrv~ZgaR>t?L9=P<$>7$I@RgF;tH;wLSg{?V$=A6;6|tL}#7dvn zgc5uU@|^cQXra_>&ze@Y2#ky98L3AN1UytR?ZTIlXp-BLMRohM4?OPrJ6)hzAm}*UIw(%mwA9_D3Ca+0* zWBeUn8DEV&c2N{kK$!fK6s^&{z?yIF1I=hMv14~K&=EiIp1l@?+b?Odt8Qf`BNAU2 zq|TSrh6a7SBIV49Y%jT@{*y@M_3O}JIs?6O%Q-Bf^k9f#fJ_f`+la2Y47jj zgNY_xfGjHaID~}U!b}z>n1 z#DRXW(J$Q0=47XA7QQ{+v_}}PC5)C{l)odzd;!0g_ob1X{}4!4dl7d+a3d|yldgOJ zt1JCjc;)TQ=6K(EFXps5JU4STfC0V7O?TdG7>X}4)Ht8eox}}B>_IQiLkQj_zMivRlr~5qLbj+37p1P{{fb9cTMOzowhMG4lArC!q1`Zx_Qz8}k0GE-Jsvh*D5X)7U?b4BkewkI#Q*;8Nw=20~|t z1r_}lKix%(cwl>?Y*DfapVdn=@*qz}EX=^k=-pz~z_qy1nPWC%rgwdJ zfx8L1m*-626JmQMRUYJm`bkzXviY(I34O67fIPJ!|2ET}`0^Ck8t@sd4Bq4u_A_Df zX#otZP|nD$r22J!6I^7S#tnMHIjU=-wawKA;q&OzFzzC*o?@K6HAVzA-nyWa#(!%1dfhk-j z=3$VX%;XA=BP2bRP>eT*j0TOKYcxwC8VJfkX~iKEKa)`%{bROE@20Qhh>AV^en~gg zXt15g4gKyM*S?SwFXs1z)~+6pzRtLiDi9U#Pn*{Us(Bj>XllDm@@T4C?rblP#5gQ? 
z9e#P9v}?su=_28@2JA1UgTPws)lEv-vQssUGQ~cgkB_yRSBY;?8z28}m~xaLp1M%9 zakcUx{kG`8ITe4b5wO1|;l~(^Qw*a=df$UH@*Q^$_`5wZ(a^7;W5yZt)b)s=xuzAh zH2Txi`nAM#>unieF6l1rkn1m6MkPP5_9!mYlDfLnCERBjPa6g+Ba>?r5^T<99|hys z8aoupbNr^bC)b#)*-bLXvkYHD>hooj!T&6yqI4XI6HN9IuY&EladEXe$&G=o;l-LfwF%8%m?n}>8GK?`Zee8`sdODfTx5C|I*RCngn0?e|2){LE>^a#?;S^z-NH_cw0&-f1x5zD5dF2x2;pI1P%M4rt+3 zo~IA8cxBdv3b65~HIk%goVi{0e4yjQg$`yeQ{y)}g$p7ecsqX$UzN=T(pG~n8SyM; zj_>GNy_3AWuFO}-Ix2c%N=GMM#c+&yYSA!!3a*r0l9~1+M?ZjV9UX`g>@G|KfI&>@ zy9MpK8ga!x8xqtRQ3D40GJ||^L9;my?#wT>bIMRGBUKo^!E2`8hRlpik56BRj$PGN zy@NDRIiFH}Cm)CibA=773-8ah?e>)Da$((^LYAZyeafXNEBAT8iCw`YcdTNDXs$aH zVLX+0n)S_gWn`gqxeDqnR17ZDmlN%G-tMyWEflPX44eWdy!65SbDi$#_D)SoR z2bYdQ-vedQIP^(9{o@^KdkGjy|Ew-9mgAsdFn&{kPet9qawAr~6G%3B9TO4fuRKGj~ zRA66#P|6>zbt$E z{J91~JVV~4v0Z?Td}R7!OcHD<8||dje`1OV5B59PfwetTGIy7v3lJcM447#bQK!0O zyb7Qdv~HDm*pMxE=AQUj9iux{6ElLzE3;5CdInXh%>tJ-H|LF|19KB@?;mdhExcs> zsVqFv)*V80t&JX_1kGGkqZ7TJPp$(4ams#HKr4vdjcgWPv?;Dg7(o%Dpq-^M*Z2By z$wY4`BNV}yD4s7PVHShy;|k^MK>Qs?m!k0AD@x#Y*QRW?vaLMyA!+eVg z%?v6axC$hb&TxT%>X=Dm(N9uxVNKjSLw@OkyWQqFm>g$dMUASP^~Sg|IFj38hfkz* zlw)^?ETLq@FS}diqo6;)4oKXxs(%B1|1?vx@Py#r_bs>zwNq5oFO51>=v&jn82e>W zYY?igz_r`PhE|Y}bW5RdCWVL#&x7)b6Ky#`^qwsw;T&ydWx>}ET%RL|iOWuwgk>NS zYAx5FS>4Qn+-xQxBMH@cV}VVL8Z(7c5SD5U+ zKde?id&3T4nKn=-v~tW%kP|O>P?|As45dDmy5F$_@A!jk*#%|iVLaN}%}WKwg_l(siH^kPJ!BQK_pztD zn}SJ&(2hnB^GydB(^Jsawe_RRaapZCBrT6K+&uHibR1tuW~>B{iIL2(My>jU`*eFg z<~1ltaNNNOoDI3ZouPPA6sw4>K;~9!g_POBF*Sb6K@~@hP%5pFfB$?C)mBT44tjrI z1uIaKDS*54`m!JuoNHYpthtL(JcQkW)F|CfVCg;j*5oj9IcrCaI(3tx%>@E8 z!Y239VtLhTOT7igE%|C2oK1`f*Ryfrmv#LC=Dswii#xp{QVA@{uCg~TzT1=sVQZ{% zlNADQMpM=CESYh3dxh8oshs*$c|rCz-$8u89SJD7pHEi}+>;F8m9=_+4=4Qn0m0^J zh`9{`ee|81Sbb9S>=Z6^Pt<3XjC@zx4z(N8DeECK$cofu z^RzTYhd3*RQ-D@8jLc51E8Jon{hVI=BVFNZY5drR-c4mYe$2U!Q|69gNfiH3)ZSFO zRsCLrV4Q!6ml<_IxJURtfpSsvqF;+w6OGt$)6tAmdOTxwSU-=IX**H72(VI2!Y@PQ z`4KxK7uRkr@Uv^%znxc2ozCOk0>8Um2{Mf%P#u7cY1KJNx9RTxctsn_7IJ+CzEC|9(bTFQhb90zwGrbi|X+P+!A 
z<*iPl;PC<5QNrSdt0>E;qGIF)erRdj8+ibkR;(0i5faS|2~jJH8}#`!zNmBIenp09 z;iaevs3bbqEWgDG4L{jECP;KNj;Vx97^tI4|1E|JS>yGrHDEM)M(ESbqUe!S(gVZPto*D$&dWlAYxu{ahpANb!Z)xbFSEma)BnycTT z|C4YV%R0u|pt2ggL_h|*><+BHj}D{1)vVb!PHqH7piOTIhBl{bvBF@fRUzVCHR1!+ zXRCNuv%4Tl_9<}4rRNpHWeD`ALpWjU)UN(}mAt;i39Y<4BqL)({iL-Kxjmz92QpNI zeDd%mz**;FEUnix+kyqI^Z?<676)}7KZSnDDMYF-4e#B!e47rJFpsT);Gsqh__SS6d&@?Uk*vL4_J%A9BPOk_o3_IZQW%7xNc~K( zA?MV^pN6nE+o29COTO3JWsHI>8|K|S2qnN-)2Qv{iTkgIm{#7fNfEf7s@e0kh9(Ee zy`$mLBw16#g53JG|w%9h)YXk5S$a_WWiY_&=M-*bQ}a45>?L`uRIKz>tPhWx+6p zb3f|BC7;AybDSz2m~I0WrgLC+V}gvKuG|+-uQ~)~UPWyL|1PHCv&owJ#h#2@Rh>G- z`ciZhe1XruawJd$n;H>XTXsqZ{7YLaWWjmqinLQRCfClWgcc89o^Vf-w>Nd2n3p$! z9ipMgC6ZR^qDV_HK`?QI&@8t&-&P0E|2@@AkGg3Iz>RxnlodGSq zE`P^d9|S9ig;Uv~-I~whXe6!_F{8J(A)ba;%C(GR!i9~Yc=x9buUOEpk^j$#0k>O$gA6@5f^V*!TUW-!JFW; z!0~e`l3KRJRS?oN;FNl*@ks9W7Gv19m)2vYPg0Ao1f}V2`b1XSC!F{vuT$*88e6F` zh)ofPUGn=Er)&GnacEiP!-^NnahRfcP2L=w{pX2tCwU>w`pW#p%wV&)<;u4~lq&`v z1Q-<%%2X2#)ydxsu05P`Rm~DTK6xIac)&^VAr|f8L1t0cbVZC7Ja4SC+b!b@f@$<2 zp!&#AA(6)fZs&4fneNReGwe}t*d_hG4Nt?oV?Wl)(83QF%~{AHvKN7q+!7v@qqv#3+Wm-JG;S0|J_W56 zcr)M^#)n#NR4z-o-FM@(|DDbkO*og>kisnYu|LOA9wud&8Lu+Rn@*ikt&pn|5C2du zo*2XfrT#_*fkP?EGPsn3e5unhdPE+4Vok9nqLrD;wATu((c@gd7zXc(DnUv z6msd&49m?T?m%9{A@`L3&ZP2Yh=;Z;lHBXI*|+81BX4()hXJx-UsYJrP6^7UcMY4U zi(-m~^JUs19T<^C|E_*Kly<*TPYWYb8H1cYic}iM_DD@Yu=Bepi|+&mq~(E$$2a-y z%O~t}&xoSKS9I88biQqI;YQcwCIh;Xu!3OvGL`0|DbEh300K&glr@6uQ?8rA!Axs) z51PztbMH)9m*PWn4$-obWH%Vsc|4;?l}Fq0&o5tYOAjRf?^gT&x&3!$e|-bE)i9)Z T!jI?&{A=oJIxlNf-hKFQ3ks5| literal 0 HcmV?d00001 diff --git a/04_Spurerkennung/images_input/135_0.png b/04_Spurerkennung/images_input/135_0.png new file mode 100644 index 0000000000000000000000000000000000000000..7491fb3a90fb65b9f4c0e8835fb0d66a0b3f65f7 GIT binary patch literal 12769 zcma)iby(By-|qk^X^jFv{h(VbtBmTu{hG9(1W(Nbso z{+@HL=Q`*7@%*uCyFR<)ov-)neczu19W7;&hff{?000tI6-8YD0IL}NktM)GKl$rb zjzE7AdZ?Iq0{}!+m=6{pH~%pJ@KD=PK|x2y!QIE*+riy~Sye%S*~81-&e7!!01&WJ 
zpl7eIw-1!QSU;6lj}OmKch{vRVAhp?8~zB!&c#AVt`^TyvP}J|mqJPD0o`zMJWg_Q zcoOw19^%A@OZZ5Z56Mv<;}b`(w}L-9FLYk~n7C_M1n*X#6f{lY_Ye?gsR+Lk2`8?Q zXQbGS8X6efLh{K*JY@3#kP|e$X7Rp#iUqg~1%bF(d+@seSOMRN2mn2rc|8L3ara~g z^6$;C!lSWzd|=}01mTYWvO&r6p8&EivBC>-*!2L#4**tUwl>=U9bSMHN7#>VfbfFL z_rX{INCpcvRxu2~O!X#S5#S&NsGc%RR00?a03O+?4@dx(cmV>cMs_NI`Zhq%Bsozd zfPfev@G3rr6YwAyU^V>oX#gPlJ>bzxlo99;cP;rY7ur(q8$lgx5=s#e0$xviBO^Y> z=aZ^5+>b=9v8{7u`T7Fi(+WkrAlp0{2LOs;)M#x_ZUZJsYbPf~lUqqGc@Mhr?pbYY zw(oYPsyt)?fGwZUnL8f-rYGT2c;T*hAD;bs;9x;i?0fOnu8~Z(1yH=dVszqxv5itm zYRB^O4idSfHXskN9yJQNv+uL)F}iiR3I*LKil|~qC zv-%@N`KFEN{)XWj>wv0FhcVZq?N^kZ$%?s2CqiYc3G!*2U!Tq1zy7|#bBzPu5CPOX zfw6*NyyfP$IOpmd+{RMR{G01HJn zKK;=qnIQrIK(RQ2t6q-owD&Pj4<3Cl?rJa5jg@G$JljB@Jh}YC*WvVDR-Dz5^3P)@ z>KVAK_~&F9Ui7?jh)wq(S#F@SP2PQQP5Q@Bs?Nk=bh-iEz zJxAQ4&`~b-B>LqCqUqN@g1ITc;=F0E<|oogq9wc2RSsk;DPV;mfu6m`_`(A&0(^u0 z@(toVgz2_p4Xo9~RfA9J4XbfJC!E<%HTXZEj8N(!6n)??&uq;or(&#OthJ<4&P>nt zf?|bGj1VXKRUZdydbxTt`#%&{1CMQl1mlcVc%Cj$rcm*d2gfT6h_O*7CZC@yL%#E32k%XeketECT-~+u5`7Ah6jS+{l2+ZiS8?T3FMM+)CZd3<>SZ+_ zGQT}>pmDfxtlRVXhhi<0Q7e1eXm)I0c`thp=hUB=B$hgADBk`#ZQf)2$M&@TkLz+Y zKGA-AOFN`5ASU=TXZJnXdv^n4u2wE6HEi@@rdlR@CLI^QensV{%JoVjgA;=>{ges| zy^z|M2EuyV6@+tHRR)y>IzRR3^z197%Ht~S%iZ;5N<&R<6Yxy>HTty`N?#Q>SZ;R? zG85U-HU;poIFnSx`bLNhus2LBL;biu=iD-Uek)yBQ6ixrtR`&Kq5I~5lj{gl zxgj+UGY)0@lu6uzUA<+!HTXj~P*Ym+td_6Rxs0ylv!Z{u8~j&JVV6aCp@utrJ@Cu! 
zU-}UKTm4(tzwk3 z{L6PQ$&wrg&9>RLT?btUKeFSov#_gc9%%l`)z$3y#Q8}|LrP<4VtnGm#KL=#JZZs2 zq20XQyv4kYHY?KrQ=ZoH*7i55jY!i^Ccj!c-C|57Ov;Q6OwF5K))dq{Tlie_8BPK3 z&`Q^;DqOJdum96J(pu3*Tfeh}hp26JZ4I%ux1q6b`&!%oEB#A)$UhxrNg7J7=T7GTk_q{GnzhewZ#+dlwIsaVN;Sqg;8>uS_f6TJ#=gTl>LmVc zf_mf_UpAO8k8W^n_zP0a=giZ5D`@ttAg?K}@2DJ6_e=Db`*wg#h_d)kF0C@N_EPI@U2)PId_Vr~Dl%+daMiTMnym9$R zE~&Ufj#@4yvLDstCI*I#U@SSKom?npR*xVzqWU}ux1`e#?qvVmylGTXU63;l6Bj+jy zCOtz@`9Dj37}&?qXV1z%BRUhmq+rIysD)DD(hN#FgmsDpvV9dUz7Oj7nrx6>?7;jW>-VQpNIr7u_FG_EB!7wU))I8}Tw2 z|M*hvB+&@+3`9KKrpD!tHN>k1wI{Z6xz1}>9T{cUhJIlW(sYin=pBvhZNYN>No4SF zCnn_Q>f({PwlTlSMvL{|*aA8*bwXn8BLq!0b=cDL)qowkIQP-+_p2Gr6;%Trucz#0 z?e>4H^AYwWW)ffFWyVhd8cp?fl2qv=p z7a)^BwG;gj$CdraZ|U}^sicC;f{;+Yt5qw+ht}X-+sO?lGWXyAko#=SD&4=`mFkq5 zG()xfpc)Z7R7UMaxpk73t_MB`pu6;M+C!i7Kj+qVm^pX+3i{F#YaT)urF7 zVNN~%#V0eX$`^PsQB!xYvlKD~+?4kzW3eQ$KMTdd5)$8Kyl(ez;0jZ(r`QW=?mpa& z)zSd@LoerlE>6zm%#>uKveUa=p=GxRJwIvd5QBY#j9nuE!h#m3hSx{E{;U2Bzdvq} zX(OS`VbAX!t{Tqtrs3D=wCU;h4{&l$Zfu2lF?^HRRz+7600?>p0E9;Y09W_u?|lHk zR{#JwumS)ia{vGu_cY6|F984!VpT;s{eYFjHOF+s%dB62!q|a)Uss+X(&o=kHMCc+ zox1L6Is`p!Tf5e4YW(;BLML6r>Dbi^ztaAx2@0i1vWc+b#AdxZW+N41NF}F07uinI zi1_&D;fjiiSZu7U9{`jT6u7DJ@$uLyii(e9Sy@>hGE-2z0wj2*7iq;QD3&g;T*gPF zhf8}z^<#O$n6Vki4}rerib%IGjZt1E1F@(%*(R5GKFTUtk-C#x*MZ-linnuO)VJh5 zlH9aCL;D<7Up%cA#1m2hnWR$0b% z-%6dEFE3L+I#cKIHgcmAY^J|wAmZlGy^JPyReftEV$LNX11laxOAC~Zr; z{^}rBa-qHB>7cB6V4t-%QxFtYujP?OU{I_@V%9^B$DoeZXe8KJ-uH0XvU+L#_flxb zn}tb_b=bi`V#r^mq2=%Se?OFTI4+ejOe&N=b))f+%BFjSF$hZQym+~4$(__##7vCXOFw|YBR@7Bk^e8@4he55rnUB7HJ6Erer>|EgzY1@}_lV8%NO)CHr}rh_ zRNr=MDqg(i__flYW00bvKGp~~&hTzH;hAhpjVi^0@@IMw&UWcXoc48!IF%=q5h~Ta z?x4HTUci^1LmgW$l=CsFwfF%a5$;)e;Dp3qdKdNETjdA%;8&!Sf_HY6$``560yXJ% z*Dw0yUGZ1PlQOyArIpYkyE%anAT=rEV zR?hRY67_~>Ptgewn1|JlO&SqMS{jgEUNlFnXp>&tLf&}n$WM3k-j+^k_Q_Ie)B$#Y zT?soT7R4R{nWw^~iI`~~S4IaPZVtyTQclZyFK^abA^baLmyI_;xRWTb*?xT@I?dJx zE1&rFcvt!Ok=qWqn}f5xU9NqT=brg_KJJH)2YFa`yU!fwVN_)x0kTD`&U)r3uuW2q zA2vw3)5Ti5XtXzv?vI{#ysTMd^@2Mn%^`H2ker9>p)~Lcp?CK9JT66(=by0|Az!~i 
za7*uTF`2d2v4nW(9KIE08uBJ(lIlIUNzhuzK6%uyd^Ca;u8P zg2-MO2EGCp?^hh>o9c@taDSN;zi6HECW4pY>Nk>0Htmv~o4H4@VVq`!O1gbkgR6sm z_kbQ}5eUx2o#`@=M~s}{5f!UnhuCjXQOje-EyienU0TaJya;POu&=)jB1)q|brh>_ zR^ZNB^=5|uejK+g8Iy`}moHaUfZh3ZU*GPqyuBSu3(k2s_4e@!$+2+Y^V;Y+(#x-a+!{j|qP4b*g-t6U|$t-k3)XOxt%3!|Up zP_3|MA06=_@eh@VkdN9VS@8zodyOEgwtm1zn5zyi1LT zF2S>)qF-GAT`XjuNs^8h6INZ0Q$gc~f}}$X084Ey_^h12&Ae{MoxLVFr8yVs|0g{R zf-WRJ%D9`5&zW{ngOPiT9vhLR*7Q!0<(igCcy6=~n$G7THj0+LNk?|G)OJdb(NQtY zP#4N*EKUr3a#o6xu*=`5X(+-6vXwdi8b0U6+Tr!lI=U!8!%Ic4aI4o(dW?C0?S*ZH zRK4b-&BIAE0AIJ(7r5C$-!=^LZQDTdL+vQUPYq?oi=^hcEU!d)VnbTtD@^Kv&H*x-$ zA9$8tRHaI-KY7u4qf3C?YL8w0V9w53U?PwSJaj|3I|@pneX}OV5cqw168n1>Zr?=d zs7Y$vcBSfNsaPi)7g6@I`jkYDzRWyZPIFGdf0VF(-?5qT=SG{=67;Y`qT~fpd-i!R zBlp``C6o)nIyT&Fy9qb+jf>5aU@cSgx3DP8tis_Xonv8JJ=q+6m7U0$(^=b{m6FEe z)mTvJ?qd#K>6RmHjD>`?82ecl{>0u~8$IK&GW50G=T-xAX3 z!$Mj3#ojyA|KTJ1pP(YZ&3tPOkB{l#i8y}iMy7#MnaJnpZ*@T9 z$;uK+HJO!->Dg6Q)U~5T?9||RWSN=l!N{8Xy*yYO$&N_jEgdB${PvzK)P_D1VLE-t zGd<~Y|IR`bld$_?JALxc1x4BE(>b2=dUs~pMTb-+M&f>3smv>EFu|&d?&1)8I-nX; zmi-Qshhpc$dPt4z9q=>$F$Nwh)2aY9eDnQ6?k4&SBF!R`9lc4c%uJn`v6mP&&?vNl zh}@`64RV^-p-p_7u}g90&YfW@Qj zlEz%J1y=Gv7JO>vsc~z3zDpS5)>fi1qGzc1rQ!ZOIO(K6GeKRXq8y{lfl z^hKa8>Z@%|yPj$Tqca1FYTLSj_J3p9K0H)Gh8XnFAh%k8;(#A596Fl_pCg(CQfvj+0=THILNV^TOkSxVL`aQMhV zg_yK3NST^)q_UV{8lg>b1=^F>@9-_S&BqzpWsk0lTb){vG?pk2`PwIuY*YFtc|ppK zFK#|t;k!6i%SX>Z0=FqHE_AvjbS@-8yXoY;xJJRn? z=J<@X$rQemm*)plfOtFJYN2D6wGJw0fc)tX>{;!Dw;XU77Sc|aD(*-#X7hu6b*?%s zaW5ZO#tvAh`55BP18st4646!t?&ZFd1Up%;bn6j`?2d7?iUMm^y_!UI=NLfTt&h~3Z*0+-IArzPhqpASi)>j4Sso-mu`{{5%{f; z=3_$1add%(Wh)Ij`{B8mGGuemGU&SL?BNWg5(rZ0v3?yOe~QQ$`Z?+=1M3WHPUM0? 
z8bqX1(7AxjtmO@GsE@qf7TeqF2;ljNoE;Itq+0uP3nys_f2rUbEVc z`Bntv<4&!d-!2z!EW1`IaHI1qF#R=4qyg_e*3H*G_=7p1c=N-(L5uhu&@0xd+w-QQ z?)Nv+6jx9OA>Atnu0aEx-&kR-qviTwBc!+D8}Uh4+|r#II1V{HCS(Rlt+>|2h&T!A zh^nb3Ii0rf*~!;>zdZqYb857?8hOyiRVMTrPE(1o(U&>oi7c%1rA~hU>2p(A-jRMc zn)45po5r~pG-xdOA2f~sXAml7XrDYDs%Dx7DQ&5?z^PZ84qSsFj9Nl zAusFF_x1G=d-p;1^xrIYQ5Perg)tFM*(ONpkAyiT8>*#dNEVM{QH#qqu=RN~Ce?nE zgEMWx&ihQbu`)*<@A?_U{~AnHS_&SLtkSR6$E*LHqq^?-?j>h=cDM>GqF>9F0^X*z zC;OC)DfnOzIxysKcv0ZrI{er;tS6hyGqrX+D-uR2wFxJ~cV}QnV1>NW0o85g$DoDH zs@x2`*heNCmU^z-tM8xU7LZY50Ad9D`y$LIzt~Sdn0lhoW}$TzWcqFGCv&gM`Yb64 zIG{GUcE76debmm0gjl}_pZpThKrqaw?3kj1HX?jrlr=Ss=)F(g*15fGb9xxwH`(*W@ z+pi6BnguoUogvRnY~qR3w(Faml)`0$F#)>bMr$$r({e&NOfo!~v{-(3%UCSMi2BWG zBwpHCfTfGgbaX)g4geZ?+a#Snu*oah4#GFuLsLg|*z?vM@|LpCEK2|Cm|8r3P-VsA zQcuzPc0T|XHL#skx_2BK4y75!X?04dRc%A)m@*$u2=XekS!|amCDrx zf%&(Y4#qa`s|M#yyze07Mlgd73{f1)cm*O!hmbj{TvjDI6~=3_W$qInj2H>h#i}C) zHe*+%FStD%=jZs|Wu{{y*n&=WDx&PmC?viqy!*6$6sEz4N$7C-BfQl))Gw@iifv9aB82!Tisr-w^KQ!XfUs&BW#^>c$-dDC_Ghd~^S zyYah5?G1m|g)8&B^1s8+G*FMa?ZTV%xx{2bcN-;_-Z6dp=WQ;Oz-rm;FP?C&DyXQW z3kF>ftv318Xlid{)Y0;H-(i8_Y33(Z>l;vPb`(LH&oRpoRX=sT6w8{{k4y zu>ak1-ZVIj@S!a=yzvYQ3XIi#jKc9rkL=w`d|c)!dSYV6$19(bfm*v~wEI*xIFzW-3!;uh+a5ZQ z-Vlm6kUJiEx*+=G%NmEm*u+?x(f!8*5PoR=H3i8~0$MtY6T`ga+mI@#VR|Lr^%YGq z3t59^+&;Pot>v&z=Ls)dxvh!GL`T1(Mdt)pa8-!)mT;3R5z)I$SIeb4=PHlRBM9X3 z#wrCD-OjF8euSU9ANjN^p>3_?8u6z`{!^=t$;$cD&7$FcXGcmy$mU>ZO6$=IwtauV zoxLlfmM7!G`T{DhxFSFi{EsYn)zj+k~Jb zN^?COelu>56QTov0;>vo_j+Ew^mj(X*7KB#ee@A1@{K9E^Bc=82w)Me>e^bf;V=8$Qt-t#i)`~1~d^Sk8O@91Fm z8d}#KaT7xlDxnL$XlBe1$#zH4e5om?`_S;xgkkKjQN_|XelAl>P#x?#eTXAv^uOZi z-jK!XGFq9r!8AaY-X{x>tTL2zB9)^nQtRY(!L|`z(`CQ_4p(6=OOx zf=tpDiEZ)~+bS)?B&rk(bWeLz>hy=-KrRY5IZt1)1$PB^TjGiu}+kcv^qy}Rnb#XI4#S}&SrbV>#yWH=dU!c$a5>~B^ z{D^?`-b5L{Ip$RfD>;(V10Vn5Km`^SHWWw-h*Ce;lyU8*R4cRj%TFW-lL1b|@}g$Cx20Lh8d{M1 zUVtuk%Wl2c?NL@={PZ@v8I$3BZTaeavA;5gw;G5>OdSde-r@;hG-=Ox*1?U6{s2#E zVmbxt#{v0-tCB;l=qI_WN-Vk`sX2jCWf(BCx=#VIM1SucrD)??+;=tQ*@N`_`k8rsq}{ 
z{^$OofV%?EP;rvan9N?k6y-3gAQZNS;~T6=>#XODcVG4{GF812)0&0pasNxTS5t_AXE8psbJy=C5X#0rf# z5EeB?9!PY2tJ!MPJH=EZWE6c`+yZmaJuOd8R;z3M=CY7h)5rf{rXg1eb+;>ck0;s= z=`EK#V~VyDYGyrBq}x2YR%7O4+b7HdAABQd9)19r6Dc zPXG5S1~2^P8$S~e4Dm_Sh2lz6s@mR6v6GcQf&-1{Fo+RflqQle{CxP^hx7BMXdT%_ z*Wsb-?@w9%4-xigKqewh*0W%PPP9k9st6TDI=*7aeG9>eCG{fd#LeBFn!|k4`iab= zR?>xay|_!8wfUm(AqNVNjx~NcR&?r}#sOt(Bf}CC6x7oPcXxjCMc~Z7z3NDhF;KE7 z7>32|J(9=B;jf!{?71m@;~Xh-22&^NLXFe=bbX)t%veOZF;B|G@>P+IZ00O!H>z3m z-|AA6Zy;S0wa|Z`cx*6=V0yXtWa?AJmEE_7A}`-9OyDTLXQzxGyr{6`-LtS)yw091dl*1X6Jl*rb&F(H~A(dOC{GhKJ8T#IPZ6 z9oZkiq?iHtJ>8mICC@hLc?!12Lanw+D!f!gGC$Tx8|>u3E*t5@0V|(u_E9Hf;kLLK z&_!x$Mey^)I}x$qSJUScM?5M2^3gMSR4SRWbY+`6?xe$dYn|LgJjJ_1uaBsk{dNji8$JXr5^_$TG0;qOjj*mOKk}%0@60jBp z;?AYgQB3kg8v^B941`8@yrl@^cXFAs>B!8U*nmn>|UP%u#f-+zNp zpAK+k&%XT9%ry+}w2T)ZN_ZEQ;dSGXKIjXKg8DB9fU&O~He;=t^u{$g#X_ zbXS=uZ8G$?E-8F+G{4a7)(iorInQq~06Sj?Lx|WX;(5`@h;;W!TKs7SdF<+ncv|I_ zW}vv3#Ql-f?>=D-`*`&i8Bon$vJ-v2qG-WkvU&Je>N^+ecv%?1VcO%<>gvjJ^7J@L z`rZgdZssmiv_GAQ0kmxLf2`?~5yncV16>hdt4?MS*c7h0kSML$O4lUzegi3{3?t2P zQS~OY$=ocbZ$k(&RF$|lWsd&f1W!KOPFvlT%%27mpWI>;hxEDyjS_tY0xg^#VZWGaof?9c{*id{!i*8 zPwM%|?%UDB1)N+vWwh(O#!Ro5znn(sFIEcArFpSB|`3dTDmHILUejqK4Q|nop zmtn862BYDkL<>J$1I3=`Ig)TFafRgjAD;WS7knh8?ssB7L&hYg;bLg0A~?QDyXaXU zb?37~5%rZXSxq|nthU(qY>*ZU<7$6w&ieu}Jbn{YidRTS6`Irbumy!`Vq%B=rQXu=QOF&PK?a4@Xh z7S6k*1kDS~;WW)ruBbeOHOEv|F6%?&3Fgr6i_xMnbDZ{U%SoYgLv|%QFIqlRp+74S zxoeKf#<;)iPdR0(|qqzuKn-&|C&a; zD%7sn_ozc$5^i~_-@tANGymMLJY=zC3vsW=O*B^eYe04q{keXb z5(7RcQ|kKgt=Hi|s8=bizONE0p{d`y4}?A|*kolSn`v_O*FSmUe{?*#MIX)n-P8jt z#xu;6{ZdB>`3*XI_{T=8ZI4RRp*W(d-#L=#axcLRK*OSBL$AVC`_uXFCtDR97NsRw z6#4E|7sjdXWiLX?fyacLX*R6Huty^`3pj^%(h6dm?gtGq@FN{>fg~hV95;MA35oIwuv_j)FlqQ7R9H^B>lMbsJI zX0YRicyVGP#Kd)es^pf(U?vg7W3&v{Wv;tOA0FcaJ*|tc9NldSmWYpsKEbj5D5ngD zZ$GAc)M=n5Mf6d4b60imITX!S<8NxU(bq@9ZQ8|Ipci7dPEcIX0!IeM%C^Vnoch|> z73^a4X8Xtgrt>@ptPODBgDD@ZtV|66;QzdxjqhZFD-@&C?Wx#`coGIGHq+{9UqsnNEY)r4Q=pn@~lExYt0c15oYREUHMa$qf2$H~C z4>aSB@61Lwn5+U#R-X4V4(2sQRdSIbPF98_pkr 
zkMst8iJ9tIiastd02|AqTWuQ2QVeRfNi-b5^LHJ2;XaQHHCD19hdIvFg=bzH7nNXok@GUH6_J@zw_sl@ySkNZs(q z$FH)8csOMe8=hqpZKTB^tw1#T=iT;Gf0B0N4Ur}Pyhe)KMNQIB3#7Wz;>e)Uhv=PZz77LR^V zmD}AiSD_G&JZll%-yq$yjDTB7C0BM29`4UT@!-Qbr1!ofz&v3uz8?Hi>Khx6hDcUKDOlwe!T@{-9NLA*$KHv5E zU;rkgjT;^nKu{ciE$!pX)OOw4KHLo<%JKR&w0!d9T1q;wT?s(@@cT~V^Xz7cgU9&|Rzt9nF-ED&VWlm=!fUDo< zjq&d!D9LnRp9Wfn562}9MKY$-86%47^WN5(>a?G@``!85UMUY|9S$r>v|-gAg}!gx z)u&EHr^)Fn8V-Z03&Y++&B_nLD2WmweHspHVUj`z>dDjmSqag7K<=M@qU$#MLEjLo&7VHLWMXi_fJ>EpIS;eZr|sp`se6s=t@YGhzr0vo z`>CsWH;vX{R4O7&u+Z5{a@D`WrG!GSPO zXlCze@FAYDFveC=Mm=Gvaa&GaRPuzJEf)V)%+Yj6qmf^?K1kV+!CX-5`O5OS`ervR z-J@ByoEH~)7+m{8FTkfL)>a9BrmbTNET|e@^U^+)z}fC#eQd8m{x9%yk9qU+xcC7< zaY^c4_wsJtd?%xs=g!lO2u#XvIHIF)^mrwNsI8KGC&=UXvKnzh=HUI#OJoHZ< z4+s6^!n0%!J>Yw&n|lEOA;sMf1IWy#1poqFXC);)y=U&;?q1K_Jsv=mlpc6^x;r_$ zIs$;-Y>pw+$Z(rl_H^+`Q8OBrqUmlxh4;WfF&0Ld$jQr!e_tb-HGhUm{~MW#3KqkU zylBk$cvu{jJ|EFrf@$1!){pV8KSjU&dASn!$>mq;>F=SNuT$X7^240k5!_BZqEvNJ z{YNmOQblI6<=1`Py({Yi@?iw*9>9IPS_f9IYY+x-79u0_kgXH99l-EI5aI!y+F6}K zj8V6wNW~0G4A>itPVdCWns_iuKt3Q|u?Ub?#en6ca~cA9Sb*)oQ~NbQj~}q*3jK`$ zU^!pABcDCw0bs>DWbB4du$CSsjRSje^O0i@>zOrS-mBACry5fEuRz}Rtm&c09XBfZ z@0(_3HrCgtHM$j_*!?sOzJY$X=`_7|Jr9w&Iz3))Uu6#yu?kbeI$Qoec%)UxIFf|_ z#(w^Hg4$IB;q4U@f~^~3-(<#1xu#G4GF~||?ogzVEk-ekr-x(w)?syt?-CQd^a%L! 
z9E=f|$X{ah6!S!r>%N(H=jtf{9JIK9o8ZR7ggtw`GU9u?FL$k$#{s~cAW1I)z*?DI z!02bKTpu0)DCdRoeo9AJ!|wmjwGirkSy zUzm7p1t;X0#5(n#MJ9WY2z)ngB$4$Xy|BZBf8%Zq!^cwifk$D-Ivb5)|JDo?k4KXc zamM;h9g94c{Xu^;r9PWx;v;oogSSQxjMbSAWIgXmMp>#hCrbYSM8dBkTh)Y9!fNzo zCy2f(HI+!vzft{2IO^anoS8tKmo@5HM=u*Ep1(Qz`5EbK0$8a}sB?=JSG3zzNT9b% zvHCF|e)7|SYPNEs&%N|tOv*7^Vve7VRQqC)hpBYpi(~mJKCoj}P&d;u)0tK;dBDgn zMmCEtfsgq{|2r33a*1XgXEWJ(H?6&haFm%kALti(0)^oHz-Xmz33l=rB@wQs_xB*x z1xDlK#jp5aig)zbrU2 zT;(?S`CNl;`cWkmVy`kKhh9@dGz%*} zen8Maqj`4fT(#xhOt$cWSto7ObbMf2Z7XdH^T?NoIFjmhUo@1PE{hhI7E0$!Ta~U= zM2Co_>oXFP5Z+1O%plEhH#Xy~=MADt{7LXZ;{)dh23|p<(z2qm#WF(UL*oIXgi>q6 z;7V0vQNy)T{E5`h#$`ErJBAE~&{FA=s4{4YyOCT$i1~F4j(L|>m#$KQeqOcBT5InE z!l!h#etfJh#GfNyg+1!#tR9*P^5Jbszh-KQl`SjHmsAqf5H+neI81Sp!(B^%d6w<1 zgTKjLoOT%Uc9C+QXO)McNKC6q*wNCZEHy3F?6dl3{m*JUZQ_-?#W_W7DlO)hVI&HD z8eEoKYS4sX?3_)*8N-EQf?{fIS*hbnfijmuhWr*~-wwCpy>xiHH4LuhUcBhvwt2xA zEO>2n{oH(uIrmzwv9G=8ebM`g6Ni(PQ&StMy_adA-BiR=B&{W_)i*Ra^l|7{#-l7*;VF^L ztj(;ctfdB93qK3K`jYxaM@Y@OMUnYleXCo9g`|0*nX!dct!hP11;?+Jik4!s;wGJB zozL)J(5^42`u_UT2D&dB(>QaL^)Kp!?V$EF(1xDMuD#^8L~_zx}9?jvk+=f8fcT(>%-{S7|rkj zfi$o{7DMmCkG6FU@8g$ND*@xjIa#$?-*-#qs`kY9ocE&Ua|gw8dEn-vUZY(4rhSZh{1#3MQIXOnqr6^esEJ+ou7afIV~9z-uxRiz18WHiHtK z4L=$Nbi3T)HW5oop`crb+ko3Kpcux10mY07QwV!<@A51lh_xeLPHN8aq-JJUAug{; zfl47Eydr$#TLtU0AAjCgu%2$F4<`;glSJr+Z2cO=wfk)s+Z95@6%iF-8Yvg=5p704 zCY<)jcWhqCYf1S+IaPTv3o5KDS;epMh!wCTFAH2^oFbSeXAC+)REA8`9!YTuV*oAVRY zD;8}TS#VGB;?_VNO`ST1Vtz`deuh6B+8wO@Irm;7dq4Mfm;5{VN>Y7cq^2o9liBaK z^5>E@GB5q-2-c{uA4Zzsl*=@}t>=9)srz}?G_5kEjWa;oCCvKU&+uCjR`g) zf_LVpcCB>H1kIPe+FeBEFo3CI-d0l1(WFs@PIJ%uZ7@W+|Ln+^PpK>Y+}-3b;xz8G z{dym-0WhdGXUjSC{FpK6+zWEz(>GD!IKa7T@#kEQ)4XWy5fSe)4SU3Tv($V1tG zeft!D>6DpRYSt$%uzUp=$w|;7Vw0*gWyY6lucB4A)#gZkS7AnJA!<_Y$<6%q{(R{3 zPT#!4v3a#qoQT&WH;Kmyq)ONc83_ZC#F0Dj$6!gxRXNY=?Wde3?v_oP3LgnSOZnRFwa&-+b^?-9Q@v0yqEw_8I`rZ_(p60K5_c z0HiGdNTmY+jeC+!k1BesfG8^%`OW@WaPeYL%d}ldHjj9yfLyG1Jblq1;R7DJy#+B) zFo;Tz&DFOo)V#{~!_-U5COuCDW|H`v*WDz&;ds~z_Z?i`#!Ci#CO60DR!=%2O_jw; 
zV#j2Mh{z|g(>;JBVI;Bl1CS)5_v{cH^&|(_f4%v?7~D<$Up@TO0PXuLbxF7+#yK}9!%w+UoOhSOBEw<}g)7VoU$b}w; zg-Q_c3w8EV3tZgEoZds)^w&%yX;I{;W51f=1YV1y{i zwu$2o!yL2dL}mV{4HLM6#4oXI1TCgd#XvF%JX=qe`?FLZLXzUS@~F?Un;UKEDD>u> zbmk(XTH=kv1iG3#!Cf6ZI;2D>5-F-1Y<QCarivLv&}iv_0=VXM>05}PCSSrDOp)Yf(968As2-`(X3v8tKIE`EOA=DYKDDzPfy7cQt?Bt2=5&c z+FtdfQ|Nn0dDUEBLG8QT>H>l1TQa>#1*U%06yKjB+1|8}!5LYdOEk*}^?EG|geN8P z_kP+1%wlY!n!T2n*wo?S$?R?7^1*Lbagn99Dq4bGBrSAxC`+7K0~X!l1-HlZ@hiB? za+0lNlV|Eli?n%k$6~WKZ1UY=UgU{%l6*~YPB`UCQ~cBoe|soPXg#wHWIXGh5gS|g z#KC@NE*sb|ufDS)9ag!@P`slY(R@0$w0bO|CZ~(Wc;3HDThx-bh0o5#*Ugrfio)rbbf8H$Yp^l{lV#n}rFjpkQ|QNzi|mjenzA zEUE`I+>j2F-Kqn3*#z%+)XRhhn-A=mM}#`k{2n+wTtCa@abuKcsHT+`IeJ$Y%g~|= z*7K&Pk30r^!#E0(G7|VpA>IpcJN;#xSvBGXA-!gUeHsDu+WS&o?9J*8s)OP0&IY+}L8AkKt$e zC=q|x$Ia*UP?O(B5=?P1!Zc1V?5`Ixvo?s=j<*)#(WRG(X&`S>zNL{(@tbh(D0<`)SvzR{7!pG=;Fu^rQfS3 z99D>G^)Dec>t!A6?}W06;!f3mMxYW462|&!DHI9kBRoxz4BEQ3)s{lJf{j-TRF{F5 z7Y$b5j@w+!I0g2s$8&u2@}%cP7wOIF2n~|+!)kb6ZTsv9TA(@DafNX*FNK>U-s>zM ztZ;Po>)jR++TwHttzZ4}YJPQU-!tDx+caz0^%2hJlg0uCC;FwDOgat0oZRBtir@WHqI*2sw5%;|UrxtI=;+3C9YB*&D}{$7wi)6p*bU5c~x>l5yr zm-Lmcgu%!iWK##`$kWW(4_)hbomLg`D1|Lk=x52e*sv>^> z%}Z`=?3b(i$-_V)rP<5IR4@AE5$}#%{=dONBVf+F&*>rXn|$7sJ8J4Aec%~@0k26y1VFEsC1T*`S<#P7GKXWI8@0^u>T zc1Vfw-2R{mMZ=3ecBMIwX)0DPGwX%Ft8Gf?mg8tZOL(FG(-lJRp9RRzPi&GX-dp?or3z#4hb`Un1+g8TNulO> zKaO`#brcFHrW@+^Q7z;D)s|sKd1td@7ECc_s>p!f^+UbHChxP{VhxrQPsi&b9too? 
znR|+qVIk3dbJ1w|d(ifnPn$7wNjZr6&RuoPrYY)TC*8xdwf@~%5QPmx$y9iAdVEOq zZADJhlO&Asf_Jt-WQooo-W|Ui@xQpp?XRAcpZPtgauZPVj#!(KUB|gQetT>m#V%al zAN9X{2sfoiH%d2@@7zG(i01KKQne^IvZ@&%w4gHHC4sXjuyY?>!7NCMPxic~B^A(3 z_yf+|rQSM`SO}D5C`*7o(a)d$g{Vz-p%}dBOS3>X&NH(xpi1F zap~S4nd085fMe9 z7wl5}v}aGXc;-Gi|9%Wch>2o5SinZ_LDj08i{Js5vd1Zw@wY>}W6B)=HAk$}(!*@V z=Ux!i*tA!p4?Xnsh~!wm5^voqhid+M&KtwmQ%9svGmoL{NHBPz*9zw{KfY5scQg>4Z|I?f3;zuRI57gh~d4%o2aA@JDNauDT?_sJzwv zFGYLSf5qTSDy~vU9o)WZNpY2^=+TX_ktO{qy5r7VkRl>Q?8UR@zEdo8#^qYRDWZC= zNm4w@+~Zxd@&&aedF*Wo&a^m4D9QteC&zPs6c6_qUZF>bsc2WB+T@eK{MO!nJc@{# zoydUMUEwGu!Xm`0rJgnC^W!=!>6d1DLfQ(5j}aXhHeP1E!OT^m0;-mlk##J*BOm$yszuLzY~io7Q62QPXduuUkD-I=2A? zL4t3Gy`|imAqz8O`PqL!A|vr^;#LwDa>Cb)f&;1SnCmMKRQ%fCHflAjyP?l#aqY{c z0!@B1)l2hk?<3+Ugg;_IPcW2{gN9lCqOJ*Vg#*E$*f6mpZWyP*Sk}?Kqr(P1#QO$Yj1o@AW7y$o-DZyQQkoY)+D59LQA zx5==)^!N|yrnAJID$DR8@+AxRk`_G8v^*3SjOyO;Yy~2db^}rK0`|wme?ui(6^zlE zwzxey|7kB;)}ztt)EjrAZ$ctNC+h=TuZr%P>lL=;?=KEd?yMjb$)iljuesYk#0bt$ zhCxNrCE!n$-dU&vYS}xzwRY3jtx#yYL8ztwLRqq`%y>>a^>sZ&2l;C)MFwl@pf3q{ z=x>hj`LVm|F6i^a!nz%Qp-v?s%HZxlvd>rW6*95FcY{HHY2YlgFNvM&0c2clXyFSw zQU2SNLP5fGcA9%Ex}=Dv+1+X|3EB)Yy0Iy3?9v_Ef741fv2DBdDM$$zEH#EdXwp@lq zZT!`o=WHC#@S(Mt_dTxa1hwKf79^aqOkPl3CAq1<3@}4!2^&6g)>^uUo-#iE)Hi8BN)uhPOxr5FBJCU&eaoW~1;USe%<}qWEX4RuWUuueV=(Jg z;cNSfBGFZh=)HAezJN?4_SFuaFNm4J_LqZ`_V3|#D2b)S7rqz+g23fPp_-AH{GoM` zPCv8gzgAlEphb1Xak(^U)}R8MWumjGPmpKC1z0;NXXQKQj_Hpc8PswLyNhZko9Ugo zE_vY$LWy^bI&O&?pZeX^kAB%@39^skY$-79@RnlcIK~hi`2ahM^ZGT9h2NLn>6VevYKE|sMp4Q=k_e2( zNt)q+Jm>Vj=S_}bErkL-!<|%7r`YH!KCU@j8rWKRhQ07GivQukQBZhrvU$qIuW|BF z<|EA5e4$upD3%UkkUwQ^c=0lNSJ&$Kduu*)9oeGD)z_c>+zNC=CGeXK&MIF_7ag(b zZQ@gRsmGT><9Lvg$HDSSL1OZE%qOwY(oDF3ckon|tiZuc7_(HgeTi1QGH%cH;7)#U z;`O8_B)e5Iw(G%-aC;*0#Q9d3X;64TS2y5=t` z0^9pgB`jbojQ8Dfz0!eKq^;zo1zu8BKS7|AdReh$NqgA9Iy9S%Dg}0Xn_iUGm`KSvq~%WETP<&WNIq#D7^($F*nhO-8r= z>p>-+ZP25(P<@tCNe{WAY|d4LfvxKF8tp@xg!wCYUzbUpBSHKTN1K` zXoC{vXmlsJ-Bxf`<*408xJa+#49o?BO=%@BT< zlgV0bOgkd0UiV^q!hJ;7wsWBCj`qWG3(ji3i0Y8$kAwNSuTtuT>aatWH>9`G^@RD} 
zwQJC?PJ;1WWeXbd&&1cxD_4t>hv(T&sdR@wYWQX<8~&t9Q|nT%o7?NEebjTK@r&~v zPDZnxO8p2tAO72fgB075uONFy2vme zS6cBHyJ*OBhEPj5|M{)84Aw;xKDVIgz)bk=0@HO{ym3pob|i86&&=3k(Ps0Jir6`# zHTB#8gWQ=HKILw|>m>Mt>3ArYF8I(?^7FBdi0-yJ`nr#Np)(sYS+7;LcJy@f^EruI z2-2iqS^b4woGnzKhpXxT`Hh)?C)jh POMU>NqN7}?XdC_?UVUuq literal 0 HcmV?d00001 diff --git a/04_Spurerkennung/images_input/135_R.png b/04_Spurerkennung/images_input/135_R.png new file mode 100644 index 0000000000000000000000000000000000000000..4a611784cf9df2159436ffcc5684a8adcfcf0ef3 GIT binary patch literal 10653 zcmb`sWl)<@*ESjmTD&bTEmj~<+}(mZ!KFwcxVJdP8fei%aCd8wLUAoxTnoi1P~6?W zn`h3Ccb;?3JLkul%%05T&fZI}YpuQRPa10Scuy#vfIuKTMFm+c5C|n3Sdv&6z%SQs z#dE+Hwu^#+I|zjH>~W!hQq##mpeO3KGBO$(HqIW-?l#UYFBN5EUb?tBTiZHVfk0lf z8QO4|_7;iQ#p0>7a(Ez8*;$Ji>!p@-MBwu;uOPJ8gi7JGIe&;XJBXlAG|Im0aBy^V zU?j078}8>PGni|%xzQnc;h+0&R($j9e>Y$J8@m57Exz&fB%^u+vmFaJQGrL3FA%px znucgOq_?YQWsO}j=m~=hh!Cs#Ev@?5_*3VR^f zmQFE72@FMP_xK{Hj1~ACB3tC5P}97VZC<6)YD_9 zVIEc_Wqrd`a6N-4^Z~jtlo*sqi>)& zu?>Iztgo%jD0N92nfL4Y-@|{tY1g}RxC#)xy*OKL{mT%@KmLMY9tOj z)MEZ$jQnjK&ciMB1YMV+MT0)%`Ksnq*J#<)$P?}Yx=+$^EZvOb4{!f2vE6{hm-s-H zcH$_$UtSlRSc1=$nF#eg+W%gFK*vqa9h1yh;6R&@l@ZT}1BpBNY(`L^wPKtr2xKbD zzz*xLmgvO-fn>9TAeB;-ryXQ$?HE)Y=<^*ow`TmI(hOZcr3s~ac&o$|kzwjw=X?=#h)KQ>57IQ-t3Nw~#{36l^;tsw=G|O`#gT8Bu zP2&ELX($$;2$jpl8GY-)l^R2moi^%LLm?K)pR+MqW!Y@Z>L1QPPopyj$xI|Ev1u}s#^>8CLv^_g$*cPzZ^la>2LU zgb4~8Qkx6gicOD3qJ;z+wWLwK@qsP*&7@86sV6QTf;gl%9L`LhMutfSC-)?)NLDE% zpNJsug>edS?Idrc5TrQk=tF8De#Bq;pCl+Hyh@;iaKK7R3riPEadb{}24FEIrrQ4H zaymTPt0mZziDf#a85%papYc(F4~B0s?3?h}SVr%I=~OulAz)tlAko|ib5 zQ!Pg zW|D`;45MdkX#de(D0)&vqADhQR?c2(UqG4DB6DZyFi$W~S_%EqFeaKeB^M<} z5NX?EyvnfZ*yGrf_X^_`?JH%~ZPoo$E!BoXmO>E~5tZJd!J*ut-zj`)VqDYQ8)+M9 z(`id}W`N4s<@E;rW9F`nk^uA43))jb`UYH+{il2+SWvz;@iV7@pFB-fV6lkyO zs~b@7bk2P9aY^O_{lnV_pNBC}Q6M7<9Q-LrD#!@+=F-QHwk=vhc+Tql`=33j$m|9w zVyT$mZ^7#w-)L?6c4NQMUTh=}e;KyL|ELkL`Fj-8{GWM5X8kpS38&*2;~xv3kuiqQs2wXnRDI%hzcllCGj#J5u&pS+@;+>K95Z%VZte8?5f1ixo&e$a>g&`t#{ZTx|hDS?@Kq{=b&5 
zc0%7pUA^a?tP-QMB6Kmniq?Owg*Z;Bm+k2#l?SxE@=>)9GVSOO?)ZUXzk{RmWc{Q6 z&iwSAiMl?A!O{=&YeWX6IPs^?<s~K-9DIjKk6%08FgZm%E(X(z`OUf`kKg;E^*-ak^EDnno=#MH&tG~MS?{&2 z3+$x}8a(0I_mRF|S1>1Dyr+g!bJ$6z9;eEsX{X*8Sr|2{4~mcd-d^;%m7jY-kj^V= z;H`85`(-=36+96SKOTw9NXYOHV85C-o6D{B-LM>9vLkRlXe9Jl8HYYZY=|_A3>zQU zdmMkCTYsikua{aO{Kj$HV_Wn-enP#sDZMGRyusMMVc)0ahp+X;7Gxbmfed;~O3VNP53!ReC@v)QSHkUX>$XT{ z{9b-wez3W!I@2C4x``)`kAFY|C!gF}^1OcRH(y#RXsLogK8zqxU|K=eaY!6--YC`!u<6BFNG4-o}r*TpC>N$^)Xn|;&^It}ZpkqNA# z5UgS<+Ecd7rEd2!EQVj8xYPL3`ltl;XD^y0Ky^XYSXsiso6L}$p~aI%tn8$t8~&eg z&d|3YMIY_U$s~`Soj~4sFid-_zGzA*?wtsYXiOvR5 zY!{w&VKGO>W?A4tF*n;{=7Zt)Y;JZMLMt)M9mt>wt!G%~S*{>bAhr@Lk8!zXD9xuW z&2-%5iHO`9hE=wMQN8~3~R*YwP4bbdVt zK@s&!yZ#cpEKyIW3k-t1_FJ$@4^o6TXD=yRoZHHWie$$z$&ELIB(x9n_$$PObXT@qe|y;nigWThJMj*cA4G zd_LT96}@C?O2*y#)!cB7eOwqdh1`AJ6+FAFW0iJ z!haL7M@(nBH|yO^`gZVMNQ3}dSqJ3zq~PpxXFU8uM#$&zGDQxdZvF5N@4fbN$R0#! zK$jz@FC7A3B&U+q9^IPECp}t1o=owi)za?C<^?4GD0u94)^hcSjLdvnKJFG{p`!9Z z&v~zJh1f7(T}0(~A}Nem4=}Y>D%#Tm94F(>q8!|)3@||oJM#jx-otlAX4kWym-2k3 z#zifK$oy+^Epnv}&uVjo!~w7cFX3Fepg@J<+KYp&hxSL9%Jg1j9ed`79^Sp-S5BX} z*UtGS>`F~w;_9o;3V@st6Yh7_1y~Vb$Pg!8=I6n5j*+G^RR549`Y@%LFisB)yv;eiXsxKrI^$m#IbTF0!M%Ao0fwzI)4 za9-szPBohLY;Kpb+-fnBt&Aj%)kW^twP_7MD`?rPqtEXle|ioVzr>s%oPQN;O&N5~ zcHVYs$p%&!zn8ZsrbVVuJaf{1#Z0~7MVUqvoeSBfPiD_;v`tAR4p-W=SK|3usb_x0 zm59QyC6CXdy|&9S;GguJ&GQ8cPt0fdZZ>7pMvno04~}Y<>^e^-m_kpwJEjWvrD6r; zhOg1lDzVmq93L-!H=c@npO}?lHT{6d@=gb3Otij zpJoD)WzBZ7eE6V2fxn%F_2Q9+s_hCtuJ({@QA$ zs8V_CPvvqPW^jD2uF6@Ha>t9|2CnNlr%W?#TPX< z6n2(3AZyMU9C_X1tR@Fpko2SrY`knJ&K@{4K;SGxg zT?9L6A{OuijhVJF#OJK{<$s)T!V$t;l-xq<%Ijh$lx1H5Yrj!khH^LaV@3yAX>-92 zvnd6KD?e5KVZ8JiYcJ2E>HoQG1bj*`@eNQEebNZ7;mCi%Rxn9bZP&?3(V5z>+%?2h zZ)bE|MB6*cHx$w5aAnv>%9vH3Y#>)eemzkbA3=;zo2d#dx#Q^bewr)!J5US%l3mi@ zkzE1NqAB+6BhQ@rRIwvFM__!uemlx?i0AUph?VuFq6D0#4&CbVo0>~lSJrGMMlz#5 zqe4k;+0zqzH;9bW#kES(vTH)|jY>4c2B+po+egnd9CA0j_jluIpN_5cWiIQT!wK3B znyOB<(kG+N_OD8VWnFX$mDe=`^TU$y$pzeKo@B%ckCiA7ecf+}@$hBmopH|syggS* 
zLx=6zU|&9js{*oJYLN&@g_Raz`u{^K*7hM5byp(I0CrvfBJfge2!Nxr(fj?=yu*Fh zjhei!Vxj}fcRR8TZiL)SNhYjJh`T5AG!+M@VsR1B0l9TYV3{=L*iSg$IU^1(epVJ> zCTC672}!Cf)xyr9sz$y)no$%LZ*}SsT749OO=(V}b;^b7WHjK1ZV%eh6?yz%xXoQ+WRC0Z|DzB$4yW{D+*e#Y0{$4fJJNnrJRpeSoNIJCG@E&|=6gont zLAlHT$vzR}jNJIdZM9!_VDT`}G94d)!$hCp6rO5JJn7s1CYPInHA}l+DA!exT@x{L zuO<|RWFFv7hsOT7dj$xt(A5;4>G=^v(X{PCi^Q9uca_1udF1s24caqOUj~?_v8mAE`amJKLIzVilM`TQd^0@KaCML;~}As zV3#H)9@EE;`u2yXc@zeOh*gk~7b6;YGOk+E%BTpO{m2^sJ9CVuF)jr!i6pHho{m`2 zyon0CZ0KCq!+WWC9XO>1rjjXr>YbSLQI^h`XE%HH6=HY(7jR@t(P&A@7J*Man2mP* z^VMkVw(Q*gCMh|4j<^2Axmd%HRf`Lt_^H|oPsqB1;)l9CiQwyEgz$u(Sj2g(ooSVU zlPO*YUiIMZpY(Y#I}UdZc`BtGxNk{d+8n5+04*MNKD z012W?!^&Fq@A3_pJt+y7G^8x$Q4GlykMbbg9l?RY|DlmhJ_nqv=Wg8@Ipj0Y-GSZ^ z_UAp+)cV-TEcwiMSb)l<{q|S-P-PGvA{K=~v5n4eyUUHM=$m^jCG_+4GUVdn>0Sur@!o9u#pyI+>Pw%J#)~ z1oKgUcU?|Du2^Td&C@gUq1|k2t}OTab@V1*k-kd(;S^7v($>GHzsUg&6kHBHetCP! zQDjT(lJDcKX?Jc2yT3wH)H~6+z+YQ6X6F?P?RM+%YhB)r+1TCR?;o>4C-vkoOA%~w zm-C&JMx1-~x{O!bkmNT8QyfE*eml0wQ349c*=I-JFmaaIkJF}$3Fb-c4t%je8O+Wi zXcqKV^c`*8PNpOQZ|vaczkR7LU(V;UJ7*7498Y`OFO&`A+Of{(OJ9qA-Jnb0sv zZRe!Dv+~uUDMNhj-b!~GlcMf)!nUcx?f}K*3n;ker*t2P?K*j`Ul>EE-2SLx3d!(w z9e`o}-P<0&IB{!EaZirZCtnG6yGsI@cK7p(HP}1RHZAeA1~Pqks@WksLrCw?8Ed+b z^^V6aYpaQ-wmCJq#>Z4dvjm8S1U)#Zy%dcQl%{f)8*BnW8JCOoAd>TEZzHHo+nXs! 
ztWjOB;#sCbSiK2w_K&Dwy^&||_gYx|_Ho{-bRyL!g&Yu>ts_IER(ktGRU= z>y99vXh_d1_Ukzd;BU|K8fFJ*s5l_=@6`)Hdcen4n7A=20n7CLr0>dvoKhEzUFC{(FJ5oZ_7I2#X(2DT?+E=-|YcCyW%w`@z^2B9KfM*r$()w|LI8LfZzYKfYX4E<3D8dKM1Ks zBuRq@Kzv@PIy8k==ODzr1Mn8c2${GH+Gu@ieV966tav#3A&ZsRAywF*xU@}3#Pdh6 zWWs!D-Jh?3tu%9S`hUtQo6%fk$?~nOOakfUsjH2c6<#)-M+mA6vnc;4#3~%9Zv(eR zW#N^lKgPodL;Z)R;H@Q0ob<@+na=Rt%t2VoMlF%}(!oddn6k|~;q&K8jVntXdPJgP zhttJ=hRX~AwM0sFdBUh(M6`gji&1HM>}!0@*Did4PU8Ak=Mpz0oAD5HSuM0rl!mGH z8y#%h>()r=+0S%ts-&Qzb^+H9?JI(hiQ}?q)O800_P4JN-qDG3dn9*Pt%Qo6e7Aco zDZdcvrtAEYtUl1=*2}BhKw7H~bQFirJ=KUiV-i zlEKA(_k>gZY>*5ShX>2u_;b{^szT~W7CdZuq*Hy>%^u02!LEqqHnNGd&FGFyGOlZb zh%O0vBC?vm{O~m9jEKV0g-UKYlG@e1-#ah9*vDEEJ1U1CzfR}q+SBD&3#gS*iGi3z zuN=`-Uxc;0+Ppsas6sC0C(^8IjRa!?kGvb2e5)& z>Bg$empL>G9L`Hq?heVzQn)00FhVf|0UzM>kIrQ=%ed*>4h@+?838GHI9XL%LRQ-j zB519VqcdC?O(pb_BuDAMMtCQUj14*QlmQwZkwaF`NtX6(W$&~r^DrpRqZ8j%$Y}$K z4V^~1^S=x26`E+sIy3#jO+$lO=v3>v=>%Lb)2W**?8M2y&R=VfO$mXn-b1D|tJ$+y znFv=NuvTZ#3dUJ?b(HIF%MsYK^{%nSr6K&Lm&jN(bX?DOuJ;dNdTKAQDP*k$w9SHt zf{FZtli7Xl7^!tKs+;}Kxf^B+f z4_#ZG9C3rWE~-8-skXqwM||4;p(ntSIf{7VA%bv%!!kVr6-w^+%6? 
zJq+hI7N~s_lQ3erEb;dpYiI8h0J3kJXHJv3Pl1;eJj28nLIq#U>PXeVRwU?u@A*wP zD3_N~{)M^E3;~$U65EXmYo+gJ`-7*=(?M&cHTJ4BTOOIX*Hq<5X?})x_#uT|BLN^n z=XaY;$|*!maKmz6W0W)Ob-D@H@8dH4LU|9l9_>0Rujd{+p>LC#*b9Z}H{?n7WK87L zop58RGogYB$DWv?%`dZBbWgmaBpnRpYtA~fOv8~AV}jHbR2c(F?t zN&*nYu#F?Ydzc~&{I_02jjo?*2oeDD_t&VXFtwXH$?>8sWw`4*MBIEwjbpfTvQz!P zk+z=0a7cm5A_ZB;z5Fb>qGQrWk1~&p^~#g;2-U4Kiu5=?7to z3UFNhhd8_JLfju`*iIYY6)w>yp2%6AxhtlS@VT~Z{N)EMq)+1-YHBfMe)Q^ixWSVx@16Qo;uv1b$@EhUDki>$q{6ETz1vq9;%~i zkio2p*gWRD^-N;$M>``E$1Z(x&)4~4HYkZ0_O6?*qrCKLpthhtr6N_9Pm+ckY!zN^ zCH%r;;kPy4qXr6^XM@iMSufv!{SpVxF~Pgm#7Wf%3DbSS{(?qDuxRNs>QTHDzC-Wk z#aEE*48ZNzLYDG#+#L_mhb7D=&X_vrckRvgLYtVgSYJoNfO}debjV_Q>?}oQS$sWx zv_4f6C^~TgN~*S^f++Wnfg(gS_GY?zgWI3ynxchVG&{MO+=h1po9^1nl}=Tm3BF{7 za-j!#s1)@yIEsSLf*DMzXaq6t9V?y+gS3J}ke4-k`Z}Ct$_hihi|MgycF5#>}e?&g*FsRzWwHe0szE#Og>g5a7oJw!HP^B7xyPZsTtzL1{5GRb(5ivAf`X?z1?&S{JvP_K_?4HnNx)UXg zK!MZ?W_eS6Art3;{$6EzCml-WXfce4v?tkqq#ctbJSqSdxj!5p^+E49@DtNdAEHduXkKlPR7W{qfXT990 zml?t~x(*LO2+F#j`i;nyr@$i|Uxbn(4Zil4SHg)D?E*6g7)?Ek69{I79Y{s$U%1t!JotR6mElGtn6;RYld z*b24os0cXC$JrXW)nH$&OZ?7gZ2XJ$C%?=A+!CXHY3QQ_0sGv)LMohLSAJ@xPv`Pp z+osuEr#dZsd3-Ju3=BFLTH>GEx=0$JAAs=Tn1cixq9R^uKa|#94|0(3HX#-Y8TG&o zh+E#IgyTorG%8^psA8x0!#A(eAN>j!ZMsXhOG)Oj^qm|xt)&t*`m!-PWat*Zm4+L0 zb)@9;r~+x{21sA3F+l+{<;+gi6Y1@Gfqrt8?xV{E0}nL6b+0sUn*Z+An4QY_zJ`JO zN*BS@eAe#`>|B6FJQYuu^w)A9W&3rJ)6%wDDV}c#TN8F~Pv<{_Id(hMTA7)hIdoSJ zuQ_E)^pB!)FfX|NN940*!qRu$c2yRh@ZSA^0UuX|>dY}d>Il)IEP}Owm(JPcl;ZO|clMx#&H`2Erk4S#q0TldGIlju^ zeKkz?VmCUDY$rRn1n>99=>dOuD*m0?X6FXySnw=qaZrvM1e&HWX6i&U=L0084e5=Ngi>lN0yU`z1k2D`g+JV5%`>FSlUs9c)821wAK**8pIe z{5Ru0j?5_E%RFJEeAN@=pJ37@A4M@#lv6HVgEwLYV#8V;mXKz-0Zkv!Mo<8c@aT#c zwSU8k;n_^fyc!U)O`Q4mo|83!$OLT^f18we%RG0-$1P3y61Uxbxd5P#tt3<%N^BRR zmAIUxbBD0kq9Lbr3I1x9S$Mqal%>7I{`&)0nGr3A-sk5JWRKiGv4)icTuUdBw%**h zXOeoJla9dEs!J&66m=Y$yEl8B&Q(u?qQ12Eg8nFNx+b2%M5GKW!w8vD=?o^wWlNDU ze5DYpM?OrOA+4se;*RY{oiLKqb5ngc_zFHFHZHtjR++6yZV z=y~<3z{guYlEMwZgZ+;=INHaBbW&L~m;SVFqaZb6>C1Ymt 
zLPhb(6wgbGk+no}BY_9!(nU9p9~ObgJAHpHr7cg4r1yw6Wz|s&3Of+s0-&m;35D?b z22BFfe>Dk}^GEkTq~PkHjqZ2elPkuRH!uIcjpQH4^8fP^{CN3&TmZ8E_laDblie+V zgZvnco54r2&{38wJc48sPCWSW_;PVS2Wg<2LnG#V+)0^Wav6rpOe<uEm`ecixxZ z%S&!P?xV*?ce{@+G|IBFG%ju*ZS0+_K_H*`99>&I z-Q5@Bm&<1|s!_oysvmVo&}npJz628|GO^KO;HyN@Lgq-cy9wmwQOJMiM1;>(T zb703{&Z2G57Q}u29u+fq`{&bl$LY4qt&xX@8Hw$R)0`ipXkF;osY?9X0>Rj2GE@Ys zpZkCI{n_A>3c+M>0pX+n_(1D^Pmct;3KSD#r|UxN03rEIVxfb&)U&#HC?X&6_GB{5 zkb=XIx;zr!sG-^w@qmbJRDTMCW;sDT%7!*dpqfTd7aSj}4up;k z;?a%@X9b~r0=@f9PwxW?%K#B8oEVDzW3R&BW&=ExQ76{SAS@qZiq7eZW@yMo#R69* zWhWN2d}f&`#nt1RLB<#I8gKP<7zD~sBmt~Fz4w9RR>9$daWGs9&b`j(k96}#~(Pjf4mA7eI5*cC}2E7v3rY^?|J#frVdZ40hGTxZ+PnRp-!)%k$NaHHHvA^AJUptJR+2Gqoa$(Ikwp_lsNc zig%4zk9U-lbU&5fHyg1LuWJ*!#>r*Ip7IsZMav|y{$iYX{IIsdaf>XmA^@s!kU;vB z$XRM`g?ypPjBn)8wRQ;towR=JMzEkG2itxAGwS_#EP1b(&j<>(QBHCNf!@k7aOn;H zknBeXf#mW-*lMK7&$?f7bUml&MqTX2x_c)WCd2TvM+RR8^FuI&+dI~ZP#Knpks3<2 zciadm%GX`mb`i-gxLiH@O}OGNcsG{lx!o*nAs8sqztM>-Y3HMm-p3fx$DxyEgkRBi zE1?j6VW1g^BG#r;O%zb#)rrxg(O0585qEng9BHP|k|_Ec#20!O(Wc0o5>lrvj=*k^ zZ7vmh6{b*tHTJ=SH#7c4e%6><{VVZULCE%4xgFkoyo78&PuI>%H2$AXJY0RfGPQ3w zFp{l?YUwJl%llr{7*rs)MxR@a)_S85hRAne2%>n)&{$GQD;cR7Y0N5>(oitGCYZ+% z!axqw?qQ}&E>*2(Y9YA(`SLvgGj*0qA$kM?@g_2r5SUK2c4R5hYtKyKkTe zBelD!Pxvs5(hEAseEJlM$X2qR|YMHE<>^OPxXiN;>+IZ22?5N z^XsmcVIWe=^`SXh`?}=1wq>HFkx<*xk9v}YfyVdI&y9Q4dNpMWwexE&*4z4Mu&l^_ z_;AoV;*>{th6wy*svVj0_hM^Jzo%^dA`UHs2+Q)T@EiWnIZd&VL|adHy~_5`z}RLf zNjnXUStdSaU1KFLey!HbYi;HTO-)NRDpxAkE?3-d7pyug$tiA^Z#BLR!Ikb;VK!q{ zw2gqjw4?*4^;+Op=oQnE#s-z?Mqge5kQi+!I`_^N;x zYu{(K&ae*d1NVJrdd@`4q^iEBew3-B-dxODEUG4|);}^lQZO=|A&@1`JHxk~wVgGS zwbJ;`#K(jKRtjsfR<7GHDK6@7UP^iqQWSnlTY%L)uY1T;AD9@d? 
z?XCF-8-SHHlGSX^K3}MUfnfobw(m)88-G>x9woOY2ebryy%$HriJZeNr!=7qbgy?K z7BLW6&V*+lNe28nOWozRH5$bqo#kJL5e>2aw9nDanpCtUwQY9)d>Zv7S~YZtD@}qc zi@a~?cl(Bl$GNNdAHRw7oU9*NJ%^iK`t0_~&!4t4PEusS`l4>`P&zihS(-#T zKD07)v%8Yk?)O1LCGF*QIy@0>j~lKPxHCP5X1Qhgr8f|pIXp7lFhVlUCCcd4IB%MO z_xPf$`-et@AB zrV+RCN`vK1L=L$GNpwsV@d9ZYNzg3IqR%FIKXu1L&apQqjv$Wy*X+JEdKWq0jonL(l}jod zk$JD!PZfXACaJwBZ7I1OWHL`O6|!_QZ%yBuwrCDZj8E?^``sxnP~&C47Blu$In^7m zpWh9gOtw84jm`O%6A;LCz4&gS0QPCy3clii_wl#|-{a4O{NtBx(Kb=I*-4YfN!`LG zkztczX0?a~c+X=`>>+tlv%fXFHM6SO%(3~%uf5@u&E+n6t`L0btljtDi^u7yTw(F0 z_EW!)m%lF0S6(ayx-s9whg#>nnU5;lG}|oBqzKO3h=Y4x-aC_M8z35GO{Cx#abHkq|h~?4l zy7pXmtmHPCEIIiR1v&ln&WfM&sotcqQqoZef&3Ujpy1CS(Dftmy9)w&@_;~l??51t zbP$O2W0J)$1rUgdM_Eo<&u9K%$<&=(Ikk9yhW;5SUSy-p&(E)~&&lHO@zMQaGdFX$ zO05W&K^jExM}o8?n?R?ij_(tjJlnN|4Nn3Mx;8Nhb=s}lB?>Wv@=cFOCu$M{vQp9q zkn(>Bn?^b5iI5onLwvyu%1`t>fnflY^dBOVN@92-fa!l={4Y%Z3*-MR?Y}er&(fY$ z{Vx*#|J1x$#|gw@*kw-)recwb`vgciiwsw%ZX`B5BzyC-9a#FNWH%Vcg#hACb?mKu zt=7f)m-g8ndu14)x}U?hQQRh!x5&1x`?Tt164on;7I2SNx#kcz4hw4mz@-zqZWwJQ zOK``lG3lTKM-q=jmVgjWr{yy>YMT@Fhdr6!prpc*cjS%7!D-s}W>O-mLw`*zcKYR* zbdUJ0({@xf4MjB8qy4X1*k}RWs7$F1*FUa%A!SRqC}G=@Z;|rbV@*yC|Fz5)Wo)6< zws|@9^Or~8uv(yKH>Jf$b^@TkCT%dh^c$Sa1!GA+)G*pD=MQ5Fq)R}JH2g=St;Z5J z+M6@QEwkBF&Qr1E5*^VEz^;jE71tG!rRVR^Hc9>#@N{Ofa;s8?s`9iCZlEk!Dhdw` zduQC&Z=YO*1|AHQlz9!P1Mt|L@epvC{do^FklM8NeHr5j(IbiD#&bHwn)Nv7>+ukG z_49r2BdNv3KWYA&A*JNeFpWVxl@Mt%;t!lI%*(&+!8WQNEq&BhV@9zBWl6C1P$>It zoJSgEYoKc9ZbwXcD_~4KYkvSWQZfyWIc26}_vDK#*b+f-?{!F94lB8mM^A+nH4~WdY{(r4 zu$nuIF052quWxv(<9k+1WOKG&KD(zKm<2iO7Dsn5MUO^ymyh9%Gj0l!PG0Vv#R9N( z{V8i+XcUh4BkoMcSiS0+Y+cwzA6l@pdQ+Dy`?qQ!lAeo~-YH1~jLF4vTp49*JS188 z49cAaBgYgWjR*lT=Jeu>hvyNuR5nWea?RXl-eZTNFCke({g#E;;X}KjhxYw8iwJ0` z*!@%~%u;p2Dywt^{ZbnvM^=Jetwin8T{GrSLmO+X+OQRbcqdMLGyOM_6Q1YJ3;EM_ZVxx?{QMe4bQxcz0^%QJD}kQo?9A78m1}eIZHtqCFnz>3A6)YA&H}+0)RSMmU}23#_k6Pm^E!zKU(yIlFpc?U}?_V=9lMtBeYzf7JS~C zqTYPluzxtc!f#ZpN_sZ6$HXMRC+~4VL1fT74Hmvx@ZFizB z|8)OXXNL;lXp<0TIoDYnZyJr?5*M@MDHaH?yZ0!rx?4nggkcuXfq=ZYn^rglCT}#j 
z-|z{F8f$+3R_;v$Ei6NXm=x_$tUg^G(eC2BR{I_wh?p~b5ZGwTQzVtPQE=D`$)AX1 z)P_V1{3+y%&yl*A1X2*hSw0|^#&qQ=v&o{@Vy2kuiB{A3zzYZ+Mi&(D8)_WbMIm0@ z1R)pI2xQ+Qx|I2Cav2~uBpOHbo7A*D@Az^%eLv0hahpKN5MkM*8>Tv1w_r;eAB5~r zt*^!F4}kRs5j{UQ?aXa-0kz-v=c3E*i0-d0nD_0(OiM#3gtekq5?qD&QT2esG*1@5 zAZ0=hS$l4JvnoJ^9I$Mdrek|xEi@Bu+vCWoyltLrB!=A#w6WiO{xD19 z1$PZfk%by3u6byI0V`vwJhT@Y<4}DbYQ4XkI1djXtM24|Iz~npqOErh#GuK(`Xl(% zl9fFr7W%UL7l$yH1_5fbAKAWRY>+e#bC>5pI^WGOltxR+Mq5b+w+m=ae~yZE1#7J zP}IM>!T*LB*>gA1%K3Fq>8TjTN;N5&jHS&O5Kh=IzRH}c(Lmd?!8nwy9tX8~H#c_^ zVU45x)*Q37G}Pn`CB47Oo$-EnHGpPuE5PJNSI5?uZAG{l>g6>Kw=FqqhtqyJ2KwW) z65g@5u(+tEJOll2Pma|qm)SyxkP_f#mA;)1bnZvbS!WaL)0eMze%BPgYHVmMy2rCf zQEhc@1prC-*;>SW!kCw!9i-lBZs;&~^9(`wXJQ(+-8O@HzVeE{nRsTiBCQhaF&|$1+=cT^7D9(9AybwEB*6a3acKk1IFoQj({AS%(wk^fInuU7I z%sA^$In%)j>sFWL&}g3nCaR#`iLSBAPz?Z=ime-I7CyJG<@p9VcW9pIUz@l&WZl6d zW1V`|}r7P9V;Kv0vZz|uf zx_&?Dtm9w+o9@-)Hhadx4!xIaUo+N&eFvdEJwt^6_EaqfO>xEz=GtN}N9hxo_M8S> zJ)0$KvL{a8a0SmGAV0uj)3~*c|Qb zI!3Y&h$z+uc~fp?H7P0F+uP!`N$&4TNz+0$(+}`vc;zSS^ z9}&{!LD5no@mPI~JpCr{4#f6H{=4+Gf2Ao$-(UDr=-%xj4~gW$>{Zv8j=jCam`9 zoT}*U<{yx@wg1)kR(nh3y)#^$WppdEPY&rU)T7SgFCjdY=eSi4^6ez%+BB440=P)4 z{Ugvb_*0h}`@MubGI`q-sQ29}{alV`M=-aI!?h5tgIkj|Q+vKRN_^ z1ErZ*C3HfXRUpodb;Il>(J-ScN=F{8{Co8`XuuU?!nQuxrx6qKLX7zxhbrppZJ9Wi zO!{{%!u;E65FX@yoya2zt+dZDt>o#0(B!ikGW8hzn^fz6ySz|Hhm;{rky@sxTS&CY zy;`%xwtC)dT2Ci)Q%;(?3ggq-Ii<36jRZ1aoYCTNt&kKf;+@|cvkCRwE|cA5UGV|^ z5~RyIZ;E=V5Hmbsz`1u*K@sAvurzW*?r_>A$X1x!G6E-d{DkSE=CZR2&U*t7zZ7=Q z+IKIm^XMXz0y5S;)Jp`8efRRu4-0(-&QeU58OEcKyiY!gP<0Giv>17 z7fC+tEc~kA$6s~$Bo3dvu*$f)v#18mt`dl>;KQiSYXV0`8Y&GpiGaTi8x+F4!g`Rd z@(@b|edxK=x`$EwO8&n+-|nPfG3Gv|ip0){>=s(Ahj=7CXox;wFEk-* zXwLUSUs`AqKO(>e6bv}OX_BhPQ8qeZBl2 z6ka+646Xb+%$}d|&s8!HH5Xs1Y-33w&2T274iV zb3fAibFBp}sVdnn-HQZGsLN<*JK=*dUgxFRpxB_f_8B(-v}}De^8^Ju_+cB z(h_D4_0QnN%)G0v|X zZPV)3rjNjhW9iU&r()F7vpW@i+PIA^tErrEJCbNGG_&1|7FPk_MbLj9o@0*E$4ixG z_;|aP+QJwH7@#T6ICYB4wXJ$HF3?advaO@P;G#B111-Pktt_8w>X_qxTB0G7_lh^6@qRNb(?m+=kF5Nx$@KIEbV z_buSQd1yJLHH6;qcUAxOX7&79aq0%`0eepVB 
znX!eJmy&<|&rgq6to>>&$I3z&%d)~RWn^({DEjD_X}{^X(*v12(R zEdl*5{4RscO_6492kEPw9ZzcQtSfpeF3H@)&bz@%V9_`n3pirq#<#SX4dqw+i&K}A zDSNJqMYGPFfJ!2gP%u{Tj4LX&5iAMrN&?iSlH$*feI^V~ob_vzN2A=OZd5$V-7JZa z5V(tE>w5tW(6IaqaZ)NK^W42-69Zws;KGtStCphNtqYN5XIYn$srY}51RA&cQ{!tD=#HiOsYY2fB+b(H&?vGNt`NV?CwdSaIH%v#| zYyaqnKB!=obpBd**OQi%^YrL>AO}{JF{PPg%Q0xENA~p6#%S2vdQYMz++!CL-{>hV z?zugB>$B{Q5O-Ows~`n>c)J;qnw^*sSYQbd)$%)c;nquQQP)37U@ITAC8Bjl_3y;YTiWw&MgLx28Y?34Hf3IhQE|2{UeY#h^gcHdj7Aml zmm$DDm!q2beJpi9rE2Rr*RwLuj)v#q1|;oo#is>nz$G0GDrNfGU}P8t#B-po+vH?{ zn@q}YS+IPt_`4uheZZ2}zbn;L%f@rR{Br~G?eb?qG20f^fr~=gs9*#AWS~MAo%Lg> zIE4Ggux(|Pn!pXHKX(6cF5}7z)5R({HqLxij9xc0i@{7LMd%6H$(^sA-U^w|9^KsQ z_iWFS&N?=ZB;WwgXI;$>q1V>mv>BipBLVHlpPWO}BH*kX)70y1L2AS_TII}Xy_a)= zj)<-BX?_Xp-SOHzvg$3YayAL6(3VTyb23iXuGh}nb%-O!agsUjayK9b+UPHuW9OkRb6V^8>1>)WMXNb|E#x! zb!ull7c7t2S}}%KJI6POuM9Hh1x3+(BE7sUf9HT@8WiVpl1(#!spN+qN_O=~uQWfl z(2D-mfJ~i$<8WO!Y(O1lcELxqGH@o-$V!l{gSIndeK$r4-xuxpEkN>RZ*&unP zORklfyT-V=qIDFVHMV4_s^v4H&mP&ImEcTP22l?g$O2^{BXI4(<4{MvEe(YmwQNcF z6H4rR9k4nduX&M=I_`ftqH|&TtO(dSn|4>?PUbu{Lhc}BYI=Ig1kLC4ygEn~K}^Tl z&(>te&1aG=_ucmUz+W~F^A$1=fF{0&Yc5os{^RZkM}CeI`EVi9(c)Fn^Qb6(Y)eUZ z4^{D_hMZCV^Shmj_ZO}Evwd|p5abWASxvuMD5c~ZLOl9?8RZVLlg7$7-OG?zxPso4 zI*IJ1ih_SkP_N(Tt+`KsF)?+aEC?0RiCke2-{Nk^^_M(dk(x)EK6Dy~5pJhi3vR|* zUu+CLMiE-fJ@(d(+d+;T{z@7}Og+3pIfP z=$1uMH;rjtUOwduq~Th#3JBAP8@u-M%y_Kpbv^Vh4A2vrDX(4y-+@oqqTKFAoY&kh zw9o59fB|hPHcfyw_|xbY^sFT|J2$gw3$2n{Q!N}0WP_|Gg0zeNImuP^Md|{s*7!?r zj)jYQ%7&GyHFp!#ctsc0=LsF9f+^E%c@jOP`f@V}q>A#U*v6u~R{d{h0cL}bEttjJ zqUlo+lk?qfH54-m>+94;CY3YahjCj0xKjy)ne;CRdm2?z>hQ=(HOS((=xtUAB-(!H%S5txL*ugx6gt~cHSp{STx<}UX5N?&uIY(E$rYHl2{ zJ6|z~I)sES_w`>jnS{p}^UEQ=$rJ1K`tbNabbzF!`PE6|#d*Ix*n{m4s1J;_rv%Zs z-303d^>JytmC+ikTBv|go8He2Zw)Dgi~yx(Av3iUqfBuL?)Qd>4Rt-~?Jli%JNc>! 
z${xfKW}l0=d5V6DF{Ja~K6^A--KKR-xBUG}-a09ebQIZ7;c=`|^8mk*)6?fLMufXw zS-@Y=e63=C)+@Sde-i*fx79Rb}W~&1I?5K;jkWF@*yIyQdCBpKTja0Dc%^edUk2}j4wsTO{ zN=9LbloF!Om7OM|_(!h(Jt<3(i(1)H6puA#G-o&~rzkscNdvxLVMX}nC0efp>xOBO z7wluU9j8uuGC7o=Lo{PO)KE1rskNFCzW- zkXuA@tG6l}qCUgR)I>>mS*yj8r{d(IJ3A8I%H^u+`$pfEJSgJHB+c-gJ__l5!@t=>{3UQ%JM!dc?b@sBDYfG*>Bf6y8R6#NUX}*qc9}y%PU4;w)o_Qq zLVG=Hah-vic&9IVJSRn8^awG+aE_D79AfSlFY!K zUvjn-CgL@vC_HXp0|S@dHq}uZX}Eq0mBimB5R8t;YrBY)PeM8)2Ge`M{QZaRM%|7) z4LjF&0}2*XM9PniQVVT=(>4XA=K3w4qYlMB$fJxIqiDtT69Ef)EdjvQWbx0t|{cP zVa}8Yuw3&bmi%1QbQ%=S%1)pBEw(l9w$&l{B%pV&qy&C$_UoQqX&d7?aOWHGlI<8S zRDVCOjxB_|aG&bNK_Q6N=lem(*>Z407UDGIrdIZZRHySv7=fKBCU(QgE}Y1W;Y8jh zP?(>;_eG!chFeqqVy|;vldWbmH}<9YsjwG3U<3!%s=~VF z(1ojtwOh4OWlRotrGocZniD!R$ee|+fjh9ki~>shi*IMxT#%d5%mS5NgGQVb^XnZ0 zYE57m*?Bl;LG=!8$$0I+35}G;9Ddds;ygLSvwwe~0>Dr>V{iF6v-fp9)j zo#!faY4gqjvxfs}#F(s%voGS(6k!WEtd~R6;!ic{5B)J5PXc?V-082txJfayCDO9x zm00w!TBhThkpLSOajn^5I-YsoUTNQLHsXyeGd6 z9C=2pB2J^5hlcIn|5yRlv`T(I;6GJgtC!?m(%i5rZX(SFzVBoY)677p|Sd8$MMN{-6QO?S`{2h^#FfqJ;05cFF$AAtZZH|5E-Hp?kpA2~82XskO}q zUdSn?#}Vx&upFya`r}$>7(Pw#I~S}`w6aMM>Px*EGxliB+zswWa^}DNRC82MEPyJm z@R42IGkLn1#2*|A7mB>VPWprb&`!2;S)QfPY+?cELbN&m&HO}dc1c4&D%dvg$aa#nrtJf1;( z3N&Ar?YX3_>8FaI+BA?c`*rGEM#Sl^CkfwBotRmT#_4XTX^y+)94fX0D|F^eU$9Yq zLc2phSQnV56^HMsCXtMNoPgJN#M}xV2+ECEjLsmw&f26V|1tTJ@#7Xi zV*m{F#k~Sp3M=*PCb%u=MYlUu{F3%|U{rr+{C1uJl1EI;(`$$TVzJ6Ba3eHujlwYk zJ+u63wp^d(n-3RgHA;S?lqduefTO8F}BnVNg4k*e6ZELYGG9+v)ufE_x{wQ!mHw zTzUQj0shO3p0kxBqzw^ao(o?4g~>pq#&+G7`kNF@!kKmqx)B}27Z)Y{d>f_dCgGFN z(is8?b1~QtOaP;H=BN#V2kIyajz!Lg^Y;3*mFy6M!}=vW`1grj`nHa*G=u;Al7Yd@ zb9{p58S``jR}3lh63+Y6yC=%$$g1DFUy>kz`)oM;3wr5An}r{a@&rFt{xswwH=Z=* zg+u@R&$_-6f1Ud-zB>b>t!S<_&pz~8!TOn3FY16rM@54t*ut1rafV#XL@&3ul7odS zmYqW;tKaTRgl@mQx=#>31sq76p{{A!eq4a!N)u8#2j%QhG=Dhw#&8%H zr;^ii=dj=BZi$&Bkztn%4ZnP^5VbcTOlx5sEGzyNQbQp#vlXd>qO-7+-`~;;N#aqIOjysKAJC=(aR74q@56zIQI=-U`CTj_M{82sfRft0fX8-R61eJv~qOzVt! 
z@49YRV%*ol4Ek1;%uXnh0}j6I28==k}`Zm_D0O70YWu+VbOH;PgEE)ME=Sgjq7D zPVZf6@0#_wn=mgfv1RiCscB;JGAv>>P+8Ml&+nb^@pddQ@!RrpHWs81@BymWo06dD z{qJN|^uc^XEl;nY&LV>bzIM|CWf(ASm)&lZ0)p)SK9B->!B3;WCjwf7|DmUk00R0c zfIt7YM^gWf@xL(rFO2`QwExEVq~cl8T^G*T&gqpM@XidREUzI~CG#%y{{U%9 BGsge` literal 0 HcmV?d00001 diff --git a/04_Spurerkennung/images_input/90_0.png b/04_Spurerkennung/images_input/90_0.png new file mode 100644 index 0000000000000000000000000000000000000000..0f212ba3a6b2904bd7b7b9392a6fd91fdcea5cb9 GIT binary patch literal 8780 zcmb_?cQjn@*Y9C;1|fR1=)DWkjp&T1qZ33By@qIsmPB;XTcVfI$>=SJ9%V#Nf(U|$ zGNQyi^L_7K_g%mD_rCw!`^R2qtvP2u&wlpapZ(eUS@Xg`Ujs_QNCE%=R7+FM5CCur zK~W|m0Dn36DPIB~#9o@_z5qZ9on1R6~Pd`uJN1k46S}+)!myhQ| zm&Z;35VV+QfiX7`foG&Ljfe^7C-2ztOj`$r|i8*OClj} zLNhaA7TyUhI)R(g_PF-h%EEn5vKYi8B`LlfjR8PmG7WgzqpP3^a^%E>bV3WctCAVLwrtp2>n!a}YwkCK1 zQCyP(>Rc6ZLXw5btsNkzIy{sJzn*Ux0QlYR**nWi1c`X`{Oe@k^?}lrMj;mv@lY$p z8vtz7IE9Tz8kL5K06?uUlD|%c>A06(u!rDQFaB~b>7|`?lq%;ypDLv)iDSeqA3MJ4 zXR5q0<8{pZb|SON%#uC#AH}44kqh^kyeC)iqByrF%J1dvh$O~S8788(XJ3rNdGHd! zkw8S36@A9utBFVTij(bQ+|B#9b&{nt#SC8>vzcfz|5ot9m5sGhZ%dXN2E?CT#&l?i zrA0Q}SC}PhhM~&uFh;4rC7p8g6U$DdEzFtnX<}4}moD0xs(M7Rn5YOF67BgxPbe|) zSX6khU$tIFkT}(0wElKAS=At8ooO|s{l$sHWPKnWRph-MVrjfURW^GT6-|ULLjSX7 zIomBx$s3ErcZeZT_xpHmr6FyGO`lfoGawYV=JA@J&l!$!_2Q<5SaJt_QPSt_Y3Q7 z*E#pj9#ckx~QP_2Uaq1NhrBubA6kDO6Sz$-*SGCCnNPkJ2705w2%? 
zpXK`L6L0aBWgLaQT)la~_l=LKR8kiu=49no`6lBHqDr&sewD^z~XW8nL z&aLxXp(0ntSMKL!CmLi3f<`hbbFRA&x(~kNCg5i0*3sM1+s`)CLzVKC%IV7K4vmkEza2+sN#!Vr z&5Lj4Y~{@7thL%%1X&2Sl()Qh(rVbSC^g@2>F|iQkTowsm{?dhs@LSzaG~34+RJW~ zq4ZPrtMbv#{dI>eA6qI~8R|Aa6D%QH+*?BJogdISw|+qO@27UAhPH*izEU8B#x9Uo zFDwz`s@<2~ciE3!E*O(6;LA6c@SW-( zhJCre`ALbiAYIgLtvU=2gGV|yZ*hn?h$NfU%-6M+1Xg0~59kz66@3cU71!nOtctIi z+m7AoX&r7Ih4*{r+eWX!!a1%TuY<2Y0c8=PAia-l~=CdZ{hWJI>!928>oaMK}EY;PI+ege9Huqg!KA zHNLqJNsDUf4`U17d49qMXD73V-QzDtWZzWf3yklLHI6JL-N`*DxZb1cqxzcCQWB$M zCd7>R-dXJ`+aT}#WQk;*245h?l%QJv{mT}9_c?ggo>>Mmtdl!f&n?oXcjQ@bGmhIX zsR_wubm;E#{GK%&A!5GPY=0h;$D~N};wAFt5?uyO_-EebpiQP&&ynt|<+P@XssWVa z0+`~Di3$2Srj1uxi z<@VW|3wP@peRuD@A;>i3ijaf6Wgay<$2_fIY?z1^*wr8(V(t2ycMvE9m4=$U&K>;5 z;idLuV;e)fhGBu;T?~j1ss2hh!y3Ts%q-%nn*BRlJ;x~f!t#M-8+=Uh6MAPg_)=qu zl_FPC-u#L7k?}{D#qDP^sm{MAH#Zk^h@I10QP7ZO=xctlUuh!JW+^4>8+J`FAE( zjysB^RgQlS#00eUZK*VzZ_;U?D zw*kOk6aaSY0N`#W0ML1+*nUt4pMhFxD#k&JKUdrmIqw9Qe$9y=9;;N;YzUBY_)O4& zIJ>@P4_}lLg(Rxe`&a2R5c()3RXeIXn`E>(J*o4zRld!Co1-x@DUQcBrI$!qJ*9Zf z{Fq8Jg^W!r1&8w=5s%>1!U{nWr&jmi53Z!Jsx_aYF7c!tZ_mb5GPdnNckLn%I+53o zfL6Ev%nR#H-_LlHZL;CWxhZ2zYKB3}ZIQ@ht{6sX_oZj${-~4wD%5^iaF}!iCqy&2 zBE0&mx5@MGv29A|tNN-ii|AB7YcGZ~R8_qAFQb*!`MuW}2!f>IGXDIoQTKM%J2e^IO6zQnoRc_-WEK_kZAOgD-G?brB3KG16XJI zDWEYH(SGd}QQWzOY!JhFfl`cR23VkHe z432KuZ4O29Yl&2sDq^fBo~6b*&|(;X=<;Ib{O< zc(>o-qRqH>gQqgPqayS7cZpR}P64)$zwEy>1^ee@Nzbpj8-;eStj(~In}S2IXl1G=H80G|E;+h~+MznREn4mWuCdGpJW*c>WZ z%+zPv&62G-M4PFHP90+)&F)Ncu2ptQUW%u_8<$U16p@zLL{a|X$*5;tcYFblR=*Bg zG}hgX<{$Z$jKkUKgUQ*ut{C4mzrXSqB(7pKGo-P z_{qN!YW?Lgq&)QRwB+L|76#>%{JF$GY9rMxQzn{(ilMyB)>F&Air;7W^G_X(!L&Gj zOf#EEe4=w$OsX;sH~hgPaGYqRNswey^X2+#+VfNV;+1m!g*#naVb)pGu3iIf%JE$K zebNjxbbd|ZrZi0==<4n6v4A|==NjP4u+~b=6q+Q??Sq6#ND3#oKj?#h1O#9TRKY~^IZxlh)Paix6Ye4rZ#mW2<4$;aXFxF$ef>b78>2L&#Lg;IvpOICI zK>^mVk&|W!mFO&>O^^icYs)|Dy4B{?BLE_O+`&+ zyTNj5pPl0%KOeYji`5|oE{NK5?LX>PJS{a*1C8Fkcl+0*lwVhgU;ULQ8+Q=l0DgdA zPX*I{zBqv&<~20*xB=YoI7PkTz{dS&nL9@vMUuqE-C93lO0-WH-s(QC_k?bm 
z$6fHi$JM)xcRk%4o{-+ceVUK*Fr~{BV^JG)_^#i?Q1! z75X4*g`}Omj+D~DaX5?KAuB40+JUIrjelKPfNXs@n>GeKD7Y?e4e1^$ugL^!WIu^# zgqYJR9=+-VA{?JeoHPcX2j)7EoCwePXufpUA2?^!PSu*hyC*I4l4(dWgARF za}Ka!g^#3pG`1A$w=XnlHM0tB!R2Nbi%usf;o(dFtgrx(__xoRS?;l+lRok_rf7Y= zsVJ=$voqx%g8s_s$;!&EAEmF4dYa z3z}5E@F=7qh`qA&EdTLD2Q9aMVt@P?81D%;VCnVh|FcfNNJ4xy`*XI_V`uBy6t?9& za@Ttm0w(12t0s^44l7D%{zrw&h37JSkuzroq;)T*@dJPIvtj*yn^i_g?Hss7Tr2vt z)%%sJ(Ci+nF+j9Wj)vL}d?T^dp#IbVCCxoM=(W$AdP5cSz}JRJ$<;yWRfZAn#E>3W zXWgeuNSTIMe41EMSVi8_V%_4Y^HZ|tj`*CRCCer-Iem$57Whp*!8b7j+cs=krpnWv zixG=w@bc~-RI9rjpp^%l0Z@{D4J;{!Un?#DUwZtX5%zCjiCyB~K=VK0`Iw2R6K~p# z=)~FTXV~=SIc8%s8H7nId7i=mJ;UIz4EY8nkyrzh2HM~0qr^}wF|8eesYs4r1)r11 z^W#4BZy_)IrxZWi=8L+N(6lGM(0m3N-0&)zPs~XjXyIvQwQXTRqY0wACO^2}$ z7CD-b4=fFPEcBXu`MaNA<@aTOABhLYQw-AL0uvY0NPmsK$ur`cs)H;9f?ZwGKGIPN14w1%2p!PIvkZ-JrRf6?Pm-|!QN*bSg)9Z{9TSw3Kx$+KS z+L`-h8rJq0Vw1gZExdMM5Mi{Vf2(x8aUciwK=IOh!R1Qx>cj2F^y-N!cu~gAw0LKe z2jPOf{`m4e`|J5n%!y8HitOq4!a!2aP!1jpu5=yFja65SroizTi2_i0t+^A#MavvF zA4DJoz=i~*=lryts^}3i7n{-g<`~`TtpVk1U)kDF55BE?Y{g@Dd4|&}J`*JUQ#h=r zW>1z4N1R^dg+jAXrmE}(s#T#e4SA(6awY5|)Ins-h@ zJa|?7VsSVj!#Zr8J=V|+>zX(Z*NCS2BIdFovFf_zTCTJNsbZEL=Y$haR#P4n=iB24 zdtzjs5v;C1o@!*pdzJUjdgyo73hg5CrhjcEpS;gSM@mFsN%TWPZYjccekyYM1pQ{i zMQN9%4bZMz6)qE&!NR%N*8Qzt=bXt67FOMPbi5mlK?oVfedX^gekQie-A0*;6OPbN z8C-kN7z9Fg9PAW=7K(+2Kn!0CS&{1hcij6gApak@7h56zQ`h}(D#L$}c7qnqy9||J z9Ltx-_i#o1iW;lJTOBmm=8qP;u!g$Ap!ohAkSGk0WnHXxz+dX8BY+fqp*Z{3OvA72|Ek&fcUDGeD(;J};Rym6nlcOtvNNWU$ z{o*0Jg}FKzKN>jR>fE|}xGm;lq^$Nano*wOBv(Lx#T#rc!LPs;Zw`J97VNiSs(sQu zgN%|eX!d6llV96G$sl>wE@PzVS7-OS-A}JeVunmsg#B~wL^h%Ku4{1oepVKfT^eGP zfK1`Fo#xij5PaC(QPyBh(w0_o;a(?!zt<3Et zyz`Tot@xG0bR)XFnI>?7+T|$|$$Gf}m&=DCKMlG3Djw_zuvM{vK1`on5Nr)>&U!lJ zLiPv<`|_05s_U0R4KD6kT}4pW-l|TTx!((M9sOX18olXvb${e+${*tvBtWc#4^Ri4 ze)%Rz)gV14;Oxm11gyZj_dRu?bYU}_9_`i*WhQzK7zOvoj}Cv2l4v$iwQ(M8rs6A_ zZE^~*W7~71RXpV|ZZ^Zfu(qitpTNtui)x(%5XaLWNVq&JbKO6r77!sbeujT^<^nU8 z262bRpvv^kd7$N0@b}B_)5mI4O&`?fk+*je$E~?eq^v{0jJXOg##jH*l6AzSF0zMN 
za(@qYnyJ-cprc&bz2@;Jl~GD%1;JO5GZtQ6*+Lp-?3*4J)LU-7S2Y)bruuM$bTYP& zl)=?Ori#oIt^$^2<4;1<2dL90OI|**B*s>K?G@j@Qz%oM`-!Zw-8laHe@jWfR)7)CiubA%4;a>bXmK-XuCxmK(KZLFpfac> zm70vT)28>__MEDQ%C-9|OWT?sj81pEPX|kVCN%ezPG=?`E+sHTCL*vQ)e7gaf?EE6 z2XgHKI5v0f(^-W;R}MLnI^?l2Ln9-Yy~@A(c0ReLvm@ZP!>1L=0wZSYQ2zS6H8h`D zl#DA0P>sm7UxDh*XzOxC>{rJX!|~Wc7>c*9jA{Z_(Uj-&i$SZiY_8oK7Q>eaCbV8n z*x%(ncu!4+F*qpLEE)uyG}qeS+-Whnw>Q5`Vg*eyBrbLkR*{;YwD>2k=8%$*y-;W( z25h;jE-r#63?SwHy(Qj`)@G|4V@-Xq2-yr*UVycu3ETK1Bgv#5S)4SI)=h49Do}F7 zi&3evMJa2)Sn<#xisW)0><~?WxL7QFjlu)dA^#95op5a~kbiB0=_W)d^a&woPFw@6IKM!B8l4kT& z3c}Y%4}ryB9yL+z{2@LIQm7La*rh}m#?Wh~g+!E|d*X>SUPkh4r%ajfq1*)h%gb&} zmL%3tRN9Q7mYLF~7q%`>coqnXo%@|PnpQ1$aKC#q*=^obbqn>>sw>td*+h^<=1z9o zf+aWf!4L3aqw-@Klb%FbI=Sj_LBPzWu6CJn+f6TI97yofJQiL}huJM;@SL1|`#h7c9k6;P2sx5(L@R5jiE+}|r{QpJKLEya+g2>Ra{U|UwD~sL$l!{d zt>0&yP3_D_F+xulUt@v7(tP1hE~Vz-%J$kn#InDvX0N4cDZq2m`l`Jq$}49yP;qwt z83qRguAd6&8(LZ;I$Z%TTNsZo<0P^5hglZ2`BH1vUAM}$KuPQ^PTL03L&=MAXyLwT zFH2;2#f6zv(CNt~+8UD~T+aPPY3H4{TEhJ*c`<)GDDy0t_oMAt5K@>a9x>=6WcmZ8Fr<*i^5OyxZm|X48 zx2E%BFC1i*aJmipG^gL8pRjGq##)@*`f|P}HQFA%;t>_<*u9iIKOm}Bx=zwF*TJ(j z)-R&*-Q{-ZuwWqRt!M^L}5s&uS zB~ANAG15q7+Bba?(Hcvo_?6__1(i(uoeL^?m?OQqDhb#IXG#pMC(}bH?7xjKx$Unj zpa|Zqcw_zDb?!nAmRS>jBaFT=Y75S?S1bAW{@y(b|-w`mr^val%o423v zy7g=VsMUybukgtZ!OaeuH~vm{g}L5y|Fb+iYsZZsjG#;CYriEzi%XMsBve&SfA__# z-3TU2o@W~NjA7b6hT`PHDgoQWPR2~R6Em~8x;=@|6=rWhAF;Ul-`uTd*xx_UG*UPq zPZ)9_u4|D@1YsL29kK6Px9rF67fw>}o+5rdAy-0{=Qr+d1J%Ad;`?uu91-j7w7}e&}h6Kox&|{LiWy0@?hwD}+O4@4_DIQ(bwGmZ-A9nX?Z^yJ3 zEq6nlmbcmn44JN7;Og#h)y~@R@anb0UL8Jgh^|d{CF)(*l`&;8Qw{1crx_WDDlyd? 
zI+u0s)S~+%&$d0^l6gAg{>9rrYRJG9LlIi7;+T;Gu72$lO5^~|Htrv)XTc)$>dHsV q@W0$t|NBuKRR4b`cbgyAP@u)605huj8vI)b0BGIQS3|1WJ^NoA1|+ip literal 0 HcmV?d00001 diff --git a/04_Spurerkennung/images_input/90_L.png b/04_Spurerkennung/images_input/90_L.png new file mode 100644 index 0000000000000000000000000000000000000000..d598807e213f117b340051636963f3064d2670e4 GIT binary patch literal 8769 zcmbVw1yoe;yY3!ALQ+9MN|0_*x*LY>4iRYvq@+_y>7k@Sx{(+phEhO4h6Wi@X%LVW zsk`yq|5^Wa*7@Cg?wz&X^{ut{?7hD?p7(j)?~SIqBEcQXI{*L>K$YaQ0RWN*niLKe zc;$~<=`6V5zEm=D2LL>h8-oBDSrhuvh5c$Gt9ydOL9b-LJrLs_k@b=&EsAV!iS(yM7qE69+$4>8Tcf zFn*Z~?cJr3uW(C+++lnP5aZOppm)DyfB?S(#l$!sbz-*z5btq39H3Jzv-2@^ z(9v`umW)M1YROI!X?(69p=V_20?^hK~VK8<%O`d$mQE+x*QFq;*>%zNuCJ-jW__1U_M+`XS< zJ_vE*CRq)BcvOjB(MwsYUy0HB=GbcZgBRw#5cy7A0ZcC$I!jt0e0bkvM| zchR_lxENtt-7JriN>v(|Tkf8Hrg;8@H_}jvlVR#!JP8l6UzF@;LB@M;WS_7!C)|R5 zDAXOhH>NP=tj(~=S(eGCM9GlUkAC4;hb^4UNyqUed?kD3)5_BosulVJlRMeZrCbYt z>-=EV9&k{8uu8njw~A%`?uoQsL5B8xX}<1V{&-`Vs(gfYjaFnS37=<%@X%{As7g`Q z`;>7?J94{I`#QwCZUi24jPpiW_Mg7)M_C1QGXN_eI&VKFFlNrm%hBMN#uk zwfMn!^n*U#$AY}u@7B|a(p~fn*%9pi_Y(*1q$sB_r%c*Fu z>jYFQ=sne0F2kKjt#QYdhkv=KrllHIC{<`xC~hMKs&`7Vi;?oJM&}`f z(tXM-rYwrK@u=I`>pC+!^Cfpm$kfC|j;p!L9gC>)TjjhuoJ)4!<+Pgz=cu}rEchVT z|4;|;TX7{yZ%|1!V7FniPrhN}fY#tRW zm{f>WAd0c?HC<+0hV{aF3z)H(>6um3w$yerwAGr6*@{I~MOFKTz7D+~no8%-6z83O zvYxq~Ii0!K^vu}Xm=jTo_-GBSTQM#++C{WEhZ_qU6&dOoo7F2+WmhpxwN|y3+%0KV zPg1YQnX>&Y1jJs7V-rNq5LVC_ z(*(LVxRHwJi!5ZIvUVi{dX7>zd29`biHE;ET}F@$vVFGC*2x@Kv?aG~c7J^s75zpf zbdWnuf;*F{cm4}1@rez^?sn_od@Qt3p@1s_5;${7k5#KV5%^N<`TchHD0HOpRb$u31KZ3dCm#j|?}6kc@j7 zWk@;7o5t@oIw$MCDECJ$Rc;~EmRCc#hD({BU80=ZK@2U}!q+0%%WY%ybI6AM-)Y<7;w=T zt|2QOgzLmyeP-`0-@nZfp^sH5_VF!(9X6>^v15={9f)N1RdWn6?-~g0YJ@m$Y_$9nkxeCW|IOQK(pmDf`$6AW=e$>`B3%YL(&v&J$|^oLzZkX| zv)No*;L0%}Of64aOCxk9m_auXEb2Gx9G}R~@}v#1&bGcr$Tk;rh<$9cF6l=NiAw4g z>K^I-(G}Pon{Tw7{_Q-vOXb&g!uNA>)}-z9Qlh_oRu7`5Hfb|yeGWaZKb5S0Q@;5- z^&Dnar4Do5>BAVqxWr~4L`*kNOwx>r>uSeOacnCR_C0LB%HH$hA`l?Zi_Pl&!SGVf 
zXJzvgckz^#Km`3N#;@`__V~kBG`2K64l)@B848&?8RsU?O4w?ZvC5V`EN5PN(dlfRB$7i|+o znI3%fIH;RlB{BGDkWnLI0o(G}61z$o*XV1_YR#x_Hg#;?^+h)N*_>`t;q{-FXx4pR_ zhfc4&!_hg#Q>^gU!&iyN@kFw>g?~u8U2a~K$PT|4X3inMdVe)o zO-{xW_Nw!Po`DX;mF{`pRq*PIv4Ucn-f z#)>~(5bL$w*@t7Dk&^SI2T4iSm>BO4FRY$&-RMm^DbKWt2?qhn-zN~h+N|Q_aJT#P1iY(gynkU(y z6~g>Y6>AyMy8YI>Ad}WDJpq|(Ha8$O7XeE1prK9 zAQ-sy*N8z;bz`@1fMARFe(0HvpNULNvnYENKne($tXXps-micFkfWlGw=NmOzIVl3 zKU~|v>}roc*Wdz}BQT5(DN|#^iy4WlZp8IeccDv$rszH$7v(UO3GGfZI>DJt&5oFa6jyZ2nv?MBCIRW4PQoB$#D`!<^vUlwsd1Y{(4>Q`ikT^&)b@0NOgSCIlYX z=O_g1V(JDlSGtWmT>L;juSdev^3OPdPCO^6!R^`#XVBG$M(- zi+A(r@vOt$N4q&v{c5S)nWT}6z*4wi9@+tAwg@{DntpabQ^m|fsbk(2;U4n$mhX0$ z@9~z%Q&*noEx)gK!8Ix)8-MS>y7~UNuoUWrb(0+ADQ_3Mo!t%0NBJP z|92sLY=B27*kN4Qn9hH~e}9B-Q*p?EyVRiH`BQ=8Ldn8ji8&s(N=w~3QASNzXTBhq zUv)}#0@@C{CoNWHe>`GX7Dw|u6kEZNjesE&@vO9+@Q-amJZO|2&mq!H^ZZ$Ae)~Lc z{6lVm`X2e`ofb$imIoEi4z!>QH}%zJ^@v@rMszRfa06>MHel`z?2r#$e~7o3{S&)Z z@x0Zq{dxB7)5G^?{@tM~R#Rlx8EUNB9 zzy$#Ov4$lf{}qropp*h2e+`EY0B#I|3GN+iA)tXM{s$!g>%p4`+N*cV{1f-M2&Unt z_480lrX8mTOx95BKvD1|5>261jIf`BM?>U;CmT#m(5_popes_qr7;SPno8Z{jM69L z%ic-i3|kPj|2noHv!R4@(+wOYKT2%nH#Mf|8y!e*Ap`jLNUbIYR02?A$^3-}8R#f* zYa<2T(4dA9p9;6AoerimMkIWNrEHqfUIM?#-~mQU>dy!EBVqzaA&d7wB=`Aml>1D4 z$Za`->#GkK4G3tsxWS{XgLpRUybi3U^4#7L-sHy7G2h+(VIZ6RPN7$Gm?aze3;NFABfc6vv6(9eRIex(p4-1BZg zNHbYxnynamlQ}6sTt+RM4r{L?L+4;4LbKCc3wSp^u~kqDRHRLmv1ID^#}%wzY(g4a$|OiG zE(8g1z{p@@e3@~pQ$(UeniBfiIM_Lwg<~A(!uguAlh;0Z86DD#1D)fiYBpVsT+D*; z{gfl5^`W=p>ZSAFx#N~l8VYl8bw^h`0xQR(WK?F^E_OBVY-{TH#YDnjpneb|R0sZ? 
z+dv`;jx}pri~St~OxtUSZeNIkpOB+uW6z5eNz?Js7`s4!)TbN@>M$-Wf3cH4@M($cG#le19VjJ`;R zU;_5Yr8qT=TOjSkyA_?u6khEU8#{>@KrO0~DHd(+^?U=eKSO&F;9^%~Q2B z?)y(&W`>Vz`8|Q27o~BRsuyvv&Kik6egv0SoINmXdQebh`A%Y$^D16xapspCBX0pFtwkBKJo{1bqiQhAtU%t znEhX%24eGHFuTF%zv1)0JoxWS>u))qbgqDUJ}P)OeP=J^AwHO29vTsje!rR#A5SyQ zSL!IgW3v*9f?}~PZ;C?b7%SVsGNjXeEhBIrCUIF+7+biROSTy++1CN6T zA`}{ul7G{K9r4J}d`y2VJbZ}bTp$gel{9695*1!g)*hSPjg7Dzc^RN6Ljo%MUehdgcE)cuD^5w4moZ%$#8`L4!>jdrL4AZdV$>z!S@2}$8ACH(4Yl^nEBo^azzq&>Ft1saP0vJ*$4+fG zo{aCFAoaJ^KLx(O4Hk~aFpKZd(|HF97|iIk{VX3H6m1i}!MDm-L+8cNyiz2ntqu*G zhoiz)WGKNU>IH(FnM~%tVFLvn_R@xtfYN=z*{Rp z=9k8PgE7#VWH053_N@KAT{u@V`?!VR5tMu8>n)bcMK8(JCA5-=(q&{yuRXlpJzQo( z=mHDi%k%r(JjuGv$9F(2;ij!_gn~Bj3J#qM8KCaEEkGA2i6NNapG2AbCNv)u@%7em z8GrGf?Gr8HO}pIMF0RW>ThZ31PD<7*eok`Jv*y_ZrLCsjRoK#0epYh6P;ivt9?sXZ zD5#^3kQTOWrdyTHC{6^6`ql=mB3@e8)j|a|Jkx2`yu?cSRq5`I7p|s%;({Q&e#oUd#m-S3vE*Xn!5kQl37Mm}Zx5AOZct9AWs$Y3l1J zH%&(8=~O!HFB-wBc5W!MX-Dr_J59ag;;fGi3=cFR!)%cRpTZ0Gz&=u@s$gjHcrr=n z^hC{zDWQ8&v}c*m0y_@$iE^ZXhF{~ug7JpZ>WB%FQ6sirmP88rR&d-OC+-&n^qHJW zmk>Flch}iP)u85q0hK0_;53#i$l(?~7`jXLnT)T-*7&U778@b_EO`r9Qy~wDfSjk! 
z%UHl~X0dC`{|{{cZx#IyO@Z#8Nc+0H=G(psq_ZQP=_Ut6|>Q?RY54%zeVAopOPE-FO zaEu#ACwfUTI_)##T#QjqzSIRA+nukjiCdBAgWhcj82aKxdya#sHb5Y&Yn+fU30VCb zGBNB~+k&%~rtcvjh#K(o5tj`?=f!s$N$#aWri?}~@hBG?*f+=bAN`erdz=*9PG@O7 ze{wm+1Q1zH>Et1#u(J*_slH-RV?2aiVsvnWCFOvAV{);xcV;ql zLmH+x)z-oIz$CYhFeg5onW)uHRM1e96WBGZW6iq!&3mEzp%E%78XU)}O0YDX78v-Y zlgZ-UVK;Jg^L6;ZDCMw)r|V}P$@p1Vb(KxPL|#)zcTx$9kH&{tY1hYTo8%z;v4 zVl#FF4LDGZ7LN1gulg2Vf-+=6SnH_%>A>cTsDn3S()+}=*^=M zYLJUt1QVW<2ziFeOqDdPv{&qNvak?K0KcdwuD|C7qr)7SX)|K*)gBi;CAtyR^^{to zC*vp^leh=ax#$0(s4t%91l{@{>g5{(|38!Xzl3(%vw0oW&wp@+P6`S?@7)IbS{fp8 zLdDqRw-fnrP%@t~C8Io)q=N>m>|BO#Y2Mad<0^OZG2x8DEUTP75_S~t{n;T61$QBx z^=pi)F8f_UwFq~6`Xk%(C*)*u{0|(zT-aG0q0z&Ona9#&1RYGEj-fs8LW=8?DWFmt z?@xNfzy+k0%P_}y)AA^H8u{pN2$@uuum*|vvdhEH{aOAh6Jm-i+|&ASPd=kf_zKanJ~* zNs6nc)oW*9Y?^ku%HM=iLhE%w-1NlYuL&q_6Zs=G_@I&GM>jieib*?@Bs?#HE?6MHcWxo45HS_B_ZLjHfsIWt@s)_xzRp>YRTY45||_*9`pC0Wb)8+ zitN!enNl{F3#b291Key~geSPxY}}rvR(2`nt>Q?=3-m()>z) zGP#Jo{!$pxnTL#5^sB?}4ah#|p{ zmS<3;+yki+?*luBtk&EZSMs zAFqH0EJ8VI^cFe#3W?hytdbdFy(w-e5q~iUYOJD)N_#=5VxZg&f*lz)o2GotRnW#1 z$7HJJ2VH~0C3$(46a$t`T4Sh;VIubLhZ?R*gg{PyO%RSVEVl*Gr|(A9-@zR+Zc(ZH z3$un;q!L=CF_9>6a+e>dB8$0hb8Xvnk zmhJgj?=eC-_V{B9L_8^b^IT&nBc!|hKz-b=NZ4^RvjrQRe4G$FIW$(@hJJd;yt7TA z$-2W*!YP=N{&Mm1=%Gl9R^T$}$|}N~mpMGvKn(s@&RRPT_07~}PPevC6BpO*7CBE2 zik7^CC&O2Yh(p_m$~{ZsNZ4NVr~=?Tba>3&L&D|nYf@dgQ&~7?MCW=3x zDsQ2VHIT6qn%0)K%{c8-l;vVV0)VGQrzO_es#mX2{i1>{UfOS=H%*UmU47wEQK5ZC zvilTY?0|5Z_R4Qw@K{o$kDseZfClO_K-s4yvhgZZ=0u%MQJAH@ptzl zwYXJg*2k8a{|-f{es-1^eMP(?UE|mv)Q^Od9XqXAtVJQmwTq5f#dQ|R#T5M1CHG7* zBpE?&@Oto*SJ{_m6NdZHOj)Z8v%kd5vGqg4r>R5=7^*E~NAJKHX|`LK~d z!iODhTP^0owyRmmENtpp&ePCwL-VGjxs(3;?&;g|lMn#>LFLuus%4&q{x96SlbHYj literal 0 HcmV?d00001 diff --git a/04_Spurerkennung/images_input/90_R.png b/04_Spurerkennung/images_input/90_R.png new file mode 100644 index 0000000000000000000000000000000000000000..3526f3ebb765ffd8f77c11c3824e8198ec4e15d0 GIT binary patch literal 8835 zcmb_=2QXZ3+xO9;MMww<5kway5^eR8V2R#)MD*S}OO%8JQ4_3^h*ei#b+rUR^uAg| 
z3xcp%C3w&N=Xt()=b3run|Z%)XYM&WXYO`g_w_61eyy#kLPfzy0RR9MRQ0hg01)JX zO`hxu_~z8BXcl~sd#IXt0{|s0z7YVK*|z|ILd#J}Nn6{&-N)VA!QF!us-(o~;pJ}U z=wb^10T{TRy}sTCy$o*QKvDe_ELGiI_a+&uu3|il?j08&8##^IE4KWZn>roWpFAOE z`j+>KC?Ns%=B5t+)i{c2(iOIk2`@jrit9UD3jX9g*@9aexTu?wT`$AJYlcYM$*!iW zKGG3|T`g9;d;Q1Dp0C|YD}wS76dWD^4Oz_#Ht%zG0^m4ITKfLIcG6aWAYhD=3~1NL zYJYGi_L6#2@q;-5EQ+Ar=beN)8H^5)4@yuh1mu+oVDR@`dO#jA@T}kF`7)p_06gOk zUmF8p@Z%4`1b|s8+f9PJcK|D`?W@OtgA`CUWEl4ZFn$2g*{Oep0Mi1%1E`UmDo|Mu zv=7oyRs&>Lfd@LTqIm)0VBpy|cJ=@u>H|Qhyl*7EcOOZ!&Ie}dL$!1x2joeF8JU15 zsgaT3U7kTG!+kn2YeMTxdBM)W54Rsih*1B)eg}ZOcQ?UkvF8DU*N}sQVhN~gRsx%C zS1#{8f4+RNI#lW*4**L(VIvp(LN$yqsVgwoi;tW;#158}dA_)KyJ~9rIv{TYV}$j< z=jKU%Qsd0b>dMNr+E+z0>pr8<3;Ry1cB6BblQ8MOxWgZ7e5lfBTg?9VBxdlz*RzMJaN8?Np>uq<8_lrf@W5zoQe`-)EIp87jcRbkyYeO3ci=6xA2LP)H+a??AhZ@|OIzcDQ; z!l@C}Ix^!|>y#Rc#2KTMKT;0A@Da{Tq|eJ5_Nrx+c_Ws;K3wWRjY*VM>Uq%q>lW#w zuPzS+yT2$_N$``W*z{N3E4y0S%~)wzM%4WJ&}OL0pZG?^lXh}3Vt++e>$?i7#!rnk zr&Wts?{J7*$B>JY6GiECa^FiSQm^G|x_+@-MH8X~dM*BBZer5b3I`F?r6?5}t&ntVL+gZ0r zZ`t4Wzg6-6Y2od$_}e}D55$GH-mib4{@`w4%!lF&x%sY-B26ugD~*XyNWZwGuw;0w`&6U*_^3LV|*h-zj9R#%!K&yHNQV+ZjP5JDb9x|JyLsQRHKVc zwUZ-Ve(!ml?W0M)&V$InhQ%$=?ehNQWhxYT+9+&m?p%_dk#1b7TB=j3vV|5yZX@7@ z=qJr4XA##FdepehxmD~F2T9=TdNX?S2nqzfhK%GPQn1ANGgE%^WB)cc#LoMiR!dmU zQ+LEdAbS1uPN>ki{<-TZ;!x$P@s*mZ99L$q4&IXP67_VKW)|w1-P2LB_#&_L**eWS zV=3xgz|nhnDV2FPz|=w>J`&MlO0rxTeK;}AXsYQ))IlP>6 zvbnq&aUId9nW9;mGim>&au3yuDz3j>xjKDi7Kw62g<9J`XRxpDLVnpvL8pW^g}yzP zA*G6)xmL<-${glh>qRGND7lb1n7tzx+I5h=A!KhnL^Cw~XcL8u9r2YV$WdT z==~D=D*m;4WWQjBtY8*X_xv~Xiki=%r^Qmx=pj6-CaZJ1XtrWUY{zjYb}sk3NG@-V z$s_OKFW;1Y=&X*&QRb#Sa9%76(^Arku&-NZ7qSt0XH-5_S^wF;1ZTa=AbTY1mAfpv zEG@q9aKXgtyLfy3xB7mqFYY;3(Thsq?3XVtgDyt^1dNlwp6GRiLWCLN*>O+^TU&yh z|z8iGB7OfrjYjT*>dd)iiOW0NJ z=-6nZ7`X(GSH_GZ!Wp9eBXdgLi;qtqr$1iEvKQ8ZR0ybv^2wG6I!R;1n?#!Ax&`e_ zzE2p#jiD~;cOZg%NHI5;*2TZu*w~-&pOnkN4DW@$S$G8I4|X0zBj}3-Z}3Z|%Rhak zDUtd|frhV${tf3otv_c1MPMg$r~N&l5wVe1(@N%icQyA_`80x(f4^%H4dm#0^qGIP 
zF^ao5spTov)A=mP9QsT@R@a9zl4#+V$?uXp6-(GOQc(7r_G@;2{bjb%)fZiE@%2J? z(@1-rYm&-HOav%UiiN-QO?jsHNUQB_jqKV6UiU%LOLOiIY=5umn@tkW-p#$-zR`JO zDH-)SM%_q&*?0|I<^-vh_6(e*SiVVeKgRG%nRG)OiqCaIt908a0~vS*)`@je#@C-h3U0q?={PxEVgpGZ(PR6zksC_F*oy zwz%|b%$l{}&SiUBXAKApG$!mOY|o%) zH8?rs>ynM*^fOnBa!psy?H;00qH|LAYpAKl@d@Tp8GYTv$@^O>*Lql5FW|fW0#ssD z25+;wf3kZ#4qVy5kuT!zQb}U`-UOE|k&dzWG21f>IVooDXDVmuWuBQmH*3=RE;};0 zxe)YMWtN3HTSVFxkKi980m^A9@2%gNKF*p3FU){fG))w;5b?qITbHqVZW8 zeR3JUF4ZD6XujXzvtK>CN^8_$lvyEZ<+|y!DSeSLrq$D&-JFSRGNJc572>o^p}$pIS`q7G&aZ`*M&P=q@NG-;yxejZ=Uhz-YIXj?Agxu0+2ax_Eb(NUcPY_#v@BhAL(&M?w|?`6=ghzVR2KH1uMK zD~I9Y<3&G`fnF%=cw%d6aOC|+e#U-AN}FrQ=kv|>t=ko|-JRWcTYCc@30oc*o^5yd z&-pX&ep;l~S_xqd=ecw^sXEjfMx3SGPD!~WCVG$kYx76|?>AX(RCP4~AczwHU@rmS zm0Xk5OKiC{=QuzrP*FyB3z?r8xE>Oe@Fy?XF#h2g6RSjm-oH2iJM~pnZhE zkLbV)VgMi{1LOgMe;OGp0N@(|G0=Xg(vu4SNHn)Z@C#^fj@2b+HZcuQw)tS&Me4dd z0JA!+{GGK{V?nHC-V$CbCNdp%4+a2oPbw>mXBi(KrkY9lD*P;+%k*jQ@qpWE5X7K!n zA^d8Hq#4XnH>yHFLJYOdkXt%{5X&FQVR31zpK^1#WCqu8sN3!qFx8run|gsc$G}EE zUT@^ebC3rB=qMBAD*WlF1~;`Y~~t`do<*{XX0Nd0|xSkgjAb(xwvN0 z&GoFgYpef95n^=ncfCq5vStli=ep}F31zb|ICGS`1N`VBbU3LA=W+qz`p7HyI1KlC z9gOEs^c8I>-aO4uvi<}maeVxmuN*QEG_H{-%gTW2}WBk>bXnTXOkPr?r{p?n2Nhc2(-vJ~Bk5aaKi<-9Hv0yX=U72n zGArq)Z`2|C%jfrc(byS@#o~4oIV;d$Oz9-Vr~w|**-*6dS&*qnVL;&%i^a#pV4Yiz zhgF@sOVPQzUMgTqrgzw5wgiKxf`WYg^lk7C3U#yMv0<6fDN zrK3Dh$=EMiFy<6a%Lw8PdGlOy7{rFr$STMd;2#i#_yz+A|7l?Q0el0a`!|yR8;bEr z#t-@*27-fN;Z+H81b}&>d--h_&c?MsZpYLEhglw8bxCkqSnd~fj4Ea(qS5`@=*(So z0SB)!zzGifqX%)85=Wh5A{}fL#%6+(qlXhU8Eb0mX(W*nQQ!_5{wOraZWh;&2KNW~Wob9*mU8g3%;5-O4vBw&4mmlbH zm11g5bV>`30@5FB#l^N#FbL^--zTA+syM}42xY@F(2%xP{<-q!)xVmfVruotO{u&I|~SACiMSl|_fvn@Wg>AV}-5&q%kM&ULn$sr~`tQ#XuuMWC{|tGo`jyn%FJim)8w z0iw|(nsimYve6QeuaJ$xI z`+E)YelDn=H)4!Y_;Rk}nYD_@6Y094OT9(GhR^ZF+k%R|$+ms9MXnRL-cA8>H%%<1 zg_^UpIwkziCc+W}*=E6EK9}ERVNw4R0R0ELdf9b3nqKvq&GVYE^Om#QhD(J;e=IR+ zIyYEabe}e9)PrlG#VFW**|q5WioDxH+V$R0v{DhQ3(nag_kOW(_+dkE>czcnaZ*Qr zo~7EFJYDkiJ(iRhPwk*rYO!TKS?E#WbR0Uvr{dw=$Do%Pr0^qND1It74c(c%AFH1e 
zY`a@bu)@LV*gW4C`%wH$hZ^HnkI_ohTCAJ#V>}w;Go=BEGDGq-XyDPj)2;bw)Ri%f zF&M`N*Zs!`UhY7WM$%qRp-|4%)RlVpffJv`ufD1q;OQ$GHpXz9jct4u^jR(Xof)J5 zg)enoJhWoi7jwYmo3A(JyV;CrFB(I>G>Fjb3k{<9gkVixNgm+$ z7`+U`T96~*1uw_jW*xG*AkBt6QEs9sG$5aOkbAm`_DNE`m5xo!PSs2I)Gk$ zkk`EypbLyNH9tV#q;^!4|4_~G&1$0q+w!)|0`J?#mWnVpzFlmxd0(7Oa&@1+3!b#g z=7`^xuczw@s{eN3f1&KGC^(dFAor=7IYt3u-^jYEkXZ=27a-*8NT+ROE{+ee!WHaR zF;>MYWq>=%wQ{_pvKlIA?VU8LlzwgX(DRN_%^kcfX3#LawR4C>EX3s}9Qt@WOWsWc zPt?sLuBRYiI^z#SEhwqmlW*WPIJLBMMYW$9JdWAG@X;-SHz<1kFgN+J`2vv_ymlKr zpM%eGg40-o=)g&juO`w9qD4OjQzl7@jXSq8E2S2O^5^^1CP_})Cmdnp+zflpLA4&; zbiL)q8S0#mt?PRcRZgOZmhg2~+3vZay0i=b>6tkdVWtCs#r|V7AroP|X#7icb==G% z8DSWsL@N)CqaF*PaI>x7X*VQw{aS<@~hy6rtX=g z%#^$#AUSIk%UdHHN3=_p;ihb_k^vsq%RrO$KWOa#4;sMx^Z%ym#|(Fo%~lic<9jSO z0p#lxy3FB??e)_+9bFV|A{2knH6)aG-PxA2o;l&_$UZ&=hs)l4;1s!nJ2JN)LWdTf z==Y*P-Ibb6IQr|K{JcgE&nt$mB6WLFTppOh<*f9YtJN;Cdk(w(1ODAyWq2;vt305m zpDo-AfL3guG__ToqfTlExJ>`t`7I+gfLOi(x|zzW`Ka?+xwFFSb+k9q2_{X3i76Qu zYxy?5?A-&m)#|Zm{9q1qBou!xXA#a^6wVJz&`|G7UYNa*0=wlE!W+7V zF4*Nt1H+lO9D71qG<)8R{#?W-SPL=b9|XCA%X2#$XO0DP}WKI ziGS!=E$GAobn+cVS_60HjtBM|y)}NB+c+@|AENLoVOe%kTQg@l%XaKMU?ZsJ?T0aSyadkox19Tt_iYmINYr@V3oj@J{H|OyocA z__9q-gv$QJ*z*16;6e19ONRQ<_~f|gqFzr77wFxFqXv6VH&RGsC%i@{MA^VMEw5V0 zV1M7iR0qz+K78?W%V~3z%`!Y>$R{9Zj~C5tae(xLrk5N!G;_(D`x$)6z$(b7?Rr1u z+}x<`u5Xa1V-T0a=JQWgj*qv!ZN>`128(6DkiwogJ!`Pb@(t?v)p+jLd^N*|%VBf^ zg%>MJ>43XgKgju})@$`Dc10>kxCd%h$3+U}T-v5KOg-d8E;YwPNGG=NdLAFE?JBLq zl0Z6-l^(3i-vUXwJwC`j->bG8YbA-sZlGmbScV(F z__>OOTZ!a{Ph)$YIN)s#!cLbdF|(W2H;BDS42Dq_+3vCdGq-{^yRa8a`7~NhM#cVdISdM$0-5NULr})X(fCk6(?isSHh5c{M_8;m(|ic zn^KHGh>jC&$@!1rgXm#mJxQ!F19&Lr7VJ(vVVD`0eqr_vj9ZmF5yd%J7SDc= zNaiFfvp)yPiaZe<&_O6fy^5MKh9_4IkU=(Fhd@7E9snMU+97zs;afA2D9G;Y~4nDji7)$M2u1Apeqb(#2;Adb3))1sZ}C zt{j0vz6<>j?)7Y4waqijL5K#Vv_GXukcUbKJt@u>0e;dk9AK`sT)Mqomvgyff{L4* z!ZS@gpcK5t-n!pcMdq^3(8&ca`-i3>W!pF3X6&biW5C`43fsK(Z~HhR%VjpP@V?_DZ$t`Ba*FbL zs<~n9Nx#(YDxGgFu8cFp=@t{$@%OPwsE#+;lVDf1 z(bVeX`$K{2sn`u%1qpcSLOddG5w4;ptBPY~fuD)i_hS(oUWRJktZ>SPy0TYzr%YMW 
zq0{5M`B2(dw{uLf0O|$Vaz|!o#wU9v@84SWCa>lPRr(2KLBYeA+H?|GJQpqjjH0=I zr5&DLnFE>0{iv&2Iacw37f(|~y7og;WToG#fclU#zU7OezlWn&g}buwhiqcp(5_o` zGKojt7UWB88N1TcHlsmEBK#Ey$;7i_YhjTNI?inYpX#-}J5wTsAY3K4PaP6FT6b0P z1}w048@}e5ZE1V+x8`WBOSpYUO%-lx%=gkE(kbb{X!dRg)o3UeOAw_aKQOnVWod@^?GA4YLf|^0CpqSLy z-IwNSY9@WfQ3GPGKGQ7yXDJ0ZKI>Y=x*r69ptW$rEUaI&;yds9#_#^h)F zF>%V6Vo$Qvj{oYFN^$n^N&pS|*bSFvfu&`?EeN!rdJ0q6*N%IE4ZSP zN{vk&PPgDsA?t9;m;Q$h%rMK%-&OB?%uI*3T*c6Hh9F_gNI^%tgEf|mG~qGA z4tGAqSZn{Ww3u?-_w;@A6OU}iitRl$?DP;5uPZlI`1{%l8~Oa&-0bQ(eY?(3?0MV9 zFJzb|iJ{LG)SJlM)G|3R<*u)&6RbK1i>|WKMEQyscq&cbUW+FOxoyHcau%FTq`E{=k%3 zAXV2JSLM#9UkdO_)5xqKZEA= zQR~Y(cK4yTqstdMj+HnWwVvfFVq=R;{MT)W2<)za@lF;EW@u?yT+?Fe{(4z<%05RI zZ=a0cs7U&djM~niI2Fw{DsPZ1*U>ZWQ2k#LB2wxylV3;xgb*ni;v-!lF$4#1Pf|1)uf6kaj`;taXC*E|B=0QiGG L(R_?ld=~j%YazKE literal 0 HcmV?d00001 diff --git a/04_Spurerkennung/images_input/black.png b/04_Spurerkennung/images_input/black.png new file mode 100644 index 0000000000000000000000000000000000000000..4be5a5f6e31f695b74cffe70d8854396d75e074b GIT binary patch literal 3728 zcmdUxcTf|0632fSq&KP3d>}~gRXQZ}Di9=yN+%#9MU*BOdVL@u-5{v+B7y`XprJPj z0#ZYfE+BA^Vo*vB=gr)B_wN3`H?uoC^O@b5-S6&i=9h5A+<=~jiv|EdZ)B)z1pry$ zscTbFoRS~@WrWi}?Q3Xv7XVt8pH2qyo}34O#=;W@yK=?DC&1^fhmS9x5e&xX>+j?4 zdD{(upeeL95^lZ5u0fpLgPO#n9-H`Bu~G3^L6cCdX+mQB)J(?l{6&*&maXUX^vJn; z3gaOuDX3&NO9{F}nhDA!{%0vs&*KyO4(CIkdyO>{R|ZZR#x+-~cG2~NlsGE7EJJxq zMHF2*^uoE%QQaL~^Gj0N5j29nfQhQ!mH+Os02w$4S63Imh@*T5$bv>_sQ}I_A1A{T zM`B!u=3Xa5MU&wI(k`1&p;&=-ND8zBXzP=q&^bcZppYCm_PaPQf-921QTXA?2tc6^ zazn|0{bPPMvcfdL$Kn>R3p~_7)!@}cJ%Eq_tnMZqDqunq$QaqW8-m(KfWtG<)&VLy zAY&OHBMQhvfn$$=KoE$|1+4lzw(5U~*D$S$ohp@Er;Zg=(TlLBlJujrwUxRcf;Zw2 zXH{}KPom}fAZI@)=9CDq`l zk(PACJ94qZ$Qg?eV_mdl_Dj*tOWu_$zL)?_7wx<>OmbbAlQ@KE&MAW0Tbg8{X_93( zTp-^}gqaWlxCJ5rJ54^VpG2r2D37T5!9dcs*0Dk1C4h1_O7{ccrmmnAysuuXn+gEk z!U(Zi9qzr>^Ab1;p4PL|t+YpuO3_fkjy5P0l*SdsCGNWZ+cdQvAQyQQ+iW2FIHJx{;}cy23|pqm6|Mh_ z_M>ZnY+fpRVg5(|*IXLON=2(5D?J#eQZ-@SGPp12DdjtE%Sd&#LtkB%pw4jVe|52n 
zuCj}(_G%TRDPiAb@Kqo=bA%p_T8TUm%I9=J#}HwPFrP3i7(HWyxFW|$>!}`N_tOV91 z%JUTrxdbwLr(C`2C{-Ry@QL@tETNa$m*khYm-u(=Y0%EvelHHJ7euW3ZW(hfGc7AD zQ@G{G>DWBavzjR@f}c}Ny#}o=!dShuj4NYNxR<9g5XEj}Qe6EkbA-!-!-MGg@=L(m zb2FJ2%(Fk*4)?Dae98U-*$bqjk7bMMjz@}c=AWlLkK_zI|1!t4gmWZ`vl}j>EW44j zn#-8$V}lUGh&^CS>!Zmu&J@bz7L$gTSCmxDR?yn)+VsOy%WqnT)#%&ETQ8PVf6A(~ zsX$-Zu;#W#maCP;RUpfJ;94)j?T!;D?AlG+EnqJ!3tu@bHh1ySx^UJ9N$`8oSH|9p zQ0x$TH8A<$zF1SvF>h0nMn!p%3QXQu-nQOq_p!ScQ#}_?A`FhS=MdQ z1ySx21yii7+jXystn4gArD3IIrNKsvQq5*5x}-&~$?hcr#30k)g#03grC+qe6K$P8Vu0j8V(&)n#wR71MD|N%Yf9yFcg^&)EExyv z``wri8Qw?d*XOrwmJwcVDQ$Uf#Z5o`pzu_*z)t?|$MzoBXUpXwE!wAUaK2O$sm-=}D)65PJkO>XKW7h-kTbW$U$BbQsN`8|5R zKC0#?uIAv=@SY4YSHi!HeWY|+aY||rrxT8ei?NN>O7V?Ha1F_3D+Uft!|u-M{?N_R zoy|weTBy8~G*%STtdP2;KBfFt;jLDel)K%BQ3M)cblZeSMM|tj>CWwUb4Q!IaSP}L z+Qm?g9O<4LzQINKw!N5A_Hrp^3DqoZ(|GgCkN=^=Bv!_rd})Uz@KOp*a2szM@}l&R z(op;a?7G+m^BqGmvyk+!Y0Zkkf}Qfk63f_V;qph#ru3#W`Kkr%dH4CeC}< z-w!18sbp0ah!1RjsP7{@Qhu`il(fm*#yp>nDULOQc$CJ`g%J!(<8X`5XW-XauY<`r?XwJ)-@fy`@z)`n&|CTwGReDj6{LfUSw!RbdV znp=}CA+d&)z>&@Na6)7{Xqh|Cr|*64^yAm%l^s~uLHA+zwUt@P0(*w6itLqahCB3= zQ?KdguD;&f|E5Qf&K?jUG(}-x*yr!n-!!|G_TmTBwBRq`d+;A{rLEza2B-0ZJ40LC zq0QeEzK#;?n~BGoz3&J%WHx4_?xSvpMu+u8t(t_2wS%lf?;F+T-hP|ikYUI%r2qqF z9Q$dMcUS{%l{zNAVZhM++q)C=cAzA^61~muPr4Qae077D)`-+|#0&JQQ}>fYtL7<3 ze!I_$V8{-oNb-Q5WU3jnO)LP_726AUG z8J&p^3zzyn?MQfr30-x;&)s75*?!9uFh8tEN?KKGR>NQ4c@wZxM_6XDePf&VQq{qG zJz!n^BxA&)yXi?&UJdrT7j`S8r6JUvxW-+ejGx(S3I2nfG&WkGqA}C58{$LkB<|0# z&xHF6ALIL*CIf}Hs+%pE9rm<0btYjmaid!HC({$#(+_Xq0t+ zu(PSTsr|9^u^R=KHC0p=wEU0Pj!I#Ju7g4a94F6C`fE7YrNa+KH^%WpIYUL+JJ}iU zy&n`Guj4j2UlO|7x-Puy4U(6=xp(z&vo&x!kazp}9HYh31HOkMB#-Z}_N_mb9%gW6 zWRS=qIlD(L@{&LQn|v;YR%QT%Tmk?U1;BUGXtK-9ssJ2S_rA2^(?iKxYJXcbZWrkMGa*503Fp&gPb`v915J;Df<)uH}*^F zzw&-ze}i%yO7P#N{7d(z;4kbKo&H0lzoh=ZmP67{FXBH}{7d(L({=ti)BkRs&hX6N hX`Lu7b^u5u2$=PRJanpT3psuAz(~(rw+8AM`A-(#yL12m literal 0 HcmV?d00001 diff --git a/90_ZielSW/01_OnlyCapturing_NoProcessing.py b/90_ZielSW/01_OnlyCapturing_NoProcessing.py new file mode 100644 
index 0000000..ed08f1d --- /dev/null +++ b/90_ZielSW/01_OnlyCapturing_NoProcessing.py @@ -0,0 +1,156 @@ +# Creation Date: 02.03.2022 +# Author: Kenan Gömek +# This script takes pictures with Picameras VideoPort like it will be used to work with OpenCV and saves it with OpenCV to have the real use case pictures. +# This script is designed for shooting manually images with 'i' +# Press 'q' to exit +# Change camera parameters with v,b,n,m,x,c,o. See code for more information. + + +import cv2 as cv +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os + +import numpy as np +import matplotlib.pyplot as plt + + +# Define camera settings + + +SENSOR_MODE = 4 # corresponding sensor mode to resolution 1640x1232 +OUTPUT_RESOLUTION = (416, 320) # (1640x1232)/4=(410,308) --> needs to be divisible by 16 --> 416x320 + + +AWB_MODE = 'off' # Auto white balance mode +AWB_GAINS = (1.395, 1.15) # White Balance Gains to have colours read correctly: (red, blue). Int, floar or fraction are valid. +BRIGHTNESS = 25 # sets the brightness setting of the camera. default is 50. [0-100] + #the brighter, the brighter the LEDs and the higher the RGB values and vice versa! +CONTRAST = 100 # sets the contrast setting of the camera. The default value is 0. [-100 ... 100] + +SHUTTER_SPEED = 30000 # [µs] + +ISO = 320 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 25 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. My default: 2 s + +# Define Funcions +def get_frames_from_camera(): + # Initialise Camera + print('Initalise Camera...') + with picamera.PiCamera() as camera: + with PiRGBArray(camera) as output: + # Set camera settings + camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode + camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage! 
+ camera.framerate = FRAMERATE + + camera.awb_mode = AWB_MODE + camera.awb_gains = AWB_GAINS + + camera.iso = ISO + camera.shutter_speed = SHUTTER_SPEED + # it was found that, you have to set the right shutter speed at the first initalisation of the current runtime of the program. + # The gains (analog, digital) will adjust to this set up. + # After the gains are fixed, they will never change! even if you change the shutter speed during the runtime. + # To get consistent brightness values, set the right shutter speed at initalisation once. + + time.sleep(SLEEP_TIME) # wait for iso gains and digital_gain and analog_gain to settle before fixing the gains with exposure_mode = off + camera.exposure_mode = EXPOSURE_MODE + + time.sleep(1) # wait before applying brightness and contrast + camera.brightness = BRIGHTNESS + camera.contrast = CONTRAST + time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings + + # camera.start_preview() # show camera preview through PiCamera interface + # camera.annotate_frame_num=True # Controls whether the current frame number is drawn as an annotation. + + print('Start caputure...') + + for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)): + start_processing = time.perf_counter() + + framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1 + image = frame.array # raw NumPy array without JPEG encoding + + #cv.imshow("Current Frame", image) # display the image without text + output.truncate(0) # clear the stream for next frame + + # processing + + + + # Only uncomment following code if you display the image. No errors if not commented, but higher fps if commented. + # if q is pressed, break from loop. 
+ pressed_key = cv.waitKey(2) & 0xff + if pressed_key == ord('q'): + break + + elif pressed_key == ord('v'): # increase shutterspeed by 5 + shutter_speed = round(shutter_speed+5) + camera.shutter_speed = shutter_speed + time.sleep(2) # wait to shutter speed is applied before querying exposure speed + exposure_speed = camera.exposure_speed + print(f"shutter speed set to: {shutter_speed}") + print(f"retrieved shutter speed: {exposure_speed}") + + elif pressed_key == ord('b'): # increase shutterspeed by 50 + shutter_speed = round(shutter_speed+50) + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + elif pressed_key == ord('n'): # increase shutterspeed by 500 + shutter_speed = round(shutter_speed+500) + print(f"shutter speed set to: {shutter_speed}") + elif pressed_key == ord('m'): # max shutterspeed + shutter_speed = round(1/FRAMERATE*1e6) + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + elif pressed_key == ord('x'): # decrease shutterspeed by 500 + shutter_speed = round(shutter_speed-500) + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + elif pressed_key == ord('c'): # decrease shutterspeed by 50 + shutter_speed = round(shutter_speed-50) + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + elif pressed_key == ord('o'): # set shutterspeed to 0 + shutter_speed = 0 + camera.shutter_speed = shutter_speed + print(f"shutter speed set to: {shutter_speed}") + + end_processing = time.perf_counter() + time_processing = round(end_processing-start_processing, 2) + time_processing = time_processing*1000 + print(f'processing time: {time_processing} ms') + + + +# ---------------------------------------------------------------------------- +# main +def main(): + # start capturing + get_frames_from_camera() # start capture + + cv.destroyAllWindows() + print('Program finished') + + + +if __name__ == "__main__": + main() + + + + 
+ + + + + diff --git a/90_ZielSW/02_With_Processing_Hough.py b/90_ZielSW/02_With_Processing_Hough.py new file mode 100644 index 0000000..fdf5bf4 --- /dev/null +++ b/90_ZielSW/02_With_Processing_Hough.py @@ -0,0 +1,514 @@ +# Creation Date: 02.03.2022 +# Author: Kenan Gömek +# This code detects the lane with HoughCircles. +# Quit program with 'q' if opencv windows is shows. Else use Ctrl+C. + + +import cv2 as cv +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os + +import numpy as np +import math as M + +# Define camera settings +SENSOR_MODE = 4 # corresponding sensor mode to resolution 1640x1232 +OUTPUT_RESOLUTION = (192, 144) # (1640x1232)/4=(410,308) --> needs to be divisible by 16 --> 416x320 + + +AWB_MODE = 'off' # Auto white balance mode +AWB_GAINS = (1.395, 1.15) # White Balance Gains to have colours read correctly: (red, blue). Int, floar or fraction are valid. +BRIGHTNESS = 25 # sets the brightness setting of the camera. default is 50. [0-100] + #the brighter, the brighter the LEDs and the higher the RGB values and vice versa! +CONTRAST = 100 # sets the contrast setting of the camera. The default value is 0. [-100 ... 100] + +SHUTTER_SPEED = 50 # [µs] + +ISO = 320 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 25 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. 
My default: 2 s + +# Define Functions +# Parameters +pixels_per_mm = 71/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320 +# pixels_per_mm = 107/24.25 #[px/mm] for 120 mm camera height for resolution: 640x480 + +# Offset Camera Sensor in Scooty according to Scooty-KS +x_offset_camera_mm = 100 # [mm] +y_offset_camera_mm = 50 # [mm] +x_offset_camera_px = x_offset_camera_mm*pixels_per_mm # [px] +y_offset_camera_px = y_offset_camera_mm*pixels_per_mm # [px] + +# image parameters +image_heigth = OUTPUT_RESOLUTION[1] # shape [0] +image_width = OUTPUT_RESOLUTION[0]# shape[1] +# calculate center of image +[x_0, y_0] = np.array([image_width/2, image_heigth/2], dtype=np.uint16) + + +threshold_color_detection = 60 # values under this will not be considered as active leds in each color channel + +# Parameters for Blob/LED Detection +minDiameter_mm = 5 # [mm] minimum diameter of detected blob/LED +maxDiameter_mm = 9 # [mm] maximum diameter of detected blob/LED + +# Define color numbers to identify the color channels in the matrix with all detected LEDs. No string, because numpy array should stay uint16 +color_number_off = 0 +color_number_red = 1 +color_number_green = 2 +color_number_blue = 3 +color_number_yellow = 4 +color_number_magenta = 5 +color_number_cyan = 6 +color_number_white = 7 + +show_opencv_window = True # show opencv window +draw_opencv = True # draw lane and so on + +print_additional_info = False + +# Parameters for HoughCircles +dp = 1 # Inverse ratio of the accumulator resolution to the image resolution. For example, if dp=1 , the accumulator has the same resolution as the input image. If dp=2 , the accumulator has half as big width and height. +minDist_mm = 1 # [mm] minimal distance between two circles +minDist_px = int(minDist_mm*pixels_per_mm) # in [px] Minimum distance in px between the centers of the detected circles. If the parameter is too small, multiple neighbor circles may be falsely detected in addition to a true one. 
If it is too large, some circles may be missed. +minRadius_mm = 3 # [mm] minimum radius of a circle +minRadius_px = int(minRadius_mm*pixels_per_mm) # [px] Minimum circle radius. +maxRadius_mm = 7 # [mm] maximum radius of a circle +maxRadius_px = int(maxRadius_mm*pixels_per_mm) # [px] Maximum circle radius. If <= 0, uses the maximum image dimension. If < 0, returns centers without finding the radius. + +param1 = 150 # 30 First method-specific parameter. In case of HOUGH_GRADIENT , it is the higher threshold of the two passed to the Canny edge detector (the lower one is twice smaller). + # If circles/LEDs should be detected at low shutter speeds, than lower this value + # Upper threshold for the internal Canny edge detector. + # "Gradient value between dark and white" +param2 = 5 # 12 Second method-specific parameter. In case of HOUGH_GRADIENT , it is the accumulator threshold for the circle centers at the detection stage. The smaller it is, the more false circles may be detected. Circles, corresponding to the larger accumulator values, will be returned first. + # By increasing this threshold value, we can ensure that only the best circles, corresponding to larger accumulator values, are returned. + + +def points_trafo(detected_LEDs, alpha_rad, dx, dy): + """Tranfsform points of LED to lane in KS-LED""" + detected_LEDs_trafo = detected_LEDs.copy() # copy, becuase else it is only a pointer + detected_LEDs_trafo = detected_LEDs_trafo.astype(np.int16) # avoid integer overflow + x_pnts = detected_LEDs_trafo[:,0] + y_pnts = detected_LEDs_trafo[:,1] + + # Translation + x1 = x_pnts-dx-x_0 + x_trafo=x1 + y1 = y_pnts-dy-y_0 + y_trafo = y1 + + # Rotation. 
Winkel Sensor im UZS, also negativ zu mathematischer definiton + x_trafo = np.cos(-alpha_rad)*x1-np.sin(-alpha_rad)*y1 + detected_LEDs_trafo[:,0] = x_trafo + y_trafo = np.sin(-alpha_rad)*x1+np.cos(-alpha_rad)*y1 + detected_LEDs_trafo[:,1] = y_trafo + + #sort points along lane: x_2, y_2 -axis (KS_LED) + detected_LEDs_trafo = detected_LEDs_trafo[detected_LEDs_trafo[:, 0].argsort(kind='quicksort')] + return detected_LEDs_trafo + +def construct_lane(detected_LEDs, img_bgr): + """construct the lane""" + # This function is partially commented in german, because higher math is used + # clearer what is trying to be achieved + + # get points + # xy_pnts = detected_LEDs[:,0:2] + # x_pnts = detected_LEDs[:,0] + # y_pnts = detected_LEDs[:,1] + + # approach 2: + # fit line through centers of LEDs in KS_0 + # DIST_L": the simplest and the fastest least-squares method: the simple euclidean distance + param = 0 # not used for DIST_L2 + reps = 0.001 # Sufficient accuracy for the radius (distance between the coordinate origin and the line). + aeps = 0.001 # Sufficient accuracy for the angle. 
+ [dx, dy, x_2, y_2] = cv.fitLine(detected_LEDs[:,0:2], cv.DIST_L2, param, reps, aeps) + # x2, y2: same as: mean_of_leds = np.mean([x_pnts, y_pnts], 1) + + alpha_rad = np.arctan2(dy, dx) # calculate angle of line + alpha = np.arctan2(dy, dx)*180/np.pi # calculate angle of line + # print(f"Lane: dx: {dx}, dy:{dy}, x2:{x_2}, y2:{y_2}, alpha:{alpha}°") + if print_additional_info: + print(f"Lane: alpha:{alpha[0]}°") + + # get smallest distance to point an line + # Berechnung nach: Repetitorium Höhere Mathematik, Wirth + # Gerade: x = a+ t*b + # Punkt : OP = p + # d = abs(b x (p-a))/(abs(b)) + # info: np.array()[:,0] --> gets only array with 1 dimensions with desired values + p = np.array([x_0, y_0]) + a = np.array([x_2, y_2])[:,0] + b = np.array([np.cos(alpha_rad), np.sin(alpha_rad)])[:,0] # Richtungsvektor + c = p-a + + # Betrag von Vektor: np.linalg.norm(vec) + cross= np.cross(b, c) + d = np.linalg.norm(cross)/np.linalg.norm(b) # distance [px] + #print(f"d: {round(d,2)}") + + # Fußpunkt (X_LED, Y_LED) + t_0_dot = np.dot(c, b) + t_0_norm = (np.linalg.norm(b)**2) + t_0 = t_0_dot/t_0_norm + [x_LED, y_LED] = (a+t_0*b) + if print_additional_info: + print(f"x_LED: {x_LED}, y_LED: {y_LED}") + + # Abstand (dx, dy) Fußpunkt zu KS_0 + dx_LED = x_LED - x_0 + dx_LED_mm = dx_LED*(1/pixels_per_mm) + dy_LED = y_LED - y_0 + dy_LED_mm = dy_LED*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED:{dx_LED} [px] , dy_LED:{dy_LED} [px]") + print(f"dx_LED:{dx_LED_mm} [mm] , dy_LED:{dy_LED_mm} [mm]") + + # Abstand (dx, dy) Fußpunkt von Bildmitte zu KS_Scooty + # Diese Werte zurückgeben + dx_LED_scooty = x_LED - x_0 + x_offset_camera_px + dx_LED_scooty_mm = dx_LED_scooty*(1/pixels_per_mm) + dy_LED_scooty = y_LED - y_0 + y_offset_camera_px + dy_LED_scooty_mm = dy_LED_scooty*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED_scooty:{dx_LED_scooty} [px] , dy_LED_scooty:{dy_LED_scooty} [px]") + print(f"dx_LED_scooty:{dx_LED_scooty_mm} [mm] , 
dy_LED_scooty:{dy_LED_scooty_mm} [mm]") + + # Punkte Trafo, um sortierte position der LEDs entlang Spur zu erhalten + # Bei normal detected kann bei vertikaler LED zb Fehler entstehen und dann muster: 211323233 -> daher mit dieser sortierten weitermachen + detected_LEDs_KS_LED = points_trafo(detected_LEDs, alpha_rad, dx_LED, dy_LED) + if print_additional_info: + print(f"Detected LEDs in KS_LED:(x2, y2):\n {detected_LEDs_KS_LED}") + + #----------------------------------- + # draw useful lines and points + + # draw lane line + if draw_opencv: + pt_0 = (a+b*np.array([-300, -300])).astype(np.int32) + pt_1 = (a+b*np.array([300, 300])).astype(np.int32) + #print(f"pt_0: {pt_0}, pt_1: {pt_1}") + cv.line(img_bgr, pt_0, pt_1, (255,255,255),1) # draw lane + + # draw dx dy + cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # shortest distance from KS_0 to KS_LED --> Lot + # cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_0)), (0,0,255), 2) # only dx + # cv.line(img_bgr, (int(x_LED), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # only dy + + #draw additional points + cv.circle(img_bgr, (int(x_2), int(y_2)), 5,(255,128,255),-1) #pink. Center of points + #cv.putText(img_bgr, '(x2, y2)',(int(x_2)+5, int(y_2)-5), cv.FONT_HERSHEY_SIMPLEX, 2, (255,255,255), cv.LINE_AA) + cv.circle(img_bgr, (int(x_LED), int(y_LED)), 5,(170,255,0),-1) # lime green. Fußpunkt + + if show_opencv_window: + cv.imshow("Lane", img_bgr) + + return dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED + +def convert_rgb_to_grayscale_average(image_bgr): + """This function converts the RGB image into an grayscale image. 
+ Algorithm: Average: Y = (R+G+B)/3""" + + # convert dtype to prevent integer overflow while addition + image_bgr = image_bgr.astype(np.uint16, copy=False) + image_gray = (image_bgr[:,:,0]+image_bgr[:,:,1]+image_bgr[:,:,2])/3 # add values / do conversion + image_gray = image_gray.astype(np.uint8, copy=False) # convert back to uint8 + + return image_gray + +def create_detector(params_for_blob_detection): + detector = cv.SimpleBlobDetector_create(params_for_blob_detection) # Set up the detector with specified parameters. + return detector + +def define_parameters_for_blob_detection(): + """set parameters for simple blob detector""" + params = cv.SimpleBlobDetector_Params() + + # Threshold for Convert the source image to binary images by applying thresholding + # with several thresholds from minThreshold (inclusive) to maxThreshold (exclusive) + # with distance thresholdStep between neighboring thresholds. + # Since the Grayscale image is dark if only one color channel is active, + # the Threshold values have to be set like this. 
+ # particularly the thresholdStep-Value has to be low + params.minThreshold=20 # reminder: this value is set for grayscale image + params.maxThreshold=255 + params.thresholdStep=1 + + params.filterByColor=False # do not filter blobs by color + + # Filter blobs by Area + params.filterByArea=False + + # Filter by Inertia + params.filterByInertia=False + + + # Filter by Convexity + params.filterByConvexity=False + + + # Filter by Circularity + params.filterByCircularity=False + + + # params.minDistBetweenBlobs = minDist_px # this has no effect + + return params + + +def detect_LED_positions_in_grayscale(image_gray, image_bgr, detector): + start_processing = time.perf_counter() + #keypoints = detector.detect(image_gray) # Detect blobs --> LEDs + + detected_LEDs = cv.HoughCircles(image_gray, cv.HOUGH_GRADIENT, dp=dp, minDist = minDist_px + , param1=param1, param2=param2, minRadius=minRadius_px, maxRadius=maxRadius_px) + end_processing = time.perf_counter() + time_processing = end_processing-start_processing + time_processing = time_processing*1000 + time_processing = round(time_processing, 2) + print(f'processing time Hough: {time_processing} ms') + + # check if at least one circle was found in the image + if detected_LEDs is not None: + detected_LEDs = np.uint16(np.round(detected_LEDs)) # convert the (x, y) coordinates and radius of the circles to integers + detected_LEDs = detected_LEDs[0,:] + detected_LEDs=np.hstack((detected_LEDs, np.full((detected_LEDs.shape[0],1), 9, dtype=np.uint16))) + # matrix with columns: x, y, r + number_of_detected_LEDs = detected_LEDs.shape[0] + print(f"detected {9} LEDs: {number_of_detected_LEDs}") + + # paramters for drawing + line_thickness = 1 + circle_color = (0,255,0) + vertex_offset = 2 + rectangle_color = (0,128,255) # R G B + for (x, y, r, cn) in detected_LEDs: + print(f"x:{x} px, y:{y} px, r:{r} px, r:{round(r*1/(pixels_per_mm),2)} mm, D: {round(2*r*1/(pixels_per_mm),2)} mm, color: {9}") + cv.circle(image_bgr, (x, y), r, 
circle_color, thickness=line_thickness) # draw detected circumference of the cirle + cv.rectangle(img=image_bgr, pt1=(x-vertex_offset, y-vertex_offset), pt2=(x+vertex_offset, y+vertex_offset), \ + color=rectangle_color, thickness=cv.FILLED) + cv.imshow(f"HC", image_bgr) + + return None + + else: + print(f"No LEDs were detected") + return None + +def detect_position_of_all_LEDs_grayscale(image_gray, image_bgr, detector): + position_of_LEDs = detect_LED_positions_in_grayscale(image_gray, image_bgr, detector) + + if position_of_LEDs is not None: + return position_of_LEDs + else: + return None + +def get_color_of_leds(matrix_of_LEDs, image_bgr): + # is image_r[y_pos, x_pos] = image_bgr[y_pos,x_pos, 2] ? --> yes. No need to split the color channels. + + offset = 0 # half of length from rectangle which is going to be used to determine the color around the middle point of the blob/led + # offset = 0 --> only the value from the middle point of the blob/led + # offset=1 --> 9 values, offset=2-->25 values + + for led in matrix_of_LEDs: + x_pos = led[0] # uint16 + y_pos = led[1] # uint16 + + # get values of color channels in region around middle point of blob/led: + # +1 at stop index, because it is not inclusive + region_around_blue_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 0] # uint8 + region_around_green_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 1] # uint8 + region_around_red_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 2] # uint8 + + # average of the values + # convert dtype to prevent integer overflow while addition + region_around_red_led = region_around_red_led.astype(np.uint16, copy=False) + region_around_green_led = region_around_green_led.astype(np.uint16, copy=False) + region_around_blue_led = region_around_blue_led.astype(np.uint16, copy=False) + # sum all elements in matrix and divide with number of elements + number_of_elements= region_around_blue_led.size + 
value_of_red_led = region_around_red_led.sum()/number_of_elements # float64, if not integer result + value_of_green_led = region_around_green_led.sum()/number_of_elements # float64, if not integer result + value_of_blue_led = region_around_blue_led.sum()/number_of_elements # float64, if not integer result + + # determine which leds are active: + # if value > threshold --> led is active + status_blue_led = False; status_green_led = False; status_red_led = False + if value_of_blue_led > threshold_color_detection: + status_blue_led = True + if value_of_green_led > threshold_color_detection: + status_green_led = True + if value_of_red_led > threshold_color_detection: + status_red_led = True + + # determine color by checking the cases: + # case 1: red + if status_blue_led==False and status_green_led==False and status_red_led==True: + color = color_number_red + # case 2: green + elif status_blue_led==False and status_green_led==True and status_red_led==False: + color = color_number_green + # case 3: blue + elif status_blue_led==True and status_green_led==False and status_red_led==False: + color = color_number_blue + # case 4: yellow = red + green + elif status_blue_led==False and status_green_led==True and status_red_led==True: + color = color_number_yellow + # case 5: magenta = red + blue + elif status_blue_led==True and status_green_led==False and status_red_led==True: + color = color_number_magenta + # case 6: cyan = green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==False: + color = color_number_cyan + # case 7: white = red + green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==True: + color = color_number_white + # case 8: led not active + # this case can not occur, because no inactive led can be detected from the implemented blob-algorithm in detect_LED_positions_in_grayscale + else: + color = color_number_off + + # fill matrix with color + led[2] = color # uint16 + + return matrix_of_LEDs + 
+ +def detect_LEDs_with_grayscale(image_bgr, detector): + # convert rgb to grayscale image + # start_m1 = time.perf_counter() + image_gray = convert_rgb_to_grayscale_average(image_bgr) + # end_m1 = time.perf_counter() + # time_processing = end_m1-start_m1 + # time_processing = time_processing*1000 + # time_processing = round(time_processing, 2) + # print(f'processing time conversion: {time_processing} ms') + + # get position of leds + position_of_LEDs = detect_position_of_all_LEDs_grayscale(image_gray=image_gray, image_bgr=image_bgr, detector=detector) + + #position_of_LEDs = None + + if position_of_LEDs is not None: + # determine color of leds and add to matrix + detected_LEDs = get_color_of_leds(position_of_LEDs, image_bgr) + return detected_LEDs + else: + return None + + +def lane_detection(image_bgr, detector): + # Detect LEDs + print(f"Detect LEDs and color:") + detected_LEDs = detect_LEDs_with_grayscale(image_bgr, detector) + + if detected_LEDs is not None: + # Contruct lane + #print(f"_____________________________________") + # print("Contruct lane") + dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED = \ + construct_lane(detected_LEDs, image_bgr) + + # print result + if print_additional_info: + print(f"Detected LEDs relative to image-center(x0,y0):\n{detected_LEDs}") + return detected_LEDs + else: + return None + + + +# Picamera +def get_frames_from_camera(detector): + # Initialise Camera + print('Initialise Camera...') + with picamera.PiCamera() as camera: + with PiRGBArray(camera) as output: + # Set camera settings + camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode + camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage! 
+ camera.framerate = FRAMERATE + + camera.awb_mode = AWB_MODE + camera.awb_gains = AWB_GAINS + + camera.iso = ISO + camera.shutter_speed = SHUTTER_SPEED + # it was found that, you have to set the right shutter speed at the first initalisation of the current runtime of the program. + # The gains (analog, digital) will adjust to this set up. + # After the gains are fixed, they will never change! even if you change the shutter speed during the runtime. + # To get consistent brightness values, set the right shutter speed at initalisation once. + + time.sleep(SLEEP_TIME) # wait for iso gains and digital_gain and analog_gain to settle before fixing the gains with exposure_mode = off + camera.exposure_mode = EXPOSURE_MODE + + time.sleep(1) # wait before applying brightness and contrast + camera.brightness = BRIGHTNESS + camera.contrast = CONTRAST + time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings + + # camera.start_preview() # show camera preview through PiCamera interface + # camera.annotate_frame_num=True # Controls whether the current frame number is drawn as an annotation. + + print('Start caputure...') + + for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)): + start_processing = time.perf_counter() + + framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1 + image_bgr = frame.array # raw NumPy array without JPEG encoding + + #cv.imshow("Current Frame", image) # display the image without text + output.truncate(0) # clear the stream for next frame + + # processing + lane_detection(image_bgr, detector) + + + # Only uncomment following code if you display the image. No errors if not commented, but higher fps if commented. + # if q is pressed, break from loop. 
+ pressed_key = cv.waitKey(2) & 0xff + if pressed_key == ord('q'): + break + end_processing = time.perf_counter() + time_processing = round(end_processing-start_processing, 2) + time_processing = time_processing*1000 + print(f'processing time: {time_processing} ms') + + + +# ---------------------------------------------------------------------------- +# main +def main(): + + # initialise parameters for blob detectio once befor loop for performane + params_for_blob_detection = define_parameters_for_blob_detection() + detector = create_detector(params_for_blob_detection) + + # start capturing + get_frames_from_camera(detector) # start capture + + cv.destroyAllWindows() + print('Program finished') + + + +if __name__ == "__main__": + main() + + + + + + + + + diff --git a/90_ZielSW/02_With_Processing_Zielsoftware_Simpleblob.py b/90_ZielSW/02_With_Processing_Zielsoftware_Simpleblob.py new file mode 100644 index 0000000..59e03b3 --- /dev/null +++ b/90_ZielSW/02_With_Processing_Zielsoftware_Simpleblob.py @@ -0,0 +1,514 @@ +# Creation Date: 02.03.2022 +# Author: Kenan Gömek +# This code detects the lane with Simpleblob Detector. +# Quit program with 'q' if opencv windows is shows. Else use Ctrl+C. + + +import cv2 as cv +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os + +import numpy as np +import math as M + +# Define camera settings +SENSOR_MODE = 4 # corresponding sensor mode to resolution 1640x1232 +# OUTPUT_RESOLUTION = (416, 320) # (1640x1232)/4=(410,308) --> needs to be divisible by 16 --> 416x320 +OUTPUT_RESOLUTION = (192, 144) + +AWB_MODE = 'off' # Auto white balance mode +AWB_GAINS = (1.395, 1.15) # White Balance Gains to have colours read correctly: (red, blue). Int, floar or fraction are valid. +BRIGHTNESS = 25 # sets the brightness setting of the camera. default is 50. [0-100] + #the brighter, the brighter the LEDs and the higher the RGB values and vice versa! 
+CONTRAST = 100 # sets the contrast setting of the camera. The default value is 0. [-100 ... 100] + +SHUTTER_SPEED = 50 # [µs] + +ISO = 320 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 25 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. My default: 2 s + +# Parameters +pixels_per_mm = 32/24.25 #[px/mm] for 120 mm camera height for resolution: 192x144 +# pixels_per_mm = 71/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320 +# pixels_per_mm = 107/24.25 #[px/mm] for 120 mm camera height for resolution: 640x480 + +# Offset Camera Sensor in Scooty according to Scooty-KS +x_offset_camera_mm = 100 # [mm] +y_offset_camera_mm = 50 # [mm] +x_offset_camera_px = x_offset_camera_mm*pixels_per_mm # [px] +y_offset_camera_px = y_offset_camera_mm*pixels_per_mm # [px] + +# image parameters +image_heigth = OUTPUT_RESOLUTION[1] # shape [0] +image_width = OUTPUT_RESOLUTION[0]# shape[1] +# calculate center of image +[x_0, y_0] = np.array([image_width/2, image_heigth/2], dtype=np.uint16) + + +threshold_color_detection = 60 # values under this will not be considered as active leds in each color channel + +# Parameters for Blob/LED Detection +minDiameter_mm = 5 # [mm] minimum diameter of detected blob/LED +maxDiameter_mm = 9 # [mm] maximum diameter of detected blob/LED + +# Define color numbers to identify the color channels in the matrix with all detected LEDs. 
No string, because numpy array should stay uint16 +color_number_off = 0 +color_number_red = 1 +color_number_green = 2 +color_number_blue = 3 +color_number_yellow = 4 +color_number_magenta = 5 +color_number_cyan = 6 +color_number_white = 7 + +show_opencv_window = True # show opencv window +draw_opencv = True # draw lane and so on + +print_additional_info = True + +def points_trafo(detected_LEDs, alpha_rad, dx, dy): + """Tranfsform points of LED to lane in KS-LED""" + detected_LEDs_trafo = detected_LEDs.copy() # copy, becuase else it is only a pointer + detected_LEDs_trafo = detected_LEDs_trafo.astype(np.int16) # avoid integer overflow + x_pnts = detected_LEDs_trafo[:,0] + y_pnts = detected_LEDs_trafo[:,1] + + # Translation + x1 = x_pnts-dx-x_0 + x_trafo=x1 + y1 = y_pnts-dy-y_0 + y_trafo = y1 + + # Rotation. Winkel Sensor im UZS, also negativ zu mathematischer definiton + x_trafo = np.cos(-alpha_rad)*x1-np.sin(-alpha_rad)*y1 + detected_LEDs_trafo[:,0] = x_trafo + y_trafo = np.sin(-alpha_rad)*x1+np.cos(-alpha_rad)*y1 + detected_LEDs_trafo[:,1] = y_trafo + + #sort points along lane: x_2, y_2 -axis (KS_LED) + detected_LEDs_trafo = detected_LEDs_trafo[detected_LEDs_trafo[:, 0].argsort(kind='quicksort')] + return detected_LEDs_trafo + +def construct_lane(detected_LEDs, img_bgr): + """construct the lane""" + # This function is partially commented in german, because higher math is used + # clearer what is trying to be achieved + + # get points + # xy_pnts = detected_LEDs[:,0:2] + # x_pnts = detected_LEDs[:,0] + # y_pnts = detected_LEDs[:,1] + + # approach 2: + # fit line through centers of LEDs in KS_0 + # DIST_L": the simplest and the fastest least-squares method: the simple euclidean distance + param = 0 # not used for DIST_L2 + reps = 0.001 # Sufficient accuracy for the radius (distance between the coordinate origin and the line). + aeps = 0.001 # Sufficient accuracy for the angle. 
+ [dx, dy, x_2, y_2] = cv.fitLine(detected_LEDs[:,0:2], cv.DIST_L2, param, reps, aeps) + # x2, y2: same as: mean_of_leds = np.mean([x_pnts, y_pnts], 1) + + alpha_rad = np.arctan2(dy, dx) # calculate angle of line + alpha = np.arctan2(dy, dx)*180/np.pi # calculate angle of line + # print(f"Lane: dx: {dx}, dy:{dy}, x2:{x_2}, y2:{y_2}, alpha:{alpha}°") + if print_additional_info: + print(f"Lane: alpha:{alpha[0]}°") + + # get smallest distance to point an line + # Berechnung nach: Repetitorium Höhere Mathematik, Wirth + # Gerade: x = a+ t*b + # Punkt : OP = p + # d = abs(b x (p-a))/(abs(b)) + # info: np.array()[:,0] --> gets only array with 1 dimensions with desired values + p = np.array([x_0, y_0]) + a = np.array([x_2, y_2])[:,0] + b = np.array([np.cos(alpha_rad), np.sin(alpha_rad)])[:,0] # Richtungsvektor + c = p-a + + # Betrag von Vektor: np.linalg.norm(vec) + cross= np.cross(b, c) + d = np.linalg.norm(cross)/np.linalg.norm(b) # distance [px] + #print(f"d: {round(d,2)}") + + # Fußpunkt (X_LED, Y_LED) + t_0_dot = np.dot(c, b) + t_0_norm = (np.linalg.norm(b)**2) + t_0 = t_0_dot/t_0_norm + [x_LED, y_LED] = (a+t_0*b) + if print_additional_info: + print(f"x_LED: {x_LED}, y_LED: {y_LED}") + + # Abstand (dx, dy) Fußpunkt zu KS_0 + dx_LED = x_LED - x_0 + dx_LED_mm = dx_LED*(1/pixels_per_mm) + dy_LED = y_LED - y_0 + dy_LED_mm = dy_LED*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED:{dx_LED} [px] , dy_LED:{dy_LED} [px]") + print(f"dx_LED:{dx_LED_mm} [mm] , dy_LED:{dy_LED_mm} [mm]") + + # Abstand (dx, dy) Fußpunkt von Bildmitte zu KS_Scooty + # Diese Werte zurückgeben + dx_LED_scooty = x_LED - x_0 + x_offset_camera_px + dx_LED_scooty_mm = dx_LED_scooty*(1/pixels_per_mm) + dy_LED_scooty = y_LED - y_0 + y_offset_camera_px + dy_LED_scooty_mm = dy_LED_scooty*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED_scooty:{dx_LED_scooty} [px] , dy_LED_scooty:{dy_LED_scooty} [px]") + print(f"dx_LED_scooty:{dx_LED_scooty_mm} [mm] , 
dy_LED_scooty:{dy_LED_scooty_mm} [mm]") + + # Punkte Trafo, um sortierte position der LEDs entlang Spur zu erhalten + # Bei normal detected kann bei vertikaler LED zb Fehler entstehen und dann muster: 211323233 -> daher mit dieser sortierten weitermachen + detected_LEDs_KS_LED = points_trafo(detected_LEDs, alpha_rad, dx_LED, dy_LED) + if print_additional_info: + print(f"Detected LEDs in KS_LED:(x2, y2):\n {detected_LEDs_KS_LED}") + + #----------------------------------- + # draw useful lines and points + + # draw lane line + if draw_opencv: + pt_0 = (a+b*np.array([-300, -300])).astype(np.int32) + pt_1 = (a+b*np.array([300, 300])).astype(np.int32) + #print(f"pt_0: {pt_0}, pt_1: {pt_1}") + cv.line(img_bgr, pt_0, pt_1, (255,255,255),1) # draw lane + + # draw dx dy + cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # shortest distance from KS_0 to KS_LED --> Lot + # cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_0)), (0,0,255), 2) # only dx + # cv.line(img_bgr, (int(x_LED), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # only dy + + #draw additional points + cv.circle(img_bgr, (int(x_2), int(y_2)), 5,(255,128,255),-1) #pink. Center of points + #cv.putText(img_bgr, '(x2, y2)',(int(x_2)+5, int(y_2)-5), cv.FONT_HERSHEY_SIMPLEX, 2, (255,255,255), cv.LINE_AA) + cv.circle(img_bgr, (int(x_LED), int(y_LED)), 5,(170,255,0),-1) # lime green. Fußpunkt + + if show_opencv_window: + cv.imshow("Lane", img_bgr) + + return dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED + +def convert_rgb_to_grayscale_average(image_bgr): + """This function converts the RGB image into an grayscale image. 
+ Algorithm: Average: Y = (R+G+B)/3""" + + # convert dtype to prevent integer overflow while addition + image_bgr = image_bgr.astype(np.uint16, copy=False) + image_gray = (image_bgr[:,:,0]+image_bgr[:,:,1]+image_bgr[:,:,2])/3 # add values / do conversion + image_gray = image_gray.astype(np.uint8, copy=False) # convert back to uint8 + + return image_gray + +def create_detector(params_for_blob_detection): + detector = cv.SimpleBlobDetector_create(params_for_blob_detection) # Set up the detector with specified parameters. + return detector + +def define_parameters_for_blob_detection(): + """set parameters for simple blob detector""" + params = cv.SimpleBlobDetector_Params() + + # Threshold for Convert the source image to binary images by applying thresholding + # with several thresholds from minThreshold (inclusive) to maxThreshold (exclusive) + # with distance thresholdStep between neighboring thresholds. + # Since the Grayscale image is dark if only one color channel is active, + # the Threshold values have to be set like this. 
+ # particularly the thresholdStep-Value has to be low + params.minThreshold=20 # reminder: this value is set for grayscale image + params.maxThreshold=255 + params.thresholdStep=1 + + params.filterByColor=False # do not filter blobs by color + + # Filter blobs by Area + params.filterByArea=False + + # Filter by Inertia + params.filterByInertia=False + + + # Filter by Convexity + params.filterByConvexity=False + + + # Filter by Circularity + params.filterByCircularity=False + + + # params.minDistBetweenBlobs = minDist_px # this has no effect + + return params + + +def detect_LED_positions_in_grayscale(image_gray, image_bgr, detector): + start_processing = time.perf_counter() + keypoints = detector.detect(image_gray) # Detect blobs --> LEDs + end_processing = time.perf_counter() + time_processing = end_processing-start_processing + time_processing = time_processing*1000 + time_processing = round(time_processing, 2) + print(f'processing time detector: {time_processing} ms') + + number_of_detected_leds = len(keypoints) + + if number_of_detected_leds != 0: + # print information of keypoints + print(f"detected LEDs: {number_of_detected_leds}") + + #Pre-allocate matrix for numpy + number_of_rows = number_of_detected_leds + number_of_columns = 3 + position_of_leds = np.zeros((number_of_rows, number_of_columns), dtype=np.uint16) + for i, k in enumerate(keypoints): + # x_pos = round(k.pt[0],0) # x position + # y_pos = round(k.pt[1],0) # y position + # print(f"x: {x_pos} y: {y_pos}") + # diameter_px = round(k.size,2) + # diameter_mm = round(diameter_px*1/pixels_per_mm,2) + # print(f"diameter [px]: {diameter_px} diameter [mm]: {diameter_mm}") # diameter + # area_px2 = round(np.pi/4*k.size**2,0) # area in px^2 + # area_mm2 = round(area_px2*(1/pixels_per_mm)**2,0) + # print(f"area [px^2]: {area_px2} area [mm^2]: {area_mm2}") + # print('') + + # calculate parameters to transfer to matrix + # x_pos = int(np.ceil(x_pos)) + # y_pos = int(np.ceil(y_pos)) + # Fill matrix + # 
position_of_leds[i,:] = [x_pos,y_pos, 0] + position_of_leds[i,0] = int(np.ceil(k.pt[0])) # x positon + position_of_leds[i,1] = int(np.ceil(k.pt[1])) # y position + + + if draw_opencv: + # draw the keypoints on the original image + # cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS ensures the size of the circle corresponds to the size of blob + blobs = cv.drawKeypoints(image=image_bgr, keypoints=keypoints, color=(255, 255, 255), \ + outImage=np.array([]), flags= cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS) + + if show_opencv_window: + # cv.imshow("grayscale", image_gray) + cv.imshow("Detected", blobs) + return position_of_leds + + else: + print(f"No LEDs were detected") + return None + +def detect_position_of_all_LEDs_grayscale(image_gray, image_bgr, detector): + position_of_LEDs = detect_LED_positions_in_grayscale(image_gray, image_bgr, detector) + + if position_of_LEDs is not None: + return position_of_LEDs + else: + return None + +def get_color_of_leds(matrix_of_LEDs, image_bgr): + # is image_r[y_pos, x_pos] = image_bgr[y_pos,x_pos, 2] ? --> yes. No need to split the color channels. 
+ + offset = 2 # half of length from rectangle which is going to be used to determine the color around the middle point of the blob/led + # offset = 0 --> only the value from the middle point of the blob/led + # offset=1 --> 9 values, offset=2-->25 values + + for led in matrix_of_LEDs: + x_pos = led[0] # uint16 + y_pos = led[1] # uint16 + + # get values of color channels in region around middle point of blob/led: + # +1 at stop index, because it is not inclusive + region_around_blue_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 0] # uint8 + region_around_green_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 1] # uint8 + region_around_red_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 2] # uint8 + + # average of the values + # convert dtype to prevent integer overflow while addition + region_around_red_led = region_around_red_led.astype(np.uint16, copy=False) + region_around_green_led = region_around_green_led.astype(np.uint16, copy=False) + region_around_blue_led = region_around_blue_led.astype(np.uint16, copy=False) + # sum all elements in matrix and divide with number of elements + number_of_elements= region_around_blue_led.size + value_of_red_led = region_around_red_led.sum()/number_of_elements # float64, if not integer result + value_of_green_led = region_around_green_led.sum()/number_of_elements # float64, if not integer result + value_of_blue_led = region_around_blue_led.sum()/number_of_elements # float64, if not integer result + + # determine which leds are active: + # if value > threshold --> led is active + status_blue_led = False; status_green_led = False; status_red_led = False + if value_of_blue_led > threshold_color_detection: + status_blue_led = True + if value_of_green_led > threshold_color_detection: + status_green_led = True + if value_of_red_led > threshold_color_detection: + status_red_led = True + + # determine color by checking the cases: + # case 1: red + if 
status_blue_led==False and status_green_led==False and status_red_led==True: + color = color_number_red + # case 2: green + elif status_blue_led==False and status_green_led==True and status_red_led==False: + color = color_number_green + # case 3: blue + elif status_blue_led==True and status_green_led==False and status_red_led==False: + color = color_number_blue + # case 4: yellow = red + green + elif status_blue_led==False and status_green_led==True and status_red_led==True: + color = color_number_yellow + # case 5: magenta = red + blue + elif status_blue_led==True and status_green_led==False and status_red_led==True: + color = color_number_magenta + # case 6: cyan = green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==False: + color = color_number_cyan + # case 7: white = red + green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==True: + color = color_number_white + # case 8: led not active + # this case can not occur, because no inactive led can be detected from the implemented blob-algorithm in detect_LED_positions_in_grayscale + else: + color = color_number_off + + # fill matrix with color + led[2] = color # uint16 + + return matrix_of_LEDs + + +def detect_LEDs(image_bgr, detector): + # convert rgb to grayscale image + # start_m1 = time.perf_counter() + image_gray = convert_rgb_to_grayscale_average(image_bgr) + # end_m1 = time.perf_counter() + # time_processing = end_m1-start_m1 + # time_processing = time_processing*1000 + # time_processing = round(time_processing, 2) + # print(f'processing time conversion: {time_processing} ms') + + # get position of leds + position_of_LEDs = detect_position_of_all_LEDs_grayscale(image_gray=image_gray, image_bgr=image_bgr, detector=detector) + + #position_of_LEDs = None + + if position_of_LEDs is not None: + # determine color of leds and add to matrix + detected_LEDs = get_color_of_leds(position_of_LEDs, image_bgr) + return detected_LEDs + else: + return 
None + + +def lane_detection(image_bgr, detector): + # Detect LEDs + print(f"Detect LEDs and color:") + detected_LEDs = detect_LEDs(image_bgr, detector) + + if detected_LEDs is not None: + # Contruct lane + #print(f"_____________________________________") + # print("Contruct lane") + dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED = \ + construct_lane(detected_LEDs, image_bgr) + + # print result + if print_additional_info: + print(f"Detected LEDs relative to image-center(x0,y0):\n{detected_LEDs}") + return detected_LEDs + else: + return None + + + +# PiCamera +def get_frames_from_camera(detector): + # Initialise Camera + print('Initialise Camera...') + with picamera.PiCamera() as camera: + with PiRGBArray(camera) as output: + # Set camera settings + camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode + camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage! + camera.framerate = FRAMERATE + + camera.awb_mode = AWB_MODE + camera.awb_gains = AWB_GAINS + + camera.iso = ISO + camera.shutter_speed = SHUTTER_SPEED + # it was found that, you have to set the right shutter speed at the first initalisation of the current runtime of the program. + # The gains (analog, digital) will adjust to this set up. + # After the gains are fixed, they will never change! even if you change the shutter speed during the runtime. + # To get consistent brightness values, set the right shutter speed at initalisation once. 
+ + time.sleep(SLEEP_TIME) # wait for iso gains and digital_gain and analog_gain to settle before fixing the gains with exposure_mode = off + camera.exposure_mode = EXPOSURE_MODE + + time.sleep(1) # wait before applying brightness and contrast + camera.brightness = BRIGHTNESS + camera.contrast = CONTRAST + time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings + + # camera.start_preview() # show camera preview through PiCamera interface + # camera.annotate_frame_num=True # Controls whether the current frame number is drawn as an annotation. + + print('Start caputure...') + + for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)): + start_processing = time.perf_counter() + + framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1 + image_bgr = frame.array # raw NumPy array without JPEG encoding + + #cv.imshow("Current Frame", image) # display the image without text + output.truncate(0) # clear the stream for next frame + + # processing + lane_detection(image_bgr, detector) + + + # Only uncomment following code if you display the image. No errors if not commented, but higher fps if commented. + # if q is pressed, break from loop. 
+ pressed_key = cv.waitKey(2) & 0xff + if pressed_key == ord('q'): + break + end_processing = time.perf_counter() + time_processing = end_processing-start_processing + time_processing = time_processing*1000 + time_processing = round(time_processing, 2) + print(f'processing time: {time_processing} ms') + + + +# ---------------------------------------------------------------------------- +# main +def main(): + + # initialise parameters for blob detection once befor loop for performane + params_for_blob_detection = define_parameters_for_blob_detection() + detector = create_detector(params_for_blob_detection) + + # start capturing + get_frames_from_camera(detector) # start capture + + cv.destroyAllWindows() + print('Program finished') + + + +if __name__ == "__main__": + main() + + + + + + + + + diff --git a/90_ZielSW/02_With_Processing_Zielsoftware_findContours.py b/90_ZielSW/02_With_Processing_Zielsoftware_findContours.py new file mode 100644 index 0000000..f270af8 --- /dev/null +++ b/90_ZielSW/02_With_Processing_Zielsoftware_findContours.py @@ -0,0 +1,521 @@ +# Creation Date: 02.03.2022 +# Author: Kenan Gömek +# This code detects the lane with findContours. +# Quit program with 'q' if opencv windows is shows. Else use Ctrl+C. + + +import cv2 as cv +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os + +import numpy as np +import math as M + +# Define camera settings +SENSOR_MODE = 4 # corresponding sensor mode to resolution 1640x1232 +# OUTPUT_RESOLUTION = (416, 320) # (1640x1232)/4=(410,308) --> needs to be divisible by 16 --> 416x320 +OUTPUT_RESOLUTION = (192, 144) + +AWB_MODE = 'off' # Auto white balance mode +AWB_GAINS = (1.395, 1.15) # White Balance Gains to have colours read correctly: (red, blue). Int, floar or fraction are valid. +BRIGHTNESS = 25 # sets the brightness setting of the camera. default is 50. 
[0-100] + #the brighter, the brighter the LEDs and the higher the RGB values and vice versa! +CONTRAST = 100 # sets the contrast setting of the camera. The default value is 0. [-100 ... 100] + +SHUTTER_SPEED = 50 # [µs] + +ISO = 320 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 25 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. My default: 2 s + +# Parameters +pixels_per_mm = 32/24.25 #[px/mm] for 120 mm camera height for resolution: 192x144 +# pixels_per_mm = 71/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320 +# pixels_per_mm = 107/24.25 #[px/mm] for 120 mm camera height for resolution: 640x480 + +# Offset Camera Sensor in Scooty according to Scooty-KS +x_offset_camera_mm = 100 # [mm] +y_offset_camera_mm = 50 # [mm] +x_offset_camera_px = x_offset_camera_mm*pixels_per_mm # [px] +y_offset_camera_px = y_offset_camera_mm*pixels_per_mm # [px] + +# image parameters +image_height = OUTPUT_RESOLUTION[1] # shape [0] +image_width = OUTPUT_RESOLUTION[0]# shape[1] +# calculate center of image +[x_0, y_0] = np.array([image_width/2, image_height/2], dtype=np.uint16) + + +threshold_color_detection = 60 + # values under this will not be considered as active leds in each color channel + # see get_color_of_leds() + +# Parameters for Blob/LED Detection +minDiameter_mm = 3.75 # [mm] minimum diameter of detected blob/LED. Must be minimum >0 ! +# maxDiameter_mm = 9 # [mm] maximum diameter of detected blob/LED + +# Define color numbers to identify the color channels in the matrix with all detected LEDs. 
No string, because numpy array should stay uint16 +color_number_off = 0 +color_number_red = 1 +color_number_green = 2 +color_number_blue = 3 +color_number_yellow = 4 +color_number_magenta = 5 +color_number_cyan = 6 +color_number_white = 7 + +# Parameters for grayscale to binary conversion +binary_threshold = 15 # determined by testing and application + # the higher threshold is, the smaller the diameter of the led, because more high values are extracted +binary_maxval = 255 # values ofer threshold will be set to maxval + +# Parameters for line fitting for lane construction +param = 0 # not used for DIST_L2 +reps = 0.001 # Sufficient accuracy for the radius (distance between the coordinate origin and the line). +aeps = 0.001 # Sufficient accuracy for the angle. + +# Parameters for OpenCV +show_opencv_window = True # show opencv window +draw_opencv = True # draw lane and so on + +print_additional_info = True + +# calculations before start +# Filter blobs by Area --> not implemented anymore, because no need, because detection is good and less time for calculation needed +# more than good trade off! +minDiameter_px = minDiameter_mm*pixels_per_mm # [px] minimum diameter of detected blob/LED +# maxDiameter_px = maxDiameter_mm*pixels_per_mm # [px] maximum diameter of detected blob/LED +minArea_px2 = np.pi/4*minDiameter_px**2 # min Area of a blob in px^2 +# maxArea_px2 = np.pi/4*maxDiameter_px**2 +# minArea = minArea_px2 # min Area of a blob in px^2 +# params.maxArea = maxArea_px2 # max Area of a blob in px^2. 
# reasons for not filtering maxArea: motion blur + rolling shutter --> larger Area


def points_trafo(detected_LEDs, alpha_rad, dx, dy):
    """Transform LED points into the lane coordinate system (KS-LED).

    The points are translated by (dx, dy) relative to the image center
    (x_0, y_0), rotated by -alpha_rad (the sensor angle is clockwise,
    i.e. negative w.r.t. the mathematical convention), and returned
    sorted along the lane axis (x_2-axis of KS_LED).
    """
    # astype makes a signed copy, so the original matrix stays untouched
    # and the subtractions below cannot overflow the uint16 input dtype.
    transformed = detected_LEDs.copy().astype(np.int16)

    # Translation into the image-center frame.
    x_shifted = transformed[:, 0] - dx - x_0
    y_shifted = transformed[:, 1] - dy - y_0

    # Rotation by -alpha_rad; hoist the trig terms out of the two rows.
    cos_a = np.cos(-alpha_rad)
    sin_a = np.sin(-alpha_rad)
    transformed[:, 0] = cos_a * x_shifted - sin_a * y_shifted
    transformed[:, 1] = sin_a * x_shifted + cos_a * y_shifted

    # Sort the points along the lane: ascending x_2 coordinate (KS_LED).
    order = transformed[:, 0].argsort(kind='quicksort')
    return transformed[order]
def construct_lane(detected_LEDs, img_bgr):
    """Construct the lane from the detected LED positions.

    Fits a straight line through the LED centers, computes the foot of the
    perpendicular from the image center onto that line, converts the offsets
    into the Scooty coordinate system (camera offset applied) and into mm,
    and optionally draws the lane into img_bgr.

    Returns (dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED).
    """
    # get points
    # xy_pnts = detected_LEDs[:,0:2]
    # x_pnts = detected_LEDs[:,0]
    # y_pnts = detected_LEDs[:,1]

    # approach 2:
    # fit line through centers of LEDs in KS_0
    # DIST_L2: the simplest and the fastest least-squares method: the simple euclidean distance
    [dx, dy, x_2, y_2] = cv.fitLine(detected_LEDs[:,0:2], cv.DIST_L2, param, reps, aeps)
    # x2, y2: same as: mean_of_leds = np.mean([x_pnts, y_pnts], 1)

    alpha_rad = np.arctan2(dy, dx) # angle of the fitted line [rad]
    alpha = np.arctan2(dy, dx)*180/np.pi # angle of the fitted line [deg]
    # print(f"Lane: dx: {dx}, dy:{dy}, x2:{x_2}, y2:{y_2}, alpha:{alpha}°")
    if print_additional_info:
        alpha_print = alpha[0]
        alpha_print = float("{0:.2f}".format(alpha_print))
        print(f"Alpha: {alpha_print}°")

    # get smallest distance from a point to the line
    # Calculation after: Repetitorium Hoehere Mathematik, Wirth
    # line:  x = a + t*b
    # point: OP = p
    # d = abs(b x (p-a))/(abs(b))
    # info: np.array()[:,0] --> gets only array with 1 dimension with desired values
    p = np.array([x_0, y_0])
    a = np.array([x_2, y_2])[:,0]
    b = np.array([np.cos(alpha_rad), np.sin(alpha_rad)])[:,0] # direction vector of the line
    c = p-a

    # magnitude of a vector: np.linalg.norm(vec)
    cross= np.cross(b, c)
    d = np.linalg.norm(cross)/np.linalg.norm(b) # distance [px]
    #print(f"d: {round(d,2)}")

    # foot of the perpendicular (X_LED, Y_LED): projection of p onto the line
    t_0_dot = np.dot(c, b)
    t_0_norm = (np.linalg.norm(b)**2)
    t_0 = t_0_dot/t_0_norm
    [x_LED, y_LED] = (a+t_0*b)
    if print_additional_info:
        # convert float32, round and prepare for printing string
        x_LED_print = float("{0:.2f}".format(x_LED))
        y_LED_print = float("{0:.2f}".format(y_LED))
        print(f"x_LED: {x_LED_print} [px], y_LED: {y_LED_print} [px]")

    # offset (dx, dy) of the perpendicular foot relative to KS_0 (image center)
    dx_LED = x_LED - x_0
    dx_LED_mm = dx_LED*(1/pixels_per_mm)
    dy_LED = y_LED - y_0
    dy_LED_mm = dy_LED*(1/pixels_per_mm)
    if print_additional_info:
        # convert float32, round and prepare for printing string
        dx_LED_print = float("{0:.2f}".format(dx_LED))
        dy_LED_print = float("{0:.2f}".format(dy_LED))
        dx_LED_mm_print = float("{0:.2f}".format(dx_LED_mm))
        dy_LED_mm_print = float("{0:.2f}".format(dy_LED_mm))
        print(f"dx_LED: {dx_LED_print} [px] , dy_LED: {dy_LED_print} [px]")
        print(f"dx_LED: {dx_LED_mm_print} [mm] , dy_LED: {dy_LED_mm_print} [mm]")

    # offset (dx, dy) of the perpendicular foot relative to KS_Scooty (camera offset applied)
    # these are the values returned to the caller
    dx_LED_scooty = x_LED - x_0 + x_offset_camera_px
    dx_LED_scooty_mm = dx_LED_scooty*(1/pixels_per_mm)
    dy_LED_scooty = y_LED - y_0 + y_offset_camera_px
    dy_LED_scooty_mm = dy_LED_scooty*(1/pixels_per_mm)
    if print_additional_info:
        print(f"dx_LED_scooty: {round(dx_LED_scooty,2)} [px] , dy_LED_scooty: {round(dy_LED_scooty,2)} [px]")
        print(f"dx_LED_scooty: {round(dx_LED_scooty_mm,2)} [mm] , dy_LED_scooty: {round(dy_LED_scooty_mm,2)} [mm]")

    # Transform points to obtain the LED positions sorted along the lane.
    # With the unsorted detections a vertical LED strip can e.g. yield a wrong
    # pattern like 211323233 -> therefore continue with this sorted matrix.
    detected_LEDs_KS_LED = points_trafo(detected_LEDs, alpha_rad, dx_LED, dy_LED)
    if print_additional_info:
        print(f"Detected LEDs in KS_LED:(x2, y2):\n {detected_LEDs_KS_LED}")

    #-----------------------------------
    # draw useful lines and points

    # draw lane line
    if draw_opencv:
        pt_0 = (a+b*np.array([-300, -300])).astype(np.int32)
        pt_1 = (a+b*np.array([300, 300])).astype(np.int32)
        #print(f"pt_0: {pt_0}, pt_1: {pt_1}")
        cv.line(img_bgr, pt_0, pt_1, (255,255,255),1) # draw lane

        # draw dx dy
        cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # shortest distance from KS_0 to KS_LED --> perpendicular
        # cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_0)), (0,0,255), 2) # only dx
        # cv.line(img_bgr, (int(x_LED), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # only dy

        #draw additional points
        cv.circle(img_bgr, (int(x_2), int(y_2)), 5,(255,128,255),-1) #pink. Center of points
        #cv.putText(img_bgr, '(x2, y2)',(int(x_2)+5, int(y_2)-5), cv.FONT_HERSHEY_SIMPLEX, 2, (255,255,255), cv.LINE_AA)
        cv.circle(img_bgr, (int(x_LED), int(y_LED)), 5,(170,255,0),-1) # lime green. Foot of the perpendicular

    if show_opencv_window:
        cv.imshow("Lane", img_bgr)

    return dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED
def convert_rgb_to_grayscale_average(image_bgr):
    """Convert a BGR color image to grayscale.

    Algorithm: channel average, Y = (R+G+B)/3, truncated back to uint8.
    """
    # Sum the three channels in a wider dtype first: 3*255 = 765 would
    # overflow uint8, so widen to uint16 before adding.
    channel_sum = image_bgr.astype(np.uint16, copy=False).sum(axis=2)
    # Integer floor division gives exactly the same values as the original
    # float divide followed by uint8 truncation (operands are non-negative).
    return (channel_sum // 3).astype(np.uint8, copy=False)
def get_color_of_leds(matrix_of_LEDs, image_bgr):
    """Determine the color of each detected LED and write it into the matrix.

    For every LED row (x, y, color) the mean of each BGR channel is taken over
    a small square region around the LED center. A channel counts as active
    when its mean exceeds threshold_color_detection; the combination of active
    channels is mapped to one of the color_number_* constants and stored in
    column 2 of the row. Returns the (mutated) matrix.
    """
    # Half side length of the sampling square around the LED center:
    # offset = 0 --> only the center pixel, offset = 1 --> 9 values,
    # offset = 2 --> 25 values.
    offset = 1

    # (red_active, green_active, blue_active) --> color number.
    color_lookup = {
        (True,  False, False): color_number_red,
        (False, True,  False): color_number_green,
        (False, False, True ): color_number_blue,
        (True,  True,  False): color_number_yellow,    # red + green
        (True,  False, True ): color_number_magenta,   # red + blue
        (False, True,  True ): color_number_cyan,      # green + blue
        (True,  True,  True ): color_number_white,     # red + green + blue
        # "all inactive" cannot occur for a blob the contour detector found,
        # but keep a defined fallback anyway.
        (False, False, False): color_number_off,
    }

    for led in matrix_of_LEDs:
        x_pos = led[0] # uint16
        y_pos = led[1] # uint16

        # Slice the sampling square (+1 at the stop index: it is exclusive).
        # Widen to uint16 so summing uint8 pixels cannot overflow.
        region = image_bgr[y_pos-offset:y_pos+offset+1,
                           x_pos-offset:x_pos+offset+1, :].astype(np.uint16, copy=False)

        # Mean per channel: sum of all elements divided by the element count.
        n_px = region[:, :, 0].size
        mean_blue = region[:, :, 0].sum()/n_px   # float64 if not an integer result
        mean_green = region[:, :, 1].sum()/n_px
        mean_red = region[:, :, 2].sum()/n_px

        # A channel is "active" when its mean exceeds the detection threshold.
        active = (bool(mean_red > threshold_color_detection),
                  bool(mean_green > threshold_color_detection),
                  bool(mean_blue > threshold_color_detection))

        # Store the resolved color number in the matrix (stays uint16).
        led[2] = color_lookup[active]

    return matrix_of_LEDs
def get_position_of_LEDs_contours(image_gray, image_bgr):
    """Detect LED positions via contour analysis of the binarized image.

    Returns a (n, 3) uint16 matrix [x, y, color_number_off] of contour
    centroids whose area exceeds minArea_px2, or None when no contours or
    no sufficiently large contours were found. Optionally draws/shows
    intermediate results (draw_opencv / show_opencv_window).
    """
    # create binary image
    ret, image_binary = cv.threshold(image_gray, binary_threshold, binary_maxval, cv.THRESH_BINARY)

    # find contours
    contour_retrieval_algorithm = cv.RETR_EXTERNAL # retrieves only the extreme outer contours
    contours, hierarchy = cv.findContours(image_binary, contour_retrieval_algorithm, cv.CHAIN_APPROX_SIMPLE)

    # analyse contours
    number_of_detected_contours = len(contours)
    if number_of_detected_contours != 0:
        # centroid of contours
        # Pre-allocate an empty (0, 3) matrix; rows are appended via vstack below.
        number_of_rows = 0
        number_of_columns = 3
        position_of_leds = np.zeros((number_of_rows, number_of_columns), dtype=np.uint16) #empty: []

        if draw_opencv:
            image_bgr_contours = image_bgr.copy() # create copy of image
            # copy is needed to draw on, because else the color of
            # the drawn circle would be detected as LED color later

        number_of_detected_LEDs = 0
        for i, cnt in enumerate(contours):
            M = cv.moments(cnt)
            area = cv.contourArea(cnt)
            # diameter = 2*np.sqrt(area/np.pi)
            # diameter_mm = diameter*(1/pixels_per_mm)
            # print(f"area: {area} [px^2], diameter: {diameter} [px], diamter: {diameter_mm} [mm]")

            # Filter contours by area. minimum Area needs to be at least >0 !
            if area >minArea_px2:
                number_of_detected_LEDs += 1
                # prevent zero division when computing the centroid
                if M['m00']==0:
                    cx = 0
                    cy = 0
                else:
                    cx = int(M['m10']/M['m00'])
                    cy = int(M['m01']/M['m00'])
                #print(cx, cy)
                # add positions to matrix
                x_pos = int(cx) # x position
                y_pos = int(cy) # y position
                position_of_leds = np.vstack((position_of_leds, \
                    np.array([x_pos, y_pos, color_number_off], dtype=np.uint16))) # vstack: row wise

                # draw centroids
                if draw_opencv:
                    radius = 2
                    color = (255,255,255)
                    thickness = -1 # filled
                    cv.circle(image_bgr_contours,(cx,cy), radius, color, thickness)

        if number_of_detected_LEDs != 0:
            if print_additional_info:
                print(f"detected LEDs: {number_of_detected_LEDs}")

            if draw_opencv:
                # draw contours
                contours_to_pass = -1 # pass all contours
                color_of_contour = (255,255,255)
                line_thickness = 1
                cv.drawContours(image_bgr_contours, contours, contours_to_pass, color_of_contour, line_thickness)

            if show_opencv_window:
                cv.imshow("binary", image_binary)
                cv.imshow("Contours", image_bgr_contours)

            return position_of_leds
        else:
            if print_additional_info:
                print(f"No LEDs were detected")
            return None

    else:
        if print_additional_info:
            print(f"No contours were detected")
        return None

def detect_position_of_LEDs(image_bgr):
    """Convert the frame to grayscale and return the detected LED positions.

    Returns the (n, 3) position matrix from get_position_of_LEDs_contours,
    or None when nothing was detected.
    """
    # convert rgb to grayscale
    image_gray = convert_rgb_to_grayscale_average(image_bgr)
    if show_opencv_window:
        cv.imshow("grayscale", image_gray)

    # get position of leds
    position_of_LEDs = get_position_of_LEDs_contours(image_gray, image_bgr)
    # position_of_LEDs = None

    return position_of_LEDs



def lane_detection(image_bgr):
    """Run the full pipeline on one frame: detect LEDs, classify their colors,
    construct the lane. Returns the detected LED matrix or None."""
    # Detect LEDs
    if print_additional_info:
        print(f"Detect LEDs and color:")
    position_of_LEDs = detect_position_of_LEDs(image_bgr)
    # detected_LEDs = None # only that following code is not being executet for development

    # Get color of leds
    if position_of_LEDs is not None:
        detected_LEDs = get_color_of_leds(position_of_LEDs, image_bgr)
        # print result
        if print_additional_info:
            print(f"Detected LEDs relative to image center (x0,y0):\n{detected_LEDs}")
    else:
        detected_LEDs = None


    # Construct lane
    if detected_LEDs is not None:
        if print_additional_info:
            print("\nContruct lane with consideration of camera offset:")
        dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED = \
            construct_lane(detected_LEDs, image_bgr)
        return detected_LEDs
    else:
        return None
# PiCamera
def get_frames_from_camera():
    """Capture frames from the PiCamera video port and run lane_detection on each.

    Configures the camera (sensor mode, resolution, white balance, ISO,
    shutter speed, fixed gains), then loops over capture_continuous until
    'q' is pressed in an OpenCV window. Runs forever otherwise; no return value.
    """
    # Initialise Camera
    print('Initialise Camera...')
    with picamera.PiCamera() as camera:
        with PiRGBArray(camera) as output:
            # Set camera settings
            camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode
            camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage!
            camera.framerate = FRAMERATE

            camera.awb_mode = AWB_MODE
            camera.awb_gains = AWB_GAINS

            camera.iso = ISO
            camera.shutter_speed = SHUTTER_SPEED
            # it was found that you have to set the right shutter speed at the first initialisation of the current runtime of the program.
            # The gains (analog, digital) will adjust to this set up.
            # After the gains are fixed, they will never change! even if you change the shutter speed during the runtime.
            # To get consistent brightness values, set the right shutter speed at initialisation once.

            time.sleep(SLEEP_TIME) # wait for iso gains and digital_gain and analog_gain to settle before fixing the gains with exposure_mode = off
            camera.exposure_mode = EXPOSURE_MODE

            time.sleep(1) # wait before applying brightness and contrast
            camera.brightness = BRIGHTNESS
            camera.contrast = CONTRAST
            time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings

            # camera.start_preview() # show camera preview through PiCamera interface
            # camera.annotate_frame_num=True # Controls whether the current frame number is drawn as an annotation.

            print('Start caputure...')

            for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)):
                start_processing = time.perf_counter()

                framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1
                image_bgr = frame.array # raw NumPy array without JPEG encoding

                #cv.imshow("Current Frame", image) # display the image without text
                output.truncate(0) # clear the stream for next frame

                # processing
                lane_detection(image_bgr)


                # Only uncomment following code if you display the image. No errors if not commented, but higher fps if commented.
                # if q is pressed, break from loop.
                pressed_key = cv.waitKey(2) & 0xff
                if pressed_key == ord('q'):
                    break
                end_processing = time.perf_counter()
                time_processing = end_processing-start_processing
                time_processing = time_processing*1000 # convert seconds to milliseconds
                time_processing = round(time_processing, 2)
                print(f'processing time: {time_processing} ms')



# ----------------------------------------------------------------------------
# main
def main():
    """Entry point: start the capture/processing loop and clean up on exit."""

    # start capturing
    get_frames_from_camera() # start capture

    cv.destroyAllWindows()
    print('Program finished')



if __name__ == "__main__":
    main()
zPey%H$&{O|a&5HOiG;mQa=boa9mp&FN~(8PmwS!rH-12r&Kz~= z-Wmwy7=&cL3Rr78MZUpvECoG)zPwRo$GpJ&8HW%t!@&`mx&%mY)yIr3L0={C|XLtqK$A4bbW)b$$$InB{4E`-Ar&n=i<-71NXD5QAmPPt^(rC#L7OOB~}n?!*j@RSKBG^ zkd$!(z{Gr8eV&JwS9!vZ28i#rSWCiC(6~wKTuXxzp6s0=X($}xMeR#AdyrjbZ)fk4 z#3bFRTv{V(iW`1pNe!-(icVh&S0arwe8Pc7&BF`A;7#(==#x=jGk7QinH5wzEe)!u zXU{x}9Z&I{kIz4Z=MUkOKAB2FA8<4bMq8 zX-9qWqL)n_P$XeRd0-hA>nG2=_0_0hy&O(+?_r1TE*`iT^baHau?DPI{~aVx&0mud z!2p1t5C8xo&@ygTjIOp0mIk)AmcQy)iHd@43KNn~M$Ox2w*oy-RuP%vK>1Qdb`hES zR^d>*ZE)H?lR9%0m6vmFA@lj&Jj-Ad+@B-+=Vxuz{saDB zM+aVfTzBEM#c=6hM)ERA??2Xhx?es{Kgt%JN)DBR0R$sgrV_bsy=5dd^u0+eX*#On zi@G3!EVhp|6toUv6qW4x;rx5bnNV$lql)VF25ys)xxVKX{$Ron$;QLYESFL}CMYGb z^((GeVyhK1QUU7;wKqYa|8cBL*185&co7<~?1{$h`K}%OmFhFai7V*zq$)b8q(vhe zCE(OGH}YA`FLI(7=_Jzdf-QI8(@pv!9$&TMAh$aCxY{5r77X9FppP1*s)|r++>=|T zHd&tbZ6>L&2f4=^O|L<;Db#0Nc$Tr4BNGNY>0R}Ly|ldD`A#^!S%B$Y zivmRd0~Fe(#7Im>q}j1H)2hrcBCRehB>H{loF7Yy#5k4e6)?6xbXXMFO_)`4jqZo_ z8hTPqt7$2iGL)(xgxVh()LsQT6BhtaNHt@XXD{afe_xtY6|VO~e)q?R@J1F9=ft*| z5nF{N<;2Wp6==eFW{*hw66wOGV2I(}g_tr|h3t(uCOQ~$`LR|}{R(Vu;HJtkC|_}v z_$0c34-M+d*XSwqzV+QZ^X^bg9K_D)?Zwyf{CZxE;M2|ffXOr;kln(LX~cYYd&0NDb6u;L%2TJ4%dV#Xyz=t40a~J@-0eF)?}zsyO{}ke4?W))6+3WW;s9{ z8wtE`dwPTJ%HIpgbMk`jCTNoArpgRw@6w!al(KR(s+DT$&0ednHs+2!${2cA{|t>n zv!UMR(8K?Oc#$L2gFlJqoX^{AuPrb#{yTR3_~H_-fg?@80)qp90s+R(KWfasN6^3O z4G6Fu1V-Th?X5&nMydzc4#B<#Gr6X?pd&9jF_N6993w&v)=)1~5Wn}iS;wVm(pH<3 zVg$1daz7vKce($7umy&C+e!N)6b0M^)#{x4o$cuLC>WH%X(@rf*gI5|lcVF}Q&@y# zN7P2Xm;t(UG>*LoDst@LoO}}Do5raiR*NwvXBG>`3H%{oz8$XF`TP{9bW-YHQ(sS% zOl`9J)+n@m2a)0LPB?ju4K2X~?+%zem?5Tp=oGnwx@vhgk#Qfh?96$B+r=xEA}IR6 z>lNfg6g(hC=t~{rxSO5Sta85TB=~cPx_$YIZusWhWM{B{b{+Vn|J!8{W4vM!Cz+bSfLLRlxx6Jho8o)MWJPQV<;{ z(IOLjEDCA+#R*d45BE$?>XIPi8dhkmXHd-Yj=@jX5X*6_-#iJ!8;CpuY`?8nxK5X@yD2JSSvwzsl2zyIEngH^4K;AY+HAMO zqJ}5>mEJ(NWNy_`D#(D4v^%ucTd0X;JcD>Gk*G(*D+p;u7z`duB}EJoRQJK1R=T_!La1 z^~L8h6kYyG=9D2Uc7s^9PvV=PoTu*18R>w^Zp##4zFAT#toyy1UtXj}fPR7C0gkqT zR9p}Dx35ZG845httAmDKSz+PM8n=pGNTxz?74Li6*i#j+2m^MEK6~3DK@2jb3-d+b 
zm1s&bzUd;(6~4m$@|@r96PLIevBZQVs{3vi1cvRcm`<@d_uP+4vAnMM-Q?XVci%J< z!MJI^>*@aBL(c-|;N5&i!tkkEQyRj{e))R!`23i2^>P2Yz^~lX83Jt$iQGH5|oNstt!&B)Ba5ePhtJJ-^XU;=W||dw1?|Z&}q6F^KP9p^78H3{J;k0-|A{<|Z zh9$*F=aU#hd?d2{MT=^VTGGOnh|@f>;)N_6-+X|?l*ruLE1{(ng=6OI-Bb+lR`SB{ zjS?K=ZRG}y^YL)Pvoc$G+CyBC;fj3jB&UZ1Kh2OoU ztsR4sGBmlkOO)Cuy<_=>KKQVL`c8@LrHB)|Eyi_z+yHcVnW-~AWw9orrQIFI zP1M7{;a4scN~$77!N`J4-n_}x%J+!Fy$Gp_g!@)>?JlrqT_!vuXv{S~P_2nl7EB(_ z*~}2_QNU}nf0t<@l$TN~~ z-Sg=|Xx^4b_J?E8ds($5d&_yA`Od>Y840fQhI_GY$zkV)kB4Xq@mmqiT`R4XZ;z() zlj$M${`eJgj|4{zl}KT&7T@JEu`eyH{FL0-4AuR?S4^&tScUdC*t2Gv_P@-6N6$Db z#(iGquEtK%GbPh*`f0fj22)?SQKc_fje3ZKivh#UTPgvrdl_OfhMkR%`IJ<3jqF3G z6->ggA<%z2lcBlVt|Q|41sR>&>O*tZ6YU91z@fSJZPI9eimW&PMfX`>JE(kRI4&#X zCeg}+5`Z^1{0w+* z=bU^MJ-O2rg!bsEA_kB}kq9V?m0-99QONfqXB(v=?}v;SQnkF3#11woKZ9?DcuwHg z)h-Ol~u4KLfNOtangyq21@@Dw1D!pP)|zVzc*tT=(_$xfgN$`F8ky*u70qAD5Rf z%1GMs!%Aa63RdzLegaclQ|3@)W;%JeHu^G1Bo2Hx&o4jV=etdy2#qos*)}?_qC0QN z#^U|eJlfDHAubY`mkg6W0P2l&H3G$koovo|G>;lQq0$zgBIGT|+$yv_SD>T}Q}scN zHlyW%j@pNQAwYlq{)|CY>iDf9LNze`!*Y1H>)6w28IK5QrYMimGbCtRmN>mCvE5=+o5Afsx)9lwju68WeWTuP@U7NCEwT8j0g`o)s#y%cZ zWzRyUGy`DmQGOerb8~ECrVCMX3wi@J^!4HLpb}#eGhrn;qGZ z3*$mf_p9K}L52ERGuzp$tV`Klza{9@owc}GS`rx!2Fj>2snMo0*~~y$pvZDpHu-9~ zQtj9l1=>gJSadZO&Ween2EUB+7k%6>ROv%JyZwOUlc60g^L*ELu}A2D<6!4^aY;5( zW!%A(VhIULxJFAvS>ID61ZlgmWb75vp7aM*_L{WwXEG^;Dt&MzaYpBY2WruxVV6EC zX;YgWrMj+EAa}SgfYWoG!QDOCCDCQNG-w@>a0P=^e;v5OeASLKj<%l^;~4`h6Q=wW z5D1mZ8Apcy3Bgwho>c`WyJ7Z-2#S!tk!@h8Ulbw6h5~!s#Es;p#*&_8>XDwxs|$}lpxQ#eFj;q4m$y5}M?iDC?bze-1d_k96Az>9E zckb*kEcExG411hfUk`#JnwEP$Hq-XOfvh5Ix-n2L+^0J;W)vCkuujtLOAD#&l$c{q zBeD-#mJCVE=zW)}Ut+sZJYKRa4G^pyLrEBEvSlJRn;9B|vxvE@gOeI%(2%kC)Ic;r zf_xUTY!Xo+x}|OEQ_-we>)@|ePOGw69EtyQ55?vKzBL&fyH1X-m50*CW(0-d{kUk) zfO`?jgVBqFT$XAq%5{dyc<`ja8$zQ)i>3S=bgrTyP_J+ZA&|ocWuepl$rlDSEF@MV z%GV6L3mHutJwsFZ=Nx5%k@t{`byp0HQigI-Xv5QvV)6Ow&K43<8l}GXf=!L5e%|T< z0nC*( zKKJI8-~6KYuM~PfyNcaT-+s4`(Vqi8ugxmy`K{+dzbstnpJ z1E*e?2mDAav>H~Nb;Q&mr%+H)pzmTpTc=QVkv2)9oM`F~+FhvzOc 
zv#V~g#ltF%WtK{Hu8~mz*(SqzKjsTa7d6#>y^V6as$4}1%wX%lmsSQPIH7YI7al>P zm&)#A@L+z?;|j3~2K6_Du;in_x4C1ahgR968=UB-J;?c;GH>p~nCdjN8sImvic&Ut zo2qXnw|~}Ly0uce1rZd_Y2h7QqW0P0V(YCxHJKro)*yVhbcS6o-4^ryBYjF1jGraw zv8sw@Ml2zfs03}*{@s~V($?nl*LW+&?ATe;yB}7t(Oa8d+sWjqoP;%%>=1p(ZD!@_ zp}tzBW0)z$JXd?27H2Y)talif7Tdkkw-(idm;z`ez7ZIEz9qvh?LxYN(|MZ}%FcvV zgj=VX0agftV(s{-nY?>#Ats&1cn~^#(1NyHq>i?HDLpy_!ILIz+S4+Z`NvOy4c_0(#8o}M#bN<_Q)D1`gZ9s0 zG1GT2Hd1nOFt;)N9Wz$ZlxJSBJ(1{0=jW~j?OCEuWXMQ_H zO>HzGYTtBn74JrtA@c@qHdsfWG3-ui_&7!lvAIN+0Zmy1pVL&X=`?8yAmh3oLbQf< z1w%u+T>}f=57-*3Ovs~awF^^CXfU*sWk_Li@4ew;&nF|<0pOdxS7 zByZ0+zv?CFU4hJ8TQ)Ilxz(Qr1Z-Za7BW2yVrX*^P&6$aC3nQ<2N1_7}*)x#e^nu5B4hL<{3Np1x`qbouol!AI#us31T@R>K^UIvl`F1V|UkX ze3vcdCr=@+2|_g8OcUsr9YajBiVM#r`gVazj-wScXnLL~{b`1_WH4D-$*=uUYUr?) zUlhn*u)2ME7Aqs53#0h<7A3yAGYvk5;ZBqkSy*4`Le?M~cfRCeoJKqV9I4?2 z04V=q0Xv`*IT<@B89O=s@+QVVJ~J~InA@Ca$c5m}W53PGuI4gVT1whY%S%(|1i^GtFB9D{zxF8NDaIgzS61DWa$*JW|9O zrREE_aGES%-V{bSk6hem@VhQVK$jcKa~c-I8rmpP2gwX?mGu z*{HSQ$K1s=SonUAs#ouu1^ZYSzfZK7%+1*o2}&}*Br=EJ=Tf8gd>u$8wbjx-_q9=e&T&CFl0a=%)r^xa;AU#} zzp?sSMi$IM6%{d5V%hAo=p=svOPL#bT){8{C#{L=R<}@$$TNN`RJ3=UX=Y+3IDm0F z;gF`gfL|G zsm62m^z>sOT6@%Iah;H{fp)rtMLERwkq}N>rSXQd2>PxrB4KH2XX46;p=A8*Vb*$H zy#gEz8|1*1RFijmA8{i2sTUy#8bSG=K)2hpGEk-Eemiyzv<)MQO8%jkS7h5MjEy=~ zTq{biZ6&YnM5ok*&rKNF8?$fN@GTKU=+17s!jMR3a5CPL-d2;q)nq!ydK#g}Zy$Z8 z2YS6b2u#pQLM&cX#zI_uFWi%rI9{iwX1Rc6|UPm4Bz;EVY`OeNtL$w4KT z*>XO^jBZhl8y~S9a|+2FHzq4Ewk4D15aDg6xFCeyaqqQl6*4n#LbP!`32z_QZMOGjYlu z!FD+r#v)X0h}1h1#m>27m{&!lfF49AWT&{?DYI?}aAB+-=cM*5K1)pRH{@4f88W6} zKN5xEtaRCoNM#DeCbc>^d*FT`b$~94Ak*h(SqsEMp&&)Z48CknkEb79n zN}*|KMS3hvFKf!ftk{I388@YDsePnn-OG-VjmnF4H|4!)ST@7Fp#=%nw}@UB=AMqK zXRs$N4oDwCbbWznBG6VY87VG6j}+0^lk7KW4RyCiC{Poy(0OO+7(^Pdsa%e8d0VSW zf*#g)2<9@aTA7|SoMTnJ_9K4Ri(5=@maYWuX+DrJlAa3I@jI<37qe-Bu~JwC#A>{@ zW)w+j4U~w*$}rZGPjkIlZ6ohwpN^^P+G4KSqys%^N||||U0u9(a>rFo8=qGC)?~bK z{-VTWf`zs4^|0VgI7QNz1Dai&a4l;B-FoS9rz&38il&e<-idgv#5$R3%QLkm+(2s| zhfVVx5TSy6M+v9ON}Y*RWfX^g{&f$vQFdJmoRmYsr@ZXw5dHG#m5}`NcO2ius_Y)L 
zZayU0Thys({hgC_qsC@(OMse8nYd$eb|p>+u6Y+Z04M53vA zeJ)VsO;^8mSiDPLxaVyDH|Xa!|8c*|UdHfU+>UeFhxrb6KT_|EOgpJg%1l~!1L<`@ z+lMZz8HVY0p8R#a=beVuc9wSk@sIb`8$VicE`niRIWO`G`fiq`3H5Ex9rJySoqV9Y z5bCr%JXY3>`zZS#f;$q>pKA*;HdrC?qh`^3AbUNJX|%QlWX;5Woy zG3=zWeES}5)Lzveo^Y71GdfE_-t+<+kqJ4JA8eUkkJMgKH+bK@;XY8p@I|U=Lz=uW zPk40L(%iWE*?MbFLVDpQK9+C$KvcfScdmoC-K<1K5QZ{)2QIF`Z^(%~upDT9YiHi! zhg<>S3xnRkk=^t1l3o%!E#|m%DU6JXSO@HHcx}l98sc5qY3zeK8?CCO2DpAam&uLq z*p|N+Zwk5t9U#~n9!7PAR8+62pXue>qz?S#HP`d$(EUfTM^9fCR}UZGDc62I zw#V+tL<+|@`@zX{uav&aDyC%o5u#2i3yyWj^f?`97M`DYdpqn7Jjr%v+>Xztv79uo z)e?mGZ--a0+x!wH9?z5#BLr^}Q`-;hNanhG9vt>oas|cqX=CpDbiFSl*L>w9?>J*c zEs@K?v}aNV0zj;~ixUE@Hk>UvSjr{2%5}UCJAo_4sKzb6(Z$OOT3m|``4}+?j@A!0 ziPay-W;1d$lgqX%3dAhU2UL>!zf$sUpx4rWIy7ei-Db#zDA`>~e4xHNipPd(a>b_!MJH+g4TCMnx&LfDz405LQ%d@(0 zfQn@5lY#E+MT&X~w!rtbeZ3DRF4;;OR}%%h%T4o46w+!82n-eTv4^<04)C~i3k&DN z6nho(W;ZYORg-nAFbrlOS>qvuzmIww{w#7@V65A8piS&qL0|p2sabUrtp6?H#f}4E z_!6nRq{H*@vTa{sL@Lrk$E~GLNIM;*hME7m9r7_(a5an%!E)s-1wL}QsR9p@hmgL% z)1icswBlPwPUJ^Wii_)IoY&;WmbOIXjce4ls~ou-o^j{l6^>L?yvQAg61sv=WAJ!n zHBmimzh5VbC?GzjUH4ZSq8s`mncvuT5Z1Uan@1()egC!{o^J6=Py~(`04Em6z*S)* zTSIvVTRTTaLt6*qe{uu=w-^lUlD%S9CA*ohg2vySXYI7eOuaA(E&C1eO!#dGU(%7S z*Hux_OVlifyMwJInm5lPhzv+vx4A#Iep>1B}IiW@A zRxvGIzA;Dtk~z;ga4ZBYKy7^JCn(R{_Z@TXIsR}h$j8regTn`VF0~)kr4Pu+rlX`) zo%#;Yal_Y>*E7;=;hN!{;~o;Q`iZfFD=B5MvV7n}C_m15WVDB#;QrmLgsBhXEDh+0 zOh8Bo>E9hu-_GuTeh75He{QLM5wJ?>tEW-+Rz-aNsLbtmn~oqeY+rF;@f!URDEtq&29NZL>V54%SmJ zA34_(B`CA_vX-gjKtq$SljHt19-+0LRmBnvA%RDJeglQZo#qE*pidHQ1+)3=WVVnH zvB_ZD6&IsOonIiY2{Bg)1`o5%Iva)1$&#?bdLO)oB<}q^BC;rXqos0?-BTxP-TIA} z*~UgoB|Db68pl2zJ*Ra05xVdbxF=sP0hZ_iazP+GSMc@>^`@Oe`t6uEb~47Z-xtH1 z>8JI>Li%oeq&Dl@mLGH6A!?T*c=bMXKOl9?oLUDRAbrASHOIxWe8I-I5i|GatDkB1 zkJD!2czB&(fMm3P&?i7Z>45;v-*d13^$7oU{2L~=ywrax_)k{ozXX3B3xIL*ZyeD- z1^;Bq{UtgATqyj5H}|LTe^Q115(NODV15h#{}6}%)bl5Q(l=Ob6-B6 literal 0 HcmV?d00001 diff --git a/90_ZielSW/Final/PAP.pdf b/90_ZielSW/Final/PAP.pdf new file mode 100644 index 
0000000000000000000000000000000000000000..ec05d8c8e14ba79f4541e82d42ab095d311c53f0 GIT binary patch literal 30829 zcmV)XK&`(eP((&8F)lX>C4qr~feH#SG9WM@Z(?c+JUj|7Ol59obZ9XkGc__aI0`RD zX>4?5av(28Y+-a|L}g=dWMv93Ol59obZ8(lFfcJS3O+sxb98cLVQmU{ob0^`cvMB! z0D9`K-M6>fOL|YbJDu)?bV48rgg_+C55ce>OVnjh_bktG7b#NOy zix7|*7IQ%SfQmo>m_ECtbj63iDF7hdnA=o4ernx}TbKVEpkxw&^=AuL&R-*%gv$UpH$H!S z;l}mBCk`IF89+V-Agx-wX35GC4>@B1uB8Bm#FF_fYrq2`tj|bWS+e5N#f#UEuL6kw z9bnQcOBb!&q}o5b2*BI~pw}&Jp1bmjU@zqm)m z{_*&ntY5Ku;ry2_co#d*J%`T^ten4TjWk>TBtG+FI=E{7%H}F=-fwVlr?Jd8YgV_c zPZq-M_`HffU$d@x&3zpoeF6~1@fb^h01rA)zzSkA2^OHVY2==QDtG~8;6Q~qTn8Y! z#7{xQG!N`b=F>WV;KKLi(Y@1PoC@DO`ORf2%NcMrvK;Ay#aG>wzw;tX)xUK<9eZ&9 zd-;Vl{_{QW^?dVW&k_}MGcl>}tAXdYk{#F!op_g6h86p?nEx{@=4?8VGe`nQFM*cL zY;w-jaX}nM5O#@w>77Z+q1|cdwO(_Fe2)Wdo4upio0^EUdE_;}yANL5qUYIKUO{}Ls$t3NQle#8dll~I@CCUi;GlqZsXO=D1mTGmA zb)RjU{T=%|j;|eGJMVEV_tbh<`tI;|`a84d1|ARI5ZadeI{oD>mof&(RsPq*;U0x6 zXrQ?8I8yNc!Aod?d1+j%h1LZlqd6cUKJ(kXapQLbtvcD;Xb6|$$DV&G#5KZAiMi~*@SW3es!~n+NtSmzw zUjb8L4%9+3rdu#)HLOBe7!HG>5{BS2nqCd-Q3lpvnUNXq1z{HEEP)M(&&OOUK~wSB zLc~{NU8`X+zBlN5rTX801m7sdHip75#OAQxIH&qjocSV znVwjP_(H_j;InmoZ3X*AFa*jFi}s}|posN@j&co_4Pv|Vu^n2+LewZ)*~yq%%BVdF z+grparj3izVi_CDiXZLIVn&Z3^4c;S(F&HU&C0+rYW1v6jWozw*?>EWd^E2GzUk z`#3E<4*GmO_F@^n*@9F6aX)x4c*S3WM~EW-Lh?flAJMpXS@I+LoW>lkiqM@3fb#zY zSq4wQL3oCo!ndA;eb5De02fNe-Kdd%hLR_tO#K0Fz!{o_k})2BMm)(bD1rM>LhggZ zSoVCl3igAOxRW2l)v$&C3%<1lIVcx-ZU*-FCURbK1L}=q!ZkSC=ix}!kk({f^5*2N z$p=xB@8kcF>;VHx^FsU`PJSl-HTica#y4(*JK-3)RsR&OZ0AEOmb?eYe;3~l@l?I`-pNr%A z4g4OCk}&x#w^}rcrD9yXEcptqr$b>b_V$E+9+E z736wy3%QdVCP&Ff+!$^ScQN-lzm#9g|5_M}zu7{IaE*AKc%$@DZ(Z+my?^fgGFh6u z4rU@hY{uT-hO0pr^2rf+1AoWhT_O?#G2<^tLS!zv9Di4lo5=lSH+h6~VJk<;yX0f? 
z1^GAmmO};1Nt}-hak=;lbL+TExSw%%b4T!Zl>3zXns@QJe2gE)SMl}yYV6hZ{0{s* z#lItXg(E@|>0Bz_F76a}i;s!Vh^Hi@{1YAMUi$8Vp2D8Dd!cu0@9n)EywqNVT?axJ-wJjD9`D{=s- z^g8(*`)uO;tj~kFVcb}5I{q%?nz^;y4(?X2i+hdxhL?E*Z{f4}LVhCO$T#!r`AhlR z`2_zG{}%r)e}?}Ke@Q_R0z$436=K3f;UZy!aIf&8@S)fsz9_ygDbh;mI;mUwr#xI9 zEzgi=%8l}Nd5`>xt_ivGS$GQeX0EB^IKP>%;h%z=xiZ1S{fYY%^7BQoh@Z+Kr*gZ= zR_;pD#pQ{cq*2@`G7U}%QKa?r+)nNcH;SK1rjXgNoExguCOL#hF{%=tg_FVo9NV9; z=1r23T*ZAZ8KHwPJBIv@A0ou~7vW9*7?Fkh;BSILT;wG8OMV7&!LNnUVjYC|yWv;- zT5=^k#nk{P-|Ds@A5SB=ro$XkO1|Qgz;n}(iz@i{;2OA?`zxG8Io=9CCyRt7a5I#V zE8s(T7^Py6xJoLNvdAB~WkMTgCtbh^k5GGslNE`s5W@<8F8#w!?0+rT22w=s74kZRC8sTf$hBALU>YH6CH;vb6M5<)%dQP2%6A>+-9@pu7&rX}}-7GfUu@^vnp> zVf~outKVueQ8H;4PKk1M^%?bx`v0PSF;zd35+t3Vr(^}YY93I(;q2;Zp2=*c1C)i^YspN(QqL*J$jLd0+{w z--z{#pa`k~Q!=*3M(K9g`k5Ig#(Z}5w6Chws#>sqqiO;xV3*W^C9qACY{vSHlEaKC zol!O$XwQ63M?W(o;ncIM2dvoUaM-L^&}6fK9a{j~z!Da`WeeTRNKOl;^k$vKL`RwJ zarW1*P#4MB7q)57%CggeTI?$79XrrFvxJJbPDMhPWsiy}#iCQ$ECqe;eqlk9+0U+? z_BG4p%EE!_)hrv@lUcwV75$ghinn5={4V=!m@=q(94U@G5}j0O*}r!i%@ri}kyds25Wfm&xm* zJu5AU_6rM6O!g1dj}?dV@vQ&%1CW=WpNCaie0e#Lk1fzUvqTQwmK-F6&s3a^DVxudjim6w$e~63!f3bS zf_nCaZRQsi<|833+4(^z#1`nCSt5wHN;l3qrlCPh*|XJPHm!eLWof^#IGx{nPrD)j z#Y2YBf!cG6Be)|j26|_fh~RCHAR%%sl@UxibL|mE#VOSz`h~?qw=8F0g#e|)hnM0m zF)LbH2*YvTP4CPSg?MKbB0om0)rFYyM6(Jbv}dy?j%EzVE}WPlKTtnbTv1h3QHBKz zD+Z&jT>Qs*txSWtoWRYOtIQ=wA-2(+7rLi~a)>s!{XU9)=C$`uzcU$%6~;^qa7b?4X4 zoilyfn0WQ*bE-y-tQ=7>th}sr=#Y}Z#YM5gK?V8INM1NM6b$5K`+Z)I+vUu1*loB% z;39{1fKHY~f#(Pm)r2QB1ryPxgb)o+np8~F;rW;`KO>_l5yY$s1Ir|WO{`dOV97X^ zT>QO~ajj&$uOv}}RZvx26s!pc6Nkr#gWY7_%sRwx8Xv9?CQdSZD#Ld$+=O^2gf9eZ z+)Kv?6Qn6vlbEn^XM(-4LA^;ieK%S9$MwN1dLw;>Uy%no8}+?M*f1lfX32I)xX zsI>7lhihm~)AC?KA08WC+P1t2r^efsfLWJ@I=tTazT|Q6)&$$;)P+NdYG1g1{&;`8 z1KMU?+Ubc0Jp-Q<7qzQajj-)zOA0ZXGLh!KXAEP-DK=$RA7u&cQTSYBh(vH<5PMP= z#xaecpm_weEgXTx@Tw>HR$>v($g+ffTvMAmlIGL*6JkUS2iyJ)$N}M#pAO8KpUROU z>c4@alu!Ct4WFm+L@bslETk+VkHa~|K8|MTVZ}uoySZ?9jT*!V$pCK9bP>$G$b7Wg_KUJ|QMa82~dq4blix%5eE`bppGO=0BZF6O<@O6a0} 
zw?%c@YnF~o5a)mKxLJEXWp;SV%z1Uenzp7CZKuo`kkX!y=zEsJ683R*ypKy^oR4Qu zktrMc3e!ZLF(E|omKakn>XvoLl`Mw@CnVISNgAwILZScgrS9Y@`WB1&UrTi}F)}tV zJ!(LDK=+JoJa$-!a#QBaYim;mJVRE~8ay`@A;-gzZ*a z?IXb$n$t86F%HJIhsoBN?Qybo_Pn}%XiNmR&Z+C*2sf^2Y<+tkKC9b@Hbk7|ax|A_ z(nOFZU@;g7^y~RTkFPX^=i5MSiTQKGC`$UDoGEp6E{+@m0oGoqnoA`dYg@ z-P$^AjUsL7<`N5kur=&`f?Pl$yIJ?!heKFvA6IFu(YA)RdB~5UL^f?W)elUX{q?K{ z?8zOhCxk6d3(+Q6Ol2uZl^m+c@Z@%GT8zbn#ch+rHH)wW4Y=qI!^sK-7uC~JVJb9~ z@qe@s$tXk@5>|PeIx3wasgx$ZZHXlVlS})O6KFsiB{Eo36F3$ovZ2IsUt&dltgpy? zI_5T%?U7W#N3xeE($Iv<&csCP!ui-mTyM@@7{-jrm=Ua7piz)6)NRyqS~wqHqf}0{ zlvouTP$lXFf+`9tqQgnF&ImTu2b)lbkeNtqUoatJ99)d%Q+Phr8#A==%s}Oj(fqdA z_%6_C^Ce`|XN%`IhpBQ+P=42_Px~lf?_hRa0(@<4VdNp~Vq^jq#_FR9DS9rA@Lm%O z&u^xd-C}CkHESjZc9&6y*5<1Thw8Bi7h$wS!l9ropkQGewHq6oVn_*Vo2@Na*@gn& zsM1dZ7S=YQUc>Fngdm&h`94e`5zeKldaOt*sE^Qs_&&R%iIuVTMmf@-!|v5Ft+0;O z%8ZD_jC2v1-H5D>B{8rl< zglVr~PJMa-Kz?tJkgYQ^HDN=-Hf7cYJ|t{$`?5?d^Ydz2nN(K`Md|;_23XJ zybC_Zki@`aKV8_W3#MbZ2*Y*^J26PC9L-sc;c5&AF`Qyg;=HTl)-vq7tK&u%buM2~ z%98W7WJ4oMcAj6a#ZzZ$@%VGKVk5O;L(8@7!DF>JzetO1kjqBoFbV!(=TTqi{*?bKEcn zxX-wg+7s?%r`cM%bIfG!U3e11K@2?iF8OY7!o7*u zo7|h&wZC*!RF>{zNUS7<1R^QKv($kfWkF)5uXz^SLk@%J|%l$WUZI zcSqH-KsWbZXD}AnHD(C+3M4Ra*ppYV4L4TfNdW0-+K#~tm!X1ci}9nrCYF;4DJ-0yL(AK(sie_-)T-197c zk^3Es|A=u8#xHQscjN?Mi~*kmzN2ESVqAjHMeetqdA2}ujFmfxvyQa}YzYGq^L##iO_1 ziAf5gH{XgF1<`A_Ax1&;vdxH55M8kmF$$uKmLo<%blydXQ4pOz2QdWQ+`W7A@&gsq zFDAh;7VZ+H(j`cxOOQ&JfWTcsfAFBI0 zSCQ7uq_v8)UPxMFq}5Mab4Y8Pv_4HnASqf&ylX(RGVUg=FOb$JNNWpejgr;~Y0V?8 zK~fR#=0Y9kma(XYMV(`)&|!SeXw*IyE`$^fAk#g1=h z(1Nf8Lp6qrFkFq{a|{ye&gU37Se@$PldNO3(`qBjYF?#N1ZaY=o+>TVdz2j?C-Q)%CBOvFIN{EsO#v`DGrH6rEN8@tvbB|$M+TpLo*DM{;qQHJ)rf=;DeCA>s3Zh>?JUd5cp_5@z>-w^m?Nw*Gj4n(^NM*D-T=sy35z!NX9qMPx-T^)g| zXtXDArGH}JVn2J(tUb7}1ygZLU{-WqU=r3h-oGFaZ^1hE1giZP2CB4D!{|GE0zGE*7R4$T3azM_OeX>Jm)2TYM&Ztv# zI-R5wbes-!j_%~~c#M9q=#bcVi;_U0z;KnLkbWOXRUgrDFc}hdehN2b_E<6{acCh- zSrAN|nH}yX3U2(w@K}N?D@>U)HZdYLrCUzUN>s$AB;*+v)U^|Gb3NuJxUIN#nN!zI 
zk~C|JkGfd)0U_2cH~DB>u;r%udT={8R=cZhqpg(_#{U3lO2JtF{y?sVin5`GTCP8NZMGlScJ*`}oo1i<((9pC3KGzJ5wKsb%FL zNdAfCkahma%IR>GrsW{0%hAf+rIm}|`&cfIMp#a-hX^Yd(d$_`fzWd8EqOKL+w<~R z8CMWmSeX`AFtf}H5iAplurkh8c!8C9!P!d7Bu29$em@q;@v|bt3w~C_PrR&1ZGVxH zRFNC{irm0j;z@rQzgEU{JYB|g9LvQ1)2n%GEJiv<)h}$IZmgzoO*4k3#El!5x~WAT zY+qQP@^3|(7A#y!W3)-@!_DIp3&Z1s?V}og;Bf}Wf^mu=#Ua4(|HC%O?UE;mN^(RN7785?<+|2s%V~GpDK9v z{BKCbfQ)oXLR&=dssmB4fkQix6*OBUWECyN{ z=$O-C{e~2lqQ&}Dti6e^w4_PWcTr-+`Y5)ZRm;d3Yp5Gz=7;koFa~YsAs829T#Ruk z#-)5o+!hV+Ttz^ys|XmB@c~&HA4u0)AEVBH9(Y18k>?4AmaAL*)NlyC(j@`)X|k7o z50LDkr}pd{HUmN<7!jJlgwV{O1jg%Md)WR8+-_Jz@PjYt!WB_kc}{ew({5XYi@*LgkRAUH-r($MHof+ zCFDaGVFBbJ90XB>g$%|ZKY0d5$D=RgI*(J%sGHB=_QfH;gqI0i-`91B$l z$07VDjE8d&*1%|l6QDYI3MN7v;UpM?@LU*+a595aU>w5pU_8R9Fd_LlOk;35Ohh;X zCLx>&=O#acSq#pG$q47bl;la63sVu+GI%~rLs$pXlb=F8%s@B~;YqjvW+L7Ivk^AJ z9E2A#coED^{sWqz7U6t2AK?OopTI(>N4N;)A#8>V5H5y>e-$i5xSGK=uqgQtG4O76#YD(&PuQ0hS@$2+I*(g7AIV z1Q#Q`6jma<3|1k$oWUz#HNq=l4Z^EnZSn+M&0s6EAlwY=5&i@=Bmm$0XE>FG#ZEyv`8{taK*@o~PxCyR8cr#p$a64>9xC7yFxCMTK z@K(46;ZNb(2!9S+@#!51kHMXA1H!wYE%`S50&YZjH*7SAgL~mY zgx&BE!hP^C!u{2{guh|%AUuZf5Im0Xx9|kQXW&-|pM@urufcPW zK=?aoPreGzGx&SxK==pfOuhntgf4_Hz*7icggppfg1rb2LpQ=d!9IjX7(5F55&jtt zAbc5qo%{>@1>wu^3j7A)t8ftEYj7y}XLy~#zrr&J-+*Tkz6sAE{2PORhu# zHvB$$6pk@?9R8R*0`I^J2;YSl5x&RZ33v(N`*0ZH2k<9^AHvb(pWq_~KZZXe`~+T3 z9)^FwUl4xE;7ND|;b-t#@+J5jUPpL}!GFSE5q<%0Ap94+iSRUo|AxOOUxYL87Q!## zZG>MTd;z|OV+g;2;|RZncMyKZU=O^P{3G_AL0M%^#AvD`al1N>-2y8$2$Gv zAJOR_pQY13`VpP};aNKUga1IMpXk%+>jvob_kKjDzsGd?duQqNcbQIqH$$ht%XIp? 
zOsBt_q0`?vOQ#=aI{i4)>Bs*E>hw4MKho*1{I5>`U!DFxPN)A*x6}XU>hvG8)Bjhe z|F2H}VV(Ym{~zdd0$23Ty`#&N5>(|jTD zDj(!|{&DL)+uSkrOk>ZPlj@n1P+e7BHFOAR^mT$oCg85F9x}9^L^*4D#qf$UNycB6 zO31O>{xo&ofz6lZpA(LeSntdO77ZAlXnj6yS;7R%is3A z`C0E5y*k?Ob#PJ`DeS=&v7Yri0IT%e*E)WcC`LA>8hTp(?f^&_JRnaBJ*4~U)H{Y2X{%VthBVOqy)Pbv4)0)tYy|v zR>;bc-nC@=qhx#U+LPqg-8A0ayNdp+1oHp{BT_I2*Ti43zLae9-st15@Lu8L7I>R| z+{MQEW^SHwj+q;79&hG+9-SyuDiq3r3v*T} z7rK_`sTT&V#JW68$AILdggkvxtx|CyJy4DCOjY$sTV=@!tE-X{7cXdR^u>)2?dVhZSF!1#6@r94Y{XjpkFC=eM>g)p{c{)DGV36ht!h~>bcyYKzf303x z=G`E!(YF|`5w9^w`A$9W&M(YyX6whOxF(^+L*lr0>Jdj9s1HXQh{WUWTD{(G%PA}z zGzk3JIUL~vIXPC)x$)iJzIWZY-lyMnqn%-7?<%$KsAQx(Ep;b9h({<>OE${Ul4LX@ z)X|?ZN9+8ndN zpdpIUghkRrXs@OA3&KN@LLFAh*+OZTOQRGxyp*@4f*r*1|C7K(!v8o4Q z)ijo#dJ@6ul!KV!D2+W;G$8LFD^*pNV5YUwYO5qRjgzoCUx(gSh?4Qt3Y(2U4q{y^ zKu59>=~zKH#!vWCNvsmN2?eFp>7?94p_vPlP-(^R;mBLja0s7ie1`b#Ty*z~EsK|I z*?xZOZ@2aSl$^79#N;UxesXW`-^j`fqvPg{oO9c@-Y3NU_4}GHe7G$CKt#EDj4!BEvjDN_qkMA=5-gFdBWdAGMYPMxtv$Od^slZz34+bWhY8~fi z)q0kS7iV8?yU}(Rf2a8_|8DXix7+%v*^XAVS9Pdff$L7b-BD19Z61mT3o2C$5W#29 zG4j3~L9a$FlOY-;gm?q4^rXAelkQ4Qx>D2YyxpE z5jl;j^knr(P2llS<64}m7+L3wEA(H^v8bw1@OAU0T`L5=(T+&R3L~%06;5_pTKHX( z5YElRIkDxT)Vt&;)d5_V!{#h29WHb|b58HG@1N{_{TEM?anJmn6pcDq_RLQod9PvR z2iHCDF2@c1{M+A>RWH9!=C&Vyv3S?5_xFB&%hSCdw;iB5@m>IN9?HE1=S5rTY{38- zr_-d*gebf;vFq zr7WkzX?OCHk9UQL&5XdU^M{C2v4#NWFBU7rYcsBo?V$d!sHLRLg)5~k%fX?HMM9;+ zH6hDK$-0+({n)&#>eshSyX=<3TYB3`IO>9w6r`qLYFe@3ihklOO@^y_yQpJY zOB4y{)1(y6(pnlhadp?i{X*V%_wuptUgfV9_dn5F{dlkG2}+yYNSiH48$JAm(I%JC zX1h%K$b+Q0JIDnM9OpIs-=s&}zzAYYsjuF%sG*E%_)%({fa(+>8V68fFQdfwNMQRu z1$OgqefK_>=$S!BKk|v5#n{t>NPx{acAm_jWAEhBgX7Z(C#OcqBQnOsB$y<>?vrLn z<5*NQYJ$ll(pU6bP1sq{6uzgH(v%aC-#SN}Q_kYDaxE?%qQwP+v^X5m;_MtPc6(VI zFEpv;L2-xpq{#C@)Qj73|CRuu1md_+KZeG>XbWQI4&YgF1DgPMYGOW3Pt0fOi8&Kj z*~A3d#N02uR-d61#x>M+v;rZG^=sEv^)#jrVykdEn2JtcXtV1W^{W}-%d7{V5%+&H z0qI;oRt`C*?H@&s|>dO#N?v^4r5?mwWdW<65rbzxdA6{lfCM zsqCT!D$b&Mjhn;hn#1Q-RO<92^Oa%JaAl%$K7SqmIxlZj-r(Or6-^fuwzL$8+k`gp 
zQQ;Gvs0d`3@S4Es={HDvTd155Qh>{Tr?JvTb2~AuOT_|>vsrwo)8?f4Z^y@Zu+2#1 z9G%|dIY%gTyT_t=DeD!zP7!%Q2#ShB6fva>%92BtB}D;|69|fy4s;6783+j7+{n0P zh)8yc3GtA4TolB~I+|w~B9oxpDko%K?&hxRG^7R7kXAniC2dz2g8yC9@{4{Yyc=gQ zrFGc4*VDN61^VnwKuOsT)CHkikG;ylaXr-h*6?Y*bsb*`rPBp9LdWtD3iZrCZap)H;l)kuC zpNlju@>B{mMATU+(J3(4Ds{PzN+Is3q*L-#1ktR@jBhdOY0YHqy2co+ zMG5v{m69azI2DBcI|c%$32t?@l^RjDN^4n4hDnIvEn9DYhWjg#d+y|Zl7yZ!r^Nj| zgSgjwe)ZiQ+y|fZ3K}m8g~*E{JgDup2`BJ5BItqwYG&@2Psv=LR^`)^$M>xr{C~Wb zo;k~gk$%XA`Jho-I_WY2e0rJGL@qV9P!^8rWzJIU2nJ)0 z#cT$bTeG2T0~RV<#=P5-Uuk>&&RSbk_cShB+Rooxt7E%soyzu0l)r3rMs(hwm*DMn zo6-VhN>96~PoPZyU1RKQ)6|nGSN%KgwT&!msBbg zS*a@rH)C8VWJSEsz~=_tI(5aZ^`G_rv3D!E{J_19=MBBK_Xcsl+19*g<U`PE&DCeuP9sr`WfF6?2O9 zIXO|SyYZNDuNM0`H;l{*Q?2S{V z-HzOU(h0@PFTs?eN`hntX(DUL4kDo$nTQ*UbAopJ*txV}kyGBe0q8ZxHXCzKYwkdAH4_tYYdNYqBPH+{>N7C9SL zq!#+RZ4Q-8&(MVzq&&ALnCY1Efu!9b%n=g9TSc4u1O=xlrK~PWIp)Wzr$*)4{zaZ- zCYYM@?Ik`^>~j{2Yym7bICGdhI{EZI5k<0uS)Vx^vp%UL=H*AD!^)|<)agulPO=;> zr{H1^Ff>i`q78dZ7yaSN)sN1e(J-oa#mr?(uKMC<4}5){xZm=`BZ>PeN02w_S}(iq z+k1ZB`>#96>*}hT&L7({zGg|-H9uDIK=bO~E?V}|X7i0VZ@yr9S=q$}qn_Hh;YiE+ zkEwTMa`Gd=FN}r)sNi#%UMjvHB{Dt=sQ&vIRn9rxqF%QKa_B>j!$MbZ-bDkpwFK4|~pd0TbIdm$mlr?QkrHYJA zhfyw>PO*S1;Eoj)mRAauMdu2WifVQBvBkP&v5m&-jej(LZTdQ9ttdAWK`qHEca?@5 z?u!Pk9>fjumzb-~+s!-8Nwc`q{G|DFGjD!6`HjX}duz?c)TzQJ>2rqDafhl(bInGo zaLtm%f-su$@0i_;!=9=&oBh12n|rj=U8HFgbFI5bQO3@7-{$c9W#}6V)Z{Cre%>%> zzB(U}zt3cBQ@`rtbx3}fT3`&dAhE?D63V0Ono>TWYQ{#MKzTKfo`Dz)3^~mxn@4v( z2HNdB)?rM4$Ci$~ZtjA(IX@0jH5!Z#i9Q(>E9u6LN&zaR*EHm56uZ=F7ady3ER&pY z`H;#(mE5jMQt85G_R`8-I?cVMuXRP-xh1+3RZ7xwSE9*X+*(_bcThSaaRI4X;v`2( z8mT*5LsIn&u9eKp|7~RcZ#VOQ8|la-wgO6K=KofiFMQ~TzGaWDd}}p%jK$E_iJf6v zgtUi(*+8+_`|nfTb^^r}lP9PVo$>NoP1{v6O-E&x2`jb3VXbd3@UfWDXdcwf7t=-4 zpRe$x4Cf7Qm&@;9#~V1!D6pj^Whk}GC5^U;joK*@wpFCkTETw9%Ja3;j?o+wUuRa9 z!|4h~d3x-DXc_gW4dbg8?OXojfr%}XhF$#T5>i&P_3BHr6Yf<YjO zcy4t=>B?nG?~i6*Gk3ycTc&NE<}jPQc@bq*@j3Nt-D_{05}!YL@TODWZaHTJd8@#$ z7ECRf)O5l0b1tEBd>zU$wWd|b=4?!kFDIhWk|z!mYeca+kO**rK(4>cKi0n{up=Oi 
zv{yN+yyrR3^ET=lO?8$==Y`(ox)r9SmQ~JG-a~;mjBmQ$^t@~T)b*+7z3k(GWWW;? zODrXhA!4;9E}mzZAubl*%>K9VjcQc0%!0&$&o9YD$?}^G^suDg9B^l7{=S2gcy6sb z@2G*OhB%tjtp-8!%Nm%>8r&(D!I`wn;8faWaJq{M0V2nl5E$4ZL0QSLo>;jQ6IwMU z+*4~U16ztiL0}K#cEmKhn^|3De44{~jpyT@T0X*YhX}50yGVkZB0_*vlj(#f)P+HX z7Lo7b*;Kp;<8H#dImAY}n=tMs^h}eA6)WnbT_$eYU&4-kh$m-aMW*A8G48smsnn)t zdALL|`#wV;D97W?j#VgEslen9I&ldC2}$K4~4?W z!MMVp8K^=yH=oA^ri@vF$i1ZamtE`H7d*K(-uuO`4_wTZ&%I^i;}31v_&9Dm|Gj4gHKjM#Rr*WK}@!J&&Czv9pa?^N|ILtB3e?B)y zndO-6U&1XCoAnDFP5wiHSHxHCZ+YIgzwh|m^$*YcjIW%5K+H>-af+96raYL-GYxi* zppy+GK6DB6rfs)p*ouqfB6B>j33et3+AB z%6QhwHjk7AtY$j@R_gVm6K|y)LFeB3bh>xQp6hLCMQcm5p-oe?dunZYa@uB=RgE=A z)gF|O$j9WQECl3gdAiKYIkaCgQ?GIk?Sae~P-aVFP}O=+Wc zjf=WA9Q_=}k7$1G>Q^@`f90B{+eF=ihwkIjZ5NpoYsqOC<|A&dE5=6##!0HMS)Q=oS^=&;} zaT6}=%u+RJo?(e1nzz$LE7j*0mM2WaKJ>9kp~r`B1f>Qao- ze{7{oYdbYhj^`DYr}-t#W|To_fmp2^%wQouV<9tRAwOdtCu1aJp@9mkF~*JxPGBCg z)0wpyThx=(X?=oyNPwHzwN->J#iqWIw_Bt zR;Zv3Q8_!WDuSJ{nP55cffXJ!&ck@yU2x_2g0Ez7Eg(1h$rq@4$GB;Q;7Xl08qgpUfah){ z%(2utbVNCGFva|V@*O}tajJqmaTBT_a4_#f0dqa%Ql$h%DeuD+6Ua817tsh;0#f~9gCgG)DhI{bh=8dVv6Eq>|`VLM^sU_-^ogAB|EZo zs8&fYOFFa-RlUszR>|TcF%m1>yqUyMg{<>E1&Y&7zmX~KTER<-g`FuY+V>=}vQq6d zBRs5(eolhA19wDpLzGpP6}EQl+-dh-v+=wJ--yy#%WCY8;Y2+ml^nh&qbGm4@Mt$%YAzTDi`! 
z)bNe+?=17+a8dr~@aX*W@^=*NDw2nXh7YPPnqZg^su?sVG-uE)yxu~o>$Rv}m4D&%6Tkju%Q;RI`| z5G^Iqw6+Sl=;nb6voy_wXXFR5`o-6njr&eh76<_(p4MCJ7_s#Ic7;( zgn*^mGTp*k(rU$$TB!!tT9{g~cq!Xia%rzDe%d1oTbV3uWwLl;Me9R!af(gr*DPyO z`*AgMePRm~(>Q0UP{o{}TIK|enICK|Zp?jg1%(v1+Frcy8@{$W!Vdu z?yzgNy=EMtjzSHBEa#%<;LKXjM-{?p>y~R+`f$jev3Msaxs))`CZ0VvmX~Ta3PI%)pCZk zg_}mCG$JF?S7}eip0ytrGjJh|W2HihE%-Ddr4bpCjGmhM4rKdrF#{L+HpmsV`r(vm zrt5d;cj*)QL;7R-Q}kb(4(QkDTlG6rS;zHBy%Nw9AhN*mdWnBJc_>vwVJ&|Z0ZEhu zMUo>T2s?#cLP9ts92cZR!YP3RAt)ThlptspIyYCKu2~nA9f9o&1ci1%V2V;mD@uWy z{*-S8YR@Z_cZF%Xi8C_%wakF;p{KN{4P$K0qk(egx=cUOz{|H+P(%shgqly@!`fGj z!^*OFG?=z_b#)2so4EcUzwlqt^GDvMotU7Ej7ET^o5(UkEF*p`=177d3Q~oBq98`3Vx>;Ggx{dN$-gJb4@)F0MdgUDQW~MJ zHcdCx3-wZ+T(7@UxKzAT|Gf0F@S1c&`dI!_`dXJ|QxuWs1&$tm>UEgV>vR$A$dea@ zh;|IBAbSY(97+%+S*J4?KoPo$C9W3*=7`GG(NtCtts_om&IsAN1C6f!^Uk#TGib!x zTWg2_mww8npK^U)2XPVHIRdGM=_uj!h?(S?*^x~TIbZ1rvZy_z9p}`e^^bc1 zNd3*blswcCV#h`uP8z-4p|YbD3)|n1B zefiUNpH`eS)@x3ieith{KC&n%?E(lo1!qxWBEwFQT$Sn5(=tNBL`KOWkAB>{oE&_+ z_r9ye{ofrRiQbJpi@3mLy%(SyU4weJf_;aw`E0G*_bJH_=-BV+(!QcUpgF&%Av0<{ zOCRp76`9&&Uz}8o(7rq=AEw1ahH7!H_GL;ulI1G5hyih@cuW+gBRD1U0dbAkDken% zNuqFkL^BjCQifs{T2ecK9D-BypEU^r{q|jewEANwQ_Poo^#D>7ZMQJLnw>G-3ib?T zrVcp8>&LAbcy2R1(;HNGT64Nn?l@DeJF>$^e5@s3UrRpL5~e?k zTWL!^c1rG3XiGk-RhiGvXQXZV(jIXiHnj)1vT(YQa9TZwQ|j4K8;+2pfC_pSZ~>r7 znrBKn`z$5ReAVDg?U%kw@0U);9c;s-vA`va?K4 z7gZ&Z3|Za$kUc95vYwZ65banYX-04&X1eEjo^+?D z+oGj1j5nMlrm-m6j(B+6oa5nEq9`Wg3EWMZso<_L9&x&4GSp_YT|)o5P+$VfG5|KB zr*xoY^fDPSB1y$cYfD{)Kq&5!Zc9o^?0JoRV)iA_{?o~6hVP11+*aoOm<&fN(<;-} z#(br975E;jVSBHsmdrTmF~!8fKJT28V0OZ&rEGDgkiji5- z;pItgJ#dwo|D~*?UMJ&FX2JkR+B6L=ty0&JIOLaH`m~7s9$ffV*c4y5GD)HK7%m-$ zqAg#bJbFQdI+(1_@g{YvZqE6#=bo@9c(f;kWk5mSRdZ%gvJfT;N~`7b72J}3m67*b zD+o=-{Iv)k@S4DEQJk#3I^VC=Vt$yap>xLO8?o?mAi~dW9|6HhEcR+7lbiv&A}xcp zdKG7$YaH&j0-RCl8i`aS)~;Tr-6E)7JfBoSWJQ!Uu)%zWypC)V)xl>uGcAv06*Z-H zm9!@gAap9SDsqT>M~p8@5uE?^fo&IU?`QXGo9Yi`tCr*IuhnqOIeOiktC1U2j<=^% z^D3h0;im)cA!~+BuLiER3D5re^sz-)di4GYtMC~yBk%^UG)8(_d2T^c{0e*7qr-0C 
zPOhfA8G}-aRj}d(n9x?XNy8&1n>2-EM*~^VUen@i{3kSWT|>!36Uo634~oOVM3&lX zI)Z2^g{l!hdL`Pz^P;!dwj#aKly;!i_-$6-goUO$VI~gKoA`K#yW^qNkdh) zQH)r0SfD7E#gPPQY&(um+O6U6HA!REf%QDA2$Fj=2s#4R@yRu_`L%pnJsaag@ zsCB`C{1LHF{j9OobC7A9Il_i337VxxP&`y03_-LA(QoLK{novUx7t3{<7$*caq8ZE*_et2w@fv!O z5W|b7P!y+f)K7Z+t38zTi1(*ZN&aks;Hs=i=!f^q1f|zPkukPY8E|y_l;>Az1kUxo z`e1ZB5EA8vVXvCXww%hvLs)}M+X{V3ll=|>T56RsnZsc!OsPGw4~x!5OE|p>>|xRa z=Fx<`?Se&u%^@TD(Hoe^R=eZ*>>*aP7Prm2?)d-~3)j>5BlTt1_+Oot$;>utw_)I} z?Jd`>k!+h+FgTv)y$agy8}_qB>K_~TBh1Q#jM*4U`efz2{rqio(y7!`s7p}sTjlfT z52PJRHbB}U(3b6dvuGDH2wK)N(ifW5%u@Ci?ueCUv-Ren9-4se-_y0TJ>eRR$)I6k zw?8TZ5pWA5HxHQB{~m+Bku^5$1g^Qd;{aN7yIACPYbGCzE_Go4ULaIifU?n;CJ2cZ%6~S!T(23_405 zR-`dRaNnx-S?JUwa-D#LOp{6bG2wJVHWBXV9)zp1N3>g@uSrM9{RieMc?6)QKk$s8pFX^K~fV}$qjG7;_9 zm##psM5kMJ>!y~rxDKt62#Pg~yw#qayz>*tX+}1_*Z6=Z9j^0(d= z_Qs|}%Ayb#x1FU6A5v~UD@@sI`M{@GLJu>tfHLMKHLQsSIwey*;5v}Bj0{W**foTR zG`2S8kd{I-b&D#JXVo>X1B`TbB@0as=KE_3JCd+0O%`gvs{~bZa(0!GC8l(B0ba$R z{vz3Oy^YQZcZPR^Fwy0C%EBEz&jghPLR0lB%7Rk_w-k*2~7*c;HQ$X|UBHdx8v z2|c`nU@75E{6P`OyT@>3Bu{wRhO$kxf@)Hj+Dlid*|#gR`ZI^12oM%SF+ZT-$w!9uq*di zpdYUiJTxXd@}Si-J-@@{uaEOwaS5w%f&QAxgTqNH;Wdz(qTPT-q(im+eE^1PeA05` zL_;$r5*usB^pZ;G6t+%s83D@mYn6~52{|aJC!T-IsYb1D{$xCWdO}F8yc|>j=Qv(; z#z{XFn}j&~kW$M{52L<5P7cU`w$b_EdWNN0B~#e`$YIqQ>_$2%%O7uQs+0r}J-!w` z=UjFs0Cvf3dLN@`qs~>}2Mz1fQMfcZQNrqZ)y&Mq#ug%48)ZOGc5|Bg#o<1%6Y*FV zJ+v47QG}MY*IySDo>cj!yf_wTC!ZalS`gfXuBF!9z1f>aB$m@GY_r&;zm84nRo>5S zX}N@_?UNk%IvO_kzmGhzjTtz{mo#uh{k>w^VXzW<6G(ID?Vd~1D_~=MN}38?7h>_% zZtifb^C}u2g57%KK5(dGUR!J_aAcDgvDGo-5Jz)RZ#{{g6BNsKj1!>QOFt^2+AGWu zyQZLR8R{6%xFPNGUrsfKG)Sk2Oh24avxq!-hX&*N=_Tu<=%Yz{G9Mh!J#9PFAbpCm zM!;p2z6#~uoU!0AVZ$`Z*qap69)j^`Vui&_<@Fh zm=+b+x4}`~W{=e8v!WI!+1}D4^wYE`;#aXsXp+Bfd>wZ}wry2((wCI4E&H&weIc#^FBsnEX zX(Y8eYOlu&g5_Azq=t2d4eG-!wuF6?^~5$%K(MQ&x9d0j{SN%VGlPb60X84zZB4?N@X1UAp(NDx0US(@@+Ir8!Jv2|vb z21%BSu7u4||IA!JI=~F4d8FVPvLPiT5CM3aiKPT$2LcFd0SrIIMra~@>_H}}*(rFD zVwy^BJpsvbDJQK!nwX*(-|wPD04?^^VW0+uk~l<3r^+3c9OUC#7zHF*#B&SkG@Iuz 
zN2f(qJ{hg-?l9;sNlD}y)f;c-N^th=pUFcQ)hm;i#|FsKp_6O z;2VY@Ddp3k*0Rz>Z4j%hg~6O?3ALEbND<|c@1SYnHa=8lpG9ftA^gd!D z?kAs-VBHvX`U=2R8@AytBC@;!OeUPw zs6eXyp+kRgq% z4gWvnxNqqn_;Zl-jQ@s2_rF2UF)}dz2k@L$yp(kS9emI;cTmbDArN$+Ait*84foeXPTqfxg93}=yR($7YP0gP<5hbv@OE1Hkf_ZblD>ZMP9 zxS{)0B@~sJpagA=_ob&^qQ)w7l&7W~<-YsUmvkyyzf4(7Hga?;%D$!}pvuUBa#uIV z3Vq#$9so?+RMv+Xj&eyJd$f=y0Z!PHC_jD?Ss@eOShV25_-IOXn0(=dZ{rr>q2M6u zJylkOMzqKUSMs1D^`At1vNd@NzCC$+!nk%r3iKR>NODJ`Q*u(JfmoF+80g?@&FFZ6zB0J{rH@TQ7@3 zE@gT=UXOd$KSLS!V5<5@+zH;1t=%FS)TKY+(Ew*EX72+&oqwBc2(H%Q+!LQK6L5>1 z66dRmy|$r1@MdUgoZAXQ?qJfF-6`RN1Re^VEKgtAHbK7b?%ox)KiQvOZtgNvNTYJ- zgj%$3cK}_Yus3XLxOR!8!$}sYgX{(lHP(e&qcAnd z2`!BN*28RYU6TY~?n4zBJ)pM4%@mfb984&>51=rDGCai#Hj|lY%}_%(h@E>|w90G> zcRk7~Tw)nf2tVjIYE&09lg^INnkZL-6|ER;rjB~&jpQMcf>%eLf`T8_8sRwV_a%Mi z>>T{<3RWiUF@GNp7X&&asp&TT4@qn$C3;0@cIby((RVk^MANdG#ozwPB6W zB2a{w(4RLzsf!!V&XB0_&kM~fvn7cF$QY5Nqn8}Tw85fgA;Ef1tx7d5T_G!isRr1* zH7kt94AYEy{(2y7KfLmOak)Tm{1mv{O+JHd@Shb?<)_xLe8`qe41jm`7c+QYqF3Sq zLs*t2w8p}E01FaAm^;oLTcNX}t0Uw$1(SZ1g9q6XGbk9BUxp|F)s>`_nz`eZ%GBId zV?HXk|DhNG@@uPH#8KLx37~T+qNe%8#NZM5 zce69d$=pW$dCtNv--wm4vcg_74V&G<&iVl_37!fzq*z^`Eqn(2U$-P#RAd9qx@V_T zWA?=Yk&gAE_-?0RsXLZvesn{z*OQzDD!!xcccCCB6U++<*Tk)!+w-fF`sR*d8=VsR z_BLKI3`>U{^hPV*r1`EqF%Xy_l|o<;^bjXS7j?kxeIPJfqx+JQ?=ZSTJdBe5nD7Uy z79rX%ppgFVGQ42xy;_H#pyBj;k;<%;pgHvV!3Y#{80Hj5y1^kvsRRqZ8lHYc&-j^@ znUt2m3&hn`*TNeFKD(s)4#JJ;juqgNk1MDX`>j;9@TNFi~lQ zI20B`&TH@rF9#&YeAdxADeP3(hpZk(n}v|{;#%5fxyM1BO`K5q-s)i|jno=Loqz|F zb5*+yx|!1L5coq$0OLF<`SXs*D-roKx&ED_n5p-DoZ!G4vq1#b*qRW*L*2IKi`QDDill-x`dL^W(t4aRu*I_DMY$>%tHQ0f`PBQ0%oX4Z8u9 zE&i``e-5i)N#0M$^`B^r@CHoeMyA7PTS~$GTGm~@2n;ui{Hhx0;iYouK!Wp7)&##4 z{i3Zv$5D)Bg^N@uyj!F(r~(-FzJ5L`#a4@#vg-4bv;Yw|qi`b{)iN|)6HW0pU$_35 zo_7tbyLdRpzm*hF$%7urNnpyBtCml951zCXVNGN_m4P;@hzL5Dh(o@5Zj zm23TdyVdabs<5+1drgU4^e1DU{L0OuU=~U~D zLcr)W^3dm2<&A&lQ@eB8uf2`XEuqRoguXl7s>O+O+;9+_8z- z86|P&kybe}-bC1=e6Sq;p&qA0d1oQyT{TjJ^7?-(w@rmU`L?81+Mt(#Jup8npJ{af z`%jO{I)JxLo)5%r&-=-i9?$2yMGwUL%lGXb5U*nbaG~HOAeFiGXP!>as17A`wqDo; 
zMCXZpo964U$HNYt@8Ea!u9CC)RfvW=blVRsKtpwVH3$m&cWW66Eb`&y<#dBzhpF|w z`aGl~1$}GB9gj}y1Mj{P?9a5}YMyC4(fBMGuAgs-+yNsGvu}Z?zkVI4zf!C)lQA_p zbRo4qFFa!cq7tXRuDRa>;UGRk>l?gU&@AE&`NR!0gg&y0smvQltJw`-=6a~Q)^!jw zco|s`MO-CnFHNW?%7`!WWm=jqdZfX$ob)e-OQApvdPMl(Q9fugwGik&ViGGjnaFx& zLDo$LKedszD9)t%#J7{*SX~1v6qQY=INZwC&|QRsgx~@B(9yyXzd_Kgq>}R#zK^&g zX-Q|O80$jH{=!!+&s=?85=!pFh;P2)Vx0b=7eh1-8LfW(?@M2>B#ecsyby#RFzIZ!HPEN#6Y{Hp{AbD5MfYaPB?eHXuaa+dPQ4}fb z%wP)%;XZ{#PWVUon1&G8d?7(cPW*4w6fl7UH#^U1+umZ%(X73T3j#2x3lAJ&wFM}v zHF&V(A5}O#YjxDMwo{GL6VkQa5`{3ns`)IvdWD8z8xsVmv#di^Nn7DR`yn)66S&k1 zshjGO_NZ6;DGs0!PE!_Cyh7ODTT@t~Z23PC=dPiBQ>Gl5gL|Loj z2Y|friSPrES<$gQkeWr# z4?nV+Jw&%V`yRPFH{AIvQ{RQ%{ar?l<2`oSsuKG9K3xx1@pv?9#h`?#s7Qq39P_=+ z^hFtuQb_%o(BS4lVjcIq%z#8)8fhtfTSYTTJsXjZC6Ku53p^<}<+~eu2N=5>YYpnc z8*>LvMAE&n8%o@ex8^y=O3zQ2Gh>Kk?#a7K+!8luI}>BLUDH$QpPJdyMOL{c3nBxivybG+ z9So)8W5SqB#)bnuqD4-*hLO_BW34q{qf^Gx_E+6dRie*@(EOu0j**5nv>HpDBM16( z@U9>e|Inh!T76PIy`?qNS^ciFwJvWMxlXbq?wjaa2~3N+jUMP;3)@K}!AwXyY6p*D zF)t7{{v67i5R3dS++@%<))u)k_GaUeau$*KWQu`>b6Hju-LXER8*sA7i#Y!TBETvc z_|1X^wYX6kB%*z6H)p1AwrXZ+wtBk6*Vy2HBYFn?kfqgjvqNvCGAr*L_W)|0Erp+=B%jusrmBP`jNO`eX>L$u-K_yyPJhV4)D`|W~+H}zw@ z$r;HZ(PRA_^|o5F2!nwAM>0ifd^EBo#q?G##o@z{1jSWV@*=+ismnH(PrwmO^>Z3U zDktopl;O0=-Fr{QG5?4W-L%0Vf-zjyEO5Q^D+FvIBo5`90?FC|1$M}53LGO7@swdd z?Cd!?@oU)VIXPiuQJNhE;m%Mp@stpJks9Gsaza~T5oU5C+e~zvpx&r14&js#eH~}y z5C*~!pR?qUJz+Xw5xFV57ZdA5E{1kaTnrqep_4mSbgeKGVU^!wW&#k zcEoE6t+1HHYDuGEq@sdGR3qYHXw*gLaqDsk&%3`>e?^p41UJp(ZItZ8rd74*Nn0n1 zsrp@0RnoM;$>dcU8;UsORgQ8q)l!(=!e<|H zOWYntj=mGc)?nHPNEwWVb)Q&2s{GtD>oMpB8`2mL8rYoS&Bo}cn2RtD(%rdOGYHTl zGrG_>GK^k(b^o*ijW8%eF&fX(;HI5w>@dL9y%={N0Ohe|2}wFgL{y4^d`2=&ptRQX zdi}bhwAJx@hul!v`D6orJ}%K8vnWGHM|!?W_UZy>v>cHAoJG_6tcRBLq6Stl1}*-Z zk6Xuewu#YqoX$y0>dTb2 zA10EydRY}p`7Qk0=BS!g5k+{aQ@W7oYSR@*$T4OISQmw-oya+HZa@F~%3MaEm)FAW zF(!NpQ1~-;l~XVGG;$%Xlxla)ui!X+LGGdJjJ(CzAs?ezjga;B5?QqXrZ0j(U+C(t zgd#9S%a`&ddLjP!7naczzQa!BVhBl}CHq@CSx4%)<^5L^vEw#yC5r5?wJAI-nLdl) 
zx`Kp)!#+eEmQ2{DEC0n1a2?h(ZbRO1%wKEHZ`;U!+WtdIT!E2`;T+ePVpEJ#JHa84 zJjIETez49u7Ts#kCUV=S7{!Vqw&5gj+sF7HX*Hm^2ub8_2q~T=JHGQa@F0rp+l0i; zJZ#gU^LFHRNFkaf+xGjxK3j@CwoNAM7OP#Soh>K(!#}=+TWI)=O>D&1j<9tx$Q_g9%6S)^Csg&1AK$Ss0}l?oqjg6o6`5 z%`{vxmtRz^b?z0XKA?cOsBO$EV!uQ4k;B4eD!X&?(L4UhyU{jsr$bR-JB?{38i%&0YiR#ru^M>#5&)|GA|)fR($|Zv>U? zEUsqtD5;~+nunSe&^V=_wBc)_s@Uj@56Nm}Rt$9=XrJ-W*7EA#l~J?UCeA^`T8SS5 zj5_Uh0uk}a+-+HNi-2q3DZ#Rr)0SZGbHrIWe&oV^HI=Ip(RP3PvFUK^7H|%5@@}g@ zo`Fb8%1MH}kdd~M(&9OE#YVML0^;=xtNfS%V z5X`ugfsCta#v@k@fVZPY!OXEwl{@F!R<4d#-=c_@d50*4Cgw>It{Z(@l>7 z+VOg>H>uhrFQo~+lvdF=d9eYZUPFs5naF>k*~;1*>h+5T*A)3DG{QCn5)@Vb?o47b zE)qCFRgee6l!i*sMlk^n(;#E#nKhg&oVa(hS&rm@>rXFGx-xjEM z{qeq#k2V_6l}yBt)DS3e~@||HHvd9z0$qQ-De+1)S=n(gs{nbx^a% z7!`GAe#2KltGiyXksPanjap^Y2|7aTUd}c?J-Hoxvc3wpB3lhmANiemt5UD+hHpMPq*i=-Dricg&@CPMVOQ}$O1Itb`CXpH2Q6MgsKTJ!emgrN< zz`9|n2pE%(FUo4^&yQ;eKcR{F=_IGF8g8GATf{lJSJ-WoX(TE0tScp1_GQ&Iie8%h z)vRqIS*rZ?w4WfdEdQ}?%#X|6Kw2@0mIG;VYQxjuY0YkUl?=%MWyjQ)P*e`hGh1J$ zU1sY_L3h5Gvs<7Zx-=1Z5lCV&Fq$hW-`HA}rcG+>s1=Pzn(jwUmE!T|2-;L%Z*zBo zyMf!Hdg0nW2D;XuXv@*Q5L$I3fXW3f1{g-~R7b2ta(JQ=70Tf^8QfH_X%(quH|(X@ zqFxL{T97GYMY6}kNRt@J{)H_m5oz6?or7y&M?F)$ZVZ7+a(G2jI~zWYf!mn;_$qEjrT2BS)9*xxBS^iEWyJ{UyYDhi|3lM`F66#8)qtWSmFpi-0ESQQcm zC?JVZb0~^LGcyY)u38w>zGPKnzHs^E@u%HLhN>GYx$kWo)yt9>Q3^^YI5aUK(i$k$ zzmtrJ)2MG&Sw$i&E=lYXiEr;p6vn@e6t$+ymAR}ku=p0wN*80Arr66JijOK4%Uack zj*M%}jh1$g)y_d8!lek5BD$6)DXK-~RA0Ij>}K!k%J+<*g(wfBCo!br+#8En&wnq- zDflokQpxWoI}|U8%TUNevb! zqV!NCwRdOS&sIMA1Uw&ACixdNwRw@Q{p!ALoguI|-EGwsbOo|NrbMLb995r8b#1Vv zu$bMihp!BShSmVs>Ejfsi?CV3A~hsx45{L{8&gkEF&eXGGzBsq4d1XzlpWjiUr`C= zMVOA$aa+8adLbwY@Gbdon+-rKUHjvOqfsX^jwL-s@z02%5!dV7DZr-U7HHu~cm$1uCVn zR&5IymFtW9;!>EkqK{g`oB~bDdJ#&BC`9TqrExGc)?5m$)J6+k7($F?zz{B`?y&VV zQO`B*4$g;oHt&s?^r|v5VAUs6ZWq2?ORW6Y8Z`$p%P(<$5b2 zh_InyAe&a#Jyd_nt5%YN2-)Ul&AbU}K9v!N2qiS^&3fk`IklH+UJYgR&q$rkcIEkH z^EY)Vor+TnX0NK^n#G5v^zQ$k@@u=M*yoPcI7sD;cYI24(_?9=0>q&s!`kV6ZyAK! 
zLTBmG0cCkwNs~v#4xl;suZHS!bxy@g&zA#=&n{^-<&2KsC**@lZVwOu<)nu9s|ER_ zN7rV+q6g%W@qQmCoSciGpm2#GD->U zoAJZoa^g`%pAHmgf1fCr;lNLg_yRaUp*u+sVFPXUcckF7)I>kS!w3RP){ab?2=ul< zG<>&1JkiHEpsA!eOl@<>VqI${Ss9~X}iK=PH3DJjqNLE^Sm0aM(J7q zP4lT}(7oKc`HkkG*L} zAS2`&w?(3B_L2L8S>JC0lhU>*r0k>~zAcj`HLuR5AI%(AUgXv&?Hl0>+;l$cM z7=?x$dyzdbVr2fvp(lb4t4yM=>Js|le15p2?M1S+o%Epz0s!>lUPP6{DHwfM+04@O z>S;|wrU_sDbWLj6B@dvjl)A$Eyln9ad_cqd`pD@4JOHjg&!^XyuX{Y;m^OTFVWw8S z3mNY!zez$~HM3R|Hg8U)-YEn2ZE#Me-T=LWfc3Dhzr8N%Z?UbN;sny2Q`eSQr+(AJ z)*h>A)Aw6lW>Ni3U5#Vn41zunEN$DL4cXy~j>O~U>}2ESccToR?&SPZs{#n-2zT@c z;*hfO4=l93Anle9gRq3=PoQtT96 z(vcLJTqSMQ>eTTnaO9q3FGKs^%|#M7RXEC(qK#o4Fa#F=%o+`n4-|D!77NW9z?`W! zwXA+C_DjoK&ff~R5Q3f`qscoeT!`~S+(i<%mN^j97CFo-w&Xa-1}``@Lf0QHC$Fw8 z$KMx)B!E{$sSI{PI_;tUup|f6vNbvez_3khN(0jDoqem{WK^xD8sZd zNM4|~Xgr8fyA(%qyKfGRXY)78p3tHaI|Pi~sU7)Ax~0WT$uLp~*L*M1aV2muez$h8 zeb@rjMfcx&evE>e*B+7*b14`8!FE)z^gVT~i(Ndm{8>A1?XG-hs@Kk#@(VCks~Kxc z>I#CyxbUDFy9P#P@=h7ahQ{Wfbx#5nK6R@6;5e1W^r|#(!gbgV^!%TTDjt)aPk+r> zSL#KtLWT08cArYZSsY)r2r<;q+uGh(N zA7B<16te%8%KYpwZr{#T|ndL|au|HzP1 zD-B1VgTsC#1v&OLOStOm#d^ZMwY_b2 z={|^!Qf2fwz8p{O-_73(C&rjwy9nU@Jq1=+fO_;k@Q~lY_XXEZ$-R-r29}{qoES~QY>;s7NT%@V!o@p|? 
zERHd5a#$uU2%o8-C(%`#%yv?6X`y5+{3Cw4ubC8P&CF#q_*F}|e-Bj!0`Z;2+av5s zeZ$YYS$<%via)sBz|mzVdvF)k_Zc|xiu!%aYAVd-R z&8){g9#jD@u@rvxX?`xuF-+%EYn#}du1nD^SDP?nGWX+zelATAA?un^1y)~J!++fL ztO;((Z;4C36QQSU+cc)OjCzFI)i7OMbZgh~Cu}KLGtZvO2QMvG#vu$?gXQ{Egm|f& zX1qS<>@lFvZSmLqj|6iKrZBP`EJ}n^Q^I0n_Yp(Vt?{5lAbfY7ZtT`{rQ_>6Gl&OY zRRVlP_0~G;>r*%htZVzYpM!FOOE|igzavlRrE}Cd8NP$jHUsr<>aZ$WqqUVAo znNaCa)idy`by0)SXjzr%64}7SY(B87e|X?2E`wlgly&EFK8ExO-nDUbk80-!d=`gqh8kvuihFkFsyMFz%z3|c<( zF|ii9JXF_IQV7Gc1}9ttPmwhr2l@nEa~Ha&%{BwQv+l6$@PP+(lD!L_s8x-YprHqZ3&aH-?JtrqGDb%s%|ie96Si1mRg0b2FLJAD(N57kF7u#*zC& zQGiW9sz;3H_YN`%&la8W){$M7&t$NkEx0llPZ+X98n32NMgQstUyXkSYlEW-0BUO#~t;Z%8 z#BAi|l|NWD^Ms@Q+Ba=0dcE=un*kIO3P+30Ij2kAvr63K4g~s9$$4Upf}w(sfHHXV zV7s-){ZZzp!?n>P&}?wZSyS*GaPCrX)rn zL~k`NqI?#R>y2L|&Ycj)_${GXJy(wv#h+zCXl+hx>lsaG2aa;egT>U&pZnHOBHc${ zL_WF#SLCq)?$P9#gJo=$U~~Xfbi$Gl)9E)$L4hxKTQ3G6p#Z18YZ)X&*RQLsXU>Kf zkSC8md%kW@|TUq4t%gWMDImjg>SPWD6zRiADshOo=zq-Q(%6B*d%!`-Rwq_y_fCTqJVea4yY zI$1k<(qzC-z^AFi{M`@@XqStnanE)-&JZ{i<2kMdl4+^RUXM5;4@?rc*hb*PoN69nn z_dt7^+qt9gJ!F3qcQJ6Z>7D5xix~JBTv7AK)HUEW51O=5KU~AZ6h3}(PDkLn4pydZ zxghwACL)QjPu=u=rDMM~uVAMFh>VMhBn7+0`K!S4)%#x$Oyncb*+6MO{{|hG(8%aY z)P3oePx2%qV!Y?|3LXD)*brNESF~vkwup?18J;BiU`g3+4@hl2$F>{;&!ePq2pfzAt7)sEW6y1JYq{5h>y0yPe5${i zt!?f$F>5}^yNpO=vRZg(G+Ha)a7}G~5`vxeh%5kW{x=uF`fo1cUw@EnkhJnPHjdvF zJ#_f;khEgf#y0r>D1`i${}&&kDsN=`KOTeS+l{E;Yv9v=cfx?L^^Z;rja?dGt|K-&j9QEuSUG2Yz^_|{eWrHLk5t0%9KV%3b A4gdfE literal 0 HcmV?d00001 diff --git a/90_ZielSW/Final/Spurerkennung.py b/90_ZielSW/Final/Spurerkennung.py new file mode 100644 index 0000000..2007620 --- /dev/null +++ b/90_ZielSW/Final/Spurerkennung.py @@ -0,0 +1,522 @@ +# Creation Date: 02.03.2022 +# Author: Kenan Gömek +# This code detects the lane. +# Quit program with 'q' if opencv windows is shows. Else use Ctrl+C. 
+ +import cv2 as cv +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os + +import numpy as np +import math as M + +# Define camera settings +SENSOR_MODE = 4 # corresponding sensor mode to resolution 1640x1232 +OUTPUT_RESOLUTION = (192, 144) + +AWB_MODE = 'off' # Auto white balance mode +AWB_GAINS = (1.395, 1.15) # White Balance Gains to have colours read correctly: (red, blue). Int, float or fraction are valid. +BRIGHTNESS = 25 # sets the brightness setting of the camera. default is 50. [0-100] + # the brighter, the brighter the LEDs and the higher the RGB values and vice versa! +CONTRAST = 100 # sets the contrast setting of the camera. The default value is 0. [-100 ... 100] + +SHUTTER_SPEED = 50 # [µs] + +ISO = 320 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 25 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. My default: 2 s + +# Parameters +pixels_per_mm = 32/24.25 # [px/mm] for 120 mm camera height for resolution: 192x144 + +# Offset Camera Sensor in Scooty according to Scooty-KS +x_offset_camera_mm = 0 # [mm] +y_offset_camera_mm = 0 # [mm] +x_offset_camera_px = x_offset_camera_mm*pixels_per_mm # [px] +y_offset_camera_px = y_offset_camera_mm*pixels_per_mm # [px] + +# image parameters +image_height = OUTPUT_RESOLUTION[1] +image_width = OUTPUT_RESOLUTION[0] +# calculate center of image +[x_0, y_0] = np.array([image_width/2, image_height/2], dtype=np.uint16) + + +threshold_color_detection = 60 + # values (average) under this will not be considered as active leds in each color channel + # see get_color_of_leds()-function + +# Parameters for Blob/LED Detection +minDiameter_mm = 3.75 # [mm] minimum diameter of detected blob/LED. Must be minimum >0 ! + +# Define color numbers to identify the color channels in the matrix with all detected LEDs. 
+# No string, because numpy array should stay uint16 +color_number_off = 0 +color_number_red = 1 +color_number_green = 2 +color_number_blue = 3 +color_number_yellow = 4 +color_number_magenta = 5 +color_number_cyan = 6 +color_number_white = 7 + +# Parameters for grayscale to binary conversion +binary_threshold = 15 # determined by testing and application + # the higher threshold is, the smaller the diameter of the led, because more high values are extracted +binary_maxval = 255 # values ofer threshold will be set to maxval + +# Parameters for line fitting for lane construction +param = 0 # not used for DIST_L2 +reps = 0.001 # Sufficient accuracy for the radius (distance between the coordinate origin and the line). +aeps = 0.001 # Sufficient accuracy for the angle. + +# Parameters for OpenCV +# OpenCV windows only for demonstration +# They are not needed for functionality +show_opencv_window = True # show opencv window +draw_opencv = True # draw lane and so on + +print_additional_info = True + +# calculations before start +# Filter blobs by Area --> not implemented anymore, because no need, because detection is good and less time for calculation needed +# more than good trade off! +minDiameter_px = minDiameter_mm*pixels_per_mm # [px] minimum diameter of detected blob/LED +minArea_px2 = np.pi/4*minDiameter_px**2 # min Area of a blob in px^2 + + +def points_trafo(detected_LEDs, alpha_rad, dx, dy): + """Tranfsform points of LED to lane in KS-LED""" + detected_LEDs_trafo = detected_LEDs.copy() # copy, becuase else it is only a pointer + detected_LEDs_trafo = detected_LEDs_trafo.astype(np.int16) # avoid integer overflow + x_pnts = detected_LEDs_trafo[:, 0] + y_pnts = detected_LEDs_trafo[:, 1] + + # Translation + x1 = x_pnts-dx-x_0 + x_trafo = x1 + y1 = y_pnts-dy-y_0 + y_trafo = y1 + + # Rotation. 
Winkel Sensor im UZS, also negativ zu mathematischer definiton + x_trafo = np.cos(-alpha_rad)*x1-np.sin(-alpha_rad)*y1 + detected_LEDs_trafo[:, 0] = x_trafo + y_trafo = np.sin(-alpha_rad)*x1+np.cos(-alpha_rad)*y1 + detected_LEDs_trafo[:, 1] = y_trafo + + # sort points along lane: x_2, y_2 -axis (KS_LED) + detected_LEDs_trafo = detected_LEDs_trafo[detected_LEDs_trafo[:, 0].argsort(kind='quicksort')] + return detected_LEDs_trafo + + +def construct_lane(detected_LEDs, img_bgr): + """construct the lane""" + # This function is partially commented in german, because higher math is used + # clearer what is trying to be achieved + + # get points + # xy_pnts = detected_LEDs[:,0:2] + # x_pnts = detected_LEDs[:,0] + # y_pnts = detected_LEDs[:,1] + + # approach 2: + # fit line through centers of LEDs in KS_0 + # DIST_L": the simplest and the fastest least-squares method: the simple euclidean distance + [dx, dy, x_2, y_2] = cv.fitLine(detected_LEDs[:, 0:2], cv.DIST_L2, param, reps, aeps) + # x2, y2: same as: mean_of_leds = np.mean([x_pnts, y_pnts], 1) + + alpha_rad = np.arctan2(dy, dx) # calculate angle of line + alpha = np.arctan2(dy, dx)*180/np.pi # calculate angle of line + # print(f"Lane: dx: {dx}, dy:{dy}, x2:{x_2}, y2:{y_2}, alpha:{alpha}°") + if print_additional_info: + alpha_print = alpha[0] + alpha_print = float("{0:.2f}".format(alpha_print)) + print(f"Alpha: {alpha_print}°") + + # get smallest distance to point an line + # Berechnung nach: Repetitorium Höhere Mathematik, Wirth + # Gerade: x = a+ t*b + # Punkt : OP = p + # d = abs(b x (p-a))/(abs(b)) + # info: np.array()[:,0] --> gets only array with 1 dimensions with desired values + p = np.array([x_0, y_0]) + a = np.array([x_2, y_2])[:, 0] + b = np.array([np.cos(alpha_rad), np.sin(alpha_rad)])[:, 0] # Richtungsvektor + c = p-a + + # Betrag von Vektor: np.linalg.norm(vec) + cross = np.cross(b, c) + d = np.linalg.norm(cross)/np.linalg.norm(b) # distance [px] + # print(f"d: {round(d,2)}") + + # Fußpunkt (X_LED, Y_LED) + 
t_0_dot = np.dot(c, b) + t_0_norm = (np.linalg.norm(b)**2) + t_0 = t_0_dot/t_0_norm + [x_LED, y_LED] = (a+t_0*b) + if print_additional_info: + # convert float32, round and prepare for printing string + x_LED_print = float("{0:.2f}".format(x_LED)) + y_LED_print = float("{0:.2f}".format(y_LED)) + print(f"x_LED: {x_LED_print} [px], y_LED: {y_LED_print} [px]") + + # Abstand (dx, dy) Fußpunkt zu KS_0 + dx_LED = x_LED - x_0 + dx_LED_mm = dx_LED*(1/pixels_per_mm) + dy_LED = y_LED - y_0 + dy_LED_mm = dy_LED*(1/pixels_per_mm) + if print_additional_info: + # convert float32, round and prepare for printing string + dx_LED_print = float("{0:.2f}".format(dx_LED)) + dy_LED_print = float("{0:.2f}".format(dy_LED)) + dx_LED_mm_print = float("{0:.2f}".format(dx_LED_mm)) + dy_LED_mm_print = float("{0:.2f}".format(dy_LED_mm)) + print(f"dx_LED: {dx_LED_print} [px] , dy_LED: {dy_LED_print} [px]") + print(f"dx_LED: {dx_LED_mm_print} [mm] , dy_LED: {dy_LED_mm_print} [mm]") + + # Abstand (dx, dy) Fußpunkt von Bildmitte zu KS_Scooty + # Diese Werte zurückgeben + dx_LED_scooty = x_LED - x_0 + x_offset_camera_px + dx_LED_scooty_mm = dx_LED_scooty*(1/pixels_per_mm) + dy_LED_scooty = y_LED - y_0 + y_offset_camera_px + dy_LED_scooty_mm = dy_LED_scooty*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED_scooty: {round(dx_LED_scooty,2)} [px] , dy_LED_scooty: {round(dy_LED_scooty,2)} [px]") + print(f"dx_LED_scooty: {round(dx_LED_scooty_mm,2)} [mm] , dy_LED_scooty: {round(dy_LED_scooty_mm,2)} [mm]") + + # Punkte Trafo, um sortierte position der LEDs entlang Spur zu erhalten + # Bei normal detected kann bei vertikaler LED zb Fehler entstehen und dann muster: 211323233 -> daher mit dieser sortierten weitermachen + detected_LEDs_KS_LED = points_trafo(detected_LEDs, alpha_rad, dx_LED, dy_LED) + if print_additional_info: + print(f"Detected LEDs in KS_LED:(x2, y2):\n {detected_LEDs_KS_LED}") + + #----------------------------------- + # draw useful lines and points + + # draw lane line + if 
draw_opencv: + pt_0 = (a+b*np.array([-300, -300])).astype(np.int32) + pt_1 = (a+b*np.array([300, 300])).astype(np.int32) + # print(f"pt_0: {pt_0}, pt_1: {pt_1}") + line_color = (255, 255, 255) + line_thickness = 1 + cv.line(img_bgr, pt_0, pt_1, line_color, line_thickness) # draw lane + + # draw dx dy + line_color = (0, 0, 255) + line_thickness = 2 + cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_LED)), line_color, line_thickness) # shortest distance from KS_0 to KS_LED --> Lot + # cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_0)), (0, 0, 255), 2) # only dx + # cv.line(img_bgr, (int(x_LED), int(y_0)), (int(x_LED), int(y_LED)), (0, 0, 255), 2) # only dy + + # draw additional points + circle_radius = 5 + circle_color = (255, 128, 255) + circle_thickness = -1 # filled + cv.circle(img_bgr, (int(x_2), int(y_2)), circle_radius, circle_color, circle_thickness) # Center of points + # cv.putText(img_bgr, '(x2, y2)',(int(x_2)+5, int(y_2)-5), cv.FONT_HERSHEY_SIMPLEX, 2, (255,255,255), cv.LINE_AA) + circle_color = (170, 255, 0) + cv.circle(img_bgr, (int(x_LED), int(y_LED)), circle_radius, circle_color, circle_thickness) # Fußpunkt + + if show_opencv_window: + cv.imshow("Lane", img_bgr) + + return dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED + + +def convert_rgb_to_grayscale_average(image_bgr): + """This function converts the RGB image into an grayscale image. + Algorithm: Average: Y = (R+G+B)/3""" + + # convert dtype to prevent integer overflow while addition + image_bgr = image_bgr.astype(np.uint16, copy=False) + image_gray = (image_bgr[:, :, 0]+image_bgr[:, :, 1]+image_bgr[:, :, 2])/3 # add values / do conversion + image_gray = image_gray.astype(np.uint8, copy=False) # convert back to uint8 + + return image_gray + + +def get_color_of_leds(matrix_of_LEDs, image_bgr): + """Determine color of LEDs at positions and add to matrix""" + # is image_r[y_pos, x_pos] = image_bgr[y_pos,x_pos, 2] ? --> yes. No need to split the color channels. 
+ + offset = 1 # half of length from rectangle which is going to be used to determine the color around the middle point of the blob/led + # offset = 0 --> only the value from the middle point of the blob/led + # offset=1 --> 9 values, offset=2-->25 values + + for led in matrix_of_LEDs: + x_pos = led[0] # uint16 + y_pos = led[1] # uint16 + + # get values of color channels in region around middle point of blob/led: + # +1 at stop index, because it is not inclusive + region_around_blue_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 0] # uint8 + region_around_green_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 1] # uint8 + region_around_red_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 2] # uint8 + + # average of the values + # convert dtype to prevent integer overflow while addition + region_around_red_led = region_around_red_led.astype(np.uint16, copy=False) + region_around_green_led = region_around_green_led.astype(np.uint16, copy=False) + region_around_blue_led = region_around_blue_led.astype(np.uint16, copy=False) + # sum all elements in matrix and divide with number of elements + number_of_elements = region_around_blue_led.size + value_of_red_led = region_around_red_led.sum()/number_of_elements # float64, if not integer result + value_of_green_led = region_around_green_led.sum()/number_of_elements # float64, if not integer result + value_of_blue_led = region_around_blue_led.sum()/number_of_elements # float64, if not integer result + + # determine which leds are active: + # if value > threshold --> led is active + status_blue_led = False + status_green_led = False + status_red_led = False + if value_of_blue_led > threshold_color_detection: + status_blue_led = True + if value_of_green_led > threshold_color_detection: + status_green_led = True + if value_of_red_led > threshold_color_detection: + status_red_led = True + + # determine color by checking the cases: + # case 1: red + if 
status_blue_led==False and status_green_led==False and status_red_led==True: + color = color_number_red + # case 2: green + elif status_blue_led==False and status_green_led==True and status_red_led==False: + color = color_number_green + # case 3: blue + elif status_blue_led==True and status_green_led==False and status_red_led==False: + color = color_number_blue + # case 4: yellow = red + green + elif status_blue_led==False and status_green_led==True and status_red_led==True: + color = color_number_yellow + # case 5: magenta = red + blue + elif status_blue_led==True and status_green_led==False and status_red_led==True: + color = color_number_magenta + # case 6: cyan = green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==False: + color = color_number_cyan + # case 7: white = red + green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==True: + color = color_number_white + # case 8: led not active + # this case can not occur, because no inactive led can be detected from the implemented blob-algorithm in detect_LED_positions_in_grayscale + else: + color = color_number_off + + # fill matrix with color + led[2] = color # uint16 + + return matrix_of_LEDs + +def get_position_of_LEDs_contours(image_gray, image_bgr): + # create binary image + ret, image_binary = cv.threshold(image_gray, binary_threshold, binary_maxval, cv.THRESH_BINARY) + + # find contours + contour_retrieval_algorithm = cv.RETR_EXTERNAL # retrieves only the extreme outer contours + contours, hierarchy = cv.findContours(image_binary, contour_retrieval_algorithm, cv.CHAIN_APPROX_SIMPLE) + + # analyse contours + number_of_detected_contours = len(contours) + if number_of_detected_contours != 0: + # centroid of contours + # Pre-allocate matrix for numpy + number_of_rows = 0 + number_of_columns = 3 + position_of_leds = np.zeros((number_of_rows, number_of_columns), dtype=np.uint16) #empty: [] + + if draw_opencv: + image_bgr_contours = 
image_bgr.copy() # create copy of image + # copy is needed to draw on, because else the color of + # the circle is detected als LED-Color + + number_of_detected_LEDs = 0 + for i, cnt in enumerate(contours): + M = cv.moments(cnt) + area = cv.contourArea(cnt) + # diameter = 2*np.sqrt(area/np.pi) + # diameter_mm = diameter*(1/pixels_per_mm) + # print(f"area: {area} [px^2], diameter: {diameter} [px], diamter: {diameter_mm} [mm]") + + # Filter contours by area. minimum Area needs to be at least >0 ! + if area >minArea_px2: + number_of_detected_LEDs += 1 + # prevent zero division + if M['m00']==0: + cx = 0 + cy = 0 + else: + cx = int(M['m10']/M['m00']) + cy = int(M['m01']/M['m00']) + #print(cx, cy) + # add positions to matrix + x_pos = int(cx) # x positon + y_pos = int(cy) # y position + position_of_leds = np.vstack((position_of_leds, \ + np.array([x_pos, y_pos, color_number_off], dtype=np.uint16))) # vstack: row wise + + # draw centroids + if draw_opencv: + radius = 2 + color = (255,255,255) + thickness = -1 # filled + cv.circle(image_bgr_contours,(cx,cy), radius, color, thickness) + + if number_of_detected_LEDs != 0: + if print_additional_info: + print(f"detected LEDs: {number_of_detected_LEDs}") + + if draw_opencv: + # draw contours + contours_to_pass = -1 # pass all contours + color_of_contour = (255,255,255) + line_thickness = 1 + cv.drawContours(image_bgr_contours, contours, contours_to_pass, color_of_contour, line_thickness) + + if show_opencv_window: + cv.imshow("binary", image_binary) + cv.imshow("Contours", image_bgr_contours) + + return position_of_leds + else: + if print_additional_info: + print(f"No LEDs were detected") + return None + + else: + if print_additional_info: + print(f"No contours were detected") + return None + +def detect_position_of_LEDs(image_bgr): + + # convert rgb to grayscale + image_gray = convert_rgb_to_grayscale_average(image_bgr) + if show_opencv_window: + cv.imshow("grayscale", image_gray) + + # get position of leds + position_of_LEDs 
= get_position_of_LEDs_contours(image_gray, image_bgr)

    return position_of_LEDs



def lane_detection(image_bgr):
    """Full detection pipeline for one BGR frame: find LED positions, classify
    their colors, then construct the lane from them.

    Returns the matrix of detected LEDs (rows: x, y, color number) or None if
    no LEDs were found."""
    # Detect LEDs
    if print_additional_info:
        print(f"Detect LEDs and color:")
    position_of_LEDs = detect_position_of_LEDs(image_bgr)
    # detected_LEDs = None # only so that the following code is not executed during development

    # Get color of leds
    if position_of_LEDs is not None:
        detected_LEDs = get_color_of_leds(position_of_LEDs, image_bgr)
        # print result
        if print_additional_info:
            print(f"Detected LEDs relative to image center (x0,y0):\n{detected_LEDs}")
    else:
        detected_LEDs = None


    # Contruct lane  (NOTE(review): "Contruct" typo also appears in the runtime
    # print string below; the string is left untouched on purpose.)
    if detected_LEDs is not None:
        if print_additional_info:
            print("\nContruct lane with consideration of camera offset:")
        dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED = \
            construct_lane(detected_LEDs, image_bgr)
        return detected_LEDs
    else:
        return None



# PiCamera
def get_frames_from_camera():
    """Initialise the PiCamera with fixed exposure/gain settings and run the
    capture loop; every captured frame is passed to lane_detection().

    Blocks until 'q' is pressed in an OpenCV window (or the process is killed).
    The camera-setting order below is deliberate — do not reorder."""
    # Initialise Camera
    print('Initialise Camera...')
    with picamera.PiCamera() as camera:
        with PiRGBArray(camera) as output:
            # Set camera settings
            camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode
            camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage!
            camera.framerate = FRAMERATE

            camera.awb_mode = AWB_MODE
            camera.awb_gains = AWB_GAINS

            camera.iso = ISO
            camera.shutter_speed = SHUTTER_SPEED
            # it was found that you have to set the right shutter speed at the first initialisation of the current runtime of the program.
            # The gains (analog, digital) will adjust to this set up.
            # After the gains are fixed, they will never change! even if you change the shutter speed during the runtime.
            # To get consistent brightness values, set the right shutter speed at initialisation once.

            time.sleep(SLEEP_TIME) # wait for iso gains and digital_gain and analog_gain to settle before fixing the gains with exposure_mode = off
            camera.exposure_mode = EXPOSURE_MODE

            time.sleep(1) # wait before applying brightness and contrast
            camera.brightness = BRIGHTNESS
            camera.contrast = CONTRAST
            time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings

            # camera.start_preview() # show camera preview through PiCamera interface
            # camera.annotate_frame_num=True # Controls whether the current frame number is drawn as an annotation.

            print('Start caputure...')

            for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)):
                start_processing = time.perf_counter()

                framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1
                image_bgr = frame.array # raw NumPy array without JPEG encoding

                #cv.imshow("Current Frame", image) # display the image without text
                output.truncate(0) # clear the stream for next frame

                # processing
                lane_detection(image_bgr)


                # Only uncomment following code if you display the image. No errors if not commented, but higher fps if commented.
                # if q is pressed, break from loop.
                # NOTE(review): the waitKey block below is in fact active, contrary to the comment above — confirm intent.
                pressed_key = cv.waitKey(2) & 0xff
                if pressed_key == ord('q'):
                    break
                end_processing = time.perf_counter()
                time_processing = end_processing-start_processing
                time_processing = time_processing*1000
                time_processing = round(time_processing, 2)
                print(f'processing time: {time_processing} ms')



# ----------------------------------------------------------------------------
# main
def main():
    """Entry point: start the capture loop and clean up OpenCV windows."""

    # start capturing
    get_frames_from_camera() # start capture

    if show_opencv_window:
        cv.destroyAllWindows() # destroy all open cv windows

if __name__ == "__main__":
    main()









diff --git a/90_ZielSW/Multiprocessing/Lanedetection_Picamera_V01.py b/90_ZielSW/Multiprocessing/Lanedetection_Picamera_V01.py
new file mode 100644
index 0000000..707dea5
--- /dev/null
+++ b/90_ZielSW/Multiprocessing/Lanedetection_Picamera_V01.py
@@ -0,0 +1,912 @@

import cv2 as cv
import numpy as np

import picamera
from picamera.array import PiRGBArray
from fractions import Fraction

import time
from datetime import datetime
import os
import sys

import numpy as np
# NOTE(review): numpy is imported twice (also above); Fraction and sys appear unused in the visible part — verify before removing.
import math as M

from multiprocessing import Process, shared_memory

# Parameters
pixels_per_mm = 71/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320
# pixels_per_mm = 107/24.25 #[px/mm] for 120 mm camera height for resolution: 640x480

# Offset Camera Sensor in Scooty according to Scooty-KS
x_offset_camera_mm = 100 # [mm]
y_offset_camera_mm = 50 # [mm]
x_offset_camera_px = x_offset_camera_mm*pixels_per_mm # [px]
y_offset_camera_px = y_offset_camera_mm*pixels_per_mm # [px]

# image parameters
image_heigth = 320 # shape [0]
image_width = 416 # shape[1]
# calculate center of image
[x_0, y_0] = np.array([image_width/2, image_heigth/2], dtype=np.uint16)


threshold_color_detection = 60 # values under this will not be considered as active leds in each color channel

# Parameters for Blob/LED Detection
minDiameter_mm = 5 # [mm] minimum diameter of
detected blob/LED
maxDiameter_mm = 9 # [mm] maximum diameter of detected blob/LED

# Define color numbers to identify the color channels in the matrix with all detected LEDs. No string, because numpy array should stay uint16
color_number_off = 0
color_number_red = 1
color_number_green = 2
color_number_blue = 3
color_number_yellow = 4
color_number_magenta = 5
color_number_cyan = 6
color_number_white = 7

#----------------------------------------------------------------------
# Define camera settings
SENSOR_MODE = 4 # corresponding sensor mode to resolution
OUTPUT_RESOLUTION = (320, 416) # (width, heigth)
# NOTE(review): earlier in this module image_width=416 / image_heigth=320 and
# pixels_per_mm is documented "for resolution: 416x320" — this tuple looks
# (width, height)-swapped. The shm buffers below are shaped consistently with
# THIS assignment, so do not change it without changing them too. Verify.
image_width = OUTPUT_RESOLUTION[0]
image_heigth = OUTPUT_RESOLUTION[1]
number_of_colorchannels = 3 # r, g, b
size_of_frame=int(image_heigth*image_heigth*number_of_colorchannels)
# NOTE(review): heigth*heigth above is presumably meant to be heigth*width;
# size_of_frame appears unused in the visible code — confirm before fixing.
frame_dimension = int(number_of_colorchannels)

AWB_MODE = 'off' # Auto white balance mode
AWB_GAINS = (1.395, 1.15) # White Balance Gains to have colours read correctly: (red, blue). Int, floar or fraction are valid.
BRIGHTNESS = 25 # sets the brightness setting of the camera. default is 50. [0-100]
    #the brighter, the brighter the LEDs and the higher the RGB values and vice versa!
CONTRAST = 100 # sets the contrast setting of the camera. The default value is 0. [-100 ... 100]

SHUTTER_SPEED = 50 # [µs]

ISO = 320 # ISO value
EXPOSURE_MODE = 'off'
FRAMERATE = 25 # frames per second. 40 fps is max for sensor mode 4

SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. My default: 2 s

# miscellaneous parameters
max_value_of_uint64 = int((2**64) - 1) # @30 fps: konservative calculated driving time: 1.95*1e10 years --> Integer overflow not relevant.

# settings for development
show_opencv_window = False

# create shared Memorys for main-process
# shared memory for bools
shm_bools_pre=np.array([False, False, False, False, False, False, False, False, False, False], dtype=np.bool8) # create numpy array with bools stored in it
    # [0]: newframe [1]: p_red_finished [2]: p_green_finished [3]: p_blue_finished
    # [4]: p_red_started [5]: p_green_started [6]: p_blue_started
    # [7]: p_red_start_trigger [8]: p_green_start_trigger [9]: p_blue_start_trigger
size_of_buffer = shm_bools_pre.nbytes
print(f"size of buffer: {size_of_buffer}") # size of buffer: 10
print(f"shm_bools dtype: {shm_bools_pre.dtype}") # dtype: bool
shm_bools_create = shared_memory.SharedMemory(name="shm_bools", create=True, size=shm_bools_pre.nbytes) # create a new shared memory block
shm_bools = np.ndarray(shm_bools_pre.shape, dtype=shm_bools_pre.dtype, buffer=shm_bools_create.buf) # create a NumPy array backed by shared memory
shm_bools[:] = shm_bools_pre[:] # Copy the original data into shared memory

# print(shm_bool)
# print(shm_bools.name)

# shared memory for framenumber
shm_framenumber_pre=np.array([0], dtype=np.uint64)
size_of_buffer = shm_framenumber_pre.nbytes
print(f"size of framenumber-buffer: {size_of_buffer}") #8
print(f"shm_framenumber dtype: {shm_framenumber_pre.dtype}") #uint64
shm_framenumber_create = shared_memory.SharedMemory(name="shm_framenumber", create=True, size=shm_framenumber_pre.nbytes) # create a new shared memory block
shm_framenumber = np.ndarray(shm_framenumber_pre.shape, dtype=shm_framenumber_pre.dtype, buffer=shm_framenumber_create.buf) # create a NumPy array backed by shared memory
shm_framenumber[:] = shm_framenumber_pre[:] # Copy the original data into shared memory
# print(shm_framenumber) # [0]
# print(shm_framenumber_create.name) # shm_framenumber

# shared memory for red, green, blue frame
int_black = 0 # integer for black color/ no color
shm_colorframes_pre = np.full(\
    (image_heigth,image_width), \
    int_black, dtype=np.uint8)
size_of_buffer = shm_colorframes_pre.nbytes
print(f"size of colorframe-buffer: {size_of_buffer}") #133 120
print(f"shm_colorframes_pre dtype: {shm_colorframes_pre.dtype}") #uint8
shm_redframe_create = shared_memory.SharedMemory(name="shm_redframe", create=True, size=shm_colorframes_pre.nbytes) # create a new shared memory block
shm_greenframe_create = shared_memory.SharedMemory(name="shm_greenframe", create=True, size=shm_colorframes_pre.nbytes) # create a new shared memory block
shm_blueframe_create = shared_memory.SharedMemory(name="shm_blueframe", create=True, size=shm_colorframes_pre.nbytes) # create a new shared memory block
shm_redframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_redframe_create.buf) # create a NumPy array backed by shared memory
shm_greenframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_greenframe_create.buf) # create a NumPy array backed by shared memory
shm_blueframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_blueframe_create.buf) # create a NumPy array backed by shared memory
shm_redframe[:] = shm_colorframes_pre[:] # Copy the original data into shared memory
shm_greenframe[:] = shm_colorframes_pre[:] # Copy the original data into shared memory
shm_blueframe[:] = shm_colorframes_pre[:] # Copy the original data into shared memory
# shared memory bgr frame
int_black = 0 # integer for black color/ no color
shm_frame_pre = np.full(\
    (image_heigth,image_width, number_of_colorchannels), \
    int_black, dtype=np.uint8)
size_of_buffer = shm_frame_pre.nbytes
print(f"size of colorframe-buffer: {size_of_buffer}") #399 360 = 416 x 320 x 3
print(f"shm_colorframes_pre dtype: {shm_frame_pre.dtype}") #uint8
shm_frame_create = shared_memory.SharedMemory(name="shm_frame", create=True, size=shm_frame_pre.nbytes) # create a new shared memory block
shm_frame =
np.ndarray(shm_frame_pre.shape, dtype=shm_frame_pre.dtype, buffer=shm_frame_create.buf) # create a NumPy array backed by shared memory
shm_frame[:] = shm_frame_pre[:] # Copy the original data into shared memory


# ----------------------------------------------------------------------------
# Define Functions
def points_trafo(detected_LEDs, alpha_rad, dx, dy):
    """Transform LED points into the lane coordinate system KS-LED.

    detected_LEDs: (n,3) array [x, y, color]; alpha_rad: lane angle in rad;
    dx, dy: translation of the foot point relative to the image center.
    Returns a new int16 array sorted along the lane axis."""
    detected_LEDs_trafo = detected_LEDs.copy() # copy, because otherwise it is only a reference to the caller's array
    detected_LEDs_trafo = detected_LEDs_trafo.astype(np.int16) # avoid integer overflow
    x_pnts = detected_LEDs_trafo[:,0]
    y_pnts = detected_LEDs_trafo[:,1]

    # Translation
    x1 = x_pnts-dx-x_0
    x_trafo=x1
    y1 = y_pnts-dy-y_0
    y_trafo = y1

    # Rotation. Sensor angle is clockwise, hence negated relative to the mathematical convention
    x_trafo = np.cos(-alpha_rad)*x1-np.sin(-alpha_rad)*y1
    detected_LEDs_trafo[:,0] = x_trafo
    y_trafo = np.sin(-alpha_rad)*x1+np.cos(-alpha_rad)*y1
    detected_LEDs_trafo[:,1] = y_trafo

    #sort points along lane: x_2, y_2 -axis (KS_LED)
    detected_LEDs_trafo = detected_LEDs_trafo[detected_LEDs_trafo[:, 0].argsort(kind='quicksort')]
    return detected_LEDs_trafo

def construct_lane(detected_LEDs, img_bgr):
    """Fit a line through the detected LED centers, compute the offset of the
    lane relative to the vehicle (Scooty) and draw debug overlays into img_bgr.

    Returns (dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED)."""
    # Math-heavy section; comments translated to English from the original German.

    # get points
    # xy_pnts = detected_LEDs[:,0:2]
    # x_pnts = detected_LEDs[:,0]
    # y_pnts = detected_LEDs[:,1]

    # approach 2:
    # fit line through centers of LEDs in KS_0
    # DIST_L2: the simplest and the fastest least-squares method: the simple euclidean distance
    param = 0 # not used for DIST_L2
    reps = 0.001 # Sufficient accuracy for the radius (distance between the coordinate origin and the line).
    aeps = 0.001 # Sufficient accuracy for the angle.
    [dx, dy, x_2, y_2] = cv.fitLine(detected_LEDs[:,0:2], cv.DIST_L2, param, reps, aeps)
    # x2, y2: same as: mean_of_leds = np.mean([x_pnts, y_pnts], 1)

    alpha_rad = np.arctan2(dy, dx) # calculate angle of line
    alpha = np.arctan2(dy, dx)*180/np.pi # calculate angle of line
    # print(f"Lane: dx: {dx}, dy:{dy}, x2:{x_2}, y2:{y_2}, alpha:{alpha}°")
    print(f"Lane: alpha:{alpha[0]}°")

    # get the smallest distance from a point to the line
    # Computation following: Repetitorium Höhere Mathematik (Wirth)
    # Line:  x = a + t*b
    # Point: OP = p
    # d = abs(b x (p-a))/(abs(b))
    # info: np.array()[:,0] --> gets only array with 1 dimensions with desired values
    p = np.array([x_0, y_0])
    a = np.array([x_2, y_2])[:,0]
    b = np.array([np.cos(alpha_rad), np.sin(alpha_rad)])[:,0] # direction vector of the line
    c = p-a

    # vector magnitude: np.linalg.norm(vec)
    cross= np.cross(b, c)
    d = np.linalg.norm(cross)/np.linalg.norm(b) # distance [px]
    #print(f"d: {round(d,2)}")

    # foot of the perpendicular (x_LED, y_LED)
    t_0_dot = np.dot(c, b)
    t_0_norm = (np.linalg.norm(b)**2)
    t_0 = t_0_dot/t_0_norm
    [x_LED, y_LED] = (a+t_0*b)
    #print(f"x_LED: {x_LED}, y_LED: {y_LED}")

    # distance (dx, dy) from the foot point to KS_0
    dx_LED = x_LED - x_0
    dx_LED_mm = dx_LED*(1/pixels_per_mm)
    dy_LED = y_LED - y_0
    dy_LED_mm = dy_LED*(1/pixels_per_mm)
    #print(f"dx_LED:{dx_LED} [px] , dy_LED:{dy_LED} [px]")
    #print(f"dx_LED:{dx_LED_mm} [mm] , dy_LED:{dy_LED_mm} [mm]")

    # distance (dx, dy) of the foot point from the image center to KS_Scooty
    # these values are returned
    dx_LED_scooty = x_LED - x_0 + x_offset_camera_px
    dx_LED_scooty_mm = dx_LED_scooty*(1/pixels_per_mm)
    dy_LED_scooty = y_LED - y_0 + y_offset_camera_px
    dy_LED_scooty_mm = dy_LED_scooty*(1/pixels_per_mm)
    #print(f"dx_LED_scooty:{dx_LED_scooty} [px] , dy_LED_scooty:{dy_LED_scooty} [px]")
    #print(f"dx_LED_scooty:{dx_LED_scooty_mm} [mm] , dy_LED_scooty:{dy_LED_scooty_mm} [mm]")

    # point transformation, to obtain the positions of the LEDs sorted along the lane
    # with the unsorted detections a vertical LED arrangement can e.g. yield the pattern 211323233 --> therefore continue with the sorted points
    detected_LEDs_KS_LED = points_trafo(detected_LEDs, alpha_rad, dx_LED, dy_LED)
    #print(f"Detected LEDs in KS_LED:(x2, y2):\n {detected_LEDs_KS_LED}")

    #-----------------------------------
    # draw useful lines and points

    # draw lane line
    pt_0 = (a+b*np.array([-300, -300])).astype(np.int32)
    pt_1 = (a+b*np.array([300, 300])).astype(np.int32)
    #print(f"pt_0: {pt_0}, pt_1: {pt_1}")
    cv.line(img_bgr, pt_0, pt_1, (255,255,255),1) # draw lane

    # draw dx dy
    cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # shortest distance from KS_0 to KS_LED --> perpendicular
    # cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_0)), (0,0,255), 2) # only dx
    # cv.line(img_bgr, (int(x_LED), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # only dy

    #draw additional points
    cv.circle(img_bgr, (int(x_2), int(y_2)), 5,(255,128,255),-1) #pink. Center of points
    #cv.putText(img_bgr, '(x2, y2)',(int(x_2)+5, int(y_2)-5), cv.FONT_HERSHEY_SIMPLEX, 2, (255,255,255), cv.LINE_AA)
    cv.circle(img_bgr, (int(x_LED), int(y_LED)), 5,(170,255,0),-1) # lime green. Foot of the perpendicular

    #cv.imshow("Lane", img_bgr)

    return dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED

def convert_rgb_to_grayscale_average(image_b, image_g, image_r):
    """This function converts the RGB image into an grayscale image.
    Algorithm: Average: Y = (R+G+B)/3"""

    # convert dtype to prevent integer overflow while addition
    image_r = image_r.astype(np.uint16)
    image_g = image_g.astype(np.uint16)
    image_b = image_b.astype(np.uint16)

    image_gray = (image_b+image_g+image_r)/3 # add values / do conversion
    image_gray = image_gray.astype(np.uint8) # convert back to uint8

    return image_gray

def define_parameters_for_blob_detection():
    """Set and return the parameters for the OpenCV SimpleBlobDetector.

    All filters are currently disabled (filterBy* = False); only the
    threshold sweep parameters are effective."""
    params = cv.SimpleBlobDetector_Params()

    # Threshold for Convert the source image to binary images by applying thresholding
    # with several thresholds from minThreshold (inclusive) to maxThreshold (exclusive)
    # with distance thresholdStep between neighboring thresholds.
    # Since the Grayscale image is dark if only one color channel is active,
    # the Threshold values have to be set like this.
    # particularly the thresholdStep-Value has to be low
    params.minThreshold=20 # reminder: this value is set for grayscale image
    params.maxThreshold=255
    params.thresholdStep=1

    params.filterByColor=False # do not filter blobs by color

    # Filter blobs by Area
    params.filterByArea=False
    # NOTE(review): with filterByArea=False the minArea value assigned below is
    # presumably ignored by OpenCV — confirm whether area filtering was intended.
    minDiameter_px = minDiameter_mm*pixels_per_mm # [px] minimum diameter of detected blob/LED
    maxDiameter_px = maxDiameter_mm*pixels_per_mm # [px] maximum diameter of detected blob/LED
    minArea_px2 = np.pi/4*minDiameter_px**2
    maxArea_px2 = np.pi/4*maxDiameter_px**2

    params.minArea = minArea_px2 # min Area of a blob in px^2
    # params.maxArea = maxArea_px2 # max Area of a blob in px^2.
    # reasons for not filtering maxArea: motion blur + rolling shutter --> larger Area

    # Filter by Inertia
    params.filterByInertia=False
    params.minInertiaRatio = 0.2 # [0-1]

    # Filter by Convexity
    params.filterByConvexity=False
    params.minConvexity = 0.2 # [0-1]

    # Filter by Circularity
    params.filterByCircularity=False
    params.minCircularity = 0.4 # [0-1]

    # params.minDistBetweenBlobs = minDist_px # this has no effect

    return params

def detect_LED_positions_in_grayscale(image_gray, image_bgr, params_for_blob_detection):
    """Detect LED blobs in the grayscale image and return an (n,3) uint16
    matrix [x, y, 0] (color column filled later), or None if nothing found."""

    detector = cv.SimpleBlobDetector_create(params_for_blob_detection) # Set up the detector with specified parameters.
    keypoints = detector.detect(image_gray) # Detect blobs --> LEDs
    number_of_detected_leds = len(keypoints)

    if number_of_detected_leds != 0:
        # print information of keypoints
        print(f"detected LEDs: {number_of_detected_leds}")

        #Pre-allocate matrix for numpy
        number_of_rows = number_of_detected_leds
        number_of_columns = 3
        position_of_leds = np.zeros((number_of_rows, number_of_columns), dtype=np.uint16)
        for i, k in enumerate(keypoints):
            x_pos = round(k.pt[0],0) # x position
            y_pos = round(k.pt[1],0) # y position
            # print(f"x: {x_pos} y: {y_pos}")
            diameter_px = round(k.size,2)
            diameter_mm = round(diameter_px*1/pixels_per_mm,2)
            # print(f"diameter [px]: {diameter_px} diameter [mm]: {diameter_mm}") # diameter
            area_px2 = round(M.pi/4*k.size**2,0) # area in px^2
            area_mm2 = round(area_px2*(1/pixels_per_mm)**2,0)
            # print(f"area [px^2]: {area_px2} area [mm^2]: {area_mm2}")
            # print('')

            # calculate parameters to transfer to matrix
            x_pos = int(np.ceil(x_pos))
            y_pos = int(np.ceil(y_pos))
            # Fill matrix
            # position_of_leds[i,:] = [x_pos,y_pos, 0]
            position_of_leds[i,0] = x_pos
            position_of_leds[i,1] = y_pos


        # draw the keypoints on the original image
        # cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS ensures the size of the circle corresponds to the size of blob
        blobs =
cv.drawKeypoints(image=image_bgr, keypoints=keypoints, color=(255, 255, 255), \
            outImage=np.array([]), flags= cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)

        # cv.imshow("grayscale", image_gray)
        #cv.imshow("Detected", blobs)

        return position_of_leds

    else:
        print(f"No LEDs were detected")
        return None

def detect_position_of_all_LEDs_grayscale(image_gray, image_bgr, params_for_blob_detection):
    """Thin wrapper around detect_LED_positions_in_grayscale; returns the
    position matrix or None."""
    position_of_LEDs = detect_LED_positions_in_grayscale(image_gray, image_bgr, params_for_blob_detection)

    if position_of_LEDs is not None:
        return position_of_LEDs
    else:
        return None

def get_color_of_leds(matrix_of_LEDs, image_bgr):
    """Classify the color of each detected LED by averaging a small BGR region
    around its center and thresholding each channel.

    Mutates matrix_of_LEDs in place (fills column 2 with a color number) and
    returns the same matrix."""
    # is image_r[y_pos, x_pos] = image_bgr[y_pos,x_pos, 2] ? --> yes. No need to split the color channels.

    offset = 2 # half of length from rectangle which is going to be used to determine the color around the middle point of the blob/led
    # offset = 0 --> only the value from the middle point of the blob/led
    # offset=1 --> 9 values, offset=2-->25 values

    for led in matrix_of_LEDs:
        x_pos = led[0] # uint16
        y_pos = led[1] # uint16
        # NOTE(review): x_pos/y_pos are uint16, so y_pos-offset underflows (wraps)
        # for LEDs closer than `offset` px to the top/left border, producing an
        # empty region and a zero-size division below — confirm border handling.

        # get values of color channels in region around middle point of blob/led:
        # +1 at stop index, because it is not inclusive
        region_around_blue_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 0] # uint8
        region_around_green_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 1] # uint8
        region_around_red_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 2] # uint8

        # average of the values
        # convert dtype to prevent integer overflow while addition
        region_around_red_led = region_around_red_led.astype(np.uint16)
        region_around_green_led = region_around_green_led.astype(np.uint16)
        region_around_blue_led = region_around_blue_led .astype(np.uint16)
        # sum all elements in matrix and divide with number of elements
        number_of_elements= region_around_blue_led.size
        value_of_red_led = region_around_red_led.sum()/number_of_elements # float64, if not integer result
        value_of_green_led = region_around_green_led.sum()/number_of_elements # float64, if not integer result
        value_of_blue_led = region_around_blue_led.sum()/number_of_elements # float64, if not integer result

        # determine which leds are active:
        # if value > threshold --> led is active
        status_blue_led = False; status_green_led = False; status_red_led = False
        if value_of_blue_led > threshold_color_detection:
            status_blue_led = True
        if value_of_green_led > threshold_color_detection:
            status_green_led = True
        if value_of_red_led > threshold_color_detection:
            status_red_led = True

        # determine color by checking the cases:
        # case 1: red
        if status_blue_led==False and status_green_led==False and status_red_led==True:
            color = color_number_red
        # case 2: green
        elif status_blue_led==False and status_green_led==True and status_red_led==False:
            color = color_number_green
        # case 3: blue
        elif status_blue_led==True and status_green_led==False and status_red_led==False:
            color = color_number_blue
        # case 4: yellow = red + green
        elif status_blue_led==False and status_green_led==True and status_red_led==True:
            color = color_number_yellow
        # case 5: magenta = red + blue
        elif status_blue_led==True and status_green_led==False and status_red_led==True:
            color = color_number_magenta
        # case 6: cyan = green + blue
        elif status_blue_led==True and status_green_led==True and status_red_led==False:
            color = color_number_cyan
        # case 7: white = red + green + blue
        elif status_blue_led==True and status_green_led==True and status_red_led==True:
            color = color_number_white
        # case 8: led not active
        # this case can not occur, because no inactive led can be detected from the implemented blob-algorithm in detect_LED_positions_in_grayscale
        else:
            color = color_number_off

        # fill matrix with color
        led[2] = color # uint16

    return matrix_of_LEDs


def
detect_LEDs_with_grayscale(image_b, image_g, image_r, image_bgr, params_for_blob_detection):
    """Convert the split channels to grayscale, detect LED positions and
    classify their colors. Returns the LED matrix or None."""
    # convert rgb to grayscale image
    start_m1 = time.perf_counter_ns()
    image_gray = convert_rgb_to_grayscale_average(image_b, image_g, image_r)
    end_m1 = time.perf_counter_ns()
    time_processing = end_m1-start_m1
    time_processing = time_processing*1e-6
    time_processing = round(time_processing, 2)
    print(f'processing time convert image: {time_processing} ms')


    # get position of leds
    position_of_LEDs = detect_position_of_all_LEDs_grayscale(image_gray=image_gray, image_bgr=image_bgr, params_for_blob_detection=params_for_blob_detection)

    if position_of_LEDs is not None:
        # determine color of leds and add to matrix
        detected_LEDs = get_color_of_leds(position_of_LEDs, image_bgr)
        return detected_LEDs
    else:
        return None

def lane_detection(image_bgr, params_for_blob_detection):
    """Full per-frame pipeline: split channels, detect LEDs, construct the
    lane. Returns the LED matrix or None if no LEDs were detected."""
    start_m1 = time.perf_counter_ns()
    image_b,image_g,image_r = cv.split(image_bgr) # Split colour channels and get grayscale images
    end_m1 = time.perf_counter_ns()
    time_processing = end_m1-start_m1
    time_processing = time_processing*1e-6
    time_processing = round(time_processing, 2)
    print(f'processing time split image: {time_processing} ms')


    # Detect LEDs
    print(f"Detect LEDs and color:")
    detected_LEDs = detect_LEDs_with_grayscale(image_b, image_g, image_r, image_bgr, params_for_blob_detection)

    if detected_LEDs is not None:
        # Contruct lane
        #print(f"_____________________________________")
        #print("Contruct lane")
        dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED = \
            construct_lane(detected_LEDs, image_bgr)

        # print result
        #print(f"Detected LEDs relative to image-center(x0,y0):\n{detected_LEDs}")
        return detected_LEDs
    else:
        return None
#-------------------
# Define Functions
def get_frames_from_picamera():
    """Producer process: capture frames, publish them into the shared-memory
    buffers and raise the per-channel start triggers (shm_bools[7:10]).

    Stops after 500 frames (performance-measurement mode)."""
    # newframe= shm_bools[0] # do not use this! no updated values in "newframe"
    # framenumber = shm_framenumber[0]

    # Initialise Camera
    with picamera.PiCamera() as camera:
        with PiRGBArray(camera) as output:
            # Set camera settings
            camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode
            camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage!
            camera.framerate = FRAMERATE

            camera.awb_mode = AWB_MODE
            camera.awb_gains = AWB_GAINS

            camera.iso = ISO
            camera.shutter_speed = SHUTTER_SPEED

            time.sleep(SLEEP_TIME) # wait for iso gains and digital_gain and analog_gain to settle before fixing the gains with exposure_mode = off
            camera.exposure_mode = EXPOSURE_MODE

            time.sleep(1) # wait before applying brightness and contrast
            camera.brightness = BRIGHTNESS
            camera.contrast = CONTRAST
            time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings

            t_start= time.perf_counter() # save time for fps calculation

            for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)):
                # General information:
                # - always the newest frame is recieved: processing must be faster than fps if every frame should be processed

                shm_bools[0] = True
                framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1


                shm_framenumber[0] = framenumber
                #print('')
                #print(f"new frame: {framenumber}")

                #image = frame.array # raw NumPy array without JPEG encoding

                #b,g,r = cv.split(frame.array) # split colour channels of raw NumPy array without JPEG encoding
                shm_frame[:] = frame.array
                #shm_redframe[:] = r
                #shm_greenframe[:] = g
                #shm_blueframe[:] = b
                # for better performance one can assign directly in function line the values to the shm_memorys: shm_red, .. , ... = cv.split(..)
                shm_bools[7:10]=[True] # trigger the start of the processing for each colorchannel
                #print(shm_bools[7], shm_bools[8], shm_bools[9])

                #display_image_with_text(image, shutter_speed, framenumber, camera_exposure_speed, trigger_record_OpenCV, out) # show the frame

                output.truncate(0) # clear the stream for next frame

                if framenumber == 500: # 5 sek @ 30 fps, only for performance measuring
                    t_stop=time.perf_counter()
                    print(f"calculated fps: {framenumber/(t_stop-t_start)}")
                    break


def display_image_with_text(img, shutter_speed, framenumber, camera_exposure_speed, trigger_record_OpenCV, out):
    """Overlay shutter speed, exposure speed and frame number (and optionally
    brightness / max channel values) on a copy of img, show it and optionally
    write it to the video writer `out`."""
    img = img.copy() # make copy of image and do not modify the original image

    # please activate only one trigger at a time
    trigger_show_brightness = 0 # trigger for (not) calculating and showing the brightness of the image
    if trigger_show_brightness == 1:
        arithmetic_mean_of_brightness_per_pixel_relative = calc_arithmetic_mean_of_brightness_per_pixel(img)

    trigger_show_max_brightness_values_of_colour_channels = 0 # trigger for (not) calculating and showing max values of colour chanels
    if trigger_show_max_brightness_values_of_colour_channels == 1:
        r_max, g_max, b_max = get_max_rgb_values(img)

    font = cv.FONT_HERSHEY_SIMPLEX # font
    fontScale = 1 # fontScale
    color = (255, 255, 255) # Font colour in BGR
    thickness = 1 # Line thickness in px

    # set text position
    frame_width = int(img.shape[1])
    frame_height = int(img.shape[0])
    text_start_position_Y = int(round(frame_height*0.12)) # start position of text in pixels 12 % of frame height
    text_linespacing = 50 # line spacing between two strings in pixels
    # text_start_position_X = int(frame_width/4) # start text from 1/4 of image width
    text_start_position_X = int(0) # start text from left edge of image

    # set position in (x,y)-coordinated from top left corner. Bottom-left corner of the text string in the image.
    pos_1 = (text_start_position_X, text_start_position_Y) # start text from 1/4 of image width
    pos_2 = (text_start_position_X, text_start_position_Y+text_linespacing) # start text from 1/4 of image width
    pos_3 = (text_start_position_X, text_start_position_Y+2*text_linespacing) # start text from 1/4 of image width
    if trigger_show_brightness==1 or trigger_show_max_brightness_values_of_colour_channels==1:
        pos_4 = (text_start_position_X, text_start_position_Y+3*text_linespacing) # start text from 1/4 of image width


    # define text to display
    text_line_1 = f"set ss: {shutter_speed} us"
    text_line_3 = f"Frame: {framenumber}"
    text_line_2 = f"ret exs: {camera_exposure_speed} us"
    if trigger_show_brightness==1:
        if arithmetic_mean_of_brightness_per_pixel_relative >= 0.01:
            text_line_4 = f"brightness: {round(arithmetic_mean_of_brightness_per_pixel_relative*100,2)} %"
        elif arithmetic_mean_of_brightness_per_pixel_relative < 0.01:
            text_line_4 = f"brightness: {round(arithmetic_mean_of_brightness_per_pixel_relative*10e3,2)} pm"
    if trigger_show_max_brightness_values_of_colour_channels==1:
        text_line_4 = f"max: r:{r_max} g:{g_max} b:{b_max}"


    # put the text into the image
    image_text_1 = cv.putText(img, text_line_1, pos_1, font,
        fontScale, color, thickness, cv.LINE_AA)
    image_text_2 = cv.putText(img, text_line_2, pos_2, font,
        fontScale, color, thickness, cv.LINE_AA)
    image_text_3 = cv.putText(img, text_line_3, pos_3, font,
        fontScale, color, thickness, cv.LINE_AA)
    if trigger_show_brightness==1 or trigger_show_max_brightness_values_of_colour_channels==1:
        image_text_4 = cv.putText(img, text_line_4, pos_4, font,
            fontScale, color, thickness, cv.LINE_AA)


    cv.imshow("Current Frame", img) # display the image
    if trigger_record_OpenCV == 1:
        out.write(img) # write frame to Video


def calc_arithmetic_mean_of_brightness_per_pixel(image):
    """Calculate overall brightness per pixel of the image.
    Mean brightness per pixel, relative to the maximum possible value."""
    # NOTE: so compute-intensive that LED flashing is rarely visible in the displayed image — therefore max values are shown instead (see get_max_rgb_values).

    b,g,r = cv.split(image) # OpenCV works with bgr. Image is also speciefied to be captured in bgr.
    r=r.astype('uint16') # set dtype of one colour to uint16, because the sum of 255+255+255 >255 =765
        #the rest will also be uint16 then
    image_heigth = r.shape[0]
    image_width = r.shape[1]

    number_of_colour_channels = 3
    arithmetic_mean_of_brightness_image = np.sum((r+g+b)/number_of_colour_channels)
    arithmetic_mean_of_brightness_per_pixel = arithmetic_mean_of_brightness_image/(image_width*image_heigth)

    max_possible_brightness = 255 # maximum possible brightness
    arithmetic_mean_of_brightness_per_pixel_relative = arithmetic_mean_of_brightness_per_pixel/max_possible_brightness

    return arithmetic_mean_of_brightness_per_pixel_relative

def get_max_rgb_values(image):
    """get max values of colour channels"""
    b,g,r = cv.split(image) # OpenCV works with bgr. Image is also speciefied to be captured in bgr.

    r_max=r.max()
    g_max=g.max()
    b_max=b.max()

    return r_max, g_max, b_max

def create_folder_for_captures():
    """Create a timestamped folder for captured pictures in the CWD and return
    (path, folder_exists). Terminates the script via quit() on OSError.

    NOTE(review): if the folder already exists, os.mkdir raises OSError and the
    whole script exits — confirm that is intended (os.makedirs(..., exist_ok=True)
    would be the lenient alternative). folder_exists is only assigned on success."""
    # Create folder for saving the captured pictures
    now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M")
    path_cwd = os.getcwd()

    path_saveFolder = path_cwd+r"/Capture_"+d1
    try:
        os.mkdir(path_saveFolder)
        folder_exists = True
    except OSError:
        print("Error! Ending script.")
        quit()

    return path_saveFolder, folder_exists

def do_processing():
    """Placeholder workload used for timing the multiprocessing pipeline."""
    time.sleep(0.001)
    #print("ohh i am doing high complex image analysis with computer vision")

def do_processing_frame_r(frame):
    print(f"max frame color red: {frame.max()}")
def do_processing_frame_g(frame):
    print(f"max frame color green: {frame.max()}")
def do_processing_frame_b(frame):
    print(f"max frame color blue: {frame.max()}")



def processing_red():
    """Consumer process for the red channel: attaches to the shared-memory
    blocks created by the main process and busy-waits for the start trigger
    (shm_bools[7]); runs do_processing() once per new frame.

    NOTE(review): the while-loop spins without sleeping between polls — this
    pins a CPU core; confirm that is acceptable on the target Pi."""
    shm_bool_init = shared_memory.SharedMemory(name="shm_bools") # Attach to existing shared memory block
    shm_bools = np.ndarray((10,), dtype=np.bool8, buffer= shm_bool_init.buf) # do not: newframe = np.array(...)[0] --> then one can not assign new value in main script
    newframe = shm_bools[0]
    p_red_finished = shm_bools[1] # not used, but for clarity
    p_red_started = shm_bools[4]

    shm_framenumber_init = shared_memory.SharedMemory\
        (name="shm_framenumber") # Attach to existing shared memory block
    shm_framenumber = np.ndarray((1,), dtype=np.uint64, \
        buffer= shm_framenumber_init.buf)
    # framenumer = shm_framenumber[0]

    shm_redframe_init = shared_memory.SharedMemory\
        (name="shm_redframe") # Attach to existing shared memory block
    shm_redframe = np.ndarray((image_heigth,image_width), dtype=np.uint8, \
        buffer= shm_redframe_init.buf)


    i=0
    while True:
        try:
            framenumber = shm_framenumber[0]
            if i==0:
                last_processed_frame = framenumber

            conditions_for_first_start = (i==0) and\
                (shm_bools[0] == True) and \
                (shm_bools[1] == False) and (shm_bools[2] == False) and (shm_bools[3] == False) \
                and (shm_bools[7] == True)

            conditions_for_starting_processing = (framenumber>last_processed_frame) and (shm_bools[7] == True)
            # newframe and all color-channel-processings have to be finished

            if conditions_for_first_start == True:
                shm_bools[4] = True # process started
                shm_bools[7] = False # reset trigger

                shm_bools[1] = False # set bool for p_red_finished to false
                #t1 = time.perf_counter_ns()
                do_processing()
                i += 1
                #print(f"first processing red finished. frame: {framenumber}")
                shm_bools[1] = True # set bool for p_red_finished to true

                shm_bools[4] = False # process ended
            elif conditions_for_starting_processing == True:
                shm_bools[4] = True # process started
                shm_bools[7] = False # reset trigger
                #print(f"red: framenumber: {framenumber}, last_processed_frame: {last_processed_frame}")

                shm_bools[1] = False # set bool for p_red_finished to false
                #t1 = time.perf_counter_ns()
                do_processing()
                #print(f"max frame color red: {shm_redframe.max()}")
                if show_opencv_window:
                    cv.imshow("red", shm_redframe)
                    cv.waitKey(1)
                #print(f"processing red finished. frame: {framenumber}")
                shm_bools[1] = True # set bool for p_red_finished to true
                #t2 = time.perf_counter_ns()
                #print(f"processing time for red channel: {round((t2-t1)*1e-6,2)} ms")

                last_processed_frame = framenumber
                shm_bools[4] = False # process ended
            # elif shm_bools[0] == False:
                # pass
                #print(f"no new red frame")

            # image processing finished



        except KeyboardInterrupt:
            try:
                shm_bool_init.close()
                shm_framenumber_init.close()
                shm_redframe_init.close()

            except FileNotFoundError:
                # Memory already destroyed
                pass

def processing_bgr():
    """Consumer process for the full BGR frame: attaches to the shared-memory
    blocks and runs lane_detection() on each new frame (continues past this
    excerpt)."""
    shm_bool_init = shared_memory.SharedMemory(name="shm_bools") # Attach to existing shared memory block
    shm_bools = np.ndarray((10,), dtype=np.bool8, buffer= shm_bool_init.buf) # do not: newframe = np.array(...)[0] --> then one can not assign new value in main script
    newframe = shm_bools[0]
    p_red_finished = shm_bools[1] # not used, but for clarity
    p_red_started = shm_bools[4]

    shm_framenumber_init = shared_memory.SharedMemory\
        (name="shm_framenumber") # Attach to existing shared memory block
    shm_framenumber = np.ndarray((1,), dtype=np.uint64, \
        buffer= shm_framenumber_init.buf)
    # framenumer = shm_framenumber[0]

    shm_frame_init = shared_memory.SharedMemory\
(name="shm_frame") # Attach to existing shared memory block + shm_redframe = np.ndarray((image_heigth,image_width, number_of_colorchannels), dtype=np.uint8, \ + buffer= shm_frame_init.buf) + image_bgr = shm_redframe + + params_for_blob_detection = define_parameters_for_blob_detection() + + i=0 + while True: + try: + framenumber = shm_framenumber[0] + if i==0: + last_processed_frame = framenumber + + conditions_for_first_start = (i==0) and\ + (shm_bools[0] == True) and \ + (shm_bools[1] == False) and (shm_bools[2] == False) and (shm_bools[3] == False) \ + and (shm_bools[7] == True) + + conditions_for_starting_processing = (framenumber>last_processed_frame) and (shm_bools[7] == True) + # newframe and all color-channel-processings have to be finished + + if conditions_for_first_start == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = time.perf_counter_ns() + lane_detection(image_bgr, params_for_blob_detection) + i += 1 + #print(f"first processing red finished. 
frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + + shm_bools[4] = False # process ended + elif conditions_for_starting_processing == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + #print(f"red: framenumber: {framenumber}, last_processed_frame: {last_processed_frame}") + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = time.perf_counter_ns() + start_processing = time.perf_counter_ns() + lane_detection(image_bgr, params_for_blob_detection) + end_processing = time.perf_counter_ns() + time_processing = end_processing-start_processing + time_processing = time_processing*1e-6 + time_processing = round(time_processing, 2) + print(f'processing time: {time_processing} ms') + #print(f"max frame color red: {shm_redframe.max()}") + if show_opencv_window: + cv.imshow("red", shm_redframe) + cv.waitKey(1) + #print(f"processing red finished. frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + #t2 = time.perf_counter_ns() + #print(f"processing time for red channel: {round((t2-t1)*1e-6,2)} ms") + + last_processed_frame = framenumber + shm_bools[4] = False # process ended + # elif shm_bools[0] == False: + # pass + #print(f"no new red frame") + + # image processing finished + + + + except KeyboardInterrupt: + try: + shm_bool_init.close() + shm_framenumber_init.close() + shm_redframe_init.close() + + except FileNotFoundError: + # Memory already destroyed + pass + +# ---------------------------------------------------------------------------- +# main +def main(): + start = time.perf_counter() + + try: + # create processes + p_red = Process(target=processing_bgr) + processes = [p_red] + + print(f"waiting 1 second to create processes") + time.sleep(1) # sind prozesse schon vorhanden + + # start acitivity of processes + for process in processes: + process.start() + + # start capturing + get_frames_from_picamera() + + + print('*******************************') + 
# this below code is only executed if the loop in take_image_picamera_opencv is breaked + # In real use case there will be no end of this program + + for process in processes: + process.terminate() + + # print time measuring + end = time.perf_counter() + print(f'Script finished in {round(end-start, 2)} s') + + # close each SharedMemory instance and unlink to release the shared memory + shm_bools_create.close() + shm_bools_create.unlink() + shm_framenumber_create.close() + shm_framenumber_create.unlink() + shm_redframe_create.close() + shm_redframe_create.unlink() + shm_greenframe_create.close() + shm_greenframe_create.unlink() + shm_blueframe_create.close() + shm_blueframe_create.unlink() + + except KeyboardInterrupt: + # Normally this prgoram never gets keyboard interrupted! But here this case is nevertheless handled + # End Script + try: + # close each SharedMemory instance and unlink to release the shared memory + shm_bools.close() + shm_bools.unlink() + shm_framenumber_create.close() + shm_framenumber_create.unlink() + shm_redframe_create.close() + shm_redframe_create.unlink() + shm_greenframe_create.close() + shm_greenframe_create.unlink() + shm_blueframe_create.close() + shm_blueframe_create.unlink() + + except FileNotFoundError: + # Memory already destroyed + pass + + +if __name__ == "__main__": + main() diff --git a/90_ZielSW/Multiprocessing/Lanedetection_Picamera_V02.py b/90_ZielSW/Multiprocessing/Lanedetection_Picamera_V02.py new file mode 100644 index 0000000..eb14c95 --- /dev/null +++ b/90_ZielSW/Multiprocessing/Lanedetection_Picamera_V02.py @@ -0,0 +1,905 @@ + +import cv2 as cv +import numpy as np + +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os +import sys + +import numpy as np +import math as M + +from multiprocessing import Process, shared_memory + +# Parameters +pixels_per_mm = 71/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320 +# 
# pixels_per_mm = 107/24.25 #[px/mm] for 120 mm camera height for resolution: 640x480

# Offset of the camera sensor in the Scooty, according to the Scooty coordinate system
x_offset_camera_mm = 100  # [mm]
y_offset_camera_mm = 50   # [mm]
x_offset_camera_px = x_offset_camera_mm*pixels_per_mm  # [px]
y_offset_camera_px = y_offset_camera_mm*pixels_per_mm  # [px]

# image parameters
image_heigth = 320  # shape[0]
image_width = 416   # shape[1]
# calculate center of image
[x_0, y_0] = np.array([image_width/2, image_heigth/2], dtype=np.uint16)


threshold_color_detection = 60  # channel values under this will not be considered as active LEDs

# Parameters for blob/LED detection
minDiameter_mm = 5  # [mm] minimum diameter of a detected blob/LED
maxDiameter_mm = 9  # [mm] maximum diameter of a detected blob/LED

# Colour numbers to identify the colour channels in the matrix with all detected LEDs.
# Numbers instead of strings, so the numpy array can stay uint16.
color_number_off = 0
color_number_red = 1
color_number_green = 2
color_number_blue = 3
color_number_yellow = 4
color_number_magenta = 5
color_number_cyan = 6
color_number_white = 7

#----------------------------------------------------------------------
# Define camera settings
SENSOR_MODE = 4  # corresponding sensor mode to resolution
# NOTE(review): comment says (width, heigth) but the values are swapped relative
# to the image parameters above (416x320) -- confirm the intended order.
OUTPUT_RESOLUTION = (320, 416)  # (width, heigth)
image_width = OUTPUT_RESOLUTION[0]
image_heigth = OUTPUT_RESOLUTION[1]
number_of_colorchannels = 3  # r, g, b
size_of_frame = int(image_heigth*image_width*number_of_colorchannels)  # BUGFIX: was heigth*heigth
frame_dimension = int(number_of_colorchannels)

AWB_MODE = 'off'  # auto white balance mode
AWB_GAINS = (1.395, 1.15)  # white balance gains (red, blue); int, float or Fraction are valid
BRIGHTNESS = 25  # brightness setting of the camera. Default is 50. [0-100]
# the brighter, the brighter the LEDs and the higher the RGB values, and vice versa!
CONTRAST = 100  # contrast setting of the camera. Default is 0. [-100 ... 100]

SHUTTER_SPEED = 50  # [µs]

ISO = 320  # ISO value
EXPOSURE_MODE = 'off'
FRAMERATE = 25  # frames per second. 40 fps is max for sensor mode 4

SLEEP_TIME = 2  # sleep time for the camera in seconds. Default: 2 s

# miscellaneous parameters
max_value_of_uint64 = int((2**64) - 1)  # @30 fps: ~1.95e10 years of driving -> integer overflow not relevant

# settings for development
show_opencv_window = True  # show OpenCV windows
draw_opencv = True  # draw lane and so on

# create shared memorys for the main process
# shared memory for bools
shm_bools_pre = np.array([False, False, False, False, False, False, False, False, False, False], dtype=np.bool8)
# [0]: newframe        [1]: p_red_finished      [2]: p_green_finished      [3]: p_blue_finished
# [4]: p_red_started   [5]: p_green_started     [6]: p_blue_started
# [7]: p_red_start_trigger [8]: p_green_start_trigger [9]: p_blue_start_trigger
size_of_buffer = shm_bools_pre.nbytes
print(f"size of buffer: {size_of_buffer}")          # size of buffer: 10
print(f"shm_bools dtype: {shm_bools_pre.dtype}")    # dtype: bool
shm_bools_create = shared_memory.SharedMemory(name="shm_bools", create=True, size=shm_bools_pre.nbytes)
shm_bools = np.ndarray(shm_bools_pre.shape, dtype=shm_bools_pre.dtype, buffer=shm_bools_create.buf)
shm_bools[:] = shm_bools_pre[:]  # copy the original data into shared memory

# shared memory for framenumber
shm_framenumber_pre = np.array([0], dtype=np.uint64)
size_of_buffer = shm_framenumber_pre.nbytes
print(f"size of framenumber-buffer: {size_of_buffer}")       # 8
print(f"shm_framenumber dtype: {shm_framenumber_pre.dtype}") # uint64
shm_framenumber_create = shared_memory.SharedMemory(name="shm_framenumber", create=True, size=shm_framenumber_pre.nbytes)
shm_framenumber = np.ndarray(shm_framenumber_pre.shape, dtype=shm_framenumber_pre.dtype, buffer=shm_framenumber_create.buf)
shm_framenumber[:] = shm_framenumber_pre[:]  # copy the original data into shared memory

# shared memory for red, green, blue frame
int_black = 0  # integer for black colour / no colour
shm_colorframes_pre = np.full((image_heigth, image_width), int_black, dtype=np.uint8)
size_of_buffer = shm_colorframes_pre.nbytes
print(f"size of colorframe-buffer: {size_of_buffer}")             # 133 120
print(f"shm_colorframes_pre dtype: {shm_colorframes_pre.dtype}")  # uint8
shm_redframe_create = shared_memory.SharedMemory(name="shm_redframe", create=True, size=shm_colorframes_pre.nbytes)
shm_greenframe_create = shared_memory.SharedMemory(name="shm_greenframe", create=True, size=shm_colorframes_pre.nbytes)
shm_blueframe_create = shared_memory.SharedMemory(name="shm_blueframe", create=True, size=shm_colorframes_pre.nbytes)
shm_redframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_redframe_create.buf)
shm_greenframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_greenframe_create.buf)
shm_blueframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_blueframe_create.buf)
shm_redframe[:] = shm_colorframes_pre[:]    # copy the original data into shared memory
shm_greenframe[:] = shm_colorframes_pre[:]  # copy the original data into shared memory
shm_blueframe[:] = shm_colorframes_pre[:]   # copy the original data into shared memory

# shared memory bgr frame
int_black = 0  # integer for black colour / no colour
shm_frame_pre = np.full((image_heigth, image_width, number_of_colorchannels), int_black, dtype=np.uint8)
size_of_buffer = shm_frame_pre.nbytes
print(f"size of colorframe-buffer: {size_of_buffer}")     # 399 360 = 416 x 320 x 3
print(f"shm_colorframes_pre dtype: {shm_frame_pre.dtype}")# uint8
shm_frame_create = shared_memory.SharedMemory(name="shm_frame", create=True, size=shm_frame_pre.nbytes)
shm_frame = np.ndarray(shm_frame_pre.shape, dtype=shm_frame_pre.dtype, buffer=shm_frame_create.buf)
shm_frame[:] = shm_frame_pre[:]  # copy the original data into shared memory


# ----------------------------------------------------------------------------
# Define Functions
def points_trafo(detected_LEDs, alpha_rad, dx, dy):
    """Transform the LED points into the lane coordinate system KS-LED."""
    detected_LEDs_trafo = detected_LEDs.copy()  # copy, because otherwise it is only a reference
    detected_LEDs_trafo = detected_LEDs_trafo.astype(np.int16)  # avoid integer overflow
    x_pnts = detected_LEDs_trafo[:, 0]
    y_pnts = detected_LEDs_trafo[:, 1]

    # translation
    x1 = x_pnts-dx-x_0
    x_trafo = x1
    y1 = y_pnts-dy-y_0
    y_trafo = y1

    # rotation; the sensor angle is clockwise, i.e. negative w.r.t. the mathematical definition
    x_trafo = np.cos(-alpha_rad)*x1-np.sin(-alpha_rad)*y1
    detected_LEDs_trafo[:, 0] = x_trafo
    y_trafo = np.sin(-alpha_rad)*x1+np.cos(-alpha_rad)*y1
    detected_LEDs_trafo[:, 1] = y_trafo

    # sort points along the lane: x_2, y_2 axis (KS_LED)
    detected_LEDs_trafo = detected_LEDs_trafo[detected_LEDs_trafo[:, 0].argsort(kind='quicksort')]
    return detected_LEDs_trafo

def construct_lane(detected_LEDs, img_bgr):
    """Construct the lane from the detected LEDs.

    Fits a line through the LED centers, computes the foot of the
    perpendicular from the image center and returns
    (dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED).
    """
    # approach 2: fit a line through the LED centers in KS_0.
    # DIST_L2: the simplest and fastest least-squares method: the euclidean distance
    param = 0     # not used for DIST_L2
    reps = 0.001  # sufficient accuracy for the radius (distance between origin and line)
    aeps = 0.001  # sufficient accuracy for the angle
    [dx, dy, x_2, y_2] = cv.fitLine(detected_LEDs[:, 0:2], cv.DIST_L2, param, reps, aeps)
    # x_2, y_2: same as mean_of_leds = np.mean([x_pnts, y_pnts], 1)

    alpha_rad = np.arctan2(dy, dx)        # angle of the line [rad]
    alpha = np.arctan2(dy, dx)*180/np.pi  # angle of the line [deg]
    print(f"Lane: alpha:{alpha[0]}°")

    # smallest distance from point to line
    # line: x = a + t*b ; point: OP = p ; d = |b x (p-a)| / |b|
    # (calculation per: Repetitorium Hoehere Mathematik, Wirth)
    p = np.array([x_0, y_0])
    a = np.array([x_2, y_2])[:, 0]  # [:,0] --> 1-dimensional array with the desired values
    b = np.array([np.cos(alpha_rad), np.sin(alpha_rad)])[:, 0]  # direction vector
    c = p-a

    cross = np.cross(b, c)
    d = np.linalg.norm(cross)/np.linalg.norm(b)  # distance [px]

    # foot of the perpendicular (x_LED, y_LED)
    t_0_dot = np.dot(c, b)
    t_0_norm = (np.linalg.norm(b)**2)
    t_0 = t_0_dot/t_0_norm
    [x_LED, y_LED] = (a+t_0*b)
    print(f"x_LED: {x_LED}, y_LED: {y_LED}")

    # distance (dx, dy) of the foot point to KS_0
    dx_LED = x_LED - x_0
    dx_LED_mm = dx_LED*(1/pixels_per_mm)
    dy_LED = y_LED - y_0
    dy_LED_mm = dy_LED*(1/pixels_per_mm)
    print(f"dx_LED:{dx_LED} [px] , dy_LED:{dy_LED} [px]")
    print(f"dx_LED:{dx_LED_mm} [mm] , dy_LED:{dy_LED_mm} [mm]")

    # distance (dx, dy) of the foot point from the image center to KS_Scooty (returned values)
    dx_LED_scooty = x_LED - x_0 + x_offset_camera_px
    dx_LED_scooty_mm = dx_LED_scooty*(1/pixels_per_mm)
    dy_LED_scooty = y_LED - y_0 + y_offset_camera_px
    dy_LED_scooty_mm = dy_LED_scooty*(1/pixels_per_mm)
    print(f"dx_LED_scooty:{dx_LED_scooty} [px] , dy_LED_scooty:{dy_LED_scooty} [px]")
    print(f"dx_LED_scooty:{dx_LED_scooty_mm} [mm] , dy_LED_scooty:{dy_LED_scooty_mm} [mm]")

    # transform points to get the sorted LED positions along the lane.
    # with plain detection a vertical LED row can yield pattern errors like 211323233,
    # hence we continue with the sorted points.
    detected_LEDs_KS_LED = points_trafo(detected_LEDs, alpha_rad, dx_LED, dy_LED)
    print(f"Detected LEDs in KS_LED:(x2, y2):\n {detected_LEDs_KS_LED}")

    #-----------------------------------
    # draw useful lines and points

    # draw lane line
    pt_0 = (a+b*np.array([-300, -300])).astype(np.int32)
    pt_1 = (a+b*np.array([300, 300])).astype(np.int32)
    cv.line(img_bgr, pt_0, pt_1, (255, 255, 255), 1)  # draw lane

    # draw dx dy: shortest distance from KS_0 to KS_LED (perpendicular)
    cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_LED)), (0, 0, 255), 2)

    # draw additional points
    cv.circle(img_bgr, (int(x_2), int(y_2)), 5, (255, 128, 255), -1)   # pink: center of points
    cv.circle(img_bgr, (int(x_LED), int(y_LED)), 5, (170, 255, 0), -1) # lime green: foot point

    cv.imshow("Lane", img_bgr)

    return dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED

def convert_rgb_to_grayscale_average(image_bgr):
    """Convert a BGR image to grayscale. Algorithm: average, Y = (R+G+B)/3."""
    # convert dtype to prevent integer overflow during the addition
    image_bgr = image_bgr.astype(np.uint16)
    image_gray = (image_bgr[:, :, 0]+image_bgr[:, :, 1]+image_bgr[:, :, 2])/3  # add values / do conversion
    image_gray = image_gray.astype(np.uint8)  # convert back to uint8

    return image_gray

def define_parameters_for_blob_detection():
    """Set and return the parameters for the simple blob detector."""
    params = cv.SimpleBlobDetector_Params()

    # Thresholds for converting the source image to binary images, applied from
    # minThreshold (inclusive) to maxThreshold (exclusive) with step thresholdStep.
    # Since the grayscale image is dark if only one colour channel is active,
    # the threshold values have to be set like this (low thresholdStep in particular).
    params.minThreshold = 20  # reminder: this value is set for the grayscale image
    params.maxThreshold = 255
    params.thresholdStep = 1

    params.filterByColor = False  # do not filter blobs by colour

    # filter blobs by area
    params.filterByArea = True
    minDiameter_px = minDiameter_mm*pixels_per_mm  # [px] minimum diameter of a detected blob/LED
    maxDiameter_px = maxDiameter_mm*pixels_per_mm  # [px] maximum diameter of a detected blob/LED
    minArea_px2 = np.pi/4*minDiameter_px**2
    maxArea_px2 = np.pi/4*maxDiameter_px**2

    params.minArea = minArea_px2  # min area of a blob in px^2
    # params.maxArea = maxArea_px2  # max area of a blob in px^2
    # reason for not filtering maxArea: motion blur + rolling shutter --> larger area

    # filter by inertia
    params.filterByInertia = False
    params.minInertiaRatio = 0.2  # [0-1]

    # filter by convexity
    params.filterByConvexity = False
    params.minConvexity = 0.2  # [0-1]

    # filter by circularity
    params.filterByCircularity = False
    params.minCircularity = 0.4  # [0-1]

    # params.minDistBetweenBlobs = minDist_px # this has no effect

    return params

def detect_LED_positions_in_grayscale(image_gray, image_bgr, params_for_blob_detection):
    """Detect LED blobs in the grayscale image.

    Returns an (n, 3) uint16 matrix [x, y, colour placeholder] or None if
    no LEDs were detected.
    """
    detector = cv.SimpleBlobDetector_create(params_for_blob_detection)  # set up the detector
    keypoints = detector.detect(image_gray)  # detect blobs --> LEDs
    number_of_detected_leds = len(keypoints)

    if number_of_detected_leds != 0:
        print(f"detected LEDs: {number_of_detected_leds}")

        # pre-allocate matrix: one row per LED, columns: x, y, colour
        position_of_leds = np.zeros((number_of_detected_leds, 3), dtype=np.uint16)
        for i, k in enumerate(keypoints):
            position_of_leds[i, 0] = int(np.ceil(k.pt[0]))  # x position
            position_of_leds[i, 1] = int(np.ceil(k.pt[1]))  # y position

        if draw_opencv:
            # draw the keypoints on the original image.
            # DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS: circle size corresponds to blob size
            blobs = cv.drawKeypoints(image=image_bgr, keypoints=keypoints, color=(255, 255, 255),
                                     outImage=np.array([]), flags=cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
            if show_opencv_window:
                # BUGFIX: imshow moved under draw_opencv -- "blobs" is undefined otherwise
                cv.imshow("Detected", blobs)

        return position_of_leds

    else:
        print(f"No LEDs were detected")
        return None

def detect_position_of_all_LEDs_grayscale(image_gray, image_bgr, params_for_blob_detection):
    """Thin wrapper around detect_LED_positions_in_grayscale (returns matrix or None)."""
    position_of_LEDs = detect_LED_positions_in_grayscale(image_gray, image_bgr, params_for_blob_detection)

    if position_of_LEDs is not None:
        return position_of_LEDs
    else:
        return None

def get_color_of_leds(matrix_of_LEDs, image_bgr):
    """Determine the colour of each detected LED and write it into column 2.

    The mean channel values in a small region around each LED center are
    compared against threshold_color_detection.
    """
    # is image_r[y_pos, x_pos] = image_bgr[y_pos, x_pos, 2]? --> yes. No need to split channels.

    offset = 2  # half the side length of the square used to average the colour around the LED center
    # offset = 0 --> only the value of the center point; offset=1 --> 9 values; offset=2 --> 25 values

    for led in matrix_of_LEDs:
        x_pos = led[0]  # uint16
        y_pos = led[1]  # uint16

        # channel values in the region around the LED center (+1 at stop index: not inclusive)
        region_around_blue_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 0]   # uint8
        region_around_green_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 1]  # uint8
        region_around_red_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 2]    # uint8

        # average the values; convert dtype to prevent integer overflow during addition
        region_around_red_led = region_around_red_led.astype(np.uint16)
        region_around_green_led = region_around_green_led.astype(np.uint16)
        region_around_blue_led = region_around_blue_led.astype(np.uint16)
        # sum all elements and divide by the number of elements
        number_of_elements = region_around_blue_led.size
        value_of_red_led = region_around_red_led.sum()/number_of_elements     # float64, if not an integer result
        value_of_green_led = region_around_green_led.sum()/number_of_elements # float64, if not an integer result
        value_of_blue_led = region_around_blue_led.sum()/number_of_elements   # float64, if not an integer result

        # determine which LEDs are active: value > threshold --> channel is active
        status_blue_led = False; status_green_led = False; status_red_led = False
        if value_of_blue_led > threshold_color_detection:
            status_blue_led = True
        if value_of_green_led > threshold_color_detection:
            status_green_led = True
        if value_of_red_led > threshold_color_detection:
            status_red_led = True

        # determine the colour by checking the cases:
        # case 1: red
        if status_blue_led==False and status_green_led==False and status_red_led==True:
            color = color_number_red
        # case 2: green
        elif status_blue_led==False and status_green_led==True and status_red_led==False:
            color = color_number_green
        # case 3: blue
        elif status_blue_led==True and status_green_led==False and status_red_led==False:
            color = color_number_blue
        # case 4: yellow = red + green
        elif status_blue_led==False and status_green_led==True and status_red_led==True:
            color = color_number_yellow
        # case 5: magenta = red + blue
        elif status_blue_led==True and status_green_led==False and status_red_led==True:
            color = color_number_magenta
        # case 6: cyan = green + blue
        elif status_blue_led==True and status_green_led==True and status_red_led==False:
            color = color_number_cyan
        # case 7: white = red + green + blue
        elif status_blue_led==True and status_green_led==True and status_red_led==True:
            color = color_number_white
        # case 8: LED not active -- cannot occur, because the blob detector
        # in detect_LED_positions_in_grayscale cannot detect inactive LEDs
        else:
            color = color_number_off

        # fill matrix with the colour
        led[2] = color  # uint16

    return matrix_of_LEDs


def detect_LEDs_with_grayscale(image_bgr, params_for_blob_detection):
    """Detect LEDs via grayscale conversion, then determine their colours."""
    # convert rgb to grayscale image and time the conversion
    start_m1 = time.perf_counter_ns()
    image_gray = convert_rgb_to_grayscale_average(image_bgr)
    end_m1 = time.perf_counter_ns()
    time_processing = end_m1-start_m1
    time_processing = time_processing*1e-6
    time_processing = round(time_processing, 2)
    print(f'processing time grayscale_conversion: {time_processing} ms')

    # get position of LEDs
    position_of_LEDs = detect_position_of_all_LEDs_grayscale(image_gray=image_gray, image_bgr=image_bgr, params_for_blob_detection=params_for_blob_detection)

    if position_of_LEDs is not None:
        # determine colour of LEDs and add to matrix
        detected_LEDs = get_color_of_leds(position_of_LEDs, image_bgr)
        return detected_LEDs
    else:
        return None


def lane_detection(image_bgr, params_for_blob_detection):
    """Full lane-detection pipeline for one BGR frame.

    Returns the matrix of detected LEDs, or None if none were found.
    """
    # detect LEDs
    print(f"Detect LEDs and color:")
    detected_LEDs = detect_LEDs_with_grayscale(image_bgr, params_for_blob_detection)

    if detected_LEDs is not None:
        # construct lane
        print(f"_____________________________________")
        print("Contruct lane")
        dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED = \
            construct_lane(detected_LEDs, image_bgr)

        # print result
        print(f"Detected LEDs relative to image-center(x0,y0):\n{detected_LEDs}")
        return detected_LEDs
    else:
        return None


# -------------------
# Define Funcions
def get_frames_from_picamera():
    """Capture frames continuously and publish them via shared memory.

    Sets shm_bools[0] (newframe), shm_framenumber[0] and shm_frame, then
    raises the start triggers shm_bools[7:10] for the worker processes.
    """
    # Initialise camera
    with picamera.PiCamera() as camera:
        with PiRGBArray(camera) as output:
            # Set camera settings
            camera.sensor_mode = SENSOR_MODE          # force camera into desired sensor mode
            camera.resolution = OUTPUT_RESOLUTION     # frame resized on the GPU to this resolution. No CPU usage!
            camera.framerate = FRAMERATE

            camera.awb_mode = AWB_MODE
            camera.awb_gains = AWB_GAINS

            camera.iso = ISO
            camera.shutter_speed = SHUTTER_SPEED

            # wait for iso/digital/analog gains to settle before fixing them with exposure_mode = off
            time.sleep(SLEEP_TIME)
            camera.exposure_mode = EXPOSURE_MODE

            time.sleep(1)  # wait before applying brightness and contrast
            camera.brightness = BRIGHTNESS
            camera.contrast = CONTRAST
            time.sleep(SLEEP_TIME)  # camera warm-up time to apply settings

            t_start = time.perf_counter()  # save time for fps calculation

            for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)):
                # General information: always the newest frame is received;
                # processing must be faster than fps if every frame should be processed

                shm_bools[0] = True
                framenumber = frameidx+1  # frameidx starts with 0, framenumber with 1

                shm_framenumber[0] = framenumber

                # publish the raw BGR frame (no JPEG encoding) via shared memory.
                # for better performance one could split channels directly into the
                # colour shared memories: shm_red, ... = cv.split(...)
                shm_frame[:] = frame.array

                shm_bools[7:10] = [True]  # trigger the start of processing for each colour channel

                output.truncate(0)  # clear the stream for the next frame

                if framenumber == 500:  # only for performance measuring
                    t_stop = time.perf_counter()
                    print(f"calculated fps: {framenumber/(t_stop-t_start)}")
                    break


def display_image_with_text(img, shutter_speed, framenumber, camera_exposure_speed, trigger_record_OpenCV, out):
    """Display a frame with overlaid status text; optionally record it to video.

    Only one of the two display triggers should be activated at a time.
    """
    img = img.copy()  # make a copy and do not modify the original image

    # please activate only one trigger at once
    trigger_show_brightness = 0  # trigger for (not) calculating and showing the brightness of the image
    if trigger_show_brightness == 1:
        arithmetic_mean_of_brightness_per_pixel_relative = calc_arithmetic_mean_of_brightness_per_pixel(img)

    trigger_show_max_brightness_values_of_colour_channels = 0  # trigger for (not) showing max channel values
    if trigger_show_max_brightness_values_of_colour_channels == 1:
        r_max, g_max, b_max = get_max_rgb_values(img)

    font = cv.FONT_HERSHEY_SIMPLEX  # font
    fontScale = 1                   # font scale
    color = (255, 255, 255)         # font colour in BGR
    thickness = 1                   # line thickness in px

    # set text position
    frame_width = int(img.shape[1])
    frame_height = int(img.shape[0])
    text_start_position_Y = int(round(frame_height*0.12))  # start position of text: 12 % of frame height
    text_linespacing = 50  # line spacing between two strings in pixels
    text_start_position_X = int(0)  # start text from the left edge of the image

    # (x,y) positions from the top-left corner; bottom-left corner of the text string in the image
    pos_1 = (text_start_position_X, text_start_position_Y)
    pos_2 = (text_start_position_X, text_start_position_Y+text_linespacing)
    pos_3 = (text_start_position_X, text_start_position_Y+2*text_linespacing)
    if trigger_show_brightness==1 or trigger_show_max_brightness_values_of_colour_channels==1:
        pos_4 = (text_start_position_X, text_start_position_Y+3*text_linespacing)

    # define text to display
    text_line_1 = f"set ss: {shutter_speed} us"
    text_line_3 = f"Frame: {framenumber}"
    text_line_2 = f"ret exs: {camera_exposure_speed} us"
    if trigger_show_brightness==1:
        if arithmetic_mean_of_brightness_per_pixel_relative >= 0.01:
            text_line_4 = f"brightness: {round(arithmetic_mean_of_brightness_per_pixel_relative*100,2)} %"
        elif arithmetic_mean_of_brightness_per_pixel_relative < 0.01:
            text_line_4 = f"brightness: {round(arithmetic_mean_of_brightness_per_pixel_relative*10e3,2)} pm"
    if trigger_show_max_brightness_values_of_colour_channels==1:
        text_line_4 = f"max: r:{r_max} g:{g_max} b:{b_max}"

    # put the text into the image (lines 1-3 intentionally not drawn)
    if trigger_show_brightness==1 or trigger_show_max_brightness_values_of_colour_channels==1:
        image_text_4 = cv.putText(img, text_line_4, pos_4, font,
                                  fontScale, color, thickness, cv.LINE_AA)

    cv.imshow("Current Frame", img)  # display the image
    if trigger_record_OpenCV == 1:
        out.write(img)  # write frame to video


def calc_arithmetic_mean_of_brightness_per_pixel(image):
    """Calculate the overall mean brightness per pixel of a BGR image.

    Returns the mean brightness relative to the maximum possible
    brightness (255), i.e. a float in [0, 1].
    """
    # NOTE: this is so compute-intensive that LED flashing is (almost) never
    # visible in the displayed image -- hence max values are usually shown instead.
    b, g, r = cv.split(image)  # OpenCV works with BGR. Image is also specified to be captured in BGR.
    r = r.astype('uint16')  # uint16 to avoid overflow: 255+255+255 = 765 > 255
    # (the other channels are promoted to uint16 by the addition below)
    image_heigth = r.shape[0]
    image_width = r.shape[1]

    number_of_colour_channels = 3
    arithmetic_mean_of_brightness_image = np.sum((r+g+b)/number_of_colour_channels)
    arithmetic_mean_of_brightness_per_pixel = arithmetic_mean_of_brightness_image/(image_width*image_heigth)

    max_possible_brightness = 255  # maximum possible brightness per pixel
    arithmetic_mean_of_brightness_per_pixel_relative = arithmetic_mean_of_brightness_per_pixel/max_possible_brightness

    return arithmetic_mean_of_brightness_per_pixel_relative

def get_max_rgb_values(image):
    """Return the maximum value of each colour channel as (r_max, g_max, b_max)."""
    b, g, r = cv.split(image)  # OpenCV works with BGR. Image is also specified to be captured in BGR.

    r_max = r.max()
    g_max = g.max()
    b_max = b.max()

    return r_max, g_max, b_max

def create_folder_for_captures():
    """Create a timestamped folder for the captured pictures.

    Returns (path_saveFolder, folder_exists). Ends the script if the
    folder cannot be created.
    """
    now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M")
    path_cwd = os.getcwd()

    path_saveFolder = path_cwd+r"/Capture_"+d1
    try:
        os.mkdir(path_saveFolder)
        folder_exists = True
    except OSError:
        print("Error! Ending script.")
        quit()

    return path_saveFolder, folder_exists

def do_processing():
    """Placeholder for the image-processing work of a worker process."""
    time.sleep(0.001)
    # print("ohh i am doing high complex image analysis with computer vision")

def do_processing_frame_r(frame):
    print(f"max frame color red: {frame.max()}")
def do_processing_frame_g(frame):
    print(f"max frame color green: {frame.max()}")
def do_processing_frame_b(frame):
    print(f"max frame color blue: {frame.max()}")



def processing_red():
    """Worker process: process the red colour channel from shared memory.

    Attaches to the shared-memory blocks created by the main process and
    loops until KeyboardInterrupt. Synchronisation is done via the bool
    flags in "shm_bools" (see their layout at creation time).
    """
    shm_bool_init = shared_memory.SharedMemory(name="shm_bools")  # attach to existing shared memory block
    # do not: newframe = np.array(...)[0] --> then one cannot assign a new value from the main script
    shm_bools = np.ndarray((10,), dtype=np.bool8, buffer=shm_bool_init.buf)
    newframe = shm_bools[0]
    p_red_finished = shm_bools[1]  # not used, but for clarity
    p_red_started = shm_bools[4]

    shm_framenumber_init = shared_memory.SharedMemory(name="shm_framenumber")  # attach to existing shared memory block
    shm_framenumber = np.ndarray((1,), dtype=np.uint64, buffer=shm_framenumber_init.buf)

    shm_redframe_init = shared_memory.SharedMemory(name="shm_redframe")  # attach to existing shared memory block
    shm_redframe = np.ndarray((image_heigth, image_width), dtype=np.uint8, buffer=shm_redframe_init.buf)

    i = 0
    while True:
        try:
            framenumber = shm_framenumber[0]
            if i == 0:
                last_processed_frame = framenumber

            conditions_for_first_start = (i == 0) and \
                (shm_bools[0] == True) and \
                (shm_bools[1] == False) and (shm_bools[2] == False) and (shm_bools[3] == False) \
                and (shm_bools[7] == True)

            # newframe and all colour-channel processings have to be finished
            conditions_for_starting_processing = (framenumber > last_processed_frame) and (shm_bools[7] == True)

            if conditions_for_first_start == True:
                shm_bools[4] = True   # process started
                shm_bools[7] = False  # reset trigger

                shm_bools[1] = False  # set bool for p_red_finished to false
                do_processing()
                i += 1
                shm_bools[1] = True   # set bool for p_red_finished to true

                shm_bools[4] = False  # process ended
            elif conditions_for_starting_processing == True:
                shm_bools[4] = True   # process started
                shm_bools[7] = False  # reset trigger

                shm_bools[1] = False  # set bool for p_red_finished to false
                do_processing()
                if show_opencv_window:
                    cv.imshow("red", shm_redframe)
                    cv.waitKey(1)
                shm_bools[1] = True   # set bool for p_red_finished to true

                last_processed_frame = framenumber
                shm_bools[4] = False  # process ended

        except KeyboardInterrupt:
            try:
                shm_bool_init.close()
                shm_framenumber_init.close()
                shm_redframe_init.close()
            except FileNotFoundError:
                # memory already destroyed
                pass
            break  # BUGFIX: leave the loop after cleanup instead of spinning on closed handles
time.perf_counter_ns() + do_processing() + i += 1 + #print(f"first processing red finished. frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + + shm_bools[4] = False # process ended + elif conditions_for_starting_processing == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + #print(f"red: framenumber: {framenumber}, last_processed_frame: {last_processed_frame}") + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = time.perf_counter_ns() + do_processing() + #print(f"max frame color red: {shm_redframe.max()}") + if show_opencv_window: + cv.imshow("red", shm_redframe) + cv.waitKey(1) + #print(f"processing red finished. frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + #t2 = time.perf_counter_ns() + #print(f"processing time for red channel: {round((t2-t1)*1e-6,2)} ms") + + last_processed_frame = framenumber + shm_bools[4] = False # process ended + # elif shm_bools[0] == False: + # pass + #print(f"no new red frame") + + # image processing finished + + + + except KeyboardInterrupt: + try: + shm_bool_init.close() + shm_framenumber_init.close() + shm_redframe_init.close() + + except FileNotFoundError: + # Memory already destroyed + pass + +def processing_bgr(): + shm_bool_init = shared_memory.SharedMemory(name="shm_bools") # Attach to existing shared memory block + shm_bools = np.ndarray((10,), dtype=np.bool8, buffer= shm_bool_init.buf) # do not: newframe = np.array(...)[0] --> then one can not assign new value in main script + newframe = shm_bools[0] + p_red_finished = shm_bools[1] # not used, but for clarity + p_red_started = shm_bools[4] + + shm_framenumber_init = shared_memory.SharedMemory\ + (name="shm_framenumber") # Attach to existing shared memory block + shm_framenumber = np.ndarray((1,), dtype=np.uint64, \ + buffer= shm_framenumber_init.buf) + # framenumer = shm_framenumber[0] + + shm_frame_init = shared_memory.SharedMemory\ + 
(name="shm_frame") # Attach to existing shared memory block + shm_redframe = np.ndarray((image_heigth,image_width, number_of_colorchannels), dtype=np.uint8, \ + buffer= shm_frame_init.buf) + image_bgr = shm_redframe + + + params_for_blob_detection = define_parameters_for_blob_detection() + + i=0 + while True: + try: + framenumber = shm_framenumber[0] + if i==0: + last_processed_frame = framenumber + + conditions_for_first_start = (i==0) and\ + (shm_bools[0] == True) and \ + (shm_bools[1] == False) and (shm_bools[2] == False) and (shm_bools[3] == False) \ + and (shm_bools[7] == True) + + conditions_for_starting_processing = (framenumber>last_processed_frame) and (shm_bools[7] == True) + # newframe and all color-channel-processings have to be finished + + if conditions_for_first_start == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = time.perf_counter_ns() + lane_detection(image_bgr, params_for_blob_detection) + i += 1 + #print(f"first processing red finished. frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + + shm_bools[4] = False # process ended + elif conditions_for_starting_processing == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + #print(f"red: framenumber: {framenumber}, last_processed_frame: {last_processed_frame}") + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = time.perf_counter_ns() + lane_detection(image_bgr, params_for_blob_detection) + #print(f"max frame color red: {shm_redframe.max()}") + if show_opencv_window: + cv.imshow("red", shm_redframe) + cv.waitKey(1) + #print(f"processing red finished. 
frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + #t2 = time.perf_counter_ns() + #print(f"processing time for red channel: {round((t2-t1)*1e-6,2)} ms") + + last_processed_frame = framenumber + shm_bools[4] = False # process ended + # elif shm_bools[0] == False: + # pass + #print(f"no new red frame") + + # image processing finished + + + + except KeyboardInterrupt: + try: + shm_bool_init.close() + shm_framenumber_init.close() + shm_redframe_init.close() + + except FileNotFoundError: + # Memory already destroyed + pass + +# ---------------------------------------------------------------------------- +# main +def main(): + start = time.perf_counter() + + try: + # create processes + p_red = Process(target=processing_bgr) + processes = [p_red] + + print(f"waiting 1 second to create processes") + time.sleep(1) # sind prozesse schon vorhanden + + # start acitivity of processes + for process in processes: + process.start() + + # start capturing + get_frames_from_picamera() + + + print('*******************************') + # this below code is only executed if the loop in take_image_picamera_opencv is breaked + # In real use case there will be no end of this program + + for process in processes: + process.terminate() + + # print time measuring + end = time.perf_counter() + print(f'Script finished in {round(end-start, 2)} s') + + # close each SharedMemory instance and unlink to release the shared memory + shm_bools_create.close() + shm_bools_create.unlink() + shm_framenumber_create.close() + shm_framenumber_create.unlink() + shm_redframe_create.close() + shm_redframe_create.unlink() + shm_greenframe_create.close() + shm_greenframe_create.unlink() + shm_blueframe_create.close() + shm_blueframe_create.unlink() + + except KeyboardInterrupt: + # Normally this prgoram never gets keyboard interrupted! 
But here this case is nevertheless handled + # End Script + try: + # close each SharedMemory instance and unlink to release the shared memory + shm_bools.close() + shm_bools.unlink() + shm_framenumber_create.close() + shm_framenumber_create.unlink() + shm_redframe_create.close() + shm_redframe_create.unlink() + shm_greenframe_create.close() + shm_greenframe_create.unlink() + shm_blueframe_create.close() + shm_blueframe_create.unlink() + + except FileNotFoundError: + # Memory already destroyed + pass + + +if __name__ == "__main__": + main() diff --git a/90_ZielSW/Multiprocessing/Lanedetection_Picamera_V03.py b/90_ZielSW/Multiprocessing/Lanedetection_Picamera_V03.py new file mode 100644 index 0000000..0f2050c --- /dev/null +++ b/90_ZielSW/Multiprocessing/Lanedetection_Picamera_V03.py @@ -0,0 +1,925 @@ + +import cv2 as cv +import numpy as np + +import picamera +from picamera.array import PiRGBArray +from fractions import Fraction + +import time +from datetime import datetime +import os +import sys + +import numpy as np +import math as M + +from multiprocessing import Process, shared_memory + + +#---------------------------------------------------------------------- +# Define camera settings +SENSOR_MODE = 4 # corresponding sensor mode to resolution +OUTPUT_RESOLUTION = (192, 144) # (width, heigth) +image_width = OUTPUT_RESOLUTION[0] +image_heigth = OUTPUT_RESOLUTION[1] +number_of_colorchannels = 3 # r, g, b +size_of_frame=int(image_heigth*image_heigth*number_of_colorchannels) +frame_dimension = int(number_of_colorchannels) + +AWB_MODE = 'off' # Auto white balance mode +AWB_GAINS = (1.395, 1.15) # White Balance Gains to have colours read correctly: (red, blue). Int, floar or fraction are valid. +BRIGHTNESS = 25 # sets the brightness setting of the camera. default is 50. [0-100] + #the brighter, the brighter the LEDs and the higher the RGB values and vice versa! +CONTRAST = 100 # sets the contrast setting of the camera. The default value is 0. [-100 ... 
100] + +SHUTTER_SPEED = 50 # [µs] + +ISO = 320 # ISO value +EXPOSURE_MODE = 'off' +FRAMERATE = 25 # frames per second. 40 fps is max for sensor mode 4 + +SLEEP_TIME = 2 # Time for sleep-mode for the camera in seconds. My default: 2 s + +# miscellaneous parameters +max_value_of_uint64 = int((2**64) - 1) # @30 fps: konservative calculated driving time: 1.95*1e10 years --> Integer overflow not relevant. + +#---------------------------------------------------------------------- +# Parameters +pixels_per_mm = 32/24.25 #[px/mm] for 120 mm camera height for resolution: 192x144 +# pixels_per_mm = 71/24.25 #[px/mm] for 120 mm camera height for resolution: 416x320 +# pixels_per_mm = 107/24.25 #[px/mm] for 120 mm camera height for resolution: 640x480 + +# Offset Camera Sensor in Scooty according to Scooty-KS +x_offset_camera_mm = 100 # [mm] +y_offset_camera_mm = 50 # [mm] +x_offset_camera_px = x_offset_camera_mm*pixels_per_mm # [px] +y_offset_camera_px = y_offset_camera_mm*pixels_per_mm # [px] + +# image parameters +image_heigth = OUTPUT_RESOLUTION[1] # shape [0] +image_width = OUTPUT_RESOLUTION[0]# shape[1] +# calculate center of image +[x_0, y_0] = np.array([image_width/2, image_heigth/2], dtype=np.uint16) + + +threshold_color_detection = 60 # values under this will not be considered as active leds in each color channel + +# Parameters for Blob/LED Detection +minDiameter_mm = 1.5 # [mm] minimum diameter of detected blob/LED +maxDiameter_mm = 8 # [mm] maximum diameter of detected blob/LED + +# Define color numbers to identify the color channels in the matrix with all detected LEDs. 
No string, because numpy array should stay uint16 +color_number_off = 0 +color_number_red = 1 +color_number_green = 2 +color_number_blue = 3 +color_number_yellow = 4 +color_number_magenta = 5 +color_number_cyan = 6 +color_number_white = 7 + +show_opencv_window = True # show opencv window +draw_opencv = True # draw lane and so on + +print_additional_info = False + +# create shared Memorys for main-process +# shared memory for bools +shm_bools_pre=np.array([False, False, False, False, False, False, False, False, False, False], dtype=np.bool8) # create numpy array with bools stored in it + # [0]: newframe [1]: p_red_finished [2]: p_green_finished [3]: p_blue_finished + # [4]: p_red_started [5]: p_green_started [6]: p_blue_started + # [7]: p_red_start_trigger [8]: p_green_start_trigger [9]: p_blue_start_trigger +size_of_buffer = shm_bools_pre.nbytes +print(f"size of buffer: {size_of_buffer}") # size of buffer: 10 +print(f"shm_bools dtype: {shm_bools_pre.dtype}") # dtype: bool +shm_bools_create = shared_memory.SharedMemory(name="shm_bools", create=True, size=shm_bools_pre.nbytes) # create a new shared memory block +shm_bools = np.ndarray(shm_bools_pre.shape, dtype=shm_bools_pre.dtype, buffer=shm_bools_create.buf) # create a NumPy array backed by shared memory +shm_bools[:] = shm_bools_pre[:] # Copy the original data into shared memory + +# print(shm_bool) +# print(shm_bools.name) + +# shared memory for framenumber +shm_framenumber_pre=np.array([0], dtype=np.uint64) +size_of_buffer = shm_framenumber_pre.nbytes +print(f"size of framenumber-buffer: {size_of_buffer}") #8 +print(f"shm_framenumber dtype: {shm_framenumber_pre.dtype}") #uint64 +shm_framenumber_create = shared_memory.SharedMemory(name="shm_framenumber", create=True, size=shm_framenumber_pre.nbytes) # create a new shared memory block +shm_framenumber = np.ndarray(shm_framenumber_pre.shape, dtype=shm_framenumber_pre.dtype, buffer=shm_framenumber_create.buf) # create a NumPy array backed by shared memory 
+shm_framenumber[:] = shm_framenumber_pre[:] # Copy the original data into shared memory +# print(shm_framenumber) # [0] +# print(shm_framenumber_create.name) # shm_framenumber + +# shared memory for red, green, blue frame +int_black = 0 # integer for black color/ no color +shm_colorframes_pre = np.full(\ + (image_heigth,image_width), \ + int_black, dtype=np.uint8) +size_of_buffer = shm_colorframes_pre.nbytes +print(f"size of colorframe-buffer: {size_of_buffer}") #133 120 +print(f"shm_colorframes_pre dtype: {shm_colorframes_pre.dtype}") #uint8 +shm_redframe_create = shared_memory.SharedMemory(name="shm_redframe", create=True, size=shm_colorframes_pre.nbytes) # create a new shared memory block +shm_greenframe_create = shared_memory.SharedMemory(name="shm_greenframe", create=True, size=shm_colorframes_pre.nbytes) # create a new shared memory block +shm_blueframe_create = shared_memory.SharedMemory(name="shm_blueframe", create=True, size=shm_colorframes_pre.nbytes) # create a new shared memory block +shm_redframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_redframe_create.buf) # create a NumPy array backed by shared memory +shm_greenframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_greenframe_create.buf) # create a NumPy array backed by shared memory +shm_blueframe = np.ndarray(shm_colorframes_pre.shape, dtype=shm_colorframes_pre.dtype, buffer=shm_blueframe_create.buf) # create a NumPy array backed by shared memory +shm_redframe[:] = shm_colorframes_pre[:] # Copy the original data into shared memory +shm_greenframe[:] = shm_colorframes_pre[:] # Copy the original data into shared memory +shm_blueframe[:] = shm_colorframes_pre[:] # Copy the original data into shared memory +# shared memory bgr frame +int_black = 0 # integer for black color/ no color +shm_frame_pre = np.full(\ + (image_heigth,image_width, number_of_colorchannels), \ + int_black, dtype=np.uint8) +size_of_buffer = 
shm_frame_pre.nbytes
print(f"size of colorframe-buffer: {size_of_buffer}") #399 360 = 416 x 320 x 3
print(f"shm_colorframes_pre dtype: {shm_frame_pre.dtype}") #uint8
shm_frame_create = shared_memory.SharedMemory(name="shm_frame", create=True, size=shm_frame_pre.nbytes)  # create a new shared memory block
shm_frame = np.ndarray(shm_frame_pre.shape, dtype=shm_frame_pre.dtype, buffer=shm_frame_create.buf)  # create a NumPy array backed by shared memory
shm_frame[:] = shm_frame_pre[:]  # copy the original data into shared memory


# ----------------------------------------------------------------------------
# Define Functions
def points_trafo(detected_LEDs, alpha_rad, dx, dy):
    """Transform LED points from the image frame (KS_0) into the lane frame (KS_LED).

    detected_LEDs: (n, 3) array with columns [x, y, color].
    alpha_rad:     lane angle in radians.
    dx, dy:        offset of the lane foot point from the image center [px].
    Returns the transformed points sorted along the lane axis (x_2).
    """
    detected_LEDs_trafo = detected_LEDs.copy()  # copy, because otherwise only a reference would be modified
    detected_LEDs_trafo = detected_LEDs_trafo.astype(np.int16)  # signed type avoids integer over-/underflow after translation
    x_pnts = detected_LEDs_trafo[:, 0]
    y_pnts = detected_LEDs_trafo[:, 1]

    # Translation: shift the origin to the lane foot point
    x1 = x_pnts - dx - x_0
    y1 = y_pnts - dy - y_0

    # Rotation. The sensor angle is measured clockwise, i.e. negative with
    # respect to the mathematical (counter-clockwise) convention.
    x_trafo = np.cos(-alpha_rad)*x1 - np.sin(-alpha_rad)*y1
    detected_LEDs_trafo[:, 0] = x_trafo
    y_trafo = np.sin(-alpha_rad)*x1 + np.cos(-alpha_rad)*y1
    detected_LEDs_trafo[:, 1] = y_trafo

    # sort points along the lane: x_2/y_2 axis (KS_LED)
    detected_LEDs_trafo = detected_LEDs_trafo[detected_LEDs_trafo[:, 0].argsort(kind='quicksort')]
    return detected_LEDs_trafo

def construct_lane(detected_LEDs, img_bgr):
    """Construct the lane from the detected LED positions.

    Fits a line through the LED centers, computes the perpendicular foot point
    relative to the image center and to the vehicle frame (KS_Scooty), and
    returns (dx_mm, dy_mm, detected_LEDs_in_KS_LED).
    """
    # fit a line through the centers of the LEDs in KS_0
    # DIST_L2: the simplest and fastest least-squares method (plain euclidean distance)
    param = 0  # not used for DIST_L2
    reps = 0.001  # sufficient accuracy for the radius (distance between the coordinate origin and the line)
    aeps = 0.001  # sufficient accuracy for the angle
+ [dx, dy, x_2, y_2] = cv.fitLine(detected_LEDs[:,0:2], cv.DIST_L2, param, reps, aeps) + # x2, y2: same as: mean_of_leds = np.mean([x_pnts, y_pnts], 1) + + alpha_rad = np.arctan2(dy, dx) # calculate angle of line + alpha = np.arctan2(dy, dx)*180/np.pi # calculate angle of line + # print(f"Lane: dx: {dx}, dy:{dy}, x2:{x_2}, y2:{y_2}, alpha:{alpha}°") + if print_additional_info: + print(f"Lane: alpha:{alpha[0]}°") + + # get smallest distance to point an line + # Berechnung nach: Repetitorium Höhere Mathematik, Wirth + # Gerade: x = a+ t*b + # Punkt : OP = p + # d = abs(b x (p-a))/(abs(b)) + # info: np.array()[:,0] --> gets only array with 1 dimensions with desired values + p = np.array([x_0, y_0]) + a = np.array([x_2, y_2])[:,0] + b = np.array([np.cos(alpha_rad), np.sin(alpha_rad)])[:,0] # Richtungsvektor + c = p-a + + # Betrag von Vektor: np.linalg.norm(vec) + cross= np.cross(b, c) + d = np.linalg.norm(cross)/np.linalg.norm(b) # distance [px] + #print(f"d: {round(d,2)}") + + # Fußpunkt (X_LED, Y_LED) + t_0_dot = np.dot(c, b) + t_0_norm = (np.linalg.norm(b)**2) + t_0 = t_0_dot/t_0_norm + [x_LED, y_LED] = (a+t_0*b) + if print_additional_info: + print(f"x_LED: {x_LED}, y_LED: {y_LED}") + + # Abstand (dx, dy) Fußpunkt zu KS_0 + dx_LED = x_LED - x_0 + dx_LED_mm = dx_LED*(1/pixels_per_mm) + dy_LED = y_LED - y_0 + dy_LED_mm = dy_LED*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED:{dx_LED} [px] , dy_LED:{dy_LED} [px]") + print(f"dx_LED:{dx_LED_mm} [mm] , dy_LED:{dy_LED_mm} [mm]") + + # Abstand (dx, dy) Fußpunkt von Bildmitte zu KS_Scooty + # Diese Werte zurückgeben + dx_LED_scooty = x_LED - x_0 + x_offset_camera_px + dx_LED_scooty_mm = dx_LED_scooty*(1/pixels_per_mm) + dy_LED_scooty = y_LED - y_0 + y_offset_camera_px + dy_LED_scooty_mm = dy_LED_scooty*(1/pixels_per_mm) + if print_additional_info: + print(f"dx_LED_scooty:{dx_LED_scooty} [px] , dy_LED_scooty:{dy_LED_scooty} [px]") + print(f"dx_LED_scooty:{dx_LED_scooty_mm} [mm] , 
dy_LED_scooty:{dy_LED_scooty_mm} [mm]") + + # Punkte Trafo, um sortierte position der LEDs entlang Spur zu erhalten + # Bei normal detected kann bei vertikaler LED zb Fehler entstehen und dann muster: 211323233 -> daher mit dieser sortierten weitermachen + detected_LEDs_KS_LED = points_trafo(detected_LEDs, alpha_rad, dx_LED, dy_LED) + if print_additional_info: + print(f"Detected LEDs in KS_LED:(x2, y2):\n {detected_LEDs_KS_LED}") + + #----------------------------------- + # draw useful lines and points + + # draw lane line + if draw_opencv: + pt_0 = (a+b*np.array([-300, -300])).astype(np.int32) + pt_1 = (a+b*np.array([300, 300])).astype(np.int32) + #print(f"pt_0: {pt_0}, pt_1: {pt_1}") + cv.line(img_bgr, pt_0, pt_1, (255,255,255),1) # draw lane + + # draw dx dy + cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # shortest distance from KS_0 to KS_LED --> Lot + # cv.line(img_bgr, (int(x_0), int(y_0)), (int(x_LED), int(y_0)), (0,0,255), 2) # only dx + # cv.line(img_bgr, (int(x_LED), int(y_0)), (int(x_LED), int(y_LED)), (0,0,255), 2) # only dy + + #draw additional points + cv.circle(img_bgr, (int(x_2), int(y_2)), 5,(255,128,255),-1) #pink. Center of points + #cv.putText(img_bgr, '(x2, y2)',(int(x_2)+5, int(y_2)-5), cv.FONT_HERSHEY_SIMPLEX, 2, (255,255,255), cv.LINE_AA) + cv.circle(img_bgr, (int(x_LED), int(y_LED)), 5,(170,255,0),-1) # lime green. Fußpunkt + + if show_opencv_window: + cv.imshow("Lane", img_bgr) + + return dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED + +def convert_rgb_to_grayscale_average(image_bgr): + """This function converts the RGB image into an grayscale image. 
+ Algorithm: Average: Y = (R+G+B)/3""" + + # convert dtype to prevent integer overflow while addition + image_bgr = image_bgr.astype(np.uint16, copy=False) + image_gray = (image_bgr[:,:,0]+image_bgr[:,:,1]+image_bgr[:,:,2])/3 # add values / do conversion + image_gray = image_gray.astype(np.uint8, copy=False) # convert back to uint8 + + return image_gray + +def create_detector(params_for_blob_detection): + detector = cv.SimpleBlobDetector_create(params_for_blob_detection) # Set up the detector with specified parameters. + return detector + +def define_parameters_for_blob_detection(): + """set parameters for simple blob detector""" + params = cv.SimpleBlobDetector_Params() + + # Threshold for Convert the source image to binary images by applying thresholding + # with several thresholds from minThreshold (inclusive) to maxThreshold (exclusive) + # with distance thresholdStep between neighboring thresholds. + # Since the Grayscale image is dark if only one color channel is active, + # the Threshold values have to be set like this. + # particularly the thresholdStep-Value has to be low + params.minThreshold=20 # reminder: this value is set for grayscale image + params.maxThreshold=255 + params.thresholdStep=1 + + params.filterByColor=False # do not filter blobs by color + + # Filter blobs by Area + params.filterByArea=True + minDiameter_px = minDiameter_mm*pixels_per_mm # [px] minimum diameter of detected blob/LED + maxDiameter_px = maxDiameter_mm*pixels_per_mm # [px] maximum diameter of detected blob/LED + minArea_px2 = np.pi/4*minDiameter_px**2 + maxArea_px2 = np.pi/4*maxDiameter_px**2 + + params.minArea = minArea_px2 # min Area of a blob in px^2 + params.maxArea = maxArea_px2 # max Area of a blob in px^2. 
+ # reasons for not filtering maxArea: motion blur + rolling shutter --> larger Area + + # Filter by Inertia + params.filterByInertia=False + params.minInertiaRatio = 0.2 # [0-1] + + # Filter by Convexity + params.filterByConvexity=False + params.minConvexity = 0.2 # [0-1] + + # Filter by Circularity + params.filterByCircularity=False + params.minCircularity = 0.4 # [0-1] + + # params.minDistBetweenBlobs = minDist_px # this has no effect + + return params + + +def detect_LED_positions_in_grayscale(image_gray, image_bgr, detector): + start_processing = time.perf_counter() + keypoints = detector.detect(image_gray) # Detect blobs --> LEDs + end_processing = time.perf_counter() + time_processing = end_processing-start_processing + time_processing = time_processing*1000 + time_processing = round(time_processing, 2) + print(f'processing time detector: {time_processing} ms') + + number_of_detected_leds = len(keypoints) + + if number_of_detected_leds != 0: + # print information of keypoints + print(f"detected LEDs: {number_of_detected_leds}") + + #Pre-allocate matrix for numpy + number_of_rows = number_of_detected_leds + number_of_columns = 3 + position_of_leds = np.zeros((number_of_rows, number_of_columns), dtype=np.uint16) + for i, k in enumerate(keypoints): + # x_pos = round(k.pt[0],0) # x position + # y_pos = round(k.pt[1],0) # y position + # print(f"x: {x_pos} y: {y_pos}") + diameter_px = round(k.size,2) + diameter_mm = round(diameter_px*1/pixels_per_mm,2) + #print(f"diameter [px]: {diameter_px} diameter [mm]: {diameter_mm}") # diameter + # area_px2 = round(np.pi/4*k.size**2,0) # area in px^2 + # area_mm2 = round(area_px2*(1/pixels_per_mm)**2,0) + # print(f"area [px^2]: {area_px2} area [mm^2]: {area_mm2}") + # print('') + + # calculate parameters to transfer to matrix + # x_pos = int(np.ceil(x_pos)) + # y_pos = int(np.ceil(y_pos)) + # Fill matrix + # position_of_leds[i,:] = [x_pos,y_pos, 0] + position_of_leds[i,0] = int(np.ceil(k.pt[0])) # x positon + 
position_of_leds[i,1] = int(np.ceil(k.pt[1])) # y position + + + if draw_opencv: + # draw the keypoints on the original image + # cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS ensures the size of the circle corresponds to the size of blob + blobs = cv.drawKeypoints(image=image_bgr, keypoints=keypoints, color=(255, 255, 255), \ + outImage=np.array([]), flags= cv.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS) + + if show_opencv_window: + # cv.imshow("grayscale", image_gray) + cv.imshow("Detected", blobs) + return position_of_leds + + else: + print(f"No LEDs were detected") + return None + +def detect_position_of_all_LEDs_grayscale(image_gray, image_bgr, detector): + position_of_LEDs = detect_LED_positions_in_grayscale(image_gray, image_bgr, detector) + + if position_of_LEDs is not None: + return position_of_LEDs + else: + return None + +def get_color_of_leds(matrix_of_LEDs, image_bgr): + # is image_r[y_pos, x_pos] = image_bgr[y_pos,x_pos, 2] ? --> yes. No need to split the color channels. + + offset = 0 # half of length from rectangle which is going to be used to determine the color around the middle point of the blob/led + # offset = 0 --> only the value from the middle point of the blob/led + # offset=1 --> 9 values, offset=2-->25 values + + for led in matrix_of_LEDs: + x_pos = led[0] # uint16 + y_pos = led[1] # uint16 + + # get values of color channels in region around middle point of blob/led: + # +1 at stop index, because it is not inclusive + region_around_blue_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 0] # uint8 + region_around_green_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 1] # uint8 + region_around_red_led = image_bgr[y_pos-offset:y_pos+offset+1, x_pos-offset:x_pos+offset+1, 2] # uint8 + + # average of the values + # convert dtype to prevent integer overflow while addition + region_around_red_led = region_around_red_led.astype(np.uint16, copy=False) + region_around_green_led = 
region_around_green_led.astype(np.uint16, copy=False) + region_around_blue_led = region_around_blue_led.astype(np.uint16, copy=False) + # sum all elements in matrix and divide with number of elements + number_of_elements= region_around_blue_led.size + value_of_red_led = region_around_red_led.sum()/number_of_elements # float64, if not integer result + value_of_green_led = region_around_green_led.sum()/number_of_elements # float64, if not integer result + value_of_blue_led = region_around_blue_led.sum()/number_of_elements # float64, if not integer result + + # determine which leds are active: + # if value > threshold --> led is active + status_blue_led = False; status_green_led = False; status_red_led = False + if value_of_blue_led > threshold_color_detection: + status_blue_led = True + if value_of_green_led > threshold_color_detection: + status_green_led = True + if value_of_red_led > threshold_color_detection: + status_red_led = True + + # determine color by checking the cases: + # case 1: red + if status_blue_led==False and status_green_led==False and status_red_led==True: + color = color_number_red + # case 2: green + elif status_blue_led==False and status_green_led==True and status_red_led==False: + color = color_number_green + # case 3: blue + elif status_blue_led==True and status_green_led==False and status_red_led==False: + color = color_number_blue + # case 4: yellow = red + green + elif status_blue_led==False and status_green_led==True and status_red_led==True: + color = color_number_yellow + # case 5: magenta = red + blue + elif status_blue_led==True and status_green_led==False and status_red_led==True: + color = color_number_magenta + # case 6: cyan = green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==False: + color = color_number_cyan + # case 7: white = red + green + blue + elif status_blue_led==True and status_green_led==True and status_red_led==True: + color = color_number_white + # case 8: led not active + # this 
case can not occur, because no inactive led can be detected from the implemented blob-algorithm in detect_LED_positions_in_grayscale + else: + color = color_number_off + + # fill matrix with color + led[2] = color # uint16 + + return matrix_of_LEDs + + +def detect_LEDs_with_grayscale(image_bgr, detector): + # convert rgb to grayscale image + # start_m1 = time.perf_counter() + image_gray = convert_rgb_to_grayscale_average(image_bgr) + # end_m1 = time.perf_counter() + # time_processing = end_m1-start_m1 + # time_processing = time_processing*1000 + # time_processing = round(time_processing, 2) + # print(f'processing time conversion: {time_processing} ms') + + # get position of leds + position_of_LEDs = detect_position_of_all_LEDs_grayscale(image_gray=image_gray, image_bgr=image_bgr, detector=detector) + + #position_of_LEDs = None + + if position_of_LEDs is not None: + # determine color of leds and add to matrix + detected_LEDs = get_color_of_leds(position_of_LEDs, image_bgr) + return detected_LEDs + else: + return None + + +def lane_detection(image_bgr, detector): + # Detect LEDs + print(f"Detect LEDs and color:") + detected_LEDs = detect_LEDs_with_grayscale(image_bgr, detector) + + if detected_LEDs is not None: + # Contruct lane + #print(f"_____________________________________") + # print("Contruct lane") + dx_LED_scooty_mm, dy_LED_scooty_mm, detected_LEDs_KS_LED = \ + construct_lane(detected_LEDs, image_bgr) + + # print result + if print_additional_info: + print(f"Detected LEDs relative to image-center(x0,y0):\n{detected_LEDs}") + return detected_LEDs + else: + return None + + +# ------------------- +# Define Funcions +def get_frames_from_picamera(): + # newframe= shm_bools[0] # do not use this! 
no updted values in "newframe" + # framenumber = shm_framenumber[0] + + # Initialise Camera + with picamera.PiCamera() as camera: + with PiRGBArray(camera) as output: + # Set camera settings + camera.sensor_mode = SENSOR_MODE # force camera into desired sensor mode + camera.resolution = OUTPUT_RESOLUTION # frame will be resized from GPU to this resolution. No CPU usage! + camera.framerate = FRAMERATE + + camera.awb_mode = AWB_MODE + camera.awb_gains = AWB_GAINS + + camera.iso = ISO + camera.shutter_speed = SHUTTER_SPEED + + time.sleep(SLEEP_TIME) # wait for iso gains and digital_gain and analog_gain to settle before fixing the gains with exposure_mode = off + camera.exposure_mode = EXPOSURE_MODE + + time.sleep(1) # wait before applying brightness and contrast + camera.brightness = BRIGHTNESS + camera.contrast = CONTRAST + time.sleep(SLEEP_TIME) # Camera warm-up time to apply settings + + t_start= time.perf_counter() # save time for fps calculation + + for frameidx, frame in enumerate(camera.capture_continuous(output, format='bgr', use_video_port=True)): + # General information: + # - always the newest frame is recieved: processing must be faster than fps if every frame should be processed + + shm_bools[0] = True + framenumber = frameidx+1 # frameidx starts with 0, framenumber with 1 + + + shm_framenumber[0] = framenumber + #print('') + #print(f"new frame: {framenumber}") + + #image = frame.array # raw NumPy array without JPEG encoding + + #b,g,r = cv.split(frame.array) # split colour channels of raw NumPy array without JPEG encoding + shm_frame[:] = frame.array + #shm_redframe[:] = r + #shm_greenframe[:] = g + #shm_blueframe[:] = b + # for better performance one can assign directly in funtion line the values to the shm_memorys: shm_red, .. , ... = cv.split(..) 
+ + shm_bools[7:10]=[True] # trigger the start of the processing for each colorchannel + #print(shm_bools[7], shm_bools[8], shm_bools[9]) + + #display_image_with_text(image, shutter_speed, framenumber, camera_exposure_speed, trigger_record_OpenCV, out) # show the frame + + output.truncate(0) # clear the stream for next frame + + if framenumber == 500: # 5 sek @ 30 fps, only for performance measuring + t_stop=time.perf_counter() + print(f"calculated fps: {framenumber/(t_stop-t_start)}") + break + + +def display_image_with_text(img, shutter_speed, framenumber, camera_exposure_speed, trigger_record_OpenCV, out): + img = img.copy() # make copy of image and do not modify the original image + + # please activate only one trigger once + trigger_show_brightness = 0 # trigger for (not) calculating andshowing the brightness of the image+ + if trigger_show_brightness == 1: + arithmetic_mean_of_brightness_per_pixel_relative = calc_arithmetic_mean_of_brightness_per_pixel(img) + + trigger_show_max_brightness_values_of_colour_channels = 0 # trigger for (not) calculating and showing max values of colour chanels + if trigger_show_max_brightness_values_of_colour_channels == 1: + r_max, g_max, b_max = get_max_rgb_values(img) + + font = cv.FONT_HERSHEY_SIMPLEX # font + fontScale = 1 # fontScale + color = (255, 255, 255) # Font colour in BGR + thickness = 1 # Line thickness in px + + # set text position + frame_width = int(img.shape[1]) + frame_height = int(img.shape[0]) + text_start_position_Y = int(round(frame_height*0.12)) # start position of text in pixels 12 % of frame height + text_linespacing = 50 # line spacing between two strings in pixels + # text_start_position_X = int(frame_width/4) # start text from 1/4 of image width + text_start_position_X = int(0) # start text from left edge of image + + # set position in (x,y)-coordinated from top left corner. Bottom-left corner of the text string in the image. 
+ pos_1 = (text_start_position_X, text_start_position_Y) # start text from 1/4 of image width + pos_2 = (text_start_position_X, text_start_position_Y+text_linespacing) # start text from 1/4 of image width + pos_3 = (text_start_position_X, text_start_position_Y+2*text_linespacing) # start text from 1/4 of image width + if trigger_show_brightness==1 or trigger_show_max_brightness_values_of_colour_channels==1: + pos_4 = (text_start_position_X, text_start_position_Y+3*text_linespacing) # start text from 1/4 of image width + + + # define text to display + text_line_1 = f"set ss: {shutter_speed} us" + text_line_3 = f"Frame: {framenumber}" + text_line_2 = f"ret exs: {camera_exposure_speed} us" + if trigger_show_brightness==1: + if arithmetic_mean_of_brightness_per_pixel_relative >= 0.01: + text_line_4 = f"brightness: {round(arithmetic_mean_of_brightness_per_pixel_relative*100,2)} %" + elif arithmetic_mean_of_brightness_per_pixel_relative < 0.01: + text_line_4 = f"brightness: {round(arithmetic_mean_of_brightness_per_pixel_relative*10e3,2)} pm" + if trigger_show_max_brightness_values_of_colour_channels==1: + text_line_4 = f"max: r:{r_max} g:{g_max} b:{b_max}" + + + # put the text into the image + #image_text_1 = cv.putText(img, text_line_1, pos_1, font, + # fontScale, color, thickness, cv.LINE_AA) + #image_text_2 = cv.putText(img, text_line_2, pos_2, font, + # fontScale, color, thickness, cv.LINE_AA) + #image_text_3 = cv.putText(img, text_line_3, pos_3, font, + # fontScale, color, thickness, cv.LINE_AA) + if trigger_show_brightness==1 or trigger_show_max_brightness_values_of_colour_channels==1: + image_text_4 = cv.putText(img, text_line_4, pos_4, font, + fontScale, color, thickness, cv.LINE_AA) + + + cv.imshow("Current Frame", img) # display the image + if trigger_record_OpenCV == 1: + out.write(img) # write frame to Video + + +def calc_arithmetic_mean_of_brightness_per_pixel(image): + """Calculate overall brightness per pixel of the image. 
Mittelere Helligkeit pro pixel berechnen.""" + #Comment: So rechenintensiv, dass man kein Blitzen sieht im Bild. (Oder sehr selten bzw. schwach). Daher anzeige von max-werten + + b,g,r = cv.split(image) # OpenCV works with bgr. Image is also speciefied to be captured in bgr. + r=r.astype('uint16') # set dtype of one colour to uint16, because the sum of 255+255+255 >255 =765 + #the rest will also be uint16 then + image_heigth = r.shape[0] + image_width = r.shape[1] + + number_of_colour_channels = 3 + arithmetic_mean_of_brightness_image = np.sum((r+g+b)/number_of_colour_channels) + arithmetic_mean_of_brightness_per_pixel = arithmetic_mean_of_brightness_image/(image_width*image_heigth) + + max_possible_brightness = 255 # maximum possible brightness + arithmetic_mean_of_brightness_per_pixel_relative = arithmetic_mean_of_brightness_per_pixel/max_possible_brightness + + return arithmetic_mean_of_brightness_per_pixel_relative + +def get_max_rgb_values(image): + """get max values of colour channels""" + b,g,r = cv.split(image) # OpenCV works with bgr. Image is also speciefied to be captured in bgr. + + r_max=r.max() + g_max=g.max() + b_max=b.max() + + return r_max, g_max, b_max + +def create_folder_for_captures(): + # Create folder for saving the captured pictures + now = datetime.now(); d1 = now.strftime("%Y-%m-%d %H-%M") + path_cwd = os.getcwd() + + path_saveFolder = path_cwd+r"/Capture_"+d1 + try: + os.mkdir(path_saveFolder) + folder_exists = True + except OSError: + print("Error! 
Ending script.") + quit() + + return path_saveFolder, folder_exists + +def do_processing(): + time.sleep(0.001) + #print("ohh i am doing high complex image analysis with computer vision") + +def do_processing_frame_r(frame): + print(f"max frame color red: {frame.max()}") +def do_processing_frame_g(frame): + print(f"max frame color green: {frame.max()}") +def do_processing_frame_b(frame): + print(f"max frame color blue: {frame.max()}") + + + +def processing_red(): + shm_bool_init = shared_memory.SharedMemory(name="shm_bools") # Attach to existing shared memory block + shm_bools = np.ndarray((10,), dtype=np.bool8, buffer= shm_bool_init.buf) # do not: newframe = np.array(...)[0] --> then one can not assign new value in main script + newframe = shm_bools[0] + p_red_finished = shm_bools[1] # not used, but for clarity + p_red_started = shm_bools[4] + + shm_framenumber_init = shared_memory.SharedMemory\ + (name="shm_framenumber") # Attach to existing shared memory block + shm_framenumber = np.ndarray((1,), dtype=np.uint64, \ + buffer= shm_framenumber_init.buf) + # framenumer = shm_framenumber[0] + + shm_redframe_init = shared_memory.SharedMemory\ + (name="shm_redframe") # Attach to existing shared memory block + shm_redframe = np.ndarray((image_heigth,image_width), dtype=np.uint8, \ + buffer= shm_redframe_init.buf) + + + i=0 + while True: + try: + framenumber = shm_framenumber[0] + if i==0: + last_processed_frame = framenumber + + conditions_for_first_start = (i==0) and\ + (shm_bools[0] == True) and \ + (shm_bools[1] == False) and (shm_bools[2] == False) and (shm_bools[3] == False) \ + and (shm_bools[7] == True) + + conditions_for_starting_processing = (framenumber>last_processed_frame) and (shm_bools[7] == True) + # newframe and all color-channel-processings have to be finished + + if conditions_for_first_start == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = 
time.perf_counter_ns() + do_processing() + i += 1 + #print(f"first processing red finished. frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + + shm_bools[4] = False # process ended + elif conditions_for_starting_processing == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + #print(f"red: framenumber: {framenumber}, last_processed_frame: {last_processed_frame}") + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = time.perf_counter_ns() + do_processing() + #print(f"max frame color red: {shm_redframe.max()}") + if show_opencv_window: + cv.imshow("red", shm_redframe) + cv.waitKey(1) + #print(f"processing red finished. frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + #t2 = time.perf_counter_ns() + #print(f"processing time for red channel: {round((t2-t1)*1e-6,2)} ms") + + last_processed_frame = framenumber + shm_bools[4] = False # process ended + # elif shm_bools[0] == False: + # pass + #print(f"no new red frame") + + # image processing finished + + + + except KeyboardInterrupt: + try: + shm_bool_init.close() + shm_framenumber_init.close() + shm_redframe_init.close() + + except FileNotFoundError: + # Memory already destroyed + pass + +def processing_bgr(): + shm_bool_init = shared_memory.SharedMemory(name="shm_bools") # Attach to existing shared memory block + shm_bools = np.ndarray((10,), dtype=np.bool8, buffer= shm_bool_init.buf) # do not: newframe = np.array(...)[0] --> then one can not assign new value in main script + newframe = shm_bools[0] + p_red_finished = shm_bools[1] # not used, but for clarity + p_red_started = shm_bools[4] + + shm_framenumber_init = shared_memory.SharedMemory\ + (name="shm_framenumber") # Attach to existing shared memory block + shm_framenumber = np.ndarray((1,), dtype=np.uint64, \ + buffer= shm_framenumber_init.buf) + # framenumer = shm_framenumber[0] + + shm_frame_init = shared_memory.SharedMemory\ + 
(name="shm_frame") # Attach to existing shared memory block + shm_redframe = np.ndarray((image_heigth,image_width, number_of_colorchannels), dtype=np.uint8, \ + buffer= shm_frame_init.buf) + image_bgr = shm_redframe + + + params_for_blob_detection = define_parameters_for_blob_detection() + detector = create_detector(params_for_blob_detection) + + i=0 + while True: + try: + framenumber = shm_framenumber[0] + if i==0: + last_processed_frame = framenumber + + conditions_for_first_start = (i==0) and\ + (shm_bools[0] == True) and \ + (shm_bools[1] == False) and (shm_bools[2] == False) and (shm_bools[3] == False) \ + and (shm_bools[7] == True) + + conditions_for_starting_processing = (framenumber>last_processed_frame) and (shm_bools[7] == True) + # newframe and all color-channel-processings have to be finished + + if conditions_for_first_start == True: + print("first start") + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = time.perf_counter_ns() + lane_detection(image_bgr, detector) # hier immer noch altes bild -->> Processing ist langsamer als neue bilder kommen.. + i += 1 + #print(f"first processing red finished. frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + + shm_bools[4] = False # process ended + elif conditions_for_starting_processing == True: + shm_bools[4] = True # process started + shm_bools[7] = False # reset trigger + #print(f"red: framenumber: {framenumber}, last_processed_frame: {last_processed_frame}") + + shm_bools[1] = False # set bool for p_red_finished to false + #t1 = time.perf_counter_ns() + cv.imshow("AAA", shm_redframe) + lane_detection(image_bgr, detector) + #print(f"max frame color red: {shm_redframe.max()}") + if show_opencv_window: + cv.waitKey(1) + #print(f"processing red finished. 
frame: {framenumber}") + shm_bools[1] = True # set bool for p_red_finished to true + #t2 = time.perf_counter_ns() + #print(f"processing time for red channel: {round((t2-t1)*1e-6,2)} ms") + + last_processed_frame = framenumber + shm_bools[4] = False # process ended + # elif shm_bools[0] == False: + # pass + #print(f"no new red frame") + + # image processing finished + + + + except KeyboardInterrupt: + try: + shm_bool_init.close() + shm_framenumber_init.close() + shm_redframe_init.close() + + except FileNotFoundError: + # Memory already destroyed + pass + +# ---------------------------------------------------------------------------- +# main +def main(): + start = time.perf_counter() + + try: + # create processes + p_red = Process(target=processing_bgr) + processes = [p_red] + + print(f"waiting 1 second to create processes") + time.sleep(1) # sind prozesse schon vorhanden + + # start acitivity of processes + for process in processes: + process.start() + + # start capturing + get_frames_from_picamera() + + + print('*******************************') + # this below code is only executed if the loop in take_image_picamera_opencv is breaked + # In real use case there will be no end of this program + + for process in processes: + process.terminate() + + # print time measuring + end = time.perf_counter() + print(f'Script finished in {round(end-start, 2)} s') + + # close each SharedMemory instance and unlink to release the shared memory + shm_bools_create.close() + shm_bools_create.unlink() + shm_framenumber_create.close() + shm_framenumber_create.unlink() + shm_redframe_create.close() + shm_redframe_create.unlink() + shm_greenframe_create.close() + shm_greenframe_create.unlink() + shm_blueframe_create.close() + shm_blueframe_create.unlink() + + except KeyboardInterrupt: + # Normally this prgoram never gets keyboard interrupted! 
But here this case is nevertheless handled + # End Script + try: + # close each SharedMemory instance and unlink to release the shared memory + shm_bools.close() + shm_bools.unlink() + shm_framenumber_create.close() + shm_framenumber_create.unlink() + shm_redframe_create.close() + shm_redframe_create.unlink() + shm_greenframe_create.close() + shm_greenframe_create.unlink() + shm_blueframe_create.close() + shm_blueframe_create.unlink() + + except FileNotFoundError: + # Memory already destroyed + pass + + +if __name__ == "__main__": + main() diff --git a/Installationsanleitung.docx b/Installationsanleitung.docx new file mode 100644 index 0000000000000000000000000000000000000000..b5f5caf707610198d3022e3e3b0e221fcb55c48d GIT binary patch literal 56930 zcmeFYgL^E^*Y7>CZQIF?wPV}1ZQHiZ9Va`svt#bqwr%Il{kxxYuIIhZ`465`*L2U+ zOm$VS)%C6F^{F0tDG*Rp05|{=000mHj))+1?12D)B`^R01po=GC1hvoY+~!Ir|e;G z;-vG#-Nu@*02G)q9{~Ja{{N-_!5(N#9=GggKoofj`3#zAUY2a9EFKswh%>=4e+7zU zaTR_LRMhp`Qpiq6PQ<)j#;nZny24tS&~Kcj*78S~%}l(H!XN!(Pg7Jbb<6xRk|%*q z%p6E%l}~B~Y0T2ugCo5NFBocxBFPjr(Jo;2P6}ux{pER-#dW6mArN}}2s>raA`2wDxq>uI zHE%t;1y}G-O_9osDoc{-uwQcj!~-ffbq&I-rkh}Jt14o(-1M{SuuwE*E7+gKHWUM> zxvMONcjhb5at61)66ordMUxKt5WHh^ zemYsMHeeCNC1R($a6DocJcFWcGFS6e{g!vY#o}-Di6a9~W z_Ww_y|ASTe-%PJe?EMxfEdN!|N6<`{!fG#Op)9@8^cv<01f-UPG|Jkt<;>+sR^V~j+C zYAh&911)k28$97TJpJz!dwTHrKfaX=KKuCg+)toEiZ@) z9+B=)y!J^fUuaXlx-@3G__)5!dYu8~)1+3W7)z=LT5M|uda~>U!%ArE>&}@|&%&P4 z!H5uAs9#VK^l#p#+Q^!pN0EnEd~8=l8jX58b~xEO0(alj)&Hpvx<)g+6hr{vBNqTb z`gV$&oue_mv7M2N&G&5f&op+KO5h@DQ957$3^peEAPqYP<5RfwJl-GFJ9op84pIkK3i_x`cv)0IBJ zP3_c&3XCW%i!S24DVah|o)a6bm}s;zX+he$ue1>BzPU3;6(ereE3yvN?z>b3-Ah7D zcJCIlf;*lHf z5MguWdx}o#rQR~V(JGWmom62-gD=Rn_Fl|P8)}Isz?4s7##Ag~gLmumOTa)AD8nxj zaL4`oln(kUpyH8n?s2x>0PdRTm}~POWAJ?r@#7MtwOnAH4nMHZH4lxPQ~bJpuS>i^ zK>I6Ts1z7+9mF0z0YVlXEjl4+CSq~5YG)dvLIrHtQKbKuRo^Q@V}M)23V_fEtuzme ztn7Go#-^^1Dzz!!Dt!Mg9(#UU^#wd8a(^6kN2_<-5H7|St8vLHsdajC)a;KVbW{2M 
z?RdOL+Ac7y3BHUjMKlbqsXtN~1Td6XD99^4WaKBKL_Nz(=92hY`XbDAg&=;-yO-$Jv>r-J=qQMys(XbnV?#%7PLrx& z^LzI0SHFvI{%tJVR4qL2h~2+P3GGQ(a_Dk9y;bQ4RutTlMVn+4+%H@nSEEX|wiIb8bsc-p&-NokXrw$4@A_z5hua@IO_ zhez%LqV8J}6-_>I-q@;%R21P(=?9s5DyZna`5JI2TyQl4>@(#wkqdGMACY}{Or{W+ zC_k2>Oe)rzU69T&ATB`|L{z9$M*Am(yb>vJ4mB`JT2nOWMtD%NINQS(VV>uapJ{)$ z|EmbgK<^jTv;Axe=vk_A1&DZfLa3-9JT;N0jJc^&x*&`IDU#Ey=S^&4az0(E(9!R} z@aTFhWRxx+Lfv7IAXIS(44S{cz4q2K2lZ3{xP`fKna2yY!3VW}>LDW#X%MJQjTeSm ziV0zA3zU&oOod%+E0H-*9yhg2PS;i9GSy@1vt#Q!TWNc_6VYxyZ{ds8iBu!UqJ2XQ zD;6NSq7(&E1eqr=3nomDj^Q|Ka;bho81Q8JRMc9veh9Do zstv8r!6S$unJHxwPQ48af1r3)N&E!s4{{YRe>Q*h_S2>$XESy>jSg}WRxz%J&|NUkuTH(us;4La5Db$9ru@sDJF3fKlz2Kw zPG)w>Od0l!64p*Y#WH!V+7$ZK;lxDL51~hSg*D>|`|h*x@Ph}fVgSqFF60lGJ?0|u zA=(Bb#Y5=Xln1-nv+&THdI#rfJqAtY9w=piNHX@C?XGombFLM)=i95h;EU$t zL|_LO)^x;_nO+}{`5RUUlknTZEY*8R&oQ8H_)VwBR`S3`)OQX|d#`0ZaxU5|bC?;d zGQ|}|w-2X&eII(K5n|EeXMinJ9)Idpja5v*e4e`)qQ*_*Yv8yAs(h@lIGvg2ay;el>6{ru57j8O(7|burGSGIr zTI+xd(vrIvH3h({^*MqlTE3(O@mq_hV;>)W27g3H0Um3jO?Zn)SXrWw5F+8Gd75+$ znXj1xPf?u8@^l2=0XS8dn4}T#s78v7#@av=@8QIw|EJK~lcX;j8%$T=%-vEf*0}w> zQY20~26oIYTjUz$B_rxm$dZq?%in#g(@4DRCa^e$fVdd7x#fT%cox@f2KxG*Ad?Tv zI4ZCGI2;ulKb}U~JEIsNH#FU$67Cg8(bvBJn zGmD~H2!&N$kK61N79U1>yWAf*o!vsU9zjpSusy#LVix?sf91h7V(j3H=F7v;5Gkr zqX_H%wMYe3;3V%lkO`9rq&hHnc;SJoYDL~kks8_KFVW`N(bN%~e^>vk~@|S371z&y~R6jj6 zmBm&GI%&4T0sd=l_x7*$iFq2;o-&@MjT9#+4gkjLtgWRw~KdxlGRtfXDx-S|p-YzI0e zQ^iaPyacMqPoM@XyIhLTL}k^Ce>F6>_7#%3@9+gEa;Un{sBSE;rfR9+tpQts(mGwD zxaE+6LKsCEY0P-zM0%|E5l@&q$TUbl<9YxEJP&>$b=ISBFB& zilWX4vBDQF5|0Zy<~QohlV=H|1zC8cVor9SzK*Y1;U9TZ8Q2ff-syb%Uso+|Er=(N z8x`v1C&e$!!g`I*{evWZYsuff~VsVV$|d43;ZY&~pB^ijLnW)Kx{t z{$)J23f4jXWH-Tit$ndhnbD**nz9sfHJ|eJX2RbDIV$XLCp+9F!@Hsq_YOjFq=H;L zvTtO*ttl=u68b&9@(1y+g(GEJ4^ns#Ot9yS=OBjoji*nIe%r79z$V;AMAL?7(FPPJ z^(;4}r!?}b-{SW1oOhoVv!F`cKd0R2-n>j9ZCB`(aA7AWOZV)#V@o#_7|!z&x(T9# zI50zW_@E)W31G!`C{Nu!48o@ZKh5{!Lifx$24^_J3v_$<*n*z3o?}CGp1=0dKl$3? 
zq*d?YFwnZ{K0qK-$Mb4Oj`}OR#0gB~w)Mw9`CiSAFAnnXH45>$BVHHzY)xlKL<)%s zD?uIni{YYLjJMn1k4YSHlW=u{YlV$oaYCy8mX)ZYRNMdjA&3WG`?6{l+oirAAd8+( zR?C9G-Lvt~{?m-J)B&`Jojy3NL4NCDDuXIg5hzd-axB>no1zWkaHDxogi6LRKrMq8 z6*WcB(V$+M{_ICzC91p0YZ@%Z;8-4lv>os&R8oziw9LkuhdD}g)cT`jT0UK_ox#Rs zX^0&E$vXyD__p=-ixD$)2elgMwPy+C(Cd5(mWT`)1Fdw%FH*}xPD9CG@2*=?>g7xp znVOcFCDTG%cT?7bf$_zoDKJ<&(U`b~+)qaRe>#s%I4ID&8$k#3`^@F{8T@slX8+b`;{z@VZ!wKHdUa70IvnGEG;d7(5S{~&@TjYR8D ziin$F;1t{xzh^y?%s__TDMZz@jf?dMmY)&fc2>$yu0k zVQ|2fkNMR_8cSGn4jO4529zfoGQy3ot=X34%?B>6C&JV&a?s2a>4U=#9}dpWmm4N= zGYj@3fD}ZRzAmSyXtDmjkV2l~u0=NBe6VDUtAl4{2iy|hfRviEjJqtU3a0)9U@Ffe zBO4zb5nqRb|D3u&tUXarI|y0OG+L{So`>W(@MVj>7Ek0{$nyY@$%@NhrXc^Y$GD-q zGe`BZbx*L#uL_dB*bd322v~rkQlgI-sF2tFQm?M{dYW$SgW!%?Y7H$t#71vCUg+ML zOnW*y68r26>+0&6xi6i&NBBZRRtQJvk$rdp#kb9KBr+Up5JvO?4ILBme)LisktYF@ zqXWB?y1xvXMCF$LCC`F@9hyhkU-MU?#H)1me5FLDN>Bd=E%Jd%)*b|zGKsPjS|SlT z`8FByoXE0WF0`e+U@!yo*D991TLl?Dj_M=s<5Y;~DOVRUqZIZlEE}5PXSGr-H zH!!OUl4wtIO$9agC>Ntan31cuL$30}=7ER8E**uSAU*zM^bW3^Vce%X$; z+~`l~$zz-hB^}{rnQpdMvP+CBjB)qFmMqrl)$8(TK&d=9EeqwL=7~gcYUDsNl2uIf zp9KFmhH>>n2iEkP?>$2U0Js21p#Lz8e?i}WCmH_>`T>1Y`EM5TKRb1*>e#I^p!(!B ze)-?pa#4k(mQSbIDiV;(XmzL=XXETH=+)y1KAyQrQOd(jGl_Hx-;!l}P2PNrYd$uZhZ+4d|am6n53*gCi$=>h&pNt z5t9KLWHlzXqSQLlj;DyE0q_F-QIWZ(S)bGGy29hbcGn@GR+rX={lBkyeOu^}TGi-B1poRVl5AR8ox#PpF{*Ga{z}{O#=Y_+0eef`;Sg- zOpGlI=q+pv%uE=6*xQ;#$jgesL1X`O6r6;(up$5e!t?!k5b~dev>Otb@$WYvCq*$q zK=mZ<$#(|SOh85e0H}+Dc{li8jRQd0i)%Om0Py|)Y(T?yWySzN>Y0SFfU>*pMGllU z{2Icw{`8R4Q?cNF-!&W4`h% zFiw_( zoxv$uL;N@M6JkUz^1A>eXhB0_YHDt_cTZVaYk7IZ`ojYxG&D8U11rO`jF?!~`a?sY zGtR%2$x0aSKMV}48Rye6Fku^6a3;XMClhXj@)NGYcWOG-zGg5R z80TNqHZ{FuRN0ER5Fv&)Lr^y-&h^&RWRvxu-`C|;K->-mMh9WH>sk@4Y$Q;xl=V5y6ue;pC= zg42A>7il5D7<1-x-Q^P(Ldd)fg`2^$b%wA845lG zbngB(hM5O7&RMKQvMt)cA>EO7I4d~GQuJqMH=g#vFBQQ}1!BOZU4;(1C1EmIJZ*R~ z1qL~rvA&Z^SIQeE%3f)}rG=QJl9xnXCd~oA2_Yncxp#l|wVDkE{p@(vP4#TU_RDv3w2kfbH zF(>nTgrDZmU&kh6z>MwpRWh)-A!3?_wqKiqVDdMYlV2k2PGwuLfxG#gmQz6vvMxU5 
z4s1-`3dVHPd?Gfapq!IqqwgoN{Zu=FLC;jfHA>EjrGZ~TOtLaEG6tdfxSwIah?S_2 z|6Y_5m?~7695eMWfgx6!O@Sh_HIOm0CKvL_NHHbCrQOhD&Sdyk`ql>W7YGXAiv%emGXZft&vcIZ(vNv#CqW`;M5!gIRbD4Ze;X<=GEl@bc5;6r& zXsA0i`lPBDlY3cN;-tU7@bpX7J1%ha{s|_X>x!w9_2CM`@8+Djvg1>gLxF&i!0tFS zNtGBzvIV`Qon>y%1HT{qe5PF!-*Rw<%)8#wh6<4n?7n?W01TS28%x=?++=Y$Uki^Q)b3A_3u6Cu8#Y&%ma&new=YIsDWZAslC*?wNaE4yE!JayR*Q_ns_VpRcMqCk)?Z)D zxxBPCFHPR4k6u5RgJGQtTZ@2U7k9soQ{MJYqcAYg8=@MYnzXg4=%Xb4PZ8;u1|(m1 zNaBpuK)>?B*#Kjp%B9kJTwzRXz%p>G{B{EuN+%Idr@~^`3Nh6-EQ*bnx`sc$3i*2$ zS}ac;t8Fr7^S-)>b|y&}tQ#2%e}%+|A%MhPlEw?5Cwe28m{&b*quL;TtD*T@4S^M2 z)_7m!kJ#@pU@O~r5Z|4JKY~qM<(AGc$bXMtcrEdnH)?&i*IYrS40aoQc)c7QLD-w3 z%xX0uaM@odVbEvdke*$E$W7oCXRRi^;`K*vjrrTC1b0h1HQyseFIW?%Q zxWf6gM!4^oO8ntM>JdnRgt6SgRQW+t5exTQwE<#_9Ux zfyNQ$NXJnyAdh05un32275dS(>aR3OGZY8Ba3h^bjNK@m?LAVmb4nH*`=;OLw#v{W z740VkaW}y{>xGoa?G1?9ZE-Y4RpXyxEx);gJMw!EpFLxJ=~6VC_~sd#gI*Xmax71< zsUv5T`n%64|8Q})4=`U0dcDmCXL*T{^DWikJ4EGyN>_!27c~`E9uBUowFo{a=y2yN zvrn~=gYnXhiH>zw=K5v5Sdsl6N^qC23of#Q-0R&z9=|(;%DZM?AM9PV8d#L?uEO@=Hpnvu69zV0_zF?bs4U5r9Dl(*I&G(eh|7=5H!jM2!$QMgHSoaHCbg0DMmS&Z7~pbQaYr7A19Fk^Qutt?-S^C0zDBx5 za|@K^+ir2Kce?&)htm^Y9lzO6t;u2hVa@i*&hM9a6AarxLR<*TnBKwX&~xx6&aNGI zBeFQ4j<#=R65q@F^EEkInHir<$WP-s5%#lBpGFu%Wb^~Azft~CWxp;z6e_LIz3=q` z?RQdsULrA8Z&s*%k5NsdD#Oxiy*E+t6NM3C4no~8&wX@|MNS(;%W51f)9H79VVr_- z^+-r06vUI2rm)U@<=WeJ6{e&M&y4<@wmiuvHElBF+CERh0v>*yIV5q|77Y%Cs>hum zHKxLgqv&`-H#gmGy_S#{TwkzTAF=H%z}c*bnHlfMR(bzLgtnf7y83N3nH!lIwLulp z7_Db?!H$lEfdSYOeFq$N8LZ`&i{agBn%(Xk$dLhwTM(SvT-SZbh>u>kxMXC&(jfgP z;=Y6|5h5}&sStOpVL+E~5ZflTJ|g!;R8cSopqL(JbeUeSK9?5)c%nf_7vfp>S^Ui? 
zg&OPg)Bi*dQZFh~w{*GxcN72Xx~q2@?F=hKP}Md76z|HZ4NFQ%H9 z2f}7+tLqD}n7kYi?phd^kGilp#g}SJ&n0!9N5u&KSyjB`m6B78T>BXWflP&T0eYHw z1?tCeEu!sr2~G3&)vNa_RF4DE@vdqNH7rE*V^o|s9?O~5c$4ntqdjJiS!XGY2ISyi zNv~7f9!t7JKmLrlyXW#%E16WPJq;QZ+{%G;e3P-cXXsX(T17}ym1ug};*%G@PP?k- zfoLp1)8)yRsIAla`FW-QO-)Wo>F5^uQk7-u*evjz1${ zRLcln;KE*b8Q)9qhMaU#;0YcSfmlqcWSeUFLzrkh-X`a@QI1=iEZ!A z1!rfvC_W7l0E~P)HWr!K2Ph)QX30Jj!rp95qnq*Z@{V0tY_06;+wY$+%cJvz zGQYa^-0G9zu_4_1a~dyr1rAONuarw~55Cv=}rr#(tH zL*_e_fzwoiQMt8OyZJ;WU(qjF1$j$zV8Y<(He4PCDZGUkV;{p0T6ea04PseZ%veEk zs&r|a}c%u(}U}U7%nU9BVhX&N@na&4@^vqt@RX9R+eNS>~;G-_Zv;* zZ3Lhv-0r$n*lNwsZmmL6@K3vctyddE)-A1?ZSDwLRtY3Cdf^PSToCGj3@(Mow!{AX zno$4p{d~_bLI9p=^PiWJG_EQ0-s z`5n#;@qJm_SXvT;*o5WhZ}WX!H|{raR%%vRsaZc_pW&Bi7qZ1OJrl{-M+dIPIpgeN z`0GA(vz)8HG({PwR zfd~kQ>2*?)g4l?CeU3dox*F$y7%Z6Kw#NjnM%WOVOyB%rqsYuz_LtX;L0gu%rmrL5 zf(F_&J5?Oh(wq8(>lew^Ea!l-rK}Qsbybx1-dJ$6Gm|KcbIU#`7m+uIZ zBp{*^nh_`vM}If}ZWS$#z)ZUpQ5JkP7K^zA4rl11WoivuG9F3JTXoyFY-a=z72VGk zPk!vKpQXe<2p)i=Gdox37qa{t1onGG|4$r$O-xV!9k_?XHiAh?N?Ka-Z8r?*z23iY z+i#m0842bg@t+J;Y!Wh%&D!8WFRTWuk_c<1#UuV|vg+`)zVIaemlE`l5-e3&Ew|Q5 z#o^1>K|w;w&%%9@Aro#a3e+|;7cUb6OQk{Qa6`GVJ)DUV{7mLGhD-qe2QNSld_n2t z@nK9%piT_>_O=;3U%&oV>xc~J-w>5xhpcaLp{%@+mTaEN>i`9XtkDoki+|oe(XcZD znX4ra;sFg3<|FzKka!*>%)K2bB>23*C!lY3tphzCl1nGN zKAG#tVh@OgpSM9QTh8|+Q2|OpovB8=kbVzAhC5}mztUyLu9A>CvyqloQbLIid^4c^ zu`C`=_&(nLA3H${?gS<1C2{y6;Q$G`O_KLxDXTeLI2RYc%u$e$iJfu&F;$prPbUb- zMGJ$E&%o>`&grmotpS3$XXlYE9o!W**?d3`_#O0L{NhUmDFFz;_Kycb92E*9?d;sN zTfQD044Jl(Z$x@zgG~?Q#-7PVynyuzd4>te{IggkxYn}t^1#>>GaGy`6YlSQ2mFWN{T zJS>l+?Shkl{0O}ugt#m)viXgEzB>Uu)I5kh+IC>f=KQ>{wfOTZ@y26N?*jJXWNlb@ zPpy%<3(kyFup|j>fiNFZPH-N`%D))u8I6z>s7^o}m~s_zyZJenC)rlWca{Le+PEMG z%16{WF~5}Jw{1|Jd`539(a(r!o8ZL$seiA>Zd#x`Sdp<2VY3(eZ(EXk0O-ovPk8C3 zCc&289|8O`qW=E{t$G+L+pCZu#z4x}1|H7^+AqWc%bOQZ*p?ZKEc;z>O7`Myh5o@{ zyK11kA4K22vAtfh5l4Ahj8ooxT$eSJ_1%qXBB3D5!Sii`Ek z`(I~UFlYbn%h(ojtlX$FO!Njq`t3vL% zva0|RhQ0cFL;K9Gka29r?t9F?yT7PKJdQmI*(nY$j7pn;Vg(CRS(yLRbtw`~jJdVs 
zY=VUViDQ7FUzFggZj^vG4RLF(#BPxpT0WauCla<_%;LYu);Btn#*VDi9*0L*%@4kcpa23l3bg--0CK`MKrabnc`l=AeZO){%QS!K+{zZrLjKi2 zRgem6Px7XLAJ3%O6f>%)8flCx;xbTi@`F*Q^aCd)9uduY>n$Oz%7@a;>Z@U$n86<1 zgF1Wg)flx5wuOLolun{=mW+eiAPubT>1{`;a|pI(<*}FEWqp!D7*A{Q>x5Se=Zh_C zBIkm6?eh=wgN)ht;4*2;T*kM@vA`S>UIDAexIrf2{TWaee7CvBr{~aUA88|yuyC*y z(c$@`3x@S>BJcKRH7unob(i0?GJ`xL-)MBr;7G;D?_}3ogH~D3e#I!UB0X^?^sbFH z8*l!4+C%+I!b>++-RpHS3N)>E-}cA64kYv*s;x@z+mHE#HPpSQPQq5VNdX?Gr=qPz z3WE-lBRy^l53iYtkFCP>=1LYX?l_UgV+%glnPwu8tpVYndes~O+n|`qOU7B_G85E5 zor%vZaqvtdW;QnwvK2)(9G6}ZaFA4pc?VHBS_%qokyp(cif9QKNp)rC{5|fk z!dFh}3HlQ&GOrIvULaC8{qDbN?-gYyi;FMp=#KS+2j2cX8W3lB?3v~qUP*IqC9ni{ z*m_wv4n~njL(FZ+n7Q*2G@i^Z-ngfyghF!7<(QyTI5Ne3*x2M(QcB?Vfhi~`cq+>? zbZj+I-z#&+rybP_KEx}KGt_P()y;u+Msj>1#vwM}s@G(+dgu%+c}V%ek9{s z?(8uRXKPsV;`h-z1n;l}!;V`3;vn-Z;S$x`v?KaJ^@At^c!= zp(YjM?tOyTL3sNI(v&kl{=;8v87{;b*ipR*B_ zO+Z%g+P$$ltznKKjPR8A4f0A2P`@mr51_QReeY(s<$1YdhxS#p@KX*14D!!z+*JK) zRK8g~CEageOYCpVO^y`jBz4p_uI=WWqnskbn~9uC|Q6 z3{;1ENko|{R>z}Dfa;a(C5BOUZ~MZ)>qZ|WjU+;X*kN${W2QdH`wcE6UNwG`q@I;o z(7_tPmmk(5FpULCv$4FH)L$dMHQf*`#I@GwhrRA)G~XK5z{9*Cq}OPU9i!N8hsi#g z9k1`p^e=?k88+&=gu|VY4V@wHN(!k@hs23Cm@#V?eW-3#l#jPHn@E0{+9AOigiUsr zdgKtEJ9?imf9D4Xc<&QojrKphI24>iQxtfx4g+f$a0wnbfFS=W0M4!GbWKh z2LMu~^?h&ireYDTV^mN1T#SA=67lL5E1o!hkG3t55~2~N)ULCO3l;b>aXuOcPl4I z?*-4Ar478HN*3K0%e1dT6nxWK*o`*Iq*pAOI9%fwNf9!oTuvFhA~9BTS>N$9allTh zYWuoWe;1=x8C}|#V=>eIR2O~pI=3|FH&vz#`3YnEqUafNCQQ3-*jF3pLkYd*%pfx1 z%=ud?ba&ymq`Zd-8d(P=t30+}skpEZWqaGXVb~GkpKV^lo&tA%-EIs@I`hwp2_ry~ z{fW6af`!N+?`|-gRi(t{g-5Zd$&Mom_e4M-xA!Z?ArD1RRp9XUcEGeh>*lz!WLyV$ z-_CS;=Nxm1?v_QI3_VjjXA05XHsM*x)on8ICa|e|7LdUeb5#BXzNavaHR;LnJg<(y?kK=(N zmV60jlDuoZCCM+HxKae>JmJ6ANvCUS!Rz~=QWfQj-PZYKglsr6xrexa$0H;#a*jBJ z?q^PnJoe!Mvj}0v;j$X~6AuUAh27h?j#HFCF(&XVjtKGCc9jYk9-n+n%dW>3=gjCb zah)_rAIE>ZMr9U*oa<=_{16wgfcBDys3F70Bn&jof}i~nnp&{7Lf~M!$kNn7EXVP5 zl6dl?8?f6e++woy8aHqHZhb*ID+foNvmwu(e0x?YJSP@fHp1?;HZTBA4=zki%K=0o 
zA-5mwvEz(xf7?}I@F_$QVCcX|Aik;|4&HBsIk!p?ME29-_FOb!<3MI&HQ@4#>55S?Xn*@Z!CmCZkrqf4=ga60A0?bNiJ ztIrJj68cV~1j>=1G!en+xq1$dS0_W3Go5L1Dvf5IAZCx ztIhwEO2g*~4wKzDCqUf(sa4FKEmPDGw4yUVA3B@8`)UijWjFXR02ip^1xbTRw}ysT z>$YGX-#+C^MC2GLsT=URHe4fK86vpT`F0$~_+ze3`em0D6w*Cf9oTkIX-OWXOtMXk zYHe(SBE^4mo>nUtB;8+2jQJ0ozGaZ}(W{y)hvQ5{t+r(uO#lVLCF8Bd#7JP=eMw0T zQ?WfB?)$X%-dl&@qd?qsH2;*79qZDi=7?5Pz^xTK3@I&-P^vx}o3nF9Ilo+N6bVU| z9{H+lX*fC|v=Y}e<3Nsi0*~KuP~^PvvT{}HGL9W?m-;W>QW1jkU@V!W+)qS;l@+yu z$mHZ?YFm-Lm|So6kZP99A&x7xCdJ3D$P-|~RiJc)RZ8)z({VG8%Vmc6 zSs>s5E>usNt!LLlt|RgF7pD6?8TG;C_M3gr8NVxMo z?9)k$6~9|&DA`O2rxj3*Je7>cfw*!Vr*Rya~iPV_m=A0 zK+M|okjIRcxA6y=if(j%X+HQ=$i|4A=1Rnrx6?4 zqrNH7Pp4l55Vnc(_(Y)PprqHRY|JN%m;~d%u5i!506k< z1$zqKfuNkog33E@wf`L(JB7}WIk?kn6kui4th~0U+|0OW^tQn}{<-tCKZI3#D`{#9 z>@A#?nYr4L_vY4)+JR{}Xo$g9{{Ei(0+Qz?W5l{%&*0$ZX*%ZN_BL8ll7t6R{aGwS zAp(CGhlaH$(J8!bsND7FDUWHgL$u8a3fUT`j%+)KQlH7CK>PG3`=FE{WuF{RgV$qx zLyljdctw+869hBNN-PO+?y^v}H9e3orm*^&0@ix{1WBw zTPJn)ptJ4msQA=|mMZhkyXdZ|SGqgbeE6f#^FSj6{PPAwmPUWVES2UJ6XJy^Y9L)q z(GoO89CL zG5BJfk3AJBw=j_cdEMToUQ8Yu%D^H!?dif-R6@yN(l@f)mUAC7J;@Av!tSe?BDXa^ za*20I;Bmc*+x5$d@b!Z<)2?@QkH>)6Z5B#%Oft|*S%uNOE06XxZ6aBrnNP(=>B*t$%# zD=qSl^TX_|4o3b`oGD?${(Z1>Xs30+ZpYF(J}}Z1Z`cU(m?V6+d^XP1ys1^NF3Npg z|Kypy+X+WjL%;et8;#f@I=&x}&GMH2*W(gbiHyh4yokA!!Y$gE{rXqHP5qqm7A581 zDedJU!i`6(#4J5XyQ$mCp!lgAxTromEW6E4ko$Dj@6M}#nL%A$O@>|@#x$Ih7cekS z1)gWCBjry&=HT``#^!B{iim3Hr?1aVPYKqwe=v$D3Uhk?)|)5lF{Sx+8kJUt?-Zra zivA~9>!ElA6^Ml9bY)qxySzEL4l3;M0aHtu`laJ4vqqfshJLsyEe!Ee~h{s-X}n+~VmpRLXu$|5{h@o|+36M)T+KLLD0VkW(`s z&PkmhM7a1PGI5m?=tXU_`iTBR0g>}hpWu%%^f&x3QW(Z36&V{xeWbQGFY_HkdTv+Z zwyYPy%_~M)^H>}@mE_laY_BH;_9k3-7j0K>XP3Q;ed~moJ-88Nz0nR!U0bP3*hQSl zc;r475}xZ*u&cHFomsGon%I{RcK^_8l(e%(^dClbMGEp~$jNYrmp$z!#t&q6%@z*n zH{hqo$LnmmItrD&mH_`7CL305mR>{(5U%+AAu+BBG`wI)WpKz|ykT##D1;YtPz&Jj=_um#DL z6f4m8UK3IOOk{;ZFz_*0$lN8tGF(D?+N{4}V5z2dgw|yO?9Jq z>+COKieDo~gh%$tR1V3vhGe%VkQ>_GHPv6NSxRAM*-!rD=JGB9M#amC{7d@%z_Ca` z(#oQKytliS;%)oN69UJMG+EKF>OTwEE9!UcMf-G1^hK;SXi!?CVmH{p+WDfiU%OD{ 
ztIxwGhx-Qy#zQYSos(sW*d+)ML|Vf>+mKK5fK}Acl!qy9dFQDmhu*uoe$H%cynmxX6MD}1qUYNi%of1z8r$StHTqhjXX4<#h2&tlX^D8^;6BRC}ty|IvHf;JRpAi@m zLX&!O%EqgF|AkMP5ip2m@qpYvXn9o(Y=5NFT$F4eB!fCk&oI+7-QnEZ@td^%x*;K9 zk{ByLzq;O{c4Y27k%e0#YMHJGts0+O06dvZekc9=S(@y1hYM)mlTi7r-7g=0JU)nmfCO4p&WT=fIK!BuCQS-Vs8R-sPfSw=t01v00xIqmdf_uv7j`(vd8026Nk(RChz3=BeB(W5 zb7h)42%C$X^;WWhGD49haG)t6?}cnu0YEqRw(=7tF;jAIPc1vrup+<|6`24TV%FZS zZ!Ip*r>xM&OgTygjOo)cjR;nraqN-(nh*c&d*7ODjaH@LY28HKnWh+WG@yE`+ZhS~ zdO}(|-Bk(o!u_j~`Tw!^&heFff1YkqNhOt3Y}>ZYij#_M+qRulDzMV)gY}ucTO{C!`FYz6qnC zvi`t@<|pt^P9NaS_Et%bvqj4~cEhBk#IQi9m%mZN4yJM*rB?X)5K+rI+LM0J z*H1i)ESV@hO)M^_y3Epv)Ab4%D7ocaOewa6s;Y;$L$baUvhATPVYjM#>doc6R5-3` zn(jnUMK`@_X;MrO)|F48`2qRe5Xv{`sZSw`BVaVCP*(k%@71N-KF6C&N3~`oxa}>m zs-GyCoU!J{w@xyA;J%XhK3QKNy70Axp>>7qsgS{iH{nIc0M)&-;d0_6tCnfG;qg)a+NkC$mX^L!gNXhQ zVPEiuvz#+M>GlDWTv7YIoe#>UseGBDTGg6zs&`F6T3fAAp&vc7n|q@V66u zR3}O%%GJGy$Qy}RO*eb?jVTO_sCz901rV`NN`B*L`b>`FlYC8*FAS*O7Qb9yCuRV`7|ohXXwv!~zFt0VkTKm3l26>h3(DVar_n zTJy*WeE|){@u2}J0VJTzVodnR0cLq&WX8kZl3axeA}AsW2o5d|AC=cAv#z$m@QJP3 zRfRcmaplr~exOFpGld>XW!6+GGw0^G`GN{zWQQefu=ejs;e+A z|8>lt6^2ZMHL2Di*aH`-QBwE9z772N$2OuMJM2<-vuGx!Uz{_DRxhQ%yVK zPAh>uxZ7x)#zY~XWGwYDldmL=n8r)O!mOYwi@Kkt*C8Rpk|e}BamHg`avWtCmayZv zjvBl5ttR{G2fj-aqZ(;CvDAEjHL0@#VcjGRoYQmv9+Q%S&(Uj354-%3ofCscX*J1| zA{x}EaELs|5PDWP>09(MgmS^bb_RO@dN4+t$F;hQGYd_Gm41JIx69n)63#0++FAk7@hg;>DA{< zKc^Kn$)ebPQtQs@JaLrQ^^$L@zGKuLJh)b;i|8=3+M@$H%Bj2jG_#tidChsZdKh>$ z@uF^aV@MdC^r(PS!}jL1omO;ja=m|{1c`FePtIn;kuOxnWsX$6dm&w@Z4AGnZjB44~X_vFo2HmFdQ}Us&U}jcI&efC2pJ+^UwGbn2g{DEHGAwf z2h;m9TS~p98xEq4?v}A?Ir7Q<15EQ(j*UF-5x11* zu)2OS&HznERAM_%xI3wSe4wG!#rTFy^G9u@am5O%s^yW_xfR^4!DeWU|7R4$ zOtkX^(}3|rd5W}T?I3TpBsGDeR*P$bL$|K+MrMti6I+9Iip9e8J|i{CD-UMJD{TtU z(C`SJekVu;D(3|uvZ<~a=IapX(+Vga!$!(=j;S8E7HAa}@jKWU=?o~&r*pYtgD^nA zk!+m!7*UP6?0ppRu4u%sN?)o*7YMPsGzs^W;r1hGz&yxv?d1bhaQxG2%GgWxMzuh< zf`Vo{xoBVV0Yc{+sB=qQpb%dj5OF_`_XiZ~mNrK^&CxM=o4)9Krnge1I{0JPQiWzK z z)tjm2rx?4o0pj@8F2%i-q2NaJV}mmJ-ym} 
zOt^W?`VI~*c2=g-{U#jC>%ptO4pE13Rc`w~!bVTx+|}7Nk=9jgki@@{0f7jM{K(=2!~N6>sho(l zllmk_n4m05$U*tMLCEA^M`(phZ+VLhv?=a-gf!0FMf-Zwri5xLA}emkze+{I??41b z@e}ooUHK4B#(Qloi2wKx+97Y%i>N9#)G3Ht=q(VNu*#7#e^m$Wf>Z%xS`4lvu1fw- zO2FOp$FgaMI>p=EOlF^|n&Z?!%L4lY?RzC1u$J4~%C!dxI-dj1!#Ea)qY5rV7myi< z076Vpc`<}l_~mkM_9|WgkxwuHtbyOEfi^eR^PjV9b%JPZo~PB;IzIsOx8W_wZFR_Y z`}W2ET<1?xK#0`nHFB*hui;6HAtzVGtZ0wJck~UkX9J8GC}7IcCc3G!GawN)d-WSC z7Cr=kQV_&nL4N?hMty*5awvXHV~2iuv98eYgU@lt0}vU~JyD{6oQz-h{Mcfl zy!fny8OF3mV9I)B6gt!nmRF?Cp5;FWXzaYp35fR22n$mR8KL89OSxi25~oF+_!a+5SDMIv-~aP`Z|`zJy?*;m~RqgxW{gv9kLcwqS|r3+UA1mL0q%`A(SwpfVQL-4--6PN{?$DmIo699J-xg=P?Pm#Tg--iwi*eBL& z@$W0z>Da%|9-R&6`nL&T)*ePRzYlR1(U%;;Z8)baaTh815wcgHW zz)R~yOG0qT&{TAx*(Z}fHZ10l>?z-OcnLF0_8pQbq5pL``Lvfr7hQMhXqTJix%UuZ$9?5)+)V^H%)US(R`-dF zx})8Afp2~M*dR$v@Y*q2OwcV9(n-$V3g^3+v(o8IzQpA6_PC<5;L}+KU6d_e?5~{E z0LbxYH+rw6-{Vl|*0aZ?qt^lHF~INcm_sqEya*82)gXPyLwv9JW?A^`K5HMkfI`D3 zVN?FdcCqkC9?W#%pp|r(#h{phJ#QA_xnICm2E3ynKSfF#=$}W$V5snft{a>0ZW_fk*LoVQ;<^-c99swo^ekI~-5X;Sx)s35#cxv< zv=W6*^1{X2Y`DK2QbwUjS5uSKw7hZJD${P3;`w)?RN<0ftm0B*H`rV*2h_rx>|NmU zZNP9=V!TSA9**i|zxa*f#3}Uuj$lcfEZ;uM3JRX+eUVwYwd-4%b=D$kf+BIY&%O|l zMJu!^h1_t?j#?`~>pcA0-EI$alH5xA4k(jbrRMzH5gT0%H@f)5*>5n+U*2`4$`O`% ztqyO1Mt>q9!T4Q&0%eD3;ouY>{}wGrX3$lf_nF3GIHU}JeJNSkJY7_Tj&7i z2A8TbWH2oPMIX#!V!|s>^41&@VRRH0b`%`+KfOC1PNcO(w3=s@C89u!DeOUCj}D=@ z)80#4az74KCuLc|D4`U8#B=ig;~t<@vo`TL?jQTGx2Q<|@F{nA=;p5Ov)z=i;QQhG zM5TSU78ceb@6kB^t0bf?DMHu~{H1@E)Gf0&3{-!@YNj%J2~#Buci?+qexjWn4Yj4W zIsf1++G!o$Rb;Hp88oh_ye1&x7BCPBo`t&r$R(yxXjABLB%N8)x8FUc3MAAAcXgF8 z2+ky(6I9cNhPfUZ>wRl-h-|;13JD7z%kg)gc`cSq$zStnW%L4gNRi~OE;rFWX~Zjv zoi{-)fu#}scB7pGM|kZ+>UVasSAFBLq^m}QBX9(J?P2bFH~{oC!+7^8Z8t*tC-mf{ zOGg@u->be0moVv)-{%2U#~Uq+lXp#Jd&QHAd!Q4=Uc2PM6`;NIt@oVi$Lb0-yfecN zouCz;~h|QCS6rq9Xz8Uy_ ziUiBohR6lRY9H(9RIAry#@0qIRY{mWq=s2_(1m=q6zEQ-jJE3|!26wjf(i3X!T% z#*m=EYE#`pv+}rLMTaq$nVD3>uGSEhGS@}3|G^yW86$^dP?Nn(8y6V7R2d0d?A7P_ zm2(bv^7mu^2Ab`OTYGn=d%-RuJJoq~trg++Kd!EU 
zP9J<883G$K+=SE3DjKW$0y&cqLA_$7v_nr~BjjuWKKpGjmOQZRQ=24l0DiTqjzn2< z%QvD=eNc<8J9q#<`P~!JG^Ok`v>y-$D0Z!a`T&EeHGT|WG}!D~`8WR$QcE%h4!o@5yDpE+B%0$Z2lxdeDvrUBxE?; zBy;&a4|t2pqtnL-;gY^(-MW*EaB&}RIe|oXugm3)hK!ji3Narsn}_kd>uO-RI_L`t z5opf3q3qJHKyew*Y&$x|EsW~CF7rGwHcWZ9d{re92|yi{%$nJq;3qJbUW?$UHdA@0 zh6Vo=$hrUu-+`oEimiZvkSx?V-fw4prM}%dfC6oNv?#4-4^gyic$?*W6{hBxv%(#) zmtwnQSHfeS`iPDZ&*E~-;rJ11u#^LtyBiJ~q^@>sM+^NpV3FORK`|OX62tr0L1%YM z2ao4$lZ$jh6|*mKj9h_O4S)p6em+#g3U8;v6oe$x7&{Kjwz(-nP*j%X7@+2ZiSJ$_ zg_?X5W=+&QyTbZ{wkCR}j08J0`uHXP>$rBjE0hib4L0;Ke)*5&C=Byo&hA>pa;i;h zps@B?c(9otfv%IoI;C6mY%X|AI+2eI@iV&>zDcs;8^`GKm6vscL5)94ppwDCRWIiY zbps(_8NyFnmksUQFL|nNRH&5Ds^|E?;*rGvK^4D@vXQ z@|*AWegB_8=-6Kg0)6}c5uN`GM5p?-@u07mi<`e}gIIW-9at2F5-9+*&u;*;6~;(n z6-*YsP`LyJw1weadVS1U^hvT?Q#_guT;F9r*Mxc!;x%zXeLMPPlI~olHO$(; z6}w_8REpk(*Sur4^R$6SXp}kc5R%BD;H8vx+ zU(1^^n)s{CVRgUe8n%A<@=$>R{qu2|hDbP>=is7g|5Tebg30M=bq-D&7#^(rxC=CaEf&{c9 z@zq{8w+K%py6RQz#@wA^0aAs(KW>V-hqv+&&k=kE{E_QL`N4|j_vfX?k;-+dn5sJ{ zRX>1~aoVup3pYDWRD^|BvI%E8ftnAQ9UoO6!w!&^2i#m_ih`RkUY(s1p0|Fv*Y ziE&TD>^-fyutWj~#uO6ncYL0j+Daj0a&mITwXgw9?q@4VO;e^wTg2xQrIC)rpQIcr z;SOuJ`6t41*h6eBhx>K%$FvNz<>!eF_Bs791$6rPbfl9r3qP55V`zC)p?@K{5NLZ% zi&HWT5!nG(Q)Gu1oy_$j#|a{G*AJtG9Ap7!(d=t|cgFS_fgWIR2A`G3&4c zk?Yq|KSq{)FHq$E#V|}FVfSCJ>VlU0u|p$2Wu433a(#ZVHfZB-rJDkRLfm*aS`a=_ zd5wyra@rV1c@%N6;U-z&(YHIZ<{>y{uJu#aDAT5+kE`bF(yEkSPgOz6qn>(D6Ybr( z^hkC2!Slb}w+)+YI-aahhaqyWzwvxE=LDdq%%+mk<6kkD7*7tUnRHiNlxXE-=;`gk zZSbaVGrCi?oMw8}fCa-Lpm3vaet41-fQeS)8UoT6RM2AB*<(ok0eO0XSwFj*3XU#! 
zN-x?@-0;qTL9NizL(CMdz+(O)Yf zmw#HS%P$Y8M4vmqQoROm#$g(FjZh{hLtMcF>g>!ey)HJ7f1c@uAct#0#N4_cz^6UN%4E8yM!vip- zVok_0LuMC3)oUR337K9Bzfv6xp7*God4a#i8<}^z!0KRCU`zR7Iw9u#yQGfSyn_ee zL828sup#qClx88$uj#m_=%*(fnofwlzHMK_6>03YTx zui*f@VfbBd(BqJA_yY8qB&40!X_N;?GG($Umk+aC!SRp;i#;h@X?9>xr1nUf%c5`Q z_n_j>+(Egh_e!(;H$T0pLfXvb3`s4Ncs{u%8j@-E5x9MP`8LGp6$kG-TH-M__>2al z*tgQ~!jPvBkU_0t8Wpn;@TIH;;N#AL5);fR zZ_XWQbZ{l1-<21=`&fa=3cNuD+6}vxcOD^YW)SE5cz*vDYDe%PF#6Rt6waVUA}sfu zA!Ut@YQ93safH97#%^~z{PG)2(EeRLjYc9Z$Z7z^?Kuf`#^O;YgYjD@22XN<5|;%M zW-%#Gqj&V32U|{Ge1B^c6JcM)G=nQ1>G(&$$bSggfz>uO+*bw1&*a2J3EX7ZXmbTW zqBo~o{o1|8sHmYSL8Sw07{$p85?U+|#k1a$0HpnU_u?wvvhQ6}Hssyc_qr_$>F!q| zsURA4QSBwTH^x4<`Fkm?R*u^kwcor75THAlNFWayQC4KEPx)N1f-pdWm9nUB0bjg# z*Zmf4x{igaJ`HoXUVcKu&D6cXF5FA? zI=06x6dS`0>U_UIghS9-!u~28kg~B5;|l{Aqtr%pRD2>)Q|~pK9t!^D1FoXr3=a?F z_Lu-#Q~L$bnBr17o1|BDv+zj29!WVc^hE{4?_bH-D(lHt^ebJ*Tk^rX6RU70Lhy==lZ3oIk1Z`? z0zK_yaEj4Aw@v#}^IC|S?}m+~Z(UwfQvefjN;1P#U4yK6E4(cux5O6fN+S*MW$DsM zbOiJQ8W7)jYvmJ5fA!N_39sg0yo{#sQ(vS+^ z?Gbm|v|dfO%1NeU9U8|itFmm|+ZOD!}O@gu9|96m`b3IfH z{j6a(LbJBVEr@90K`I?{#3S;JjnJI{Wd-{+lj~qn=Z)eSUE5r5;u?mTEl1l@s_>oy zCe0Wmlo7Nq(AS~Ge2(7|>1DDTR(v0>{SJjbUAk%v;$dB%X?4TJ803u%_CZXOQ6f*Z zB~BOz12`;4QBsum>7$t>Zn4-86;At%=0E>}84`ttK}$ zvKK;!);>knJR?<4E=Ou;Gp*ZZ)JA8fC7KgkAK2Rl$}A!WV#2<_S}Y4M_oul+>~DRv%|z9uirVyH%6~8 zW|8mB#N`?Yi2}k7+OV45OeF&HRTBL;cX7Mib|ZFeYo?{9z?wA0boIu!A3v?ctY$g_ zM<*4A#?*qAg91p{s1(S#TYxfC1tlRFl25RDoo51L=2!YDp8J>x-W)J{%z~wog*JSu z++C%_#bY~&ANF?q4`!H|fbEoIhjT|y&z$qcC6jo*pd3%~GHqt4N{ggv@f`AXyuFQ6 zd!LB66@L2>$(5d}-si}G0=~pWR>qB^>7?F7iGH+7ZP~r}d#82koS}@K>g3FloC^G# z>T*)})G<~~d3^r!!OG7n69g!9dSE;}5=W3)!+Mcg>uo}GHL1;DzA5$|7o=~Q~Cx@_ZWqeLN}^F_2~1? 
zK?N3W2SpQ~S?lt}70N2w9SL!-Ugs>Li&X@Nx*X1w7@=9TMt2=y`#r$44Xsd^} zTBEYlDm-+pN7I~HBD(T+cXxOJvj8S+!o_WnYeb4;mu1$2bWt1R$*oEA3{qlIk2Pmm zMFRdYTV{=hrXdz$(m-Uktfc5@;3^Cw0(ZSUD=Vb=_uc6tCeqLFb80>_v!ElW!G11-)!;Azzp^IoSrZ&|1o?Dj z*-@gJOdjx@7sBRuNf6#5Zqu zk(JKrDAdSni~*1pFCVqZ$XNsL8a=m`mWA1@dLlG$DBhmdPP^o+pYd9mxb*XrQoD} ztJHHRIE1Ms7iQm(sqK;bEH)}Eedj{J`si1_@ET|ztyvgNgFP;H`IecHtlGZEj^Efc zKZR4;sdG1XvPLh)LSMP)RWFJtJr>t{W^QSkgK_m2tP#Gy>epP4-5YB|+i@Yz?BspW zcTSdHsnfyW*t9D}p9A6VPWSqJ3$8Dp&wn0RX^3JqjjqWs>L9SBAA!+TUWXp>Lt(7p zKxvK~3S&+$n4K@p2(rFXKneHXsDjFCc@ve%G~($k$QKNd^vLl8C_*iBL^Kr0aus!y zj9E?+u(b?yxn{W|iAh;pV1S`;2oq$k>2f}p8dUZWZdAHy$hJA6F9)x2yVf|>dd$2P zrgk=lRSb1i0OSBb)@tu7+V~>C$JlKMFeBvw=Wt0)WUXWt8b+BU4pHOOidB$eZ!D;kO_W&1B&;TqT+&H;&I{0HJc`W|m`MJ(7hqdu08 zg%;I){_0$c`ii&ZCVF+_g?4~b4rX@bs z9XKAm>$j%C9Lhkn9#%|NqQaFQibgKAap6{Auah5XEULK!sr#(u46SAtUSEbJp#ty8 zXucCM_0e8hzx35JWfQ03_mw~h>8%81c&;n> z5pu->3)+1QAEp*6%qi!9`TI;cpo@j)0WDR2S_wP& zuk}7;iaJlr%w3DZpx#|d9%LAouB&{Z>S5~Ovg2m{Tx&;};B_wunbS%^FLnFD%rlDC==LDR=53xTSyR>Nh|NqGlaRW!~q^Y7L6Dkv_#xwwwRA z0VJf&3;&4)b0L0n#m9q&rK3=4v&b@wg{s5N0mJwOGIWj$He(qgE&Q+KA9b=RPTBO&nNsle20oa} zlQ%xUr%Nm)KWcsw?UI!cDoHtq9&?;;76k+p>AY@4w^^JEHm_b~?xXnn)(**UQ!S9@ z@)AbnTHC+;~8aKTA8lR+2^dM%g z8_9YP*p?VWqGVtAoa-4@=Cj41n-_%QKDN*O3)zRsq0y__jnk6r{E~JHn>Nb#c~p&u zq&$iOgSu@GE9gQ4E0-g#vd=tjTG29Y&qHCqQ5`#0)Ds%eKwHShk}XfvlOfBnYj-WU zHO|AVjAr*4>(jWZ^_pASjJmdVlY@^-IV*Lg6z+^c0=2t3+*Xs-7+-6>9Ddc(3B1N` z5#vWRQ-(PP$Zi^=Pbngh@#nCldbr++5G*~7wJQax4s)gC~e4gZ=) z$cv~Znbm5+^kYJyYBRWJMuMF^F`=QeKFi+}aGg0YdNdokSypz05eNK_FA5mjGGSL7 zOwH}b0ST&cOYPW>kn}-yXF2q5Wa3c4COirX@n}d$qsLaIz>F~Pn;R^pA{>ShP_P9~ z0;d!oW%xtf9rzw4oKt2**2?fF4(sDd{$*TF{egf^fCMEb=~euqICzM!H8vyr)A^S? 
z&!6zc!ZCo`sbTKJX7{ff$L_g7rwvOw^m#ahr&V8TX0kbQNnl$$-}?gC0nkA{h87j#cDvX#*F< zsDBUN;_hYHhgPX{P_vwx($_hrCrl=T16;8mY<>7nJaf#9@O61}v5N=y9YP|>pPhFBrI#o~j{#et!Bgc1*TTqX+ogQwkF#;RHt z8|4O>WLMj3R&N zJNk!C^>+Vz`i@J?|ERvB`ra`jJHNp#S?+!xG4>p9tz~xXewIsev>}y8J@-88i%-FV zVD$FsRg;@41`ckY&)Qf4Kav|%3~f@j*5|HNf7+3j7P z|0*3zhS9GE~E5ANYfnRN7}ZmcFLnJK;}rf%sf{d{;!rQ%+<`Vy|z*6xg};!1^b z>hdqre)J|x53eeB@@-2?X09N3*Z{DyYpsHxZfneNFlhsO=^6C*FVgN7Zn4wj#Q=aH zSpN@zU>567d6g2WYwhJ&2Y8aOU0RZxEy;1x{v3hanKtfU+B!!9b z2IAAP{BHB!<%RB6Y;KIDxWiL;NJ_dYFNI=%6f6u#2>Gn-an{}mhDdD1+PIOM)lUT}9+ zQlF1J2Cf`okhV0=$ip_yt%NS4W;$EpLT9xH#;woG0T$?l@LC^Do8I_nbi? zh0+-&Pl$aAas|8diZx%XEbggLDqW?vn6zA1Yc7n!OFnOjWSQdVJB`Z{*fVa~m37a$K842XX{9B$SwZA7)8V%vIn9`>p z;pwIa>gpiy=GpQGEAY;_?CYg45!Pb=C5lRk>>EVL*GN_;?6HIf=qv*1N(Jg~4$uZ| zhW=Iut%M#0M|#o>nT9ENkG2%gbivki>-5U#`@j9k_203Z*+GD1gOOGIl*hUEWI$rD zol+2>)~1~ydhPnf9US9$ez0RCLH{;ZHUHUD&cr~18fS^XCH_ycoNNY*cA1o@*$GpR zO$q+@L>g|lZ?Net%e;79jt%0jY^gtt!D|bOilOt_`|sD$IyS>jLoq>HAK;Ye)EX%9 zbS_J#@fKw70D)e`ja84MvdDRv1QlqPa! 
z)E=4_9r*_-pepgtqyTfR2FnJUTeW~EOlz&o>M@e7K+di?I7d(uBKq00Ag)3liM(Jd zvkkV$zwh>t(my(4koV2NLKPKZ>C94-uUA?_X=1@dB^onHxw{K~Vvg}7F8LuQR=lIc zw81^>EPo`PYZ?J~!BQG-N)#hvmkGB#J^#GVDS|P;TEDehDZWEZPF#49SY@Bq1D8*# z{+SeKY-&!PVmpMISq3Hs$rVf6X_AkOZh+A2qY^(Qtl)UE9X6aDnz3R4C1gJZB%5eg z8Tg9GYY+@YzG@}%kxDL+1bGM@`vQisd z6~BQ=sr43E`B!Tc?TNryx#Tp;cMGUP$>iVj^9(eKdNN~PFfuOgF$bA6 z9K4^_rW94A#5`1bOZR!`!45aJrqLi*W_ry=+OHTi*t9(w!}kKFT|obS(nv7kgn;5|Q1t8DA+Sq~BF3bxrxXjVt05(+c-(kv-&Uh41g9 z7fO33Nh_l{AFkb1JL@QT$B<%dn8{4L|D-J|NAHOWXSEzXb^R{DD#K00p`-ZpUGxTFp;iFMkoK zb(t*KSxKUvZ(xqMjJRL&YMiX`e&UX{1uJX&m7ENO+g4Ce@NjYXkxtW36QqJq@|?NJ z=)GlMX<8PfjzV%FJt@h4X~nxd=TM{HHQ!lxoHx|{R!yMK6`J@>Bcfzc3l=i>j0Kb{ z_>Tf(i5Y=wI^1F?+FR&L;E3M(T(OYhJ2_Gf$?kaKS=I6`wxrrid4TB&5a;V(MHw66 z;lq@wsNY6GH9%JMhWs}uK)Vgr5DnYIRyPl&NGrs*nq})$P1*7iweZHT7vlraUsp`> zOE_7#(Rw9YQdoFcQiPp%ve~GlTs4T9-LQ!B{#nm)5cq$Po@3#Er=DXWd1#zAfK}6V zA-n(3D^Pfw)sNMB1K4d0W6X)4?okq%jCzZ+5y73zer*!{hh)0m)$u|-=+g_Pb8w|a z@1Y1VfA#_F!r|Bati(1h^d1xbbWh$VMv_a_Q*D!Lgn9i5n6iN{Nj9Y&NXY~`2EyFe zYQwDa33M#mQ*Ai<;XjDIA2o05Pp<+Zpz48B(ewh7_9{91v1>*8U#pkWT&72_Scy?$ zXU(r<&EB>vLGrI+2bFGw0B_yzb0Ngr>TN~`m<<-S&rmwjN!U_+k_F1F8h)!_{*N`` zT3#YNA8<}weoiWZR0dUU({LQ=JUw29;klrnF5l@Tl>XVe(Qw5)UH~XNSCBjLH#?&i zTc5LIo3mZMo}Q0vjH?wEiDt!<_awe7ac!nm3e6BPe88*$fFirQwK6eNLK)WLCp zx&egIBco9ZP}?~5}5xUS%fQ~@Dmz>FMO99zxbS#fQ(vo?CeEvO*yH^hQxr~5YwTm33UzG-{jZg zmwua9D|#4W%JyZ1NJWlEa)T%!b-k)BRH|}KnwwrMJNs3JD=6)==4N8I2@&Mv*dUhD zBU#}WTIGc%t$LJOyf&=VcwFuL>)c*U{a@$y^Z!O}53m;g%iO*p$0fedk8O2hl(v&i%(3X*ppl9B z5b=@Wmd>NdRB02WbG1jACG~%t%cJ~SKSh{bBx-UXNUC$3&=kGlvvBMQJ z1t`sCZaAsk$$_TP8DFDbZY~C}8A|?aG57{qg)lP&v~n=H1DJAR4)Evj5s&5xgC_bn z>kchGDPVKtCoO`)BbV#$l;NthHZ9&CRO6qe&`asHcTQOOfDWN%ga{P}f6@%#H!mIN zQXTn)v3bJK7>`~+1|pAyu7I)^b%bvfes?XdD+_*wK(SgmX)>{ADCtX9@n(thu~G@p zk70?DFZ1xMGd?q0*>AX5R5?vI6Yq(00^$&)w-HVKmAIqTEW#%yqG#B z?k2z7+PmcRQT~fov9h%2Ig)7D`U2v9HSa%JVNyjdwqP11RezMbJOS@<(c+(Bi_&sKxY9zDDm54-5xdxDyuXf^DrdYX z-8rc0f~s2^EiUpbs(2xHunpqZ#rwU}8Z>Ge*YAkPV 
zW~0UbXEJ!51Z4*rE~?|9n_&|9bp0pke7G95Ru$sUE-V64mw$3-P%dG&{OQoJqwwww z{v(O^J)IDmh^DEm-HfN8*M0e>lE?cw<>79`j&AGroCflIbIaQJ226RKL4e16<6vb4 zzc{RdwbuiwbAEkPv)ZFM%(rP}`W+_^X*19&Q8!Xo&e&Q)mz{5DWI!W8`Y^__oNAnP zF~LtbPG=|t0tlEM-R@fdL5$N}JqS_GJ}S~^e82wTRj0nXOexa{3S_}SZMLw;qvfIv zPA1T?(%nNDgp>1)bL$@#4Gy?Jm22z96^IKIHhtABxx;R4N>5@};`{gie+G^wJyh}l_VeMi3XN)nzDdDrVh3W6Ue+FmGZspXS+rC$CM!}YnD}$| zo0d(ZYAG!PDC!6Of!n*Iy5t_pg<7eqn@2h>Zf4^KoujP7z%1v1oYf3^WAXuK^9M%P zV()3gRG5)IX7B$tUxv*8GGB(W!vBmfBeQo2FkB%dj8J%kTio z_XpmDjE2h@-aQ2uslrn^PN-9v$_AfcY8Vg0BUh_N^3O(0{PIX+e>8S#7*Fe#-JNVx z1?eShHK01ORiGp0^C@AGiCWft%5o?U36D#O}^!ske zaX@cl7a9#Zwt0P;o$CVTKZ>&Y+vBQ7dTK}>jZR6|mPv1<| zM56*z0n9MKx1mhFgP)q_@s@H+Cng8W``r3xA`iG6`rh%XvPxqmD3ZE8EDwxAzGH2? z1=lIMxQ!jW-LkixxOd68zdaI6lS;!RTu98=<2=8hed$jBJGx9ywV++uBMUFR#_YV0 zKj-YavEpe?K8Qo@+su0h+0s^~MC6LPr^NU&`>u-{LoLMujK zvhnpY&gOnrhNV)fZ4aKQl)yyBEf?7obIp|~j#BJvPfz=0D6{27%p^p6WAiT5+*Ck zp^t^hHrXNiX&4^m>}4hwdgCqD>ecWm_Mt)hiWBrI%v3f!S8C`&Y}_B+%*D2!+^|ItMoN zr@upMz^n|=u+9;;31MuHwQLfl4>Gky>%Wf#JgF;?H6&^6*;c`B;o-3(J}t_RSH_~d zJQ@E{VV-Q$nP7j#G8s=Ak{R=7X}Vb|Zim1>=8G5r%ohR3g5|?Y{?Iv~>vmVTi6eA= zE;LF}Spqx@e@(Rl$j+x8|9KkNfAR7EX^}dABCU2uOzK}%_Z%Rov!hX)w9?ciRwgf! 
z7Xoh{|JCdo|6T$A9;xu3WWcNqefxiD?=7I}Sh{`Rg$H+ccXtU8+}+*X-ARH62<{r( z-JReNJh;2NOW>{Soqe{P@7{CHc=y~l-nh3JtkF_)7TwgUuI@Si^@9s*kb^pAnRhf< zfBKa(0K{!vJoWuOW{#8*{5{9Y33jsBqV%)y@0aQf>1vB$h% zuoje7=!B%@Z>271WBwN-H7YmROYz>N?dbj@X?P3+h5Y4>B8lvqK&}la!kL@}_Uwhy zFV((?UMEqc>4l%$?qij%rWV}Mu)VDyKPK+~H#7}kUIGOBEd@Qtqm2s&ww1dUm2B%o z^7D8>s1{3CJz$wNP0_A{D~UdmMcrUhdKq9fCuqNz;b*9*;;3nnuJdr3bQ4wgSJ(Z3vb0z;p!uVs=^oiQHKPbZFVEcX+W)eW8lOR z%6sRGoyu22-%;+tS5bhS1UHnt`i!ENnCy!K6GJxDzy9wL=?iPcL>BDhY8s)Q*m7Cl z%l1MBqJ@rM!YAj#j+gQFxi8Tbe;N!QViJ>RLyRE|1D-*eYicJe`tjXgnHe59#u*sk z9n6F-#9U=^vZd7OOqc^=kny95Fy2A9jJydLpKN;}`Qpo&+!rV6z z0JWzoLi~63K_uFR!k(J^JrDcuRtJ*bjNhCSRF^aANu3|)xVGL<=!AFu3t4{Vve6eF zx@`f24y$4og0J!(p}s}F5U)PePX4cQdg6 z{n(5n9k%`oW=*tbC0OTeksrL`WJ9KppB#37ln*iY@D6n)GT`!na7ZoL0u6b(2+x}L2w|f-(josOTnzb`h>7va;;fLGs zbyt4pPe~%PuO*nq0 zFd*P5)YkT*MA$vc&$`M0xp8CQcY4Mfvv0;&fq#?b*T&ZDt(hDgxS?u%-#lr5>KZn# z)))xShvZ3)(DG95!5-;aS-%Qck%qRnF5O^`_@C?c|7F;H%U4?>v{PPP)n{u;hwMu5 z|AycH4-LPwxv9^lb0rk|Q}Rwk4Qnd8%XP){!9Wm$RA*oGR|G6fHGrt}9wd}Ne5m*k z(P%R<>f;_bj;8xfa<^6nb(>#~oN)IXzJPa^CO6%Q$+WDKl+&CDa99N@eO}TW@@xsH z>5)_Z_<{G4!BC?@Bdo%13W{1BtoFktowck~Q-WCkwo#4jNAZ9Ax-qB(m)}WFJ1%lZ zo_SLjy-wweBW`?A=6r$zZyHmhMl?q~0D|$**z3Qr6*hI?_S|VZzI6;Rm?QWKBR1j& z0e-B@j+1v}C1(e#$Nx1m@CbcIC_{GAo zvS@?^#u)!Y-9CHdm)sE<2rnlcJnV(%k5;lX9xE6nf0gKS>3{z;NBK*l?+0(9A4a33 zq*T&`9_cQBxq{X*Wr4HhcBNV#@5`7k=Te1nu(>Ba6Q08p?dUQ-Vp%w)Z=$EQ#Av%G z5{@rp*e_)$mziIlZ_!Vz?^AblHD~1FAkH& zB;;qGYCMR8#Ms$%3U%>LS5cnv)7hE4#L`r4YmEA?TAd9wMG^go4mD;|rd z&~E>iV)4f)skmELEYC7s(@+ODBvpYb;0&K@l(AlrCQ2(&NafyTRyKYlH)Yi*p)h2B z2Zgr7Pa437gj&cZ$jF)s>G=L@Tgv&+XV}KW{d*gbT+KFfWk=g@iB?Og!yNryHl0i6 z3j)uX16*KgdPCo2-XrQMD=T~7Ilsy!8DR>SA;mq9pQQ+#H_x%D_0Yqjx)+)kq`Nm} z*;w%`Fs~o#FS;mg8@y=5FlY8kgENTDSXV*?DZ1f;W&)E|omwkPyWJeM{+fkASbxe8W3uLIz?Mk{;-;ofjfZ z(_=L<;nO%dF#NMuj^zy^o@$A}*;XIUe7FsXR8TQ1DqchFK^0_P-voM%e5~y|LpRB% zED7_aq7L&S2Z(+n^NhhXZIk(wbNM!Hm4)G`PTplgxT)T0dpGWe8a{aUv5`wRkqdqW zNCdlNX3SeS?dCH_@?-a5kuCR_r{YkO_py5=ICyXn38M{n$F~#<)y*}%6qHN-p_zEK 
z%v~Vm)jJ_!E+>DAHDtFg47O!jSlpqgkebz86$zY6k~Jm`QC|h9!w4@ZbDh zho4>d{%l$5_{%p(0br=i^;37D_8iLkk^M4$M@?jtH7pFfe--~f&>74>(HTO&p)=h6 zj?M_1`&a0U4yr%U8NFowE7YgS>@=%UeY?HkdcmF4)uHKn0U=6Ti2nFgK~a1Sv6Pp; zGY_EGHrZ>vs!h{I%>m9|k1arr;6Ju(pe%D>wI%6ZU$S0agj8)I*Y(ht5C_bJ2TvSa zBj3_$h$(tBN4tOWZd}LF#`zIAcTIoMe#lS82 zZ>C~!q5Ve1`07y9Ah8luN=^FKSRC)$XsqIbk=!Zy1a_$vrDp4v0ts5`ivo2cwhs$^ zq!)BBqT*ysnWV>?BiPbeG{PaI){3UP#K@?N3+&xhN2!j3B>p?c)ZS;q`=v8}5Mq$v z6-5P+q61ra?0o<*ge6l{`t@#P5l1atj6WWnuPdh|JQOg|+DI0if&PAhA$fUu(q|C? z7RE{G{i+D1@Ueu3Cc-P4J2}UWmJPc80wh{BrIE6+v++0p9pbzr@x~fs{7Ng;`!-|! zh$KN(`xPDZe@C3YOz(djr|%@6`cKB`=axW|wAE-Qa4YDe6VWh}q6OiVXm z<=MpvSU&j0LOO6>l^vf85m>LEprbOO#UcbP`eZsXzxpeqPx?(!xL&7nZ({C zA8d9jtTLSFn1zPkt`J@OBaJ8;$4T9id3-h>JCeJY_<~MfI{MD(nj}!94CdOiBE~bj zW#U{OK;9A$st}~tArbv0$_|rN8*+jO<|IM4N2i%*1P8^MY$0rsrC>%*1+Plp^J;rz zP4W$B;nUvu)N|+J`xS0!`vJwn9Ol_Ri{2d6Kx+I>;Wg;<4%IILaYs6CkSL_r@q$*M z#w{#9U*PKSnaAEj!{L0PS3>s=dhnr+h@b|fK9+kW+2zB$k8jJPQNVX3g{Oii7=HB} zxeP3BbABbSk5>9>mKY;A?6(%O(l2M$^6a3cm}*}_KLW-07^=<84lPzy4ysTE-wy$k zlK!rH!?Qn8`%FXBO><*%%rT2l|4Xlgj04) z51BfhM++M2F@SR@cfB5NfxO`K(s>%2k~5ON?nY{8_&8E&l@O-Rievviv< zzLB`hZ9ZOd+ZLi#crGsw?Lb>@=Zs-WrzM!eVI^ho;XguO82lZ55ul?1@^=A!z<*sp z??7TTRy|qP+&1e+SjLe!b2uGg*p(`0{fL$zKbL{ISzLAM8g*u6U>@QR)y6Zsvbac| zr~oCT=1lqNKgM6I^DDimG5*hp7gl4&l(8UDH>LRR!ENh!=pG2g&p)<~YWc(`J>FP+{*GDh9oxMoEInl(FtD5% zW*v!{F2(Sz<=Dmq^LEl=1=6lV9;UeO7K4bLUDzdf%ZQ-nL|Bygo!WAyC0i6+Sn(Kb zktxc8d={AEj~M#pERW6qF1>uwe?>3PA5#6_ioBR@Mq;@Lf7{Tc-`}v%-;Yr$KZkqT zguxQU-6Lx`Y?P4f8W%~AaqLg`)^;u5f3WyGRc-pilUlJ(Hok1;|8>4OsSEoiHXg_xNgjMIFmN-9c2qZ%Z^f-3T2ITT45XyPYnm`Yi3T}KcY zYnU<*GW%muae*nwkWt7Zg&s)X#I@F9XTo0ryQ`BZYEQWzG4AC0X<$cEmw~`z2g2e( zPFY)P(s=x5cbRCvHs^xsi}aImv5Mcp7X5z%Tl@#wcv}!$!6zHh*mQFo?J_>i9v(jp z1HvAK(@8N}keG|v2z4dYqYIMvrql=Es#R zu*07;XFgKP%=h~RQ$?rT4Y2949&gRwQDRxW+y(Go1;mcCz&ootLXV5ko7cz#go&D_ zcSe#Jw<&U*qw|AwFNe548!SIi9^@N7fX{&01fIomeY=5LcUz41(Kc(;7>a08li z0|aQKCG7!|#Qm9h=2RC5x-f_usV;$}-e+EtT~WY;vbq2?{Kd#q8owjGti@2lK=xYV 
zd@uz=+$pjEH&9Kk?^V~GCIFP0~dCw2S+;Zl{5~Ds1MYpK3ul^%&=6MA3B?EqvoPdk0a(3Oc2jnJu z$Ul_YsHbyI2&=ozm*5wumjhU)jgq!kJ?ei%{D*+TdbejkM=M44)T3KgAVxz~UUKtp z?Xe*&Kk~1-_D8?pXYBu9BX?iF{Lzq_isJ#DKgB+n^~(0HB8m06+)sakqCeVRW%D zwKe_ylbON8*5+7qG#r-`trPQ60M6O{nSCn?$-Zj9jbq3Tr6w&NufBmVx8BTz%#)J> zT*~P%UqM0|5&n8sm>QmKVnX^gIVh0h2kB#8^;=TyQ;G%nz#40fqpiD4Kd-Zc1JB*z znGj~z&%kM#$>|$?inn>Vu&anAzb1Oycj{X-eU7HWCK*Oqb~4v!Qg7scJ%(waBgHd@ z+D3K6u3wfI9@;TdgfqYjleYPYnvBa#Knkr(fDwX+AF!H?L7T5sjzt>VB=^xs!P}dS z1nk(3{`)P}vvi&U7})S#yi(g|{LdqY(%(b`o1FtKMuO0Tk39$l;wyw>qM0V0q#wuW z?R2Rbf{K=j(&CPeI-8eu47e&RxCsd8Al{DOS)Jtj!Dl<+&z0U;wJ$1VL1uRY+)|yG zU1w=Oa1QJWT2AM*YM-f%K=K!MwglTh9U8B*Q`vf1-}?y++9ZSJ%lgBgxu<4S6zc~} znc2eYGuF52t#nx{h)A;N2`4>kv1=T=L1Qzw*amU>R_a%N+rTbK4zEz*k4ZD47Ym8F z1If+^CzHhEPFqLD0Wb*0<2h^|M>BowO3My}#$r&F`k{pWQW{mLy_mbtBF25-VnH+; zj7BI9$CWae=nYoa!aaDTUMZK^#^-tYGz64&%llkrDe4KQ~hjHR>V-^S!AV>6;f0n_BO+112lwwLWnQ zlrhL0j)Y_!v$o&@ls>h7ssbRJJT@kBdo=JQBvf-4}#}rDDn{Gs^h5gefaNJY= zA2w?NY$k|E?5sNtq#sf|3Cvvrt7JXX2E;zuIvYiP9%Ew@@`f`0^m!7=cyHAeH&Yfa z%{3=FEYzlshgm;dgjaLYpvzxVx1zxUgs1v7g@gC0R56TnE(@gV$B0i%MZ8)T57Cw=@0>f*KfrQN z*@mz_K*UVy1yWSWXjUS9YCcrpLZ!%dnCekmvHB>trOP2BKC;v2EQ=(_f*bPHni;_~ zRYu(U<``sEsr@wQ@!iqK_p!c&D9wE`D5|hwk4EQcC|>4!Uvn%CClwAkYbNxaE4y~K ziEfyd@07%l?2x8}cM4VfHddVPt|^RDmnt+G=2UdX2c{vzbedAgoD8wLcdB`O#yafG z7})ozmCL4!IQfGsf%*SK@NCT9W6bHzC5Tp#7E?~*=W)=f7vR^>3B^e8af!Uq&cgl@ zW+@KDtr8;CGV=LPQDMEyU&w+r##=v7U^hDDSKQu*#abuwza2TKY>T_F2!wL#k(do? zRta;xjWC!oR3>W2gpbMqXKLBuLy`7$rU3scd6sd)j&Fc! 
zYKywFu5-*0#|nyhCz8|mnm#zIPt`$M99Bxa10RVRms!|2SCIF7am&QYTZk}R^H0m$ z=gpu(kREi4)CyH#WYPWV9nk3XBnwgmqZn#Ngb!r6Et{jH{FY#8{1m9zpY^&+z4C>6 z?A2jy*D4de{hGg@s1O7wnxG~9a5>?Bo-eF@!1aFHaXZVe7r&SUNyosV3x!!<B=ztX|_Z6u(r=x473=Fu>n$f{r() zE|}l|KtBusfc(cz(Am_*#nR5)`R5HVPi@I=iv!7*al)VJT1PzAm^Z9tjHDqhveB_M zwD!#er&JytN!uGT(*gR&GyK?`9k%YTlKM7QW8YFXmh+=9uoqQkZ_5jl{FnM^+M1T+1~pD#|rQ zSwjKZr9|@?j<3Y^Ta1_0p&zklB-|)MlvF?iuv6_zHv+UGaGM#HJ^*P zb5MNNP=l#Q4E?MSO+4!xD6K-MnX~5oV@9&cmu6B=H>b3aeH6D|eWlA)j?yq10|kR_ zGD$RTKpYDDH@<8iQxkX^T<~|9AF<*<2ye^G{)Wtrrq57L100? zG;u4Mz4m|(Yp2M%<|(wWVv_xX4nq-MpYrY766Tsby|q^DhZSf;=w9Mt#@h`gBMrNy zGR5orLv1kkzB<{-rO!q>Vsc@|ba^5sG zCKG4Ue)5uTy)!P2DJ`T%2N{Qk!{ioGlO^c#TLVia?YAQ$260H*z%FRXjGM|>Kq+<| zSh6bO%x6;5zteHs4t>lqQTBF$#cxAOc1e;5>6wj%aTfQCKBCZvpanTv2Uvl>80YF_ zwp`n=g6)uS2)GX>tVH1+j8`r-7+3+{uO=2%_?mAYxu7t&RLEF`TdqU*?0xVaoD0Cf z<+534zIzYj%{pO+m|=rt0_9cs&TVlh_#MZwNiGqtYmH-cWI0?SiekhkN{cCnx=SS% zibn7?w|iQ})|=xRjnI$m$%32)n)^W-*z;Wov*;y5%l2aX{09&xCEwd+A(zZikbzGD z@iS=7*EZ^h1~v=Yw!v;S%#1GdW(h}0aJR10a<iV6iEb_PD)hSGOg{-pL6mb5rL zLqbhdch`s~85 z3aY0ZlK8&cOT^T}le@sn`E66^?FQJ_yv+y$k%P_Trp7DXL~>Y3sTF*fjBAQ(UA)2z zj#kGlg-?yzYob%k{h4`#f@YoMTyBb~DCzmA_~|ZsUCr_<6MV;blEyGalBZ+c*j_w_ z06Qez8Zr$iB`*MS5KNJ9@v=g&h~~`uIz7QaRd~s>kncoe`yqMXW4`%GSm8t83YWka zR`GXjfs5uj;BHL}4T?Wg1tGk&$*&@<_YOTn37prOf277GkJu8=@@66_rwLTJX+oBv zj(Ce!q!)&=Hg?CQg?6&0azcG0y4t~|La#7!QnT%p7?@c@G&vn0(0s@o~5qj63<@m(lmaSn)TXq4zyXbyFABJUP<=trLuo3~)}nC!vC; z6KQBUR+B+rN30WNgjhbxW1gChrZi`7Z7YLPu>*QU@d+`=A?cXN?p)lJ@jAC`RX4ZW zg&d#bsE3m%%TeufpL_D!(RrgV0b(}H*T6nZalfWGYPv8#B=bu4y|J@B+0 zxQi&<$}1Zto+`&x^$GXznL<7!=Aq6Rg+)2|bn}$?Q}q=RlGyfbatt!R@moRv*bhR8 z$LiO;W78d(liX%6pL=Qch)1io%n0u`j{6FESvVy5NG)I^Bq(~WO)|TVZG+FDBGSj} zU_`@|yfXT_iBTKc>eLIYw4K8e1D}Yl{6(SAmIWw_%6i5iuWj^bVdJvtmNXjL6ce@d zQ1C21$Rq63V1E5?6M}&0ml#FJrtj7;t{gG-weHNh`5^<_qavI*Yr+28*|=)(~(c#r{??=;rwpJ zyp;0kH?+HnkRejF{n2I?KetYaWW8eKsHCi2K(6>FO>Mz?#tv@77%p+>HijE$=SnK> zv&54=Y{_5?&v&tr3I)HxPGCc35gBYcC&M7#%C9@)Hkl|C&Wf&P{!+euc`qDXVaMj$ 
zOd~YxsUCV06ZhHw<WYl4DPn>gli$krbL9X>b02M( zP?Mv;W_V<^5||#sDq*9kR-vj(OcL&q?@XZE1cytx1usLRKIB32CXDF2dg>dYpDVq) z*r!C`vq$+hHfqg$FGS#TF!8Rgy3J~?K_2hO^pL`45-BW6OhTiHh5G06!lEzegW7&>aNqmX28V zzQvy>2>G6HwqFdlYR2_4=*K(kEt-)>U6P3Tt0@DWr1xZcWb9aTM)}%v2@?vUB)PfYCxs4?Quw>9=$ihuc~CLMxdGn>Ej`q z9wVfwi?kWsnYDRQsOBK5i;qU+``LO@FP*)Nq~Rpm=ZxkdAI-cMhF>J zaB{)@$3!KMj)(WHXz20l?g+&MJD^VIA-qgQ$rRIg?*7MnrrWnTo>m7tWfAp8)D(qk z7}ev7VC=W>Ai2t|8u4HVX0loD*k=>zgq4HTPGv|f>yxr@pjaEMU)fowCuCJ;*lt)@ z=fhKmx+83FaMKpP2cByyG0T9V58zd*2O$NBf0PCah>f%5py6@&Dtgq6;kS^)*sE(L&5c?j7VI&7SC zmio;o6*;SKM-|_3vS11zvZ0AA98c=n+*;CKywEG5dXN@IlFk;nEp0k{+6NZzSM*qeC93p3Kmtd2rK^5uvI%o zFi>rs6^UxU%{GSwtyv@~L_Rn_R>=s6Mad_oLf~?3#l)4~j5}&z8O29Qs9;*#TbdtiqdPw`SG$*>Hl!@AhT1+to{Kq z?~o39T>OepKRt4QCjBa~*xjjH`NxKg8T81_$gyL3)p4=LaUhFkm5z)k-R0cH%jxMw z(N_~+Dklz9;#aaBk_}6{tpV&PcEtiK28@z|w|8IOj?ihDuxv{m$Wb~DNheDrCASL$ zq(qV>$;}^lS5ituG4gxv+)nf*+d%O)08ao1yh}{HtvaI!SCcqW61+W+A!NmXt+`}K zK~YaOamCvsNBlI6#9D6UD~gyc9M`(ub_%nI*z@CC<^=4t{?t5G#5#u$5NWSRFKPbA zS)VS=~_--e1T?ZNsZbPNR^ zEZq9^rwlX38L44?UO;Hmo#1!%e>mk6@pVpg+@m_I}v9*Ght^uJt1d2wRg^V za)QoysQ9Cf-;CJ?m}QaEb%HB2=aY}2J}<0}uJ}cqZ`lcu-8(iCf;=}Zp>`0ud;8sO z*~#zuEuSnbMYX=w+-lxYqHUVtkiP0>NRvz{WZ8Q+rXM`5QKH!(m$#7;Krk-BBl(`1 zRoj(_Kn6C&x7{~bBkqxK+*#QU!U^MBuW^hcd(#Hrk0E2-eJHw5Y*9$b7j1)hq?@dT zto&Qp^1^c5)d$(t5}wf*L-7W74D+Pe;f2T-=QAR)c~$8XWrKVDWFHNbDCxd!pBG4q zB@*((vip?5k=<+bGE*A)en~M(WN+iBO5Vu-&<9ruM`z|?I581n6-t9MDWS@i15#=s zvZV$b1Ude;G8x)mo&!(V2o*EH(Z{Kd@BL$ZMYn${eSvWKbC`!riXH$?U|D5PCcrzf zdLV*+_X9Wk@@Lyy?AEL(7B6@Ls?V0vC^Idqaxu~?)-f18P%e>TrWNE-gL={gG2Mx6 z8yKzABPO*#US+X4MIW0zdpY?H%Sm9LXmd9HDMun=mLV;ePPk0WLN>lMQ$T0(PFYMG z$yK=)cbg&Ytb?|#8Tr(%s>DaNeiok`$sW#1*vx~PV@sw3t^R7CU1WU6So4~6YuS-A zZAmZ97aGlPDJTtUTEf=?-{3fFo>KEE=hg~ z59v`}!NiVE$9~k>zJ1QsJzTWlSw1)Yieq<=(p^%Fh-mhL;^GF+*;Z@KRWhd6FGIc2 z7I-+CduTVBdwO=hS-Cl+VOC30fZK&5!SRtj+UAX^4q0)|arF+e+x%tGlQfqpUDSGf z`pyqhROA#a46*#cAjKgALlqnitYSdV*@R_R@LIw2d-zWE!a!tt5FDlwPcg51%9i2j z_ll#eu(_$`7-H<>bP-mW_&zZAHU*g7E+NT~u~uX3(JoM_Yr_LFfWSsf#c21;T 
zl+>J{8RhH9AW7v0d<@#%9$7XJ)cgxpT{K;h`|WrRq!VI6Z|W4G%FveI#4EmQN>U_- zuvG~a#6>3o9)V>_S2W}sD`Hw5D2m=RR*`u3`+lGWG3al_W}I!$;OhV+4hM&xHvs@#tbjJZNEi$%Y7Y5C1eto_%`tQav6 zm}nvfu+jUUu+Wu!B4DV0|118jrk|U`MMTRZMbRIBRjzwg+ahw%GU|uuKRfL?;P)!6 zuA1Ay-Z{J?f`2VOQ|?sz=#ROP8!!Lv2on*}rhO8Tq@X(b7Oj4(#w*U+K1@*e{@H8c z2YeQV@49eO$(hi+bpgO{o)aZqnfo~Pa#TWL8{cj8+Kf!YmB7t(&5Vo-j=+sv_h{s? z0L_;Rv|HD*0J&}N&Drp5qyty4E_Vy^8}TMuR!-0jfcwst*t()Y?3LNI;;MsPGA4Kn_s8&%LREOuuQS+pDus}|HuCS@RI&V<-b*NAQ$rH zqqv|fO5zU4%GIV_a_XMVeGhbJA+|cp%TVt7hSDs>c07T~AI}MfON4{dOM715zM)Rw z>Z^YmrtDG^Y~8Rm$^2sOXUQ0TjHsE?06r7tuG*<@nx>;$Rqt>AQm64_N$=LXd@old zkEOL_l{R;#GPPM>_l;^NgRI3?d;gjLLP7Ufpz}&vl^0K9E-R*T-=#jUYnBSvcrV=? zDXiC?2SJ7HidE*Ew)$|n33a13YW{Ix?nU#3s(8;x_wJ$}-z;u}wPMTkO#hu~%fhV0 zN}i>TRncpcEGfK4gWtS)scC)%v$8dPujW(U%2W#VPZ zpIfsdN1{rY&AhVoW#y`~7&2@HbmC)6FT>Zo(uzGXG8dLAW^#Qz6}M{^^Q@rKEJx@Y zYbrT7XuK^S-g?8vjwABBjy_x}eqMUw>_vLrs@5E_8L8I9zvU1&yOCU~YI0s*cPvrz zx4?jMy{>pUdu%b9vri4lN&;qn%_X(J4yq!L-L&3DH*tfWE7rV1kvf)$Z`Ns*-Qa{h zGTv+m{L2Rnk25RkRuBLH3j+9%@y7#(yQz`#UtY_n$B%wqB0?HCOSr|SJH=g^q>w6i zRr^$uKyw2m#lG+25`*XmMWKAU#($1!l0`hy`^I$s%FOEF;m3`ek>2GQ)L`;JVhpYE zL1{sF$giWF-%eu_tPx1rP^f|RxUKQMzh1_mKlT@VQ#Va1Qr*FnW8(ToHl{%DyFrt_ z0azjitxGUeYKyQ{)03+W0YqvrRK2=ZkaWwp8g^W!Q&O}t8uE46Pq`qtJidp5sL17& zs$1e(Psm!~avTT{9`X-ZD_3ypx1WZxP6frFLkLQ%vQ2_RdC-Qg%4#DzK-8`PBfMQ& zU7iHStV~Kua@EqrPv+zqVzosW12S;$SQK zywz~=sSFco%$(>W$LI2+e&uqZNayvJ`Cwa2oX7KED6>eC$TeQ8)JSe+T)`f8w z!57MyQRUOuP>al2S=KC4S$oO5T`o6HCVoTG2McuWYys4?>l*VvtWb zH088Yd6t;gK+%L_DEn)ejqv)3`!To_goH_|7c`tZ16zDw2U}WrE&jlUtitg&}TwmOD1!M+q6yTG@-uW&{I?voa{W zQ}D`6nwF$t0m8MP$(cJjNAYKBh(A|0t=kb6V#zB+F*ho>s5%aJShd8+G*|I+$N&^+ zwXgaKEKca_)Y590Q(3IPm{Id&lIh?xOS(bErsLSU?uaSKbmqSQxC4zqO1%nrU=s9# zOjc;X?N7dmz*5U}^5BkO<#zJ${QA&3nhvlz81_OUf9nzXLu*q)*2Y)Lq=zGc(9!z< zUy|pwL2WTQ0f4f6^TtjGt?LE%@J_&R(2e+xJX`dYO5{Wb8Jafhy5GSphp-Gj4qkDw z27I?s1ntdX@+Nb~Kt~Q9KlYfyv3wpobE2n$JIU$hCkckz5S7DN?3q!v<;h1T#S47e z*5ym~Jlxi=5MKRQh2taShq2(dP(v<>kxyhd>M)g0{Mdx3$EY}58%zgWY!09Iy4bhI 
zx>Lf{8B%U2aHd7Q5ujzp{@>iL?fa$?eR49I`~uCbH@5Vydh=uZl2oA3B_B5 zzV#_RQF{2nH;N@-7mFeHjxdp8tGk{P;A+mn0dquWXwSsiG0ezHM` zB7if6)vz;*U>$L+WSqBfEsT?U=QtDS&-S8>`lE~CB) zC2rz;2XA=u$qw!b0W~-S-8sd#mWrMJ5dyp8l_!+T2G5zVV^;ip z!JKtHC49cYDX6DxR-kpGQk7`sRC4&)8|bSCYYXrW^YwSLkk(x_&QnLhWHJ=HI{TMP zgFO5BdkwCc_2ABj?aHD_O2b*YF*Rw!d8|iD^5Q;?J#B9|G7F0e#gdV!zi3CN#gkDJ zXN}tBKICTe(Vg4(>1EfNKXZlu>sM?rhv<=>!1gx;Y@$eiZK8$_4nMoZ|Jy=;|7NBu zjmY&dA+?cSAS#}IxBP-wQjSa`gy*4RWig&uD@lj9RHG~MnkfrL9E!_*GUheFirOXT0>n} zMw7hr`<-Ji1RFqCxwDoeGPU|kUS4B&BC{uVn5-74K@X`QWCInVUY{|o$^6$`mb~@Y z^$HSOJr%tso^w&!sxt%Hpni7Gz5EG~4?h4^4Uwq9_Zs@hYuX;J&F$8;_gW9|2sWpx zWsPzYu@3Ah@*%-cr1L1BAh`G0uK|1$h!-vG@7-jB`tDLX1313w-myG*a18JBn%+jn zT)YgO%LA|b?@1IOp!C4O*&n%td&E$>4!{OG2OP4Y{<~gNTNrphH??!oSMhW(b=Lja zf)%9y?tmZ$r|}GECNgfB8dzX1^`OfFego z7_}q-;OFnpbjm*$cHCc{Y)wrp4H+$M4b4rN8651)|9LL|%*g!HMMus5AV2e;F8-O% z@~4ZQ=3g%Un%(kG^M3}i|1^j0_+|dLu=YRg{}~(j)Bd*ikLv#|Qt(gXe~$bAG*+Md zW&Dq)fPZ@U=V06T8w)oc4sAK|2M>o0zzTpF zS>oaF^8;0#ZNNZLLxdp&W(HI=Gy~fK*pXi=17!`Z&FD?Q%wpmU0H6v2Zf}fGg@eJ$ zFc<>B!F4@E&Jb=TZEXrevblN-HUS`qfdvGBa$tm^36dv(os;u=q`C#z5e!#^gH6G3 zu(k2kB%Ieg1O)JKz}6<$(p=5^7qPCz0gA%xtq}lr;ByNTJ8b~R6+ZySl?uwbfM3b4 zzWEzDQYGYsNb;-kKxMEU%pQ)M9x1T|%o=g^!Up)Mj08J?_j-^72Y~IGJy3!Zz;-ZFx1#t0lv2d^hcsSWuc-fKPiXxW+ zxkz?^pT?4e!|ZL4B44}4^#`B|P!(=yZD(_}n8waPQ5B#V*ulaWtSl*lluy;r$PPK= zr<43#a616&ueP~j{JU)=EFcIl94G-nE}R(H7-j+n%7U%U5as|*P7anXd{?9M6<53p;h-kv~%A0)s0(K}H)2KySeGX?9)fkl=XqtfWwQX?vYckT*Jm z9NsKBu@?9-K{}+}C+e87LrqRE;)et&{!xTyxGwW!(lY|tAgjG<qKCr(#@^A9Ol6@X7U93Y^#gBx}bXWVGN2waCcNQcW6!}@XjVQHxNu3mbH^9my2!6aX4?=VQw0!Iqc5bG&=$JafyR%Tj5?^;) z=H=(add$Vc1=;BE%#G(Z()4!6QKvuX8i!(Rx0(iQZ$BK1O-+_;B1G|+t_l`+9wo$n zyme2jmEcp-;CNZk6xtj)U#gy%m=nEMrQsc`R1JN~RVL-bK9TpRP{NgdO(~)h2{ad6 zpT{rD(l$YhS8k0yag&p+o%-DEX-c}ywcaF5!aE{15aBVOFL|tD zS`$9RFo9hI=v+v`oRqj;X(@!^C^fI5`KK^q|lF=E1c4jW9;}@Re8e!kAKpA0!#8xCj&f3P_hn* ziBosl(~`<}gYEkt<)E&VQtefd79Y^C_3Z(k{IV0uB;G+$dO-rjYNg#v&d#p>(T9-g zPfwJEWN0J@zGFt%qY&XTa?@3Ab9ESK{ 
ze)0*E`Q8!Mvhpw+N1e!_P3@tati$wU{Dv?FMrv|?-Q_MdJ%B`I{npOM0vo$M>a^|A z?B`I&s&MGd%py+jO*==vvrp#|DN)0!>j!2x-p%cJsu_DOXI?Z>SnESuO)$A@G3we1 z4s|i9EHJ55o&Ds}8B=`ReY=cwFfk5l9B>^28{&d+*em^nmuse%J*?wI*KzF}W4B(c>* z$52DT@gkOu%g&u&jQzHjWq(^{rOqt?eiZ{O6Or_0@rSegg|Ea|3-QAaqjb_U&PJVR zl?dH=bt&80-ZO*Jjj_9_sy-bprZL+^W)8_D17qbP%dEck8ata*M8{ZE3@)R|n3b_H zFEo4d^cDE;?au+q?=Gos;4{S{+LWT=2@E`J@A(pJ+ZaZj_*J&F>a$K}7n0DD)^ZY$zlyE2 ztz?O#Z91EqW$`*!K5OLEWS~mj$nEQYD4#;&E@Sib9eajj{>DphGN1~HU$P@@`Xk@I zJKbp`D~9{YuQrEQlXq0)G)$&xLk#*wvsXSFXntkB=cC>Fj5Kvupa&~P3q@dVfyKw@ zw&1vs+O18~0eQAfY{5+f#ULDmCXx_|R8_K&+XAHd)z3q2VxP)_lH4@7+SEYSolo}V z!JhoSR7w&`*nA1zyrCG^ z=zX_jG=l5aQG**%FAxRolgrt&4D8lqe8ruqXELDYL;m`e0fVjW_jZI`)Wm4giQnVn zqD-7~k@HM(VHtV!QunHB-gXN#%3^NYIs$7P`j1+@gZc$dD&!>`Zk|tOeCtBLA%k1z zUOp;xS{CmQEvUFGc``9718t9C>>#np6QH^g=F8}LF1xj1^+Uos11mp3I-oqF{o!<# z^)t6BztPV#J&>T72HDsL)*psOTkuM7f>~FUOZ+h$Xr~HSHQ0$}#Rmz?fk+*j&`etKGN8sOhjEO%gSF8%M?j;a6(K1Hc?oxQiPp8%Hd)OX=wK;r z6s3jOcL#2(5n?B5ZK{`V(&ookG|0rhhq4%HaMJn>hH!H*h1FD9A+{A>q%({@uVHW( z0%r_KyY>>RY5GnTlUS$hGtn=B1}H!Z zo0?ZxIXRrwOfiaFZG*Hf9LXP3fH77%)t!hI_7IM$%S~U`v;YE|anp^&?x53V28AJ^ zTTVk!Pa=u&#$TcByj{&)J!AZk@KQj3wkTnB?13zHkY4deM@Ayd(R+LjD$Y!|N;d4c zqdS}itcqq-Q;T#9T@fgx-GSjQZCWA8HX`OiyRRGkGvw1=g?@TFvovyYzl4ITquP}k zLW((j zD8{h=!O6#6ru!Kb>kZF9>B@(jwl8Pb2p%46g6C}uUs?tSpS6Ymkm{A}ek28PUHxwR zmf81ReHb}p&{hy8IP*G3W~u@Gcrs^S=)gUZ`k1}-@m-3& zZQp#6rF~TOWqPT2kw@p%t%6LG<05e)?EPs6wqmYXqVr5PmOFsQ_2%(H0c|&iI)?#P1!zG0Y5O7ZCwJ$6j6Czy{v z6ot^nyeu@5D|M2QVk<^5TYeUcJO2GHTS3k#>YCT)x-KdscOw5TS<3gcrPac1EOW~t zf}9Q|hMVtCBQ6$kh6u3?b$Ei2nfOc@h8_ACZ`fb$AHDAKZ~*)!TJ zg?D}sBuJ_r!IP)g9P1ftL0x=Z=W+VQ=Q4^%^K(#4PeiPZU}J!PK-$6VhudUxOJ8j4 z2Uu^PAyNzCKt^lPuxK;a&3hs8SJrVn_<2l1SGIbZlLj7yWH}De;a!ebmn%i zpBHn#2*r`{9t-BW)HKjc+B{zNvNE5kYun|mURU1kmQQHBuhH;x3I&JQu~LY;v?b;1J_Dyh%-h|HzNo}yRLRHahj!=L zzJlN6mwA{l_&J1A>Xna1<|mSB2CEzbhik@uaK+;J+l*i`J82e{te-U6c{MVb+Bu@0@|+ZG1;dl@t?lCHv6Iz!HIe5cO-Wsh zFV^=^>@~>Y{*NVh8}iB*PCTkQRMV>pC7*qrQ!U5AEGSi)-@Ygv=L|}F8}p_+b{T(H 
z7@fGYzH(+(AVQwP;36C13vf?#?rw@zG5<=&q>pm4p`sVPP;)r1ie-?4QKBNa1(#^o zqK$f4@%Z*d+%W6vJ;^zi83n#qH-J_8)DXdaT{6v&a{I&U{`@FV$~cnP(k>#NVF!<; z!K2nAgkMvq!u^f;_$ImUQ(OgGtFJ6&*eQ|{B^6Uyeb!gR1;`3zBJ<}Ky*|AbC0X66 zB)mDNiF*3Bk!0f5@Rw4QmIF=S3}Mk9Wx+#wDVsLzpC+YLq6_cGKkzReBL4Q}QyE6` zjJ$^>tJMo4k;d8nkD>&QbbPtu(IdeRNmhaZy+fN#ey7TX#WWnDP{8&lw# z88(V-n_=o--meJ3UHi7p6d&~MEDb}vL-%-SHm7YrZJf^+$yb18t_<`T31 z}aub9(5-LPuRo9r=pHSfy<=@2plk+vWcP;Bb%7=uW z%#)JHHrJlymksPe7z!$$!UGY#sfT8M0lIt-d*GL>Q7ZPT?ng9thvhrT> zMnZ8{ERX;hm>sG0HT3u^igtzV{YC3P07#&S98ewxhZ;hF#)bg4Ux}`e!2gy=RQRt% zSGeI{h^~$OFWiCsFYX}mr#t+|j=**eiv9~vU`MXnAD(cHQ2wgkHR<0({THtAt8D*o zIRV=*PQcB{@v9SX{wGer%JG{MuyeBhgA-gszJF2pZ|MEMvkk|ewh@L~7((P!|8t}K z#LWMBdm=Z_A2zw(#(xpSkOdoF#V7y=$8VdHlk?ZjiKIr_1?gSvtgJ}8u=D)2*>K$#_?z4PtANS*rxlO^ zN%CrkAo~y&V7NTkQBmdp#3tAN|I0>UV`KX@X0rWhmFq^pU#xOP`!|vQ$twRWh{jbb11JID1PcG&6cIK?Sir22Ee|zi>8ro=<_Lt1-BTbCZfIv?1ct+% znH>xv5U?|gp^H5n+4?fFvp|6DSggSaprSLv9A?cd1~xUchai~M!En1PUJO@_o?kmC ze+|gjJ)eK420Qze8eBZwNXf5K6ALRB0L03MeE2`n!_m=^fkmb{rJ4pQ&2}}%;#SAj?BTKlS30@VZW^HlR@k3TO*R9E4r@tO^ z?c&-1_TT!uf9~V!C%%8k|hY)I7cQ%5buA9%E-dzztr#7E%d)B z`mfR_=ufXhrY5OB3ruDaWGwoR3mW$8TI!eN$<6(1?)>w-!FlZhe=+eto{#>?F#jar zzqiqUeBR&y{h2x0xOv#Fz2Udi$^Flz=1(*Jao*tlz0?G8{JqrV{M&is`cA>`R{DL1 z{hB+uK`bETWe{ZQ+lm<( z=>D2%jFCrDxZ%$f^fO>HBV!)3&2^gnhuF>a7rXv9*#}t_AQj}~Vqrrb$aq0lk^85* z|2Jz0`iqADU3U6uLk>0;5HEm}6~uzPI`UKD|B;FoKh5zEISCGifRU&H@*)y4xcog6 zasNf=Uo+7^IUXk~(k{qj^Ody7TO%yUOE7H67w-Rw%vZIJjfJB*7!0vvfx*pyHgMS0 zr5)gJ-eio-CvXd+-#v=@x9Z^UDqP=P`5jQXzUTA%-nkCtzug7;BgkFf2>MyC19;eZ zfxn>zY`ok+Q5XaUSFtfP1|u`+Z})6~A{GccMKD|x2DO1%U!@KnpfvK{4%`NImF@wi zh7da>HuEnp*RXSOa`Rp-^6$`(Q{qMvu42^K%%!8L*~V#X9P4L!%a;it`^TqdUveLs z=k5j*6JEAUaK?iy%LB+xV>oRQ>GXl>J3EwjR`u+`+fBoZ=1Z(W| z*sg!on^0*zUbLEw)*XT)EKWK@c1`0s+ci;|vM*7;>M;{?r3j3l_?$$-VCgWJ}p^Hesd{UYkU0jd@18uKz;6og#=(ZSSN!w7`avmXz| zQwYtXG*Hx)r9Ua1O!3}pZc(8d-?r_uJk;v+xKimAu_RD+?eOak^+Y^6L8|$K%xK6d zc_yF3^p0m_nR_MBWIQ8Cp%w;8(@M{3P2R+?rlQgcbp=&$e)BA>=60!0kuyt4_(=r_ 
zyZ@O40zp{>8Kjcx>k}2BE;I?<(6#H|^Eb_CT8Y7<*czs7qKl-}veM;YYo-zB%BfxH zED695`nXHIb=t-ObDJsD({e8pSJx4HL^$KV`K=^Cuo3oZ<2!3xGV$pr;MP2B@`q&L zv`ADfy9FuL{>N60NXD6qg}IUo*TWy9lHBXedzBuP=Rt-pw#*eOnSKua?Ri!=k<#k5T>G z0WVMFBKeg*cO5sq|0X@iFxJ!a!J`=JQq;wN?)ix5Ca%w}hk*=Vlu~!c1R=FzJLaW6 zqM2l@XPp!AwWukHoYDv+$m79n332XgI>wKeYkIECiyc!BFYBWC-ds#^&wj)APN~IS zH{@&eWRJ2@3Mq$(wvHVq<=c=FxvzfRVF&82y46vzOLnFDC-%AD$8Ze76vKBa09}Oz zF2w@#Oee7;7TiX97Bxpiu6My~lWVh~eeW3@4u^BQ(KaMA3T|ViTDkayY&}i>I;`V# z!n{AueQEoyFz4vy%hVo_;}+SQE`qy;gUR|F)Z2%=$Lbqc9C7bA3F!{syM3lf5tj<8 z`SIRF^6dl#LDIVV8w$U~L|`D>t%kv+H$ z&^!YUky?P2@v${SaHf>fB1&i+_%m=w5!VVR4vdU8Gjh`OuZRn0M%UK|zt5NB?Rt;y zuiZE_x%r*pmViZ{@_Y0T@*ZfqpP%pXn{K-)jgBrdU7S68XZpl^YoqFX#1(ZL+9HG9 z+g&~Qv;?oV<0bQpnuNS=&BOb5&2mKm`lXB%Eor69}@WJ_MZB!4d)b}n0?+p+re>gD6DNpjWcCx#{ zAWFNr+6b|FiqP#PzF*SY0E_3Pe^e|j`P^3JWQ9$=1mCjNt?@9HD2@PE3t?IAer+d+{qaEZw=&rCfEkkAb)5+M=3UawbF}EO*_S zGWVQvpWD?%<(7|bDyRXb{BEdq-0rFXCZP z?X40{?X)xXp{J{c>A2b;uZS!r7tG>M7}0aSc<99GA{GB!OrJGe!lz zSEvHd)Z*0jI$5qW^^@CpKG9VdH*4O&!G?WTD{ zX|C35ZJ{pCF3$K))E^*N^D(nW_ea?3MW-yAWp-CZ?V(r)IJbetu}GtlR+6CaHHrKu z?gSB#{#ZZl_q(f^Fy95a=&wJ1}PlU*#f?mc`-@+0t$8X1-L$pPrZ(JZgjVB`%%CTb86iRNt=($iQ| zt%n60moF)1*4=#4JwJgiB19To!YkxixJuiAF;1RU2y)(1eQA&4!T(l$a&)!I`M?u4 zVJc;JQ^u%MuYdoz2S4tC%duVD-lMR}iux!z9lC8o6{ZNI-ZpEWM_($!dVRmUbnSOI z;yy`RI5$iLwfGagh1!{ft6Fh6D5idmp1CkMHFqdta5|6#Ik(2Ot(exGz%J$+^(mn^ zS})r*SiC|_shi3K6rP-%ayQE^?KqNGh2m}dC4>i^>d6YNY1^!YP{|GCboO#S$I zt}~8994q8Z$EBV|AV!bD*_FKBJAFKgKEqZW-y<;z+Q_KY;TPKwDoJ9op>HMMY2*vY zL_NV?iPs~bq|6|3c&B4^P!?rEaR>N-U;7nH^n1#I-usjstOxkFi5}~E_jnw}1>vbUe-SOP%w@-P7pmhtb_w?DA~?Y)Xd~^@*c~drKr}RzBzl;^4hj%*UIEW zmY4z#aR|*od=+~p5uU`$z%rJUDwYCC?oC1cv&`(MwDcxl>K~xl*tI^s9eaz_v4BR& z)v$}*q&Lf=jBh#f3Izr5u^aUZ6}U=<$VWZuu9Rp z!W{b&J1nXicB7Z_zD|!5GwTSWB5M*{^?G_8Va=clm!r~-gMJI{(c;;-1P_5R#H!Sc z^Mw7wR3-^+$~#x7_Sx#b^$BxU?P`0qKvFd0IA|I`hq8g&q$JmKK3 zS+syKIxY9<|NK?C8Mg6SzBKPWp-Pv#ZI00xO(`z~v+;Si?y)B4VW~`@zNzKqNr71v zcQF4)Rn8Sf98iS<-}yp!4IAV7u<@vF+Tcjag~g3SV?%t|qt96*7mMi>Nd;9j8PAqC z^#wnjSB+ 
zs60DaTC(6yKf6WG)@@Waf{KcegS}{U&rp}DQ`XQAJLBFTugdJmN**ovg$AzoVP(;Jpw{chmE{AGi1O^6f{qPK=>{K6EO_o2;zm$7F9Tdat z6_Pp;wZ%0V$|Ayta!XEYZ9{y!E{@B!#8OC!-fIwa^U=YmUnQeJoXbAfh-bRujST25 z#Z;>;ug1&sq;jjVLuaA3+a^y|5DHoDqn8r<38`0HQSsJIGUDZS3Y2n2kw24@GMD0I zC?5wht1FgHIZ}uzCl2L?FH=1)gs29^Gd6J$klUv@YahNaD>xswPtm6e4oD&f&1+Fs z;*)J>MGoS*)=&6TjWFBYpSIz02t?a>Uk0mLtJaY+>)jw3?U$Ibs>))~+i==`<|um$ zhmxFI#1jSe20GTAJ<(Zj-({+lA%9dNb1BiSx^suqYCHCGiC8%dWCc4%d*?23j%Kgl zu-2i+ng5uH_fIkaI=rgXH28DKd7e~baW@{Q8jrOkBg5PDN8Qei&78iJ^L*ojjf4I5 zheAImLM7VbKh((@k=ge=E$)big0en$F>;SDfIb$L;B}eSr1aK10@qRtzkM5ZJKb<_ zg+dF0z1;ILuF8J{o=2bdU&qJCzn|~m^Efl6xuw&1s;i4;ZOAeKm5~}w*$*ZZU3B)|t6)i%Z7*J4HDbfg^@tuyjAA`s6-95{M?MzMW z?Ch+pp4eAmn-K=ZaCqH#GWC>?Z*d~EYH53;)G>IG3s9Vp zz#w1K;k#zga+h&-APCs$`TaY*qdi#aL79X&7bhnV#qAn-MhXV5D`lDsK{dsAR1)K5 z*!#~iJ(5x)1_#BeC~O_7VY2E+`#*2gm~l(jTN^Fi;vRC1bg zN}yL@AkK0;mukgBammU`!`_dL*sbQ`gm;dpr|3sUMpR4FlG8sZKIlvtzgEWJW2)e* zSEesHx5q|DAK}@Y9zesOek~6q6r-VPYHH0fpv;=^1Bvt3PgRe?ac3P(uSLHjVll-5$WI!(y!=dl`6-HdnbZ81B!VyE>%CZ`CDHSpZOQb(#E8^4kD9;*_9qp zLa(}9YDsYN0mj75t6?qVVNf5!Fevh^5EY#9D+u`TFiV?VqAgSSei^F$oi(~)O|5gR z5~$6M5wCCbSwm19tgIMypytXu)^Ke?u` z0w0dqVBDB}aAS=J4Eu!ueY_tehW3|-_tDEdD9=|m(Bvc%9uAbjE;1>rw;n9A>9`A3 zEvIhC{bcJC+T2MdI$@vI0LCSBLEhec`HuC(>bB9_H3v2KUsAKYKx`k8)q?1W>_3O8w@Kq zJB~CiDBG#l?wjP#m;Nw7Z=!UMyyaua4axSedU~hsUY0d!L2xnFmRq3$o5ib=O=~Su zmE&i<50}2Z7OQn?(>>mv>DQ*3N&FzSUU$oX$)a$zX0+qKv4H zB)OT!BS0<3z&h8E;xFl;E?tT|WO~1&J8revZ1Q`2d+8ZDtd2CpVTp-9f^c-32rTHj z_nokwUU(t_Fm>}WVCvRK|NEzmjKc(E`_WVkr(fbxBcRENa*?*vaI>~0RVg-gaM64a;?1hN&f|tJNDO-sg~{>CQ;FEz6y-P_mhG1efVs4d(DL0R`*D^^Qc>(2Vm# zQ!bn37;YYDIB$-9c%vauKBc{p=aQjNlBm}xLoa3B;JW^u+dcN>xz3wI%NSE{wXvD9 zhuO$~2tnVityVZu1Efd}@oU_xgz#D9tywf0&C#Y{YmE1VsJ7!m!qXk_X&7f+X86mZO)d`ZeQPJZm{JHl>-4;+5| znzB+04C|^7D|Sf)&%LLZ zRT$#Gb*?EA2+WLf&fJe1kJ?mVFd-*arb}UAWoq~K_Lh}Ri$WgPWtRpswqv_1UW>i= zrdCLh#V?rU&W>#j;l=<;?>puoSi35c{oEs=qBQibgeFtpS^*P&8n55)Jm>mA6aAeK z+kL_J10M|&)w8bFL5$=$C)(#FA4RDm{g1e^!X%fhdo2wt=f>W&#*^KC_IS6a(^QG5 
zb%doZljv~_FrMZOKw)LmteS}7-tmx#t@X!$nICTqJcr5e^JskF7C@>*L&d;!GVW;Z;3v2XS0(z>c} zBdwn((cVKCzhNUJltf!xJg@MBl)9}ao6C4lHaw{|lXhx4;I65<0hgM3MWi4B^+JhU zvML%=J5%_RtOPkjKE}7}FJ56VpUpP|pc4{|^@8jwtT(Zrjf|+PxjdRGAqImcpKhmX zb}Ctg%1Mz&o9AsnCZ!{w3|k0dl5i$k5x-^IAdKigtx&%k2-Pq(5LuX>yOIllXw{S_T6ZHyU4FfK* zQey9Xv#}SFTQi@98$E%~l26SdDEL{%Q(14BObxVRO-NL5+2--IS=jy%VNQDzvFa1d z|2>~jZ#qHulei`wtUIk=+0-kG2TG`SlE%iV&`A>!z;eFRuW!m6_<@0aM3}00l&EW( zL+gN3y?IzC+L0d~IP%n(#QDGo_0gMeGoT}EXnBa+Q$}BXOlmufxE_39@rxLW!Z|wo zM`_3qTwfBajUCkmdYN}S{5>>j*^VpN1nXhB@*Ad63`z2Z8a$6?mAE0Yq=cN$TEXqP zUq5Bw=Fh9s@F6nqWj&@-_)(4hyw#r1dC{cMHQGRv)4DsdyWXTK-S&B<)tR7Iv6dCq zO7IK0ANNXvgS;be1zl)VEo|K5-%rSD>t-{Hv($@`7 zQ-UI9I?ZdR#3O0PMtaUPf-3Q(b6Ct~{Eo_s4q@Q(>3Ul>Nzn@DsXRvFO);1EU%t$o zTwIhX#f22cxI`2)b%@gv&O`ZhzfMoUkIwUzcE5tloM3w0t6r78Q}+AK(M4}J@WjiM zWFyrWiJpHmS;OR76ss*{T-o8A)C-mbjg276sV+S^>q?m!7|0*2Dt_h+;&%0*oi6&YgT;e^HuM_xrry$SVj&g0B5D1kfArAbnAU|8=3 zGihEoh*zv7|GaNTNxhkNO5geO)WPgUSrT~Wg%=>$urkG}RlY(qcG9UbLAin`8AjBG zFSA;%(HE!`4c)R!8G0i%71sDU{^@i<$3@rHteJ`S+2U8KPw&UVlT-`#ox)X%RI0L- z88S?_IUx@V0G02)1*w>8jW&@fQ!|E*ij3$wN}9S{d`c2|maP17 z=d9ks8U_`O^ooThXTJOXj%l~){o8I|dS5{iCwa^M`#fF4n;4er-z0gtv5%CYCO2Nh zXWm11k`LdKGTI9~!|n~Feg}yx)1;dkpU^Q2jnNv+7oQtt%sCbIfF+nG>RIiR^?0A> zd#_6(A-)oDK?41V2$PdiQQ!}89BXaeof=X*lQ7+~`SY-$Tk)bou-3J>4xYp}rTF6V z*u`2S61jaSqg&=UGqwsiW-mVmwcOL@L{-ZhWtkCIwvUYufy}q%tJx@>1n0xs#>CXA zPvb}<8yYr_JVb}&$EIJ=z3|#TOgyh%_-g5!{JDyVtj!2N{v<%r7ef80r$oZ(iN9>D z^k+jVvJb;F`}8IWHZ*Dc%i}UM7(UA>kO*QrYlqJrNImwRX1#ANNlecDzUa(C^CgV9 z#+kuvd$U%fH1ml|`r0?Hw|-=Ukx;gY5AHKZZmy28ReD{W6-gTS>eCk6b7UiCC35eS z)j1!vzy-=a6Divcs+W01L)kRnT!3rWR=(2{ON$$iOL#%ZC2rM0R3?g_2#nwBH!ayW zf6iiG!2ozFf6A6qS^Rbb+{B2k5bTmG=lZ5tW_M0?NU)@LSnMS#GnBJt#g|!UoO( za7!PL+WIIeL(}9-?VoD3i4-^b9)^7DH867orYa}SzyoA9ttOWCCvSfe7YFigohGt4 zliEYc+K+B z@KeFNv;pXgVsu~CJ_vkeP`1E^9M7WArKPLYK_Xv#89;+2uo|6%LUAxHeKWNb@av!= zX7prv1;U+^fMdoISc{PJsaD0HM%pXCH75HX48`+S?N$uVI=HQ-2ND&lZW95I72WB@ zwJQvFp~12empBSc>vN%(BZIh)vA*xMrk# z;DmJb#8imQjxM6cuub51vG 
z&i%+1q$ASyVF$^(K_(=piS^FtQ>qMvtJ?Xf#TIHUy9tS(fzJ0KUtOn=>7M?^%dU!s ziK(Tr@>PU$g9|H^Z-S<;1@i8{KNm>E$D4vs4?lE*&k)zg)K6{G+x-{ZLG-RAb0Mp(oRIUkH zzm!IzMJ=zIY$G^Lb~`qtm0jQwd=d1Ya*o8}j}DaU%*>i|_Z((AG8X_37(!;w#FEFg zU?)c56x6MfEl0f~-KEhAklwor5ta<3@Iu>F|F*VDwaTUS6q*si*OKh7;2o=-3?N^T z7$BB({@{;z^UBe$^-zM$O%_=l0Y{}9mjQLM!4LWuZsY8COOY@zW*4ut4TMfl}@@3zOhc*ZDHcA_qyr zG2-19yC0^AB;Un89sNFp-%;?t8rt^m6SH_9d+cCcqilsjoHKdkM;V&+6wX!$VH43)#Hh2p^{c~ zB`*{UnvM07a_10sg-z`P9r`^6m}aI%dm=OM_{k_9&~;Wc1vEfJErVSovLT{wuWDdI zU#Ey^FAX1@#LzwO%&b<@LHaQ1dpb5;%*Lsn%KDAk%w(O0j<1Im77TIC=+BSs%ydh~4fQcC8I)M;To=e_vdp)I}PU zANQ1d%_)3K!E0Z(qPQ{McWW-qe%f_vhe5fRI@|9vpWIq;G0dokzsy@OZ}9oV`x9{1 zHgAjvl}X$pb!3fRTnlfBDE%Q|ic`P+ea(8WZrt8lE9Q;@{aZ<`6hY<80|~Ai{NSU7 zw_y3C$jqg1!MG|}WmDMKshft`=s&Tg`!rK!D_x-di)`(#)R!7{6*(u~(&O=Xdqfh* z{TQ{I3dV99yNUjtT)9C%-3r|)pj@Ld0_6|#BZetOvystJ5D1hmAdMN=8!6F)4_Faa*mo?oKd}05ph!C#ohjR@_n*Pmyz>o)U?4+6>MnEoy0P(Zm}d( z59O>Q`)=OIps07+zV14{>Vr3Juvc-Cm%AR@(^I15U#osJdQd=<4;&M|lDeNP?)JlsN#>1~i*31pE;Xgiv#&U?39}SI0uaWO|_?*P&_!No!c{Zfcx=zhEPMC2_-a?(4>c4<%J3J5lo>-@| zJHKLg$4^IyGHXMQvnQkaLGRHOASpP4CNts3g|^+!c7>37L)O>U2*BD9rWh!&QMPvV zU~@{f8fk?`T{F|uK`yz-|MrOzIT6Zz(BtJxCtb!@5AP|)<~Y#)0lHp`Npq8eoExVE zPdts&8zRcsj0Ae=r)d#)`5)jDj62FF3Z2g-^Ay+AklCwXpq>jmR*j5}aU<<~0l(ty z8P$r*ZMNNeCcU>x_2K;SD`9d)%rt9}q<15s<}s3X&*FDi3qg!G6FrX5A}~Y7^hr#} zBW_9YDq>%O)AyoE3JNmO_n#xtc*T_D5SQG#x_$Zm(A6wR;9mI3 zw1YtkV>3lqRnDL7F3+VNa6aJkOAjkUn=r*vt-xN-q%^@b>WHAtw&KSZ)QODMX!4<< zm{~9_olw-6{E0b-q+X%idnS(@%4g~(*d192Zc-X|jK;)LEBTuw?A&zEq^dbxEI07P z@rGnz5kTE^c%Zhy6HzHQ5_wbPSoV;H1U4nrk!FVB$cTpFe7!jkYjY?WSM>k+`2*jW3q6-&c$K%OAx*xK^h3r{9 z%>EI)#?3hU%KaMdHyHmYzl&Itw8ZN|If>kDeluwRMX&;xlyR-Og&V7}>yK@Xt~5gN zyMtJZ#RdtNBC8tyqB*(Mm)Dvaj8kWG-GTT}9JpEMF9+{8UeC?SWcb*v$-l)CEyErM zuv{2rea+(rK^zUMRvL=S^mXS6@QqC?6y;g_%EN0`A5jd;R<6gTHu6Nn5@N$nKQohN z7!3h#O?p1XvL{?)%ILW6IH0ueoKz?H;Syjy!^!Rmdcbu}b&E6WJJj#Z9jwDUAD6FN z3101WuXI|7t{MQ@^Fa@Owuf%f7o|X7-yF7;2>rQ(Mg9%IKcLWpg>u95|95bI*0Up~ z+U)of_DT#Z^)<$y6WXWo|0%?uhBhIC{%_+H;!wrCImogqoT&!-w7?Ejrk_pG5e3 
zHYDA>xVrzOqn(4@2OzAG@V-R0I^&u6{p+>nDON%Df(?%)a0gy*qN zu&>igQ4JbydZU~F3jqJoM4M2m_x~EWUmWri-2XOa@z$U3!fw^8R7ofAaWPl62>S-{ zXw%WHMORa48uUWK8L#I!R(r7b>Q-E2d8ouPX?Ywta|n|JK^NqyxxlF2Sd1hm=g!uH zyis3cbh9XH=$fdIYwOxu$(I2^C6AP!&+BF58GJC6q}B;m%b$PU36dwAlg%%r?N(j{ zm3I3aYn$10mg9J|JgYqTB;4Y{mlYU*2A>|HsTXFVG->``yf!9wF=L}UA)4}pXZ_;F zIgsz-dCRl^66WuqY~ zoR)6u|6%VfpyKEn{7nc61lQp1?lc|ISvefKs=JEns=7sY-RJ)9eO!!(hIX#({4PJuw@OelvPdYB-%`^a zN8;!wAg?c?6>t4??uP#ag|09G6feU&TFy~3o4Xt@TQr}A4ny&7t?*n`Q-?EMeo^J< z{$x;sgr@o1pp0X4Mq1>{i?m4x=|*qbM=7{ZN>{i`a`H9|XAj7wXOv&WNh{UpSszjU zFk%+9z-)uSi{w?#*C-IoTHeo zue|iFDoQ}1DAck`=x_OH6JpS~q3qS8rdQy~<*rvgrE{J6(Jjb*CCVZ^E_w5+Z`<}g zuW4kH1KszmG57CS%+UjiAJv-EP_(%n_B~=>=-9RFQDNIL%Wbp8zzG}}U&>0f?4f|R zp1G52uabYuOaEPMtU-FsVX9DOHc*|6Sg&?`v~np?RlY6C&Z?G|&<92wk8C7~jk(-k zF;q}A77_I;yCYLO6%(yKAAi-oTTb7S{I5S?WVuKP@7b zh-^+CBvG>1t`*YF6?$R-|QIq%-Eq>hkieEV=$!id=u1j2wf@7Owxa9SeE5&Vo%)bJc{+6O+J*oondDTQ=i+*7|LF3?$pNG25xQjYd!Z9E%hnTen<>5tnCem zc7WfZEPWWCwXHNV|He(tLNe&ykqvCUo>-}|2vbs47HQLzM~=L~{(abpXh~@MUjK1u z5wUaC`%c08;Y)kPKsPBi@=L&5#(-I{k1ZRrJc zXe>Rqq+?+233$D=_=0V_&m>g)<;~uiQ(nquSqJojq1V#KgSPB?_)!GYNfsNhb!KA; zGb~!5K~(dHh9Ws!o90Ei-40kuk|l0#N6sWFu!c9 za-<-uA_at=WzA-DI3<=skY_8pegGY?&b}=VZTZR#?;+#^&1SeTFZ;YM&G3&DJ>!r|M*2l0$*eAb>Q1;8L zy*8n}v5y^>kIxeX0+f-HOuGabIMP~6QVmPgkfGvca;fMP9$hM``Y3=%ggGQ;BvOyb zjv2AG?x2V)0mx;zyOt{{Rzy zFLgOFtG_0(kFYctcI?#V4NyjtlMdJw+Kn?u|A94ijx{eS8Kj0i1W+84Yl)l&6!ox> zVXH@O+nXuWiX7d&TJULogh$NlC1ow2MpAfWyi|3u|ai)dz>qj!0@OJS0! 
zmn?qTXi2OEGZaJ_MYAO?%(aTfiIcncULnkH_A1{yhfj>X^}se?UzY0pURLVW$Tqo@ zswqMr&8v%S*#47$@%{48B4O{agGp|D>DUMhNZc}S$H^n4OJ?#+8>@DF&S`sB8SX=a zDmEsR|7p^m`mLZ6j-=e=r8Kn?!M_{^2-I|?*o$T z>X;xBl{G!Fj zcm6K*37=nz9I{B}F}jy=r_|SZPLFUc9?EP-k9`jvX0yz%IMM*DYnZpfl`6vx^e+&b zQP{6G#`#oqkNTNmT{h~B)QdLkVo5LQtcDE&Fg_=;;vQcP`+dJ(H5i>T_r)lu@rNH- z=Z^-ksrod21xPRB&NHL@G}fph&jJ$Vc!J>D`1>v{v*JZ!2wi0uqs@F$*UuoVcn`Gi4U?8$oPFoOZUaq~Z06j!B7z+0f6wd}T0x*Ia6i-p)&F=|c zL(vv>;SbP`1Um4it5QAF?&tI@kfs_CIV6yS3%m%(igL0I6>C2}4q4_ovYo~v)M)n` zo;eiJ5citW$ZoPyM6&+z7S2^eQQvuBX4GRL>@x^^7$E*SGzo-_4Z)p038$Vai>-?N zX%!X;LO!LYp+?*s{VJa&@8NGd4e&0F)L(gk{BpK;Pc&;}nZj8Sk_v2h34R+w=}2{v zfmz8ZM$7Wf3>9|jPjR%Z1XY&b!KFi{bYkO?#X?6(&NeZS6Gj0uyG)Cfm4o07KL9|} zb4%dlokN2eq3q&UVc6Q(d(%OR>;Z|wF?KP(Zw=fW{JPTuhN<&vc8m$m+qEpgi{Qy| zzjKa0DZ9ncfq36Sd~v}Bek`nPb`LnrQ9$7(y*;GK9CB1e-H+(efO5#;SVctD_?`R9 zrJJFM2sa_&zDN^Qa7v+-eP(%S1)YiB>=#X5a&#r?pzXTGu#k&f{HwY8$B<**zFLsK zt#x_U^1@}t;_?7vGT#+Xp&S=c+)82ESG9R?%X4KE(ZX*zXd6OTdQBB|?D$iJfoxet zmPQ)&!4Hjk%JIc^oc<$fUTVD_Mn z?1(z2m*G|mx}M(hm%RJUl)}wVUU$KK?))T1{Z^_&ZYGVT0LpU2&IS*22>mY)-W02E zmN@p`&#df5E)?AJ0s$yPZX*#xeWl4Ka|mhy*C+M$$8&Z+HZhA?1vEl_T({=eBUy{3 zNpzFE3dUqAAr;fAVQDBv+kMZr5Z35O8~}4ENSFz(?w5l&Ea0b5^jB*cyK}$1tA7Z_ zM#66fs!O!hzS1BlS&kr2`^@OG9k~ga1sZ5xvoz#vx2KaUMkI0f{jO%h#_UipQvG10 z@L7Q;D*o+dpFPFfXg_G7C8iXPbb#U(wcp>^$YbfWZm5}|fC$B&tzLf2@=O+;OK?VM!Yj&9Q2x z)g;g)KVm+j;af~G3)inY0L{!wC2^-g6bFR3VLD<2-FNKLq39>;>*pU56!oKf9z*~yF?!A2ythrucK$6?? 
z&+o&!7*mdk?>ou`4ldy0IEY;a6*u`_S#eF(gU6A)r10A&*#WY51?pY+F!n_39ON6j zzA5*a2va@!EiOijIU?U=*{Y&jUIm$>gs(U!0pk8ZITPpzi;&y$w4#LKtBJfh*~bG) zE|~1LSrYd*uLkkanzv|e@&6`yIIM%#F!*5X_7v(e<3{#5+$PpCAO*%zD8UB zArfj&?8T+O7X07v5Ln}Uq2}rByt>s|ws%?*YQ9YBf-t;F?WtA&FH~~M^meIS6_46U z!fnV9_gM9Bs;S`${j)y@X3p2O`xll(C))3KSJftWirI)4WL-A@D;*uk@Md$#>KBEl zLH!`wpTip?hwoT>nC&F(z&^J7wZdO7U~BwUB;P)tRpclBC5J2&Ojm)=95Yp5dYrFe%SmeG3#5slk;*F*DCB2_?q_Yv_f&{eA@VV zJz(+4$lOFMgFO~w4KSNx-X0W;p2-f8cN`Shtz^C?JiQbAecB~It?p=CTX^m13*P$_ z@Oz0^I>xp9nt23R&P7XWbuzl6HMPSQ@UhBQsZ~X`^m1tZS$t&vhd}aZ+c&1YDNQ8# z*#?ArmkYU>-%|x{=6&kT=EKf4!C>icA^Yh=>{A*@ z!XqcEv-ImB%a7FW22XOo=^9wRLPz$;<9|9py>XxBeph&YNXnr2s7_<+JaO*Lzx5Es zt4?F>vgY|qqHDpN@pg8gxIW38IU1i$1h-gT&HMZ__H=}8=owhC9oX8zwpK%4ETyRn z$IE!rcJ}$R356x5)$f+dnz;0j$O|uf#o6GARt5WX#sOj``SIdYVx!#%j#02NUOj`? z`_+!8QxXnjz-*c2S0lk6HCOR|8&>b&B)?Le?u-ry2Aq4Je&mgl&S~LchjrfA3j&+8DalaKeB3gSD9Ei`xMEG zC8^R5?s+%Sq&SDI{^>w5IxA98_`%+dX?OuX$dw3JwmfQiKy@)$*SZ*U7L;kX6vk{4 zXW0^(r;zTF~o} zI-0#GvF@$WGY?s0JnLHFuU(fXYh0G!w|aJxzLSbu&Y-xOg}$ z3j!8M-Y1oV1tOV)5IcSb>XkXePQNL%5Vaif{2BZWu$eh*&r)1N9IuinDQDgx3SOEj zUqlkq8LVd2)9kCg;dUMsvj#3@@`(qEm>p}MwzgUPJ~K!CiP2af9>wr|(0aw68iETl zlfb+UNM>MJc8VLDpWUWRt))ppIPWyU|I!dO&l6<-seBu~%cubdSY}XP zrNF(izoApooco20o=CBdpGC(p)7@f2KDMJMrLJoIjb|hs&b+V6dSVw1or$MX?Y&`L`Ah)S-CH`Ojz&h3|h@RJiQ&=S6yuZ zv`W+}`j=8RA~#f>T9g};%5-mZtM53p3>S%AE3kfT6TDFTRz)h!s?i$T_u=LKKtS{R zT8Gc?-}I>w^MoUcUAGp}J9Y0?$3uP%pxr-wCSEu+Da9WGhm#P`7kWGEF<#DWWjQbH zjJSQG<7C)aN1F$i6lV-Y^i82?l< z!$Y?!;pNizvo)cKxU7E9xd({c!vME(Ij5P6ZTQfdxSz;o&ZTm(S<@Nj?N+N{MtKOy z4S?WG1{>)0bd>oK7+X)@+Gb4tO+#MuLd~%17va}Y6QBDqiw2)ca^%_k{vR~Dq~|5M zl7}}g9dsBCyMNuUYYS7ud7-JT!R&h0?|bL$1!%EMAIOFDQBvrXqNK!qj+U4Cp^h}9 zd9?-Rkm5*5xiD1h z{N)yx@%<<3xdmju!Bz1>uAdR2GEHZ+dYQ*2tl zIqT^A9J*A9T zJ%8k$=egUb@%$67P)L*oWd$f_twYfF^q0@c{TuDF(@#}KXA&kLp`FX$`4xb! 
zufu>$cJ}yMhi3!*2;w1)jhBq@wZ{|)_V8v>QtuJQi37zmP{QLleVFRVqV705wi%m$ za(P`)X}t25VB*TYKdj}zRIg)f5iML+ft45@v2X5Z+T7+Mw5Q^s<37d0GbPCvuCP&0 zfZ%7@7=RiTEY_sUR1a88>m z_K6a<1Y&OIA2%L~bJo1{S{-A3(&9hqzgGcg;QH&CtO4ffrE*~14SwPl{i@`CdI@gr z8&3Tm&0LH6rNd&E22|O!;?ZnoKi!5e{nT-oimb^ImPYm~$S!Xg75R)Q8dz|*bb#H+ z+z{ah$sSE-fQ*@-*4;-1{2FlQ(N!Isn+!pZVl?EHihq^B6Sm+3MkiR(GVsd%vRYAF zxxeBk{xrlOz83dRFuis>;D&7^vliU_qP<3NvHo%wFpTdn27c%J_(;_-^5`P`t^v0u zN#lF*Z`uMCvGL!tt;mh9)>8-6-?!VIP#HE!@QZ)Lkz9>MnbR=TJDKKEwbXyQ_sQ$1 zQn()9&BzI}pK$t9{u{I^;E5*~_9CXy0t1mxonm_!*s1Ry&vW4DhZEalEA(;^^}a6m zmI8z-zcban*PcoWzIl)7-7j>SxH-fKDFI|!5S0eJ=FqFC7$zjh)2vthKn$_cP@2_K zHJ6S5s>%BtVUfAdd7CPshnzrv{v5@k#tT9Eu*PmiLB1dLZw&CJ%$jp;@+~}jXTiH< zzK7IhtPyx%{gBxvm!%EqTOkq(m&95{yH`s^!$z`ZL~q}uBQ^Tw9`UfO)OU4&##Ad7 zUR{rs78bmy%wXv%741_yTAK#xblB)IE+Rp|hg9KLy zay)sj5a4j>N>vGJRiO<_$T55d<}I~jP5+GiF0el#Z@qn2C^lK3TDW`{)EjdYi_o_{ z1g^%M97Fckeh>w5OoM3WthB+AdZQrbni)wse>iXI!)|%ZAwXu(`Xa6+lXO8!YD|M8 zoW{8FrTO4J)x6Sm`T$Y;BJGE|HzeaFdAgUJ3ci2_RDvvqoNSvVTqJigrKCC|AEBox zqc;0!EDC##hy-)$C=DSNCkLX|Xce@BLDXw@Mos_7jt0ARf66qFmNxxu3=I})=6cB_ zx4OU)o@Ab_f3Nw7XCh@ALUv@HFL<8X_h=M6uKg57pjZda3-E6_JU6p+FIw0;c3mGl zb*%&~CUe8vXQtBr{<;kk99`iq z*a#z?i3>jc>>lgJ15ld>Y`qU`K>ZmEEwW$%oTKgp&;x_p?iY||QOcb+pH|^Kz%N*8 z9xcxfTOaa^#(UwWSShF>7Ib53sW>My2SRv}kkZIx!_L@$dq?W zyl>RU5~Jv<$D`pQLQ}PDYh%kkI=+T(v2VGA)CDv>KvcFU3Vq(r#T?tPCIp=3%)A5I z5xMZ9G$;RId_@dU##h_~?TXT&wq8-94Y8XI5UN$=W^r;P(g#y~Vckg{4Y+PrJURT~ zNjuHqh&c?-$dW!D0kn;|?o;&QK=Wdye~89N(V1#4M`^CmZawcCmj+@%a7*y%`1(L1CChX>3vUPnp}~GJDTju;#QQO^cp;z!em5Uc zzCAM}hLbHCemm z%A@}X_$ciD0r=WxpBI>U-{g=7XAVIkd`%_FW8!7yKk0~za+~#Pqw_@L&087tkan%a z4H!quz-804!SJrvT3D226&CraRKpp328l!;$3acTFfgeEiVF>;i&+)zt3}-x z8d(OMECO1YWSxTzS$e(4G7kraj+qp9g$x&dpvQ(uJ)oaNn9!6iE$`Yf9wMgWx?9bm z4uxRLlHVUyZsZDiAo%edL&x7htpR2lFE23`-(0uRf}y2BgXV`?Iz1Lf#k+4=oZL|9 z5Kz83(9oHPV?kun(~`&yxTJESDXn!;GfXTIQJ)gvGQU;BsY|M^u@mLHS4BQ|^008j z%ntvkZ=0Yp`gSUqjYVgu1$?n2^tmQ6x!B^YWx$=RzP_GI{IZ`(wtw{S?2YpQC4U=W zJU@P1E~qSa2)DfoDjermPD)N^#&15?L9@o~ 
zf-E4PhUGf1zEXL_6in-tci%Mmuv@5+PB1WHbUs{PO}wqUeft;JBdVw-`J3xCb&s93 zGbDkcR5Y&-;GaW3eG)3DHE%$!eH|Ay()x*A%cyBs<~B1J2fWyKHK4RkzUl`&GyyOui%E7P zoguuP6yAyedHp_m@%{J#Quo=0@^l>Y|uMq8f05c~_s z6D#vmXwuEih!g$?#M1=zah@xF0rq-MH?7m?&-y_o8Py6#aZu^>6bXOPdYY9p45w;@ zUqIoVs~!|$9GhL@YsJ*}agxrsq;L!YPON}qCA6e#%*BaSdP%s$x#bxo(^d|*HBK!3)Jw#6D8WZ(S^Bpp+@K3^Y`g|TU z5W|*k7s`W%65<_iVUJ@z5o&z2y8$%e3nhNE?!>S&7z5N5AKt%yGfW$TaafGdAc?MS z_@(-+yzKqB;=c3NLAk*f=L~Jue*<@^9Chzgpy2M+xBn8{k^bl4ZtnjFcmJOTcc_SN z3piFR%JHX6&Vwr2ao6xj?}0r)wsw-k^v2p*`?jsqyG}SZS!HO`QsVsj`BFCC?|W-k zVvdQ*`8xW#%8B&!^k%j+eaW=Wr=8{t{$nwkZxd7jx9AM|B8Muhm#I}*(%KBnstcss z=SK+->o^jfgqMcdY3tX0%HAIuI{9yo>$Zj{f*s%kmpT<*lvdkGseCij(gXXx_o|To{8fm;79Q z7kN}Q3h8&aoHO0$%!1u+&O?p2&&qih7i4EN;s?I_6mlOcrxf<{4<7Es95Ssbq+|_$ zT+hRXHU3<>qE3`EtrSxQ>)%QSn3UX#(CUM~{yIT< zPtO7i9X|tH!!}@53`FKWQFJb{ur59$^UD4jX_^xzf?Me#us}T1p!8D1Gi(3KH)^#4 zoUfUfoFQ^9mH)PEL93~Upt023tW2fdHi65zSUMoqf#!ah+{vMr(pFOy!iSW}c?fjt z25&SST^NmiDNO|pz1r0&dX%W2G ze=zvX=jmFIC4*{L>yN>{rwbMq(dUP3IdTs0^$}-mkv;RtsGnaEm(5YLE=zM&;A$3Lv zO@@A$@*9USIppY}pC!@(Sj|+UHm_^Y=R&!H+YiPt7a90EG6akr{ z=uehT52*&~*GFPXuDqxg)!X#)bBfQX5Vh%)_$sI-MeB^DYLNE51e;pSGwLn8u6kFM zC<5a%r&e9+GS$L4BW0eAGI5&9`ISsQjDpI|A2@8z`x zWP;p_3(R3&)LW*RA!D-q)=rvx z>wl4;gsc$o_^tL9rtVR)7CSVC?doxzSyFix^b6$&|S6 zY{(2GAtXfn6dbfW%CeuDU(=Czoxbi@daA{aLrBoPv>5uF;{KR|EG^ZduRr5i&tr4H zx!l6Luzs30Q_$iL@`$v^d{UEiz_T-4AJ*y=x(e zL5T-ZWXSK!IrlQ7cE8)FXH{m*N}lA*z|-Nl5o@LvX!}zv$;CApf7L>_8b>?`UGT2F z{Gi{qbWEA9V{kK4LGG?Gq=Dst!TLh!GiQRiaq6b4UFk}sfyfBKPR1g&Z^9@z2ZsHKV)=v;J2s=Op*7;k^L+Dl zLiO_93Qaapu-&6d)~Hm+l1T2MuiVEdahF_skLvR`V5P;tTUs4BR!SL>Cby-V=;AM^HwtcG8e`{k1JSn<2j*?bk?cgJQ(y6V% z-w6#~oB}$|!mL#i6ha%0XCR*8rzCzqggbxAUAO-M3((L=o=NY0^tqs2BM(z7WyTo& z?9hjl(o@d{)8m#JXZp#7XQ?2AJ$=LOpsmxbFmw@mrgF()r7O2mo~OKg$NI{SwRn&u z2({OaWUcAW#OCbA!>C^rEWB+pE;=?eq*?F<&PgU|V^Y=1(eg$d(>M$yI{YT5JBch0 zYm&Hp%*%=?_SRu$CAP2jMF%Q z(x0nf<@Lz?7c!UuH_MO7hs9^wzlhCxrmaPOyTZ`u);1<+B6DMxtNZZHh0z_mTh8w> zdaF&Y(TROe-!oJr*0m&>vq}O|U4Q`X~k?*7FFs_m0?ETD4?wGE}jhhw9q*Nz9 
zjpq~|Xw$b`o;|!PNrm6_6sM()Fcssb=6ki^B#VGzF*{ki`(;UCGq8#ye9D1Dbk z(+-^FciSUdSmWGQ#|B4sxnk##$t%6wY+X%_@R#2ngsbX;ew_>U4j*dlU2X)@~J=tFwU)GDfd^w zb*#m0@$Po5_N(t!^tnQr%d=&VBZLAOk_JcL)!W9Pgxrnq)E1fL8$;tc@WFNkvyN*xZe*z%wYLt zBZWGik!HGCR|*>Xsc?TKF$z{Gd34Iix?bMe(7xI<)K7k^TObxgOr=^lX4}mPhq0c0 zXBg_DBH~aIu1b3$e^sgZc!S*xtZNZ#wq{PSS$vZERP<%qw38l5@sfyCy(0-{L-hA9ka=3Kz+WPG5n0jRL<4@-ZAI#y?>59sJEz)LkCjNeSXTVy-w7w zBR&j%x4wdY=kR<^!mIld84sElD^+OIZbSXu`e?XYM1abBmF|J#8&F|iSYDG*3nsXQ ztpLH`0YL?Lvw0@yaljenJgy(Wa!!y*Zn(Lm9cYHm#^M;G3{cY$Yn1N^3OK;gfh<~) zY0YcIYOH#kDO-@Y++#r(F6cR!M}B4X_6-0~BYwI*w}>mV7U^d$^?C(q;|r9ru}z_c zlw{mWnSf@bD`F2Q7HC12`BA9QMwc1E-f5k~@m^Y%y2u|zUDDQ?!spml|Ncv2$2vy zjoCYr%~qKz`=?Ik9vsVO8@=QarbpBM>f^CjX8rQAh_y)gq_><)O^v_8?RAb>;W+R8i-DRHW21W$?T#91K7l_%x;wL-K&-2wb|cO-SZ@6EiE3Mo!V zM}|j0^>dgGc%1DRn42A`8R95UMmEJB5jajZwEt-5fR$2CF9VqETwF*0hA};mqjz7V@X}y8% z3sp;AVO8JEjM(zBj&C@I{HNHSXT-mY?Q#7b+w;g`-2UyGYc@7$fiG7B!g%w9wOkM%A(#RkvgF& z;7k$LfRoReP2l|)i(fwxscrC1#ljEu3sf1)2AUTUCkey_+P*c%nkOd<4z@`85OEzH zyPO|hTBV?Kr>U8*kCM2XX{WcghNY&K9ULVUkm;_ z4^#brrQ%>20i6GRA1GUoqkzD88)7A|b`5Vd>?@_LrqZ?C**A-xJwCDnzyq4i1iPuKG}a z;-B|{idPgZEo@EyGMoOr2Vsp z>%S18@bms9oB78%_sSYK&~;pq!t%$-_2Xkas=)=Z^fv zLFU5t*1oJL++E~WL_%PRgle}eGl3%C{?-2Q599cUtL?egnWVjS=bl#?ecP+|`-p9V zMT zj5j+O$h)wQcKSVfhrsxk(n6B!pry>0=w@Zju#ar_H^=s3C-~TzE9n*zy1t+HT8pUK zzQpB??Zgyu(e6L?*NO0^S=h+H0@QBBTZE!eliQ6i6GY~PJvD6k$_TQK0+ z`l0w_uykhl z4>!k@x^TFDUirWz0=D%lz~^W;^Ahb3de^jC`T6A;?u%U)Z$ryO%*W&Rfx)#!q@v=6 zH4l?UsAmsF=O!S=ycm(;^`)y&aiyJ^s05s3Q^Wf!A!?4;-)}c0yKK+u&vc>VGTOSr zay+7AWJq(zD1n3xoqlg9w$f-;F1n=CRWM&X&DN`yrwZ!uht7v`ksUD9 zEo`#`IBM1GTo&Qecl~Wzks*b=i&pe>T=`&*59@5dmr-EoG3qOMJ|b%cQ_TKEM8&9t zq1S%p&4e8IV|YqvpdN|IaG_fWF(#KRB&f@-OAr5DVy(`x8hQkJI=*#3oCn9f z5stvnj$!{)YE0pbdK?XEQ%G%4b?%P4E1JIPPg2UJ!fBdDvaX-p>{4*(Z@YD_d^cs^ zm^x({!0guXlGHzDp~obLb-wa=DKmu2ObM^2>G5Nq$oLGTfhqZ!Tg)ztNh$}M8093zQ1I3%CWJJ@ zV#Wd5cW0KlR`h{Y zY>I07UPlK0jRk`6ZV;)J_vNrPb29%mfr%v{t}$~J$AG`aZ)PpT5mGapY6;S^{Zhhs zwC#~+*yjw;1S69CX*`E1S>>TIs0>*$%9 
zTU1U(8{`uztlVNu=KEuGKR_FFuexSgp2uDW04ujeHY2cko zE3UjUzZ+z9F;sK|n{`KZVXAY|8l=iEytB=5yVGq*8XU|{bPe0=-cF*RrE}xCIYS!5 z2k|&7ZnSy58i7bUW8eFl`iiVz<=!bDvE{R>3o}YwUT;aBV?&sbGkQOka6#Qn{_-ZDhs z6aTL_(kksggfz}5hwmAf$f4?h_&GbAjn+t1;m@%3vLUQ~D-I4RLV!f&f;Y(DgzPD)nUJ}obygFEi^!gXjkxD-AA;>7O`JRMyp@pK4LHP zg(e^>#S7{)#&ZFP0Ui@L(zS8-XH|+^7zdjX#~IfTMG^)2hvVNTGfy+n>P@w@7YIbO z_F-u8Holg-(W_H$Q}9HI+c5DJUaJnv!GQRGRf>*z_#D*t6 zp!>mH$tZS(uzdE%>g@)IoVsO%z~(B;gd^AO5X4pb*wmy05yZ9(^b*dr@c74gVjc}D zUC||3%zdU}g2f`&Y#k~QPpiut_eg8=@pq-Iv9Er~ek05*xlzS5$;(#?CVd^ssq`x! zkGP!6g~JVuALG+%w-EDa^N665o}=g$fuZC=@1v@PUxY61Mm(zNlXSEkCiA9qnn++Oles>{-Jtjlo5LDiVDPn1WfFrec= zc;?lX^CbVsK#BpPow-4nb}y-~n2wRp>K>X4}%?Z%Rsk+?n6Jx zX7=$4-3ysP z1?BV$Uk6Clq0$2p;lQ7WgRz_=&xOie_kDW|3?_tVzAKviX(bmbaf^4+3YDH*&8g(W}uPy;#4R=JZ&zP}0W3BLC8UMFCS zDt?qP`n6*ke>%=0r?_z^QlpG81c2-Iel(9jH`Y` zv)?qf0a1Td6&u4x&5DuyX5r={wlhyu*98954?GO)RU0eGy6+&Khw(k%PFyrB6&byr zdG%jU@;tvxk^%F>!Pjy`@nz_5qRaiEw_Hfn1M1&E*=S%taj?5mwP8Wyz(q^<`=TB_ z>=w4wId8$ANyARnV^wcEZa&J`wArvw81NT97%1P6=s1}_9=Bhu9t1U2~nFBFk~izAKyMI34Tw~CYns;p+^ zV&ws_m^*Rt|NQ{8b#V7GHgy4c@B&#qZGcp+Hm00B0AVYBGk$&xD|S9kUT%OTHwPy_ zrv)dU89Rr$84m{sCoi8FpE)->2d|kafRme#kAshw-<;RV)SREw+?

oBuz$DLMX~ zXPE1sRq!9&p+a&O`VT-`a(PP!QwOM@SRAU9c7wk5`vpc3#ummL#s$U^#ufT&1>*)& z1w#&_4V`y^v4GB5!8}3d$p4e|$YHo(05ClN*c?=HE&=sa(n|<%vU75>^FV+2*}1s^ zOzfO=?Cf;V)f62q{twoGR!=S5%`N|@cI=;(qklPn|8YJ2RVh_Avr~5iu_=*r@B;pp z29t9@_xZn4Yyx=xcZSVxdWwH)mhA{b0l&6RY<1?=MKQW`=@u(U2CH8mQ4B$youEp~ zSc=a_O=*LDa~i{ejz<7l#rEHG`3_iM5EixT(I!(G)r%ss@q9iQ*s{X`%vS>bsMHt7zFi%%3!+ea$7oo)r@1d>;3jBff8iCgZx9wF`NLn-8!o}x`cvFlLP9+HnN~}=**GI3J7-He* zdtRBV49K#^kN<2(f%ggKLm$Qn#14zS_-2*9j917Rj9pytCM7Oc#yGNBH#CEwUT2 zF_#@l<3kDl2HyrijSd1S}YI z&I(#Ga8WdL_pe$L`38o}E>z*S28-o#fJX3mnj;0!NS2pkFIKX|Q_XwYrKfQBMqLAN=il&Zlwumf|A zbw!+?m5U|8(4;1u==GM~T9!s85bMpfcPXWq^#;G)>qVxq#vUUEY<0`jBzjd$0r_`# z8{w<+Qyl5qtmZCgN4yP3 zyc!nF*QF6cFZ*XV_VQyq{XNV#_UcwHg)|W^YOmSeA3KspO!RvGh!KenjitV0V@u?~s!(eBcNFo%&Sr0-$5 z;_TmP|2Nj&0m_nSX%{TBi(RfR+qP|=a#fdY+qP}n>auOywyo)V-@NtydH;R?oqyJ> z%yrH_`(*5h*b$j4cYYZW90O5MM4Y3-XG87kHiEyM810uO!d!kex{7%v=N;RXTFO%? zko64?E>coWwU=_}P+ANMI{hi4U8{m_xO_ufd!%`*a^DovX#_zhGpHwe={DJC<898T zB87d{pQ9$*GUv!NVoe}vHo#l2lXI)sdYD0{d1}R{s3=_F&G1YZe6`)_}z}?xXSSS zkw_^DYGzdVwgk}g_im{9{UzEftCvX&vv$T5P zF+^XnpTa97YQ_#WZ{t4IYh6R@QR%5sTMX7YJ{srol&UjnE8PGJmF{icm}hBmw%F^; zIKAIMIzD}y>pZxS)*S}y7-%xO`Iuhy&l4I$bI{R&_E!TBFw2^-+2U{aOdcE}W`l!z zIJ$Bmf+j2}Y|CcetL^3{Kx}4Es_OCT#)Wu;rBik#8qVo!)k?!;=TzU;d@~QuJ9p)9 z(}d4cMHOLn+s1{dc#1ubusg0>XVdIPHuXi5kl`cpGMqvsfrh{}wEJ6ixqa5+W^b9P z)|HM?3TRwa)-6 zY^!62Dvpz+;#DvOaLdt42?^}u$A`aWpsx{Mx$C& z_BdD3<6Ws3lO;_|G@78M*$&$K?8;<@#kH7_Mo!k&EEgANQF$|32Im7x&{c!4porDX zD>f(boQK;nN|MCM-^MJ;TVWZF>73??cIIgf^wovZNc|M1W~cnm`mI%mNpNAY;?}gw zk;a_wx_1}MqAmNkr(%378hLM8 z`DZjBb#R*m5N)uqcB3MM4l;}N2=SM$88YbKqpg|fg)?Zd~y(mT@b zz*FwPvY#NC=D@Ol8(AQkZ|g;jkey$oEP*(PHOo0V)mvVzIO{g5n*Mj-qGY;E^C!4N zOFt+38D|}*O?^o2#S3Iv+}W7aMO33kE)LqN7v+eO8C|4AwMUx>=e>diliHjK7T&3S zr6L^P>_z1yTi~aPH<$PmSEX}~OO4ASz-yqY>F85g_5CEY^#I4Xetx+x8&>ARBDP+z zeT8ICTZW4?P}`^a-s6T!>n2ytUdH@~LGo;Z3$)@BS%|kVPgdWn%hlrA70yLJ?(7{# zDoui^ye?s4{)`jR;;gQkh{%K}cO)rSHiWCHvG8^0;BI-ovTyy$fE`lI8;{;hl=Umq z23F-mB!AYw1X0jtT3w3`cOp;)&Ev~1tgZ&Z=aP1aZ8)o=8pCG1b!6hW 
zdrM1aGZOv!c`skf)gBb?eSdMc_^XTO_oqor!s&81LJQt+Jb1h*=p?F~?U;Rq4(==O z_3l<4YVYrTo-aCYzRoorye%49EiIi+9CU*SoF5)8?cZwX{7%($U7jD2;E{GLUiW0? z+&+_f$I~zyAJHp08X>!mXmV9GjvAoHrv(!d=k$ChvacR>-|XIB1s@kZO=_up!<6(e zj|o|OPOApXF~djG;gr6|HcH`Il{%6OYy*d?>b4oHmv4d{TNyf`s_fHr++}^)XFtmN zGR|D9_bmj#95~G);AXOz8mabzA#Ts0KvnNvyqN$roF!)fcUxGl%!nl;mMf!E#dc%hc!xc_o(#QYCn=YQVI%F#)i zIXM3FL)OVk-^kv~+T<^=Vqj*aXZas*fLZ?In&4j&37Od&SQ`CD8n%BRJ^v-6|4hUF zZ)yI8aO^DqeMSs_lYaag>4eQ34F01&4D|m1!v32IG5izJ`Y(z8g+>fa|JF$VdF~jP z|E-b!lS-`rmgHaXVf_bW_+P8^zfT<->wlYs{ojiAuO^QD-^=!&y_|2S3u zu`zS}!zY;k>3@uj|D*SRe6ukA1(pBr9(sHxcDBDrGRNN@dTsoFUIhQ?t&IOfqyMMl z{+}^m1`ZZx20CdYYZFIP27ER)RtCm@TnGLaBz)lo?Fd*-JMxmEDfiML>7-nwTqG@% zd{y4U`HIT&r)ml=nGRKA;XPa--OpgzJQLs za^{*p-C^gI$hkeG$02hjjS@)z?kbNT(7zZZo*$ifFI08$dj5LX4i7-|LyYf*jOrV2 z109^VfNUlZjugD?39jo6qs#vKQVtyY0j2P4ow{aL_I`QS548z;c`zCm7G>}gZ?hXO zFDwD-w!6jde3R&F2K^YZ4gEJAJ0B-E`+(15*e~)AnCv-)(%4pqFVvwHp;cbaW{2rk z-T?HmW2Ciu&-0gH0acyZ2B=Te#PJP-01s08Z&@hqFmg||pp0E-squg=Itzg?KTN`meSAWPYwAeIEO1Cy01KkuWNtK5Yg|%{dw!g!{Arm zkNK%*AJ~QS(%Yl947X0q0gi_vIc)T|jvJpjNYj@CRywS=#yURegH`O|q95em?cN>pfLyh1EbdB7InwyEEWf141C{n@ z8#(4>9rLtLz1v4z(6)(qeeHq65&1-E?=}HB-`haWlf1v!ZFg>tf$l1VugIFKfeKq29OeR&G_pu>HY9pgLa3b9SRzC;FAF{-)Bn{m z{}}xLKn@y$ay0!Rrqtut??YeukdXCdq2q%*s53wDOZ^m!F+2!2t_Y)zI z>6jz7+<2<$9E7h*QkgIk8NDN)dbZ7%=U_yUCPgx+pK(&sNbg}GVfzC=2V4+yUU1gm z+B3f0F%w50wmJV6e-)sP{!Tl5QAZRnk;$pz>YWSY2%J8=d1yGabM-Y~&DnjU17FC| zm)Ad_TToG8=ib1+KK(u0~U_` zSYi6XBW8VuDf{???tvI3*&J|ruaRz*JP<^hamZX(UV(m>V^jaAj;y-k37JunWhnpy zUV*w*%Zx3WK@F|nGnX0qx7v>duBrcRy#8Ob)=urhdRRnvh})fyzMfwWw0Zg zDyal<;-1;h!~!>4^16-pwy5e{~(A=nHz6hBfv&lzN^UXj~TQd+mA7+pQ*T)Pmb|wXs2Pja0DtQ z%=5w=Jq~%%fqE;Hl_CKoe#x5dR{hPq*emj`ZgfX*TKj`-uOdQ9OHCOHg8Qhf|sOK#?NE@4+@cN0?=RgTz{~*{-xC}wyQK|;@ z&=Dc7S;h%ui0Fm=!sh1jvHh$wWcRepchhY|j?s&N!)b7OR>@GGQ+c>zyB<_3lgFN2 zn%}t>+%Xeh3oXzdrMGMJP`;*u34Imwa5s_gxBO*#BDI!R^^B?p(Pv2$W?@G{t7Z!v z(+C11*xdb^L@x-rs*+x%JB}^;Gqmcb@flR(E^}JBlHr+nX@QcHUNbx>vmplft6-&7 
z^AEkvi*sxq)|n6<*i-(P8!2KaO&ZDA<~3r0I}+$Pt3Lz;%`7`_oLV|5<;VN(z-r)m zMEvQK1))m2V^N9DSivECl|e&{@Dj#NG-1EJQxLcw-+%9ROPc zf(Yqkc?K_8Hy;AEPpG~X+7-Up%@JpFKV)~pZji!#s6#yI=c%|83}n9j=N>vY)rh|I=D z|7%@yej;Z3iShfz8S}m%g7j!^P~oU=5Iqg=H}}X09kRiM2uOT6+ccj37W+5-eZYOu z#PhT>pw^_e7JPmu|) z8B9qN7A7k6K3>L=ZjTeh_hV_(3?cZm1;_AK`w6V6AiX`K@dObASJa>w*?GOF}) zvV23HhlZIPueJmZk*@S#K!SpO zX5Sy)Uz-&!!_S#(I3~_~_3x*f3)cEJp4(@gw;r;WQF0d~(bc@a`0USYGdVs+c1sYH zLdtM%50yZb4p0o6qap`?8Vt16-PVB`EMP2LXmt|tx|xe{cNpjn4@3uWo!}g}dcL3C zz6_MzzOd)$vVtmPXs`~6$yr6lH2xS2k%Jp#YpjexX#O$K4?2*ZtbDszq7tOJPnEZ? zO~Z5hQkL!NBBkm%SXS+d&eun6aT@8O2wz-KM6=~xPP4U!95pN}vac}L-`FU;ht$`< zaxWZ7Gu?w9j;L6Jv~uIB8#k;FQ*zm;>n)-w_{Y=9pFQ2yaevx&xg6}<6l74f>PiNA z0R7Ea29d^ng?uC#@8#oIK~_kUu%z83+5IRjGq|gHKp+z#Wc7o4is)pn#d*$w>Td;O zIoUl&Ms45iEZ9jfZt^VmIkySNA_!1^m%BbrQ}$t(=>D`aO5gbhqs8S&nJH!`=6)7RL> z{Y~c4X!1g>;~a;4fpjNR4$C)RZb*7nLPIBSPv?=W5?ttX7gg66Xz5@H2xJnt(Wxgz zqK_1A@9P4#f5}l7%QMJ2W$}1ujsLGU1(I^H%Th?!hyo{5*f#XBs#u+vC&4zXDctrM zf`!nv8SNQf{~>+|$e9HEkmX(JEqI>gK1?H!%V4&c-2D?U&Y&&+Pg{FTA+A}T6O2sx z-$DhZT#?Tl5A>YSm!GG0UC^gsZP+-%2>EpRWVs|TR<1)vGd>8Bc0a6wSEKtq+Hbe- zzZ^hqQ0MYZj(qsG>7O6ZePlSup>KtDUZHFoVO?9pm+qNOVW+vixd?5(K(kH1R3H3a z5km7{xABphV)~AS_t*zxyx@ zqXJ<4KwFL{At<4d6Rmg3o2%ELRLhi7OqZ6S8BM0s!Y*v9`Hb$E{=JR8ciKpp7Als} zn4a(6Qt#4PQ9VPL3&tHzhVo||t7?)c%I*5j$h~L`|M>n8B1w@B^t|H@JNsha{%vOX zS+VV`Ne}xLF3-2n?ZK^c=>lOrrAEO#36QKsr~nIYp0u!9d-m2f0dBDuCr0faz+X?n2;~W&V|8FIWw!JTDGW09zyS{ zr5c4Q0OljFvJ@IDU~EUx-!d?=j{{v#iB>VpDOTz%3?JCv1p4Uy#lBM#7%ag7g(9)6 zh%pKvK4J7zz6L_zF0&k>fJuyqSoSt_S=ikmnlZUZAr$@9{a}BRnLJ<*+VpK4&qk-z zXVDtX!eJ3lry#Dk97(@!ubuSqL-~wkE%;(23abw-V2i{~lI}E19*s*RC3ZeeX@`?8 zuGBtQxu8Q~qM@A7H!6}I;)jj4f(oFN5+y~6qPBAWM*XLQf(p$2g=6p-)Kax+XAeXY z-5s%U))qB-tdK*twN13K?^koQF*(f3wY%| zO)bI&Fl(-tcb)72Nju<@a7RdiJ`rXKw5S$H&pE zF_Dm00)r?*cXP?}fa_+HO;Gcc?`F@&!LNzSze(M;4%$+U6f_se#~rs$53r}DLbQ%w zDUfR?LVJP|P0&hY`qPUC!wfCn@8C|Tu^%#-hFqa-U9M%&_{W!&_t~_5h7P|0ob&4C*l-(x`KnR6- z)tIpfXv0zZD;K>L14oAA!lMw`gP`(`O!Q(#_msMii|o0X`4Fg1mGgsJ0vzjo#Iof$ 
zi^!|K+4N{Z=YmAeBE<1=mNHA!lLG!?P_JezC1lcNkAEX`s@1Fy$A4U84uFEx`NE`0 z=l1xFOt@F!{gJutr-Hzy5_{CEP(63Rq&<=VCCs6tBW?@5lMMF!Tj+BC*BGX9Ar1;; z5kOo~fEt7c%T~9%^bZ4J(dUCKl0?ZNYJew-WG4#99AedUav2GTy*1ltilElwsH=(* zz=owPNH~)eLYT8bS+k^`I^6b6qgy^w2n2@bZ&XBuNrL)7asC2>l3(HJugC1rsl z))3V`ghgdB$g=GQWQ-OE4gG{h=uN@gfby>JRq`DXue~9{mQ$M*F8Q)5^2G5iH%9}7 z&xuxb@oxsXWA7d9QxYKVGL<;}LM1c#vB1F5?kTx?8pbNcvV@y4Oao_D^_5r0l+Mo1 zP47=CsaM32#(IvgaGOf%lgrD;SjsMpjHXlsJ2THO&^4H@Vo};(Dav0pH?$6 zSqLS{Q?{K&B^^v9QKPnT)#ou{*igqySq?s4ZDTziyjKSc-fswT~;+RkFy2ODyr$5q}$g|D&MW4UD`&1RVQW2?a~Z@2kZ!5~|zT8?NgK=BpCEvT5~ zO)PI=K)oJ#9G=HGqScte_Q{42zY^w+U}KijMlY{*)P4DVrMBEukCHyu2loNmV{;Cx zPvPANMT~VSeQNoRi0U&SsVYLx)(?j zD@hn~w$4nX!hxeM^&RO7cq{BEPxg=jlRV!tPjw%F3DtF+|@mREIuRKUY4(Z5I3Hfw<) zJn34rXbHDP5FXd+uVgkqwPjR!F%TDDdWNO^uH1TfDR0MJSB#Dlftwv(Z7!>JZd~k5 zI8AiVN)*dnd3_n0+X)`NA|Yb}zJE&*8(zMN=N~VieZY9s;sI(vJJdUbSQQ#qL@jRx=5xs`5%X)AY0+-js*XzRCqqNh_ z%c?og9^c$1+}V}IwI3=&tT%6}>q%%^gYR`LPrNdy(fFx=7P3LWjyKmbThRoWpnI`^Xp)^AC9 zOj3ZK=LDe5r)RajuF8he|I||=zC*LXguC_dXoaf0-qmJ(;M#qRd$#f^gRPl~LQ$LH z5e*;gXpF;NqJ@tdAuL)@e3hN@EZyaqTW5A>81wS79Kf@`XuD^b>G4B0&qRUdEU@7- z?dIzvJ{(z5F;QFC9i{?i)K?@wRFZoYS5a*@x^>iEb3ruS5}Z{rXMc=9Dhi3HjD>TD zXgp4&9Jwpmu-00omfEcNFbE*O0}C@@EHD#d!>nLa-@|}Uha!Ji#T0*qS>>FYnO+u+ zs)l7erl~rumR!`5?er3e^yiMXnIKl1;Ux`M-vJ+Jty*`w`35vzT8}1GSu&F~ixxFA ziLo zlcvh#XZxlN8qXL1^piNP6Tap#Chhgg{N*H*xuDw=A_bvC#Z`Ib){^!?={4M%`M8j= zl7+w}XNiNd_WLGJ7qy?+l@8lg<8t*4g3OvlTeXd8sJ!~>AaG7s)20TNo)?y*E8a;h z1dUkJZpYrnp#*V4-bf`EWEFV~>N4$gk<}wp*HSa4W{6&J3bMnPP*N{`;E_Wb{n4Mz zHC6lm;_L-FwbC6lKAK-&XDZiiSBzDNvS!=_awit+8w=cP`p?eILt2>2G^S?J2e5@2 zt%5{{n|mEgG-MDR`bjFJ36kLUSz5Ua=JnVu!!57oIN_0OR5;47?To*@X$~hZbyk{p zCUyLAJpR72=-`W*)~~qfQr{$jbe|I$j)9%CHEP*y&4W^UO9S2flOB;UW%I0WlT*Cl@Hd$`mA?CPyz+@NWB-gQumCm=c9GOM>b0+KL4RH#}U5OD1ALLnz7){nK}~d z6;$kyWs{lm5okq@A(P>*OwfsdVgWqUZw^SgJiK%S#EDTgb70%(fU+kPCh{q@E-UK~ zd^@FO)>|(QRV`ojrH&xP?}rzVLmEdjDJdm|FW}smqH&ZnN-N%_E^%w~>&@50rLIn^ zheN=UdXkpxXn8#Bw~Bf@##EgQe*IPz&!f%rr`%#$=0v(Qim;S4^<&2k!t4YL2=WY13Qk&PW60huiBEM 
zw}ckS@3`<#DN*P_br1%RdBmJ=wmaoqj$qNgCb4|7{K8R*(5%2@V$=9ixs}VoL1YJH zm!!rwlQH3G3dk|iv5`Z`d};OK1?B0547b;+wzR|#KR(Yv6sU*i{SkyU*9dLt2+%ZT z{7L4|wC9T&w7=1Xun)}Oj)W88e{!vjg1S31?Ur3?E~;JD*PUsmEvOwVz+I~*_h4!A zOz~&-mfNx2Ri2ptK>z+6DpjgxGqaCUjFOJEnq2yG4g*8UqoOI`B&V$Fgc3p@gzdbnv{%GgYB#zlkc@#d^d2DZuluYmtE6!>{qx!j#q()Tdg{N zc;;Td&%0nJ7n!z-N}NY|5$QeVBHJ}6h4l*ZzD+VQ@BQWss(NX|sZ{A9lm<;WKvvCT%2a-Xjmls6TPQ4jQ`f?;!(Hjg)_d>R*QXvc2~}W%uinI zA$+2qiA=rkh-^GCebJ)Y%bVatncAMJGr@F@dya1Q+c#K2ZUwJ2g@h-oQ^G8O8QWVv ztOU|&=*&P_IoVV%G4g4Qn~8)0mA;d-!0zT)-*afd;-i&6wlh!sm5wSvWN~}oy9zgJ zce~B?rYp;8hVB-1RrVrN`KWb+WRHNnYvbZir*G)9>qQkjD;|<7ZmJw)N9oTyu*%nw z!u-a_Xdkr9upZ5tZ{Pa+G0ZP`_f&#(L_tL?efT2lj0;1M4t4ODo> zrt$cU2)1$7=VzN_g`+7$AtXu8iA^RUoY|)}HHC@&x|e0Ve`NheRybM129 zX~Qr4OOxA8kPEj61??3Q#q>nOu6#j(a8zSYvmO1GSZT$}h}HpagNZIz)!4SxL9WUN zo?aR@Fg9)Uq-2^c)Q#bUQxwV$ZO_b2TI0fAYf8L}o!x7olq;`WS-bsN;JM`8$2Xtr z)yS5lgFTfD;md=J!W0u>-q3Xxy$F6yjwnl3l=S2ZxtCAtF}pc^q%^s_H;bI8HgwzNnc!`_>1+&@=UOq6z6+_j*KR>!+Yn_`^m`JjfpS9*uZps{vkn?nP2 zwW|_;TLv%-lAga~P)&!?sM^eV&sBcmM>!!vW?P5mc);%sz@56CpEoX=yT}wZPA`^y z`wad_NWMHvsIN>2&MF)K{lr?#&JV^2EAb}kxQiv4V$XL>lR|%>QDAxH-9)FMUqjOz z9thGpeoV*AZNIU~sJP88pU@e0bz!U8>O?%rWa<@1Nc0jSXt6L#EcjL@mNCqAw>kMq zU}n-Xs%bX(y~Zu9MY0U^xLAr)6`UYhdoIy(CLwq47zgS7>nPm}_$-z6S(@wQC+hxH zb;8;SbgK@3k|c5RP1xKem7H2J<2J1zf7{(tQusp(jLoOxC?Dt_Pm;m?tZPw8n^r78 zG28J)@iS#D!e#NmP0pgGwvrXxrO-p%^`F^l{tS{%{vPVq2qne5?2TTNUr_;kLRqsjxZb>}N!Pl9Cgy1$=iZN$*S7@mvi9RuRfIZ5^V1$-llr~2eO}6N2eM0r6bElCY-{@2vM{LQ|z7*|x78}kWLjWwK z9R}-VT$9rktxj?7D65(Oc{pWzxMKt}Qj@keNpEXG1TtdSV7}>yP0cB#-0-BBpCpSb z$=8+z)}d@fB-!D*6`30s-~03LqJx6!C#$%pDx``6ClAf>BeeC@`dki_&=*JcOPpM9 z9fWpk)%B&LV5{-YS1Zl@?C3FWDYugCWd%J<8L3Fugs9!-TE^+XgL%?fRju{=gS8Fl z%Y(hv*EfzuoT*6)g`zJc&|A_>2(vulJXNYx?i7LoE*aaMLO;`cemd$2Lm#YYeMfX&`&yixVRxxt;3PxYuX1T6`>QYr`jU^+Z!E@wg&MLzk1GJvm{&EB zTlXq#gquFvEo`=(m#=A#l3KQ-T`1)Mieq1W&M%Xz!7`>vmmFcEP^K@{@zEE5)sx zSr4@y_Jn(;G!G0+lo^m^^q#Jw?Q~maT~7sfuJUF^_ZI&^iw5NpAZ2r?W+W$l(+G7tVGYRw@cA1Zen@*fH0n 
z2O~IxLL;L#-e(bp0q9%gJaR+dMfV_US}IfJi`vIvLF0|bn*-!42Wixdm-VV=rd#A& zYP}m#U*7|e+Co)r=yu2;8S?;kS9(lCN1sY8wYf+iE&{DyO@X?Y(|RdVS{x zrz=PL9K;E{d8CrcQ?hh&QPyGwy$hd-gM$g5i;EALg9DilzWV(-WaH|DcW6m8PgSPe zo-Cin8aQZmA;!+A_4csCBgc4r)Q`U$>>Lc+*HYCo);xT%&UDSsW=01G`?nI0j0M+* z0(bTGET#<8*3~+k9R~^6{#ZcTh%qsV*}RN07x?}L*~7FyT$&#W_`C`E@`Sutb^-BUjMPeDxlIZZy1D``xPoxc)df8ZqnSwYu(2;le}r5+uuEPBwqJ)*Fb$7Zlk*q-LaL%wli-U=8^c!pY=x z>*NrHgj6Vjx8V%HKCdcnb@Z7J+Bq;^g4$r`4iD9JYR-fdE*15(&BIO&9jrL3r6(8 z2ao>6$egf~Xaq)t(F-L??~fGuxBvfrGw>T35xW=a`x_)!FYZB(;p`dW)cHh=ST`fM zb@sE26{jKZdBCyTS0Y!rc9L1GzfbG*6tE5!d8}lmR2leNplPyJ7b3wt!+?D}(p+=q zT+I)#jy%r7!{1a@tJ@$8SYI0&pbj!$*X``ZI#Z=GGoCsHWCW|XL;W3^$w}eFI|tdp zX|!lfI)H_GVyr0DJj3@^vfL`%$stX!X_B7o_*c1A^lIy`!Zox;>6DzZW)ieHf>3i! zJz1RkvgRg=f`G)ogL}WRIQz0~{`QDG%-7uabv4jsF6Kb&@%tiMB()M~me7o~nnN?p zFslu21YiA+J^#<2e}zR&QTWH9>+lV;*G-VAoCZ|%zaj>ps6isYtC*$3!{}Lx2bgz~ zV$6EgKupURP}^X7Ls1%0Fd!#v88Cr<{q(H^sYIMGla~Y8BGGk%{uACSX+aI%c-IpO zMg>mA2*e8A6Bm_KfEHZ@GI4m~LPH?(0-FLJ`tw&u;>q*x1$V4J*(M+nAT&N+HLz&m zC$PW}sA>_QZG!xedx&}>3u{}ss!*c4mJB;`1u10Mky3F#>YwitVjg@AI{En!CVCVI z<^(M;CAcLe6@MvEKr|2%*T}XP>DFLHp#&)F7qa|{M9;5djzn*b_13PGjHWMzaI*rC$nRx~q+F67I<#N`FkiO)OT( z0(PP*5fL}Ephg?dUr;iKsT&EVV&2I&q3feLQMeT-4r~5kQ7kVS+75Nf)ScwL^|%DG zKnGEOrXqAcE`aCmk75>Z?=yN%6Y2fkJ#?)EOh!nJ7-?FIZ;m@0AE!#rcT!?cFDJ=A zMkUO>lgK6>hj0|2z>%*E?_QK!B;q^=RPL5bBeWns9rPCKZvk8+fn0$$Dm@3l4-*kb z21m7kg^bOI85Id!woNEXI*HS;zCU|Ttw`&V#ZXIXWDP9-hF`?T zDUB^2C11P@V8!q?jdiCJdm|)Lt(X>VTjZn(BbN(i49Uj`ItKuj7#}oH>%;u#(85Fn zAA=G@BxAjc#0BHqxd%d~rmUF4lh*{?v){7fJUf5n;VGAmU?34?nU|>%pF>Kp0pv=m zzzXG(2zkQf!J0}M6eSfY%E1^D??=ZTs;_o&8K*#c-*Qzjo4qcG5x&c;;qKYe&TT3G zCOkY8uBsQ+gx0VZ*6eD-d;v-fObnXHYSXA*I@V(t6NX8Q3%pKBeBjZesv+E0lhBqZ z8*yd`N3Zt|R{%(rt%>dVQ5lglb!>}`Dh2!3OT#*A2a3blTM~@vD@sn^Ea*j@wFbru zq}VuVSYIyYjW7x>pq*F9taEO#B7>|SfJ1YNCFXxniY$ewp!zeE=7H1-vZ2fF{#q+; zhE$kckEhYFFNn_Qws(IUnTNUBf)Fc*hrG&rKHAldY)qK47}26BkmaD+`po;8Iz_eZ z0~3%uz91Gow1XnEmhk`rEfO)Y0yB1%CvME*#5tGcF-obxzp8#bT&Po4>HsPs5GG3? 
zNL(_d6sR^%(P+i{j%}H@9E&&#|43S7-ioNwOw zfOSv=6POj5?{W?LrGzlE*n$!nV$4)aJaZ0NUIY|%(~ALLUkWsF;QyNtLk5fpSXTs% z1<#jy5n(Y8rIr?QksUb%NNZle2WZSr0wemZb{Vo#4)V{=Z`>uk*&Sd@b<-lCU0KkA zvYjlkssJwJnQI)fd}qcnf8tKZ+K7(fLnW5D17_^VeRpV0he0tEr_3V1Y@i*vJmXd% zHoV<{51}`S(yZ!Zrm(%l2uCmmp-gDs0>DB*0mlx{&&qMZhgNap_;p8OA>>ptqg?`c zd?R=gs#f?jvXpc`R!MlB)jdREp=#VRl!{a=W1ckGxqym9^E{PufURw{ zbTDioc#sAhZLBHr8FE;vK^Ebtl9c4_k0jwZ?dtfQvOasGIbEIszp~ivutTfNViPmoH2PRky?qCuoMwP!&`l!^43ZsHzbqf<1e|(N@<6vl@P~LQs;>_G z)bidda$d6<&Fqdjw0t1|)-;qk?~f=gSTQQJaxPrUNQ99_8B8t!Wt_ykR*`6y5DZJJ zNChOs7Y202h|!4RHx$@#5&w@kv=mBPng!6EAC|!8DnJ_i6WnINg3`!yl4Fg!4UOV` zvt?}j0pmg8_F8}+9pMMq=F-3ym1rR93yN^e<-+8{etSTezI8y6riDa3_EpGG01y%6 zy&lK|VwzG@g*%uo9DjmiXzQNlGXwY~2vGa! z_pt;arh$D=lNj#EhqE}Zp0ZqP7nz11mPb?R@<<5h+DF>X)2Twuy zCDMtbWGb4)0Z5Rff_@dX;s1K->mADj_(OvniCFt}Amy9!5jCfb$|_PR?uIMLPxdGA zla^xbEHcI(#i68|k#xAO-i9oD2T5V7Ll`B9ABlEu^QCbky$x ztr%L40aZvL3=AuOjM~4m*ds6|C4yA7*lgtdYRS;BQF}6b#Hd6vdy08WqPZ21x1qC-dDK2p{hH7W`gPL0@Hf~(* zfMgGNgL(!5YViHGL5b16-mX@#M`jw=*MxX?;{n(e&)Z#}eCj-hV#mm67ZW5iCpqo! 
ztAh;Q?t*1@-2A*qxiZHkWY9pUzp?0euu-}l>4Wd>1t6y- zcZ-V&6*qqMbOPO`#Kv*(_sGeP4kW{*e=$+d0Oe?YGo;YPNW-4)>PbKKn7EuFlz*o=vnu{ z)alJ^s$mc?*FiDlF_&a394(Glk04MmXuqM5S1r7g;5QZaROuu=2sPb2ZaiVvc$Z;fU04kXJosnymzYACXoP)k-JSKUp6*2+(aa1u5lj6Ol0B_U-cHO4;~ z!g357EA9?rvgkve(yUK2!%XuJ1?r2ra8@K1SH5Z39-zjZ8kf9YBVlA^peEoXrJ$yW zUZ(1cxgj92w^7ZfzWeEzAAV$2s)q$@0GPF8ptlyN$)Bw)D-U87N?x?yxU+ms?^m#> z{uo~>sSz}Aruq!Z*^V@D`+UYL)%tYrU(6^N;zQ{CD0P(9Zmf{n7-;->jXXHVnbPrK z-W9R@Ie%YK7X5jNRWbeL)nZd(sv2~e_To>E6S#%S^|7hId% zCo31`m@s;llgs|N+`(a~h>liEu_t;F4n0eORzCXdu)%ZWn7rG5vOQ0TmeB4pMzZkE z(bCuaEH|aHfR9ozYHyq(rnQ>*wMc5W&#_KRsw#E#yV>W?)m@#1#$7|^=ue&_L}&qt zGP6T2651{>;7qUI(U|jykL#2-){A@(jfbDLLicJgI2s$C0PpTbuMK$$HJbPTqVAo; zD|xy{-(X@*Y|oBu8xuR3*qYeZ#I|iG6MM(DZQIt(_kGU~_nvc}^UwL`K3%nY_3G-v z>gtW!Ri8CI>4Wj3l;+~zQ{|?N(&&97g-nwKm+s1_2-yqTTiYJLn#=EHO1mJb!A&ur zDz0bR_9Qi!Ov^^hOw>qZD-rK+z>$psBv^buQ{i+u#(8&fOpAPW+EV6+txyfx$6z!d}TJPB_yywCjjtXU5 zF{*+i%l&?)Y~pNY@ZRac`hs43;C`=4!(+j6$}IXk_|nNNrEyX0pq|=RaFS9c?Rxk) z^(o<4xQx)sNQCZ=7vF|ylmBm+59iCn@F>4kR+GN}X{6IL&&qHWhD_k2)KY9*mFa5* zfpg_)AR{RO!=!km_h~JeE}y|-5MEt1_eZO#o1t#(Y>y$UVEY`Vw%RD!=kmU%-nw?W zl4v}kdpMxy64#&f-K#iib%u6ti=JJ@Mm36N11AaR>b9|4$=b5QZ4JxI;XIC%qE z$}%jdG+7&HJ7qLDS!W-F?xl*w;3Dyx;ItpcYWApDBoPLophgqY+;X!rbW&W|WWzmt zW{FR@QvDCh!}Dew4H%3M(-AMiVLu9wl!kAXD@bq8;RZ5Ax)ZShDG3P-$g~lg#WBiC zXCVLYtm~|2pY%vF&z63d>f^Z<^nKat_uvjyIcTqEM~U+Qb&2$3JOVDpb_=xP*}K~0 zd{tM~X|_m?v&Kw2isOk7&Pg3cUIHKe>jh1P&*q%h50J%EzbqdHi{`wxPAjLE69NzG zvHo8wjkT-mtUL~XI$zsL`|!$p0Z`fICS1u`DfN{HP<8ZI9+^plJTI1$#y=0D?*YuMOZVlXz(H37b2*+Aul$NUvyg6T%BR07^I$qVMw;4?UE2GX{ zO0xVthy?|7jFByTT`%J$&rsfkPpgP!^^zX?Aah^&@JBGHe;bQh0wyrzh!+ zX2Rh9b#KkTrdPw@tEyHTqggW0TlKuCdwGS!->R(&L&-?=llu`vBS=C*hQ5FKy~@NS zMGg`j+!qZN6&LFd>KAj0f=3-35ZQwxB{j^u6|Gd%3ls9>jdv>hT_}}Iw~@X#iiAFE z3gm}qOwUcALKykerR_@yP7jF)|L#K39)PUf6Y?toED(~CAsD0>-5+h}wdtFq`W5(( z`T%sJ44M26;~AI=84l*MBbKqg<4_H$NhbVAan4_AmoL?^-}U-lH5{O=gC_m_Z?pL_`26f^G(Afp+309f#wE;oZJs-pTWBQ0hGT2)6zK8T# zx7%uTY@;S)Bi+r!K+)CM8ZlXLKP1aFh2pYrLy={Z%KVNM+8|BiAyv>j^ia 
z>i5@}0NaJTFBb70*g{?o}{rsokl+3}76 z{zcTAw%+Gan{g`6zrB{a4o&@Nh0anhiK;cUPkl(~6Y*+jrX@ zTsl~`y}S9mg`cXu<6C{)->e;X9HY-?D<#X^d6r!0DzYT9<8F4kAzq>*`g5RD{*YYx z(tu$I9rlO0pwh9~~jEPVq&%ZehOb9K8%b zF^){0C?10*Q0Y6iH6EPbe3(2wjc++vf>?CaU413Tku9j2MrnS>pUti{<+`@udPu8r ze$ear#H=_v7{YfUPg~)u)1UC>xDGkorr5lOBjBgCMe^dk8Tb~x%081kclj{P#qtce z6u;fguHps)F&(bMZH#m=GZ5ZbDJ{J2uTT9XGJNOfu{puYEh47WdW&U9(RQ=pi7?vV z-lVN&WvW*uv$Ku(8CNp^n3j7onhAgIWha`Dpht9dJkAp&j4%7?2UwTLxzp5Waa`z& zD^l4ONoUXC#-e_`wN-m_Mm7rHm*D4zW={Q;ML(n#q4R_#8?78ekPLcptiD zm}<~IPxgRfM&Z_NZm^R%em30qnl+2WJ2&;;Q$14|1mbc!!5!B3?o{u&X}(M&74M#n zabk+ywXfK$wthIQSG!J?so79_@Ekv_e?7u#o$0^3xfnS2PkL#J;jqh1wAvWmp5)Nm ze5&0nQ9dAc7w+_QZRX4Qb#IDCNo2E__(%^GOF3^W^R3h_ z4$Z3nu72&ywK9(D_BW%$h+-r6vCDWLB>x<_+(g6h-tEVn7k@d}(8?)`t&Ec5eK&oc zNUS2g`9naHg^6aRc#`Du*Mq#pbY6IrP^jrWWyj-BxM>hwJEwB42}TX*f3@iJh7~B%xM( z_gC@^Mr_Y~-AWI4z`#2xTT{T!VXf)Z-Dz;pnZNKov%w8^$^NBFvQ|=J>Qm=nd?O6C zpH5F|jGIP(&wG_P+5LtO$1}hka=hNVfRNI%f^>Zm~d5tvRpE zUcPjGh`qbqzp))VI*#V#54>4bf7Dz1;-8f(inVkpzp;TaInt^N32b5CoYo~|E^NOqfPlY)j961iF9QHQd#y%Q_=2+ zU<@es9V6(nz4&*~5i!dj$f)8NkrO4jf?XH<-h+ywK26S3`sX?RE1(9}^S#Nxxm_mG z9KsS+oXw{-34K;KEflJwx<#8HO{&%mP)-N-Z1C$A%Vq=VL^ruJ>O8U9OrwTh!0q^_ z@kre>)6BH#2tM7$8FZIh1K*yTVEK97%ykwZ!>2k*alodcd*>*^r!a1I7q6LsyG z6}K+GdmARIA4JQ9rUz%N4(g*-Q+e9|`Y1Tvaz8AV(4-#>Q={%e|veh z!IY`=!O;Rn5IXk0v~f8fyRfbkw!hFC2qSIL}WG_sX_evI>`k;<_$wz6CQVkdpLjfbNW`usVWd4ay$ z#4bxr5Z~YgJWoU9+ePGGA1TRtvpnkE{gbIXAF&A>l{#ggW4&4>1@)`(uG@8Y`LI*o zjcB;wL!iF7gyjv^h1sLW;@U3Tj8W3@a)*AfI>Edgb#*o9%el(-^j1QsIZsz!xx;I# z^B=&J)?bN}aaIOpStw`Tdu>Vr`AQ55xJKu$6=d-^{ z5+o#FEBPia2dm@nNjmJdYR^~>yKJ|UkJ(X_o(AS@%Bpz(}VQqtdR^rz&_ zT3RU|jtej65>BSVyfcP#6JD+iH4!B z_?%qhoF|*>Cf(Jf;KK#>xvg3SGq$h|_IbqwpqU{TXrUf@rC4v`TwA>jxkuYL&kT)t zcPw*;2B9DNoKTNumtqxLcK5Z*$#tXa9K{pnaP5cc6SbWrX^h2-71QLSl||=C@cRJ| zu0S;}A-=}vwx@KA;MQ|#otnMSyLPi;m$BLv;yAn6FM8g{7^f+#;haPh--R6CF{^~F z=X-kIrhAJ0pM&>}j@{bXbRJ2IovIBKLg_Odww^;6k306+dP8!4-ZMEl*p1u5^~OT# zU#hnk=s=piu?n~-B)XGHOt&;Cba2zjR>y9pT~o ztnN5~M=j>McjU(=!J^y2M+Qn(PW_o*XU9*O 
z^3BKF)tfFx-Y%i9{xVzd;c-`O4L%D>UGXc%2VRP22gm5z^5;jlTU zxC-iSC{JVo9MTqAs*&VNr7ijSNzz%Q)Pd`>FAI8WrX-8-t3HZ98LMN!rE@3U zuNdcc-@UL}@LZkC$Hc|3Ic~fWsou0XQ>Af!ZSOtX&kAR$&TsITJO^oV(dJ=#rafK>rOM`qRMpo)`|&c-?&Q7pb{rSK`}I|7L*am-K=Fb- z?{*ed7e0lt1{g*F9YqeLWn&60$>a7T_Sj2P`l!LZe#oxJe%kxqGd~5O)y%@00kM0B zr{3`yKqBB^V`gDx`sPOwfo9&Wa#1=D$Zc=Nrg7<9PqTLzA&e?F(czrCzplht_WpXIQK?0LatRKM;3V#=Nro^atgJg`^mb{PL&`>b;t5V!XJ zwNc;_`8*Mx&#lvtcy0C*BFnzs{bQYllW0Bbxz4sz=xzZXu)OeS{1#t!ce(s)QPwgk zh4p-ixJg$(vYo9hi%$*4^_|v2xLPB(l$~E`$@KO`Qxrz)1gzG_#`wu)4ra;AHvE}w z_eGU-qOY0!0?f++HEGhu2+sH*>9u_)qAzI?^@Q49#%6w#_LIfWJUmZ%+95w@m-7Dd zJ(>{&WQ!-p>wk%AWcn`@`+vaLzQFEZ7&HzdR)&AjZvSk|49x$E|Hw?N{~WV@?U}wH zXl(yV^-s$GQ)FWM2W|KdV2$Hn@gJF${R^P>1#4sdqNcGi{)1R#Vf_b_$Hv6`@BF_! zhy8Ix8DH5eMf#d46fXN@D(3-T(diPl;dJ zf68KF{mP5^-}^5uGsD;M7y9iVJrna++n71OYVdEF{R`OlkDm1laQE*$D-rY84Q1wJ z{Bk+=kN)f0FB`Cb)tlw3{w%CtHsbuJ4gY`N*#3p0|L?8%$9Dhy_S~Oge1bW<6$kT<{)dF@FE~Jr)8ARRv@3Ai455-5JBYSBIo_0evs)%PFIJ zS$6%|xybed<8H0{=9SSWxQGF%hKlNBY1*d4`J>$TabN~TfzN$h|HJz2=DE1z>7mGh zc+q4wqv`nI3rHTs5axM?LX+o<$8NF{QL6`aDE#Z#N?Dq&IPD{eF51m~HjMaP2fopV zX9D+RMxE<&5@=nW!bh0d!b7XuybArZYJaS&6;jXOWT^&Qr?{oN&)lR)yO-?wPMO8z zYHL9Ku$eICSY?`$$8N5_k_4O-*-4ApN4seFyQ)U?>GF@=L;8x$O%t(VXOXYZmSZ3> zCkC-UpBE9G)7ab+F;uBGmW0Psx(dC7cd^-V$|fSdHS{K7kb@$Jd0&t-v134=60)Wb zx1*WGlQ*dnoJEtid&T;ZN$nI9Q+{D!tr@t2r%#5OaHX|gT+t7}BLxN9OUPq4&7sK}*9p+VA?wEw-1vh1H;T z;nneVjcVp>nR!!pzFn!%c%`4_%{R@%DjL=CAT~a65^Pi@f=AU5PHnb(D9C;N(}ebR1I(#%@Dw*u_2gheYGDT@&Y_cx{4GaV;XJ;hdg z6L6RkAT#!SLSIe~o9uN>6+Z0to6Ke;7%QKmPNj5P#j?fFE9V0z^21`HLAvilDtYK0 zZCIWiTN2ITC}{UIB;h>5XQWqis^N}^6P(G;g_APWGe*AFg!kA*wZ~5qpGreTJqs~Y z$oPc4WG0*jSRg!ADQL9IoPMbLd^hY%=b90_X8+Vgsjb*_*UGZcu)tLmUaaTYl<*qn zd(LV&;I6s6X|2bMnk+9?l(4WGd&Iw7yvU#1wf7b*EqdAYs?5S=JYY+}(TF&F?eHO> zH=LRr)oclZ(+pp<$&o!ch`-DQCbXWgD-_4SX_vk!A1*X10Uy-LX2o!Mge;1-cVJE` zv_;qJt4n2-l@e2SeK7aA7z0nFcadKM{lN0^S3-2Vcq!F9#SoXpG{xE8xku&}J8UK`_CXKa@;avn)_NJ{=C>G^YMw)X%0mXNGVXYXk7j)CWu}tb;n4 zc`H(n%RYy_kE|rjvSZE*_-J~#zi}A1ZN7hU(0LN``OFnnGj?q 
zI)^_g|3@{14KSxPS|q#jZ4W1K%@k*GkN(3YE0>v(hg?aPV)4T!o7qh8)rN5vwfMs& zikbDC$Y}~#KQMWE6pBe_?>zY}D+%4y9 zN%f)6rscDnlJ|t=Gxk?C{@VrrC4Q=-7QazG{(p1)j|At@j8=U8;NZS2MEFP0Mj+=P z)G&m|C@6D=hBtZKXdt8p#&_`4;Jk2C+IyE?oVYrZ zi6VX?Pk)FT%xzs+f3FgD@4g|esXEqWX}Uf9y!X>;r}3liUL?EnRpbNW$A^?=p>Ht8 zEXozE9;p@6Tjb|X^X(_n5!*7QSAlmTP^wePdnTg#VcT z6zU$PNWTb-Ndd24iAjta&cz_ajvlrI%f*0j(0v+9;HU)6f z$D=@>6f(afWEZ>igAREy462XdI>1$ohgyfpoNV0)UV0RoyWIT}McGEVOrrN}`Q- z%!NZq^FYi};8E1krT+-bxcti=9)^N+ci<`BvhL9}l@D(v$qqYBQBaf@*g@oG0$iu2 zI)~Wk0WjmjQQgK?Q!dsLoQA zSJ}na+@Tz5Dk@6Cx0AYipxT&0UbJeN;aJYF-!={L|v+LB6NZsyu8Wp;lbA0D7JHZdf4LiNh>#QmhR2L-dRaUkr6YjXnL##ls~zs{`}HTOapk{p@t>c0tc?k( zsR?^j(3=YxL?_8U39(bVQwcvpSYj60g@A8Pn3ozRVWhGOUgIBhz zLp;~&KKvON@w~_x+o!lbE8_d`lJPoU!1pSlPaO4jjlZ!~nt{XNWu5J4l(ay|nml%n zW}QE!8oWy6=4sf!q2r|4sCu<+{Td_`qt;r5nWp-s<7TFjvYxMgZ|~N$^%rwzYY6`j zESz<+LV^p6OUOEDML~~kl0bm+;J%=s+$b(_x#Kd6O#o{HPvSX?b}i(Kk?d_y!;rDx zu37dIPw^R!_$Z=N1LoX8)6tfl#JC?KWNpKSoJ^GHR-YL%nav@zN@217K~FJp>}CVN^@ubE?z$XonY4*Dl4vbS5hl`96(=~r-5G2!{Z+) zTi#kJ=GLshr-5fDnVfQ@KQDD199kuCU)Y_`5v96=H`{yycbD zdo8=AztEERNfO0cWDsj=3Wfn}QRP=>jo+8qliWPhrf#SlM$@_SC+b$CFnzb@3)Hxb zyXoMVlqg~K#oC>k!b`eKSg{fFwbp5dt^z5ub52Ymy)gI`TJ1+7l_3?AgtQ$Ad^gaKJM+4N`7yA>UX0&RwyA5IEZ`EH#(ldhGN$X zpxP$5uqvsFdnalVZ$yxxE~6W4SB61W+(Qr#x)OjCLBp}y^Ayhi&10d0DBKfSn4i0* zd67VVKSsrVP~I;^is#=>e4H_xK_{1>vVnDZWJSZ5a*Aw@`y(*lhGn9cfI5srofApX z49#8VTtLOCFf%*WE@kwsHso(@F@^<@vSUFYfK^`NruMFW=m^@z73oJ`(_Ad?UYemS z94-Cf1a2XMA4taPPNYcmj3&Hs4yYKEifAqU(}r)=$P~FzTAT@$6HV_7CAXCktY`x} zS7iyT4ZAfVOf;z`D`I@Oo)OWY(iuXc8bP4lVBFMM=WGp6L!kb7j1B+uTdWg18C4>C z*3p%$qw^#@_cX3mGcIm#o~BLLE;@!+0zz?;D(xn$-g6iBWO|1HkP4qPy?$EOKd?KJ z6yMd~%p7Iu=qQD+^dZe2JCH7#b)i-XG89AbhSlnaO+CCS0GZ-?-12UM1ITe}RwECV z-d#`79^6$p)PONL!l`P5pCye>av9PwZEXi{Kjc5{+AIRFo|!iH8zhv=%wR#7cb;V7 zIs5tR*Yd9=il&lz6+eBrjA8CYH!aCAIKN=1(7kbQW*XIXKT$~7UmWbPx4d@JHsi-@Q(6Ixq6y2T!=2mmCa)Fz8tiFE?* z-Eg98gou`*XTFynY0}oPt4GRA`Jmd43Sjf6^RB9zxdAGK3h{tvoPpnLdBeC^VY{n>#P9feVk0?Nst}0YlO>BJ#T!k)sn}lAJhYP3Eu5^clx{ahxMiu>6R;XpdTT% 
z4UfZ>9Z=p-Z`Qt>kev|TsBe%@PFEINgWb`7gb@6HkOs)k!B5~4KwlJ8z}LR!KBt(o zF~Nk1cnHF*#3vxlfrR^wf-)Nr3a?uYQ4CdS|7GS;kLW|-)eXH(;7|*N1T0Y1h=tBe zRwx=2qEILNs_J6`BvI9fgt`MTsB33h5iNdm+6iOygFsg+ zY-Jw^;DvfxE)*zvr;yi#(vfgfDw2f4n{ZSmQiLK4Xr-K1355r=Qp8q?q@iRc+EPqQ zgen8hB$pD7N<_j?@Du)0P3wd@Oa7pqRt^0DKuP>5pBIU8E!jt@uRx+2iUr6^3{fOe z421^tBtk2ZD2ILrj7g>>OeIbwe3z6LfbK*QPvEHNyOew@>LUhxQ1~^ksSM<>V2$Xj~ z0nSu4!Z6t=GLo)|&AOql3C(Ju=aQC6I|P6W)g3y(h4Ky}z*=pG4PdRZg8{HsH7G}6 zr?ksQVW+U`Kv|@;t3+9(uxmh3qqHkTQKPVHK}n{(CI`@|?4SVXR1LyW(i4{o`a~sJ zsO)l4a1)nu`rH8vl(~((LRKCDD-X~O=g7ZTN#`2HORS>hR|)@K6ZX%tt7B5+kl5fM$b^ao~ z2BVFyKmenJt(KTyxyb%Z?9Q|Rqx=OBv@}NR2NoG&>(ivlQ&gEM^)xP|upu=Lq=vz1 zjWv3dC!NAu>O;(lxiOyqRqr0cx!VFeBu#tqVjS^UjgtA+VWlB8Ir z=cQ*;%2KFOi3#Z;1$P3oWXUv)AaF|ucJGCUbc=$;EyDrPRUo0yPBERxOO zN*tazx0J?DRZQVxP)}WXU5SQlFqC{~AO!zCn$R~PQuNKaQ9sNyY?lj8iH6%SB`-Cv znJ$j<4`s$_Pl`~gkmvcppD~~_fCx2yLN-$BPbfT!cM+*r2*+6AW~#R#SD(}8)=agu zkLgR`aSnY8{9PTr8{QE)-TTlT`0S(BBYrZ_n5(Nk`xL1Eg3H}$@)C9|RYcFRSFQDu zA(d@+OfQ|ybiUgvm%?XqC@xLN>hM%UR0d&F%~Ole+*N^D;@2yZYUjHdy8BY z{6#if8J`=AZELfLH zKbEa(^FMYjT1~MwZVfy(DXNrax2whF97T{`bXa$iUS!hlb!2644wF)2((ZQTT+|*= z|LAk1T-5GyWSqq)8kbJzxFMlqWzrt1k2MFMb=NgOH_N2A!#M&gy7da36+C3jJe6|v z$MFpP716JuYv415Q#8NkiQc3+T;G%Y3tVGjJel#|-LXE!9JyF$XdIJa{1NaqU&^kw z1&=ehG|HT9mQ%W}d0n~)d#&zxtv#tUh_pPlh*5=6g-OOZtp}4g)r0M;GwM1*6w@7| zx}}iWx(wciu4NQPnByr?DuvQ_L*SfB#h7C4O2tG!n(6jD=O_RJE^_KP3%)ui{TDY# zp|22Gx_4RqNa`0j)fK=JWd9Xq;m?gH3_^~Ut|ze>R(v>Ruz3kR!KI5GMsm>8&WNCY zD&+IdUOrMzl!gRowjZfAFx?k-X`j7KRh0DVHT78&;&yX$$BGm zGG=&h&uBfR)xH+Kh)()g+8oHiDVdpymr;j$D7|mwJYisXGIahwdNFgutvZ>1oL3TM zP9xu_vOJ0si4vF+Zjr0?_dx`jvk2ob{VnIoHKp0nG%L5 z(t9_9vUL)8p_PsJQI$3Ci0`{5S2X>~l2e>Q#OM_YQxyly09{q`K&}HEfXCr2=uCF)2OUsdi<)qkTEiWqX z?1SSS{Hl`KtLdHxiYu7y^W)YN#}(F<*&Ns^sREo8k~nQGFIvM=Bd!^wbN46F708Km zo;8#wUk=O_*rPaI(>i8LkF(7l)#}60e4|yYh2Rr_*77-#AZL z;oKiAQej_hTfNrU)3Meu*I~J;_$a*Mo>o=qfM7?zta`~dPd-OqwdhlYxq2e%-EpK& z&r;Sjw|aVC(ti7y#DddC8W=i_4gvBJ5vWls#49^3d_Xvvy 
z0Ot&xA*!fl9<2iP6P-c@x%N^Zkv{0Le#kNa^J)*SUt|a@V{VbKu`SoyGZmv83?};?L7HkFtJSZf%zptMF8w1D@(jE8< zn3wN{0DTU548$EsD|o6e>Jg+P1d}hF09Op8BP5;xIT89_aJC%M7%)eW3IUHC{22Ip zh&W#nB2Y8%K5#oDz3Q#NpAR??_e0JYB;NKA6 zAo0LHK;K~B@Sh+!px@x%zP&-efp{W$LV3dBfjoe3g7|;~A%S4PZ$OX_SkM3QezXMT z3FZm!`OOpB6Qmuf9jYC!9lRZ(9iknk9kd;>9kLy^9qbH32T})2=bH{l7E~5^7DN_k z7GxIK2Rsi959k>v0Tcl^0R#ak0VDyKGx&HH-Im}Ms2=%W@GDRmU(9C0W~64!B}7Z` z3(yP53$TlC7a-P9*5K9<)}YpqjbQAc>|pF5i{Oi(i(rc&YT#<1YG7(0$>7PLK45eZ zbf9#QjUefMGF_qqq_)I=k(WR!pcWLy|3dmb(=&WR9CUWQVWxb7-RkOnz>WF*@0I;R zAQrw|I7ELTp({Ya{;!JvQFaSG!z082uxkx7!&pQ|H>q=Mmyt zGA^ygoPi-3ty|ffQ~4BbXq1-onoV;=p@SJR>y`ztRnh2?-HUJ0^t1fplxnO01sOfB z;N&d#=Z?)p#zGVg`(!SK7iOS5F3BhHryr>c$oN4DM%JkOZ^K%`+b7x&8@Velk3FW& z{htm$c|@_S@*5Vn-zD%K2z#h8jeUI77uW;_acI52+E3f=aAc@(YsU9C21ALLwtLa@NYpQ`LQ^l{ z)SMESG;OaTNNtW@iHMhslciFkuAh($YGFouG1lUkm5Ww)=-KMKuw7-96Y02ZE`>>; z66uI+)#J4xgqf{q!&l`91EeeD-*#)xsLG)}{>_8AIDJpQ(M zExEuaG&u$CNVqPCgkz{4sj!Qzu*JHxytshXJY#&7 zy_nY@*Tpe1bc;N!{=_##Q!a-UDrJjM5v`4!T*XlJnbNdhk}**}u=|2R>X4}rcOr6G zdY9-3Zn8DbF`|%of?=l0j@?@LIgnqY8F^Fbh`S!;VORW5ix7{vkLkwFJe1{WFE z6&W|hA8}kJq-6H;@#|4tZUk3E$Adl{ohqMQzP$nSv)1pv^G1;GF|PP09!PjnPUc_n z5?t{aKQ+Joc*i!z?l1frF|UxPp9;T2=@633uV3Wg3!W*zRP*{KgK4}ZU_Ge4B9VR->UB}`Q$HVi zR+qxh`Ne0N+>aTt(qy(gv0Ff1jg&;5;Eo>@ z%I$4$YcO9Beu$X!nSRgD5FWJ(iSUhipjVWkT4&6-yOu)T5s;bOk=gl>Gnk*Ro&EG` z5NZd?1pm7_q1VfGaT4Lf0K(YG?Qwec!-=^Qpgwv-CAjDvukGaJm^l6Pt{HE616?P1 zcSeYPkyye$s_tFqaD(PhhkS;^2U;jF z*FAj4_Yo^y5IKF z+r=`*)5a!|Q8g6{Q^>?WHs==?6$7|P+KGsS|4>2`RnMz`#gFFJu{(;Q5Lfn;G;d!$ zE6T{oc=7|Ut0j|*vfySwWU7kZ7r1QZ>u`rX-0NE1Jg)z8&S9*)GU-I?&^Q)ffr2ly zD}u4sUSCk~eK*Q!TIn9^%Z|JqpCH{$rB)@va;)>gFj8sbNeg6aESFc9heya(c=(Dl z2BIlfsd=kT$7Yucd;|50o+Md3)WbN2Gs5{j{ad7y!Q#Q`V~h(w;^~>lq!)V)NZAQ> zLW|wFVt)LvOacXD+X!cR015^p}?0!b|bVtR;eeSO(PV-F%zKCF+prVp8 z8`~DaPb74wGI1zDn(_r8P48{qg0K`mc6+V({k(eQHUP)7x}xXKga7y~USKr76SjfV zLarEYxw(I+(;>u-`?;Vy)1)rK7MP?k;fH590FR~~&(9jv{+KR{VGqdxRhxuLk>^!S z$G=D%D0;A@Zb}K0sP5_X(?2xckOhn4P+BNrICh1yLsx6_!%LBIA=LiHaZYrTAgvdf 
z+*CH*aOQ{QvSAq}8h%T))nb*E=*!M%xd%CCu+u4fDbeoG_QNtdsh8Y-;)4x4m(|l* zTt_gZ$IVQCUY$t-;(3zUsR37YIBlI}e+hwcZ{05GOX9@5$inhF7hdWa7SF+zxId4u zrFbnjJVj!sl=lAT2Ac6j$o;@C3unii?ujl6F7SNl4SI3pw@N%wQ$4iIXmhW>BV^$; zl7)`3#}>>JcYlw>-b;#)rqTEa!Y9+Aeu!WoCGVdXGZ@H}4v&*qh-?2&zcYz#sjC`q z_AWA+wi4Iwyvn-a=bFz2KweN52Y70s8VG4r;1P%VC5lurhQi>pa%$X6vx=G@{Qb5# zmndqX)9yOgOs%V#?j4QBJbnrEZ|!oG^PTSH?BDf)3&|`&YxwbVKh<-3Ot?CfKH!(*-B} zc!U_Kxmf<-(R&ijXrtwD4Qb~O@~HEtSUv90cwbbMRktny6aoYobLfB~Br`u0v)uZ^ z)ue}+wt9)PzL*Um7jU$d4P%#Ck0#pv#XC@S4TTk*W|+`K-ovqxi<0 z>QIW|Wd0SJ9sda!PMJSMq>*Sw2w4$YOs|%ZGsF7hAmoy5*$?;(z)bdv(Pm>13EY&Y z&0Upp#m4+`lt3$97d&sz$-``je~ET_J&X#^MsjEI+^v(oX0=mF@w}YFZQ`kdvb@~a zMO$VA+b;8#?6!hIC_g=qdF|1U-0--eU%diiSu%cVRR5?{i(8jGp_X|f{uMSO5 z(u+(@AtRP@;OY>IR6O^b#{!{wFD6ljBgDG?F<}pR*?^w3*(y$RZt^3Gk%bf*{OTHp80c&m#M zG6V_Shy*IB3G`(|>PqJOH7E@da1ifr9}>_vd$8Lj>O^0Fk=5eum_)&^`>d>&dC#k7eI*b~Ze*NadD76c z!fk}ncomtbnYq@63N?ouWbrpl>t-C^N-?oL!)Ex?EH#-4;=CK6Z-l3p(4y}EfBua( zYjIjJ`KqBX>c~nkxbEiR&>DLU;BZX)=CVas{&!WS(AO(q_4dyy>a}1sB~Jx^Xt7`( zH8^n2+!H29=-GLFu%|c98&Zx?=|^V7kb)Y?;0zY@{4_u&uy0XI*Zc5zh4IY13Jx0! 
zUl0t@D07?3r-YBFBs1a8^>NGZ9>KkC=r(WG0P4?(C1bdgaOR$xUNq&D;W+so=hP~^ zGYDhW5|J-(PWY{8OEqS;z1MvGu>Qh9FhGJ)Xz#s6RGD3ES zB|KtEOb&G6{X0+SIi==U>4_$`Yu4nGj0-W`T85@!v4Ky0xJcx<6Hn zcq(G&r@_QM?VaUPAr5R`gh`4Iu_d9Lv9zO>rRt(vf0*IIlfL>{X;JB94_KB_TBiNC zaAl4pZ9~vFzBNqBlIO;AKg&6p9&G+JuMnu})-mUnxGa>g5w|&bpfSD#V$6*>mG?cV1$$wW7{+I{6jHvsHje~$EC}YT% z3Ass)H5M~rm-r3}^w{^a?X`#BY*ku7%j%*N8D!rX?Q zx;{8RUzoc*TZ;|!1D^#$YU(03cMb7<#z1m#I^ft3m_{{#UxrS5W}+@;k{?UM$RM&X zO|UFWLxJ!7yKdd~N6@Q)6t%pIykXPK0Xtp$+IC5G4ddwU`?qdMZ}kVwC7i{->=%z= zuHj9T>qNWX5P^0p;24dAurtj5?D*u~neVO^m%Uz?pBe-)a;kwG;ywJY`2*J$gNoFh zb#YIiLwppl5i(B|pM_vfs+ivra0Y=D2iU4+q|^aL=#tFT2_~xQtcGOONP726pfUMR zEW58PW3MdJr?Vo}a?HTXAKyFh4*F2V$4T`$oQdt!BT<`+4%<@^dTV z9y3JBhk|2;%H`|lmVr>Z+;7j>>$CGNPYu0~nIC7h$4~RRYc03oxTrPvy3OW``M6~v z!JVGhLj%*~8(wd*kH9v&arVy_IrfiHUTW2K5BpP@$#v%D6W}Fj2W<1JD8mBpFR)jt z87pLsQ>sd*#DyUCN(7rP89{LGd&iWoj+sDp7w$DE{a9PO?J*0Pn5pdrmdtIAN04D7 zF;Hl#^gAjQ3qC@0mTK2Fo9kP`AK70e$31X?cq+s?!s(x_xAXLsODp)qgll(8kyib!Hz40)^zm9O9wMsRtEO2zfMuq55uI655cYUP z#+*8ZH_qfgug8hZFXt-!_#n>X#j}T~GMPnW_);gNR6U6@q3n3#q6*xa;3u7gZyeyj zJi&{biO1F^zazweMz_SmyI!Oyz1Ejy501-UT|1Fqb1X#IxzgBt2OoEJ*y!2?C(%Og z>8>=XQLk?jR9HJ+0EnG|{KB~rFro=zn3N+M4EO^g!(R+SdxGf@V_D^YdWRMZ&e;nC z(U9glo+U5e#L1x0j33Pm19N=<^Xyk$yko;RTrqcF; z5ltIe5ZtucY{fC#`tGMPJl)L}S_B=(#t@{u_)BV5C9>*n$cbHtJ+8aD^TmPa7$IP< zWb>kdA-C*r^>dl@ZP-GP^kA7bb@0V6B@5>lfIo#4MohI^kB-fIoY`zxqhpbm5vDSo z92nmcc%B1Fx~fBig~c^Up99DaC&)UwZw(a)FGP0RSa_lao-1aZMSg!(Bt}~hQX8Ab z84lgqSmz{JBlW={prnzW^mlkyKjDD9_NN~guiBL4xg-PCH7ai3_wbAhN{g;aYm8h2>k%uzPv z>K1*6(O41-P}C|M@=lORUz1nVD`MNOXcGNj^=aWAI`EGQb^U=&`q_dtTnjJ_9z+2xrpj>_bERH>h|TeO(bIn9Ms0IQm1CP01^k zP06o&GRszvcNYJGvbKMk&URMWTHVbfT|anjQCG%` zXByWmONRt+ko-o_8|oVGjCZxvn%L^4Yw<_JEmij7v+mlqVt;Z;om~8eJ=p;n;f70Z zlLx7KY!P-nw8Bvti_Z`pd5*VVNO^N!!W){wEg+C@AKllLni+i25oS9~@y812 z4d}(wupD~aYrmj)MRWqEh+7(nb4mjPk3Xt_6BblLsS3?Vy~k$*C%*_ zu5i%e5?O!5*Sz)NvCR)pWPD*C4U$JN8XyWT0u;asXoosKB@tOz$Z*gwB0l(QTbCg2Pv?Q<4{#f96M@y@1>(e&&Yd6`;$YRs>hby{p|kXd$+b+e5+yd3(z<4{!HNhYOu8kp)TMWYu>64NC64cyZ$p4 
zqn+oB6Y+EO94a;6Eql`bOk8qC*pIMoL`MZiMfel4D{?9 zun&ZVF)$SPn<-w4Z)FT{egtekf`1DrAF2yjn^F722yTnTX=aN*VFZ^XIt8ZqO-68{ zo?Uv6cpLOAj9rC#rW0Y$EFOcwjd|R<^P=dWE(9hV+vb;5OWiVyy7LM+CO}+pc?P-R6h4F0$KjA8;RZDdY7Q$E{w*;*tyl_r9?Aj(@#>rOoe`{F-mc ze+Mxpg{?=v4fF6(PDjK5e=r1dR8K}ALFg|FEGU^zs~$d9J;_H@9=SZ2e3Z&A`R}X^ zcsF2Wiau2T2*S!)P<95aEdCqD8jzhq8|x@rl;%tBEUi9z|z`d03fcz`1ZqYD$iQr1t=sp3W*Hb8Z#YFV0 z!I$2~zY4Bjg*DDy-)jm_%8;9N+tmu3dLQ9QSZ)OjtlM5HzWt42>sMWiH}<&d6JBsL zAe+%MtkoTH`KlZoFc+*dvUuH^8&}jY29tpo9A>*uWO%btjI`#7pO*nXCk!cI@bVbm4(UxtVswAmhzT_H&i(l~wbTlj98rAe6qTtULZ zzc~9R_A*Bm7vV31y(HXA8%(01cym$Dnv9&r;l$5@737kyBsdBf<0Kg3rX$qWQneKN z$~Z~Ey+;*l>s+CfzCxXp__<&%`mK?<`-atlWf8zoQO#-f6j9rIj zeQIpAaR7-+d40&3;%aL{jU4<-#6pWEYV8Kn8=LTM6EsJZt88gYwjFpP3&tnVdx(S= z9$un(u?^mUf$Nl92)qXTZV`#e1l5mCa+(lf_2}#u9Kk{T3!BEB>0Gtj7}+ zp5~pwglCI0GB;{=U%Z>=0a$5r-^EXt_r4ekz=Gv$Bk?0&waX9)mH6px;E`5r0P$1M z0v|c{Q1vtTj{y`tO_PRd2aGD@)=DU9%#=4&em@U|G!y`8P0@<-+l9TA>~=?{j+}EQ z`F6;4vp*&j&sMFB<2ZpcqQ@?KVQ=?|ZmY-^ldD160{){d9*67|=_R3nKS&t*9~}tw z_4kEle^A+1)+`If$k5k@t5&aGQ-#07sxu!f)H+)(y+w6VX;e?_h5ddOvl1Yc!F=Ey zRsqdN&0E4VxLM3A1Vj1_c=9@cY%fspD^v|np9HKTf=;yoSBmj>bUHrzu5E(fR$0y~ zsR&&h1YR9)i@vL%L;h9)&+hP%Ok*q_i8)v4kGm9C;0KC@!7uXnSk(-m*x4wi35XsPwXlC)Dmq9ExN7KWGepevUit* z&)Ww=*Vufvy7pYy5$XEWj}PttG4SvwU%Y$i?bol5)OFhEDAB+6@D0G*pTFvwFDPwG zc0Q?ijDBcGGeVrESfz@@%7eLNA_Jf7FD(?WoBp6FgMHo z8wwkiWUKhP0*(x1-&M3@;kYg5W##x0uOlxn+Lp332QW!Rrba%-+M*tJ(8iH#%#r%e z#;qk302%4J_7hjvdlwBfxND<9VI9Z((N^D|f9P`^E7DGh0VGB0O@_Zzccom#m1R`? 
zIOvUaZ|#IRoL~sn=d1p&%SpTzZc93gPdQWYw~d!ydW*OS@X?Rmt=eS?VVRCK#u`mt z_}d_u2~e0Zug`Xz_b$~Xw*Xofj|ZfBiI4!_8_~cU8cW0elv+wXm3%)P@c~s-;$rKT&y8d@4lEKF9YFw1ZK&`!*D zA6ws3-FVHu-s<6{4JP!xp3x`Thcoefja<{Se#82n8a&>2%gP#y+ankRn_%@?_1>`8 zo@k9F+EdZ$%(6|L`R#oP!R|C0L|KqP-skeVY|)H2*-;m-N_TC5{OHzo(U@eF< zFbcrwnB7bXAod<}PjHAU(s=6BpPz?C9Lf#9m{QO;kRsCMEMrcD&S`LvEOnSo#WQ+K z(CPA9+2WZ}ibR}+z9xSe4PLyf?DhSyP-Ah63=G!b?!Jn$ix^3s-uT##764z_6_nWUvW?m7hOQth1!S*Z zV(>?A`NHqj*P%mEXFQrsw^WGHscpOT!@b=j_j5|KSKIe^nj<75UC(fIx%5sh2_yCm7Vl zDV{OMLcXX?PhBMb0}Oz$Clod54ERIEG8f_diIr+i6{J4(_$#b|rYN%$j)E=>Bcp&b z6KYQXG$s(6jt8vZg~i~{d~pFpp3Vs_hWKf#Rs;tOrIn)~lLnmdsA&Zpri-a;we^F_ z;juEMR+Ph|@K$GUmi-hf81df}gMt7xE}`%ePm`>fF9r$BWL{=*3#{K4G}|3+iTG(S zB0-KdTa5{`&E}M58$wWVU2~~G=E#?k)_50k1e>>tj_0vPp^<2Pt`Wx? z@y5ow&gvN)4EmCvf0ZAku#{HI8-um>)Yz^KW2GtiQ^^fuun2}Gq5*Gg z7<1>11_$2gcwT|+g;0Zb1q20ix(*!wfxF02}B$Bv_!48XBmut}TC zY|`d7n>1g*zmYGRX7F=)w=D@5Pb33yEim+L63(P11++w%;PQIR#x*SpQaT`tbjM?T z?rvcq3pS7F22tPz?r~(xYft91m*&5g610DHg z@vePKJJvQk@i%Tgd3U!MYf0S5sx?nmx44uV&i+2JI4rC<_`O?~-8Io}sa}#UJ~A@e zHhw#d+v|ai1LRBCBJ4*}Mvr}+6R9u@{j%!oPac!+hTr)rTd=s7! 
zpCJr+W6Fe^oNxH^oUzv*nZb!EsgL|a1N^xQy|EWgFwjTzgHWp`3+S!9bp4dJ03*-) zoo^^=0}^cEgd+7dkbh8MBh#=EJ#6Kuq90U6E1Gw>@)B%Xm8kT*X@>xzW1MaMYg3yZ zo?O(q>ydTIp{_-;o+d0tbG&VJ%kDdZ`LVX_np~2HE2aL6=oF34sMnIe{n)Lacy@2A z;0nnmNw&oO!K&bir$!I1O-7PoR`RM=rGKfr4Z9UPjdHxL8;7ogzaCuQJcM~>@bkyw z@r~A#_<4*4`reRtZA=z+^yFGrwh;9L`2nJ3pk<&ZcQ!rI+XK3o=hhEm9x^mw8gN3p zk$niDFdP;o>nZ@JjHxvNH4aslKahQyxgD{XmJ(1`HotA2lkARI zOmi1$qi*ZDuYdjR{UN;tWLthKMb@sK zy{EK?`dsS^+Ll>mzk%70~{2c!A@Jb_JA5&qqN{tuS1vF8dF- ze&M26dYZ8Y-R`h#qKkJg@P8;Z=l@&p{0RQXS2Gr8NOl6*XEGO`!FTYS3vwmN81?vH zipGT)SiCy*_h8`Sukep?Mm9jxU|4_qtYG#M7k0dsC1-0V3iThq=gqwpAk-p2oEmvuz#4qCWHw90@VJ#(Ki%u zg`3bNj0(6$yIfGfRchg7K+LNhWS15ca4A$J1jtCw#|PwyvRi?&(|igmFZK!;P+Iv| zQ-Nhg@}aRqG~2CpSKl=<^6}y1>rj!1!t2f5j)=#`>R6g&OrCh!-7}u|?KWE|y^-1N ztX-0*T2klq)$4VHg*QeQm&v0<^p#-&fM=yGNlv-Dl1p|B*VV1LbM*#BbV-pw(YI@( zp4010vc(rN8VyXe|K`p3dx41L5}CfX(PpOz_+( z1)U6wCP?(mn;EFon_#5Rl_bg>fjrwxKE{Y1D}2te=aH+&?;Wj5Z+>9o%7b~v>W9il z|HRUdcjbW20lKF%*q-l>J4-TW_u!hrgGV;sa`Nt;WlIS|Y31$gGN5oa@5^`HH3<~Y z(gtANF<{+CKpv398nHK#b*ocNxuzXWqy#mh6o3x}Nx>Sp7P1CfSABR7WduNJoS*JW zej`c1$IPaohHRuXfkbIK2>oq9pK3}#L3;;-H7_2Z?x%?7DEt))rzlVA)mWeW=C!7s zCc>nD(=(_Q>`*qa>!wl~kp5Lt6-4L>KIDK9Rr8|qG1NvZ^(xSvCizVTGYKZbOqx9U zHx-YngwT^Q@Vv3|T>1NsAnkNUcI4&^77r+CIdlLX+drZv0rJHB;O zOWUr;$FF@<@b=~}> zwbEUO>joy9VKj_hdXG2+qG22MDO4Y_3s7JpVd>U%yt|~|-I~C6Yru8GpW3ajj?P?q zC2xUG97VZ8Q;#cFsP7F72)!r(rlI~xp33}L)sPvuSS2quaRmh&SsyDX+Oa5n!G%U>QvYf$0u4O}!)BdqZD%Nd%S* z%#y>T(;M`1W~gPe${ZK}Pzo@lO{I&m4WC?-Fq(PE4Ot)xbVCcU)FgJBx(;-zy4n^= zLDiV$uGtCy{}e+uTXqYK3a+d(G%prnVU zKboZ-nhtoMB#CYT1ObzzWmT*N5{Jl8dRm{UZ*X*6RVO~JhJvRTI;MK`(3op+CgCuV zI3osJ@Vi7`@ACNO&x}_6vmGW??VZcwOZRnktX+Jiq>6f;B!Tmsjl<3F>&XFWSdb>$J~F0)?+Y(D$5$aQY|M{Ju8%E6`Lydo z6NHKzK*esN(DXDwN)c?OUIC7PcpVXvqxL}8@& z`ktC4HV*IuXJl=GDpy|{etH2ih|s37PpnaKq`wSDo6E@5tA#y1^@57bMMKTwmm~9X zl&*%YoAneR9Vy@i47FwFsK(ROa{!H-)RogyJw2h32;|>7-VWjCUlp>1ELb*E#}?N2 zjP$N7SHzK}J56IRzn=K~w7*jy&txf&$OYtt=?;>};x_l+Tyv 
z@R2LUuacwwAuk8#c2mzm!~22Z+h0=hfE%#KkPkAMX1J8({M34%Zz&{!qc_wxgOB5bJxeDgSa7BDwrzbIyW@qCp5~>6 z!QS?P+FqxY8_sb)ObdUSQ72BJODxDAQ=o}$s126_8ySfg6wnMvC6tPWtA>KK+yRC8UNo)?P@VW^BbE7W9K2Y>NEAJ zPmLd*fb6|I26Swbh?}7UZ3T%Au&Vn+(c^7OSur<7b zuBV{r%51G&q=Ks#QuE&~1O0#F=pa$N0wwp{*SBt8e~`7P1yZ?gd40$2T>ud9;YcJ2 zYMcKdT0Tt4SEAt$Q9uHG%<2D8K8EVxJ@Pofl!UET*Pv>h`QYoheqRw{5gLmdWx&3TBFr5 zcGmA!sa3!}yX+!GsW%Zt@4`+aJ>9r5a|fj3ne;+h0B+U6bJ{fc>i0oYd)M|1TK7iR z_20qM6g7Idc4K8bKVPesGyeYCDBn3L6T80d0mFJpCb_%|_X5&O(d3pwV5kxp>&C-1 z0ghIkuL*Fx(f*oHAF*X61aEX+x@MkSfjx>gYvScRqu$imnt5hJJVRQwvhQj- z;Xq4e8L%`H0xYEntwCjSL|q#O0ZX0N%+j^g#c`Wji}X4qpPM7Ur#p>i~R2ss}}Zi;JQ_s(>ia zwt&-<@zU=>yQKwkwM4wwp38rkg7+wW!yCk9<%>BseoR zmK)~(2mVKFGiD{Cm=X&EZ!5Pir@BB0=hz(h+8nw-%$3}Z#re&BwcQndb8V5Io*d?r zlghB(JE^Z?Ww|b;zM)iitdLvvX5qCaG1=W$6TmQwn`^@51KArR$-7PMKtJkHE&hRg zXL1&@3FbBIL5Dra!Mw&d@Fl%gZARG>TDkHGnVE!IrR(VK;I8gLcgG|Tnp%dh{^f^l z@)?@8iIey4N=x3e8`yxyuvf(TlTQ%B+0X6RF}QCC`p|qT#YNCA*q|`@$g_aQ_hUNf zNpm0J2EiAvh3t)BbCdl2I_gu^yUV`Xt(ATJ&9(XRP>!$7@#`nMCnG#t4~y!{b+IUg zMh!0-u)5Wc7TXhI($Kg1Y62X5{^pthho9qXLdy-raH+m}7q5^md*3c{`=z|eJzphk zYPZ@PbT-5Rt>!=+^f^%+uGvRkWdTILygSCxr0kdDn_-F#h@w?%Gy+jPsMobmvAwTd zixyh|1ZlN@-d0jY2_wG@Y{o|H7GyJgzED{U*O-lt5L{L){hhgqvbk2Qgv1RE2T#e4 zUEPO%scr$R6Xl}0CV;Eu4GeU>y&fpH#K`YABqqQ70W=rIzK#!|m#DQsw6^~Qbn|u* zm7}EDw5cQK^XU-X&hE_RhW4X$-{iuxy6c}P)ttRGN$ z0K?)ErSks)ek(qPV6f?f}vLsDq|9GVA)vHKSsixExnA7R>)A(a*Sasqz zsec6#woLS|dw4G5wyMBI3Jv2;WuM2skL=U%uyY#bzRt$BZ#M`=7@<06oFBOls#wyn-{1sav5#T- zRO}T)dMK9QPnHo3XPCe?u}Hu#|X6@kKY^XmA}?7KESNf9TVdI{xbVQ%gwZIy4BmiVzHx0Cf=CD5AijNw{4O?(O=CN74q0fcDN})U0u{9$ z{O~hoe6rG6yzuVZFtJ*cDg?metLNaKimDm#Yso;H@W~mMg6<$YRVtXzW>6OP+a&|| zCDW;Lxm2AQ1JuAYK zUwCV7`i@O$4`<4HLs>#g1lDaE9A4UzkCe7`1%~@N9gfNLx}DL;!uqK@H)k~Jz{;E3 zZ}#_9FzV$j2ASf56rO&*uS@eJ^-7LTiEyl?fllC_#k!@ne5XH`~T|_nlRSy}O-kE`KJtJhs zL-5TdjS!lo>~9E^#1t|AC&tH-c5!caOItf(tE>3s^&-8(lS2N+O+6Mh+{=I)z10to zm3FQ7C&nw4m|Q}VsBEYt-J3UN%V*5sr#O~ z;m|YpRoKX2V!uXg@6oKhh002b8}8jbWJ-+Y;ad7Q@GAd?TE+?@hTEB97oKc@$x0;+ 
zO(?Sx>MU4qZC(*t6@jcGd{tCzqJw|aim0mIBuej`&y0u5^@I-4pgQiRSeV=|LZ&)0 zD37BN>X(bosOc){n$;WnX~brqhKh+p+Av5dJf>soM?64|YQl;^ZQh|3L+ zBt7F`s)&X!*HTiQja1_#f)qR8MP~)^DnPpDQXTDD`_0fjgq8xB2z+=oZOrMxffW3% zQ-{1?#wW!)29k+>@GZ&>2++RSgfM`=eMT#NKp9~``XKEQ1}?jG(;b1Zk|YpYxIR1x z^#25)dKKG(?GmN*8`wCpbb4caJ3I#dI`1%`6S4* zwKaIT0D>X})ZQ=%FTKWy04lor08?;;a{?S9)Zf-9JXc7`16|E7T-M~)+aN|0PpGEf z$xnCP@>jRy7iaP|6+tK{tx}WNuzReyeVsSBp*$Ylc6-GuujrQ4`kbqO-B?U^0pIy9 z;A1j1IG7~H;7|BWG#adhrHvLN>jFl@hK$Ayk)AL+c+-PZM5j#qcm8=`$7q;M6g!B+ z@@sVokiRkF-5;$yg6{_W31c1DQ>a#WrU0?{Q8_(${8uR35xJ}kg7iZnk3iO?^q6#A z6L_yE`*jpw$WpDtffce)53+zCPse>#3v>%bW&&*xxrl0~M2&#@a0a|5o<%AF-auK9 zpC|F{xs=E+ss(0`YwbL2QIuJ4X?Mk+v1=4KsnV#FAzL8lHOl^&S*A3-`{?GBT5DiU z29uLfQI-r}CI9>73WVW^ZnHpx!eSrgDLtI(IWW^`%k(4(h3G}-PpuQVhG|^oqhI+G zODn`(V1?+yo<_czzSx5&@_4>v!l&{V-hC0VD=(E3<{>}z=n<6@=AR{ESP;`oTwNQx z8!Q@5E|b9^lf}h@3+kwN)m^S{d7*8ti(t9PF{nwS3B>rW z#un54tF05hSuNFFYYedN`hcPRrch()Xupvo+fG1HD>hrl-9CtLx7!0*H*^Bo*s^S6 z8#vqL_h)DupH=)+)QiB++K77*pIs)1H;s_>qX=s+hJf~#=J4|6sPiIzXA?U8Z|i=7 z*ULd{S8qW?Dn^*a2(B2p3vdVP{6>iWxN$fp4cpX{^ZQjjJBUEBXYsdz!@}Uo(-Y%i zROLM~K3vZ9RWp;0CTVGgitU%sjzMH7K|w;V1D(w3IyC_uk2Au=_y`)!D6|eIi<5*Y zaV1w|E#Jf{^*%}hN4+SzD8Oo`$Flv0P=yHGli|{%+5W?Ij|GBia2`fA`MGM}wy_Q- zGdocUZ@sPRYxpjq{#Cxq<*zi>)7!VqICCTM{8-WqoSI45{{vhvU>6YAX_4#jhvW*b z=Ik2h4#1{0ua044j}QUyH(~GNZ=aW(LgeLXGvi4|xGJ*?MpX?ocBX|xu2Z68s1usr zBc>>>%8yp^uk9Mv34LPfDz0G*F#!>GLeCKPq5Tq25n`7-kLc(wCh)ikPp}ZFGpfg< zYCNjKlL+Lo4gT?~ZSjv`aO2HsacvW@vcDBr`58hB5pU1a*whkWrvsjcPLGE!;Dl5) z3wzTfvgYb=$qVv75j+JyBhcdzJcX!N#a;_|3i-={Lw|eg!T)-or~lB`4}r(tzjF=m zodKatz*XEk!|xsQu?#d{%!6rvvL zpZqB5%2#2ZM(8PzuN)_T1gHu-C{M{9l!2Z$i1(s>Y(U?`@Wd)MPog$TgcvOLQMT=# ztTHoec28cpc@n(yYR!{v#BuHB$%hx?V;c#6z)3ifIwu})Dyo6h|Ct#sy(d=`*==8v|dwMMGC zbt#{M$wK-#$NONf1~UcSn)baKa;t{?K4%uyouagDu==TDIlx_n3Zi>Q7k>2bt1XlZo zfUykgXGtR|0e}Atp?;53-0QNbs6&3ko|pH7`c1shxp?n%$JPxUoK^u*y;H^O`jTS> zSFE&g%WNr@m^(Tb<_8j16-lBxiC|APmnWM_V;=W*N_`0F)UDdSSWM%X;$3S#U&cT^O9qw3n+#gI1ZN@}yQv0cc*u&T#M9i)} 
z{O8ynY&)R&2)2a3dM2DOAN>Rj;s$63y?Q%3pO`vx9tKXR`bhxMgBJRlCF^!p!O3AmEIReSsd?C+C-@h>N_3T^@;ds zhco0+!}v}U>>h7s@|8s-vvJ?BBW2=j9oz5QJa^aT~3UveeHSTqE|Y8Gj;cEAK9O?iP9WQ8^U*HOzzqYWG%bVYfw1 zX{gpTj7aM%Bv25I6o01fz;U==yG+G*Be!3>i^WrE5K^#WO)dXcl z_JCj-4UPnDT5AGKT8`JqX( z=SCP9)u+7P6b-GUp6pJI@Jy;U*v-S8o~I*I8nL-Acp}1Guz1RTBnF z-F(D?oFsHTa*|LbG^*WdK0oH#sr)rBCpWFH3LMYY*=q_gD*h^RQ6p2^n-m<%^2G%)O?(li z^y|Ij66qY`Fc{D9gpTC9=Z%n%fQ%-IjH>M|>JLSlf!(R?^1|F@{i*FJ+5r7!+t~@l z57exfp)Yz@Mw$ptkSf|6Z%=ak@8yj=)In}C{<^19&-h>O2{1;jMD-?Suol)}Fs#K6 z9G4HNSb4X-y;1E1_U5zL3y4!U93F+7TD&I)lXB~itY@5%Wzp!p(b@IgQI3mtub-u` zTNaNVIC|g~ZSBFzJ^W#QH0E5a-2xFp6sPB>V0rP`p{aU5tjNEDm@NXFgBD3#-Mp{n!i`vUq0;{oI&H-ouq5ncgJfK_r8B?C|t*Lr9fs%7- zHyX+-G{(aEm_A?*`XVz}#8nd_yj6j$fK~Pc8#bU`?epHbe+>p1-_y)u*0+YmZLpXa z_1I8l7eo@zHHl5bsF2w;s&}r$#I-GYbNLV8<8g@pKz_5nljtjU;kp=%%3?4oi@~#N zVrVlV#-Pnz{Ez3v_|_+Raz4qE1CMW`h#3CzG&-hO3Xy$M+~-4GXy!~crlTCy@CBS| zfIu`Kaaa%2#GM6=^af)XZ7ejPA6DUX*O4DqrR;hd89VZeDwD@*^RUXP$IzCcoQAXI z_>SSD8&nn_Ou}jEJBhb%nI5|Np&N)`J-_z>e)`9B@ZDK4Y(I}R@w`D zNInyYJPN3r#tjly`MYnIY;Z%~hfc(a_;9cIix=l4Dp{pROJkcJJ({FP79N=03x4Tk z@hFSO7(7nl!8mwfT@Vik@c{g_7{J2;+>c)M<6%D@qw(7UcmTo~Xjm&B@Pjxy08bjy z0OvaZaVX$3$j1O2Uk?whj8_9XXH_>Tg9aj=V5tSP6PXhK!}EuvGq>THJSo>%EPyi< zx{(gts6K{>E|>ypC>d>4{oui-9UvJ;(Mym=ae^dPE+NE8Z`@&2th}J0;0bs(PsprM ztSHDo6Iye?W%IHs@~;&dtzPw){{|5z6>6iF+^RQeNT3e{_@Q~nsn-*~f~bjvS_k>k z{qCQr`vG4z;CYcRm3O~gS_g>G2TPX{eRwZ?jAZd>0FV0ds1J{N@u&xnx$(FHPmuV) zAU-&N4`%S8G|u>~_!J`*4Zz2e7N`Z|2O}`F^fG)b>ESjJe3>4pqU#{pi_8pjkog#+ zU`i&AXF98q>cHpIcpCmK4R<^kGsoYYzCBHB0I%96kze^ENb`k@#o{GEbCENRBca4Y zp~QcPB_+>Dm1cYlyp=*P-lv3av@wm7=q4s`lW4jpO%Om-72<6G$9@EXby7Rvoq%7u zxFGJ{DeJ70(J)2m&^BCb`jxt0L9M(+8foNP{!s^~J!ml|KTT2mDrd z5ICf-5XAo`G$x>nK9idGF@e8KXv_hp-2*wTvd|4a6Q9v&-Z|9ZI&D#Dbb#q917NyF z1DFnr>EP+4cKI2h)&dDg0L`2L5|G2bEJ{EgP=SSrHPGT^p{*Ot;&z}5=O9L-9k)qV zjgw#Ea1GRyB+T`}_Y7fpUkLBf;W|G|r$9dIboq`%HKb!bRkofo5zp|-)z9yOiX-X{ zJk_=N@)iJ68CIKY3qRTfK02y4$9&$9Rj2sLj}@gBC4eo^7cLPq`(Fe)uzPYn1;|q*b6xEw1cN(D2+2whbmpT 
zv;fcSb)Akl;3a1Ta1A`R3#M%*-Xl(_mmq$uR2$XrylizriN`;)@-fB?Px2xZI##cO zudN)xzoG_lLK#Anx&uKY$2k~cZy*BUpp{0B^&9OrC-cr@D#nEo*ckC0qC~l{3^suM zC*;eitOFPD>&{@XGW!C)t>mF2kNW*C>oI>8@5q+2L^i8+JsMx`eMEbhJS61>(DB+V zLc(t@)rMhnVyeGiIv@ZM1zr_QpA^LFsLoRYxKd6- z^{;}nS?$w^@~)DyrdBZtMSKb3iMb2ejNKJvI3uNGoED3dp$gkNOCRcUe%9bCgd^o# zypjkNd<^+kd3j?>%Z2PidIKzFB;6FmMh8Dup3g*b8~2Vy#=8B9o^NF{zOGS-)C*>A zryN)ZHYc6u6}bRrOlz$tJi#x}%jDzf_!m`&#S^*_3cPrDSBdjjPY515_yvJpPLq!d zX;u7-g6eR~v0dl{vpPqcHq4bGThWXOJE2eoKQ#EsPo*Yq8MdV2QJYRlLi423#*2Z< zR485G#PaklU5P1?cBBVdQV8h!X8i_%<4`h z3+!08h(LT=4>dMaynXXcDfx#y@q=D(em%p!=`_m>9X5=p5^CdPJ4XPjOI1oI+ok$*ZA2J?9 z`x-T|(d5m-MX^D_(xHA#=s1uJo)D58_!h538x+kc`1+nH*RJoXr_p?hvfZ5YJV4tRK`SF4+Z`Du)^^ZoAmN4jDNl%~M?Xj;E0qYfz|M>s3 zd))S(-nnk4TB9~tFcM4RZxMTd&JJOd*!9@2#7xgDK8D3H6Fvu)v<>*C^Z69`in!KY zz&D*%VN=kUxv`Fy#F?=m|CbUi!uU&_S(w}wSEaA_NAXgBv26=^fiP#Uds0?jtk%h*4g6%W@W%F z^m_i7fbQ}tx=T;#m;mnsM9PmF$fg_FQ6M29&VgH{nCBK!{dMe~E7#ojTvl=qzEzi( z+$)vRGh&ZHGhs3YhOg`P^_jGWsQ;1dWS2WsJvhPNG~!Li-2RZ0a|DOCcDi%cb2{C( z26|lymtmmGopKqn-MI%s_KC67K!{fS%)yyc_AFm;8uVHlW3m%U!W!)h#z(t6T(sLC zAMqM;&d{KZ>rdtQPM4Ch|8++%>xo%9x)@J*Wv|yuC|ofvqV#B1v}n zRNMqP=*DzFoN3I>#%;z^spa5)+acKW;z7U_F#MMVBuR_@mbqvfFIO1RG(%Q#Ya+#G!<>$@oPcj}J-qa5))h?i? 
z1odZBxm^;am&1PwCWnD3LNF5N83(h3=Jm_Qxk9j@+IdcJl+d5Rbeu0hxiDY$*?XHR zMgNVWomTl1Tb(pz2zF!x!E8sM@tFy`QmrHi@Z)^a>rEtmUW|D6C-_^q9(=|iu%;7I zjF12HbV858WyA&|f9JWVXUXz7bsMyw=yP5aKWH?40eZ$lwB%W$ERPEml56SXHj>Ic zV$QLr{RhV}Q-<&1vK;t)I<-)6k3xLmDy;FU;ds>-6UL@ zm49p_Six76S|tf*thSg=uDoHQ#TjFN05fi?h~Iif%j`$9k}Kl(i!*2|MSocRxf-9N zac@ae$=`x?w~QS?n%WZri|y&`5DNvxjg`U5Ytb4{jV$|5^)Cb#?nk<4W3Twz_Cl zp*y&~uI7$V$W~a)SGU+*ow=NSU^;JETh&)TC+vFgZ+&OBH|LHzbm4V#eUc;|1=LDQ zCvb(?hFvpaZsW<=a@giyk_jYMB0{T%NMno{PYO*}G}Rqmff|k3LyLA4GSm#F+S3=R z#APBlLIFo%XXoHtzQvkV;gpVFHYGF}U;t=9m%qt20H5ngFdH-Qx}+0fEjZjF^a3wa zqjfw+FNeM$Mhi_R!qSe%fYVHcz95GOS1<4DA)ZXk+{(`^RfDm~Bh%vts*%sok#*VR zy0`^CPH!RK8eN*p#3uJwMi0)W6BGNZ@k+PX-Bn2^%UzyD_-O_FZQ@Jd)50j{261=4 zLkr1oMNy6gv%(NZ+mZ7rDAy8mq#o$N`mS1Ypu>q#PUF(%SO;F+JfqbmcXeJ|9_thr z*E!S4xJ?#7%E&Q6XXmb=My$hU;^la_6UI7~<(ZVm?6Iu8L%}n$l)#UwLG=VI%6H^u z_MuoO)%C4R7REXtDgg7pQ<^`D`cHdwy<$|5!yhb}&AQ&0M_~*tB~RLyyPhx}rjCe# zK@k-o)&m1hzRsRJDcF~dT~7$;S}`^#wiX=9&Eumb#NdW8lZyn-KA}P&cMjd zVMivNbt`F(4LHQ`AYjyixEF>8v^-~A4FWqULO_w9NP@4o#s^6A@E@T4fHLX}2|6(v zh6Q@3_F`-~NL#&2)!Su;vujYTBC-OZHJNl`}vi8`w*%w)Y>Thb0LYL6On1W()1>#Cp2B>Q5CdQ7@oKEnM8z?H|ng z^h%OaDYTw=Pb4#(9Ih4<{{Gp5w=?F{QQ$9>k_+d21rU0S7n9`esr4DVj;8fCtHG?N z7!w_gxdS#^ywn{VN^u&!K@0xE>L~+bNI1PAI~PIcb%y}Go}!+_3Svz3G#2v3AlDf) zt@9m>op9(*m=C5NSBW<25`s{;_~v(hgkpTF@1Qw$LU5Q%X59(Fd{C8oTnh0~0gjC8 zhQhnm(N9Z+1cQC}8vc}0%XtIz&dt+0olZZclw#jcgJ-%=`;*S7QlX>>lHu&Y=}~N7 z#G^3!`6#8P6bkU;qbN4|zxe{K(3y}-e}}l6vH%}VJQrFM64luQR}(g*ArnisCI88K zh+jX>Ks-cCx{wd9_NyB#5u%|LbR=^UKRb4uVZTO5qCw?Duq zz<~Fm4OyHYv%63GzWg&A2EF@U!%^Va2G1Xa{PY8vj>4t5;J5n#~Tg8Gllx zR+3;IY2y3j{{r(E#qL0THU2=J%jGN?Fqx7rU@;9^)T(vsEJe5z$*L?pOSyFx(iLBF z9g=4z9wu8q2n`*6CZB``PensKT>ZZ0Q)O3AI=1%wn|>wl1|q2<6<;RR=77@`uxf|{ zII)M+Sl}~@mi#XYQpGwgHaDvxK0*+;;VQ;qwK|MS^5X-alsFnYpv@d~gt2p;Pb2cqU(%G~e?TZ&Jwk6rt>P@n|B;MjJt79j& zlQ^5m9zuXZXcEfS!eeVo+Y2Q$6k14=IEf0ELf`8xw2;0>e?Vy;(A&$>`{>PW?<+Kg zV)@M+Nset2E`2vnbQaA_#xvjY|GxR=oBSA4d_vsogJ@|59K#T{w}37@U>cLKa;U9U 
zK?(j~CPtG3cT5|Z5Tnk4bI>+)l`;NZWejEC#m|8#c^Lm`9+Ax1>@uRCUw+==1TYpA zxK~kFn~20kZ)I`VOhrhqP~Za;;9M5TAu91}zCfrTvbVT6`iWySU|kloomUe3_7Y0g zYOpw2oPH4hgG!?yamdauz6iyf9Ig#Cd8HPm9eVhKd2-;_X-4Jdzi!^imOVi2fr{!m z_;%zr%#OHgGGWMLU7hM$9fOB!fyg+VNvTVa5D7}38hdOtvCbLVekA!(`= z7v5%cGQwTY@oM({kK{u}m8@Q?Fd8H^%W@3pwODoVSumMJn?65HY4vFTFGifhAEF!> z18ral%M_?mgZE+rM&rGwFoP-wnoqNYv=mFdRVSb5CLgPM%0QrR&SY64Eup9%%P;EY zPR6|4W?KMSe6d)iHx{o1lcl?vaq`D;pEsKD`J%;Z{>P|SqznHsb$D^FCgfgC6nfQ) zdbRO#y=pD=YUAhXRmLBWdwub^58UXF$9(=+!aI|qD&l@$EaCIUi`V>5P_HN*{x-e3 z*sEBnSHp6zhR?uW!5(3ux9ZN%b?6LwmzFRoWI?Y#vnQQ_M8fA!B>do8{$$)Ah$nqB zcIu-wLi=o>mcQIcv7%t@Dfi zX8;CETha3;$n(XGv-y*#ZPEpLsG|$pJ|MTfaZ%eFuh4c+butmCN+zoEXW&Ry`BC5D z2;x9J`xo#AwGJ96eyl={r==>aeH==z_a;g{dzPw5%g@Li^yV+-!)~H-ZsYJm!wNj3 z5Nrm6O;GSaX(+Q<%M41L%ItGG{gO&0`JGOmSq1i&_LvY)X#@>LLnZgOq`S(>FxD!! zyT;5g<{D&EU6{Q9PLS*69wrJiY{WNWF3f-@&oW#9_IE1=JIno~WdEH-FVj|Z?y3d7 zU*I~4T&dIdDWLo`S~(iyAsojF6m4}4a`sVRzK zHJ`ZbUWs8Ouifr5tJP*7lth92TVOBt&zKdn%Ls!p*)Z-Elo{nTlZN+D$6C%87cLcw zg2s%Cb6wdBkvM-Vr4`L0Z&CyDC?l0y%;hHLPG=-oWBvsadT41Lh=;5$QG=qzu374xH&0e+3Q-iKGGd_3W1)NTcMq{agBlA_*%N=lJ7|dVjBb}Rl?yLs% zL2B;BPzS4ijD|^-CVJ)06%ItaM9MotElol6@2VxCUOk{s-4Dh_gS3$)_9q*iY?nB4+N<71>&bQsJofu#OJ%PL8QR@BnhX;~ETz_CEsd;*e)&jSNcpJ!4ls=e@n#HS zi^3^W3g!Zdp`{{3Gt(qgQ+<+ZX(oavR@wm zQrOYF|J6uh9REo(F9n~4HN*bpaUh{MC=&xP_kN!-> zsbOzv@_fD@_V?NRbGVf{fH@1Y|BU$s*77_ej?e`eZ?WbVvMgzNzCa=KXryFPx#uE# z<;7qttDD6%MuA}jqXu9!qa<0~v190(|HN%Ys}Xb+3V@(F6YsKp+(fe``GmaubKvZ} zesC|C+2`p?{V;PcWX&}De3qW;hn%NjE;jU1l&HK-|12YzG_yJu9mIX?Sm8RWF$v7a zdMl@}x_J{zQ$Mk}coRn}c&mu|aAcMRKP9Ijb%hJ78XR~CqcIEogc=wpPC??rQ26wJ zyjIZmC*gB}K})oyzbtM2xVO8vi_|(K!7MUFb*9EB1v3F08X*RVOG)x8$MWAjaWenC z@2Gh-g)sE;@1Hn5IyU`-?=7bkBtfZBM!0KX@1Bx-S684cC-}lC1B*S43lNY{@PW0c zuIvb$?(FG;X=6SSk0cpwv$!0$#+pZGrP z3?wDk$49`*lPAFHV=A4RB1qWX@BLsJ_V5R@F7PgS9O|WL;oNT?qWqW6DveJw=4T2t z1;?)7nou*wr?U)cer6tFA$TOaB9I%RuDIa~(UlRrt7G0VdOXH=AO?oYNktzy!sw1W zY<3r=@mOh{kz-9d`fnTMGcz{Bemo%eBUX%p0xzYZ%_3-?HZgM@`AK29Q*F{boi&ln zxevv$aPufXR+^7F{79r6Hrz6l$HI`?*B3V(pwKOa3= 
zE0=IfAbL74t69pLmY)%NDuT@tdMbIL7rikW@WYVv1VLhHlUnaKNp8IgLdZ?bX}ZVC^jYYa+g2&%N<{yhUl>}{HG2;1SrZ341`e-FP`{=b1xYEi z%GYVpC>cBsqg_I&EkC%XfH3+E9iQP#rjYrFoCUx zrGEowWe^+1HWyCw{UNpr*QM3IWN+W(28vG5Ij|?K=4ysU%RD@dc9ri*YZ5*#k;x=D zA1RIHa6WCpq$Tzx_6@y5ABWqqT*RZ1#ZUo+&{0B7ekoYdtIRiWc0;oO*#7wY)J z{*qUUn*&g;Y*VuMAt%KzG*>h9L3UAt6l(E|Jm1S@Xn+oqlv}MTwE7tnZW0Ufwtmj1 zacQ=_1Qc*ZKfMVZG36yta)94-9R9~^pa^CyMx#|nfIy(Pxyq=blp2-B9bcYW+5d%= z!Q|c(2Py&7Sw^D;mho8|YAbXK3P%D+>!@iBFB`diT{yp@YjdkZtm#dHE-AV?J=n9i z-w$>Ue`!1dUN0WDPXCVLMWeyvG&)43Xtk@r&!onotsU(*jwhd|3wz$_mnlvZby`8< zkOPa8%*yUG{%>quVYlM5*f3NE zA&drBV;mL*2hy765ZBz^-pqwaOGOSB(nieEW$&_kmc2V%LA4|z8$B1{EN+%(aXxP@ zn8T8z$t+imEPFRwtW0?>$}_gPIAil!^waaDn73#ixuhiH#nmLmqO_i9!W$GD~+a^ZN%54sgI&1}BsPe8)PwoX?e3 z`_pR@(eAL(99`ZVUf!9g)Ps1(&Ve9x0{Q7kXzx5pat1xa@Df+)vN+5ux?J_uUeOro z7->#!>IqYiXc$!NVRgCLV&?T`mNq-ghRTS|Q|t5SS+_ToY)9Ue^>98$;Cvvy+l2l1 zBHw`vX?1|wFxl7JNH;*fyBhKxUtTdf6r#Cs4qTJgGz7SYOs0Vgke0C=_+#3MS)5Cp zOCaC94cVlkZ!`eeq%j)WRKDRoI9*%*KBn^&+6oQ5on6!#wc$N^{kmpP3sA;UI>D@cv&tF^LQ)}icnyUdT1TwY3E&D-T$Buy@@r3M! 
zRj5_pCpkgXXjp>|t%(bgnFdorFf-bkoLUwF521}bB;>k>nKE9-ikt{FA7?O9`F}LU zJ4f&a+*ny}w^vuVbooc@A%7X4&ymZKx1fCXW8zno9$N$T^Fi4Hu@4NbZ0`E)NR%Fl zDAxYg5#Svo@Ta_OsC#SOTNcgU@1q~Ig`jZ|92Vc$Mhq?KC5(Ey;K z5n*z0GzjG158xDQGfC*lFPG3LZ}uN#MUlP54-~w^B-vOp0E}Ls@15iMY0!)2zz*l& z1F{n9$A+%5)gNuOKT~zm+aA@_mX*NI73Qp@IGDB zQ(uvmdQ_?&DP2)dV}Z_1z46v}20|nh34`xgDz%mY_~UBa964;Kg!ps96 zU~%Jbd2IPV+1wuY$E7*^_|l!*?Xdw4efz#n2bo*({`*>1zPi`v{{%QVgX?E6kWsQ; z_9ZPX=q)|oiJ~sxry4T@<0*EuEC-Aysr3s&FXZ$}#_23Oiqb0?Pi5ivpEtb{8Ev}m z=^MUy?&>(Y-jusKeyY6pn&G~^{l4O>N75KU^wXNO z4Sh46mA^s@lyk1R!m^}-rV9a&@rojc;GKA1s-}k4!TzR}ykd zGObGRMuHBGa>*QMI7yGmTO*K4;gg{SYZue7Mo_N8WJ`5uROwU}){?hMAhiLjUD_{E zOrs2ct4o^$c-?Fhg%plJJK`Hsh zReLfkj;sw?+V`&h1753S)znhKDyX0ck_;}hsCvI?e4xYaO$RM5uM0{rBQy%NT)9W` z_U`PAOm4lp^I28lOvS-`o;V1{X9T;mFh2EoS6Vx;!Z(od4Gj1)gwCGBx1PgvVzbyR zL0KZD)jcbMvmOuCGb-hP`6M+_NMsYbkmMHQC0;0~-LrD-9=Ws0B#6 z6UD4Gp{h%O>9QG5(df-HLW^%MH=YIuK^$z{^Z3q=y{i%$1+>kfLS!OCJ3BHP+CA07 z2Yav8uqu*Lvdpf`)-JClIuLD~?5s;wGF8q!=W1nZ{N~n zbUGQW!C@33>~UARJS{8hW2;jh1!pmcW{xgPt%>=&V$L#u83mCtYvgtMiZWAh`Tox4 ztwRY7PSp+VfL!mHRg-m4ZU?YnF?uKH=5jba?IlU9CWlk!j8&X_ZP4n?0n-_7Og&DH zVa3BCP?BRvj>x@?xawS?Acsnx%@&JF#nT^(367!MoC__E&QW72N2TmkCUwZe$l}JibaXzN{RS>6W#IX?xE|)QkA?I>OXjVhqt#i#47=< zQK>7F%j(zNyt0b0WR|TCZ@YV~_Xkp9BvaA5BxP}@*QZ;@TO8n_72mq1%h#Q~?U8lE zkN?B%o10WRfwx-(3#Zg^y1ql-9c3L7o7{B!c=P&9nbzbK4n4W8rgmUcVGpdDxB=3R z6KgN7GvixM)3k}t;oYY(6UFC1(`m~HGcKRGQQYn!FNRVU^xbq;{!%_lW5JeQd2eEY z6AENN+(1w&I$ulcq%y0!k^?mO*O~iNtcubL_#br!h9+JU>=ujeZ|9NNP@rvx-D=S$ zcq?eZhU}dUa7KSgxuG79VjbA_!unYdH)DFthnv%C2U{kukgH=)X=$B@bvNGv8I z8>E=0KSunrdC&1}8y?))@T!*B`sGWVW!)>* z;}gp#CRcddnv;(DLo?qF_M}_fuI5Zv^~APqkOM}cT*yJq6UH*wCYb~5wTJ^yy6r~D z0cXAGyvL)6AqJ>Ms>X0qF<&Yacks#d?@8yOLQx=*0O`MaR3XXqub5a&|Kwtk5MP+y zZ&9Qwk<-@>U-$Sf|3G`Ks3vKRQsrwIj!xV$QiEGsdsl|9x_87I$sXT(@S#cH2~XQZ zx^-QX&D^jy)B9!c((v~mykn|K&55Gjifk`Vn+23LObKTUzZ>lYDjHLbiwrulgy%gfHB%`l~xr4cqQnUA#h$eG;<5 zgQOqowZQ_%`f*E|rDs`ODUnyuW(Kj(y$1P%3K0emN(BY 
z22=I!%fLl>-Gz9NR%;bAqY4c}(@-G+-2&_=NP^@N==YU671fUNhe72n3Km|4|3Lod?44{%{val!}T>u^DpCP>vH&)JrIpiYP_)Q8*s+p+Hz$yqes{vB?NG&(9 zp?O13ot9!$l-igY*_&E(bWOFnec#9hJOWYAV#t`980akT8*%yEO5P?~+{W@Uv#%$c ziEr9ggo^;%4AJ7CTIMUd*bD-wP>bo|_ z4GssZ)7uT@R%fNt)3TyIw&oI0^rY9s{Yzs`4+M&=)ySI!jjp`hdI=~hB7@tJEO0>$ z_%B7Ixb6~CG!>B|_z)sRwR^4C8bq8=BK!w+Vf|%+qFfGnw@qiVg_2M)*8kl&k@y$A zaJN!)qckz9nU@%7a-r;i-~y?4)i~W@Cwn(%%s&M3P2isv;Y5c?@32T(qF14jvmq!y zxpfXFZi4#Yei}k-5Rbveh0z&(tdSsCS!WE3Qd3bReR+53@ zsjQ~~Du;7fx!$Mkj`{Z5<%|vweMQiCf9Ii-*{1CBD2Kv*m5f3iSh96#+pfW&ckud^ z&8sVA(AbQuG`*0wmvx1A9^aWfwdLD88w9hY)$$fzu<}Z?!|cp#>26t{a%vWWMhcHk z+y-f3JJbu$K+G7ymSR6Go>npYT-wCqedFDrYJUn$r9fK>L{gwU1yVV@Ev?tsY?^DM zU|SUQL_tFoR7C-lR&8f@VZa3k9i{$Y3kl)R!Dg_q2554#AE(vuLDMiB4pWr{0NVa3 z09rNnN>$a^*ze>V9x@h{@9I#J1gMh34YOHTU92oY()P~*N$Jl6NuPlVclY4+qs^T~HlScsOl7RE9-Y-kSX%p5)^5FPP34oO7zCx=9T1eJ)}_+xTI}H1 ziU$ua1f{INXjxGw!%|+?d+4zuEWLHSVST0?!P1+a+*%VJ+yn=01LVmQkW%dnQK{l8a$q`vY> z8ITg#5Tur;WIP(jmVjagkr+yM#Dm@#%6o)DFQU2Pn`r4xr1U0IdLt)g<}W2@?n2QT zi0HaNxKt)wN~ARZjeJl;I0wtJ8;TmWkqN^_ zGo?;vN}bLWJDowy!>dtF5OuVfs(}!_V*x}jW`BC6kXC43#$Nft&CBnN_yMab{((Rq zHfOGpfqci`3*<{1FFuXrpK98DeEY_K*wNtY+0oH73X%K<8Oc|plnYCC_IO{nCx)Zh zoe;}6ZOR5b9h=)z>zkcNj~qG-dY2y_2?mC)=__LS!O>Xzfz=Sp??~0JTi)f8vHbc# zd)SQ7d{fdHxnbtvU{7na+u0(c`R!0P4MFLBTt@Tl1y9>TG~YH|fy6hI1!+|v4e@{? 
zFdx#-6W*6#J(Ar4xr!$1N+S#aD=kF!^Cb6W!To~mO5~>??{R&2@Vdt!y3d64N{A1Y z>Og9+cH;Kc)p+#Y@$5Zoymi|i*)#Zs(X^L8;mM4rT1OjgW@zG8cDFTymzIC8fbaEs zw;lpWB`2`mH-2Z-8E)Ks$I!}eT-^!Hs@uNZ3Gsb6)W0#(G}%_c7#&EfZ-%`2#$tpY zml1wkM)=cCXx|7t_Pa}LCMEFw(;p?D0-TL_v{u{|XkMF$dST@?Q>+xOa`lN5k zWJCL=Hjj77WFxxb(oMg(tGD&Yi+9}gi#vPMM_#&j-?zqn4cqP?g;#&$w)@dR0J9&1 zH%Kq$#v10lzT#=O?DcIuZQF!anhv)-O+0sZc5-oK*6$WfRerUl{URTKBDe z6ucquoxDX*(GW8l9A=aD<@TK;y>3rqwPf{J6ueEZ)$>};VK-FwY)nQr?!9%@!(Nmj z2+~`HjN1>FaC_oYxILXVmVo<55VXIIF!z-4qXGqfHeyeki;(>z1lQk|;rbM1{HR2p z3$Z%*FJtwJhU^0q@pWCbEL8g%h6!ZGnj6+MIb7{qJNF>Nj#O)v*~qGtM+{^_^{YE- zP`YU}pj;dd=&}QYen+&orfED=4gyQB9;)F?7L=q{?=nLeW(j3{fsUY=)|vH!Sw~sI z9e!8bFIhYmN@o^X1E*yjHho#!WJ}$Owi-1-1u|MimX#gLFhczpp_Pje8dhc9O56_rY5Az&{pi+wg) zci=KyKPs@_Qs|vVl+`g`4b4Le*|p$STJNlKc!Cb?w;(&w`3Ld**T6Q===S`s6ngzT zinB|CO)}~5K}M%8Bq=+f;LP~I%s*pt%*7uG2_m5kwyU&*@%S8OZzy2)L=JFiO?At= zkSMA*O7rOH3UFPnz-3)6?`GvHl(c}3F3J1LAoa>myLS;!^2L08Qpes0R)$x$)o5u_ zmKSPoa-cplo_5}Gv&C2Dt1^gRIR8Fcis{C%P$zm5q@7fRrD_0#I4Kci4&F~hH zGwBq(k#{BfYsNHHb|i?Z{J#MLfU5pokS2D_{yVV+N~1cc7qbOFFI0#6 zlAl+f7BNliyy!?d{yVxJJ&l}8>%=ZPOFt_cWoxHZ0C|P+z@Z;_O8Bh$ou%j zW_xpE#NdbzMk34VB(S{q$ojZDHL~mG?j>LO;;!Maf?+wcgfm!r1|6tMh+S07-DN>c{y6kK_1>7 zIG0=DTp~o>gY7SPc*8g@dwB7A9^S;$xB)8j3~q!NGLdPHhJ7?u^{RPl$*aZ5Ep5VH z?B>;%N}H=*&6=mEC9lrS?V_#NSN8Q5!-eKS#NzyJjcxhbX9N$>i9X9F1{f(2WT;}*lCbOaz!nT#oZEK!Qc z3-?^rU4a`DnXZa8hlai3@w>)5cMdmI@Q*o?19g$Uh5mcbM`*S$zO(yQVr2z5bH1be}!`np2s9cgtl+`Y}qD1|6vhtq1b?*$Vv}|2Z-r1 zR3fd;!ZI}f;T*!67bY<0{jI;E_qUo-G4#w+6oU>XXQnm|2B_H4B27Q{aF3q6)B16K`Z0>KH*1DD8~mlf6*c&CX(*;BOUyb3qgTONFAD zCn25+LOewiFRKteH$eH7{q%Y+D0%4~ku`ttB3bqMG=C6k{;L0i<}di^vLbtvN_3kf zr>MxM7!Kv;QqkagT5wc5(MgRrX$|=to_`dq1p&AFcJ zTU~h1%&T%l`5%dz`8vN5>ij??5J5UWQPBAfvd#~-yzTK&!Hv>~|C!DYHWYNer{(Re zT!oT8{M(l7ewY8tP4V4`V#XMp*C#C-{z996k*ovN;0s;X=0>V4nW>AidQ zr6)6!%uKe)zVEA{O-KkxSP}vOR1mldsCfOU&wBwOBxZwQ2N^`ccts+h_r4$UR78K~ zDvNS2lFmElRP{{HWC7mK_ud4$s!pBiK6TD_&i6m(`_A`AVfQpp=?6Pk^tP{>7WPit zJm||OLP+h~NyMgzsVQqKo&E9EcdW^Qg^O=ok>$;111CAng4L)qLv>b~*P5N4b7(Y_ 
z)elCY+J}HZa+wUZ|0K!O$fZag)~S6LX?+}n@kk14ed;(`je}aBs0akt`nybqj{X&D z{I}{#;T&x*XIg&;wLL9I|B5yJTNB#g6Re+2j!ZJ%*VC(SUpsv1{MLXV#`?D2u`V*u z66LfsfWo2E`O7n!=4_wgr;;}PId}f*2uQ4zi!Zx%r4qJk{K$wV~+JV%@0R9XCp1Y8upUj$=2M8 zYR&Cdt-0l6&hu({r=sPZ7uE6&*4$qBGIZ@S+6dKrFeYljznpmowyZ|t$*&qEW?a!f z$Z|SW(KE8kq^Np|mCXP))5~sMTkv(S z>TVt>yHZPTTDknX`LS)E?OxsEdtv$NRVyr+naztA`(3SzJIb^2_H8>Z*#<0kd^Ip_ zV_$CGj4qeGt#?*a$A-aVw0~uJ@r^lmXa5}VcF*kD6>l+{vPIX|ETOhS&S}dQI)XFj zji|b+Tb1|4%IXgkbJJ0t=t9!Y$EN)*7}T}C)wJ3Emab9Q-#pc(N_ueG@30BC^{r~t zW}W@5x&r^?##_x)<88N?7`Wul^@*X5sAwRyEOH2(nV+|7Fa*q{p5f%0eX|4URky7h zzI0(rkiXvr>#KE6-XdpbwoczcwoSX``rV5Q23|BeJZ6UgrNr32?z#a4VlxXbn?AH3 z_k$e=@*AM$j&#n6r-zE}P~E4G4j@=a3@fr@A+u0Sd}Q`0!8k_&b5vLmhKD2cVpV^S zy{Y)O2Uj!tQ)7Tb zqgCy~NsBz^GtSDNz^M;eXItw|eIG9UOpD}(v!?d7gINZ?Gfip@Qv2fV-K)Q}Dr22- z;Ik*mEHcmzo7}QNCwi@Bugzuz%*rocxjvPe-5w0a0(!|~;cXHxM#9eW%Bu#tuesyU z&Q}d4l?RX718b?lN?ST-wP$&Qm6pX;+V{|^%>paDRK@a>T4`CuO8XvO_E~_ewCuRx z{-alZ_O$JMaA)tOOWGtoO&NK%G_rkq-Jx$!-AcP-+@WvXaM7r){K(}cJ9<}qVZxyg zY@7GREgfdBpEsJl){x6L)t^t$1uIKikUw8A5kTR6j5d>C3WQzRg}Y~TeP&)8OVWjf zia+0n8Vzm6_;2$B{xjC`-`|f}v4s{^w2JHIGm9<=0gowGTDHPg8*-ly1vYSfT+pje zV>Z30H)Ct9{>)qBUVSgp=0L_9%KP{iWf^((tpN9d{c&II4WuVEXwQ29xkSr8w@V`b zsy7-G$G%?%$XCzIRHbYi%|je`MSbmOY#av`#@BwDDuzt=dkECcvriVS3qV2q3p<;= z_11ub7O5zxj8G7q1Er5S=dp{VkpwC-Cex{;68E%ULD z)^~q%*}m^9K3bjY`oa@FTCgT$A_Z7n6OJjQ%#k`Na~hFXM3B8S4I1c{5qG&6@zu;VEnK_Dg=e-iYo^AM3$M$ zkObyY5ilJ(HjzGqt@;E*FzF_jf;Q=5!3Gkzbxer&B8Te2h#DjZ2vqq*LS=YNs8ya&d4m#al_ykUjRe{V%6skT zAv=pcSxL1$!y~puORDX;A^jQj6_^so0H64_1SGT+(sL{{KOv3+T^*jF)ex1Y=45It zReT4ZKCc>(C-_oU)t5?oPE_GnXwi2}AeHkimi)tA<*3*1ViZ?75dk!f2B4iuf4?2M z%Bh}m7^S_4+r*e?1}QH{co0KPWwh?HQUO-f8?4x-Z}nr|5;8pw=ad#{dyL#iuu2uO zh>wOv?C+l#<{{I(iuCHHc+5QWe;@QsamoAplFzi6+~tu{*SeuRhgMohT5s=Ic1ec< z`fl2Bk}O^v^u>dElh-0xMcxt)*)iz5`u4-SFzCbUGN-98Bh&|GD0*c%tY|l?=;a`< zhgF$J!%iO8b{^Gs9x9v)gJ9tty9{Pm5Yti=W{DzNpK+w97wc@t_(>Of(xr9>2qV)c zFtH4BAUcjl7hV;6UhPRdYHk@e9^X-cmmL*MNYVi$b+5LB$0d~bDZnVEqrr?qr^bCR 
zFgArJ7@pRLEEJ%&1F+Pmg{l(_Q2$?i3Q4VAZeO&c(&$qNb}hSobBo2}QC$s~w(L(GY7@ z#Q+^2fvsl#x^e6&ULF8cbIO zZ)-HU?8RZU%373C`FB$Vsdy3}Cwl&rzke*)rd~m=k!FP?_RJqS(MY2Rb>Qc;>}~P} z@?P=FlBMPg8e8)YwgY zq>QPt77%MjI3r5oG4)|(NNrKa%@*X*#KLaX@))esIBcXiOpT*H80Y5}b`fD~k$Ra_&Z&6Xh3FKH+k>S}R$W=;lG9XEV1m?m^ zMnRMT{b;cAqbli#D$!WihMuePpe;X?|4g1r#~T~(nqT|` z9&a;mB_CUO@LRWR?O;Wd$>+2>d0H?DXg`ZD*RlE51?Tv?l{?n=N7AUR2FWq+=HKD)szB!YAw#gjsMmd`8Ax!sHXJ8lmj@0S5uZyd&xbT*I|mJrmQ80 z!3OQ6FvX;%EQKj6af*ym@T@RF{)(K1{rbjZ=pVx|^pBz#`p*e_LeJ6HoD)O;xvH>- z4n0?ex9StA$hi}#NsGPYuVVABUA*Y(k!XA#I?j!K#hLAIDhwoLSFS%*=uerRSb68R zw(^#{)-1nkTYK}CyEZM@(dUT`ZtaCfcXV(oj;Vb%2QDXv>U+2!B?92kalF(!3glz% zE9tA%J>1H2ueyg@K3>JQ#`kd37xU-4TyJur(Ac`WcoTzckh;ex7UEd5!>Ae68EA-i z3_5c6h>m2?hFK#+?*JMSplDjKgfq`JlUrb&HY;oCkHQ*#|3o^F4iLpra$1Ek*j|tC z;Y!a@S1H?V>TA2WZjEu<>s5H0Dm_<)8I&248r5pDr``p(*I3Cd9A8Va$UQ`FWWHYL z3Nn?BNJrYQ*BG>v)|BiRjQ6bR_8BuvhPHt@oN&9xtKqDYXq6@Q?%bS8vpw5swwkqq zO|-g94h!#Zos$j?EZW+?33EO5LaA8>-*8UdJ1-4>UNKG|3QunhPoExcrFh3Ec(39l z_~B&5>KsfS{+ajJUefEObI%u5#S}4rRH8XJv*z#kJelnPU|o9C1%aH=Ld6x zdIl{kGP;2UOFO&5J^28fWReukHFXR|yH|C2=FE-{l-!oqrJa6Gl60(KGg;jxv((w* z$@)bU){HlE+G!n`X4xj&Jw}t!z}d{ayEG#;xKSi2Prd?sVB~C$JVEwPvBZN1>npL> zBTKwJ^Q?;Xu4Jyhm?hpWW}dA!WSs&FKiQ5Msz{#Tj92JI+zGU`hfwrLlGbT~2exe3 z;f_1FD_Fkv7Wv^vHmCdR_4w&u*TDA^92PSJ9?|NwIMiD$?g!tf;gyd8C^ZYo9*786 z*7wWoulwGCmn#Cn2cJT_s$WZg&i?F#h)kghklTY#q21K!=GUs}&(Zd08$}|O`*b^E zR4xp1VS2%3(?h-GD5ud-h+9VINc3c5y&1bXHKQ0UIZc9W1(!i&tl^Edf0r}%>01Ya z`O5mfke-(q22Prx#@7iZUI;eF{W%}6H(9}Kn_17>c#r=$349%kkk(%T>);;Pb23Cl zS$97YYZ(RKKhD_gOm-9;uGk2MD2XNVFC|bqluXjWXQVxyPdC)Ys-06T?#a@jD$FPa zpQ%cF=+37vP#^VSLi6z!ck-U_aCLCu_KJ)1mgZhM)0R#8IXxQl^p0@O(>k|k2ma+9 z)7Nx5Z|D7`XnM9+4!1;N<)DyhSu-Qvvf=Y1xlQX9^+sqt$5}0s#YpS*x=8Pmyx9}3 zEb9uEL$cjGu)Nu3jTE6|6ks1%3njy+FjArD-0v#hbl`o6Y%G-NHRtEptEa9nR}7TX z^ja0)x|k=Ois8t$6s6bHeodi}u5Q0TflTd7Mvl@lTJUFx<7iUDTTCYQ%vbdWEgbZX zdUChV4TpU_ZM0!;x&-+(*mgdmiRdKuDcUj$9wJ1-4<4#8PS)cT;Xi%)ac~5x&=zo{ z;?}1M6cG|bF?jrAey z$g!??q#P87X3rV|mwXn{NQBz{W7e`O>&Q=Xz?H0iz7|2jLB$>jc0MHw4lF7kQN83 
z--Ff?NDZy&-n;*X{{92|_w|qvTfrGGEZtIAx|O(erFz8Fy*Oej>~JVz>UGBiC!-V( zg-pfB^e9SE$`_f4nObUTMgX8OX6i~)s2rpWf|a-0S*oM0t%D>vyDV9GEeMvw@(=oN z*uS?MmW)J89asJLf$KlK0-nIK(PYoQ{Rd!6ww`^5+(h0zft)KQGvQ@K2XGt~_W59M zKTKZ_g&B5mD9F#AdR9fwhpWOqI&`=SZ+_B!FyyA-^h*YZwodZ}`Y#!pdr8H4s~BjG zgh~NXhVKzC`ix-4?7hnhnI+eb3|+Il)V$)Vq1Gks9(U`4*8UY`i??F|tkL}055W!O zmmzX)tt02d6$V4+kKFs{{gZHWg+ajiqbj~}(a1;RsQCuHT)B-r4G=#cuEI=I>2MY18OP6&PlKPGMy9rb6Hqu`mc21j z@EV&l!E($AXLdA2^RaSo2-d7iN({BE0Uq91b~_+`mIaGRFkBTXWttqZf+!;hD4ETo z*~GaD{fR(N-|X}V#?NUeJ0g77dBi`}@$<2Yad38cusuwD18^om*KTZk!;Nhl8*FUr zjW@P!+qP|QZ0C)!v27+mdsfnfN;})`Sbw0V>A2bNKhq**jJ$K8?qoF6>$1$xgyZuXMq{2@5P5BM(>t{W%+n zslBi%`;ZeA?RY>)qPyDmeH7gfg7TWM(zxqm_Gzxi6^`7GxO0-8Ap3`5SRHR=0`;rP zv?}IQMsOD7GB}`eIc4WCCQ7WQPwB^^T-q+se>!7BKX=iVbg!8Gkou5LM%~0A+{1>!h)q%g#IWoyc zP3z*=9ds^w4ysn#D>$DW%!NB=0BkHAyl@>euvv4-_ZU(heF#|%#!qfKtsvW@h-fP! z*GTCh{BXpqLxO@zqKQ<)7A62AL6|c?VSceGsTLkd&@MPn^r7Gvv} zhyj-+>x=Slk^qHQNGeqHH|1VhngXBZepDMFcRW+(39b z5PDYdt7PomOhIQTu%O-+6GRQ|RCH5$X3OO?c6w2aS?(z-jQ^?|eyfpwRT_qfU1$IlO-_O{bYAG~9euu`nuRd|uqIp%aF`Fl?i)mY4 zhgL%-T3M(u;w~I~BS4rC8jDGv%((@y6vFswPuMNdy(r76_Zub6(pb~FledRI#_>e| zKmz3)NaAZJ+7RLeM2N|rkl9jXbH%F4N)D8YW*I6gPwx9PL~;yZbY>{|xf|m~bPea1 z>;t551rJn|1Foq-kL=5-QPWEO?%EcUP-JfQFp#B)dsoxVz5>_eQemYD0>{j9`|w^( zoJV@9B(UgECzp&!Q2hedN-5^TqgX-xcqG)~4ySAy7l)KQPC=%$b-uU73-3Jwg(Xf< z4f2I&ET{==P4f||p{f@+%Zd5GPtA#L){OkIZhQjFP zMD9^o!`cVY3=!*tSA@M{R-jD{_ItdIEu9UTAI2?tYQyzAI?Y>sFNUwhKep*?uoO?0 z?<3n2+EuzupN2I*ij}^qqC0ELPdB91*ER<_YtkHtnkpaVwzOATy4^b+Uo5UHfbT;~ zL&l!1Et!(0&y$#*t+jd^*<((Odfnbvm>tK54}DZsY3GvVguPgh1w5K<&(K2hX#!K| zf9(Ez2-514N35SJW1(_dQE4z>!ZSZmRZ3)fRBbKGw&!?cdFY#Edl97Hemray${k1* zZ)6|u1seWMLoRI8k_P?cSD8GD-1oY79}Y(w#T&lM;ZCB(gfoa^OupTQbZTrt^Y-y` z*b1L6>x73PIgD&xju?8!y9V>Z%%KzCa$$c5k;iH{U=6%JuhFRyeO;7cC*X6(uQzjU~DzcqMe6jkgKmVoti!tkmm z!96-$-rpKkuIZx9g^5ZRAIpp(AHMHgtnREUR`WkUQBGSF#=8Cc-g=v)IA#qOrKhv9<3)w9-0#!tM(7C;zs#)qCF z20lv8XSw~4wNa-uue*6`*JX8LnPx7?O<4L%@ z>O>`Yai1Fw8m#!C2-BI819LRsUg(Iip~H+Oo==5gZ7so*?F0rJFH2rwV@$*qt%yVM 
zrPx5Z7NfEVEjQa@-*H;*1^(N3_D_uQsI+Veu0mKMzXN^>X3ar$w8wj^d+)#jgZ zo>f(%k#~JosnIMJeMf&F!Om#Y!BmX{#UzUdt(no(=E-3Ly_#KU=sM{_eT#{%05;r( zdGae+wj4%UOakknvx8C-Ut>Gl{P_|*%Yeqz2j7&(xT4v@k>68BX#5oB(jxH&hhV?+ zQIPWs>Fbk%eiJRi*7q5~tU}WW2{2J=h#9J9uBvZ_qsw9KM{2LMdMKVrSdPgfXaArn zOT%*Hdo6}z&UHxehrJ`J?Cqv@KDuhsFDl2;V+jH47Do&oUdODgv1^Zr-E%Cky_8fo z&DW>8Nx@+gLjT)F%*S{TX6+l`4gtk;?DZ-Cdm1LzW!oHY$!c*L4Ll4MRUa~{e!<9i ztg2#8Xf*DXhH9g+*B;OxLyrDJJ zx^d$ox7e*6dpwA-$-r0P^WqcpktU&AU(B_8`Fagc~>w}aF#^xP`PS#$S} zeLMRS(CW@F?q(Z1w$bakOdS~_(o4^Kaz?`#HZiuFXR?qcr@z}eQJis2cwdrqT`k*qrIqdB2%zt)1FB zw9sGcF%-K>K&myJIIOj$=`|EIPo2Q)X=*ST0K89T_W4&=$ocQF4+cs(MWJxQr4Tg- z6U&5*FQo((m*I#Q3GVf}8{1-wfCGt(}+?4vc{*y_3~ z?^M$`{)gx5J&vh{M;TaK4tna$+0kk^oa-ymMzsvZqSr+Z9kNX5|rS1=^ zZ~SBnmw!Z86`5_A{xe*dt6iFKI4pf}w-(?Zn2TEd;e2AA0bDl}fYb)?BiJv|u{$8$ zacMwRQbB1m^@cGlBp8yyIz6)KNqXIH;72#jL|{b?_qu1a|4j<}6TD*w>AQ98)X`7T z@#w0>ymWRSFoB>S{V6n&zR8i92s~b81!4aFxjIw)Lqor!@J3;r|p4DqJYwC$;CYgpi!MX2O4Hj}L_#SS-ubTv}1eZJ{>~ZF{ zt>E#pCo?O$gXlumD--8W>);W5qy)&DCZ&&qyMwGj9Ra<&y{H3X_X75$_(2LIHLT#2 z14K~~hEBIlB0u3e5-e`;XJinkg3SP& zWL9~F(M9FL#ooK4VW7UV*0%a@vw!5#6s38^++$%RYE)q{$4YeM5-Yn@8eUDbHa_@t z0dU2b98q!h5b;jjC8Jx^&2ZCWtg2vVFl1N(VbVNLo83v(IP|h%6fZ5H1+`^kCav&{ zkp@5VRypx4&%35+n=^E=Sl7d{c)n&z-R(AGvXKxi zADiDgcZyB$?ypVQtk?sG1cuSRS@8h4)Z%wP*mydCz!h-&BHTeUWPGr*7C^d zG^B+fPVp*9XT#ZR%6QzlDVM@%yWbBFB(8qth7xvdX5pP2<5O$wZ){9YyF*5r4rJ6? 
zTME7es=5x$9Px6_$ds7bTAZu}#AMcs4?Jhsp~E(J_iHT#lJ$c>@c#O}!(5G30RrFe zud^q!@8nHU5IhQbSTDq{IKysE5wK=yfoG1(zWHx7>H!@7p$ixQ#xQa_#fTS*zS^%}vBeLCG{Or8rmfG#;-vV*Z6^VCrmk=q4_h-7}X zNOjXO-qAD3;r;qmuj^ei+@Ed)NXRP7PYSJxT7-`Toq6P6=^dE`khO^QLrVMFP*AJN zKfuctN55AT91d`<;)|td?EevY~MiPqoaKWyx$zZ`0`U&ED6EJ3n zi#03>L$=F_}ypU_8ZWrHTLP5br=5a8hzH+ zmpOvB&Qvvs?#Ohj7mG6V3mr(xi+x!+yyF2$7^rC^%gO4BvH@QwzkU0V z=zserFhuj)?kx}E9%&=o*bmnVrl$bHxC~;V;|hbUwz$UzJLXl?g97XVMa~Z``@v z58OEq+wT_K-A`ZLoFf{71P%9(i;r({J#xkS8507Ca_JC0Ar~P93+S{{O5S@>Y&GY2 z>+_48W&M}(_Yp-cFX3H5d*Zip6K zl>H5j^YHFq)AQi7>^g79y<~b3D|vInZ7N@Gs|!4iDak)h(toLYS_S)}isZnqvftTh z18njt%c9~wMThf86ainWu8Z;*HhOD3OaG2oOoolsojW-fY}tk_9>is}`YPTZw3m%x zUyKl^IG2oH(csTgmd`aZLagcyz3tKTm5nU*^>`i3d>6~SEq@s#sHOh?5^51vuQzXE zsLtzca@G~zY-l>aXD}PsJ&$;BV#kU%@yfOElRi?ChofG(p2>158U6g(-X%WqnMi)y zs$P2I?J@BFqdF}&$7El1kVO!M2pwDcGb^i%W@cQvohbolz-MERdE=2$D!)gcF?j!m zx7QX`eX@E#CH+4AwV+^LNqC1+r*uDRdtAMqGFJ4Z@u3nPzV>moyPxEdnf6t~dZhbl z(#O=!ZvAj$cqir1|3R9=77w?}Xkd7{Ww5`wxYf*Rv$?p@cVtOG?<@z0(5A=yn-rqF z$y;_h;Y=$6FEk~2b?hfmD&|EP^ezxhM^`;-QbT0` z-(Zf!S-5?*myT-Nx4*o7pUF0^M$iJol?vf$PdXP>+~?ND@v(&T-*`<@5l)bW>D;Nv z-D8K?yOO8Tx{$T0US(tVmdSX=&l||-MU#?(!Jq(V6ZYjhTUP<6I!3~=pQN;_05%mT zUJE6MJ6Ub@YEuCR38M>zk%;pM=k^!p380w8GS4O6N|>$TTYA`s3Cz9l9Tw;^)fs+j=k)!Xk=j{5S49_>nfDLoHr;wQ$}+E|_Ug=$9jvXMURbC|cR z<-!!v;>h4}tQQBhnx&U$Von2@dF`|t2Rk!0*!&xb^hzbX~i4R8~c$De9(4$_ zD|X2{M57fm)fH2i6ts!d=uH6!=T)SH&nOOk74Wn}3jW#%js(2THD z({3GQQ>xkZN=}TtGML8M9aJf0HQS`Q9mms?iy3NlA9YJi=(T$>a93{H9!B8Q=*?(C zx%X9DnocLK{@_~ET$TgjGN6|Z7L&~^ole~@b`P2%vol+?GVgEdp&TccR)&fVr%w<5 zEcA61V?MM8{=!>+Iwwmsxmc8rx(RQ~mt_3?TkcO;4eLm0F&W!Z5hjD&Zzlv0y?RSg5;m10Wj?lTTo1%tb{(+uh^AnG%o>ztx{J)W^H8}l((#5Hh!Ghh5zfG+BPgil_{hHZ*d707rS?L54HqorRyJe~8*@B~=r$3%B z1;keRRP7EZ#~U=jsA!dsQhyKLmu3aZ>0m&2$6cwl3c?WKwizE*^3^cdsq)pkXTd3jN~N^U+n)2~-5vrc|y zV@C4?s+Fo!nlaAkHRsc18{6vZ0yX11Dr>5{j3kwWoqhT$HryQ4-LKZ(Bc{-+OSFeG z(AhGc2k+gg^|Uvds#&NhJJ*z&Tj*)a4c1Siq7ssw2(;Q0y<+UZQ~OP_FwmlBSff4_?v&u_*PPHM5ABWAq#O$fG~)aLh!! 
zFaxwGQstM|i2$3Rmt3Ub?ay&mHhOHN^H(+N{?gGJXti0%-eoamVZPaerSo))9;p_}QFS%`wsvm>wnZfBauQa}cyy(8?VqE_ z$=0am!PBPSBd~3H)HW+h@oiL|$;C}ot_1G}4zglB zb=!cAMrD)ZV#jAy`@|sh?xpV{3SHLsX>+T8?Oqs?o~N~!_r3XXk?j@y2u{~}3DG;B zNkh!Z-Wr_SW>@(bNW4MmBagP*m2lns9{Y>LNmpz-NR)kQe{mwb1N=7R6>(~u>80>7 zKkhh1S6D7M@^emlXci@Dn#q0WxTC6zL{i34p#yugvcB8kHa209Tt{5T{w`v$GaX<5 z=T=@q?=K`_Js3$tv)u_|Ng6mQ)gZpOdm#sxTcd;`r;xG0hIg@e73;JWJ6^1xY)+-pvrFgMi{rBTtiBzj(GTslC+r_NCU0cVV6wc23@OOe#DdDeT zr7-om*WKL&YjWQt9E7!XTB%6<&{Np`=qy zNyJm)^s4r@f1|(XIpOMJb}4k-t0a=(5gYw;v_~ZU*-T48qxgb1It}Bll;Ya?@51KP zMZeH1{|#FBWr9LAs4D4Tw&V+#xNc#58ZFh= zc23p^c770MQ}(g$+q-{Whf{V#dp&&0?oEswQL>@S%XoQy>$YYLnj%)-$NF|*|A|6B z<9%))FVU98-SakZ?4_JuIre;d(+bk)YX;c#?$@h z+(!=r0)&ra-e+$mkhkOlSaULV(tCe?uM0$$A_qrfG zz=q!9F&1QldLi5&`X$J9zm~6$?{PNE8!Eu(bc02o2u+NZxU+P?)S*ivm}fJs4KQ)QAX_kAIXDUi+c{5 z+qbG)$rBZI0u~Iv9%$REV0mi}C{%n?CU|VpJ#uxH91C2u81SBr`aZ>N8XqGaxI?r{ zvJ)rj?QydU%|K#z#I8KO-GN(;)>v)gfU5??8zN@5M|hO7 zoZmrnmoAxnTIoNTs)0AzcM07^=l|9H-$&mWHwVM7s`v@CBMI*K&X~$=ns#<7>wWyJ z=sO3S>Cjr@=~$y^DtB&V&(_^AaB^(*5G>ZCpfDZe7(_?XKX>&R1X~x!{QghE_fZFH z3b?(fyqw{Lx;RrY%&%>mRb&nXIPRRo1ZJ!}inyj3Vb2bQGd5Y3lf+QYHQN=(GfvQ! 
zmF-F#J(#3!v+9FgYLmZ4>g)?0wh+srx2dcNLVf<`@q2S!0ywy8%VtpaemWqQA?F1X zHnYIRb4cyAnMjLqOmJygPwE9U>?xGFCFKA~;c6KE+xIxeWv8%0{X48=mqIsp$BH;oR|+=W>)7+S`ej$=zY zuW#TMb+0DCiI1@m6XaR4S)fq7ZFIiw`wNhm8|}nx%S#XrSiPiX^8LE-Xw)NYR}kq%;zJPXs$;pD(Xr*wyN{YsUuK!^NWST{>=)oR$%n_w=F{X#z!w2w=ClXg=2a6fWw9^qp{opv3e$y^9nn+7 zCK;PiO`^j@I@ZOl3D{N+mnDjN<2bWEgsZgucTJ)8&SNjaxI+xf)edqkjZwP*EU7g_ zCjKNiq8)sS3xOK9>;>Qf?S4z zJr;vsK$=Ax#5%MT9Ev&J4wy$D^{^*MmuR(UUUJy`D~r6gQ?|$}i*C6hv8e?S8@wlp zkFd<~*MMUUU&-Dod24onFqwp>%rIf3f!GYBRx~jA>iXsEY`Ax|aM*rG=x~7cI;f-VESAvRV z_L#bM3~vhQbOEA}Oc3lPXESa4na#5?YU69>||l+*1o_mkFZq^;eq1 zn*JqVuN<@&MO`E;cPFi$C%Y0Ibcl2?;HE5V-WZM5^4&hfumxu7x@xCqN0e%`&ocBd z<$h3BiN(bwugbM^@9PLsja10~U2 z4jLI?j~xlVv`G4wI%87|-;mkwv$HDaqQ0#GXf04b#-+nx&(Hp?IiMuh&RgheAhk(& zNKEMRbzuC-fz~o0rG?C-dwwg&PN*|+twU06(B8ExfJHu?ha{yV@jBnEjJUN!PCMY5 z@jDkOKFt_^MBtEzrn8T$vomRQWq=LG_{wP-B{g$)0IS9Ixi%rTcv5hbPQj&XEsTyq zH(Ivt_MoOi*D6-QCH?x1)g-q~M=qRiV#Fc1Ne#SVGj+?cT3tiYcGiRFP6-=NH=;oL z%1f{Yn(9p(Z+TLyvkhyZ1<-nKAVJ4PK@$0xXToYB%e59nDt0_a+lpPEyw?Ai&vAcd z=9$GNj{J{TackH?2cEKlr@Fb&mvXq^h{I8P6@Ln+I`!e)z{qxR4#~?l=9!;l;?4&Hb6lU1s`JLQD2!2v(DH3mYrq@uW#ZwUXetS>uMt za@grID6FQ5DZ*MB2eD7|^FPF?ljrI`7e>6I*wa>qDUU23Q!K0&*3gx~RS9Yp@tVKN zm7rv)YzNgX<8+ZdQ=Lv=@#qsav6cu=$yd!%w(x$mGklwY3*#4GSG4Aql-%h((+J?r zHv9SN(;rw^BRvBN=li?V=k;#n%wuYpWQ$&o*Fw$;+?w5zapJ`l*WM12|0cl~+nCCf z>2>#|*Rrt~-YMR6M2xfViib{}J+_Cw8y2pg*?U&|gs!VnWMs?Lk1+G?K85qUqGg*oj<(A3BIXzS8EYGmi2 z)=!-_v|AT)b@cJ?qOA`@dpqU)MGcjA3W;Uie~dtk0WBRcb&j}=I^G*DDwyHFY+e{#xld--%8mi84V*T>9NEA@xzo@Uh}v~V2>-Rj9O~H0b#fgW_V)&~2gu;DnYO%x z8=`c4+8Vnfv-}gpP1mKiR;KBBh`4i6@Ur<$w3a}P>pgAYkQUS1NX@QUlb;HxJUpE3 z6AJySV|e6T@BV)E!wMw#_@<*CS4BJO(y4=T2(=fJz=*JNijhaBAh#jJlnauhgDKVX zH@+Q0qknPu`y;iWoJ-W&gy`xdNiLVE^97dmo`#a7ZNv=Zs2;7=4Ubn+*(MJ~jOQ^M zjy;ZqR}BC0rRzW*tVM$AAXt>7qvBD0v_`kW^}js*MH=O0C0~u^A7RI6 zDyh2anEwB+nvvEZpY4WTF_WknRri9+bKiE`l)luaJ=jsoykintc{MRzKK+D8SSwF&nhD!H2`7u_g4jy#07BZ=as9AI;Cg2h z9C{Q!Am)t5s%VJnP{FTz&8u9;oWvL>!I=09a7~uw5hoyg%Nx)-d_+nGYH@q~qdH1L 
zqhoZ0#{zdso1pBQAg?8w;OF@ho2xL%*=~R?7O3GsQqQ~*Z=O&P^*gD)e$&a`i_Q(e ziLzpkg^y&_Gdymo{f`MVPyCff{5*3iKkS12EtBav|yw@l$!i($R(P2=L0z*~I=D zFH=`hg>#dkhK!cUpl0UYCG3odWbX8iKww4VDA;sm@XGGuYkGkq7`=EEOvkK;udU_!lZSH>5b?1v7(8<$ z5dTeI7li0PIf`@Hx7#9rY|w8U=ykn^d&OYvRmOTZd|5CiS8!8=gW0#nBY-&*0hFs1;&_0nd-u@OM{pZ%u zTArv@aL>n96hC2*86VjSw1$4tUjV$^r#0_ubPoc@xPz05YIT+5uWnB@ci# zxRMV*2fU0f;8l_-8xrW*cW=Pt4JMNd4pi^GXJ`7r)Y<|C-t9BhLIW|FzPV?Wm~;_K zWC3fCC7r!a7&4{3_sRes_>w386YSo5TEHt;S`HNOyNJp3u?2YLNXx;#L^teZgOo`P zFo#U5fdnD~Iv{1B`|i;J9mq1a;J}%HlL3GSnoJLP38UW$a^Jlf;MtT+`C^Qz-{N1p`P5==k`k_@Nc zi5e3DlrmA8T;K@-;Pq#k52j2HVhLm52_E3}3cw>l;YQk?E>8)3S7Mn^PgviM4DGvw z=^>B_?Q_DHLH0Yz2E2l#`4Gr_ca++9ugdhno%RVO^953Z4OyCp+9eWFCou|@?PVbN z&h)bh7COip;DMg@+Ix?Z_7AZH+5d#I_g<5UAG|~m9N5}>55D7_{L?E>3PV8U0LHNb zZAZR>CRwvtZL!KixXe|8C3+-^lR5krBijW7}~wkiqspabT(j2f8qAB9)*6JmAy* z!I$WR0hIx-q)mUPtckTQp%SK}<4BmSL6miF6TJZ*f@uP9CEo~xWb#J*kDiI*46n}) z6UQE|2eRL#cDM2RnuugVd!5i^kONOX0I%ds{E+_vk$m61EE7L;3HBJP2m&`f9mNbR z`97opfxF@#%37H9J|_s7O#m(kP?M<}TZRZid0!dy1%tU$|Cw0pyBEiPZ<@isG^%cB zK2I$b;?LRRB=367vThOwAi&N4!Fu3HA>frQ?JvTAH2ei$ausj_soIG9*2+VjV1BAT zkj4NCWMQ&~R`!sV6!i9CO!N66lMD7AEPao4oFyeSKnu165wHedLdevOCqo1c^aTZO z`JLd0A0d+Y>&rAen~-IGPq^=?0xmPs>m-;)2nqZK!S_(j0A8h;yrE?n<1~qLHYmdO z>l7qoV9v&lf};1e`{kGjAc2{E_k>K|2r{`)z}$e7+P?c%fQRr(!91LH-$c0Xeqzv{%Cu8 zdb(p%!ObMZb;ivxn`kL7ry~kuQ`xKo0}7YKx&}N_ZmBhoh*O2%`#fxx9Jy32(5fk& zbVQDb*TFahDKEt{8+Z~nef@DI7`6QI)Hs*p`}ut1dCBdSrdop3u#AD{M=c&&f_u(` z#uN@K;X;jDX9I=EeB8lZJU*G3V`D+{y`eSxgFE{s&rM+ium)IzKm{kcKXS^rqtOlc z1DCa27B227$H~q*a0qm9G{8LO`E|5h7AAg?-{HApPjC?+cWyj0sq>SmSh?>W4#byT%<0nxN5l`qFoh^?Y=K8{Ip4>qgjH&>;qy;gFOJ0xD>#B06azHkvolhe$x$fgVhPv$EfoPVQe?=2C0V>mnvp9Dmc zIm{dksE0LfFFPqJm^gc>`kB)=r5yno%1I!Dn@XKfS|>Y-P; zIq|e>i;1~D#dCAvS5Gn%rD`vwrJ+kEbDLs0qnH!Q*9{G*QSDSk961HRYE}NB^Z%z= z4nUKKw~(t&-#c9JtAafrU%_mXq9PS(vdo|gEn)XfR9MmzNO}-8^J2Kx*^Z3!{JFqP z@=HtF>6Kj;=q8NLGwaJ&WF(;W`)pCP7BOg~Nj$=QDGwfs-Fn4&AKzNR-UH&{pg`-# zIh;W?TyS&FtF}d*mNe5agR@wppxTof_g06Am8*FP-$YuCxMn`iTYfSQ6B-!yEojk%m 
z*(R*`NdZ2GKMBMNow9&IFuymIcE01?6a6=yihY9bit30z2coEXCK;fv zbBiUGwJWN9;${N`kBxKdT6dF*ytMv3KPO>{!!sMv=}thk+>W(WqlsOVNL zj<9T(GsNb{>>~(4oK$$QR0c-ugxXJ35MPZgGwe)B z!MUdsGC$<9oz3r45YxO?!>tt1n5wn)pSj^JsAxs^A^br zuoX*(Uw~ui@$ppZ@>??_6}QW7kcba*W>&qInW!IluvuRYEIkheQ*ZTIVl(p{q#y20}Y zjmQRZTC{v+4`ElImP-gNUN9nS9o6Odt)5$Y>@RmvpE8U_qJlVS#*U24hzK}qJ8>vc zj5HqeHA4yDiiy@nk%AA1jDMcQ;XD>_kB&MhWQ7a@I?4m>HV~3ex0Av^{BjQt4s<6i zJ60AJ2De9B$CJ76h%IQd$I4Qd%BU@)y7KSa7)ti9@6f6X2O4 zAEEOGz8EjZ_wrBwJ#VZ|nwASEu*0ohM)7+-HYAD$GmT~#TjKpnDOjkSHI!5;k8>IS z!-kQpGA|V=71l((>v}~nv9SK;xwS=fb>te8-hRY=lyjGLN_Ix`qB6t>ydDfG}3!XoMMGRgg0)5lV+VAS3*x zMD>+BJAqFlolkKXalU~PvfbXR27p2XY`(Lko+QdDh=>Y>RfQ>u(mlB%qM#0d1%Ia9 zhQ%2eDCb2@2143fllM=IUk62qPKr*BLSUpbA<68nKM#`Q!pu^jTBB{}v50}>{XBNh z2mRN@uJcTdBxW(Ep!ElFS&LEhb-p!77^z}Nn4&;1+M(_rERbnr?OFPW4r^vcW=ai( zRc=>;hnoXZPt0zy2#gv;XlI!-Q3wh~1bbDd9pi6QP*q~!J4O%5ssG`GdX81N5TQ~| z76F?g*&Acc8#AU}VT{fv)J7r=@RF+i!8 zfPix63WJF(jWx&gpI$|mc0-^ZDHR$QG7LCj2@1B0g~?D56~m;bR~{yQ|8?pP2opcu zVL?VLeIv?@gCvV8zoB=x1-|!U`4NPmDB9tof(pN5kle<6p`lV6V+7L$p@~chvqVc8 zy*9K8X_LCSiV}lh%u}UKSmPWxaN%E41;d~kD=Nz9V`MVEiC}1A1VboL8BrlM=8PRm zKgb|CA_UV_GPm&!laf}KYDeGY_Lt-l54?nf&}(-Ms*Fd9XzbZ(Yw`dMSJ;0)J%=b#3GFWb; zHufW-iqzsI4jwz7(3=P`9ZQ5IK_axgo{Yk=a#8O}LLuBj`&sqKIG;N>_}n9dT6pVEthIK_7*+iDWHb=mx6*R)nI6vCD-H zFjR1(QdU44C5-`?MBfG6M(gnTu%>#_s-bXrK!FEml=OD0q=iz}a@%=)1YuvZq7377 z>C!<-g$V!ljw8nR{Kb0YK%88|fj0Q7#B~4cjQ5;3p33e6Yt*X^Z#6TYmE(&CB9t@? 
z3ZmYMBbQ0EZZCifMKr(nWFgoNe%XQ88`-6)$&ONPUs@DS2*YRo0c!so`UbJ?k~;n7x&&L09>m+YDLg(9 zp;SDx;3qc0_F2X!71x=FCfv9v@iB6Xl5cokmmlASRG}~|RYd?84T=8%07nvs83%v- zY9?98DW;BHDn1T`m=ss+R|e`Ib5zkn$QLY3G6#Gpp}n8tt1_boX!do4D4jvwlqwm- zerRaDd(;?Zq_7&ucFG)nkao|)eqp$QGO`fE*o4`sdB0GINhHu=Yb3$~1-0LP#=?9i zLD_$|#^MS`0U<^UHrz#KWcUG!5Ymad|AX7sh&jfJ2}{^Ls~zS=RP88;gA!AU_+-RY zhGc|GWMwp%4R#74VTKLVHfW^x*Uz7nK?%a)zgV(k4fCoLb^d_x*~<1+a}KO+a?RmV zjQ(^ul#As;dWgx5TgQ+`2&xaq8m;f4o*ZIovc>EdQzZe0qG-cxLBdD#bGakHqKViF z`N0%&q9WxJKgkIzi+ZJd^$aP7=pFmjh2@1CMLTyHAwRG1t=TqbWVt)J1!ZK3i8!TY zWXS@Iscs1oyCtASMy|7MEgT+wm{Gs9ErXyzcECL(u|;wV_)>}-g;VZ{+;#(0))#66 z{6IRw$g{!X0jct&fuekJsxW~mgZV;&e3F>R=~&Xph=9lqA#8={SQ6*oA=JSvA`R%b zcvHXk)q&Tx9nDl*CG(A^7!$zVlHnj#~={=F5yCozkgGzD12Yz%i?x;|R zd2=c_!R9 zPK^nT4vsp1s(&r3FV6#k%FCR~z~U)zOgGkq*NdnPH_p2NIsijY5GX#}09cSBN_LPb zD99<9ror?a-pP1D2P1bM?G5$#*%Z)}kl#)Q??T&a{uN*HTJ%I$*s`(}6HU*4bK3U* z=F9)(oO5-Ook~^eW?$6KPF7W|_3*ks?<;+3(r)OQKm$lSb_&aoPvo_xt}5b*?d9I< z`c6H~th(W@)Hzk#NJ<5!Jns+Ee9h|EfImE&0DD@Q^Igg2ESX!pNky6B6$~4-7ep(* zyfS+{-iy(lH+JSixxQKH)k$ricg9+`7nxIMZ9)wg4N`?ok*Yy)P{p91CG;{wRTPJ> zHrwcaGbd|#g)U&C`uA5`-5px;Kt^U0MX{|9fXP#r-Wco*6PNgeHW-T5nu&8YfNKrBb!pJ~C%ZibuVhu%^O5CjY6>63({V|-IeKS&#`nZn8q9Ws@ zs0~a78OhfBAWr3jcCh z@=~*ckz2{>_yXjfq~IAdlF9z5Q31CB102B44~^_YfPmN{GeOx?|0UsFu`&tl=;#0< zqTqZP0^&d`S^6IuFcR1|D8p|529FGAB%Q_rekqxBf2B5yD8-TSxtwA)D}<2SAmQ3b z{nx3LvDJ~(oE|y}Qm{Wv5!20|>5kHue!X~wtMJD%Mm<|MuQA3fZR(q$9g*ShdtZf0 z4%(n5s8Xmc@1{F&Y38#d?rkNQd`36yyRb;TSQH4YqrcerZ)onr!>9C*&FtDJZ*ViA ztkqiHc4|&`srWMIQn$JheXIQ!XLJZ5-L+-hMQ9`z`)GJ>z8^nhpE&a#TtoLwWMyE;We**+-D7~s{YrjXi{n{~3&k|3xN%D*y^ z6ld$RJ}~qbcYgbs`P(Hp*zvY#a6)!H>FAK+P64S^8T7@Hl^e>`CO>;NQ!wifTmbr0 zMt{YRF1swyLnM*B!IhXz#Llhn#>%a0^F@)e^W3L)E}i|u%!+AwF<}Gq$M29;8_S8a zQM(Dtb-3$A=X(vc=5bzQ@pU`mppfLyRu*k%swuyb=j7XunK{z@S@q?de<|K2p8X%6 zwFJb~Mg2dB2ilp&wf?#-aw1PlA9)3G<7&PBY2{pG+j@`Df;2l@pQh?|0e^s)biDvHEd{*FxO;h7Z6Hfj)6ddy*H&6%reRfi9}9(?4kuATK^~ZO%g(;gNiCQMwF5T^{*$9s}eiLkbDj5#cbzV` 
zClEhEyy19-wFQgUABdnfRM4H-j_#Vn=EujAJVYUYsQ_TkP;Ac;mF|}fVQ_Flr?3Zt zHmlh{!`J(O<3&LpZ$-Rt0WmQI6J+5I95IB>*A+-I&8uNp1Sl=bp&AIzcu^tF_33ep zaUm#%f!_07Q}wJooI<$X0r4a9PlxxL;Ky(~57C!{JSk_#2jYw$v*bhX%j@*Kro~}R zh5iTHqAl)mye(kiH|8up{YZ9rKHhiW{nR9oYDIK=sY`JRjbDRq*K2a@qq*w&c_=pz zKp<5a)yXC9;L#u+caD&kwbi0}C%yQrKv}7YaE^c;duNUl{vL5e2Dr(zk++ObeWh!h zrn3HdxTIm;LO+FPUbj`3ROH;SZJK6MAT_O#EpgW65}7{Es=}VxGMd>f4qp;`p_B5r z;(oerTB%7}iIcJ1p|tlnN)2l!Pw5zzZIb_T8J|!mc1V0V?$L5(q3G0l5$V`oU;Q}D zf$!`ZVKrTN=U*`Wx10v5;rFH@0Uz1irhBNW+uusap8KW5ekiYD++YG|*l(09JN)E= z@b>v3hWhllUS$#8a&HMbiCa5Z|w_j^4$_!@&6d+P#=KRGxGNRSdc| zEDy7TpIoi5nx?nu@Lc)o`M#phcAj7TiR)}E zdnM@!-oW{GPKo=9XYXki(__k+TmbR1k!IPO*{;W-dad7PT6c-9r?QDQ6*wZ{#i-UJ z(v~B*;zB!XI0-cg2u~1b9y-ZeB(yF0I`h*d)+%(6Dw^}lXN*wTGFafo0nP-Tb@QOM z5Nf3A7+hu{9WobYx$_doFElX42{$nu(r#yF1^umlK9fKRQQ9?jKaT$E&cvez;&mhM^7KA-If9GdfY$(BXoRGL^9o zUdc>f*RiHD&b`4~u{Adu?UA+BbUO6d#>jqrXwOpf;K$Uf4yl#3J5#H8veYWMXnC3>0|MrA0@f4y2@k)?musqaH|}M7uTVfzwS<2I?}#!!zKeTJ__e# z;Wc9YcLMM_I6NaiUbzTPs|dW&fPYCNQ-X$6BSOGg=b?%7emp#3hud3Nu;4cg>1cwf z6<#4&LCOjSO8AL5sm()VwB?iw(HRWckW06(y_Xjt=VTmw()xMz{spYx7;-75Ne{`~ z9gIqpQeUlk`=b@uKm^b7mm5Kg`d3U8P4KTg=HEn@SM8Ux3@XKOLwE{?8^P;@r1>I= z`~+;E4?i^QJ-s8p3@6k`xU@TK7%{l#Kj-k^zPxG#M}sSsqBgJc<%lx&yPFTs!sESI z5KSQ%`WfSPZ79cG1^V zGWw2JRQE-QFWW`Ozz{B*18_|*@Mh^EQTurD2SI#&GcS#5+rGCpj@Hr9!>hrHoCb8L z=a)W;Wx{l6z1m++R{c?&f(kBA?xvYe+N&g;%C#qL67DbUee#xuM1pw5u7h`*I-!{l zrjwF&nYO((jN-P{)%1kHS#a2~Z0JXskR~LDS4ElC6xnbmQ|&sB70=7QP4>#&Ya*7L zbB0FMXNOO-rh603pCMdWk_HMYQG;_{#Po*k`ResY{&hC7&XUeGz|07fi^Q%&&JZ^8 z0K4MwT%t=i>nXL4*$!VNUV96WO|4#&w^{i5qwYD4-CH5_o49Vvx_w`=@+S7ao-V5! 
zz+`y$QpM$|&0;(nSyT_(=?W%d$##O{P3nJ|jyJvfYnmi1ze0K{X-m%CN4lG~b=gy$ z3vGAlc?nK1Jbl*~SXQ*?hyb}d_uyu*6u}JJ7c!b8DLY{uVAJ)+!Yc3mV54p*M~u>` z0Zk<3BRnqcl~CeFhfCfl%9UF!excy z*u7LTo9$GB!=S|UDg&}6B%-NswBl|HCVt2Es^Wg2iRS$c?*AiE*EYwuyzbbGq-NNw!ia=MD?nQ*P9?DB9PG`9SJ5sr#4~n z^Bj$eMO@yQ5>xrk|{A;Xj<>IYo~dSM%-= z!E@%5+8+A;k&)K@f4@{EruTBi9cMn^n2R;wy0ymDk&E-z@`RgxiJC{NnCCj%-6ruv zO!kQ&yLBSr)9`BzR$PnFyNiG{9;@1?hWH(Ht^b@|H1b0TwKV#Qo9DV4OdhQ%4<-B@ z7Q?=uzbPI6sZCW(NlkG=vKl(-65hD#upJLJfzw|cq@)TxhP$SCR$Z|#xmE@At^6tu zvI@8|s0_83&Z|cqzH`a5Sx)WqdCugvF6FP1%YUjFxLewt(zz^Z=xM8aew?NSZXmzS z<^enUtxw;!I?SGzVT-I7B*Ol>iJ zgTvh{z@Fw2g$wa}IkgG&O_|?6+|L~BjTS7LE%z=?xc{&W%JS&lo z)@=J&PL@CRznjs*3Uq1SaHe?E6DdhY`)0LIt+tSFjSi%hm*%}w%ZdGn;uWwr7D()N zKo~83m8$C=**&>JM~-~f;8XvS6_YT_OW?4kps;V3$MN(WNE+8{H|7h0wyH$V%k6Y_ z(F|BcO=nEJ8watwir;*|&IY^(==~NG0I0gM6qL*~u+}VCmRKGoY1!8KYxUFlTMbaz zOZrwnzWy4{X685N-{mYrw=fVuOA9FQ`BRI=ZD;m}$JocB&b!E0`^t79%`O`ChUZJ6 z5$$9=fS?(3*P`QbD66_v>P+!VqV5cEJz$+7YhKTv@^n&bHnx@1lMQPq2PhTQ7PSq; zZzXyNL5835nDxBTDmr28ugwV62y3H`TH+mA(z0TWUwH403l#*Z z%Zz6+<~Wtd(XOTdnaODN77BGdO@H4?wIF(*g71p7LiwX{jfk z03=|5p32wrg6R}hmE|~Ds}Cv%vKoRa02{U`8B#Bg`BSTJ`}?s?%c$U>YHI#{(Vjho zT05=c2gCqdcM=iK+`-J%&Bff<{(oFY6B|UhUu>KtEF}Nq>XQ88;?^hmf9JUVpByI( z%l|jW{l7VWer8E)J2!I|W=T6^H*;}wQ%5s%W;t^QOE)VLR&H)i?*ALGlCZP0aB~X^ zBEtReHN0}BeY}0u)SCiUI@^dks|_Rz$d=Ekh_djVLRRK+Y=@C@G&-o}EQB3%dYca;}j`Czj?;U%p>= zWOMy$Rn^Ncc9B?7L&@hS1sqfyfM+?jXb~M)rKv0MRTmZx1YD6cr9x9gl?H7i&uU)s z2+D@b&~^E$T%LoEQdL8dcEl)<1bIGRQ9N^>V^3fUd&H5!O*Xbw7Wvw!&j@4hP}~hz zkb`r#q%kYtZ$z!|X$%1$eW#jaKX{2x@0Zvbn#Vn>KB&rxh;N&a>VGebLPTAZgy631 z;ce2?toh{B#XbLir}kIsC?)@NM_L(rLo3ftb%jQ!x0ElLEnhbI?I3cvcO#D}Qcros z(7c)_`5?Xm>?f8ie_2R&%w~p6Q{+o6QK?P7ieKlVO6~=4UYyP-n1!hMypbSzHC>6I zoJXKq$t}%!F254Xr~TntGD4$co)GO+3yP{|skbR4-j6jSYd$Ac6+*Cn8q>j?K~<(o zhFK2&!vd=>B;Oko($XVc3!=E8r8F$>(IxWJGVtm%GGmbEk=EXf`s$ec5z;R%EBEnNZh;tq#LYrF!Hg7lyoEC2&iKNk zpv0n@ju^5kk9|2HE1rn2F&iLTV4(JUX28X!6po#CAypm_?B=aBaTKl0eew>0h{1LBE=Pp4R9rdx=bgZ7pn>?(EordvU#0e)qzORyOB=xcBVa%3KBlCMO;eL02l 
z!gYBPO40ld1WKt&t8gaYN#(m=^T^N>%p#twg+t7bS5abTMucj39uo=!f_~1n( z0KS{Rey=*KNXcBP;lAg4d+mz{#d~x4hq+*!$Y~XHeoAo;KrHaZ{jgo9ng7(R3nISB}yZcF-ZKeF6VFv_N;34DXu-n`Ys}4gJ{8D zSJDEuobU_4yvmC2>k4CAF@~VwQrMPdvA?`}Czv(G8@or_5|_)E zZdUECrlh#d-o@Kpxp&Q7<4JDiiT-MXcYt$r5Bj%~xU}Ghs$iKS-t>#hB;V8;g1scj40EG41^b(&~@E^4u#HbsK@eiRlSr!mXeh{S{GuG8EL@z~O?SRm|-s}zgwGKxaL8B>u%pJ?cx0qrynZaD@c8KAz=6O$` z(w&Syh*uIZB+*IyD>FZS)|HgOusMIk48a6$OJ;vL^9YWA_|Kqm&R`<mQ`@* zSp2PNPcrtcc#8d@wjS*6Z&i6w8sXL!Bf|R)n5o|zNs27Of{G%l-?>B}--NY9VGJM* zXqDx?$H=#}z6A6E!N|qEn5TRHBEHUlLZEGdV}lHunBcU?Wn99A&;xJK-aiG*` zV^ncQ=c>H-aB7|Y0k=&>elC8#$6!=_>r7bCJ$q?E%z~il+HV=eR+G>SA|ejr8X5qN z4@P4%BF=C^3I`o`#%nLqm~Dz}yVLZIn-l3feQ5q94;VWy!c%(?c7UVJRV+SY`%W_) zil`5ULpH5@G3M%#kb31c2*xBT{L(6h@A0MPW=ZOUY`=l-kwq2>*dAj8VFr7C5c9nt zYvrpT;gML}sCo_ZqA1Au79qaxcQSA+l)g|sldstq*&PahMHGY)c!lF`Pt8q;f#^$% zZdHHGy6Dd}+!MO}`^i60NunCG02(eFH2W6KLQoq-_;Ol5I_o(;w|_b>dp~;%MC!-t z-X+Lw)Y%|3NU=?D)hk3kbwS7l7sVD88W)ad(&Q#)f&Ze6gSLG6YTKF9H{UMGWF_MNz0ZRG0JtkLH^>^8Fu!+Gade zpzo*767_NVsK5O?ymPomhv>gGtbPp7d2}5}jZ+~tJl5ORIM-e@Nnu&P&1KJT19^%= zYl8$Xq@GBZZ8~UgXfrR?SH_HfOxAgJXm8g3$=@K|74eI3!27|v^1X6icJJh(ys&J4 ze1V%a*Id*?9^=2Yn95|ZwjJ5uGsaU`-ohJnjSV411CHoMerLkg661xOQiXPn=+BJ| zmF1SCm0U)ffLHRygtEC5_jY6*7Oc|6CRdTJgZX-GZI5sxz8&xxx+`<+5<40lncr_O zaDu@cE-?I_$j1ZaZ%Hx?CoYt`$D?S=(lpoh^0YMK#4~k zU>59X9_WPs%Z}X3ew%7Yg8P%P{T@tnQI-{jYs&u*)3)lzv3`M$o$?+Sak#4x(O*pd zz@3Tiy)V)~$P|9Im-3p$^b*e zDeaY9W4ryrLfAs1lb=l_)9u8U(<5gV8_0U@1mu95_RClAd=PVzwd`?M-th;cRVTEY zKsYfcaD>}yGy;CKRR&NlWy9^2`RHRs(q*nr1y`od?D*zvKCFT-Y)183j5J=er9*yrVM< zsww<;oq;@iiqkJac8S8Y4MSfl6s3w&AP#~nVzUpw&PYzv&(b5Zjh)TK=sszr&~R*F zDTadyWh(PP1aNAOI3yQfFN1TxAo4Ms8@f0jV6V-6>j@=on$wWGKQV9D9`-_$X=wLf zP_BhPl6y4&a`l}G6>2gG=iQC_CM)W`x@Ty{U!~-Io4>lIO?aHmL>pX{(lBCFij+Lx znVxz=p`j08`akxIoqaO>H~2xXpSe^R$=h#v7WMstqz@EwlZ5w@ECNcUVMU8qmVVyQ zQYc9N@Tb_17+AUo{YT+!M&V3-rU#ANfzkDdZv4rkVBI{l$5anS33RxCXqw|3AU#az z%H{>f{S6|M(D*wj#xaIAa3zp}T&YgK7S(niuv0{W<)b0?7A2a$l%<7y%xLgh{E_em@e9e^!Fvep$%KB5krL`+d$7 
z=}198iBJkDj;IuNC8wFBQc{JbM_2CfPShPUg8~>&#O1o6@kCo=F z3EAd%oX&ZEXJ2VZ)-Tr(|d9yH->3%WBaOmIDN4jKkvVxV+s$OUPq zlt{?&CU1@y+Kn)McvBOF-Tt_EPDCiQvI6x!ynj_s6!~5#a^vg|J#$fW{dF^a_d8It z<>D(1kiLEs@A>qXxw+;m%}diU#ObCtLTUF**BPiDxHY%9vW>_|Jvhq`ew{4}j#@Vs zDe2ZjIj7vS4r5Ry@?p)LyuuyAA46ENE5up!0&p5PiVMWv4z~nn*Md8ga5R9+fV^Cn zC^ri46B}110^g0?|JA4CJVz53*6tsNO?x&ck<|Q(@3f9;E>sfo0ktQ0+}i=wE{e?< zlGuZC5n7iOD<3#}yl4^tx|j&5PgHnKW-XFJ8Ye7!ahnVGT*>9(zr%E6yuJM&_WOv| z`{yC_pUI7iMtXrCp^MN#QaLW{T?9lS`!iM{+3_oyERC^mmSxI6E-LdbobKJH|^I< zgWm-);7k*G_Q@hAT^Odx8(-2|XVO*!qx+2enGAo;C(P5+#O)Z`?t$FqLci||p%S%J zYiN1SE8j>cC9#Tg8rkQL40mn|YWO3n#AcWq=j-#x@T9EvjnP()#E1qffCiawgwDPW z#tZGU*P&@=RC~Rb96`L-c`<`^SA73f?i&L83G?c>TVf>e6B__AbJ-D>R{$RfH(!EJ zsUM77;VHM<5DlfAW=f;~>~Y=H?vm?g@U|V3d6X@Qcx##SivLOT6Pb@yxtFZJi_+!E ze=EN`7I54T_Y5Wk^ZUizU+%5({_5HsS$1aq16ath;$ylAs_O)QWQqGZ>(G(t0Zy#~ zg*goDgy!eNyMM&9>v3pEGjS%m!|_4Wz=K7$M^V{PYKyoobzJ63)Bz+I4!!oD8ut+M z_kaMt1nGheITR)@$+S)}-2`;^$$>$*)Jx+H5nRPLLa~ZxLKV1i=%1kN-y8{TI94_4 z3AZ?Sf%s008q7ch*?xBvw}?O7M$KJ(P`KfI?B30bYv1Gbu9xCzNullgg*T}{o|Q;U zT6)24XjCn843F<}q}c0#Mg(a}nmPh?J?L zICZQP9TUoG=sC&qv4f=2VI;?qp359ApDY@%sD?>EnhAn132!vQQOZ1rIFnAW_%sa_ zjx9ESQEwUNq#c9AoGMt%N^5>li9BJBSo^$f#M)*K3t~Ghph=;2 z>g@bR+=lwnUjZHU&>Zr`NbhKYJNh{ay=jx%ilX~1zBMXqL`v8PyI))LPTe!jrlQ`b zI^78LbSzNsQ7x#ch~z9!0F*|zB*f;(R?K4}gJ@cN7it&H-LDmHSu>eqcTU9w(~jc* z_Htzj$$cZ|8)hT3;&@Y4X3VSHOrXuR>n2$75cV6@w;GaMpr2EyL$dg7DrUvl_%UM) z@;0~=z4Ho+rl5ZJF{FP9l(9z-uR6sIfW|cg^>kE@>0zatbI3DOd>038DqhM36w0aT ze>X`Rq0VC15|rJ8yELV@MtH~ZMkAvBC*&itRTcHAmjHA~hwiBxm2XbCTUMlbIk9J_ zuZ-O8oP~?8Na29~;%H1Zm+$Q_EA}Y`&*_nhbgdcB*}cOyj*MoioOyBD{!!6CtYywU zqRC~c-PTxCIxn`j-k{`h^fQpkWj>B%qFYP)!%UgPt`1+8v%8>WG4&sii}wwNp*rUP z%N|0n4!b4k#qb};im^n)&Nn?@D^5UqubNw9wqs_V?`i29Y5Ebfwt-bh806)7QH!09 znAk0)J9$A69-bt(9XTEyew9wlyEanx=)!zesqHzUJ7wxOx;^W2-S7JtEyE!BWf#E? 
z_4!*#Xdr9h!i=X}md36PY159f#x8_dS%r(2gemg!ed~_|} zb+VOB6FGcj{6#fWccpA%i<<6dKMVfI(IPB+t?zeEHg4}mDCFT2(5n>;!lCX~J^LGhuJiuuOs5@dH(h2X*2}?5aH(^uyDF2U9DQHEpowJ?ec##G>3 zr}QF4ELzp`WdmXI3sZh~YWA|Kz!dP%)J-#lUfoc>mS2DH)woqd57jy(SL&3{L+N^z zO5xSI2GY}v9m-iGLQ#J$Q`sa}P|H$TnX2&{&u0|DSW19t-ni~I~H(VJ3NgQM*5M%{YUi@EGJ;ir!<`8V=^hws2;!sNi)EU>jvV5 z;RQznn+IxQ7KnqpZUpsiS)l=1wc{xaUJ(@ei{-o2Uara$j-`ExrimS$GIPSbNRevO zqUaGrR71e*%WDs657bb<%H%|CG^Y}W9MF`Zb}leZ zkZY)ING)Wo@TJI&EB-wUHfRQd28)3(&@JeHL{3J+dxFZ46&R`4Mp4lrj3}7a3JfhU zgmGFpKT@IHYF;65J9IIg)C$OaZ2vK9oEsx6Aa@4{P;Gl ziFt-Od56omhoN|fp}2?Nd4>VJ!*JyLHHxJy0OinO^>xkAO1hT&fX_qGc3nzJV$nMEWz}}EkQ2IE#0K+SirjBJS&*|NjYso?)ATc zHE>I)E^uh*XSiwDX@qGQH;6Kbv5ewrEY}X6Wdb|SZ1CwW?SYLrYgUEs+(oe%*oFxj z;q=0~MXWaueV*idB(vrM)}zQ(S&8$KeZ31FE&0-ByS#iV9vyBRjnGNf90BE0H62d= zLAD9)O*AXH$0MHK0wRa?w2o$d-C#WN9pLRA+TQT^O;+|v?0dSAJU&d$%!o{ z?bhFYCHjX-J~Gb5JJ*hEYOS~c>+H_-w#@3y>8I9(9{Yc}n-F|;$e3=D3xX{*P%qwuZGWwgByWj*DcmvCM((4^FzB_1ZKYV_0PJj(;R z#%ikaacg9e(kM^yIxj7&JSQ&_UF!7z=OkVy+Axhi8U1EMtPgarJFq}SLx8ILlgr6J ztHLGc{D9&!bWzgFdxwqzW=V+==ZK@Eag1?-F-0exSp%0Y*jP_z{Dk7OXy8AjX)fYi zV#-aKGEF}QJQ(d8ssGev$fsOm>eCp#aJ)+@74}xTr^fsZ<3od>uEPWQu1zz;z|pk4 zxp^>(`CB^o;-HjbT$d~)df2lh&BV-LI}_2$c}?m*EV(CQAB)+%B?&S89Y^Y$VLZ}U znrV?M>YFmrADNO=Vbx+Eiuy1GSt#{qh2(N;3qh<08B+a3>_!!;!!XpGe-(epju*NvR8d~)oCrgf?KknCjejQR^-dw z6iZ-;$yqcYY`3I2r%Lssc8u4wx-oX>Txw?XxoKfsE6^ahGAV7*u`w6`$cS?a#i2vO z@vQnmuMjzZc!c5`R7!2Qr7C*5v4U@1jCM;AESSsW31;ie10I@E+3+p3|H4v% z)t5i4Bx==AI22`-&n~-K+;#k@`D#gwY5Z%2%UGUm74l9T^>WVqh-yowl@d9*$s}7; zc(L}CE!-R~GP!n+k=-eD+MMlpSU&no-H7MEYR7NJAL*T3!M9=bVR491 zH{?xJSl`gkFna>#ugNXZHU_ZL_3;rhEomI{8uiL2NC|2aGd@k5*UE}guP@NGCjP?p zi=Wqhg#1ERTjb6t7KX>?^CD9`DAgKe`i|QYCzxec)ML&XVV$WYTqwVoYq_$Hw|BJp zsoP=kz#}>|Ka_Xx17}r4RWO2D5g9m&6o~dVC}evCx*@qjIKi#*M*VICGUEJT7=rL+ zj@sO{uoxjaE*q?MpX+YcI~dOQmG*9&ImSRbTC)swDF+A(cF6eG>(uly3RU?7ml3Xc zM1U?})UJ+1GpZdq^qpzgE`-bn*{|&d7s);0FL$ut5&fYr1SBRR)a4ae?GZksM9$JV zD3N0HZfm~RL)w}<^!KE`vCJP39V>#7 zNG<0m-gdGY9|jFUOsXXUVMasf_q7KFhl4NQ1jB>1H$?w*#e2`-#&?52(uCki6Qt+c 
zgmqf}1Li>21E3CXPhg*pkQZ+1`pnfP;rbl-rp%SA{#he{*e|J1GGL`7(;}NQyDbYT z(}2wn(+IX>tel+528Dk!`+(9f!$aUIu)VQ)u782RvJ-?6788Rjs*A_2}DXZBh^=MVbSg2&~3`;6HyTWmRa@eJqg?@}478&PB= z^FqQCYu~i-Xl^Cjh>mQ63EfwQgN1~F{=f%aV~>;b^42Glm{%C{wWZ!qwC9faI_!^E z)STZ$M|V&}b6YKfEm_z>cp>M!&cvlryoqrBd2&B{y4+YvK17yFV$-Dh@aQXUa%y|a zz!REH}@U0b;f%xDdh4+xm^3ifB469v9j^{mSxt^mg1tUk|jpV z7VpF&&3nS(t4>}4evuki3~h_*zg0!NJAx?>zpLuXNVV*E?uXWJ z|4p>D91*-DG6tV~F$1#joW)XuiPnmM1YJnS(avH!;?<--?Mr~Oz?nzxUh+J&t^J2x zQUlb#BmD0Pg3AUg+x($%=bjg~R%pHlumrK0(EBMW^NziDL>8AaGV|YGq@{a4hqi}* z?!>p0V_rHw)y)T--gyfmepwUzlu$Rpz9X_PpJp9#)d=G);GnDz75YFV%cR4`27}8< z^OswbvC=4|35+k#@L!ifoUWu1>zi)rY3lmB#J08!PI|U>QaqpUFfS5iTEw@^t(*tT z^LY*07d@CQA&chqaSCGQXUSdY-f(-4L?+CM{o!HBU?AwFKphski9;$LN$9X-80PlM zc*5;a4kG9vst!*hHigw_a|Z5pqF3} zds-}aqpnb!hv}RcGFr%n`TV8|c&Wh|maQ{le{F<>l=Op%_vt@xaEB8bfxT1MHUXX} zMXo5}k8K^b{nwddBzneT!av&U-^T7q=Qz+`@yzjTU6o$!LK{)r5d z^BFxM9jU-WdvbBS$L5PCiZqb;8CyoTA+LVRCY(_MG9tw98X*3ZJ57 z{gGmUp3Bq-;NT!FT|tyfWTP-Q@wb_ZS{E8oZq{iy?)PLyW+RKK=U5PeY8YR!1)AN4IQ}We?5fb9>0~st(ji5oHF$^u~~?w7Q|S*oa})#S2lnCWuEr4q<^Vw>7K5< zgPvRrSt8L!lssg9qNbXnOZA2&!|6uP(M!kjBg3gz%vxS3Iu?{-V7}U06B$S+) zyppHG154LE?Z;1m{d8PC&oI3cjMA;GANyJ0JM_;SI5!PRpN$PW%qvq;q zHW}rMR&>yK>q5Q;ehxxs-~0&w+<$_}ObYe29X!VVRjl=m6_OKv9}$ziObFy`}Y<$w&JZ&f(UT zPP7JR?a&Y&JM0&(9-LGr&$@PdOiKuKwmtAW#zd<%ioF>e+EmqvB^a9Jhka^gm0@vW zb{dNzI3y%jK6=*UcA%lTDFg+&$WCN@d3hV}9|Sbr%v3FT&!hb(bBIZ}H^7K=P)mu# zqQtR2&M^Oq@Aipj^m#xKj{Hi){fY5aO1u5^@^_T{4pG$1TDlLmUO#KV?hHOjc0Jfe z4|Xk#p7xg124Z6gi+qy&#}?L>#^x-0&K!%&T_BR+i1OsMJLi(mggO8> zkd39~sk2hlm5EM5i}3cgd$~J?r<{MGj?Aa}V(+$mT4elbV zp^oY;jz9x+`T}WyhyeaZ2Ul)a#WH0y&(2g1PFWhloKg9F6>k*Y*i?G3#;k(Fc=cXl znx8))Ydv${Y~De`^oBdmaeOeBX3cos!Y-;{ z5?yWe-H(~nk8}CD>|y?M3r7~7yk7{hupaaMPp4U(an)fb97 zbB*`O3CoK(|I5)E{b}Flxkk57+)s@ACfMIK&;jC9e_9!c^Q5l?gkaf>vZy*w=B4;3 zL7Eh-6l+pk3n6uvewHCj@>2;n%U>qlp#CppWFXYfsBo+r@5eowVa9rzc{*V<1~Owl z)si~;lPy~Y_SNR2JzYKST_qAt^o8v9pl|Y_J?WC^P$*~$;BfW&+d>FoK7A?m<2`K^ 
ze(S7rtsVlk;?p{Jdpb4JQgPxSO(4dGL`9e}{|=AraiAuf{<?3QeXJD}Zw@1;b*I*c1IWLW1u+K$`2qA?U+ycT#6w@?VgF_F-HVM;>Fu$@W z1F{|sdMz3V4efOdQVxM0+=udrX(V4+9CFz~-9R$`uox@KN_8Lt!a@VWfOG~2LKqJF$SmeM|lFB1q{dW@qloVNNQy*bcf>%jX28uY0nD{}()6Y4{VA^-vxUe7k z1r+2HnL3g-2P<{N)sYF^{ukj18Pb7jVK*5h7Zga2MJuQzJSk_@m=cOkl}q;8&@fjQ zGsg<#Ge;9MdDLK4D~2I)R9Z1N3~4HtdZiGF8eDi32grji5aG%NyzSK3*Lj;)ANxuU zKItvmfQ-q7$${lq{FYHp7Ri+G8GDkJ?zp(DR>nRXA7eea|DHHk8i9f;tX@xjgpK2f7y-JBqPL&%_*le&>pq1TF2UMKSi_K3lUT(^b-~S#`b1KGLQ@V zC5|EcPqPB0f{~|Mm1bcWK-gKQ5)&i^rzlL!)Il>s@TMS9Hd{duXKQZm)9R@J_)Vd* z<*bZcKMLlr*_k+=Z%9(XSkTv14=3M7aV^QKwj&i$G90))gWW$>T~@vKrn*K!oiD#>q?+u&>=Am{ll&8zV;{VGNg!dXMCp#Q@rVK(bmvdG>KX?& z`vAOXg>>ZhDVF{zcg(ki|L8?NTIS^~xlTMcZ+EG&Uv4rq}Sx)It5Pi0+^G!ZNbLEW}MzT)^rJ0BmN&^$1oNg z$3Q^}364*JAok?gu}MY|6mwcVu|hfq5Uc~LJvN_VKu)?yX zy{M|5-H}t*2~x1GRGG31&Z7*$kg_nmvhx-FL+}B_#wTio%bXoM3cS!+k>0~R6#mdY zPTa!)2#wXJkTDX7Z3oh~ROv+u02cC~A#KkoMw2>R_cFu&VO5NY8n9;zEwlv@fZY!C z;?__Q3kvduM;5t7z7%811kvNh;c6v?nk>qnNue;*Rm31WArH{qf8kaCg(@xotBMUO7O=+qOO(Pe2%3l6 z0g`8kibdYmQ_N5n4A{|9R)cv;H1+5@CYIJ?qK@7Q?D!re(86``gWlO;gTzj-l4{XsuzXkXO9^4a%+o2_;Pi#}SRijsPg>7L!~M)q>5<1u>604)sn?KL$qp9BS*kP zjCI5pD_t#U~Weuy#y4O)3R zmos>6D(sUvtkiFJr5v%ek`c|HL^ScRzz~vSR;|ke77OM}(i`%r0_Y!1@8~T>=;ZY} zI@D@q$tc7NKH#9Ox?vUIbN;aqib_%NdHG2wLaJnp3DO5(O2!wkQmUXqpbq=~Xj~ZO z=)LgbMpP8RW2gZ1Xm&UfQMF3YFep%%0N-1(6_nnFo)F0U6udSS8pc36E0w9i21uIT z3Z3FRm8zfKw+oiyVnY5Jhx%k=sZ@g`5xx4=lZgI>ARy_$8w-5{y?qRtQ7o}}XJig^ zJ~MV?Sma$i6(>LLIM{LtK1DjnfRKd^F4=q_zAyyKK zoI(g;=uXKrO2tF4^!%8BYJc%QSDhThKsl&n5}^o@Gc!MZxY0NvU;;cj*fAkVTWu|f zhM3TBgBTp!9yl{c0>+UQ%MiEzV7wFpvWyfj8*CV+^n!=I*33p4j$6>WRVol=yM)Xp z93XutBLoWhGbt_tb5uM5@0msdP6-VxK*fW~0QB@+1gn#VtLz4a$Pffsha74Q)4;#^ zpLv6Jq_2gi@HLE4zgdDz|nh97R8dEf3yTwcM6PF(5(i{GIoe z3}(lo9)vp)G1Rr<&Gcxxd$PXpyx*vTCv?3mU54%Mh&L@i z-rM1jXq3)XVg?sGaI`nZmz9~8(0ky|ZL2IRqD9A922X(aNKPKq-VYfM6IV>J7ncWx zo;naVio)HQ=Faq6(h0S-3|p2jCNl%VVHmW?nlEGua4=j%UhCm5gTqLkJ_EXM47y5Y ztBi-FxI^Zcp!@-<$`t10=?^*`Yeq>%q~*eV>dcu 
zNJg*R)`}-}#&Fk~AQcH6Wds0qoX^u2^E(*g9IzXwkym~rfdNWnpEZ2Mc8xM&NC~3j z-Kt5ryakrkG&+lp+=&yPPaADrnPvo9P5ByhL+VUlqN+A{Uj_SN>bzCS;3=(X|1dFAr`lkeWz@b2k&@pZfVxV&ocxZ}SX ze(G#mwdlUu@ZRcp>3;fZf7>MDz^!J6ER{DOY5B`u(mKmt+B&OB(LP<&QqXX`ajbis z-OG$81TXX$baeWAdc3f*WZ^GUlIRMEUc@FUl!N)EQ?#Kz|E$}gaxLMP#nCn|u#BXP zPJP}@G5%wEdnTxgE{^#o%?Owj;ieRMm+l@YL~l&{jfLd-2xE7@`xD$dsxMa@=d5#N zyl*!d=vU1e?zlu9O5uxsgMLLvC9!boxa7#h{zoRib`l0vG^jb{Th|p-0tP{D4M+Jn z=~j^@L~n|+9aXjqJ5{_&Mz=Pml)Ugat)xuwP#ddta0dZPd-hCAqVrhI!=*g8-?|@; z**a`)CV3Tu+dk^Sy^k38MXDuEvIvkR5G1PSvQb8+Cy|uG&*V5we}oqZz`YG z@4^ZdosBneOR*YPs8){^0k)YDbKqqELy)?_$64piHK2|-;G!SgSfi37n36gUkO+1& zhWjqYJtV5%hX(#fO}?(bx0AXlOQi4c2{sa@sX$)4ao=UGZn6nJRKH>CAyQb^knIUh zTW9U@wBG=8i+4!ZY?4>)?^WO1b83%f^S%Hp+{H2OPHW!n1m$6;OWW4M#axN=&u2ZCW{d1#OF~pxlVJr)s)G@bwG9EMfA&c(B+mS#3Uh z3R}?nS%%yjog?K;+PbJ)&W+BTVFo4wT<8(9qV&t}Boc~e3}^t&kYp(b$;Qq{kN+9) z>lGZipd_C;8~_eZ3KxPe)w{haJ!DDS3!C@HTzi@}1 ziDT+Ocw-cZl+B0P6By#L6_xcn z7x6a~Mi-{1UO^1HBNkWHe<+V_**8pd&BBwC@Oq~uPNjF+E-dWKo^XrOO}opR=rPoE zv6{`Z+`QyfY<@?@2#|~;3XlFJI5nyJNKu_&w0qyfNBz#fYO#WAS}zMG(0ao#vK(*R zFNqsvF^-8N{q{H9+51LZndXaOMWIXiW5`l8gc9d}NuuFnK%OI(!wRa`o|A1J(+h z4DQSzoleShJp1xKWd06n)9j&ady;IflzCVFOTK%Z+y&NmhTY7LAz>AAnR<5A>W!PE z_ULVKBihL~asM>sU7{k>K6Fft`iSU}tfk~h`)1iz-k`pfs?=;Y!00|pClPIElAaMg zZ(6t`L>e^aJP%cjt7|CFRD4X5T%1~+!pgF-PCHL9m&0g2`+0M=z)UQE7Jg~J68ZyW zIDw1gu|ot^7NS2Mw9v)?S-2K#4Q?QO`y9D0PuJev)xKl9H?P*!Q1}9TEuvpUgFCRd zH*Ooxun+-@5p@H=%%%Pbi2AD-I|WG_gRX4$&Iitsp0^K&E)&?R?T-si3H1rT!3j^W z`ZNC`oGA$u(i5908svz^6FcsX5!O6$TR%<_^)Su>D&#g=p1k&|AYDx!Xb{nm5ERy> zESu?e6zJ6A!7*^#^@Gkv*N^nk`|P6WBbxSTl7HGWuk_XQsC|>l`&qXDd|oFNngQJ{4+HgxiN_CPq%5HZN@r7t!8J0$Ey29$c=@;#Wr{C%X#l zDJSo!HD>DFn&yHR;X9zj3PN)7v2Gr3Wq%)+50M_;$TWE^54EKv+y${`SJ-o#zS<5d zz)6iI1kI*c2f@Ouhekdv&tFxy-ZD+DyZM% zlPZ^jo>D5mJ$zZNW-9yZml494>^@_XIDMek-QCLG2rJ>I`;mJz2QUtUO|xa-J?n0k z<%^FP>#B16#-q3HnLK+ImoND-hm;`oy0T7RW&IjSWNz{}a%D!CUsLO6(23O3AX}E(YV|8{vOKgEw4)7fcYP0 zWp+cq;41QJUKNqz7$y*-g%I6^|AG7p5Y=On`C!<}?KRD}wzOnftZ-blDh}V>7XsfL 
zbtKVZ%V3-3rnMhv{=I6w5y#=-$UU0f>q5@`y||+vUI{tnYf~;@2yM%TArFioQZtZ3 z{`teviwGVu2!`wZDqHxg;=a39qJ3w0@wv=b^!6kBn_lg6w4HUCLv1Cm=Ok-s^YA`- z%L;3?0AYJKcr0>?;B|SQGW%<3`cl;WBj!L5uk*|T7{AC1cs)-|rd*GKZX)3Sa`CB# zzq{LpHr62&l+w~^SW5J>?w6dVPMr5y%GM*4)4w-Nch{S;Fp?k22IhkOWS7+ z2ODufT^cHeLT9eZIh5r+odF+@eFk3w>#~`f>Y&kdl$FldBmQ{-m|f1B`(r?Co#ujp zt(9t`N<=v}^1hrvUyn?xOu56u`Z2MUU}fHk#e)zh}8m;MjHCo8cE+!6bpi<+$FM@kwrm?DJ zuk*XlaeUo1M-5~CwgZ$9DD-1fJ5?u(ZOCucRUhf3YF8&kF7iTk5xBitK#$+8bx@|` z;H4w`y^(KyYTcHXxe>}9`{g@deVo)Ff=kR2$lX|74i z0IX^8N#k(78%V8nN1f%I<5^64an$iD3CwF}tWtY8p<`9cHQ9YCHM<>!O2>V06A4y5#Sc(|w4FA6aJ8zei7m#r>WrKci2&%>!7baCr_JFqE4i~6iTW*QbO zvM7z_nJkJ_xE>W0cl9`JjP8o$?gDkTpfNfsu*Ew{+&phihGvbE%9*KD~a7~8`0gM_y!YC zz}n^3ax@HdGz+r-9$08%$eB<-E-eFgZ1FjbYt73n8PV7EMzaN4XX^kV*JL8>FlCc4 zlb_1UxOWtAa`(D`{_@=EsaCsXhT^2GVt%`+U1&s$ltzZf*TY63#J`t){z249nHaxf zH@rSQCQRz5FIYVE04e>zx^(rYB4t{@>uxvgrhWAVVPRTQE~zJUcV*utBGEPoKiPJ3 z9Br@AOo|z+!$J2WpXyBihH6b*Q8Urlw~1A%{lWdk@E`WfxIqD?=K40`!v5*tsV(}t z-&qb)xHfVVk%kKxXCtHCv#LqHvNgx-*N5uqPyiO@tf;1e&xKX~yL2d~HQGqi|shE9*&^kJxcYEYw z2l0jOzBR_XQyW2oW-7?6?((KX@N#t_MC0--64+o@jKTYs+fh0FP|A{N;RN7vk-wkJ zH;bC0yh1m7Vdd@Fs^R;^w0m|r8df~Qv*4oH_8h%mUOP?t`=nC#W^_z+WC&QK%aO~o zU+Kh|%86e%awA{!mEc#>5Z`0B_#sc9nzMDT#m4{sNIE;aEFUDpx5X67kXcEI}$8hWt?N|p;SLMC5eB?;J>>Jf8|fy9Bk_PY#TVyC1(6> z^KVo8=;Qppi8JNw@Od&L+k9p0yvgA}yyYVxrbVB?+O7U6P;T9RUbOh_N0p|R-ClCV zqj9^FXDH;4YgdBq{LQxy7gGp^^e*N?76l6ZZq+Wq@m_oEp_vjV4>D5b5o-mT-8JpR z(2miIFvk?6PRs*Jy-vZzRNcBQ)n#9`q3ErxPQMF9)}^<9b^x<%-yG`STPHSbcFtx4 zBm=CWxNsZiuJ90<`kxM^4b0fL+>OY(fDH+aDEjhnV zU2*RCTk|a~$Vy68NAnw^FzD&1Z=SIP2W%Eg7;=Din4L8fTdOwzE+|8SvT_A}WdAx} zzE~U5n(6F+I}`9Fcp9v{uPYj8pBZHJ>>tGCgo6+M#gm=BMM~XP&9+f!8Yh?MH3r{& z5<03tD^EAJuFYCZ<0BMXZUF%{8h+@K2Sh*z^TUV!~eau zlmjY;*JFQO^e^6T^oB~3E}B3a+|F{&oZ$XYofeXJO-N$NbtZHH4l+{ zr6{C%ZfuLI%_Lkj7Ls;(dAEs$-#4cV{oS}79`XQmZNQ6iv}I*!ufC0CDgi?EaFJWi=ZVhJDjeoF zqVEWjK3r=a;Kg+w0t@i7g6Opx>A6Sz80G7{8;>szK)rIbP<+ZKc*KS;X$rP7IHq`f z9*!ZCYzW&4IV(@Tu5DVMwr^m?SqbG4oZcI{i?dF_Dok;<`v{T)Az#Ea@BP 
z+z@zoQWtpoxs5^iknhi?i)7|wHaGQ^O%7x_f|mf~Q&Lr$ovm7?htD1&Q-&+_ZT1MZ zsA;@1KQkU&cB*>&S%26#V%lx~#s58~I$Lv9Mm2C+RG>W{MR=#b2!ly4#gKtxnYy_o z?qK*lFtKH8n?iTm!r>(`qh}ROfX$8nRk`ZuaLl*IuZr3k!w}=Xo4RI0?V`9aJnz+^ zx^JVi@>0;0)ur*E^b(-lv&ixywxv5^gkP_=wUX>0(C}5lLt65e9aJ>-JcrqB?EsR| z+OofJMVoSlq2>5*3ll}pSAqUVS7Io^mb1}I^vaAR!5T+xm$PfsCaWOVcQEUu*S_6# zvBGh6E49ET|CM`=kG*X3u93xPQz>Fp0O%(nR(w0j|3XKK`~R076blQ-|E&L{8`oz! zz=RNbsRdBV&7=W)W`Rgf>Tqx!VH_33Hy;A%s3C0X+}2|tO5JCL%o zhB`E@%-a_P`pu5=|9Q07nEro_76%h6>;DkXVrKroiDymmz`Ls~rhm1?rjL;kC&szK zv?XRf8B@T{1WQpc3V{=ofwKB1j7`d!8YHAo%cA^RAc-^*%TN<^SS)Y)7pc(&8lSJ4 zZ)xME-eOC5-n{w``mB(aev9E$OV;$u>F^Np_~>=T&P(Bv`|B(B>;1~+AUGk^jY!#Qz75lMzUM14?H)lYkKj__riWcdH&&}@`=MLx6XHWsm zp15Y_#Qk7$LK~E&Q)+GFs&#k z5j^6hBNuEWMEO&X(T8LItLpdNQQpf$C92N$OS#QUJ?0mL_19PP z4hm+;h5zsE6CswHe|_hO;FuRbBjI;3my{eVZcn+FhZz~J8fvV%;fT*sdP9Biu$V;p zSJoTOrrW2XrU|0NK&CUyYJ*#v9uwgUe>b@NtLm(D6_3G03xH{ z;BW8tbsCZyn7oA5{uAzODRcW?B=)Pn^SM>S_>XW6Dr63+;y=2Y)0BlkTUDyKrAq3O z)^njiV~l+55=3q<#_dLNZkze6L9#^=8;|q_I>0HdTs;~^CZ{v8 zw^yQd>H;f*-iSfIKk-d?+ElBErMw%bG|fp$#-gg|y0HiLA6(;~ zf5fHa5sS|>Tg2KWUsna0bcGH;*3%iy=1f7Y4no|PZkn$%(G;JQT zh2e`jx3b?zQK3l*h)k?XS58*Ak361H(0g=-zmoau!$jj?G+W~Qq#lAHdMrWTD2 z0FDY5&H9bmMgu-=Ew^bCM-(m!jz62e-BK^+b5s)|8ZYP8@^Qfg$SH~C{MFX0L~(T(h3RO z_31>479QWq5rRQlWXmq$Oe^$RT9j!iivYy7!nwt9)mglyJysOcDpEfvV4)ER8x5`T zA8}|yx2%g{yXmci2*eh2aoY=8A^0`J`b_DYDs(urxw+XB>*J@-GcwZkHtCJY1&27C zA}y)XJ^@U?X*<`YQnCl9;^bp*K-hY{p;PJt-)hxRGtlV=Mz+2He~Iwv`!It*)I1SG zlv#sDl6FZt^&EN$+{<%`vG@ht8KOq=ACV%}@-i>E#unVEL&jDrJZ^w`ouO&EX|dKX z9!z~^=v-sHb*fk>ct{<<(c!GpUKaQF!eNhPrNeMt;?dRmlE=26v!g3pH;lWV-Ia5B zMCjFf2|o1)h|lo|)rEG=qI_D`rf z_a8TkMM|fTWw=(b>v)y@wc*!?${ORYX`)MY|hK~JeRfJw@g05nM|4Im*PDZL1Vyot2=@(-)?~Q@4b>1Hu zIRYzjznQQPv3C(9g8K4UVW6f#Lig?W5w)p(Bq+~ho&Ul%Vq#hEnk(pA*ZFfBZEwRV z9^KLF03^XJloWXKfs)FCr*Q*=;pylHPz4lCLI5+-_V&(3^2*r8iU}EQ6`!?~Y6p?W zwfJ&JFXr8BT|<=oB7V(#Wd0w)Vvp-Nz-U+Aq92RECGslSO1(wrh-&RKfLZnZ+=@D2 zZIm@m6iPVO=Qv#8=m$U0mB{6f(g@Y}VZLFpLyM<u`o 
z6!OaUp_$mNy}d#*Vc15p3A`<@W8|c`P*f>)Tqe;nF|+9AH9IwJQ@Z5rpx9w~WqpOc zd%lnW7f8XPb1y@oFq&(-J~PQU*2P|0K==2o)xpX-~0uO{ju>ROo>vuoUU z-zRH1vsac^E$c!XtjJT=f#-H-+`bB?^B>Jz_3@k|;3mBU@o@Cof9Qvxn=NVN_2j-j z+qI+`4)h_I?sqDJSl#};uWB!X$%$|v^+ka4F$chiF`Y+DhOb=t zci11eDJ?}UPxnJ;%BaO9?>o-PcT3w+Y*BsNfE*(fbUzNP|U>VCJ4qmtsB`=t4~!Ot+okF&xEux z%O4E4p{`q8_RNmsbf+WTZfzso%-jhc`Hg-3sQ85IBX$oK-jNqZ_a)6jpN35&31{Jm zIz}eUayPFxaRd|*)sxJjw(+xsUhx095xFl0ft$_cm@G&2taE%{?hdtqFm8e7Z->7e zS)M)SM>KSYs<`)5pAUl5kb#p}k-IjB3aF^#`{c@X17+4d_f0;Xm_w&;l37v7JZ zaQABW>hLR|C)dv(>5lRx6+*uB=W4LOeIXMC9~mF@yX1DEFxCF%-5_BWod)aC)CFXj`=($J7&$*AoPW)U*0 zB0y*z3H(~$+>qmiyycJ181IzHS)0#a1>5BQ!Mb_c|LGuMsQ{pVv{d_HCV&1WPfY05Fwv)#A19O zm?#vQ@9yDgQE{Sk(H^m?HZR$9CX3pnsLrbdxEhZXk2oCkb}DF7@K+%J+%MtyIUPin zRiyvmUeK@Yo--`WL&pn6=G#rcibk;{n5<&G|7=Du(f2GnDV#?w zQ4rl2lx%V#V=ilNuQ%$R#H7>gbXG$5%{x7!Jloen>?+FxEE!alVzzk#JbZlGfI0U! zK!7sNQnhZTlH(^Nz^7}-s6qIilWm7C=gD*Z3EIXgb->X!dK;skC}LJ6k(r8vr?)x+bPV*-HJpY5vWfyF@*%BQSTqaZwiXscC-U! zYt7EuB+^YIY8_{SLd@V?uSwc;_B5Xp1wLttM{#v?bMtDXA*x+n(x&4^REx&kiBLkh zMWNs8s@8i&rGLe!>hDhyl0@-4an_wJf#VGKfBZMbPsEq;5t&l>eu3+sOC^I%iNZ&N z{m1G(m#X)F;d+{qYnt^e@as3N(NXq8nA_U8Z~OnOuRD4e8rm{YGP3QDQ+OE4RMU;Y z!#f_>?`x0D_0j;fEB`UwtXNx|j9FNmlu

(WCb*s37FAPh~Iwlbur7`pt?;itYdP z2V!~8H+(|z%+buxv5*5i3}fyc!FEcK3#o|<3vNxEK9szJEKPtxs-kBlniihWw8!{) zb){86A1!t{7b6!EL=mHhtiQ5d0jg?o6_aGl-1IK91!#8(;;IuV4AMF}L=%9?;Iv=+ zI$aD0ftvkCa#<7Ob`@!K<;zK_tL@*u6E#Sq7WEUJCVHoV?;K}?W0AB63lT^m+a*gn zslO6(do z-V1IM=m@y#NH`VbVNe_B1)9c%Xv+7OEfJJ;(vD;4S807T;*`jD&#LH~*R|s6I$uSd zA~GQ@H`1NVWKqrJTjrRAH`>3wIi}Mrtxz??=gKv8(a67P(+Qn0fPpE8^(6vJG$aH?U8PDr(dAWLtn)q584Nvn`Om*A5&0jUQD0F|V?_KGC zOx9e!P5X55+BF=S-mbwN)LiJy@iZ2Do7jdh?;I92x}?;9erkt>K6bIZj5L%V5xSkR z=ySQA56)bjJiCe$L{09~@NuiIpNL<#R+;D1^O^ub>+A2D2hSQRl#N0>$SvkJ2V;$% znMGJ4gX;>kL}eu$(k5}YVsQzr_9rcz7oBlFB$v{!BJM%V@#>Q4>DVYR_bK;}FeVvC zV`<03Gz$3?#wpuCQ>O;DPmx^hdey!bDQ$N=P;Ds63BK^d;_>d1i%qclFbd}zYs@cameA(sOLr$x z(UrOJ4g4kky1n?SzVa-SAi}FUboK`a&iCD7T*CIx08(f@83i)!@3~i}`!#Nl%UUWQ z{>Rj<`nuISDl+`xUTaUS+}CM4da-Xw@l{$Rl+!oq zwy=*c2Tz+_BW;Lum(?*BYR?Jg@}B{C_k>_+AM>+-w^&fLSfIX@J)KIt6Am79R4W)_ z3?-wZD`E~b265(RJHz$~go6ltkoYSjpdSP349f>P=;4pRqyc!4ouNH~4BXzEA{qrU zB@bI#kjJhYds>*QLl+c$gS(g%X6}Lke%fHJMJ*`Ypx^v9mN_yd+w$m&vkML$Ji`Dd z8`lp{L^~^cz8=m)$M62M_db*RhBXC-mEZOzu<8zofS`=Y3F#h{27DE%4RXI-@szxMmULHy5yr4_F=dMV#_wql2`eCE)^9m)dOV= zq#WdK7M=gxKRtIUPsO3YC~wM4)WemTXOCcCL;7^(@45o*{+_{D&lRU?(9jeD{<+AX ziHQ36fQD`Hvt$-Y$$^g4n&l~IZLOUNbJA3#s=KxG3zsc%A}3@mbMt`(9}sClOGrzTq`Pvu`}|SJpSS`EO7;oy>_edn zb3(m+WJaqXiVPJ->A4jnmMXYbl4{gX-L~;u2}%ZNA^Cm4m6(0T{ZWJ>{g6d5REjua z_nmpcewe{zH%rY)6g|9xDJ#N;l=3b!~~h|<_9O6(WC$iv5HeX?X^Axy32R=Q<%fi&ya+58CI zB0i)+^#EUurPe8>x}0!wF4HG0h*K)oq)paghuno54>h~@*SYwwvzd@XFw-l8!rv>G zKS|7(AE2=*lBYyG=jvb|If^ht4}X~Ky3!s&Y2YaXQ9>2LJLlIzsdtROle%1)KldAi9-zw$?p0pIQy0Actf@Xhi|5e#^8_>_F#`x|lg(OQprl+7h$ zk9;Nnu2Ou>lOzTVVPk;BNw*d}!MrjH9bN@C?>!Dq~Bo)q+5SOw+$e(8%G>D%|O%B*Q%a7dogl zL&9<%vLbc+6fTKbdxGcw(x|84=R8GX+TomR3u7n(Tt3=>~xdLN@f}xuF zfMcfx9zPvQm#i8LaS0Un@+wi{sqh&ZRLd^cB*~F+lTepNbUT_g@3N(3c39O@J^Cz9 zJo4R+%&Fi(z7nIcCqJD4sN5%-8`56>;~Wi^SUIZ6kLfXs@FCH@*{OY*N&QIaO64rN z9K9^3X)SS9Qs%Y7vsdOsm85X&{l~Aiv0nUl~ppz)m zjDOV-xa{3}YN;p#BC=L&yty`0Nsz)t?rG@G&FWE;Oo=VV7`dViyr({bYr?bV;tc>} 
z3XAU$%k=il@<3s^gfMYhF1M1Ia)OKBo^D`VC0gV)v+_6=H--ozpDiaDE+LG=|5%oEmS?6@;y|_FBLo^Oer!xT_ zOu!GL4zmCqYL+i0!jJ6EBJ$v^jH_`9-xep;u(7WU)cJY|GTYUn}|u9=)XW3 zM9eJTSvHPurpEt7)nH@!Kknt?Wc$AOpA$Rxe;WH=C3aR$B6cR$|Gl1t?LTM_CZhj# z{;xXL|3EvKh*;U#iCDS+vtjdH`ak#opNId82J-((^LXCwH7oaJPWgB5ITye-=y+ z0T}fELH@0U7OKqX@lLP0?Q3QPk>>@=Tdp;@=@;+$s5~X`H^uhnvEH$u#dKa_kq6Q6 z6cFmHDd}i9>qu0~iF!Dx?d~UFJ9e{pGVX-hlje^~$!=3(q8oTF0ZH8Cs&ex+Gpwor z-5xiZ^zAR0X2mR7$Mc{relqY~E4Vbt=FI8v;AZL-=zl43V>tD#IZd|q7u5L?=X0~W z37mMx(1_iOIC#NiJkfjadP@g_P>O+yd4qT}paBW3wRWPit*E|m3RHVdmX0cN_q@9d zhwcaL5HZb-ROXp#a3F~K9O1A)AA?Qrc3OT^Ee%SsN24wpuNz4Rl@Wy@AlDWBtj_}% zdctp}H7KXDLUT1Pm+=kaL#p^g8Ohie1Xe1LD(Z|yAky6j?E#KFE|#h--H-B$)Nx%AiBqu@KZPbA~0OP18Gm(6>{sBq{vzV>p9{prl8)8iKz()5!;3A^ZrV8vUBnET$c=MbLxv)kG0 z77E_m_;=6!ONumnh`>D?YxM3T$$<&O;-o9DvWPR- z7*v4A`{Zb?m-cGh|Ajtn)F-lV2 zf6ub$`A*NI|EHb6|5NGzXZ9983hVE3uRi{23^M|(TM$C^h&vzFZ2#A1ECEVBw!FVB z7q)zmo(7x`9cA@k^FPLqCL&pdxVes^`+38$R_DVnoZc_`LBsB#KP}Lch>*GZ&%$PT z8(1gOUqJc6=P#pghE|O+GhGh7nbk7+xuasOT~V-I%ondT7ABaVs3$%)`GLJR|P20g#mi@vXcRS^>*b1nYY3&STTtoHTSg(ZaB#yWR{LI!(7*VH_Z zvFAi%*{=*rPIit3h4PB3vg(ZE>dG`lJvA+59TknZgYiiz2cCrUh<~N_iL-f2mJ2jM z5e8I9DD0-k_8*x`D?7>D7Y`5Ve709&qDNe!VN0RK?Z$WRHu%aMYjxZkX-<6(+#5;r z{3(gpHV2A1>4p_eYn?Mo%QivQrU2Yf&VS2mHk-uM>vSO|^3(+*QX*g}wTluD3(K9A z18Qeg+MDzGYf3>6i015zkEgbBrcemxjaUmtqnSqfe7(vvX=upGmH=o7#S@PPy;-HT zm4obmb86bg(o*KB){69t2dXMw$;!ITi5|H-Mr%UDIiCQiXr>xc-6fN&Q@*k(T34fd z-@4|WFOeGRjZDj$o8szK%n;GbN7H31%dQjG%1Vn@4e&`?H>`{1&l*r(JHppp4S-p1 zfjw3a6v4hIUM7VsAtLxAqF4AHKx*5k&HXRp9n-Gv(wu|??qtJrZ+7zLB|4A$c+MI1H{(^rZFQ#E#ANSEYB&CK^a)4J@p&3&Pgp}k$ClnN(Yt@E zhb~J7y&KqARu@997>8Hqy~0_OCHcUqOcVWc#1?fWD7NKRB>9|K3UpYsyHItnd}H6d zB+iQF(5DHtwJjx4{C$@d?NwKioUe}n?@SZz?^ou%3gaxm$=lz0VK*vF@MlPoJPsLIJ?5TugP2z+;;tjr(PUWAGj>zw!dDmu8z|y>zA}t)m84wg@~Y9Syqit_BNDQ z_D*v&BDAiAVzaKf;(%J?YOQ&Lp_cQOuRuSp1MNYOS?=9m6rF{gZ=T6rskjC&_(jPp0q1u3$24;Elgoib~ATU)Cs*PSM%{Gh3;(xt3xou|%xZwE0FlbjJ&&>55-OH)_-A0P_2 zj1|solXfE}yjsMj7T4u>JsuioyV}Z^ip?#*j)2P{0SL>@ 
zvlT-G+#_-(c_7n2DA=#Nx1U0mRdXJc^yDUpWLt70DB-UfRT5DWyF4XDDG5GR^rA4l zZp;nQ1xXtNcyfp;5@L12?ihHqpg-J})hYfYP9pI7abJhPrewF)=68L$2f3+vnueab zSLm@kJyqNb>iHi6fP|zteI*!zxmc8HdkKuFl)HCE%(%89=t2~x!kDKl3tgow%)exV zjA|pXL5rg-x{a*nNU^#QU3klnw}K?R95>{tk*z8+x%BD6jOq~hjHv4&H2=_vV35Ga zy^Qh|;ireo3a$2npO9=tmfV(b1YV?v!dIHljR{RYPTv&Pn*BmZh=C z_=H+>YlCXDYooVG5uS~le(LqB>01-*3iNjcd%2C_QDqUX;u7FWTsV35qZ~bFWt=bhI zsV~x+_E(`byfy$%M@mDX)!$xVhp)d)03fi7*U!_h)xQ$V9=sUb08;@&3u}g=hMa=> z3Nrx%fCa#qA!DF=`Y=O=&%;K+l>C$@&l2HE|0}8?r6F`o;r#Ou_3$STEPilXFn91u za3V|t3@hwK_c4hW?v7YLECf=1GE)#!_Fs`Si8Prs5pxPz3bvm(u!zB){bVBe{~KZF z6r))XCE#h>wr#t6rfu8)+qP{_+qP}nwr!g`yUFg`?k1Jos+;>%sk+IjbL#u@lI4X; z$(51FkvD@R1;Gq}^D^Z{DajIF^Cc07f`|o)^8DiDMoA8l9U#|`g^wd03}_>x=mp7% z0^?-GNeYn_A;&-|`t~Aeg-DSuyT!Y6QM}`e!a$kVAyox=iDD3?Y7q?~=Yl8&Nr?jV zi_!bbf*F}ZkflLM`hG(aokDR3B9@yB}sp1cbMTIM9_;j(o)!q7ZO{TE7T3*_hn?MNas~#sZghg+uvJol`rl+ zw(?7raw~>CwEhX@wVcvTsUQp}2E=R%UNN`iaXulHpZHsd^UsB#lF}{;ZzPp4iqhVL z#aW}p3$e0#z92D31QdMY%l}$bDmR7Y5M}2OrRN_|Jdhh@dDfr=27c*J$j!*t1?4wn zn?kFCtr>PHGf^k>9SeZ$u+~7kj+xD<*RB~}F#C)dUbH)&9Cz-0)tt5HS2~k5X$^LT zWG%{xsl;cTad(snyly~nv>##_#y{j;|?%ThJ81MZ6lI4$n93+g_egM&QN#g_MNn7F(?u+UnkWkgr zM4ZRPM?2N>h&+MFR^WTsGiU{Z^RK1IcC;t{MNfsBKU)DF9OpWvuKC;k9z5qa3bsOD zc;{86JtA9C?NCqXGOI_0%sLmwkWThxe z{YZO=nZH!`80#S_t*RKJY)2Zr^SsRINNAtZUgtFhIRk4`XQeo^YQyZX_mumof*b_X zW=0hxDTz=~A|Qi@o4LN7JfFySxbu}#2!)>*-7n}T+tQp|pQv|?MekCAJfE<4AdeUK#S{4) z5pVFP*izr$-q=r6ixdhuzrAst5ubn_U%yIu}R^woV zuIA>U@%ie4XO$x4rRn(}QSG9m-If76vE|A7#xq|I_wmL)^cgK>sp_JmQx0;(3Ftl5 zKLh2xk;r-CJjE^9Mq@voZcL9j5l_2+TdzOplFVv7A%+b2c)gjFswn1!UL6@p380KlFihUekdDdN~u^Nioy;ixnkgd@>bhkwc+zKFwD)j{I zkz0JcMi%w!OX&~Wn2{ZV;sm4EH(n8xn@JAN2CJr*dr79B@mk@PE2$V@S^4MC08CiT zNK~g~YiOBztm{~3qcHtIRjbAVq$&U*6+WZ*;YX+(5F& zOrr<;n94~R8cfwIxWC({E^ZI~_wP>W4Wt%MS#{hOpG|*Gd;3d&b%Uy4EW_ntZFCas zTcTN#b!=~q$KunfIfp=0MCVAMm0;*dnG0#b<}quJvBz!+@Zu@*KD#u7zXlb5GA&Xq zU*~r@Mw_u$m*d0)bzzZ8LMGwP;E>>mc1k}eZIh(d*R&>@q+=oXs#`0(Nj4r^5R zs~1kXFYY5eA>i5Kv{@M29fAd;M=5F)+|p_y)Dry=>XCbqd&sMhDg7pPT-rct{VH~B+8`_a 
z&USp8zc~73IAb{m=xV9rh7VeHSDF;5<=W6}LOXWx4{vtaU->qHTn#^MGQT```5WVd z_h0f&SU%CKS_+a*_M59-kAi`2``A}w_zPamtPr1jzixjyPQcHf3NeE;S61@>$-O*AR-MmdvHv?y1E{cO1QVnCqO zw<7H~useeIvfY#rY7_T4+BMSUn87-;Nub3v&F3_dYI{Az)s68rX2k1d)Qj_Cunm;i zH~02&r{PJoiZ_tY`h`q-}|17+BcufA!(T>+VfE{?xu7Jh>fH zfBpOW9q|*bJE*4U1n3nfo5D9V5c3C|4ubJn=;AKf_U?k`6(2qiBDVLob@kSsYJf3- ze;5WFB^8oHOh~LMVLUaH>Q-8biEV)qZ=nIvwD%(7{+~5NJ>m?4s+~YWNh(<$eYpRw zGs;+jHbmogGs@_0avWKGzqRqS4i%?(RRh@(be^~{Va~{;DwV`Rds{hqN_Cedt4$p} z#8I7}Mqi{7lr9Z(X2^}I*hH!_z?>05Hc;qZF8HMO+m2TS{Ci7D%ZDS?py7wdcWJb0 zpkwGmgT@mcKnnVKTDFje!#;7+TXsM6ZL6ODV|y?0&0rk*saK2p^Jg|JV+gDjfj>?r zWXTOCD86Wa!_6=wV2&f}#i@3Pcd!-l1Wa@u$(6SZG79}KnA&eSzyT5gLv z^%Yoz)F8T&P|-bd9vXG%j`skj&wf%7iUDx8ngUtZW-FN`{BbNrH(LiDSVkE;A9sgBYv|V|Dgp3fRWlv<|OK z?xT4%heV-riA%WKbY7bCs_}*DWh>AQP3OmnZjOqwmqFxRe;5$zgog*IRf-X1G`&x5 z;h18?qsixyP%d_zxkBE0-fSKT6f^xD)!=I$aOz8@s+Ir%F;f$BZ3P9UK*5#gLfkCb zKz|6rvRX3DlIZmz<~}(3AO9=S4n{7(tZ%RNL7q5?@A(;4T2j`5qPmi1Www0mCWt~3 zWnMhlCovLBVn&A6Sp75nQTlzz{A#33p(>Y*lT2L5Y($r-k zl_}?gK{w3U9X4w?&~ylEz12!g(WO!rvrQb`VoFwevN%j;f`^&5zHy#Z z2r_v;XE^$nAYqFhA(S*#Q1|u^tE|t%bD^1z(_yY%lh(zum{y6{m{7?Ar;rr2hLh^z zc52=%@&V6lZ`nU18m!mte3;4DsYh|S{oG^)f{<~m2s6UzK-tc zcMNUW%9Lx_aunqpYX2kQMKrch(q<>L^{cTl357#Ad`fWglKDvT5*t(vl4KSYd!D1 zQuJFgUSkrHnzbL!k4SEYONxgZ=B09a8to4=g5FK=O0w6R3)iSfbsy&2OeAFQBmoH@ znIrV&JPmxVtt4x2|y^|h6xbCO$epd2F;A}Be3;xAJMXzup+EKIz`9eRzmuni1xij1DxV^Qj7ve3KG}soaMK^1fD~=qzUcV57 zpLEH3lU_vQaj6EQP<*%9xrO z@@F&h*`Sc3JDG@@4RHe0v}5bUu$vnT&KecH&}oq?)k0m&i<4mMLx9hL1I|D;D%XGLQ#%nB>wX$!1YaAd(Z0l@HhJ(3OI zAi9g?0))40b+UEiag(lYt#%Ofg>HMY zv%gts?y%5T-`eAIJ_GUiuhA&2?YtDy?d|62d-N558cwnv4&T?o56?$97Gu3k%1&)k zJ9Fm^%T6uKv`=Huke7QLjt=i1zMYo17LZS;|6Nl4 ziF<{ruIBZ!RK5a@wtIQTHEO5I@l_wxFx}6I4wK=4V?JuGuOChLQ@qos^BX4W<}(lZ z((YOpHo24CCWDNkL-?ak%EIY|=3r=aGz`+^aS&NxZcUP9;lY~7&`~^-WMVFiXJBtK za$yf@dEHw>&Rn`L@$-~bEPt**A)431AsK^SfUkym3b%N1R3vS<^no4+!X9hGtmYUIGpM`?72MdRS7bn9Sf#A)ul_aO8$>IlKi0x4XrRgG=RO=DSvXe%D 
zJR)!mOtjX4*@I%Pe)?rV_KK#$G}_3sUw(83lm9v=Hr@XRA?_!g@W(d=1K7BCl*I+eJ==nkJ+Ec#zh}NxQVqD!R5<*xV3>PSjG=%23up zP;$HQO(lAwxF|TaAetyXe2xifuY5FB+{ns7*sz&}iOUuFhXHeX#5p~ng8?tWB~V5^ zx`I_uy+!EYlHElshJJ4RAmwy!fv%Qw8@_I}M|kw^4{3HF z*#-&;Tm11KgC?zl=%|aBIEa|fxt7nllun~ZXqIQek_8!$JeZkrxNtOgl=Do{x6!|v zZEchk9gMOxAG)uq#jz7Jqmyt-<_pn#*b0m3LPHlx7ne6$RK2 z1#YwiMjKm1Q}<(^)W|G}x?G8}C7jc4^gR`=OFyP)2ewXN{!*=nMPZ{%wtHt>-%(V^ zISax?!yAhEO-(CVjrvBOlG~j$C+tFFq7I4^U z(nhn>wwe6SU(Ihc__}%mc~N_OvK8`X(dm1i>F=3mTVO*alcQ{;=tZ|5awOC1U;7uW z=l5yi4b614v_yfv%M^Ti?4u`ie+Y#fG05y_wpS?h;hnIq9sQ{zX>L zgXPsBR)N@hwZPlCk^uS&&AO|}`iLXfta`VFfXl%B%C@z?_4!~fqe%+Po`#0%#;MG# zMBM$@29V(qtVuNn6e;K&()2WaEPGa$C)9Ukb6R>3+wX220^S!Nf2724 z)cLTntJJzNFRCCvy#TWrX{r;A*u@aVNqxDqbpusQ%X5?(=mg{*s6+MEi|+*>KM*=` z8)B}|@&tn^_JG5F&GUiZnyd7gpKi1N6l#dJgE`mD<{Az0IiTE|AH#o^qXh1Z*^5Oe z*o5VZ`ELghWBr}JeVE~s#PONGX~1IfKz+@r5{n6<#-&HfHM}%1GX5TphA#z^C2=;K zphdweqWraJDiX24woPTSlyTf&O_cWgZpX!a5w-V zTWN`BD~Y?w6`7gES(4#RYc<$tGH?Op+XdS$sm&SDXjYv~16zT?;#DHBdI7c`%O;wq zYtEc;1AFrCnV8y`f#Vz0vvBW1x^KGo=C2~7BQb57FI7>Z%l-wC0_fNn1hx{B{J>bp z%C7uYPE`Wi$oX-TfYrpqh&f6XNhL-1;oEJ3_GS*(`~^blCG0FNxRLGM9R{ypBU?W? 
zvr8U>{N45V#Eis>23TmeYvI{3?a8JZNRHfN6s7o#njK=TctZ5F2x+7c2i0lcHG@4_ z+8vHLVma$5#`8xjW?{wx+WiWJoc+r}DU6oKwfQCM>r1DDS*uTcU*CpS>z|Hjyey}+ znJyA&y`)ENZW32jZ#(2iRnwUZ&nRlMy) z66@j;ZiTp>2zfcHb{`Tisc|{+A0yYaBRcFbkulr4*$cuYnkb3|w_#~M^XbQ+*0}h> zt-pov+z_47qqjAYM{?--Fz676VM)rl&nS^Khqpn26QMhTGf*|f1As@L$_ijym>A%> z{#v`@$SjM@Suh=Chp{-^EYYE)?0bs62amSm;P__!T=J+H#M>zuswT2&r>$w|vUHG= z_iZVhrC;v$T!uM{mJ&oVG-@jTPXs!P@ByiyKQDw!t*r>$|(l_ae&WMyZ^^-fAg z3!zZOSa{dhLiSm)8`Fl#KD+i(yz}=v+Kbpe%ItS%C(O0Z@>JK9 zj-oCr5vETD6)rX&VXHb-vBG0E79?*<$w0NRqewZr=ElFNF+5gdgdTe>^{OxSX-Rd{Atu5mMgK_ZN8d4`EQ^y8iux0W%rNo_XON__5K~#>Ly&W@mUUw~juVXw{Gx1_=ngLLuQR zFbzJUV!;<=-NTJi;=i^C$Wk7ab~%{M>9(G;E7<+AMXbRMY6{Ep>Y8ep_)5%ejit?u z=V7e!Sm?+Em-KM*Ax|pyKcmgblm!n{?rX_;5vN{CKI(cR2s^7j@$ufCZ^s*7_iW;* zV!}2sPZXO(!w_Tts&@*=L)twdGP)8lqQxGoFQ=TMyE?=LjhG|avCgEGlcp2vJQR}id5 zBPha?)^8KdkAd+}f)&N}X&g>n1y!zWf`0O7@bfvKC>}qFpNL5(PF@_dAgtX_kGy(@ z@(_${U*JHLSR?=s4Q?MfqoS13^zp(Aa9e8sQ&$BcVu{lrhXr~Tg#=UOzneTo!xOMl zlab0GfhtfT-hlzlN&JcD^L(1QA{fZR%@KNuYj2wf2;lqLe7)Y<`~>#?>hd;Hri$pZ z?U(~qd6JlHTKu_`QGNAqrQyq$|Uc`ILvIN+h-*A2G4W5MkhZWPW7HzR291y zZeNQOURgX#eD~%y@sU-cyb^Q9prTRyNFsfY@V@&-9(g>E$AP_O~f_0IrxErWJt)I)Dshj!rBYIrunYZAv_*$9PF(TFm7p4f+wPQ zp`*WEsA=WwvL+Xxr=TN)@mX&MnMSzkv4iwMI}F1l24zMAVEpO(TG(tU`V8{}j=Jfd3VC@d7C#clEs7x&c=zd1cUMC8)jW+5XdPfD05TTD$&%yw|d z`a%!j5xXYCfIP~+*Aihe&%3_PGiU0WAK2U)n2(Pc*O*y&5E?Ul65mF&**j=`r7N?tgaRk*3oiOQ8QBs(<4YkL`gy?$DJy#3zL$f&QLn;M8~x*Ee_Y1%judelk02T7oMKI@8(&?GPzbgEvU9UvD6w? z+C=xyuXA6B`1775&2PRzq&np{`A0(xk0z9?b3z+@S5JsLPJ}`Vto_LAKO{F27vEi^ zqx#~f&jdyH|6*eur~WkI0iL;dh7)o6n_AHx@D105yN4rwFVE*;lWB7CpEWvw~tlq{jnx-H{(Vt?~i7*X%QcjRN? 
zXbHgvIaHo4`NlCKZ%>m|hScjjo$&^*;BiZU^+R4)wlz#{*I#Rx@NOSED=lHS593;E zroFwD;bt=9IwLwNq>X`S>j}sk{4*;p`om{R;|&{OVT8w@3^I2^#A==&s^+QHG;|WFOCwLnMLRn!LWb__#zvg%mFjD^P=_!u z3xlG)K|EFIqioj%x!1sZPwZ>OztC^3;YC}8M`gl%KmGBJ5?xw_&T=-kldddA)hzF{t4}tRw^pYxl_6Oc4K9qbO_p8cm zxSi2+z(z~d1tbO=6-veGkhrYeUk&bJ!)_+b+d9^A*mROQiXggDm)vT@VRDVAu0eFv z;>iua;0g4U=5E{h*%Vc@Vi*!`6`t*q-nqGj?>)+jl;jJmN8OHf)krziJDPaqayBHs zF5#$7&=xt;280e1VTrr5aF{8;<+n3M*JYX*NJ8n|JGP7W5g)U1PRFBg#{#G~9aCmB zw&!BWFF|7C=;=>;`0o#~9=M6Tp}BD5^xa#j5t}X^SJnDD8yC9*Gt>ND$WLxuxgTGL z+QFpYpBdvluQ*{L;e#xU0o{@?TwFAA+B7)GqgsAp-Z=CP^r^~j8jnP)$swT*pb>ep z7~;bWa7>lJ%c$t9QrKdk2|v=(-g)M>*uHkPC{6p!jJoS6w9AJ$HL*Rm)XN1wx+(SH z4}hi(cbpZYv77ZMm0v4ZMmn{2Nw~ zZKcWSo{eWScqQw6e}Q5p&>ftt_&s0Ce*m~XWv_^;HTFv`nUeK2mB1LuHI?daStdVL z11AGn#Aj`D`%KZsR(u?Le1#n~CGzLRto(>E2=eEwxd`)>-LL*b3c6}*5B3&D&aef) zhMzB^#Ib$RWfr{CXS{q@Zgb^5uM)sjRDC<{i4s=u7q|vbJ)WEL$}0+ zq#Eg&`oL?(vCgQpS_hU=atGnY6 zPV3baHX5=GS(;4dMv5>@Ihs#qXTtf&Y`}^ln(tST*o7a=#|FHTr@f04Vu>550DKcX zOAzd+&HiVP|C|r_GuSWq*c6H64qL-dHG)fu(FG{#cmRMt5o&7OJIZ)k-~vA#FzFG@ z7}*v&YGg@FVvZH11P{j}l@zMhD5d(V3%Pg3q^A5A55fV_+uZwFtIla0OGc7%uys%z zk}0@&Vyvf`6Q&nBh#r-$Ua+%iA>AOi@qn{v3N0;=rpDO$_$|=~ETD#vfavYspSw}f z0O!!qocl5bC})U}tAED7!lYFYwQo|o(wwSU7u}Mj(3=0+*=)#-8pJZNGQuhj>I(RE z%zm{Rw1=fWku%S^c?;63D=>0SZm1ieKY_M~e-I73Ni)zbsD@U_AXSU6qlV{C!UW@3 zRy$l2135N4Hz95tD{C4}Od*l_6la@dQYXgtayIO-_#OK|j6St(YN-kclI5Pes z6w>tuo+M6Dr~u{>J!H(LvRVrJcgU)~)Y{{(fioGD&XjSb_{DrI1qVoe?^xnm&MR4yWOmas^EtTLENK6`3Ksdkfs(Y&MY2za@yZAE@1+Yx(UOns`pPFEdr5U^@SHMS{s zlm4$r1wb8j8841Xp3;T|EjNwHoIkn;PGw#lHKk<}_GP(3MqYHW@(QMv2g`44%W`M* z-Nth(%wEYfI`Uia1oT-ra3wqyu;HS|CDW#1(X;48xHW9jK&spVq-ACm`J;D9*hyOB zg{2MsdY=GUi8>2WjWUa-f6(1UKPjxX__rv1<4A0$y3?xBd~Rnp6(iA!Uxs8+>Q8_D zwoHM8E#f6DpmF7=DuV;Wl!}`D^tmJFk>tHEEfoW0z_08W;h4Q6n3=-yVucQ}CT$x^ zo7yd!On~RXI+l>KlJuQfu$NPNK}EZ!%O`8*ge$6;b|t7s44K!;t=JoyX#r*)@Eoi6iDjt`p=T=v?UBt8?{N^d-z#`Jj ze`1WFBn@-D?OHOOvrNrxWbiYZi&?_ZBGg2v=4F`JJukHQ# 
z)wjumv`yomWz2PtG(u&3d`CVDl?ML&7bpRT-StE{TzZX4WBh1nluNZU{nG+q!|h&ujlovgO7#LEEbP(bxRRScw2%qXdFG_+wuIUu6DQW5#&t zBVydjO+8_awX&!V(OISV9O5*r`A_r^WdA9n-!iB>>IAcHN!=PqB}oj?Ym-Ok-8&-@ zym_VSiLz5PInFw>wBSm^Mrt_fb3zmJQjmjANmhl6;$lE@%Q|7|<1Vq-lY1P{tk1+$ zzGP(mglMT`eQBp+d26AWo35!UIa(2QuuozDcD!1$7Hee`a164F!5U>bWriYJOg{b? z&7`ut21%q;y`)rGJ~DMpE~fwk%ABM$%shF2K_f_Ke|bp zNqT>kdgyxSiQG%{e#|5`TyACaYH9%jh9Ye6`4s&(=7(X{HqA%M&^kjpHs8i2cNlgRn#}L66Z)nJ7mWl__F>!JH$|Or6J(u}CE$ zs27>lvot7Pd$8_$u--DPE*CE-h) z2ur>{CKk?L7(Pm78E$ZhGRWxw&*YyewwS;ZcD6}0i2GX|nnl&6Pty^B7Q6NxSQTGC z`d6hxZ#j6SM{hcix+BmYTI!u9fR=d%Y67D+(Qk>M_WqY8slEzuGFojmXywT-OD6p) z@SrM2BRK$VOhXLcFB+s;a1d1@eLiUQCBLO#>MTH({Q7hdl^nnJ@KW!5Fe-&))q(ty zfYrJDRKV3Wd&_|;MSDGfYITABeR5&_n-i8r_FuA<-k~2FxRT%R-%J+(Spw_x0jbXb zEi3kF!l?87ssKae_s0S#bVd#aiP!tCyZ&A&@n*iNFemsG4jLrBy1t&Sr6;#Q%uj^; z`nHtguh!)!2MK!2!jYbFF@!5PX>CKEekV0ZB)n<*9Nbo&2L)J#x@CrG4f1o1G1x@{ z?rwX_Oydv5VzlfE6N+$q<@R%ED-7FNTqH&QOMIpS3=I{3I-4g19!NwdLrXX)m?q5* zMrxBqjED$q;C*p*d`y)^X6)&LgtP(2T9A|lRO3Or($pa=44aBZD8WQRoE$F~7Jq3- zMU|WrxWaJq&2$ZuTX4V+j2sD7=l0Y1wZICRkDB|NFx)RO?DD=@771EB4>GxD%PUv6 zpNNb%ba2XQ<-0#KYO?BCL)?NW7PVfHqoDa$q)g5LFYE^@=#Z_8`1HYFWFqEvVNuD= z>9m@@i-xJNn1zuuq2I_%L}$djB;Cs5l0(35OU?ce>lRoNHi4flB~I~r<3vTW_Z%?6 zh4P;HgbK4NTreXAZ3|}?Kx6BDK1st z*zofRDPFCIuL%P_pJ}G|h~u@Mx@nOy0BgCJ zqdkaY28g6Hy`)$R=5Z zIFgubO@n2_bS~d$_OOJofFJe@IKt1E$2W}{D+-cZo!E9N`B1n$Sdl4%bM(<#K&NSGrxF5;HLk~F9bkk2|Y zgBzA?u4-hy+AFQ9FP%V399OhfmdAoa4-6QeOC#TPh3V9CniZ9EMpf=!H~m~`t%D^^ z2Js0{?JdC|QJghwXF3(}Z(XU})L1zxj9Xa5d_;wnIfye3NJNKut+UaN5y;710+y)s zJB*Jm!%bLGmP9B*lO^%52q*zV{$fPi`#?bX7X7tV1OHq2#R>SpeW_)tj#u+-I(20c zV4Ce^b;%$gMe@wb_6}Eo<4)&9;_8i(BILlCEkU?X-{W(^e(Oq7KBOz}Zo%{{7CEkfbU%THtXkqqVB~%tQYyDyFc&RghK*Z8T`rYAd{jZek zd3Ez7CY!)ZYr`;NbBZ&T>B&tAdcl%4>9qup^77 zll(C)akbkrZNuigld-s%>Xa@UkMNkDAR^n=&gbr_Vr2AT%VVky-}(j`3iDy+OhN@7 zd3#34XdZ_%6MDk-(rHk2w?joOX~+wPVAXIq zo~zZ=(mZ5ibJjJLMK1(P)Cv5K#9c#GLS?B6{$ZtM#N_lA4EX@chaC;@ZR*9ou_L)B zb|DrnLRLSG2S&1z=GPNX==Ab{%(A>$$kg~+1v@`Kzi#rIzW-&6a}|CVy*p+Wj+a1l 
zUQg6UDcR3TlN33i=99aD*LHh3=3uzTy)lY04DivY`_%q^el1eT5s<4{JKwm7J)u3B zIypyxYp=cQmv||m$tThBWej;+N_oj5R`>iW8uYH%{voxOd;+8!Ng-Slvg6Z!@m+}a z4WnTlbc>hyWOZsIe|kEcFD71i)87sA;T@A@BWEvlyJdSV`s2~?`|z9MDWLeCJdLL0 zybHInrrTa@FRI&z0Ncb=!L+Owmd1!(#(@T`|V@ zKi#f^?8yv1H@iw&$JV@ERL{z5Rogkfoc4VPeODfzAIz_3fHQWfWjd@M-fllCS2I`jSTqPxf?h^tArM49G+q+$1hNVQv@d6)GJ(9-Z&SJNvVgb($Rl z&->BrCmt={lv%P<2M+wh{1u1O%IYNYG+AMtZh(~`vJ3_3@V|KB@P3=yS=+(G-9g*I zP*GD`Q&G||lfBASFqoUE&eb8fSP6QP&7z0lILE?kfrE|BN$jb;fGsoSItTbBEl=8% zW;y_o9&rS52(g#ahd|8M-pK+tD?}-GoeB)TV6SmM5xC*|4EUM$)sM`o5f60`^dESmZnJTk?T_q9=GH3>Z-^{ z*P~NQ(}_i3;$gEs1n{g{`C~XEmuXdb8jRUxch=xvwVxtO3o}``qkiD% z<~p4!VE8vj>#l!vF}c52CQpoypLgN(PV~kSM<})v@%#PpWLgv~y9P3y@An#ifL84e zX93k+$<@YwWT*i3>OS9zc${g8%JbHlMErFZx=EExCX@c0ACkMXLBmF~8?Mi@XZ*`- zU|j*qaAB-o<8>2&oHl~}%R&F3$Xlbk>L_@@HSz6aQm4MSeGSw3R8)C1o@Nnep03A> zRHylN<8I=KMYY&VQ>JO^v#`r#WgpdE%eUrBs&m^cd:{^#9b56g$g0kAy7ZU1n1 z@6X5oknX+rhNiMNqx|H3aNF2F3X`Axmd;btd8!lFX!P2eFBjVRm%T;jZ7F#~ZzA>f z&(|HDCwV91z-{Bvl$51S)$90=)4TN*#1e?`j$B%u$-wqf2L*v1>t~Y$Nv!h>mB8A= z`IQd@a^cDWgxN}A`Y;;8W*S#bsl@wchmzR0d+WXV6Gja^m!0u6lhQQD)OfI3^JiOH zbvG}+?beV;lh|`OeB;xXQ30Dut@G<0KEchK(cWAMh;J5qHmwJbe!KHV8B-p}N7N(F z$K*^B#QpNHFV>y*&xowH>~tnUqic6%vwiOp2w-3Jp3~9MR`)ByF|z|Y!VfW`SEU$I ze34&ejPnJ;g|DAKW}!^DlhENgvWL3S7=eIZ*hmmzy=5VH6{!+2q)vJp_GUDLj2wkj z4pPTESht}0j8Y}KYTS5k16$sqMCr_!n~JE;6*&ujjE@P(^{?6D*GyL4kA{z4-F)rQ zB}N;JhfG@T<=s@bZ4XbYlbQNmNNo2#4Xo?W8-e9dd{3XK(v}ahkK%KJ>vq3i9}GJ^ zmXw%}tn{n>efLB|@b$`;n^g6b4IEc8>i6Tlo^K7T@@BTzURtgc2DDnuCV{HQ<6BRC zOr#ud^V#5WU#AL(9!ID8oB3nPT2Tfd>y@8d3+rjQGT(r#{y}@F>h9rqZn(r3M}*#Gx46Etm)+CNk6`+v4iB`)|}pp^XS1dn_l4^-r?kq@Y{3_ADBrCrBi}caCRYu zvCaLR*W>+Rp>uQF0+++gZE^pGgxB`vT~zUXzVxA{YO%BUdt;zsuD~F0W}HaIlKoIi z?3?~k`TW$dnDTH=J~W*|)|^mE}zLH|PL`~AFyLHbEDbK>J!wfIqy{{%Cw z=jQWCvGDv;d#m>6Q~G-7t*c*Xs^z=WyW6Ks(MRXC%DqN!@4gL%H2Gh2A?;hle7!5* zB6Va0@lNX=gZ)}HD{yCO*&nwa^j(L37B6-DeZ@kS4cJL3J<p5b$^b-gsI z*!eKOL7DISD?s1$_?gDvUZTt40((Yt=fLyUwvr(5-lfFkpr!p8oWTNUg2($laCi_L z&^mh~_*uK%&hYs=Zd6_E!9}hif1>yO=V$jkZrE~uAcAj%rPZQ6r 
z5M`iUY+3RnX)=wIn`glCrrA0JhB~$qbCbU+@1#+!>nwr*!qs>3XS5=ZPtI67s;l+n zWm9!%1b|#}B=$TG=j-)Yw5O)`<8gLh;4S@8EVc%^{dGeeHu+?<8yVJy1}f%cIM!MCL|aYoQ$z6jGX{qpHEa$Jyp^eJSR+B(G( zE(O$bVcq)(_)b>N6knJOiE>YcTeBC7`(;8x7k`~KjT()*DU+-4BH%MJKibyeVkdvK z`}@j|ujPAP4?XtxQ3%U02M!-}g6q%w^Jq2=IK*SRoxRgtMhB?*cW398T=Qr^k7Slz z=?B(XQ{zhtG&_OBPtc2g_x(FPF;q7JehO_s{IE5AF14 zx`JlcuL~LMmsX`f-OjtM6~Q+OBb?>KA?>)HjYEx>>;7*wTW<`Wb&^7Tx5sjcWXG+K z%Sq46$s`)A2Y?s5`^BLKP!8NG^T`|m0E6%7=VJU-;pF5gdLxzfes>zkI=rOW{=;^y z-Tcq`;@^o9s&7hM{vNW^Vs}WO{?uG((_{7&JUm?b;F})#J3f(u?Haekqr_- zR(^+!y)j*Csa`r=`^e0r*I+--jdgTHv#?D%4)~W9&cxZd6K8v_&u`J0jj!5!rk*6A zR~5O?Q}NM8oFF(=?S6w87~r!^P2aaAK-}t7dzUPIahwSLWJuzB5x=x6v-ftmwNqFM z%s-)idFhzqegBS=e!V*0=IG7_pHO?*@N>#*g3{uK3zF;Wqc8Mtz9a5fGnArqmjybk}CF@`B2D1 z%p<|gV02E->am}wg2UT?Iezm_Yc`182*|`r!PmYx^;pzqFo}$zaLy@h!3*- zkM^!Stf@2WYwe&0P+Y-{39;Hy47p2ga+4y+T2T;06s$lE;UcLaiCL6IR6wB)R;yG5 z0db>RR}_n);6C7jbwxm20C7RIxFb;XdxOgd_kHL4#y-!?{9_)}%i)}R&hNa-@4WZH zdn{z&e{0u(+p$XHt?)1k;Gj-P9D|Zb@srmd~7H`Lb-rw>aRgsemE}!Ibd<)_d zecCKv**#W0`$BP{dzw0~U|`zxnygik-EKz?AFiP+ll;#8WzgqVuWY93CY_wBi!2r3 z;hDX)g`Z8ltxqT(W%5J6nz+p+$_o|BnZ5ItFP&VP<|8)U*U^Gr4=l&><{DwYR%>t2#G>T~?_&!l?q zng5))vpG4!ZS~VxHP@Ja6V}EY$($zFUY1@NksEV*!nAdjYH|T{-z#W6vwQWhn)ET} z)6*>}`L&e0g6=JjhZMJ3<*~Be^{w`02JiCiM~XB}kUc{4qb<$$P{fcV_Z}E*n{ZB27_=!hF({=dS3S5X?%d&T|N650Y+drzgLZ}=KHSo9R6*+Fncm1c1 zYI~YC`bf+EL)sJNQI0QXshiflzHd-0xsu_3m$V&G_~zTH9hs9`lx@2mZ|p7@63l$F z`;Gi@>OZzLzHYm8y>4l~<-Nq_=02!U-CI`3AZ`EX>K(FvXH61Drc$2EmYMu8rTf9!;|@c= zPa8V@LgeRuug3Mmzs$4g{j}8d!hjyu`)#tj?%lC9j&L5HY?B+FF=r%S=bqot*!bPE zTRZa}J)J9)@{~WuZ(dS@m2yMlrX`qM&3S5*_N5iyZ??~|^gNg9LofSZco3F4(%ocF z&X9X+u&e%dk@E}f6$>8u#x3NtxgNoR-QPzS&I?dsj{$G5EV#6){YXO;w-UJ#Q=>PfX#_isvZ*}kIa)LgG#3SuGXn)AZR(Q{Q$vbQTo%J}DxqEAgm*?Q4cb9T_%PY(>(p=-5m8R(==B>&d{oTvHMRii?Tf<$jK(YkFA~-Jg+A3(eGm^+er^8ETnX2)A`sN zhqh&whr6}p%x=y$AGXTdvS9X$o%T7ASDX*eEAqMNnv^=LMwe+mGRd)`&-FKDy6wO1 zCxabhMGs1~O}{)m&~R9B`gBD0;h~TG4yjLNS%%f~=H}Kl_bxXNbL!=t+IA)-I#};u 
zpIKkGWaiRbYGOc8^iHcSU-y^x*JT%!T}>Tx@bK5+D&16{Q?h!+iig?L2L7SLUyqHs z7&CB<>6UxN*FA2Ydv+ioXhw0>hQGCVt=~AMqJI4!MFpnn+@P{Z!=0u2)AHygj^7xm2!YWO}vMp*?F; zm^p7RlnMtYrGh(=QOQ(qg8-7Z7wtNNj+TEiNHDe^qU@BwNSOvih_y!ul%y_iL-bRt zmH%1}*+%vs1dOXp&o~em7< z=s*BoUjKJqA|l?W#>-3Us+Jp!gX-bxU@#~`r8F&c$DN$q_%6=0J74HRiTF;KlPg~$ zalwS{BC#`txwXUWY-Wf97r-Y!TqO|A7zZKj4G=$;n4$`BS<+rR$?I z@KMU2X4j{5eN+ZMO8L|5`nYtxrF~F(gHrKB?e6fuwLhZ*1^+xy+5eLx2G!w+;i}MS zl(HyL-uNl>N`|Y~8W`h=0^Fnh6!P|6`JE0V0lrL_V2UD|F}_G&;6H`-Cdk1k)H7Ov zyAo8Fu8dqA%Dg`aQ8d-@92A*3_R80u{bg3gPbv+4zKVaIlb8JcFy&0kV~N8COrAcb z!gTBR;KJ!2d*hQAnO6kuv3Rko`-}D6?TQnYuA^{kdkZhK+WC#Q&Pz%KDfZ`E7o>O; zW2M{omxtU;Fn6g*62$*Hp@-pUjLk#-AAOFK)p-^dN4Tc;oZRfJ=RWW8vfSA-*`lFu zt?#k!>cy`@?pyS|?i`Z;<4>c$sEWG?UYL>>xHZb|%7Oud9eS*@c(${&X(B1Dwy}>d zdEDjoi0rf?_bm(dE*j9X=*F^ruP?0ZlI!$0vxLJIAp@`Z>SdT&{5Y#FZR;J)7IUpW zJBIb&Xu~reF^skYa)4V+0Q2GORLHGKogEw7Z(6NUvQ zPCCHxua;h49IZKF9=>SCcluQ88$GPUZgCGv8BfcKu`f2X^)0+dmo@+~2DB+w?!O zULaRcq?QRY=NS7)h`~SaJ|x8zQ55*w_@j3pT+D61;;KH#a1rP@Vf=KE5hPeNESX3M z8c%eVQ8)$qSFmIxY)esM$S)-#L7xkjt%!zw&=e#SVG^*AJKKr~3i2Tm(y*RL1m91B zfleEp?pywhz$1M8wVkNsAyKBrSpG0fvzxcB^Tga~%fkTYI0rnEu7J);Iz(K<}a01$M zFpNgv0DCs%7wo&Rj2*L(z(FifuE5>|+ahp)eF)N%Am$b5#{}yka7Yk1!2So@BF00) z#s*MH5I7_V9H7e++XoJ24=iKHEF@4ojf3t|uq})O$6y>ro`G#)95~?1&UzpgFgMs^ ziQ@_D)z2frCKc z0JQ~di@-r3a1aO_1Of-(TF3{e*I*etW+6d>N)j4haG-*15jYThjzioHd_c7Y7;{kP zKt2$BjwARS2hG}GTQ)Y}L&%O%SS4#fJP+1NnL5XT#3 zi19%1IZm_ZkdOd}D>yzHv0owh9H$XDAnq0tVhphc5qyqI5I7KgjwARSm$0#cydd}- zNANk0;Bx}vZh!+mCu0N#@4KM*hUO`O;Bx|l*ExaUa{|HVpg=|VfX9P?I3B}5Q%Tqs z!RN-aWasrJB)}mSnlA)`&j|#d6F5At2%H@=@F9Y0Zvw&R1cJ{A0x=#0j6(wavezYn z;Bx}O=LD#!JLd)D6yk0nAp!df>;qos1cJ|vH6hyvxED(%Br$-kvkxIjK+kwY1P#s_ zo%KYd5UjUOGMtb|z&wCt>}M24J+MD_))UeaP}8tv5(3mOon^*{S*)0GAY<>7Kn5O2 zux!O55p+MXga&!(>;uR;a)4edQz#j&IS2GwefNM_{BM7%{PwxAOsjwUSP!Q$ka2U) xh!Jky?&h5L&-yTP&VMC3fsdA%%Y7S^!1&~Siavr14ssxGP~a5kJNqv1{{dCwQ(^!B literal 0 HcmV?d00001