diff --git a/car/.dockerignore b/car/.dockerignore new file mode 100644 index 0000000..6cb7243 --- /dev/null +++ b/car/.dockerignore @@ -0,0 +1,8 @@ +**/*.jpg +**/*.png +tests +MotorControl +Messaging +Web +**/*.mdj +**/*.pdf \ No newline at end of file diff --git a/car/.gitignore b/car/.gitignore new file mode 100644 index 0000000..52005fc --- /dev/null +++ b/car/.gitignore @@ -0,0 +1,7 @@ +**/*env +**.vscode +**.DS_STORE +**/__pycache__ +build +dist +CarController.egg-info \ No newline at end of file diff --git a/car/Assets/ControllerSlideIcon.ai b/car/Assets/ControllerSlideIcon.ai new file mode 100644 index 0000000..43d77e1 --- /dev/null +++ b/car/Assets/ControllerSlideIcon.ai @@ -0,0 +1,1068 @@ +%PDF-1.5 % +1 0 obj <>/OCGs[21 0 R]>>/Pages 3 0 R/Type/Catalog>> endobj 2 0 obj <>stream + + + + + application/pdf + + + ControllerSlideIcon + + + Adobe Illustrator 24.0 (Macintosh) + 2019-11-10T14:22:25+10:30 + 2019-11-10T14:22:26+10:30 + 2019-11-10T14:22:26+10:30 + + + + 256 + 256 + JPEG + /9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA AQBIAAAAAQAB/+4ADkFkb2JlAGTAAAAAAf/bAIQABgQEBAUEBgUFBgkGBQYJCwgGBggLDAoKCwoK DBAMDAwMDAwQDA4PEA8ODBMTFBQTExwbGxscHx8fHx8fHx8fHwEHBwcNDA0YEBAYGhURFRofHx8f Hx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8f/8AAEQgBAAEAAwER AAIRAQMRAf/EAaIAAAAHAQEBAQEAAAAAAAAAAAQFAwIGAQAHCAkKCwEAAgIDAQEBAQEAAAAAAAAA AQACAwQFBgcICQoLEAACAQMDAgQCBgcDBAIGAnMBAgMRBAAFIRIxQVEGE2EicYEUMpGhBxWxQiPB UtHhMxZi8CRygvElQzRTkqKyY3PCNUQnk6OzNhdUZHTD0uIIJoMJChgZhJRFRqS0VtNVKBry4/PE 1OT0ZXWFlaW1xdXl9WZ2hpamtsbW5vY3R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo+Ck5SVlpeYmZ qbnJ2en5KjpKWmp6ipqqusra6voRAAICAQIDBQUEBQYECAMDbQEAAhEDBCESMUEFURNhIgZxgZEy obHwFMHR4SNCFVJicvEzJDRDghaSUyWiY7LCB3PSNeJEgxdUkwgJChgZJjZFGidkdFU38qOzwygp 0+PzhJSktMTU5PRldYWVpbXF1eX1RlZmdoaWprbG1ub2R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo +DlJWWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+v/aAAwDAQACEQMRAD8A9U4q7FXYq7FXYq7FXYq7 
FXYq7FUq8wea/LPly1+t69qlrplufsvdSpFyI7IGILH2GKvM7n/nJXQdQne08i+X9X853Snj6tlb PFaA/wCXPIvJPmY6Yqs/Sf8AzlD5gUm00nQvJ1s+4N9M99dKp6U9H1Iq+zIMVXf8qf8Aza1QB/MP 5ragpanODSLWKwC7UIWSJlJ+fDFWx/zjJ5XuBXV/M/mbV2b+8+t6kWVu/RY1NOW/XFW/+hUfyab4 riwu7qU9Zpr24LnwFVZemKu/6FR/Jpfit7C7tZR0mhvbgOPEVZm64q0f+cZPK9uK6R5n8zaQy/3f 1TUiqr36NGxpy364q1/yp/8ANrSwX8vfmtqDFa8INXtYr8NtQBpJWYj58MVW/pP/AJyh8vqDd6To XnG2TcmxmexumUda+t6cVfZUOKr7b/nJXQdPnS089eX9X8mXTHj6t7bPLaE/5E8a8n+Yjpir0zy/ 5r8s+Y7X63oOqWup24+09rKkvEns4UkqfY4qmuKuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2Kux V2KuxVhvn783fInkaMJreoBtRkp9X0i1AnvZS2yhYVPw17FyoPjirBf0r/zkD+YZI0q0j/Lny1Js L2+X19WkQ90hIAjr4EKR2Y4qnHlz/nHD8vNPvP0pry3Pm7XGoZdR1yU3XJup/ct+7Ir05hj74q9P tbW1tLdLa1hS3t4hxihiUIijwVVAAxVVxV2KuxV2KuxV2KuxV2KqV1a2t3bvbXUKXFvKOMsMqh0Y eDKwIOKvMPMf/OOH5eahefpTQVufKOuLUxajocpteLdR+5X92BXrwCn3xVJzqv8AzkD+XhH6VtYv zG8tR7NeWKehq8aDu8IqJKeADk93GKs68g/m75E88xlNE1ALqMdfrGkXQEF7EV2YNCx+KncoWA8c VZlirsVdirsVdirsVdirsVdirsVdirsVdirsVQOt65o+h6ZPqmsXkVjp9svKe5nYKij5nqT2A3OK vG3/ADA/Mz81J5LL8toG8ueUeRjuPOd/GRNMAeLfUYD+DdfdDirNPy+/JPyV5LkN/DC+q+YpCXuf MGon17t3b7RVmr6df8nfxJxVn2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxVgP5g/kp5J86SfX54 X0vzFGQ9tr+nH0LxHUfCWZf7yn+Vv4EYqwtPzA/Mz8q547L8yYG8x+UeQjt/OdhGTNCCeK/XoB+L dfdzir2TRNc0fXNMg1TR7yK+0+5XlBcwMGRh8x0I7g7jFUdirsVdirsVdirsVdirsVdirsVdirD/ AMyvzQ8t+QdJS71NmuNQuiY9K0i3+K5u5qgBI132qw5N29zQFVgGg/lZ5r/MPVIPNf5unjZxN6ui +R4mItbZT9l7uh/eSU+0p+TbfAFXtkEEFvCkEEaxQxKEjiQBVVVFAqqNgBiq/FXYq7FXYq7FXYq7 FUs8yeZ9A8s6RPrGvX0Wn6dbj95PKaCp6KoFWZm7KoJOKpB+XH5t+SfzCtJpvL12xuLY/wCk2Fwo iuY1rRZDHVqo3ZlJHY77YqzLFXYq7FXYq7FXYq7FVk8EFxC8E8aywyqUkicBlZWFCrKdiDirxPXf yr81/l5qdx5r/KI8rOVvV1ryPKxNtcqPtNaVP7uQDov0Lt+7KrP/AMtfzR8uef8ASXu9MLW2oWp9 PVdIuPhubSapBSRfCqni3f2NQFWYYq7FXYq7FXYq7FXYq7FXYqwP81fzVsvJNlb2dnbnVvNurH0d C0KGrSTSMeIdwu6xKep79B3IVSX8sPyj1C01VvPn5gTLq3n++FamjW+nRn7MFsoqgZRsXX3C92ZV 6tirsVdirsVdirsVdirsVef/AJl/nJ5f8lPDpcMUmt+bb2i6b5csvjuJGb7Jk4hvTQ+JFT2B3xVi 
/lv8nvMXm3WIPN/5vzpf3sR9TS/KUJrptiDuPUSrLK+2+5B/aLbUVT38x/yU07zDdw+Y/LV0fLPn iwo1jrNqOKycRQRXMaijoR8NaVpt8S/CVUB5C/Oi9XWl8kfmVZr5e86IALec0Wx1FSaLJbSV4hnp 9mtCdhv8IVetYq7FXYq7FXYq7FXYq7FXlP5n/lJf3eqJ57/L+ZdJ/MCxFaiiwajGPtW9ypohZhsH b2DdmVVOvyq/NWy87WVxZ3ludJ826SfR13QpqrJDIp4l0DbtEx6Ht0PYlVnmKuxV2KuxV2KuxV2K sP8AzQ/MrSfIPlttTu0N1qFw31fSNKjJ9a7uW+zGgAY03+JqbfMgFVjP5R/lhqtpqE35gefGF95/ 1Zamu8enW7Ci20C9FYIeLkf6oP2iyr1bFXYq7FXYq7FXYq7FVskkcUbSSMEjQFndiAqqBUkk9AMV eLeYPzd8z+ddVn8p/lBCtw8R9LVfOU4/0GzB2PoEgiWTwND/AJIYfEFWX/lp+T3lzyOk16ryav5n vqtqfmK9PqXUzNu4VmLGNCf2Qan9otTFWeYq7FWN+ffy98reetEbSPMNoJ4t2trhaLPbyH/dkMlC Vb8D3BGKvK7Hzj55/Ju8h0bz88uv+QpHWHSvOEaF57QHZYr5ByYgdm3PgW+yqr3DTtS0/U7GC/06 5jvLG5QSW9zA4kjdD0ZWWoIxVEYq7FXYq7FXYq7FXYq8p/Nv8sNUu7+H8wPIjix/MDSVqKbR6jAo o1tcL0Zig4oT/qk/ZKqsm/K78ytJ8/8AlwanaIbXULZvq+r6VJX1rS5X7UbghTT+Vqb/ADBAVZhi rsVdirsVdiqB1zW9M0PR7zWNUnW20+xiae5nboqKKn5k9AO5xV49+Vehan+Yfms/m75rt2is15Re R9Fl3W2tgSPrbL0Mkn7J/wBkNvToq9vxV2KuxV2KuxV2KuxVIfOvnryv5L0STWPMV6lnaJtGp+KW Z6VEcMY+J2Pt06mg3xV5HHo/5ifnZKlzr4n8o/lkWD2+ioeGo6mgNVa4b/dcbdadPANtJir2ny/5 d0Py7pMGkaJZRWGm2y8YbaEUUeJJ6sx6lmJJ74qmOKuxV2KuxVRvbGyv7OayvoI7qzuEMc9vMokj dGFCrowIYHwOKvD9R8i+ePyivpte/LdJNa8myMZtV8kyuzyRDq0li55NWn7O7eIfsq9O/L78yPKn nzRhqmgXXqcKLeWUlEubaQ/sTR1qp2ND9k9icVZRirsVdirsVdirsVdirxD81NC1P8vPNY/N3ypb tLZtxi88aLFstzbEgfW1XoJI/wBo/wCyO3qVVew6Hrema5o9nrGlzrc6ffRLPbTr0ZGFR8iOhHY4 qjsVdirsVdirw38wHn/NT8zIPy2spG/wj5caO/8AOdxGSBNMDWCx5L/ww8a90xV7fBBDbwRwQIsU MShIo0AVVVRRVUDoAMVX4q7FXYq7FXYq7FXmf5jfnVY+X9RTyv5Ys28z+err4bfRrU8lgJH27txt Gq9Sta068R8WKpd5K/JW+utaj86fmheL5i82bNaWXXT9PFahIYvssy/zEUrvufiKr13FXYq7FXYq 7FXYq7FXYq8p/MD8l57jWj52/L29HlzzxFVpWTaz1AdWju4wCtWp9qhr+0CaMqqI/Lb86INe1J/K nmyzPlvz5a/DPpM/wx3FBX1LRzUOpG/GpNNxyG+KvTsVdirsVdirsVdiqyeCG4gkgnRZYZVKSxuA ysrCjKwPUEYq8P8Ay/km/Kr8zJ/y3vZG/wAIeY2kv/JlxISRDMTWax5H/hfeh6vir3PFXYq7FWG/ m75+j8jeRNQ1tB6mosBa6Rb05GW9nBWFQo68d3I7hTiqD/JT8vpPJfkmGC/rJ5i1RzqOv3LkM73k 
/wATKW7+n9n51PfFWfYq7FXYq7FXYqo3t9ZWFnNe308drZ26GSe4mYRxoiipZ3YgKB4nFXieo/mN 55/NK/n0D8rA2l+WonMOq+erhGUf5aWCGjM9P2vtf6mzFV6F+XH5V+VPIOnNb6PAZb+5+LUdXuKP d3Lk1Jkk8OW4UbfTvirMMVdirsVdirsVdirsVdirsVdirDvzI/Kvyt5+01LfVYmg1G1+PTNYt/gu rWStQ0bjqvIVKnY/OhCrANE/M3zh+W2qQeVvzZP1nSpWEOi+eolYwTD9lL3qUkp1Y79zyHx4q9th mhnhjngkWWGVQ8UqEMrKwqrKw2II6HFV+KuxV2KuxV2KsB/Ov8vpPOnkmaCwrH5i0txqOgXKEK6X kHxKobt6n2fnQ9sVRf5Q+fo/PPkSw1phw1FAbTWLenExXsAAmUr+zyqHA/lYYqzPFXYq8T1Wv5h/ 85A2mlA+p5a/LmNb69A3STVp6GFD4+mACPAqw74q9sxV2KuxV2KuxVin5h/mb5S8g6SL/XrrjLLV bLT4hzublx+zFH+tjRR3OKvNbLyJ59/Nu7h1j8yPU0LyajLNp3kqB2SWYDdZL9xxav8Ak7HwCd1X tmm6bp+mWEGn6dbRWdjbKI7e2gRY40UdlVQAMVROKuxV2KuxV2KuxV2KuxV2KuxV2KuxVA63oWj6 7pc+laxZxX2nXS8J7aZeSMP4EdQRuO2KvEptH89/kfNJe6Atx5p/K8s0t5orHnfaWhNXkt2P24l6 kdP5qbyYq9g8n+dPLXnDRIdZ8vXqXtlLsSuzxvSpjlQ/EjjuD+rFU7xV2KuxV2KuxV4npdfy8/5y CutKJ9Py1+Y0TX1mpoEj1eDeZR4eoCSfEuo7Yq9sxVKvNfmC18ueWdU1673t9MtZbp16FvSQsEHu xFBirAP+ccPLl5p/5eLr2qfFrnm65l1zUZSPib60eUO53oY6PTxY4q9UxV2KuxV2KvJ/Pf52TJrL eTPy6sh5l86vVZuJrY2AGzSXUoIUlf5Qwp3IPwlVX/Lz8lItJ1b/ABf50vj5o8+T0Z9Sn3gte/p2 cRACBa0DUH+SEBIxV6jirsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVeOecPyc1vQ9bm87flL cR6T5gb4tR0B6DTtRUGrKU2WN28dhXuhq2Ksi/LL849E85vNpN3A+h+cLCqan5dvPhnRl+00XIKZ E96VHcdCVXoOKuxV2KuxV5Z/zkd5du9Q/Lx9e0z4dc8o3EWuadMB8S/VTyl3G9PTq/zUYqz7yp5g tfMflnS9etNrfU7WK6RepX1UDFD7qTQ4q8z/AOclbmfUNB8v+RbRyt15z1e2spePUWkTrJO/+wb0 yfbFXrlra29paw2tsgit7dFihiXoqIAqqPkBiqrirsVQWta3pGiaZcapq93FY6darznuZ2CIo+Z7 k7ADcnYYq8UuPM35hfnPPJYeTjP5W/LsMY73zNMhS9v1BoyWabFUPTlX/WI3TFXqvkT8vvKvkbRl 0ny7Zi2hNGuJ2+KeeQf7smk6u34DsAMVZHirsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVdir sVYF+Zv5P6D53SHUElfR/Ndh8WleYbP4LiJl3VXKlfUjr2JqN+JFTVVivlT839f8sa1B5K/N2JNO 1R/h0rzSm2n6goNByeirFJ0rWg8Qu3JV7OCCKjcHocVdirsVUrq1t7u1mtblBLb3CNFNE3RkcFWU /MHFXkf/ADjVcz6foPmDyLduWuvJmr3NlFy6m0ldpIH/ANm3qEe2KrNT4+YP+codJtCedt5O0Ka+ IO6rdXz+jSnj6UiMPlir2TFXYq7FXnv5jfkl5b/MG6il8w6nqrW0H+8+n29xHFaxmlCwj9I1Y/zM 
Se3TbFWPxf8AOLnkeGJIodc8xRxRgKkaajxVVGwAAjoBiq//AKFi8m/9X/zH/wBxI/8AVPFXf9Cx eTf+r/5j/wC4kf8Aqnirv+hYvJv/AFf/ADH/ANxI/wDVPFXf9CxeTf8Aq/8AmP8A7iR/6p4q7/oW Lyb/ANX/AMx/9xI/9U8Vd/0LF5N/6v8A5j/7iR/6p4q7/oWLyb/1f/Mf/cSP/VPFXf8AQsXk3/q/ +Y/+4kf+qeKu/wChYvJv/V/8x/8AcSP/AFTxV3/QsXk3/q/+Y/8AuJH/AKp4q7/oWLyb/wBX/wAx /wDcSP8A1TxV3/QsXk3/AKv/AJj/AO4kf+qeKu/6Fi8m/wDV/wDMf/cSP/VPFXf9CxeTf+r/AOY/ +4kf+qeKu/6Fi8m/9X/zH/3Ej/1TxV3/AELF5N/6v/mP/uJH/qnirv8AoWLyb/1f/Mf/AHEj/wBU 8Vd/0LF5N/6v/mP/ALiR/wCqeKu/6Fi8m/8AV/8AMf8A3Ej/ANU8Vd/0LF5N/wCr/wCY/wDuJH/q niqGvf8AnFH8vL+JYr7Vdeu4lbksc9+JFDUpUBoiK0OKvSPI/ky18n6Imi2WoX1/Yw7Wq6hKs7wo BT043CI3AdlNadqYqyDFXYq7FXjemcfL/wDzlDq1oDwtvOOhQ3wA2Vrqxf0aU8fSjdj88VXfk/TV Pza/NbzC45FdQtdIgeg+EWETRSKCPEqlcVexYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FX Yq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXjv5wU0v8ANr8qfMKDiW1C60id6D4hfxLFGpJ8Cz0x Vv8A5xkAuPK/mfVz8Tav5m1K79T+ZWMajr8VKqeuKvYcVdirsVdirsVdirsVdirsVdirsVdirsVd irsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVdirx7/nJsC38r+WNXHwtpHmbTbv1P5VUyKenx Uqw6Yq3/AM4o/F+TVhcNvLdXd7NMfFzcMpoO32cVewYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7F XYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXj//ADlcOP5NahcKaS213ZSwt4OLhVBoev2s Vd/zij8P5NWFu20trd3sMw8HFwzGh7/axV7BirsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVd irsVdirsVdirsVdirsVdirsVdirsVdirsVeP/wDOVx5fk1qFuorLc3dlFCvi5uFYCp6fZxVr/nGQ i38r+Z9IPwtpHmbUrT0/5VUxsOvxUqx64q9hxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2Ku xV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KvHv+cnD9Y8reWdIX4n1fzLptp6dT8SsZGPT4qVUdMV a/J+ml/m1+a3l5zxLaha6vAlR8Qv4mlkYAeBZK4q9ixV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2K uxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KvHfzgpqn5tflT5eQ8iuoXWrzpUfCLCJZY2IP iVemKrdT4+X/APnKHSbsjhbecdCmsSTsrXVi/rVr4+lGij54q9kxV2KuxV5h+b/5P6l5vt31Dy15 gv8AQPMMaUQQ3dxHZz8RsssUbUQ/5aCviGxV415E8s6Xd63/AIO8/wDmvzj5W87IeKQSasosrwE0 V7SZoWB5dlLGvYsagKvVf+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6 nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl 
/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBx VP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8A qhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhir v+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hbNK/6nrzl/wBxVP8Aqhirv+hb NK/6nrzl/wBxVP8Aqhiqhe/848+XrG0mvL38wfN1taW6mSe4m1iNI0RdyzM0AAA98VeSWXkvXPPX mP6h+V/mfzS/l2zkKan5s1jUZDaMR1S1hjjt3kYdftfMKKEqvp/yN5Ks/KOipp0N7ealO1GutQ1C 4kuJ5XApWrswRfBUoB86nFWRYq7FXYq8b0zj5g/5yh1a7A523k7QobEEbqt1fP61a+PpSOp+WKr/ APnJW2n0/QfL/nq0QtdeTNXtr2Xj1NpK6xzp/s29MH2xV65a3Vvd2sN1bOJbe4RZYZV6MjgMrD5g 4qq4q7FXYqxrz7+XXlPz1o50vzFZidFq1tcpRLiBz+3DJQlT7dD3BxV5bB5r/MP8m7iOw87GbzR+ XxYR2XmqBGe8slJ4ql6m5ZRt8X3Fj8AVe1aNrWk63pkGqaRdxX2n3K84LqBg6MPmO4OxHUHY4qjc VdirsVdirsVdirsVdirsVdirsVdirsVdirsVdirE/wAxPzP8peQdKF9rtz+/mqLHTYaPdXLjbjFH UV3O7GijucVeb2fkPz9+bd3DrH5kepoPk5HEun+SIHZJZgu6yX7ji1f8nZh2Cd1XtemaZp2l2EGn 6bbR2djbII7e2hUJGiDoFVaAYqicVdirsVUrq6t7S1murlxFb26NLNK3RUQFmY/IDFXkf/ONVtPq Gg+YPPV2hW6856vc3sXLqLSJ2jgT/YN6gHtir0zzX5ftfMflnVNBu9rfU7WW1dupX1UKhx7qTUYq wH/nHDzHd6h+Xq6Dqfwa55RuJdD1GJj8S/VTxiNDvT06J81OKvU8VdirsVdiqncW9vcwSW9zEk1v MpSaGRQ6OjCjKymoII6g4q8V1r8qvN/5fanP5m/KGQNZSt6ureRrhibWcftNaEn93JToK/I0ohVZ t+Wv5u+WPPlvLDa89O1+zquqeX7wend27qeL/CwUugbbkB/rBTtirOMVdirsVdirsVdirsVdirsV dirsVdirsVdiryXzv+dly+tv5L/LSxXzN5xNVuZlNbDTwNme5lBCkqduIagOxPL4Sqi/y9/JO20b VT5s84XzeaPPc9Gk1S5HKG2PXhZxsAEC1oGoDT7IUVGKvT8VdirsVdirsVeWf85HeYrvT/y8fQdM +LXPN1xFoenQg/E31o8ZdhvT06p82GKs+8qeX7Xy55Z0vQbTe30y1itUboW9JApc+7EVOKprirxP Va/l5/zkDaaqB6flr8xo1sb0jZI9WgoIXPh6gIA8SzHtir2zFXYq7FXYq7FXYq88/Mr8mdD84TRa 1YXEmgecrL4tP8w2PwTBlFFWcKV9VO3Wo7GlQVWOeV/zk17y1rEPk783rZNK1aQ8NN8zxCmmX4G1 WkoqxP49B4hNgVXsqsrKGUhlYVVhuCD3GKt4q7FXYq7FXYq7FXYq7FXYq7FUv1/zDonl7Sp9W1u9 isNOtl5TXMzcVHgB3Zj0CjcnYYq8Wk138xfzqle18tm48o/lqSUuddkUpqGooNnS1X9iNulfvJ3j xV6x5H8g+VfJGippHlyyW0thRppPtTTPSnqTSH4nb9XQUG2KshxV2KuxV2KuxV2KvE9Lr+Yf/OQV 
1qpHqeWvy5iaxs2NCkmrz7TMPH0wCD4FFPfFXtmKuxVhv5u+QY/PPkTUNEQ+nqKgXWkXFeJivYAW hYMOnLdCewY4qg/yU/MGTzp5Jhnv6x+YtLc6dr9s4Cul5B8LMV7ep9r51HbFWfYq7FXYq7FXYq7F Uq8z+VfL/mjR59H16xjv9PnFHhlHQ9mRhRkcdmUgjFXjTW35i/kg5ez+secfyuQkvak89S0uPqSh 2EkSj/Y/6nUqvYPKPnLy15v0aLWPL19HfWMuxZDR43pUpIh+JHHgwxVOsVdirsVdirsVdirsVdir AvzL/OLy55IEVhwk1jzRe0XTPLtl8d1Mz7KWChvTQnuRU/sg4qxHQPyi8z+dtUg81fnBMtyYz6ul eTIT/oFmD09cAkSyU6ip/wAosPhCr2mKKKKJIokWOKNQscagKqqooAANgAMVXYq7FXYq7FXYq7FW A/nX+YMnkvyTNPYVk8xao407QLZAGd7yf4VYL39P7XzoO+Kov8ofIMfkbyJYaKx56i4N3rFxXkZb 2cAzMW/a40CA/wAqjFWZ4q7FXYq8N/MBJ/yr/MyD8ybKNv8ACPmNo7DznbxgkQzE0gvuK/8ADHxr 3fFXt8E8NxBHPA6ywyqHikQhlZWFVZSOoIxVfirsVdirsVdirsVdirx7zf8Aktqmk6zL5z/Ke7TQ fMrfFf6ORTTdRAPIpJH9mNm33G1d/hNXxVOPy4/OnS/Mt8/lvXrR/LXnm1+G70G7PEyECvO2c0Ei kfEB1p4r8RVek4q7FXYq7FXYq07pGjSSMERAWd2NAANySTirxbzF+b/mXzlq0/lL8noUvJ4j6eq+ cJh/uPsgTQ+iSCJn8DQj+UMNwqyz8tfyc8veSjLqUkkmtea7yral5jvfjuZWb7QTkW9NPYGp7k4q z7FXYq7FXYq7FXYq7FVk88NvBJPO6xQxKXlkchVVVFWZiegAxV4f+X8c35q/mZP+ZF7G3+EPLjSW Hky3kBAmmBpNfcT/AML70HVMVe54q7FXYq7FUDrmiaZrmj3mj6pAtzp99E0FzA3RkYUPyI6g9jir x78q9d1P8vPNZ/KLzXcNLZtyl8j61LstzbEk/VGboJI/2R/sRt6dVXt+KuxV2KuxV2KuxV2KuxVh v5kflR5U8/WMceqxNb6na0bTdZtT6d3bOp5KUkHVeW/E7d9jQhV5/pn5ledvywvoPL/5rhtQ0CVh DpPnq2RmRv5UvkHJlfj+112/b3fFXtlneWl7aQ3lnPHc2lwgkguImDxujCqsrLUEEdxiqtirsVSD zr568r+S9Fk1jzFfJZ2q1ESH4pZnptHDGPidj7dOpoN8VeSR6P8AmL+dki3OvC48oflkW52+jIeG o6mg+y1w3+642606eAbaTFXtPl/y7ofl3SYNI0Oyi0/TrYUit4RRR4knqzHuzEk98VTHFXYq7FXY q7FXYq7FXYq8Q/NTXdT/ADD81j8ovKlw0VmvGXzxrUW621sCD9UVuhkk/aH+xO3qUVew6HommaHo 9no+lwLbafYxLBbQL0VFFB8yepPc4qjsVdirsVdirsVYf+aP5a6T5/8ALh0y7c2uoWzfWNI1WOvr Wlyv2ZEIKmn8y13+YBCrGfyk/M/VLu/m/L/z2gsfzA0laGu0eowKKrc27dGYoOTgf6wH2gqr1bFX Yq7FXYq7FXYq7FXYqhtT0zTtUsJ9P1K2jvLG5Qx3FtMoeN0PUMrVBxV4nd+RPPv5R3c2r/lx6mve S3Yzaj5JndnmhB3eSwkPJq/5O58Q/ZV6V+Xn5neU/PulG+0K5rND8N7p01EurZ/5ZY61G/RhVT2O Ksd/MT867PQ9SHlbynZN5o89XFVh0i2NY7c/78u5Bsir1K1B8So+LFUB5J/JS8n1qPzp+Z16vmTz 
eaNa2pH+4/TwN1jgi2VmX+YildwOXxlV65irsVdirsVdirsVdirsVdiryr83PzP1W0v4fIHkRBfe f9XWi03i063YVa5nborBN0B/1iPshlWS/ld+Wuk+QPLg0y0c3WoXLfWNX1WSvrXdy32pHJLGn8q1 2+ZJKrMMVdirsVdirsVdirsVYH+av5VWXnayt7yzuDpPm3ST62ha7DVZIZFPII5XdomPUduo7gqp L+WH5t393qj+RPzAhXSfzAsRShosGoxj7NxbMKIWYblF9yvdVVerYq7FXYq7FXYq7FXYq7FXYq8w /ML8idK8xasPMfl3Up/KPm01SfWdOqpnjccZBNGjR8mK/thgfGu1FWRfl1+V3lPyDphs9Etybqeh v9TnIe6uX68pZPn0UUUeGKstxV2KuxV2KuxV2KuxV2KuxV5V+Z/5uX9pqqeRPIEK6v5/vhTiKNb6 dEftT3LH4AyjcIfYt2VlU5/Kr8qrLyTZXF5eXB1bzbqx9bXddmq0k0jHkUQtusSnoO/U9gFWeYq7 FXYq7FXYq7FXYq7FXYqw/wDMr8rvLnn/AElLTUw1tqFqfU0rV7f4bm0mqCHjbwqo5L39jQhVgGhf mp5r/LzU7fyp+bo5WcrelovniJSba5UfZW7oP3cgHVvpbb94VXtkE8FxCk8EiywyqHjlQhlZWFQy sNiDiq/FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYqsnngt4XnnkWKGJS8krkKqqoqWZjsAM VeJ67+anmv8AMPU7jyp+UQ42cTelrXniVSLa2U/aW0qP3khHRvpXb94FWf8A5a/ld5c8gaS9ppga 51C6Pqarq9x8VzdzVJLyN4VY8V7e5qSqzDFXYq7FXYq7FXYq7FXYq7FXYq7FUDreh6PrmmT6XrFn Ffafcrxntp1DIw+R6EdiNxirxqT8v/zM/KqaS9/LedvMflDkZLjyZfyEzQgmrfUZj/xHr4hzirNf y+/OvyT50k+oQTPpfmKMlLnQNRHoXiOo+IKrf3lP8nfxAxVn2KuxV2KuxV2KuxV2KuxV2KuxV2Ku xV2KuxVgP5g/nX5J8lyfUJ5n1TzFIQltoGnD17x3YfCGVf7uv+Vv4A4qwqP8v/zM/NWaO9/MidvL nlDkJLfyZYSETTAGq/Xph/xHr4BDir2XRND0fQ9Mg0vR7OKx0+2XjBbQKFRR8h1J7k7nFUdirsVd irsVdirsVdirsVdirsVdirsVdirsVYZ5+/KHyJ55jDa1YBNRSn1fWLQiC9iK/ZKzAHlx7Bwy+2Ks G/Rf/OQX5eEnSrqL8xvLUe62d83oavGgHRZjUSU8SXJ7KMVTny7/AM5Hfl5qF3+jNee48o64tBNp 2uRG14t0P71v3dK/zlT7Yq9Otbq1u7dLm1mS4t5RyimiYOjDxVlJBxVVxV2KuxV2KuxV2KuxV2Kq V1dWtpbvc3UyW9vEOUs0rBEUeLMxAGKvMfMX/OR35eafd/ozQXuPN2uNUQ6docRuuTdB+9X93Sv8 hY+2KpN+i/8AnIL8wyDqt1F+XPlqTdrOxb19XkQjo0woI6+IKEd1OKs58g/lD5E8jRltFsA+ovX6 xrF2RPeylvtFpiBx5dwgVfbFWZ4q7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FUq8weV PLPmO1+qa9pdrqduPspdRJLxJ7oWBKn3GKvM7n/nGrQdPne78i+YNX8mXTHl6VlcvLaE/wCXBI3J /kZKYqs/Rn/OUPl9SLTVtC842ybAX0L2N0yjpT0fTir7s5xVd/yuD82tLATzD+VOoMVpzn0i6ivw 21SVjiViPlzxVsf85OeVrf4dX8s+ZdIdaep9b00qq706rIxpy26Yq2P+crvyaWq3GoXdtKPtQy2V 
wHHcVCq3XFXH/nK78mmotvqF3cyn7MMVlcFz3NAyr0xVo/8AOTfle4FNI8seZtXZv7v6pppZW7dW kU05bdMVa/5XB+bWqAp5e/KnUFLV4T6vdRWAXaoLRyqpPy54qt/Rn/OUPmBQLvVtC8nWz7EWML31 0qnrX1vUir7q4xVfbf8AONWg6hOl3568wav5zulPL0r25eK0B/yII25J8hJTFXpnl/yp5Z8uWv1T QdLtdMtz9pLWJIuRHdyoBY+5xVNcVdirsVdirsVdirsVdirsVdir/9k= + + + + uuid:C1BCCE1871B8DB11993190FCD52B4E9F + xmp.did:b642e7ad-f5a4-402c-85dc-33696164c53c + uuid:616ae305-b0e4-9c4b-9f0f-c10567351d6e + proof:pdf + + xmp.iid:8d064694-ac3c-43d5-9729-b5e0c42cbd93 + xmp.did:8d064694-ac3c-43d5-9729-b5e0c42cbd93 + uuid:C1BCCE1871B8DB11993190FCD52B4E9F + default + + + + + saved + xmp.iid:44458260-2538-47b9-896d-dda230f8f7e2 + 2019-11-10T14:18:21+10:30 + Adobe Illustrator 24.0 (Macintosh) + / + + + saved + xmp.iid:b642e7ad-f5a4-402c-85dc-33696164c53c + 2019-11-10T14:22:17+10:30 + Adobe Illustrator 24.0 (Macintosh) + / + + + + Mobile + AIRobin + Document + 1 + False + False + + 40.000000 + 40.000000 + Millimeters + + + + Cyan + Magenta + Yellow + Black + + + + + + Default Swatch Group + 0 + + + + White + RGB + PROCESS + 255 + 255 + 255 + + + Black + RGB + PROCESS + 0 + 0 + 0 + + + RGB Red + RGB + PROCESS + 255 + 0 + 0 + + + RGB Yellow + RGB + PROCESS + 255 + 255 + 0 + + + RGB Green + RGB + PROCESS + 0 + 255 + 0 + + + RGB Cyan + RGB + PROCESS + 0 + 255 + 255 + + + RGB Blue + RGB + PROCESS + 0 + 0 + 255 + + + RGB Magenta + RGB + PROCESS + 255 + 0 + 255 + + + R=193 G=39 B=45 + RGB + PROCESS + 193 + 39 + 45 + + + R=237 G=28 B=36 + RGB + PROCESS + 237 + 28 + 36 + + + R=241 G=90 B=36 + RGB + PROCESS + 241 + 90 + 36 + + + R=247 G=147 B=30 + RGB + PROCESS + 247 + 147 + 30 + + + R=251 G=176 B=59 + RGB + PROCESS + 251 + 176 + 59 + + + R=252 G=238 B=33 + RGB + PROCESS + 252 + 238 + 33 + + + R=217 G=224 B=33 + RGB + PROCESS + 217 + 224 + 33 + + + R=140 G=198 B=63 + RGB + PROCESS + 140 + 198 + 63 + + + R=57 G=181 B=74 + RGB + PROCESS + 57 + 181 + 74 + + + R=0 G=146 B=69 + RGB + PROCESS + 0 + 146 + 69 + + + R=0 G=104 B=55 + RGB + PROCESS + 0 + 104 + 55 + + + R=34 
G=181 B=115 + RGB + PROCESS + 34 + 181 + 115 + + + R=0 G=169 B=157 + RGB + PROCESS + 0 + 169 + 157 + + + R=41 G=171 B=226 + RGB + PROCESS + 41 + 171 + 226 + + + R=0 G=113 B=188 + RGB + PROCESS + 0 + 113 + 188 + + + R=46 G=49 B=146 + RGB + PROCESS + 46 + 49 + 146 + + + R=27 G=20 B=100 + RGB + PROCESS + 27 + 20 + 100 + + + R=102 G=45 B=145 + RGB + PROCESS + 102 + 45 + 145 + + + R=147 G=39 B=143 + RGB + PROCESS + 147 + 39 + 143 + + + R=158 G=0 B=93 + RGB + PROCESS + 158 + 0 + 93 + + + R=212 G=20 B=90 + RGB + PROCESS + 212 + 20 + 90 + + + R=237 G=30 B=121 + RGB + PROCESS + 237 + 30 + 121 + + + R=199 G=178 B=153 + RGB + PROCESS + 199 + 178 + 153 + + + R=153 G=134 B=117 + RGB + PROCESS + 153 + 134 + 117 + + + R=115 G=99 B=87 + RGB + PROCESS + 115 + 99 + 87 + + + R=83 G=71 B=65 + RGB + PROCESS + 83 + 71 + 65 + + + R=198 G=156 B=109 + RGB + PROCESS + 198 + 156 + 109 + + + R=166 G=124 B=82 + RGB + PROCESS + 166 + 124 + 82 + + + R=140 G=98 B=57 + RGB + PROCESS + 140 + 98 + 57 + + + R=117 G=76 B=36 + RGB + PROCESS + 117 + 76 + 36 + + + R=96 G=56 B=19 + RGB + PROCESS + 96 + 56 + 19 + + + R=66 G=33 B=11 + RGB + PROCESS + 66 + 33 + 11 + + + + + + Grays + 1 + + + + R=0 G=0 B=0 + RGB + PROCESS + 0 + 0 + 0 + + + R=26 G=26 B=26 + RGB + PROCESS + 26 + 26 + 26 + + + R=51 G=51 B=51 + RGB + PROCESS + 51 + 51 + 51 + + + R=77 G=77 B=77 + RGB + PROCESS + 77 + 77 + 77 + + + R=102 G=102 B=102 + RGB + PROCESS + 102 + 102 + 102 + + + R=128 G=128 B=128 + RGB + PROCESS + 128 + 128 + 128 + + + R=153 G=153 B=153 + RGB + PROCESS + 153 + 153 + 153 + + + R=179 G=179 B=179 + RGB + PROCESS + 179 + 179 + 179 + + + R=204 G=204 B=204 + RGB + PROCESS + 204 + 204 + 204 + + + R=230 G=230 B=230 + RGB + PROCESS + 230 + 230 + 230 + + + R=242 G=242 B=242 + RGB + PROCESS + 242 + 242 + 242 + + + + + + Mobile Color Group + 1 + + + + R=136 G=168 B=13 + RGB + PROCESS + 136 + 168 + 13 + + + R=127 G=71 B=221 + RGB + PROCESS + 127 + 71 + 221 + + + R=251 G=174 B=23 + RGB + PROCESS + 251 + 174 + 23 + + + + + + + Adobe PDF 
library 15.00 + 21.0.0 + + + + + + + + + + + + + + + + + + + + + + + + + endstream endobj 3 0 obj <> endobj 5 0 obj <>/Resources<>/ExtGState<>/Properties<>>>/Thumb 26 0 R/TrimBox[0.0 0.0 113.386 113.386]/Type/Page>> endobj 23 0 obj <>stream +H\A Es0@iV4iXxBMOdhѶ!f~LP瀪 }@Pk+( +| +0D|0cnQgQgCW'8>stream +8;U<+M[]YO"@MRki*5C.+XQYq'd23YX/OJEV\ob8mfc"^,,I'qX6AAN?5:65fCt3= +3Vjbf!!<3T6"Q\~> endstream endobj 27 0 obj [/Indexed/DeviceRGB 255 28 0 R] endobj 28 0 obj <>stream +8;X]O>EqN@%''O_@%e@?J;%+8(9e>X=MR6S?i^YgA3=].HDXF.R$lIL@"pJ+EP(%0 +b]6ajmNZn*!='OQZeQ^Y*,=]?C.B+\Ulg9dhD*"iC[;*=3`oP1[!S^)?1)IZ4dup` +E1r!/,*0[*9.aFIR2&b-C#soRZ7Dl%MLY\.?d>Mn +6%Q2oYfNRF$$+ON<+]RUJmC0InDZ4OTs0S!saG>GGKUlQ*Q?45:CI&4J'_2j$XKrcYp0n+Xl_nU*O( +l[$6Nn+Z_Nq0]s7hs]`XX1nZ8&94a\~> endstream endobj 21 0 obj <> endobj 29 0 obj [/View/Design] endobj 30 0 obj <>>> endobj 25 0 obj <> endobj 24 0 obj [/ICCBased 31 0 R] endobj 31 0 obj <>stream +HyTSwoɞc [5laQIBHADED2mtFOE.c}08׎8GNg9w߽'0 ֠Jb  + 2y.-;!KZ ^i"L0- @8(r;q7Ly&Qq4j|9 +V)gB0iW8#8wթ8_٥ʨQQj@&A)/g>'Kt;\ ӥ$պFZUn(4T%)뫔0C&Zi8bxEB;Pӓ̹A om?W= +x-[0}y)7ta>jT7@tܛ`q2ʀ&6ZLĄ?_yxg)˔zçLU*uSkSeO4?׸c. R ߁-25 S>ӣVd`rn~Y&+`;A4 A9=-tl`;~p Gp| [`L`< "A YA+Cb(R,*T2B- +ꇆnQt}MA0alSx k&^>0|>_',G!"F$H:R!zFQd?r 9\A&G rQ hE]a4zBgE#H *B=0HIpp0MxJ$D1D, VĭKĻYdE"EI2EBGt4MzNr!YK ?%_&#(0J:EAiQ(()ӔWT6U@P+!~mD eԴ!hӦh/']B/ҏӿ?a0nhF!X8܌kc&5S6lIa2cKMA!E#ƒdV(kel }}Cq9 +N')].uJr + wG xR^[oƜchg`>b$*~ :Eb~,m,-ݖ,Y¬*6X[ݱF=3뭷Y~dó ti zf6~`{v.Ng#{}}jc1X6fm;'_9 r:8q:˜O:ϸ8uJqnv=MmR 4 +n3ܣkGݯz=[==<=GTB(/S,]6*-W:#7*e^YDY}UjAyT`#D="b{ų+ʯ:!kJ4Gmt}uC%K7YVfFY .=b?SƕƩȺy چ k5%4m7lqlioZlG+Zz͹mzy]?uuw|"űNwW&e֥ﺱ*|j5kyݭǯg^ykEklD_p߶7Dmo꿻1ml{Mś nLl<9O[$h՛BdҞ@iءG&vVǥ8nRĩ7u\ЭD-u`ֲK³8%yhYѹJº;.! 
+zpg_XQKFAǿ=ȼ:ɹ8ʷ6˶5̵5͵6ζ7ϸ9к<Ѿ?DINU\dlvۀ܊ݖޢ)߯6DScs 2F[p(@Xr4Pm8Ww)Km endstream endobj 7 0 obj <> endobj 16 0 obj <> endobj 17 0 obj <>stream +%!PS-Adobe-3.0 %%Creator: Adobe Illustrator(R) 24.0 %%AI8_CreatorVersion: 24.0.0 %%For: (Michael Pivato) () %%Title: (ControllerSlideIcon.ai) %%CreationDate: 10/11/19 2:22 pm %%Canvassize: 16383 %%BoundingBox: 0 -114 115 0 %%HiResBoundingBox: 0.000000000000909 -114 114.000000000002 -0.000000000001819 %%DocumentProcessColors: Cyan Magenta Yellow Black %AI5_FileFormat 14.0 %AI12_BuildNumber: 332 %AI3_ColorUsage: Color %AI7_ImageSettings: 0 %%RGBProcessColor: 0 0 0 ([Registration]) %AI3_Cropmarks: 0 -113.385826771701 113.385826771701 0 %AI3_TemplateBox: 57.5 -57.5 57.5 -57.5 %AI3_TileBox: -249.30708661415 -452.69291338585 362.69291338585 339.30708661415 %AI3_DocumentPreview: None %AI5_ArtSize: 14400 14400 %AI5_RulerUnits: 1 %AI9_ColorModel: 1 %AI5_ArtFlags: 0 0 0 1 0 0 1 0 0 %AI5_TargetResolution: 800 %AI5_NumLayers: 1 %AI9_OpenToView: -48 -2.38722294654326 7.67 1908 976 18 0 0 6 43 0 0 0 1 1 0 1 1 0 0 %AI5_OpenViewLayers: 7 %%PageOrigin:-31 -161 %AI7_GridSettings: 72 8 72 8 1 0 0.800000011920929 0.800000011920929 0.800000011920929 0.899999976158142 0.899999976158142 0.899999976158142 %AI9_Flatten: 1 %AI12_CMSettings: 00.MS %%EndComments endstream endobj 18 0 obj <>stream +%AI24_ZStandard_Data(/X ~)& +VbH !LgYV@ ,̺k5g㏎ÝSqea2KGQd,aϑ$,yl]{;(%/uSM9%ٴhhMat,ijʂ.vsl/ǔM]7RJ(, mxȝf,fnb`7ˑC4c~UVb63_fYneeYe_谸yqv{&35*S]g,d9HN  8d {Q4rhA4-2zr^^h6ʍybUVGNbs ]gC!Ks#9Wӻ I.w)~%#Z\nCY]9EVo_u OAUn<39zN觚{aR&AaAU?8hrvĄzsP QNv(aQh{ÄR^ U#ֻhXXR>GXW6%FE=GxdݝVd'Z, +B,5SʔsQfn 6s[h3w mƇ +q]C$*A.(kJgscR04)ΆQ\my1QD}fե7RP1/~;DTȃ^RC4%ec1J~>饱 ]&Mc1NE,hd7pl|4a8?nV\&D*,RrvА2J铅w}88Tq,NN#dPQoِaqvSvU dh'4v)shyՓBA; .8`\p@@ DB >  @@ +bp &8 0LP`pA!@BX`AB +@ + +0 +H,!)N^T#FǢCs <@(H84qAxdDƬ:ӽ#T @  Š` =:;/wh7ap))_`̇a>8љw1y4r̸!Ň)}ZI|ͅZst*jb,0 DA8(pe(B;t8$2', +d V]DC9470 C#Q +P#3p g}t~AxPPH Æ, !Ej^t!bMĮ9pp`GP + YZ:# + e(`@(00404 m# * ֡5twF v" @ `i7 ;P CcJRHA* `Ep( QCbAa  
hx~ | Q,q +uaHPÂ׭ &u8gESa04, lApDQ AEq-\'(JTQ-iDSNE"\,kG'RP+9Ƅxu&aĐwΐ֒ $lܗ* !ňUe8Vw4$2HG ܝ 9p ACp>"P9t:A"! Zp' ZBa `2 C1 lj7$F +:a!q8$?7w-Q0MQS@t`CBFSڡh* pa"V5DjQCb>$X)>~heÂ9 |ۈ <(­FD!fas@if8 eu6HP4Ja!~`HY pZDJAGED$!QxlVj‘8$DAr9)!i "(@"DuHQW%wݐΙm(p8Wˬā)Ђ5fgbA81,w+4\<H($dHLpBBAAaAAAAЅYZ/  + =p p ήz=5X1(%,qlggVFF66lZ8$*QBmeUUS%%@ @ @ JN&QC54 A 3 `@ p2 ааXp@ 0 ],4(4(0(,(,((($ +B +6SQ(BC!H$O8q%(1LX@, #8!CDB#~7a!al.q]qp o5TC5PCaaApcb2 {lf p+B)(PA + DKT I4  +ˆB C>`m3/|vsS0j^.))JɈPZ04ѷ:2082HB.&$<$Jj&F2C0c;4W;χp[8cc7Mx\ٶ +X ^p,9マ8 +W8@0#B`@0  IH&&1JT%p DH$ Bb!`H,$ DC!! + aPC +8 IX|AB + dYbx<L@2Rm)ZZW}M4Q*t ehA3әx>A(0 4Pih )1P5jQQU]]^D%JjJLVn-؂5X)\5|`9szn_8."(b(D! qHɑ 9) !9nDS;ÌqH!^@L@A` +&  +$0a!UXEQbB +:ي/%CeG[Slajȡc?IhRxU(eݍC5Eӻ5R (&< B pDx&, B  AU8A +D@@ƒ ń !#4Ah6p +L008 d@a & F D@ d A2 4@D18 l * @` @H XP >Lx Dp6)b2 $d L0AB,@` pF + 8`"6x@ |p 8 +$LX@"@A8  "@0" P +@B&L =) Ml˝}ȆzlS]NZt_Hfj;7XU[2^NLKs|B3Nk";3dXʬBu dbhw61wyG62g#l:2 ,urmFA79Kϵ S1&EL KivdcwSQz}DK^cV>%dLtQCʙ +ⲓh,݊ȔjnUd6/<:UܜldE*_]jm> Y|6y zQm̶ٰVQuek*7^#bfV>d5*>Wl92EކVój5rUk˙:*{gq|M8Eﯘh܅uL*QUEixxr23OFWG"4^/͙ۨI38úbȿֺ[y64ʰ.1z܉j36N-?٫B3e7BC:7h&fA<]ɜ)1^8*:Q8 W7Cnzxۊmm1"˵\oT xlWfϝSeS:d4հgԔ4HUrk(UWHfBV^ŋh+oJRgIcTǢ[iљDu!f1v%]ddz]gt~saM(̸+j=:]T?*+2ϮqYlU{v[Rw*v^*VLl*~7K8NuDS?_EِȆ i/^cTX׹z5tl&6gDtz wCJhk%Pln(ռZݸ5,ӢƴhlnD5-<{sX5%:ɹ:d1:5Z{2h{hjtIH#2Kjw +r.R$] 2**^w>kTewܥkDFEѵ3?sY :N4ys{Z)ί6>dXO:g59Sy{TuS\m&v ka*n4TD:|2ߵL]T2'#&ݗ98ZƏ +jkaMlG>WMT(ەFv++*2K\Y7(]jvTU(Ӛ:DI)&laJ {f|~Wɯ.zrcCVnXTƒKm,siy[{RrO{Im}͵qw4aK$u4ɈH+7 ^#hNHxwUSI+&F +yDS~qc].Nb/"ӷ'T\QDwI i4/uvk58daL GJRZ #rՠNFoX9-.w)<m)2zNDtr>ס!juSvvKײq;"fF'%c:ԅb̢uFIQr !i9kNga"fO51fߪ+vP]7hI\BSxɨzK)Nl*)BgMQ1ǂ_sT希M7*7U1CXUݔ:33ugб0+m_] !yANrDdWCȉx쏸#G="L=#bSq>(I)g cĢS"1ĢgCqsqG6Z{kQˠUH$P  TE!@0GPnSkydqcDd?չHQMg4F7baNB8f8ѮDĒyvHX/RȴVL{L'3-}(*ޒy: Dv$0Q1/k8fTRsC +GJwv.Ʋ{њ+]J"aEEcmҎ24p.smDEs;Yyئ6Y2mVƢl! 
+E{LjU2.}XxJu()/2.]MZm2TnyO;[醰{j:8T14DrR1TFL&mg+_l^V +8rcJŝF?;'apAuV2[ԉi)OZ4NsAgzWwsⷽu?y{ka56:uVF䝲A3UZO(ș*#-NÞ 7TW|,4mY̜5SRFZ~\{o&GU>rmE KOuc22⪨T7XFz.9UlwF/#,ћ#fch|f9jz3;yvYЦnt1 1>g>0i9cP4N|h#'ѹ.nNR G ]/25D"! $gԖBS՛*茈q DjxBt0[I[|5jW83WqﻇŻ"cf +]f>FbE'i47}lƓFgįA*!i-SqiQ33ŵhG7'ϼ1Uy]ĵM/CE&m6pQJG2b  9d]+%Ŭ.fdle#~7Sɸyu:Lեc9GCETUc|e4t>Xf&_/׃xop3|_ )Έ!?e5l.CD +r#ΥJS,l*sCbqV=u8өvA7xX2 }n:8UxsTc + +}>lW{Q⬌jB8`n0C6|2<s0}Q~ORE<4g]J#4~D<ɥInz1A-i(E՞UɪukPB3gBe^>1&sESgk Y3_$wc^Y#C>r KIykW7!:Mbl!դ$vai{]KŎU4}RS7ޑt0s"o:sN{yhڌH./DU|ve˄Wsfi,*͇݅'MI;T\uSYu:4** \]kH77;9YuqSnӓ*Bi1nO)eraWj*&,\96|WQhǯy$RpEC'UFShT:ia+X.u~f+sSm#^I#2Ҽ/D`mבNiڙD܍XȜ,'n[v>ͩcgk6UKfgjrv9Οav&"d?#53X (Tpde8O܈|áed\: FdT؝K3hHk'ߴDNsU7tU$GMNhI䝣1+gTfE (cs̤Ӕ#$VƜc"ǎK +*>≊D"LJEWw2hZW+Hv/=5W MjI:꘦JQIoCem:݇α;^WG5XRYسdH̦ղcUYH%$4_j,a`^bv^_6ZW+v|yIsb{e(̮e$%qli7\F3NUkR<)5&y&%Yh&sye: +TiTsQUQTEɬEUajֈN ++]Mi>iAJٔY}2Sd)MTc2z&I)3֊2RcgmRX̥~u+n'43uYŇHX]I.$֥aiad:9VEaYǥ\Hq8Ux.عeebc(ER$hTr3ǧW^Dv._ϧoeV1{;q&sH^wf*sHA%?2H5ߨ;Fcutl63.ϥ}HNjg'e#5$.6sqGA7rK)q~J!$RS73%}wLRWohT汙y1(T,*I'E/Rh6;}#^m1+=m5cQ~F=6{W5.K]<+yJ擡)͘KB1vcuIhc7›!ըө;1HQj6OE>"ĻЇyZo +럊5!urPMMS-o#K̤c>S|jGxN(Oۙ՘&9{!G" W: +[ uܧ>~u<ݝcj5&T6$wUZfSϰP N1}B ]d{~SCո4sXYhcԔ6T~Sװ|? 
Z^d!Rj~ДCchE^͌;D<^RTJ;%dc)ayVlfdB􎺞c2tZM:U\̽'}]]MݺE;Ċf,)>hnXƢɾz]zKXDډÈgEˆݵ=w+|W7f's#bOI eTI**(w5ޭVJ&c֒FD;w'kDΤZlՄVr7'$*ݣ~/;;%I7w{Ƽv;?]tut#yM>M-t" z|"GZ<sVf Uar1G2=Iv*ioߑ#yu~;~nF"܍#/dQOȒɽ܋]0'hfnmʿƪ.Gv;Inoɱ4*]%+ -&G"gXqy6GV;IW="dۢ{ň4ޖC7wfRN׭$cJY+w$͌$ӛBސPMK}+~:,s2#M+#t'RH;H-ܨ7L=9?IyI.24HBæ7UHI,1;é#39%%:,gXƨwâ><'zZ|$Mk2"Ӻ,Oj"|)ьeyT\nG9GDu'QB)=׌xD7!3FĪ#]*: Γ#q۩tX&7lhCT<4>9ΣMJh71."-;ln5=+6\C?.'^HjBN#nRlȳYOqN" b؋ZXts#cÈ+ޘrlcʯe:Զ"^}GЦC +i{qӦlFvt'%#55GR"IzB^&KneÆuTΔZ(CZ ePk}Z*U{ee#:uY3lcOKf7Y7HaxUuQDΌC]ҳ[|^ƢbzuhhEj8󙐔˲|.Ԑ*%>ۦLj̙f"Oxc "^9$y2~>_8W ,n6v~w+\ND>5+1e9oEWǻ cEUgxʌDj?C$nmo'NQD7Sx#0~l}jSSKZ'#eQ0)EGUI;oWt͓zHrJ6b6EW3#Θ4fFGoԉOjHvCBSGF3rRsJCA*+Qw^äU4G/ŨZ~L6赚ѭѯCވ6JsY{%zjbiir mծzqJAVfts2w=#m둅W\"f \ܚ12Z, Kxg8CFi'd:iubݍ~s:Ŏu5'*,ZTt&UR]DheyULRJ5r}oU$Y:H^xTO8IM&[9Uj\E߼dջuNݪ~jaJ}kn+2Y^<{Ob _ɕͪ2 +QŎXiiYBZ \)5^93SeT$O:"oΥ}z5\CkqssW7:5¾h6iωh^Zͫ#4T*i7ҔgNecj殌إcƈY\R4jױ@b14` Q V@ H$ c +10 ̟Jw ! 79*W&#/ cs0 Tp+W{' N $:f0RNSn D>!:PN\ ʘ-LD? g%]8'C^\|0/,%Lxx+0Y1a06Ț3^@aM>R}YpA) hFaeraka@$! ntw a`.5 $@ TAENdYaoyrt􀆡O쟯lqXa\E1mRÚA +oXaC(` @0H|&Hfߞ&AZd/aǞ 5[d$ Ban4{"}RѮ0bsA0ᖆ1w#4T +Oa`ɐ]i&j0,d8laJpzZRKC " @]`4" LKONj0@@D||aoyPg\aXifШTsx\a"# +źVa=zê>eD5dQ\a`Z/VB0)7{eD'ÀĦ g5 +ʆ0l`(êa+OE5 տ5_5 {"j ((pJS2) ~Žx  O2O$Wa+G-OE-a00h`yX4pgJ'ꌂI, Q@0[Q, b-SL@~5+ E3u TVo B2 !r, 0ܿ`DoU}Ob& RdiZז &}W s8`З,~B`%֌j r$UԵ3$g6[?[&TLR}:esڄ}1vgz@ 2R{mi +WsvuHǚꇃI Yٰk˶\0,B Q^0|~t - Kڍxx`@S ,P6UdCg Dq~Ft/| ^pv&,? $ }!SF_~ C/TsA>ؚ.0gҬ &vhe-s8:ʇ%2n8 a݂LF/+Hc}/1]Pгhf8ZP#מ/P"쀧V ՟A~5] *i1/eVpȌ/0.|%E,_`T%_(AjR "¸ғ/lbR{A4nw~MRs/z/8^]Wi ~ͽ ^\Z3)d<%nm/Hճ'l/ 3,^f Bs#V +|a7ܢ!\ 1xh f ;H)-_q"|!kCD[mrp=Q0I*e%4s (/ ^yy,\ă7 +!jwh/ˮ.BKZN ]k\6ݍP&_X[u '_e8{AJM$@Fry)\Q|B 81J2R`$! `ex| @s +3/7Sy_!ҼH6I1! ;6=r$( ӽλW𥣨@er+/p!+% \R 2t >"NR;VBIJI_hBK1-<$@MT .1L-KQH Ń 8Z@ztۂJHk8)\*2~-&3ȺTB{+$58uozZ@ L&[PbWP^!+GYPWv#|,ݸ hb wzy4%z=+f`i. 
8G^A^?aW(2v]8zvP%#Z=qb+jb$ro0%/KYQdd3&ʁ)lF+rF0࿅22 X-^p׫d}WbdaKHئW:8m+#`t\BeX@)Ɇ !c$g m+51膘Ic$t{k&h`%"MH1: FzaO2\.{rΝ7RoiPlM #.Ft؉ Fm#;d_Eddz(h7"jQCִ@/{Q6At>Y/2Z$TEx~ᩓ"VLI[+64G/"aNY{7/{HN5_$j0sw`dVA`=30w4ؓ ,09XV΃A*q`^E.EJDp+"h4M"i5(qZ YGbls- +@)Zoh`\ELnҬ+Bq0 OEr\[I%ֲ"z%W]g{0lD ~?IE/Quy0.6Z\ޜN[=θ0<;oM\Ĵ^dTcDƕQI=Z#ψ8a#FZfGm8戮&KIuD"nՐ~ЍZ<&5WC,d:JqJf/$`\PE(pHMI"(A_a +] T} JJrxV$B4φ<~ъ(2Ht$]$`$eI"7,`ODޚPrw!,%v"JKėh mO-%|DÅFQD|K,VҸ=,ٸ,P0-i_4,1-qm]1_С{K@bd"Q_Ӳ@1/A忄L,_7l.I&9KfB޿>/01E8>Sj G&tN&wQ&DvD {adBe&v SA}n{b'LaUDK"`5qA| &ۇsȸ`rܨ&O -__0ٹ˯&KLJOX0iϢ`jØ`4D`؜6K}(Cu=b%d9W6WkI() (Pu3W߇ֿW#d*%d3:Oۗ:8ncIsK0~} ++GixK g<4N') +;xv`.QgBtp]B:BtݪDCH`oN +<Ēk (ۣT- BSJ%qк%Cs Vgj4Dz%@Ad/ӭu.A/tɋd@3%'`Rt zұ KS_Mdo<pʏL,2RĭI%-%0XLNnH=cQB K:I_L0dtnv8A#tIݍ ]0X)D/OwB%]ٷtHgH֒%$ĝlO +-q7oK(|K(ҏ%)诨[b=hyo f flJ*"f@D%O٦VB囮ߒM]:[2'vJ!$*+oIͭ-Su V˷/ޅNԈ q&0;gɟ#{?N9KXn}_[fg@IK6ɹ3 RXPrĒ>šD;ܸ% tR d6eŒ8rMK};cUb2Vi4qe Dؒ6oA@5`K,"kн.4Öa^yƖzƖ*x?>TڦkI 5Z,:_ZM:%36KHCYҨ ydevfMQ_lKNud~ˡ +ge+!zؘV"8[T+A$J W=d[ٷ7q%B$PķǕHKFrE;•\Jƛ|&*@U%XRH~xA-WH|Εp/E@ɕ@ZE˕ }T Օ_p͕`a[BU~/oTpI&cq \<;*2I\м6@x".Av]R-Fx]P|i.iA%HE"qɜQFG\rLQqlZ\`D#]jKOMK d垭NLlLA*UUt/. 
Ax.9b3Bt (P!KHNq"K$ʑMoVkw&7_OUb] +YA.n"F^xB9sP%䢄z DJz_) .4%Cɣ +cKmَ;:PxL (0DO#gq`1dAƳ/Q!3` WV"a,9!ahBWH+֚"2SKU@ ^;&_2T) +k>GDCa*,DYabQf o&8 B0j 1`D +r,  0Ihna^Pvޟ%z~)`*!WHQٗ0ˎI : hu2:J)W3puNgdRfz;;a" +{6- 0pwTD?tn&-HWMXg'MԍRdy/U1jIȿ<&`匚t Y(x$Q^ Sj  0L`ښo6InoQjIDB\99I̘r:OLח;W@$p۞vӔrO:s-`uɥsto{l.YܓPeR͹'> s!O,!'?~b1'ȅq(&/RC`UlOJaǥB9|rH['R> |/F哌u{zw !9$ccoS*>1T_'7l}qԖ,.{R$ ؍=I' ?0'|Sp{b]18Dٞyd&ڞxįqOL[;8#DO=[ 'z%OJpz>qx [|3 :,'5 ~:E褧O t'7X瓅 ը!6~='S&:)cj]Iҁp+?q8X)NMmdA?ѩOje "'an,O'@ogP'07'8fPdb~'(o4edbd U]O-}=4}zL[?N}'yݕ(H?gtI*˽'%<#,!30{#KA'bQ y2 }Pdo> Vťv Az0OݳY2|Yxx2$Pcxr(y"~~U=!)+{n,t$mO}'ZXPtLA0uB%D`()P8C2*DY](>I(zA,Fk=W ?%JSmy]rQ hd(cSFƺQl6U(NUXcL@ 9K"/OIW@JixJ1[E)Rhj)1WHkRg +l_x^Ɵ8'Jmm@ej+6gc@e3M^e"v^]jqT 5*XcJm=.x0GQP7jNHi p3B󿛇nͤmLl=UV@˄e +M09mdI'e2N@jmC;0xp5& +p0f,!@0躔%c;m2Ftm~#_n=j%ieh@Xj< +'Aֵ󥅸5U*au(g؃>`RB&FUu;_9&zޅ%|֭32lV݀13q)&D)>)^n7{@̗p Jq'IXwzw.V&7izB|^EZ~ʤ$ + oW"2C$5(p.aF.L +oճ|MXWbq))OZ˯+/:;"K!yܔ`E6pgY K"_H<58${LyOs}Q HfO,Y2Y{,0i +6hGr)N&s#{u obh%TmUQP:)IBk~>|e3yUTMc| {WHZJvy}VH]qȓ<, bEzz3A^J*tXu^CV=8`lJ:ǨĀ qx'M]D4ҶiXJ{$DCt-u?/O7B>'+0dLfgqz' P_&lfz;1ހcH#Vu@+__? 
Ck̳jxrT6^AF8' $g ~ŀhcN8%sRGp> FY3viaaO"gqW}E-e鋃MSNxAe؃5 ,5QWEipq(eOsvx%8Cg հ-.ndH70mj8ǘ{r𰉼-k(I"R:FxC(c]--4uKAʀ5$X -CUeswtK]>/}?Ĕ Yoqƣ}1NKXPA*6HR}־xY)nƼb[>}V T lJc-ּ=/GTo[$g>$kwIk#Z={(%GHou =E rV8aU_hkHl1l+zhhKB!lVB5=iߥ+6P?x%q'>vkmHb)@o z&̅>U%V %uϰ01W'3.E`Y +Rw1Z׏Ohl}F(/dP:]JBM9i-޽A)ia:@ _[|E]!41rW݀K֢v" :|j Z*J9e\x䚈 j[7LqaMh}/]c;eo{GUS?TcV@d8IL֖5\PbV& VCYD} +ćEbSlS +[dGD|qVe/ Ȇ;Oi9/ͪj,T8_nÒܥ1[[ xVIԡޜ`_jՂQ*"Sw +Nol2BȞ ]s?is}@mUˮpl)î 1qPo?-K, */]);MUT0Wj@r{*v{_5J}A0Ú_WbS~#P TojR\^ 0r8prPxGzg]{uǎ G-f\Z^ܡhċ>1IIs>'NRѮ\5!2m#?3!*TGaxeU@`^p_7Xs x|p5G 5gRt/Ozqf:*GD ǃ'd_41׉Si%WB@> *[[1@ۧE +lB%lz`g]:#'{ o ;p Q/ɗnau4Po96u8 \}/ =ן7 lbaM.V8P + ?R0Ye,pvr2 f|Qd5*|e$0.V1t\Jr %]2OaPJ/;(jiE- uqQ[>Ӷ_B YbDB4kQu'zTӀ6xNE@tiIK-ㆦUUU +a@p0VBjzk=B4Kԉ~mESphLMdvL^5ZL9"Sy5$˭2JReŒ MVnO3 kNDQ5 +"/{BLn'hP +ry$"$cQ47Kޕj>O?ĭ<8OyU'dܸ T"%&K)$?/Qc=N39FpKT9Ѕ8 +=VVЍKxV +0KeD0/ u7-|p^OFIўʜ(|kcJ.t80E%mS sĩ)2-z5 ]+Ӕ7w;Uբ^2nEÅí{Ռkt2n:\!zO뱋(f> * TXб9|YIxf@BH9 d h c~ֲyTNXP4#_^J¸oyF=j`lyzEe'%,o(@:v=7Bd9/Z0%1h Lܥ﹕UZZps5{3R1G,kDYzÏ3]OOi,ʙ?ٚX.W-XrY!y~cmr& yrD\{!\I:ۅ4t{}=c[E[: tw ^$\A/s'Ϛ|㖗G8Qj|*ov ,@#Pegcj}NޖPRI>  +E pzK~MLVXj\h|'4>TL( hLMLHڠ,>?ۢ^N@&"?۱(O``3c!x`N ؞4b'21:]##·dpQk%ϫICL8}v"YPT8Zr|/1rl]6ʳ&:NʚN@)Ŝ5]p͙e`]nïc +n)8Z3;e' , ҤhIG-]OsCaL2@_"(&F:F ~no>$f+e!AKk)ez9o!loeZU3YvQS$~,>hw2<^\ҽ)Ƴo9RiK.E ߚדA!Jѷ/Oy>^6bLy{#w*ϋ}COfe=o܅8 7E쓬HjuW(lkw-tacەH4K-5nDF`mWZXk(ЗrJdTG~YvG ,zG'myާ%aݧ~qsQX=Lr9'\22Im2w/RHE(^W0>l5|O ^Kke$nP+ = ۃ8t- g]|J-L)^ ۥ+7hҬ5P&!wם갔UcX7Τ${"-5ݠ~'C q@P""%[U5Rs(-+'UtRqR_5Ǎ/k'+D'Xߊk9K+ 0R\ Fwњtb 5$W#}Ƨ}^B{Pč#R@+0M;#h.s {6 VMt.bq:C$q(-0xw{3Tֹ?d ʞ f}z3<ƸNK>Z}T@l<zA%4HY4t1\p33 J(D8+nģspD^'%_fD-Qq'Cc"l@ZȁoY1 h$,R>-u0}=luGwl#ֱ @7/ y k/9gؑeͭG!w(xsZQ17G,ՠB\l,ؖfzD NZ]',5E5:pHTa8ÇK# (`Oök W|աрT(%[z04^VRHy[ݣP"-da\뵻7z0@SOv +T+gح)cupgf,ӷE8&x&,5*h{Eoˤ":u)y{(Ve/\= [s8/ ݽ$ێC-/hP*dc%kQ'[ιT)нO췮ȅ;RQVuISr0u紌J$lyy;<u,sI%*-@K!S8ޡ1a$|d>!,X<pVu=3,sxsn[GJM<S+cRm*֨D߶;}6$w( U+b<1OB}"c*KGom9J,,:rh䪀lٝa +ˎ$8hdv^BKٷ-*l_yAcH8' >XkQ! +3 V ? +Κ֭wwNNg'ZR:݂Y;6-}K"Ua%=נP@?^!? 
Aw+@eްmէ \`Gwu^-z \:ɧ;WvvSqRg E`5-f +a,|Q?\Y(1U(%a$ ։R@fKe^ܟ+hjPkE:AC_u/hIϾr׽,?' WFu55lba%KW2TO?03~' "ϸ2~\|\pUx;H+oQV>_A3mETuoGL@qL_^?G>&*K(;xרY\Q\ +F4@*p:7+ͽV +^uO tieFlq?9GyD()lnd/ފF(Sc_C'>&*OqZ[hf^E(,v +/-BoχP]#)3_ +'lbn#sf.V!.hqt=f²_ٛÄy:|jZi*'R @|FwK!0;»)UgF؟;$8b .Q(_"7X +)եBď\#ۿJ\WO8, +.;]~ ^ΩAtkoTtFHLc쐚`)ŕe0 ` &7G9|6 c U.wp)\_,* k Y>j*RO ~PQ1rŅnx_$''C$'_\5_;GW /Q!bjꟐM)rj-QPO^9~G%7Si08:|ȭv_!-ԋذ[ xDL&7 +M|3-oR8N:i"&X&CGVQE=+N`!2ASL!?;va%x VeF(>6ԣZmdeoo#] ,V[=UpdﲅCjckqU,$+Д0 ef +d&6Ez#2 Xb}aA-a4h/olQ(=7܍%eј |K08ҢFѺxWfQ2L&"~!k3h@M*((-^B+#R!L"-%\8LziE6d+P0`a#Rz+Hds_[+sC:UoM}VNruL-J@[ Ո=S +i2rL@EvѰx\t tx +F ;p63d5Z> R[2Xȧڹ6o'X`Y>~nds1{+GV/uQc2Am+ÊLL uOaW@ZՕ/5ȴAj5L﯌/yzp\gظixucK}ʑF3>nv %@^&7 lPİI5 NH-bod05QoiAB%Cߖ;{ؔ5MI +++yIf-_3l*iH*PN1Zf%E\=QBO(~bI +4kԼnWf sGoMثDǫP w x,&h` %?= }s +S?!C + clT$9 +wk&e^M\nNj_nnT$$GOؠE o#'=gYkW&+D W8='l0cZMSL׳^^Nf(QVȷ[Vx5Q|3֐Ŋ A֛həlW<-}+tF\Vm..m\-_,<ڿ 9+j^1 d:0R Ōaodmzmp>P]] +N<_1o +`COG`CeP)R"wO +bLt˴`Μx<,61q#"ef7I҂{WK-3GEJ:!6E7XShfX ]b{h5-@y~P~D4=?ocUu=G%ra^+H)Ur|?!M[ G6nPOWݢv(MsE F3J!қ`_W^BJIRr.6%ߗ7 zs/Nђ;d$RIGtɉUe/&2:)eo؞z1DsBe`M"JlS!?)>Oc6$G^:8ϗAr+fy @SLJJWe(oA*[ RYg# +Nʃ8PJU*\l@UZX?Ԁ:ѡnLVw}K궡))|YEB\XC7}['MRgO-=ъv8V2Ɲ-xb9NG h0`L3]3ט2Ny3MX/JH8b7|,*Ա13Zv`,  + !?xiUx[y0LH40r9gЛ:.2/r^`ߟzq5Iƚ:qP0I)qڪ^pZHB5?dJRj,P'QB&ԝSYU;Ԃ4#/S}TK(A:MzKW<-0t ?)&fPAbbэ& {s^k#=={ Y)ό V22@2@}q2)7LWc +⚕I+LJ;tt#?P؏GПO WÐۆ525p5 :045`>j`[6ִا4`'0&Mgƹ:!-T/{F 1 ӫ]ڨ$E0TF>3LSFD@apف?|`8@҂/D4oj? 
mq sBs!`p0 3d hպۭC/<]m^oI#\ BB'p#8@b-y/TQ7BNr\孎btrOMiaaZZF (׊{0X\Άn >Wyk"#$ L~!M\B lbāI.;`}>ou鯂AfQk:^Μ*\2B75h+3,Y!m"MŲ&it"M@M#z?`KcJCJ?Hî玣0ezQZOG!89p"1͊A6Si21^sj2p@4zq|qf"3҈Ү3'nFN`WPZuQ0V[m{LOjDjμ^Q, *phlyz +2W"хT?#5)[> l!ir#gwNG7 7&"iIA ‚9s ڌҙ[*Y'Hw߷'t{BsjòP +d:;`X.,nf(I8%K:{51tetT8\ t0|M<; ٍ_)ƖBrSTT =V5y2oT#l RMcO"jܢGv+mi7&Fp?{ 1ē` ;*rx);?/C(j4p T 1c>7Õp?Ę/˿|W'T0F\xw'|εp W=$[Ƕ0b>ft &ۍ|58;`ԗOķK~oC];Rg;*e*$iV&Aٓ a[k+|R6WtÏqC4TY\O-9G4~ +ņ44z PlT>*>S x-O?%0-?ER+}зP an'%J]mK꬛QQu90gU~00n h7Ⱥ?Ut'2>bQ9bޥ}ψ_>|G\Qo\Ľo5 +\M#Aq@ P 0G91m:quF%F(]@ 7;X$+9u` پ[C?顩^je_@I.k\+uezᰭAEiJݮq[)[Q%Y+Q= "G)L/'YڜBK SY|s늨J0^5}SeBfDdy}D4`lbP]VO(G01)[}q}@@sf%VڲϳDi|4TPX.ƚ󀼦Seb{.`g~SnQ+ZD n-%ýw~N(4@m6[ 7]d&| S:x(zPIp%iOˋpbd H2Fm#C%:R;r}N@ ` ۀY\ P2qc8>9kbc8Lw(?ܵM#IVBG({[ah]tՔƺHTH6pV##p*7YQ_)m‡hM ?fO "=GدU`?16dCZTp{|-x2 >qV1):TMY +j.d Lկq/ہrբlNq3+k$ TJd<\;n'18j0LKol<YWԙNEt?}yoaPm Kd7p]VOMFTVS6 Gq4ܐBZ~= O C$i= ++x{fӍu6lJ>N/-&#Dt;_ٔm˖ $[ϰQ ŖJ"@VrWo|շ/nlܐV2 Qv.y=4g{T/\m\ZR4cba{h~jVBctEn׃Vl.WbC_N415Xe/CٽR}+"%0|ñBmgC,I}g̐>V!aQry &=PͭTX}. 
+xJb XAX}0n.r=DŽkR~b +K/H_60 .߭!`[+r^#U}t2Jn#ƶ1+0(抾ʐ;E O[ޞfwN۸i 3 -΀ii1ir䛧{ڸ*1M >`c._1p=8bx.ܸbmghcڪA@(w /Gқ\"}i-LJdz{HmKa V6 "$k,ٮHTՃEw#۝XAH㺰 'pAe_x)@]Ą(Ga]MM0WWUwy^Dl-wyˉ]VPr˄IO#5N[H\KW߿vռըW׽ ~^I'&oJl*NhJk$wI`D)ڙ~O* uos+'fQJz;(f4`<Ѓ!-trA2/ĭJOpGY;8BBN7$S=i7>s|GAKjdѢt#۱j\ 0qwpsΧ +|o9HH_Z r~q؍r*3Y5uo"N@>J KOQWΦ~WR95(s5@e>q+9YW],yq.H7v%(V/ .6h<FrtP]?Tu?s P8Crse5{jyxG|ӣ9?%9]ڍlJ@ET#ldCx-5  { }y#&~;ZG2dS7NjQ&'{f^z +u:Q(S(Qr#NBг< '-&;э8 Čmtm| h5υгGtRd"rlLdAM2 pi9+QE*qz"U".j35x2$^J}]ov GCM 092,+B8H9ԁݰ.z־׸KAԦ^J*;༉Wx4zfAᑤKZQ,^=(pT>f^ro:k|JI&N1%6RZ%S]DwNX⋆78S0<#'#͝1;E2B\Pb))<1{ն7v*^}>h L胩֡H}O|M7mb\1cƳXi«㏔T^-O'&Qbo (73HI +Ar-ڥC<˹cZp̯kjGe)^i[N^_Ŗ^Rk|d$e`D鯂SyY,HM/̪E,h"6X1Z=]sSֶ>~":)'{uw) s [YgHLQ9%n}c֫3թױ"3Cƚ?H8`7?I>joǰ-g7ݦN7 ]=4--ub!t4_m$E\QFVGϏ|ޫdt.e Y#o:4E\5׮UU뇕c:5CZu6gf~tsuU}TR R(̚ŚHdD&T/馹v-wLUd!|n-r<3cKأ=U?+k6O4l*7IQF;ol}ֱuKƳrd9OlSͫt;יLfg"'3;,r|OJjdǾ)[bgJBx=}sVsBT#]'wCT"Q[-5K9IIM#Z}$4D32אbqbOn쓣/ئ]dhHbGTj:YMQo\]QjKVx#l6+[NeԐ\Ŋsјnvi)JR"{m)uiMMXoXoՆ۪ďGZCQZkrڣO~T{]TU:3_]qdK6__ Mfgu|Bz!IVbo o!UF;>D4b"m;*VeW4LC,dsS=JWZo)YO5.k&_IlbϔWxSo;;ܵseG".SIi޶nGD-ϽsUT)J)2rsu?ڒԵ#>וm;10s2bi~_.,cvوO:>r\91)T2PgusuuTxش>h5sԏY\3)kDâ1kwwӯ^"5һY3mC5#l(&E']k[sд5#[Jcik%b&SXRSEVC%a2hq,iomUKTTMT| MMI9|=G3u#ڗ>'Ӗ7i{=䯢^&ʇm|-gDW_K(f9^%v'*j=kDg=͆ZFv>sgϥSL"WN9ɐLKX+3߸JHG_՞7}9jUfNױѱhh|OtE*ߩ4i;>hڒcf*Lf>_u+l| ˹cb>Ln691%ɽF>bb8"e;Ή7)ٙu~֬|6rvuO}LU!"cM9!HaE:櫔oP/GwkÄZKGQ][D1]r"ݢgxыX?Kgg_J?uO[N7&O]!IJCvVXl-ՉHj*fT/ilf(}T /R_s-#RGꧤ%gy̸~ŎH̹R!-sȸC6q+yBQH4ܹSUA2g[cr&gbB#įȘ?/}uc1gO+{~o>z.+OM"sM}ueL0֥4q˵)b_w%W.)S99sx9sDd&?3$3G9ehojstOsGF!ǎ8rNPP 0 4+X@,(B"QA + F  sBRBJ: +6P4B +i*ȀP#`Ą`2 рpP}@DCD Vc FCDĄ  D CBa(X@ + @4,  #B 8D*F)4(`phP/\pbh4Hp +  @h $"xh` ˀX,$CH,rb1AC`44`A@JA1#h0"p/p)ahDPaaB#  +h +&X84DPa .LT`$" "P`LH@ &"Dxx( +,4`A ,x nAC , .( +@4L84HDxE pQaA! 
Fa ph, ,@0x@  "*,$ HXXT0 uh,,X`!QE#\`4HHV`4 !B\ *X*Gʦ哄z,seuK0iLVWQ5u&>"2!LL3ܐeJF1W[wƿxM#E<Ϯy9XݦDB33C08D`\.B XLf0,CC0 ̀~dJs?dgTfF +h<ʊ@{gtH7ۤ)/12%(' Y'??I9ZR^s9cegwJe%8OqGH*=uCYwB{̺cpoA/EauzH7/30og-a٭GdE  zL\U#ހz D@P/8i{^*EY#mқ{sL}sJk\u.S~\$݇t@ިSZthES&r@?]ubV|”kG`#/~MfK.oXb ~Zx|7Q +ʿ~F﹖fN9⼗4jXy?.!9|{KtǢwL`]}6"2A0۔q>F \FtG Qo].|4>ƺ" +S`|d *2kOV~ J8ՁY Zv&R<9aLjsn967WbtR\.'2>c=dFiMf4HX?}J q JUYZ\pש@(C_O2FC{}EuFtFžӗ^OUesmM۩]4U 1jmPݰWGWa*wd 1rSY? #uxxx;ucRRV= ~-i 7WŵNם ݴ3Ӣ,Gmwq >PO) ~^.'"_\sdܝ[F=Q}ՖEA fUM[T𫞡LJ#$+1X1 +,\Zlu,aҊ[)e|-]n70WIPv,4 Bg; i }|vǓKQ_y6$H,l^m]Bt/pTDA,Z1qI34ӐTʸut#4r |6}9˪Gi^LFq5Pn? k)fxdi@Shb.+ Bd 1tN @ M}?:*ᱠhNmHgmZ%@ԉc5Vh$l;47,{e^ܚ'pLf[SJ\\Pω?v:W,AKeHhs3v5PK=24wgW(5KHW)B Kӱ. ,zƬ]WxU#Z$TqMћq߄A@m`6KSĢX-G0 pnS]Z< M N\,K*kcSzʙC X` +htw'l&^.2+$lA#X@au=\WpdS'm+*`m:IiO| b53Y1rZwf b0|#6O fm50I=QQl +hYD׾3ɁJÖd-H,1F(@u)4gъHQ5yĐԺ*$C-v+i8 +wB6]suIyQ i`d KUma֟{O^ӊ)u?jJM1f)x焨 + pÁvХ3 CJ>uيG9T92J +m{N6rri(ٜ|Bj )wh**L dW 5Amp@xp$M"°{ $ Lu(42Rt;uuhA #FmR^ mSLaj*:͍F) qűjѝaѨPʫrY9&Q3QV1 +ZhMaȿ\ .%{>4 -Y5;0d+ >4iڑx QR1 uaaneu5 +(` J`t9`埼bGF@G,d(o|5c]ڞ"O]G>D!+f vmsZWE*bxgW*b [onD.b z3@X\%B0;G Q&{Z0QL9؇fDә;9kRmˢİfxSkϚYrhbQO\ ض}Ke-lCЈFbpn Mx_0l w1:aQ&F jNmj|OS`kjXiD[[e:E^e +1$$lkfu!̀Dh/@=}DXH~vmVaJœw.m; { 2LR }f6W,Mbh0%}A4&-{7P0_ sa>왱65vjsܽG0`6@KD[D)ØLZ-BMS0K[H ں=08SH:Pm&?"2 @L"=ƾNi1&m@Lm;7yĔJD1*ivv6*"ik6 S:̊6ESS=?-R26yQc=VE^[W& Lp0 +fmFNwqJut/_ֵÀ]wmK& zmu?z2(aghlab`qqKMeԝm0~R' a8W16Rb6p/m ' $0G\ܶmmM 7S m@CFwm Ԃj?X } ƄVI>ڵ#;͈(d0 +6& "eim0D lT J0­aMja q+7`w q'R)r6 n&ArcHjn-@<э-#Cz`HnJZ=vCѰ060y-漣o O%&Do0Yo .0c`֛ٿqB _lYo$P-;EKRД_"F7a/ ؖ~97k v $(2vI$8HŶ  RESKP)~!#2=IG|k -.[F$(Kg-_<߼3+ ƪtnb8y_@svZZ2Bn^5xX[K$/875r|Szo3o_K1,|__7JYB |_L9/])mZ|ksߤVʷAKj1}s )ߴ/ӷ }G, B&4)W _B _,,o~_ *KܛyP¹ЯۡwޖPpgҿ1-3&_ 89eV޼)/KVуoOd*/ }#ҿ+9/͚)P)b|_G:1;lfg(ܾ> lGbS$8A™(} +1s:1p^ P΅7 `<aBYP_+uz*iФ]E'ex.j\q< 8e@N8̸Y{j'9C6K+0iP]R*KHw h4M_B,M(u$'I`_:9aDn 6+ZSG`ٲ5fz&-IG`:SV4`*r^^ OyWۋqLf@Hf(*c\On(msHG 鼪5 %@a`)\r̊0-V_R֙C=BW:Nc`>KD- +w@( yܣ_Z}(De#Ng)wytkJ¶*/ z(*R n_}O12_P mKD ,SCXL}K Xm"|ϴCzR)b~_&=ְ}BI 
Pd0pz;BU=(yEoGv Ȍqkh֚~h}I&`Fy+^`f x1+LKs4>NDz@ JkNLXWl5` + \p}o,ؿY(v,+xbQCn1çe;][xh%h4k>::N<&tx.BBq 8og@JuQ1*.~„gʸE)H\{07%= x-T&U&u 0pTZ̑@lhƬUN*3sC00Snd$` F% K#VX%`ָIcBƸ%`b5s]T2߲ 0Y{?x g{}J&LDzϹp P]n.sGϗr`s^0 XT6` >V 0B aR&P +;x7`~MP$Ѱ hS%`04)ͯ08ja#7k `0(i$ag"7\k0PsUmpõ̮'Rϡ摑qȻ\i1lۀyZ9FL>R=D `eJRMan |~{mcqf&heEh + ` ioTR$`QO=r+i0pYM"QS5~1:?h ~jmv/,CE`  n  c{ eۜv +6 z>~ +1Y<6 + O*: pC`np!0M mLӉp޲5(kxWwKZ`> ̆#j`$Tº A}(cTQ\x, BD {+e !ls u/n`ɝ n`ȱ70ܨV|L.eDl`v&Ha60K>Dj. C6 Lӌ%@&BXQeGޑ^Um`D`H#Z L@B24b::PX70mkB|Nn`fh~hFx>cޢEF>WU$F!y~`9@nK2L+N{Ls $^:av"KP?0hJH2sM5}{~`X] y _fʢbtƴ;$u<[`Vq/q)sW[`^KkeLmی: +kU]^#"6s%rrI'&0anxI't'apuR 3sL&0Fr9Y~PK#W#p0ȻhNF0J5(08 dO`tXu'05]v}``et鈁Z%g,@ܶXF44,T"m6(F{Xp.f*B ]`, X&ǥ~ɤ/`.`DbO H!6i0c+w}GGXkȴ~  F/xwǕȷWD.)s_:ݸ8,E#9$ ~E Si`-)v1Ye&p$e>CSt:U9F3nrFtsdoHXA#I P asAwOi`s?B(cY+0L?Ͽ"FN /NꑈHCo!eS`@TEίB$p +C|R&kO0r@=JN#sk2EO-]4rLذK .eTI@_舸:%&(e(%u`9(y5 )We,B04=H½? Oi}ҋ , at'[9t)P0?; !¼ٷ:)GQ~Q-DF\F^>EN![n\^@E04F` 3ice,OG1oIМ@PF>T|f (Kf.)dchvAE%2T5QWΌҥ9i@V<4@XAt?t#U)Jr&c587ڂPEԨI*@.R. Н/eԑsCk03#zw(ZE)@P4F^larġ,(ӝfj3 s5{̶Y9*} yTxЕUb$_߯J@ڪ,Jv¤>h,u/襨[Bޡ\HP:?"O-Aȓejw9#`JP%TuoWD)7.}aYuHIۚPG3Ŝ>(a>3'#?f. g:[o$HGaoꙂ#9VӵXNSݢ-o:!^xsP.` Ql##hQ1SڂgڂH&WIS57ߦnIG1;`֕L.l<^͌ +,e5LTOOe_~KYp䶟۝$4j +:,1m?NŠ_ 9ѷ{.P_š$ &Ϟ, 1(l0)w*t[gŚ<0Puҷއ_R +"O|LZCT̛*RhP}4ioDWڸtCRٳG蒯B_!Gg;DwQ\!\zV{]=Eu1aAXC[k"4d.CSԀh)1$M=2 :B t|.;(&9$ХE`SpQjs_g3p& +& 0ӟZt;~(8!dp?派ѤeCB0`5quPK@"l9(6Y&cE|*Tzc5r~{9ݚE{F1 "$8n#v.܇:qC@J%{Th%Yv?\UP.ҡ˰܁j5Yq4(X` $̖ (gQ%3r8g[em^mƴ9+ :]#Ʉ.vgpؼnKNk~,fs:Jo{5GOAZH|ʡtŪi,]GϢՏ_YOSMa)9!9PȬoԜ'I>!yӕ8ޅi`Q.DFb^y-Iss 0ٵBz@T{33#,\2E;AQo-^*!0QsrpoMc?)S'B\\nP.Œ.Ibe-~3TNxP.DӀC0,ʚmyKG]u֌.-j&G&Db\[ZR3gQE7\Pl/c*SfJ_?n +Rcڧ.]b)2PYG*c>V{肥pThFu* d I;,|x&.VVH=}+0_՘? E\.9\tų"Ϙ|WzU\b ga˄iYΣ mAQyiܣ b:;3ۖy5 Yp|t| Xq#]bAɢ?({(ء~MS"0qH% J{xA(K½My@*DQ߳qY Wom`*PK7y$Hp[Rvcae+HOBCnL.kP?<NP@Lw$+Ϝy/Vn(I(W?*Ҷ$AN>6b)'vT:]1jz);Sv? 
NHJ\Y- +@p_= oFSC b0PCpd3#Ig+woL?l8X׫oi{)CG wk_""0:hFnR0v +;@0ۣ#[ ?UW6wRPL) AM9j+/oGDrn'&Al trM +x*:;A5JA\oZ7o;VPbpBm昳z(gp ӃK ֑޵I^jO\tv.k fZ&œ0L9 ~jǩs>KjɧVP8An'{EY%r8\M U%1}Wpc p z*h_y0>7z=( (ŇAkh:Q7`'O<,: s lnTx i})6!x \l*jVDnrڽܹG fGHXȎ'FN!:N7Vvz0ݰ,ɛn>˽_=hIa(7之h ZæjTt_OUQn8h -լp10j1t !m 8h!3eMtɎpyڂ]4iww5:cr?!rp I`Wƛ8nV(7\vE^~:A|D|H%(tYkGax%%Hȑ @mF P͹ +Pnrl?e`wne/Plǹ{ 'J2F /B70+]`B u]Jb383nOe7(oVl- ?@ڰ,(C_N0KJ7ܷs5`z,'`0?%A7Di7 wCu^L8oHH%wVÑ7aA*XAY0byvR%roPK O \ KG2|M8t.-߀ ]H۶ q!x #>A1Uag: A2n` *$ڈ^3+=Ӗ`"ضH6ˎD֧l + B:66&=ԆYǫ2furtcRú1 `BT5$\ *E?۸r| Gȑj jxY!yYj7pHjxo480 + +K\ `IC RDhQ J*W^uEOKljhFmj*Eㅶ#`Dt1Uћ@T%/ 7dU/{~-bGloCz#-MIS&B^LqMR_MGvu,SMVlhDͲ1؜ %NJѮ{4,H "aE g w6 C_ +po 9HV]3% +>$Ԋy9UT(kǐzߩa|G]ϸɜiU_X9WBQ}S$ȰO:uᙑH>p/`>$;zlj(Jho~`)Ze^G0k5ڠeągq ^hb@i^ld1.αak87hC]p +`l<+@ ˺eʇƋJ{SCX+E>iq`z'JG&cr,'0R.8B'P,q01Fŋ? PHO+n>f1>q^ՆK.toaijZ&N6^#(gٰ2 3Q(:!$J9vd!54JJZ,DcuVSTQ;$(/(<1SIX\M[lȥ\zY1 [ ٛ*Sve\-O v*<ʡ,p9գ΂ ۨڝhr0& )P q/ =y,>gA@PtKi]ACY$ɻj*zLڳ(yT)4!/.q}01,&}LA0x @tEl t7׶,pdr_Az $3D^Є z)I? I7?w:AYj +i"&3?\"cC$A69:oB uMKCw ٣Qxߋoc҅F#YrFt[afTܧ@X9 +#5F=:)xt\%}֬e;h$hjuzn7Ӹt79eyxR2d@Lǰ,w:O8J``t/]YE/cQ$%*=<^ 4$7-cvq,:JʶKl)"`2đ[K6ď -AY1۟%& 2$D[6GM +A0O+Y%5F.txO>dӞQ2N汩.x(E>"2 L` c ~;B`XRc10t+`+r#@I78óÑ|PZr"ۗÑɍD-vlŞ3q %A6 +;#$9lj;)C 75e 復$ +ahݕn +&~=5y>J/Jòۢ ~\VEnk,L I)NO#*Jd rj]Ԏ'M[)I_j_txt i™%Teƞ3> >$t4tlz3!D|M-9;V4s" Y;`ztM[w}:! 6`k~VO\C5h5$XÓ\{#/h22 "ݍ %j!M(ڱi;[C(1;k )$M{E5xNQ[TŨvojn)$jf_-==פbzX_Nj+.ZS$܌JR u8%ԀX$10kjRaÎv6%.!Az^*p 砆-VA RuPCr%;e.  &[2,7-fj(`:iOs 4Fr6" rrH6Jr4-b:(NYR!쩾i@%4/J~$L1 [s !R@LȜiAaK#5+}ޕ##A]mS73SF?ou75q!h 7-P_A0Mv|H&O1` +c94t# $ +@˴P48q@$ Ǫ1FiYl4wh4{FUF;g33H6B1ЈָVU6xF w7; ]%L@p/Ja +M4lU ]gA$d^D?[h8?[ePJ)ͰGeXJ /uoG NđȣA_=t$hPIz4g,,P(xlF( OrD4p$||4i@v +C U& O.L hJ2i-a-GȥǴ/ ҎTKm04 9Veͣ:S c''ۊLLChfBL6B`w4464l,r,Yi 4I) i`D?,k$P=SE9 =fx|eL0+e `F^T4u{jn4 amX 0 ߭Vs%{wp4Hyc(bF.:cNITuN msI +|]h΋bx}4x?ͤ0 @Alb>MQ40A TB b4!\oHNfOPI:AځhH K! 
cS@vݨuU% F,~Bw/;,R"q!v4(!G\[K+PnhEs 5g4@p]s+بѰQʤF4'hWgF5A!s jyh0;D r`,FV>5(t4k2AkEA+˹AY8g UN h4,Cr5_ ۬FSSJ.?pǕANu78ɠY$$2 hJR*ſL)@AѴē jıIhIP#@_y;bWjoR +'õVdx m +d_+]ɀ+q2 hs2x ^ɺRWF6: r~Nɐ.nKeZJ$~;FӽWNTw283f saɇhBPL@ F#fb Ud3*_Ȑ%eyOՊ 7?QakXV d 861~8)FU`%[Hbc˯b7@Ca7!Lc pA sp 8Ks P7 sQ"YL{ "`! CerĚ$|ðUq. jk0a~-s`Vi^(OM8F 6~@E 7BLΎŎ%2K\sl0@a*: +`c8Q,A_O>BO %I ~cR%5:K`Z7.9, 4rn <|U/@jSv[#˒ _8q/@åT}e& BsE-E &ЄE{V 3~kL=., + +G3+)bb TB-c`>kݽxxhxzAa0HTX T8|4._S4XOe4F_vVM]ca\xEoԪz6=i̖9iLwW}e{A +"9Xi(s)6+)j'LV:2kKCkEK%Z!YN+2u60lIo^cF]EDHSd  s"Qǂ% <@q F؍W +F)15ړ! s|553U[$XL,=kQվ~H,SHʣZ`6',H *1UhO)@ã1`@  ,` H0P(B!P䐀`` + "&DX(`g^640DDxp0L01p" [ &&%bJ3bp9B[ }eoNu7ͳd]]'rQ~vww7j)[U^v*kk}xzwXOʠݡ1SK>%UO}4p>,Hd$Tkewj|WHu1}9~οlʣ/Ջ<)H.[z}q iID&Ҟ(֥ercWmJ|͚f,Rr2I'Ͷtך+ 'M~'HصFiɤsW6`j Gw]ٲ͘АutWW̟]"炂[^Ve:ͬkj]Y^5**j.gqLݣ4`vT1˓Ky"=ݝ=DBRO͵; VpkL|2`! 3[AϖIӽJj!b]ҽ6N*˓f.*N!)sP>$būEK#Meӣ=]Vh$U]X'#?QN} +uX_iдӒ VigFYy˰1[  uBH#,VFt+5tYӈICw{-6SR8Gko޷Nى endstream endobj 19 0 obj <>stream +Feʉ#IDŽMMnZ//~~GUHK߱U>zz61 aPSN/d3nx~rtC8(qk2.Ύ׵|{-3^o[qqfZbZ՝m)7Ef6+#0@ J:Yj$\'EOZQf̖-kKVU#Iv8Rq,E/WYwXc8ld´ &~!2#×,$ɗKR*{Yfuiڴ}Џ2/#d| -xgGVF¨ʹ4#Ё8ћ +q2m9B"zј"ԳNu*"1lR1'9l$8FQ6`b=Ͼ&aI czJ; @ZD$_& i ~/]̎k8:o+³vf"],@9C$X$& E`* +(H "<<` $0 LHhp<P1qIxP,p<H CY0 y8` @Da +s(ixCpY@0(yP@((((|Fa,,x@8`P`kB +MMTY@:'+ d\?,H h(UIxJM??Y],ͅ^i!u#C=C`;$7V}TgEI<Q.)n*n(]z]juB#3)o̮dN6N]+i$i,ZMztf%riyl׶;x6KtYmDB9 T,#VX?٭۳۳Zten.>Kq)OxzLJJ_ q-OvUTyC-;ךg7~^vNRs!1[|KZڗzIk PKZhhxXmcG.5A&p1Hqbv8w+zU4:+KW|n2!/79"*юq 7}iřFJաh@h(#T, p8H,H`810 P 0 B1bSQ&`V :O-ӵ2=)j*(O70#܃pGjx*H7crmS~(-Ԩr3{0hXNĤUS$sK;`Z=;'1NB 1.1rd4w@@a\>@ n +-PD DM(t74֡HԲ$ 9J^њ3s=},3ިa[U1 {wkDsJO 8f򣟹g#:?'X<nk)Xlig<ۨ wj*W*{ +AIs4`GEY[s"tf=ShkB'`Ma30S'{:)c z]h3~]&Defسh93dL(\8y|^G@yD 1J\.MSU}ujƩC7Ž)n3NNi)^OWvƺ:Ņu[8KwP;ϋ: C +оYtTK@-Ʉ'Sp-,g Ն(edڌ`'ЬG) $ɭA$B^P&eeuKwCőNKD>3*1-Y!W5Ȏ87-8y\[yi!T.AުZ +]E7*ɛf8 +nyoFԄ2I$}0:B*: +iP`q" t\\ l +{<\ė@3RA,ĿLt^lT wHnKxpyN%..M2SK",^w/;4|Ju郈t?64h`&]rېfaxt4olsL9W4MFYFy(ID_Jthk80"L? 
&\ɓh^䊬}QKP+.NgqF׺vXk48WMoDc;ʶ~؂916@cٿWw$5UJџʲ9"w+kVbDEH!%(x`5=阊M̎ )9~]W}4˪}pG,JS+3F6dXb⧔&Ov+dvs~3?fV4d_2q>Rٷ68LrEc:q28 h^ A"`* %`@=RpA 4䞂B+]A0FĤé,Xcu5_6b#nIf"{:UM{ߎj˦sMz OY"t8lıġX_6({U#"|"o찌<*D<\)RhjZz@8DVQ˻[Zwt~V)N3vǀu{'Ņժy%!EZI4S[7XلhE?oDŽlj-Bd@*8ŒwD@-*i,ȁf U6\͢p#TrIVBg,L#jȺ4#*-5^ݽ!_o܅aA7"W뎀l.%g(J^'zB9G|m-϶S^}%Pu;^ϜѪsOiFgƛ8!.97 um.^s+,"|3iDKuznp' =hvݹSfwN!&T+TzAo߼?QZJ'6a BeUg^j쨷C5v֐sm5jXNi9,+BoEJ3˜D~e!pS(Z(7+zmHŚ>wP15 %ɪ׌&Bضa)).U!6x!>l葷TÌ qe3^aX?: ٭)ċ.:lMt4J⬈4Am|*|gX$^&/> ՚k +@3JZ7zǙǼ6 +g'0`$})$uYIާ{33CW-SE5lUuP_ ͛Ui5SPi'tivg?Q= +&0)|Kkm>!<6DfeXRbnskSheZmb軓Q2Z&X+IJbۻtwާZzfJ-9c eˌƖNz+u|R-~6I ":W+?>x[&9NygZW ]5HaRw)@Ґ + qV]= + +aq@H )QMNn%+ ȟQxsP +3u`H!AQp(e|Ai z?%[J?Qd 3"S}eE`rLaY `GogZQDV[-f-%3Bjb7TSCy <\eVS)M<@5xao5ab3j֠ +dOhmc*6g%b]Lr*6Z jHVn`%jBMd^'fW5~kf]_˔H61dR7L1mpb-6BIq]zhKap|AJ?}IJ%Towh4˫rY}Ҍu <\2iebgÓ&C!2%?OfȪ֤`iI2ZAhn2L" H Rq" []6 S5{fnAF m3DNFGpQlrAb`D'YRSi +o\TF& εiA iK>:B;eloIGege+zf#C| JαmͿQ,A#r{43Qy\ m{񍭳}i O)P$h[R*x+32'b@"J-k\"@ɾzE.DkQ|»{~%'IY7%ȘMxs>-,P:p]ZLpir᪄ncad`O-:gޚ)!Yi<y8ʯAˉ I,`Ga}my|G.̸n'-rʌ 0j3= IC/g'9LzCHrmykZ&wkj 5\8-_ֈB̊+klœVi, Z)v{iIg$.ie){)y@N=i?C ֫ $e3qnQ啤oäfHޣ3nh/I@ ~ 8:͢37RGV11U::ű U#$@YX _KNҫp3^+!DO";1)k-*w-N/pT_fP_VaTrИbe=]^8[kr5/A_0zinYH7M`ca͖ɡrɖf@(plU5rih \B7 e#3iY/p '9B1BAM^wG"Y. t>2+% E,Jcm(W'& 5:l4p6Zi9-aX]W,ןDl_[-C&}ХGrۓr4Ip6 ݣ&kh":ya9n4g|:&{.c։TQ1PkF05VmjJ3^,3 H1'y]kV07u Z ,U(.  ot ,kTIֲYa@pRɺ{46w$󓧺~؂K{*4ڕLHv pf\V>y41@ +C9,-Rv۔s֞v5J,c;5#w!K8IBاSȎ"5*fNY:a6TXfQc] |+.EqpʤZ`}}KaqAz5`dE4ooG[J." 
6DBBQSP c4>:v>lVM ;sƐ+;wp.X\aet )-"|ige U 􂴄z5\ (}WıԮpb`!c#!<4{ibriVg遚xFj:0eoZ%iiݑjWVG+fq7 7*棷.`e)TBὣ+hiJ'WlZqoa$׀9bN쥯:4Rv5eWOU"5R; ;_gR#KL BTzV +#,#r +;`"B{-Tg,[ufVWU\ґޡ(8pyB?_"C]鯌l|Vo>B ^CE3ŕtc8k0ǞPKXBe²EJNݕi4QK=7MAQ?*H2&GgWHa'zțJ9hթtx|J%14:;^1N'6@(=L*#'[/*O5bFb)7 F +5< T6gu"pq!9:X Zd vlV䌙{6;тt<~ +_;/>CB {iÂp/2kk& )Kpxj l.HղiĐNAG s#=G'Iš*SB薧pbbUSU994@# FUrT0ȜסRNv~Y%NpL] !E{N p‚(4*+NzQ'tBӹeIʙRKvjS3` ;J [A/m>x]Hr%eITn@RzH98\pK3<'"LJ8x +d![˶ߴUQ${6K)kc?sp:e8}~kSJ.|9Ledf?H: }pniTo#Tu@fd70+샄N.{nB"$mAAcر)>Gs#j'V)%e: 8o Œ bM:-b@$zrr.Gu>}.Fyrߴz^}Ҽr=8( +=FIox9`<Yl5HrX6j*5t-$jC~ z-B\Êw J77/)0ۊbq >I5u^7.yOIґar$-qϚrD@g 7G{Fg_zMU:cn (K'-8]m~? zs$ԩlp1hgb zbfǻx\0F%`CZ[ + ?hvp r-N44U4PcHvچ@ +u*hY硵@'^8N㡣ZͽK4֪+ BE{|9𺺢aݐȼ0Nei&t4"SQ AIȽ9- GmM/čZCq̲%G-Ob\PVx5yc4eފۀF6N3[+X%- {R:~ 5G߰irڪt/V {_G +bpn3\Gk'kԕ¥4y?Ӹ )fn!+@?DG*K$L ViIeRV(6 h 0ġ%P#BYCAdB~I-~0'K!B!q0 i9 7'ŘH&N& +-Qÿ|@3YWlDjJ{2paGp~ܒ2o!rbK92ߕbM6Îm2wDx/m1t+<"tV}F>T܀ j|A5NOB%3L>BA->uot%N>20,GQxڎ4lJfs-!"I_>3V}(a&U1hq]D9O_-&!Z981bfSԾCtLL0qĈi8GKN9XsYT߮5{ˋU]=+0Z9 ֲ\?-;vZ\#TYk+sg@2CǰG%ݿxӹVZG[[ɑmx$ÀޅMGSc%9ϙLIZ0a0HeyhLs[ nk,\wԳ4ꡆʂIʜDEv}/ +ҏԄ-ύzDGXB,)ug:"n#OnDV%k)SUQJ/׆P!XAv>oZ(1D#V~MLni}O]BZ ՗ #B/ՠ)It8kOGTiGi(2ac{.duO[=JϤe.,PK*e/_^".E#|#-7XЉa2b6Fb +6OаʞB߻:AP tJb"Hl[N=Lɇֺ9, +Bom7u;W/kasشưhO +-5ʞS ? 
ɄGL< qnLiЅߏCڇ 2v 3gaf?44~)7~2[bg;()F AX4ØĝYrWMptʚs2Dգʑ LRl.G4ġ#P +UE]V y͵/ 9+a6]Y'gg <%իwS(Kؼrvt2|\IFwd<|19jɗ k5PƩE3zM8`gj-%w'1p ^@*_ߠ,q}Y\2䏉*Kf4FPke.UA-/6)souakf+Zzr8 ߀W]Cj(CW$+X6RbTnd)x3A>)bg҄x#hcJO׾ugZ8N[#6wK`0POo]X(ܫ8 ̂I 9aHiO[ +a ~6;"cНGn6v>OKDReC[JUMi|KSFr]{d޹CA5](HYt{%8$ܕ̃ T-!L'=9Di eeYV!icT Vk6X;`|~`fzsct¤+ In!W`mBMJb' ‚J'0IcTiӳB"& + +Zu`eҁَfuav1 keulҭ,tuGTliev*/ͪ2~ fkЪ2~tcmtb}:쾪skX]$YӮwX/ vy]94] f"|0mcźnA3-EKs/XpV•DZ5;9=h]մI;buk_o]#x9rӓvGӏ?Oډܛ9bɱovӟecsKR$9y~g/o_4c'8fǒ?{}eib,ϱ,iW+Kι=7Go9v}G{,MI>ҎQ,?<ӟMsoH;tC \s{/v4/7I/4yY#Or{'yH{OS,.9x'?H?֎?Os_,Gѓߛv$9Iey'/_ɻX~e?99H.]翋 Ho~&Ib?|y>n\IK''͓(H"őh(~KNIr$EOi#rybG~)?Y/HX>Ү7} 8)~wO%H;쏵ޛ$d眏/I$yrq,Mv~EvkW NЋxbyO|9's|e$Oϳ$#LX0;cAޙ97?}Irq4KͲy}'y/}{{SdOM~qMoџ&'gKp܀RuLY%_R{ca$Ay/Gw\"GsO?Owq)HrwwRc<{YyGv>ޗ'^;IS~]'AK,ES4;;Qwt Nߗ\#EQ4y7A/CmfC- s}c0Q[5֝>9E8tct3kqV}^vO+1:ר|-OrcAKE ,u!b:nd%תˊ"+QtUQ HVTo(Ni@Qh<v +[(Ԁ멨z PhK:b?sJа~ ( @42eO)SKJ36Xq8YB((l& ; ?RSU熂\:V;Q' Qـ Q$b'l]Qh.N#&!14Cc̓v ɶH5'!#&% 0ߠE192C $4R\bofA)6w&5srˌlVUe.`앮9 +TNYب3b%\dDDL).Z) KHd9 -)'[0-kjD P5`MULZytFsRkI`I3(~ -V{EwE D•FNV!Et42PM], ٕQ+7+ΙU#?2ᒱ+>Td` ldpgj1Al0`H&5 5hdҨ؈ qGeȲX! c * & +))B:2GdZgp"hSX&Y֋.*ij-=B%)et"AaZ!-n4 ˄65"8hTu%i§E`jŚ(τ !~j9$h4!IF"a ͗W0Q&/%Fgxx/qmu,p꾌E}K!@"z8ub{Z4Scx!6}@*h 5,75/3l}>11/heXЌ@a=emHpsJ?n -6$kQĥd,$ FPo`Dkr)~'eR hGVh/ +K+#?>\ڐw`5ZK[ŦR1;:qJ}[ Ul\$Z·$hMRdbp*6 ."cbJ mE +@5jSBI1E:os(5*Cmy jꢩ> 3ld7A>$Eu /bLj@5< R. 
]0[$Milclz +E!TiM!3"ׂ$npF!(ł\+ϋf(kj.uXGg*T L^,Ax2l@0zhA}`V{(4q[c{Z˄͢d p0XBg+$CI߆| '*a`[NBMG,٪1Ua !&)7٠|4XT{:'#m%{L[$MY^]s0lG-`0%-s0OI?پ$F`< H_\,[.97$)#)x±ЗFOR`6`D +lRBvZq+pD00p93Z8`F@ECJ cX:reHs8*X\hq &[DbRJD).P4tl1q 3`J + ys1 K{2g a`?h  t)~1C  3N@yW[fY ɉAhQR#)Ѷӟh>(M(2 0B(]` 0&QCdQpQ&Y=Y'PU=YQP~,\, WEt~+S!D"7xN)_OzhE 'S~A~P aS.3DS^=^| ѭ,k-`%*[}D+H~^]J ېYy<2vr<90$a2&7 A"H!9 dAc$Dvk l] D2q3X|VzL'ZL:k ~"`$ *RE5a~ɭfA& L;0av$ +fD?فQ+l> 1rm[4tCjT^r`8=@3"b$v$zJA9Rq`"P$C ȆizğPJy@3ڍM +}4<'KDR>:[\\v9u><SxcRyLĐjo?!LH`Yd:*7ְ=m+" +{LF{e +#l\L_a9?ʕ-qolc^0_s̏֩ގ02 j|,8Dqˠm!ڳ"qbAB*$> FqA5Y(Іܚ<<P}C.!.Čjʿqc 2 ᪖1=4\biYh8:P(IZ \^4 +i' <+oMǭ-gSq{&w*#S'8WlY4_9 κ7J}8b)-?%?iIWe5iy;V*‹!DĐ `|j6QZDu f+!XY&Q"uqR@*MWx$nD!q!2P1i;X+L# 55s^x%o5i!Ϙ/jэy5SuD?eIDĐ`9Q$"=~HfPte|8,\t0ܧL'j9!KIG%d LjāL# +$:+ O* +k,DMFО=u#~{R(6q6c!̕)9 m~X>b&36I7|H]cqhN2G)02߳l#2m_,V

ZFUkm'G,#Q)` Jq`J͈zDžh )MHKDbq`ؖ{A@ XLePYU>_x=2F5MKb<0`ˈ~2fk-6ږJn@ah? 7`6 i&ZfjQ-ytP3qVb<[dQ&ic5`6BHxvVA̔)K[8U[ϧ,aum DQY@Q-S8 +E4%u cG8 +Z> PXxiH i<ԬEe"kȿbAfd^I_}*z(3(X +CsH~v`:uȬ* +/n}XUjfI@q:m٤$ +DvMuREqa(!LL8sT,q+Aѹ%TJAr;t8*3lZHPѻfCƋ#GF/U,}֔JR[TD + 6h&SLo=8"N4S +ӗ&JAاJ=e m;W,jpCVZrsT*󛨦S#MF;LS#qԈ~§oDTSA@ggڄ,?\+q#EbR'HЏB҈~Llj2ETgЈ~H3RkKcx>uT,z*h_2 Ց$b~‡F[ '+y\=#cF2J3ZFx`K)ZQB`D-]{LR`MEq8xq + -h`!#(JGc 2:66䊗er @Zя~)ZDƐYۤ ѵe<1E ?xЊ8dC\ <$ |zpu<@\J W_kj9D`-Zzt+ K`- +DkO4k1X#*Pҵ|J Բ +~!,:mUY TEOԧ Ta80<tC& P4)K +'Z7H .Y%~ue<1PɝNt)aK'h9!p\<8;6 R$qs*!o`zlP )" GRi5!y<I,"&:UX~i\Đ* +cYxbp/Z$,ckя8D ZYx %T˨D?7?h勀h +Q!K^ CiP`]rË +PԎ<ﳌ 0kgRFubiDAEC؇C,|KdjD[1WzVe#XHTF(>!?Dڹv:UPs`)BTB6ͳmXUEC5ou?-M|}_NԫZ!VMR+䡨,|i5kȚB~h.`")$"<=a*,`֙HqTD8(8p dSk`ſ1@/4ߧ6?uB^NO:!WD9MEpôEQz No[6S1`Y*>(8xyV&zⱆ:Gk~-U קQ/3߼5[q̈~S* yYWMHRQAUaO!谄eDn{ 1,FbpŪz¤abUAl\,SL>К&1#u띠(".vKAC.6LطJ*qj:85l(zg +!ċds'B +U8 +]sذx`ycp|&8ϴ#>_M#`{ʝRЕE鳂\PPqlЩ   }0 J異6h'>Ռ|)L$lX̳!T> /ugh $N#nf"BV > ҉ w 4)VnI*A 2 /l*=AbLDrx>}$ +N!ȱ~*G λ>;*+rfǺAƱ8BOIBZ3ɋͷ uNX͆nΝ$ EmPK[#% + +el?Ok$U|oCIt\e,;9ɶ':c%xH-k4Vo%~|`+5;MGޏ?⏤~ hx9 ~L&<2$!G1HGzRDG?Ad=19|d> آ/`E/ +d2Ps;vC!̘ h qĻƈ{~4s #61&?3ʨ3;LDf"_G5sТW>#x!х cZX9 +F܇|?+t8ʱZ_CZ졏:ubߑb;.XϯX±_G!PB{"a|ڠ>&Ds8업nd_g:<d e,o!"y o'Uy^TY\O.AwG=fیqU BZ8Fp,l7)jqk'&`e;gcқI_YdiޤjY1سN^5c_"ap%C|+_&Bv+I_v >9v y&wN k;}YjPyݛc..e M]Rrb QNv Fا://.#Ks:0z.@̳ו.jO$#,p:Ye$,1s"2vlЕmbjgE;ǯ; f56@>)'X6Qm1%,ضG|*T)B}=?CUpL~p[`o4P!GbApۈ0) MRHQ4Dv5.C[BE[bv.K&B&nBDXf !ϰ*` zs~x +ɖ&*N +a>pE 2Hh:"c Un).{bQҠECXb^} 5Nڒ +6m7]b5:!42RjteV'7;"pa<繜5:Y}Cq88 64&QPa}0ЖW\PdD(t{◯v sִxڑH[I>X? 
~ܔwfT$&2Z`ь1At՗ĵ gkIߤfA;\ժk(u&I~ƈvnCq)33r=n6)uɪS1}u޼r}k[hV/a=ޭ|VdT'ήydK]T-:uQ}fk;`MPxLnE5 גFXA3c0d$$Hf"[=UQo+wœ9vF \.㈁{gw71; 9za3^j>Jggw3nx3q~JL=_ +h <&d|2/n3e·w422Wq\ XbXz)f ghۋ@9R"/&UPb.q_^(~n0ǮQNUUO#8zBO2í)/N"A 5[yB&%ŠEXJdH)>SW +Od> + +mdֳ6,Iu]DBSQ4&y-hiX[b"ͭdfvm䧖ojy L0r\E3S+%&!cubl97_/$iH}ݵ%chIǡ96㲵 =-Ngeذ')Q]i I?&ζ"6L~OZ=vi$TRNAٛdX3ZdJUȧ| +[*R@UȆE}gEGDr KM&=(ӭN{gqIx;7o9$1q6D0ҀX<,X˽[A[;XAe E ͓&4q?W07Ω&W‹4{0%=y ^5&\g~5Qh/L#,grS@&Pd3v+w&.Y!ƼHHW0Nx5DJjGv.YP,ST xӮ.̲憤dMϸI*<b2d\7Cl@9?Y,b?u=7X +@]Tҏ`q84{(d=BFEb&Bb  4a:߷un͐P*w7ǁUt`Ukqek!Y +m ]&LmĖ\]ZP%Mܳlnʃ_0UZf*9%9;U,a/Q +˲QSE)>ƗyEBb7@k8[ݟQք,跘!2RN#>iHXZY{kE|QmN\ͮ Z3]ڎ|:l& ؔb5~F<<&?f ﳜD?@"$BZØc/ yn/ .*Qx +XjB*/y9(Dxp|B<.Tvn~xRhWZ'ƂƲq@"x{Z9ߑ>ʬ HyznxhxUw9 +Wh(XL]_fX?f" ngJ='6+r>-`[n}$/jk֪}YfwaLnW3CTdqhd;Mۀ{h>'"KCNR+a8|R`-…OjzE(ʤج;bBtfn2Z$ egz|$ѽ-L1EJq@ ;˕D x'C_R[<,0"sD$kXFQl{I+^)EMLKRM?RXԍ ~傯˾DyF@QLԵh&q.>PYK{IpGR}.2|k-EaR?[^#H7Η/+qi-O~Q7OyTxF@KfgM&QQGt{ݬӽQ-nduX.E _e|EG]ꕦo s]ƢnǞN>]d_{tKusɮ/D]pfzx.2%Vq0Q7p~˸{~>[Q]NEAbDc.z<-RQw2@j'[: LBYWaJEgYjzL]hZ("G#Ps'!õ +&@%xKm% 5O > mr5 +,2' 4& NDlQM/bx2bƉ( Â16>( iQy +{% R?O0t9b!56n[ҵ\m(I(]ٻLke#0GDxjTeZO(ѨP곛*挶ߡJQe +j ~7OÛ@d(՛ʎ>95ڏ(ve>&Ul^TD;OYu(B(|t${' C%"9\-GkdFd8ܸ8(G M{wF":\_gإeX,+]fjI8LZ?p79ez@)HqwpE]Oe1OAFDǃ (hIB2öߎ_; ]Lj %,Y1Ex_e!cYMڠNx ʀvM;freŐb8u>ْ"!}b(+V *ƌj)[Q.PIw nLq\w?GΛE + _6`yŹ´X66iy\|]VwI.j"!V0!I,3|p %̗#nB 2m{B@LX?wu'n<$2OO09qu +50{Mgms;$;8TjKZ|͊UEg}F5۱IɈ>:qlRq- fsP7? ג̚eas0HL4B8Т榞}9ѨʨiǦn6zJݢC +³/tّv\i0JDd.yF7t屖(PЕmX[Xu` imG+cf +U9~9_~,X WFDq H U#VU7'"@C4f;"{h/L|;&m 1 GX `8\դW<;| +bV9#zTEse_C`$("7ͥ@'l'5򵌧,n}u$̧. 
4@Ak .,ST+:`D$}G=g4:G ~* 8qk7$-1G̍_:£Si_b Zy6 +3oO̭A_E5`MK(h"z_v[ohsBm[p ilZKa#mR,1 y/bJ| +`N*>: $g40G#ya"2pݩ{+._/ߐ@&Rs X$P<17Kae|H;gĿU%l +(1ð{|rk\BjYս>UߚW%$ _%?ϰzMOw?ZrWwT$ǵU&)6#7?ݐwW{ ^ - ]uqE60r#e+9 y:coz1J\F2SJ b-s\h,^AK ڢea>WajJR\i`(4Rv2^3"(qtXoT+;m@/|񩽒85xI6Dm]`̛Qlmb†񣾕BrYg!8w}G6Ϣkwn{|+Q8qoG7,Efѥn=d|ړ Gb4Hf1ؘJH<7>ST9傿 [xʽ$rk~x܃yGضꪥJ' EY(6ګ([R c2ʴ7AsLI-k$6Jٛa}_Őw(("PLb0 +0V2ǘA!01wIx5(IzKK[p2QMM3ڒq4~ZM*h|rrō.ìgY1iVnPH(G7 VG<] FN]ġO >G\{ϞV9-hFJg/,%2(΍2`AoN8y.տqƚ\mߛXw\v܃eE#σER@mTYpU.!PX%jx!u>sV ae6¸0}>Xp_;(s(_vt| D? G5c/9$sS+3VRZIi^<cYN<<j*o K}8oQ@3`W0/g]ϼ $ I N"Zt[< +;{SIv`@t h`+AF#ES$DKS|*@"ǂ]oi(i5~Xb]JC Hy=ɥth.ք6T*p+Ȼdy34o^"$a5ohywdd|9$O߆/\t׽] @UG f,2ʽJJ>qj}ql'=z#t=&{ ZVe\8~ufr1^<:1㤨 BxnPgK8:ўP$ ,$NeaP"`qeOO 5z_tFw:›\A V41%Q lux@ja6")p$}o+.DI53v򠣫~!e sXaTy 4ae]4ߥ|Ο +cLA" X-OysxWB͘&7Fp?&̐MEkCD H)?Ebczj.k3NA.cٱu/x[ 6Ogc(GY@0ulk*bR@UX5(9uϴEYdo7C7&5pc|0U-Y;D6f:&Wc)C!8П~&G8dN+37Mr^awEzEx+.mbrRFze6m+d:vpjl8?~(Yĉc&G~ xξ/oul@ ťDuWObqf V',EPMG^!\茅ZօPԍ74%dΚpBV}&.\C'S;~ź?`;L8HC**nq(OL *| BUpyQu)YZRٛԉtxM˃rT$IoɭCniuI.SbÏtmm^$z.ܫ!ycb⤖|lYi eO4o'f[_AMmNf65d=UX~7HWp) 21:A՘'4PaꌃNvuCͶL Lou +10ȯM XU$aL> yF;"woG<<]hpu2.tn'(nI'0D=/;xIֶZ/fBBjM~#cjItw8^syi 3H3COn2mM4Zh~|K@] +A([5 +oɶhEMLJYkOb?\]U>O%\ؚa|+{=5L.܎~Qq?v+d} (,tSRPpB)TmujCKLo5+]G !E;s/^^~ItrU=~Ap=~@? 
~’Ha,'v_6XUFtNX8JS7?ӝ-w>Xr@#" {'kPV+`RSB;+Q2WYB2;GyEs=ع#1ico~(bEIەB>B֮D -ǖ#*[ +e<-U:Sb)M< C[:+QIˆF<?\bq#.NFV\Ls*G-BW`%E +&&:k~o Hi3e+FaͲJjK18Vŋqa7;ϮamI Z+{EB,=ghngwf$|GXCm+:"/.ҤA=wCۅۭ)K[c~'STlt sEBNT7_?LOFJ{cmM8` ld~=OkBaNdϤV2C!x >dY €,o&(b`Pv]FIQ9@X6UǞ-<;,ꥹw+< bvo&~#Gn>"PI~jGq:5&6@btc3WT{wrĂrEcP;]CwM.pV}|Iےx.NlXb~ab#fۼݬͩPYx}eAbɭ:zv\[ Bvb_RR2g3#(>R{LKk%7d(OxrA[LR=\ U81":{vUyJv2!D܃< L$'8:iɗ.H/ǥ|+PojNBn(!+Mq;O^R?e%!(qYO_1,vV`MyKXkocTJa6ؔ|S|5樌9(hdG;GFfV^;ئVB\w+V@[+ğ.]7\*qr&%V +ҬKʒqo3VUg5&L ?Eb)si / gjM}a|^( grYQ::XsW 'փ~0V0SN@ T +DH)RD +<8[S?YzHkmŽSﱳ$@yqe1:ag _Ş6m40pFJcǞ 4dS^wAh7bQ/ȷ#>%i}naNwWER7`(nHA[ +!%R}A0S݊'iH2wvH_4YA +'َ# ْ8SZmHb/e>%+S-Q,'pj q&ֶ OO1 /dfZ|sPiNjT:8Q橬yS?1<!52t,$\&`)^]~R틪rP3 +QL?w7U-M4Hל᱊>%)!6:A7Sݴ+dqO};l}Wh[Yb pQL.^BV.lƛ޴7$1m\w7IbkWw|xBWe|3$h|[\:>J^^1hߣ=׆g/D)za`R++KvrEE9_m1쮙:zqm81;zf@,@zƒ'e2[pVktk^.̐g@mIE]UWxwsK!"\@xAL9Z 3{&s@B O$MMĨ'u ƥ~ž&_?pÅj1eCRUUR:l#n4&N2ӯ|fXPØ32y= {\o&o׹'ҟ6 1`F?. p%du82 7Gh8IמּY"l7'|JzsɾlS,J\QRգ&2Dg9o9n#E8g+TV<*ϗPdNg'ged:E-w|vؚ(Ck;n +c–nEzP]d +&b\x V#ncck-I(ӵ= +))O13k4@ehC kbI^摊6)\?"pL}xG% O.irYع3"sy+=-Ymkz]=PE] ]$;lWGh5v48hUm U6wiV*5o8;7֩SbAVz1a +t w 3u_ƣDtw5"qlOudߜ]&p4) F4B|MmHUX:qFn*X!SW}1gU.$QBt܉zF5~3+6UQ琸R>c~WƸdRU_>\D4,IםÝHqwyӝoIJxNeHMv)=ff3ZTgW4J'ܪ@qG` 3#V]-mj9ɐ$/\r *'V?k8 à@h8c!,8='cr$6^w-Ej@J,Y.uYZ.H4Dˆww˃~Eh,EGM3n4,(S [7ͤWd9q|2v@h8Piy#ǂm1٫,k +$$Re߅{>TV֙~8#UڭȺSDm(j(O'l̸@fk(1ʕdN\RACGx4G% HeӄrHȮDn+LHvXk +BS9sӦ4 фʀÛ`8^J}̜6q~6>F_^Uy[M/ +,S3䚃t*k']zPɣ,_Qќ4$03Э=*]D`M!:] +ɱoU<3De4ҡΑw4aR1 ׫+x$lɝpG vˋF+BiaG0镘h8\v,7Z騺 +8;^1jN| 8Է !?:d@ACox6]@e\l6!KRu3OxU.<-za512;u +~ +P#%ut(`;>)HQ$BXϦdKPa\@8=QCIf-hfJ wF9{6T w[ݰ54ǖ؋Ϩ };-9amR5(W+fI"E"Uaw]Cgx3Kd1*D+@m;B7^(z/~2T՛nJI%z`|.s^ :zԠ+l_XʠKݝ;Llk gn,7sRӓYX2- 6+ S?oJ"^!34h#T7(KCMS-^ڤu <ۣ, +V`d\UA[, +WQ}lJܺ;Tb 2:v.k%io >pL΍-.K54> endobj xref +0 33 +0000000004 65535 f +0000000016 00000 n +0000000147 00000 n +0000047373 00000 n +0000000000 00000 f +0000047424 00000 n +0000000000 00000 f +0000051848 00000 n +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f 
+0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000051921 00000 n +0000052095 00000 n +0000053300 00000 n +0000118888 00000 n +0000000000 00000 f +0000048865 00000 n +0000157011 00000 n +0000047823 00000 n +0000049165 00000 n +0000049052 00000 n +0000048082 00000 n +0000048304 00000 n +0000048352 00000 n +0000048936 00000 n +0000048967 00000 n +0000049200 00000 n +0000157036 00000 n +trailer <<72A08720E16D4D0BB75F13CE4AAC5C60>]>> startxref 157258 %%EOF \ No newline at end of file diff --git a/car/Assets/ControllerSlideIcon.svg b/car/Assets/ControllerSlideIcon.svg new file mode 100644 index 0000000..640fef6 --- /dev/null +++ b/car/Assets/ControllerSlideIcon.svg @@ -0,0 +1 @@ +ControllerSlideIcon \ No newline at end of file diff --git a/car/Assets/ControllerSlideIconHorizontal.svg b/car/Assets/ControllerSlideIconHorizontal.svg new file mode 100644 index 0000000..d7f566c --- /dev/null +++ b/car/Assets/ControllerSlideIconHorizontal.svg @@ -0,0 +1 @@ +ControllerSlideIconHorizontal \ No newline at end of file diff --git a/car/COCO-classes.txt b/car/COCO-classes.txt new file mode 100644 index 0000000..1f42c8e --- /dev/null +++ b/car/COCO-classes.txt @@ -0,0 +1,80 @@ +person +bicycle +car +motorcycle +airplane +bus +train +truck +boat +traffic light +fire hydrant +stop sign +parking meter +bench +bird +cat +dog +horse +sheep +cow +elephant +bear +zebra +giraffe +backpack +umbrella +handbag +tie +suitcase +frisbee +skis +snowboard +sports ball +kite +baseball bat +baseball glove +skateboard +surfboard +tennis racket +bottle +wine glass +cup +fork +knife +spoon +bowl +banana +apple +sandwich +orange +broccoli +carrot +hot dog +pizza +donut +cake +chair +couch +potted plant +bed +dining table +toilet +tv +laptop +mouse +remote +keyboard +cell phone +microwave +oven +toaster +sink +refrigerator +book +clock +vase +scissors +teddy bear +hair drier +toothbrush \ No newline at end of file diff --git 
a/car/DecisionSystem/CentralisedDecision/__init__.py b/car/DecisionSystem/CentralisedDecision/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/car/DecisionSystem/CentralisedDecision/ballotvoter.py b/car/DecisionSystem/CentralisedDecision/ballotvoter.py new file mode 100644 index 0000000..deb9f65 --- /dev/null +++ b/car/DecisionSystem/CentralisedDecision/ballotvoter.py @@ -0,0 +1,51 @@ +import json +from DecisionSystem.messages import ConnectSwarm, SubmitVote, Message, deserialise, RequestVote, ClientVoteRequest, VoteResult +from multiprocessing import Pool +from messenger import Messenger + +class BallotVoter: + def __init__(self, on_vote, handle_agreement, messenger: Messenger): + self.messenger = messenger + self.messenger.add_message_callback(self.on_message) + self.messenger.add_connect(self.on_connect) + self.on_vote = on_vote + self.handle_agreement = handle_agreement + + def on_connect(self, rc): + print("Connected with result code " + str(rc)) + + # Tell commander we are now connected. + self.send_connect() + + def on_message(self, message): + print("Message Received!") + messageD = deserialise(message.payload) + print("Message Type: " + messageD.type) + # Ok message. + if messageD.type == RequestVote().type: + print('Received vote message') + self.submit_vote() + elif messageD.type == "listening": + self.send_connect() + elif messageD.type == VoteResult.type: + self.handle_agreement(messageD.data["vote"]) + + def submit_vote(self): + v = self.on_vote() + if v == None: + print('Could not get vote') + return + print("Got Vote") + vote = SubmitVote(v, self.messenger.id) + print('Created Vote Message') + self.messenger.broadcast_message(self.messenger.swarm, vote.serialise()) + print('published vote') + + def send_connect(self): + # Send a connected message to let any commanders know that + # it is available. 
+ self.messenger.broadcast_message(self.messenger.swarm, ConnectSwarm(self.messenger.id).serialise()) + + def request_vote(self): + """Sends a request to the leader to start collecting votes.""" + self.messenger.broadcast_message(self.messenger.swarm, ClientVoteRequest(self.messenger.id).serialise()) \ No newline at end of file diff --git a/car/DecisionSystem/CentralisedDecision/cameraserver.py b/car/DecisionSystem/CentralisedDecision/cameraserver.py new file mode 100644 index 0000000..6cac3b6 --- /dev/null +++ b/car/DecisionSystem/CentralisedDecision/cameraserver.py @@ -0,0 +1,95 @@ +from DecisionSystem.CentralisedDecision.ballotvoter import BallotVoter +from DecisionSystem.CentralisedDecision.messenger import MqttMessenger +import numpy as np +import cv2 +import time +import argparse +import os.path +import sys +from GestureRecognition.simplehandrecogniser import SimpleHandRecogniser +from threading import Thread +from queue import Queue + +import MyRaft.node as raft +import MyRaft.leader as leader +import DecisionSystem.CentralisedDecision.commander as commander +import DecisionSystem.CentralisedDecision.messenger as messenger +import DecisionSystem.CentralisedDecision.ballotvoter as voter + +print("Parsing args") +parser = argparse.ArgumentParser(description="Runs a file with OpenCV and gets consensus from the swarm.") + +parser.add_argument('-V', '--video', help="Path to video file.") + +args = parser.parse_args() + +recogniser = SimpleHandRecogniser(None) + +# Checks if video file is specified and if that file exists. +if(args.video): + print('finding video') + if not os.path.isfile(args.video): + print("Input video file ", args.video, " doesn't exist") + sys.exit(1) +else: + # Exit if no video file specified - we aren't using webcam here. + sys.exit(1) + +def on_vote(): + # Get the current frame of the camera and process what hand + # is currently being seen. 
+ print('getting frame') + # Need to copy rather than just take a reference, as frame will + # constantly be changing. + global vd + recogniser.set_frame(np.copy(vd.frame)) + print('Got frame, voting with recogniser') + return recogniser.get_gesture() + +def connect_to_broker(mqtt): + print("Connecting to broker") + max_collisions = 100 + collisions = 1 + while not mqtt.connect() and collisions <= max_collisions: + time.sleep(2 ** collisions - 1) + print("Reconnecting in %s" %(2 ** collisions - 1)) + collisions += 1 + +mqtt = MqttMessenger() +v = BallotVoter(on_vote, mqtt) + +def on_disconnect(rc): + print("Client disconnected from broker") + i = input("Would you like to reconnnect? (y|n)") + if i == 'y': + global mqtt + connect_to_broker(mqtt) + +mqtt.add_disconnect_callback(on_disconnect) +connect_to_broker(mqtt) + +# Start the video capture at the next whole minute. +current_time_sec = time.gmtime(time.time()).tm_sec +if current_time_sec < 40: + time.sleep(60 - current_time_sec) +else: + time.sleep(60 - current_time_sec + 60) +print('loading video') + + + +print('Press q to quit the server, g to get votes/consensus') + +while True: + if vd.frame is None: + continue + frame = np.copy(vd.frame) + cv2.imshow('Frame', frame) + k = cv2.waitKey(33) + if k == ord('q'): + break + elif k == -1: + continue + elif k == ord('g'): + # Get votes + pass diff --git a/car/DecisionSystem/CentralisedDecision/central_server.py b/car/DecisionSystem/CentralisedDecision/central_server.py new file mode 100644 index 0000000..c310410 --- /dev/null +++ b/car/DecisionSystem/CentralisedDecision/central_server.py @@ -0,0 +1,15 @@ +from DecisionSystem.CentralisedDecision import commander +from DecisionSystem.CentralisedDecision.messenger import MqttMessenger + +mqtt = MqttMessenger() +c = commander.Commander(mqtt, 10) +mqtt.connect() + +f = input("Press any key and enter other than q to get current observation of the swarm: ") + +while f != "q": + print("Vote is: ") + print(c.get_votes()) + f 
= input("Press any key and enter other than q to get current observation of the swarm: ") + +print("Thanks for trying!") \ No newline at end of file diff --git a/car/DecisionSystem/CentralisedDecision/centralisedinstance.py b/car/DecisionSystem/CentralisedDecision/centralisedinstance.py new file mode 100644 index 0000000..923124e --- /dev/null +++ b/car/DecisionSystem/CentralisedDecision/centralisedinstance.py @@ -0,0 +1,106 @@ +"""This module provides an instance of the centralised, distributed voter""" + +from queue import Queue +import json +import argparse + +import numpy as np +import cv2 + +import MyRaft.node as raft +import MyRaft.leader as leader +import DecisionSystem.CentralisedDecision.commander as commander +import DecisionSystem.CentralisedDecision.messenger as messenger +import DecisionSystem.CentralisedDecision.ballotvoter as voter +import DecisionSystem.CentralisedDecision.videoget as videoget +import GestureRecognition.simplehandrecogniser as shr +import GestureRecognition.starkaleid as sk + +class Instance: + """An instance of the centralised, distributed approach to voting. 
+ """ + def __init__(self, node_config='config.json', video_file=0): + with open(node_config) as f: + self.cfg= json.load(f) + self.mqtt = messenger.MqttMessenger(self.cfg) + self.we_lead = False + self.node = raft.RaftGrpcNode(node_config) + print("Node initialised") + self.node.add_state_change(self.on_state_changed) + + self.voter = voter.BallotVoter(self.on_vote, self.handle_agreement, self.mqtt) + self.commander = commander.Commander(self.mqtt) + self.recogniser = shr.SimpleHandRecogniser(None) + + self.last_vote = -1 + + self.q = Queue(5) + self.frame = None + self.vd = videoget.VideoGet(self.q, video_file) + + self.kaleid = False + print("Initialised the instance") + + def on_state_changed(self): + """Callback method for state of the raft node changing""" + if isinstance(self.node._current_state, leader.Leader): + # We are now the commander (or leader) + self.commander = commander.Commander(self.mqtt) + else: + # No longer or never were a leader. + try: + del(self.commander) + except SyntaxError: + pass + + def start(self): + self.vd.start() + self.mqtt.connect() + go = True + while go: + if self.kaleid: + go = self.show_kaleidoscope + else: + go = self.show_normal + + def show_normal(self): + self.frame = np.copy(self.q.get()) + cv2.imshow('Frame', self.frame) + if cv2.waitKey(1) & 0xFF == ord('q'): + return False + elif cv2.waitKey(1) & 0xFF == ord('g'): + self.voter.request_vote() + + def show_kaleidoscope(self): + self.frame = sk.make_kaleidoscope(np.copy(self.q.get()), 12) + cv2.imshow('Frame', self.frame) + if cv2.waitKey(1) & 0xFF == ord('q'): + return False + elif cv2.waitKey(1) & 0xFF == ord('g'): + self.voter.request_vote() + + def on_vote(self): + # Get the current frame of the camera and process what hand + # is currently being seen. + print('getting frame') + # Need to copy rather than just take a reference, as frame will + # constantly be changing. 
+ self.recogniser.set_frame(np.copy(self.frame)) + print('Got frame, voting with recogniser') + gesture = self.recogniser.get_gesture() + self.last_vote = gesture + return gesture + + def handle_agreement(self, vote): + if vote == 5: + self.kaleid = True + else: + self.kaleid = False + + +parser = argparse.ArgumentParser(description="An instance of CAIDE") + +if __name__ == "__main__": + instance = Instance(video_file="/Users/piv/Documents/Projects/Experiments/Camera1/video.mp4") + instance.start() + \ No newline at end of file diff --git a/car/DecisionSystem/CentralisedDecision/commander.py b/car/DecisionSystem/CentralisedDecision/commander.py new file mode 100644 index 0000000..d84e7cf --- /dev/null +++ b/car/DecisionSystem/CentralisedDecision/commander.py @@ -0,0 +1,119 @@ +import time +from DecisionSystem.messages import Message, CommanderWill, RequestVote, GetSwarmParticipants, deserialise, ClientVoteRequest, VoteResult +import json +import numpy as np + +class Commander: + currentVote = None + + # Stores voters that connect to maintain a majority. + # Voters who do not vote in latest round are removed. + _connectedVoters = [] + # Dict has format: {clientId: vote} + _votes = {} + _taking_votes = False + + def __init__(self, messenger, timeout = 60): + ''' + Initial/default waiting time is 1 minute for votes to come in. + ''' + self.timeout = timeout + + self._messenger = messenger + self._messenger.add_connect(self.on_connect) + self._messenger.add_message_callback(self.on_message) + self._messenger.add_disconnect_callback(self.on_disconnect) + print('Connecting') + + def make_decision(self): + # Should change this to follow strategy pattern, for different implementations of + # making a decision on the votes. + print("Making a decision") + votes = self._votes.values() + print(type(votes)) + dif_votes = {} + + for vote in votes: + # Get the count of different votes. 
+ if vote in dif_votes: + dif_votes[vote] = dif_votes[vote] + 1 + else: + dif_votes[vote] = 1 + + max_vote = "" + max_vote_num = 0 + # Should try using a numpy array for this. + + for vote in dif_votes.keys(): + if dif_votes[vote] > max_vote_num: + max_vote = vote + max_vote_num = dif_votes[vote] + + print("Made Decision!") + return max_vote + + def get_votes(self): + # Should abstract messaging to another class. + print("Gathering Votes") + self._taking_votes = True + # Publish a message that votes are needed. + print("Sending request message") + self._messenger.broadcast_message(self._messenger.swarm, RequestVote(self._messenger.id).serialise()) + print("published message") + time.sleep(self.timeout) + self._taking_votes = False + # TODO: Work out how to broadcast votes back to the swarm, maybe using raft? + return self.make_decision() + + def on_message(self, message): + print("Message Received") + messageD = None + try: + messageD = deserialise(message.payload) + except: + print("Incorrect Message Has Been Sent") + return + + # Need to consider that a malicious message may have a type with incorrect subtypes. + if messageD.type == "connect": + print("Voter connected!") + # Voter just connected/reconnnected. + if not messageD["client"] in self._connectedVoters: + self._connectedVoters.append(messageD["client"]) + elif messageD.type == "vote": + print("Received a vote!") + # Voter is sending in their vote. + print(messageD.data["vote"]) + print("From: ", messageD.sender) + if self._taking_votes: + # Commander must have requested their taking votes, and the timeout + # has not occurred. + # Only add vote to list if the client has not already voted. + if messageD.sender not in self._votes: + self._votes[messageD.sender] = int(messageD.data["vote"]) + elif messageD.type == ClientVoteRequest().type: + # received a request to get votes/consensus. 
+ self.get_votes() + + elif messageD.type == "disconnected": + print("Voter disconnected :(") + self._connectedVoters.remove(messageD.sender) + + def on_connect(self, rc): + # Subscribes now handled by the mqtt messenger, this is just here + # for convenience later. + pass + + def get_participants(self): + self._messenger.broadcast_message(self._messenger.swarm, GetSwarmParticipants().serialise()) + # Commander needs a will message too, for the decentralised version, so the + # voters know to pick a new commander. + # If using apache zookeeper this won't be needed. + # That's the wrong method for setting a will. + # self.client.publish("swarm1/voters", CommanderWill(self.client._client_id).serialise()) + + def on_disconnect(self, rc): + pass + + def propogate_result(self, result): + self._messenger.broadcast_message(self._messenger.swarm, ) \ No newline at end of file diff --git a/car/DecisionSystem/CentralisedDecision/messenger.py b/car/DecisionSystem/CentralisedDecision/messenger.py new file mode 100644 index 0000000..73330cb --- /dev/null +++ b/car/DecisionSystem/CentralisedDecision/messenger.py @@ -0,0 +1,138 @@ +import paho.mqtt.client as mqtt +import json +import random + +class Messenger: + _connect_callbacks = [] + _disconnect_callbacks = [] + _message_callbacks = [] + + def broadcast_message(self, message): + """ + Broadcasts the specified message to the swarm based upon its topic(or group). + """ + raise NotImplementedError + + def unicast_message(self, target, message): + """ + Broadcasts the specified message to the single target. + """ + raise NotImplementedError + + def connect(self): + """ + Connect to the swarm. + """ + raise NotImplementedError + + def disconnect(self): + """ + Disconnect from the swarm. + """ + raise NotImplementedError + + def add_connect(self, connect): + """ + Adds a callback to do something else once we are connected. 
+ """ + self._connect_callbacks.append(connect) + + def on_connect(self, code = None): + """ + Called once the messenger connects to the swarm. + """ + for cb in self._connect_callbacks: + cb(code) + + def on_disconnect(self, code = None): + """ + Called when the messenger is disconnected from the swarm. + """ + for cb in self._disconnect_callbacks: + cb(code) + + def add_disconnect_callback(self, on_disconnect): + """ + Adds a callback for when the messenger is disconnected. + """ + self._disconnect_callbacks.append(on_disconnect) + + def add_message_callback(self, on_message): + """ + Adds a callback + """ + self._message_callbacks.append(on_message) + + def on_message(self, message): + """ + Called when the messenger receives a message. + """ + for cb in self._message_callbacks: + cb(message) + + @property + def id(self): + """ + The id for this messenger that is being used in communication. + """ + raise NotImplementedError + + @property + def swarm(self): + """ + Gets the name of the swarm this instance is a part of. + """ + raise NotImplementedError + + +class MqttMessenger(Messenger): + """A messenger that uses MQTT.""" + def __init__(self, configuration): + self._cfg = configuration + self._client = mqtt.Client(client_id=str(random.randint(0,500))) + self._client.on_connect = self.on_connect + self._client.on_message = self.on_message + self._client.on_disconnect = self.on_disconnect + + def on_message(self, client, userdata, message): + Messenger.on_message(self, message) + + def on_connect(self, client, userdata, flags, rc): + # Subscribe to the swarm specified in the config. + self._client.subscribe(self._cfg['mqtt']['swarm']) + + # Also subscribe to our own topic for unicast messages. 
+ self._client.subscribe(self._cfg['mqtt']['swarm'] + str(self._client._client_id)) + Messenger.on_connect(self, rc) + + def on_disconnect(self, client, userdata, rc): + Messenger.on_disconnect(self, rc) + + def broadcast_message(self, message): + self._client.publish(self._cfg['mqtt']['swarm'], message, qos=1) + + def unicast_message(self, target, message): + self._client.publish(target, message, qos=1) + + def connect(self): + try: + self._client.connect(self._cfg['mqtt']['host'], \ + int(self._cfg['mqtt']['port']), \ + int(self._cfg['mqtt']['timeout'])) + except: + print("Could not connect to broker") + return False + + self._client.loop_start() + return True + + def disconnect(self): + self._client.disconnect() + + @property + def id(self): + return self._client._client_id + + @property + def swarm(self): + return self._cfg['mqtt']['swarm'] \ No newline at end of file diff --git a/car/DecisionSystem/CentralisedDecision/videoget.py b/car/DecisionSystem/CentralisedDecision/videoget.py new file mode 100644 index 0000000..f5196c3 --- /dev/null +++ b/car/DecisionSystem/CentralisedDecision/videoget.py @@ -0,0 +1,45 @@ +import numpy as np +import cv2 +from threading import Thread +from queue import Queue +import time + +class VideoGet: + ''' + Code taken from Najam R Syed, available here: + https://github.com/nrsyed/computer-vision/tree/master/multithread + ''' + def __init__(self, q, src): + ''' + Must provide a source so we don't accidently start camera at work. 
+ ''' + self._stream = cv2.VideoCapture(src) + (self.grabbed, self.frame) = self._stream.read() + self.stopped = False + self.q = q + self.q.put(np.copy(self.frame)) + self.src = src + + def start(self): + Thread(target=self.get, args=()).start() + return self + + def get(self): + while not self.stopped: + if not self.grabbed: + # self.stopped = True + print('frame not grabbed') + self._stream.release() + self._stream = cv2.VideoCapture(self.src) + # time.sleep(2) + self.grabbed, self.frame = self._stream.read() + else: + (self.grabbed, self.frame) = self._stream.read() + if self.q.full(): + self.q.get() + self.q.put(np.copy(self.frame)) + time.sleep(0.03) # Approximately 30fps + # Start a new feed. + + def stop(self): + self.stopped = True \ No newline at end of file diff --git a/car/DecisionSystem/DecentralisedActivityFusion/voter.py b/car/DecisionSystem/DecentralisedActivityFusion/voter.py new file mode 100644 index 0000000..c08fdb9 --- /dev/null +++ b/car/DecisionSystem/DecentralisedActivityFusion/voter.py @@ -0,0 +1,128 @@ +import paho.mqtt.client as mqtt +import time +import json +import umsgpack +import numpy as np + +class Voter: + ''' + This class acts to replicate sensor information with the network to come to a consensus + of an activity occurrance. This is based upon research by Song et al. available at: + https://ieeexplore.ieee.org/document/5484586 + + The main advantage of this approach, as apposed to techniques such as by using zookeeper + or consul, is it can be completely decentralised and so works without a central server, + or needing to elect a central server. Additionally, it does not require all nodes + to run a Zookeeper/Consul server instance, which were not designed for these constrained + combat environments, which will fail if half the nodes fail, and also use a lot of resources + for handling services not required by this task. 
+ + The original approach in the paper requires some previous training before sensing, so + that there is a probability of a given action based upon the previous set of actions. + ''' + _votes = {} + _connected_voters = [] + _taking_votes = False + + def __init__(self, on_vote, swarm_name): + ''' + on_vote: Callback to get the required vote to broadcast. + ''' + # Load config file + cfg = None + with open('config.json') as json_config: + cfg = json.load(json_config) + self._cfg = cfg + self.on_vote = on_vote + self._swarm = swarm_name + self._client = mqtt.Client() + self._client.on_message = self.on_message + self._client.on_connect = self.on_connect + self._client.connect(cfg["mqtt"]["host"], cfg["mqtt"]["port"], cfg["mqtt"]["timeout"]) + self._client.loop_start() + + def submit_vote(self): + # Publish to swarm where all other voters will receive a vote. + self._client.publish(self._swarm, self.collect_vote) + self._taking_votes = True + time.sleep(self._cfg["mqtt"]["timeout"]) + self._taking_votes = False + # Wait a certain amount of time for responses, then fuse the information. + self.fuse_algorithm() + + # Need the error and number of timestamps since voting started to finalise the consensus. + + def fuse_algorithm(self): + # First calculate vi -> the actual vote that is taken + # (Or the probability that the observation is a label for each) + # We're just going to be doing 1 for the detected and 0 for all others. + # vi is for each hand (action in paper), but we're just going to do a single + # hand for our purposes. Will be able to use the CNN for all hands/gestures if we want to. + vi = np.zeros(6,1) + # Set correct vi. + vote = self.on_vote() + vi[vote] = 1 + # Now send this off to the other nodes. Potentially using gossip... + + # Set diagonal of ANDvi to elements of vi. + # This should actually be ANDvj, as it is for each observation received. 
+ ANDvi = np.diag(vi.flatten()) + + # Nee + + # M is the probability of going from one state to the next, which + # is assumed to be uniform for our situation - someone is just as likely + # to raise 5 fingers from two or any other. + # And so a 6x6 matrix is generated with all same probability to show this. + # Remember they could be holding up no fingers... + # m = np.full((6,6), 0.2) + + # Y1T = np.full((6,1),1) + + # Compute consensus state estimate by taking difference between our observations + # and all others individually. + + # Moving to an approach that does not require the previous + # timestep (or so much math...) + # First take other information and fuse, using algorithm + # as appropriate. + pass + + def custom_fuse(self): + vi = np.zeros(6,1) + # Set correct vi. + vote = self.on_vote() + vi[vote] = 1 + + + def on_message(self, client, userdata, message): + try: + message_dict = umsgpack.unpackb(message.payload) + except: + print("Incorrect message received") + return + + if message_dict["type"] == "vote": + # received a vote + if self._taking_votes: + self._votes[message_dict["client"]] = message_dict["vote"] + + elif message_dict["type"] == "connect": + # voter connected to the swarm + self._connected_voters.append(message_dict["client"]) + + elif message_dict["type"] == "disconnect": + # Sent as the voter's will message + self._connected_voters.remove(message_dict["client"]) + + def on_connect(self, client, userdata, flags, rc): + print("Connected with result code " + str(rc)) + self._client.subscribe(self._swarm) + + def collect_vote(self): + vote_message = umsgpack.packb({"type": "vote", + "client":self._client._client_id, "vote": self.on_vote()}) + return vote_message + + def start_vote(self): + pass \ No newline at end of file diff --git a/car/DecisionSystem/__init__.py b/car/DecisionSystem/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/car/DecisionSystem/messages.py b/car/DecisionSystem/messages.py new file mode 100644 
index 0000000..5d47435 --- /dev/null +++ b/car/DecisionSystem/messages.py @@ -0,0 +1,101 @@ +import umsgpack +import uuid + +class Message: + _type = None + def __init__(self, sender = "", data = {}): + self._sender = sender + self._data = data + + @property + def sender(self): + return self._sender + + @sender.setter + def sender(self, value): + self._sender = value + + @property + def data(self): + return self._data + + # I love using keywords... + @property + def type(self): + return self._type + + @type.setter + def type(self, value): + self._type = value + + def serialise(self): + return umsgpack.packb({"type":self.type, "sender": self.sender, "data": self.data}) + +# SHould make this static in Message class. +def deserialise(obj): + """ + Deserialises a given messagepack object into a Message. + """ + m = Message() + unpacked = umsgpack.unpackb(obj) + print('Unpacked Object') + print(unpacked) + m.type = unpacked["type"] + m._sender = unpacked["sender"] + m._data = unpacked["data"] + return m + +class RequestLeader(Message): + _type = "RequestLeader" + +class ProposeMessage(Message): + _type = "Propose" + +class ElectionVote(Message): + _type = "Elect" + +class Commit(Message): + _type = "Commit" + +class ConnectSwarm(Message): + _type = "connect" + +class RequestVote(Message): + _type = "reqvote" + +class ConnectResponse(Message): + _type = "accepted" + +class VoterWill(Message): + _type = "disconnectedvoter" + +class CommanderWill(Message): + _type = "disconnectedcommander" + +class SubmitVote(Message): + _type = "vote" + + def __init__(self, vote = None, sender = "", data = {}): + Message.__init__(self, sender, data) + self._data["vote"] = vote + + @property + def vote(self): + return self._data["vote"] + + @vote.setter + def vote(self, value): + self._data["vote"] = value + +class GetSwarmParticipants(Message): + _type = "listening" + +class VoteResult(Message): + _type = "voteresult" + + def __init__(self, vote, sender='', data={}): + 
super().__init__(sender=sender, data=data) + self._data["vote"] = vote + +class ClientVoteRequest(Message): + _type = "clientvoterequest" \ No newline at end of file diff --git a/car/Dockerfile b/car/Dockerfile new file mode 100644 index 0000000..3de27be --- /dev/null +++ b/car/Dockerfile @@ -0,0 +1,30 @@ +FROM python:3.6-slim + +RUN apt-get update +# OpenCV has a LOT of dependencies. +RUN apt-get install -y \ + libglib2.0-0 \ + libsm6 \ + libxext6 \ + libswscale-dev \ + libtbb2 \ + libtbb-dev \ + libjpeg-dev \ + libpng-dev \ + libtiff-dev \ + libavformat-dev \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* + +COPY requirements.txt / +RUN pip install --trusted-host pypi.python.org -r requirements.txt + +WORKDIR /app + +COPY . /app + +# We aren't listening, just connecting, so probs won't need this. +# EXPOSE 1883 +ENV PYTHONPATH=/app + +CMD ["python", "DecisionSystem/CentralisedDecision/cameraserver.py", "-V", "/app/HandRecognitionMacbookFixed.mp4"] \ No newline at end of file diff --git a/car/GestureRecognition/HandRecHSV.py b/car/GestureRecognition/HandRecHSV.py new file mode 100644 index 0000000..2f0222a --- /dev/null +++ b/car/GestureRecognition/HandRecHSV.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +""" +Created on Thu Nov 22 10:51:21 2018 + +@author: pivatom +""" + +import numpy as np +import cv2 + +img = cv2.imread('H:\car\GestureRecognition\IMG_0825.jpg', 1) +# img = cv2.imread('H:\car\GestureRecognition\IMG_0818.png', 1) + +# Downscale the image +img = cv2.resize(img, None, fx=0.1, fy=0.1, interpolation = cv2.INTER_AREA) + +e1 = cv2.getTickCount() + +# Hand Localization... possibly with YOLOv3? v2 is faster though... 
+ + +img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) + +# Need to shift red pixels so they can be 0-20 rather than 250-~20 +img_hsv[:,:,0] = img_hsv[:,:,0] + 30 +img_hsv[:,:,0] = np.where(img_hsv[:,:,0] > 179, img_hsv[:,:,0] - 179, img_hsv[:,:,0]) + +img_hsv = cv2.GaussianBlur(img_hsv,(5,5),0) + +lower_skin = (0, 0, 153) +upper_skin = (45, 153, 255) + +# Only need mask, as we can just use this to do the hand segmentation. +mask = cv2.inRange(img_hsv, lower_skin, upper_skin) + +# This takes a whole millisecond (approx), and does not seem very worth the cost. +blur = cv2.GaussianBlur(mask,(5,5),0) +ret, img_thresh = cv2.threshold(blur, 50, 255, cv2.THRESH_BINARY) + +# Uncomment if not using blur and threshold. +# img_thresh = mask + +k = np.sum(img_thresh) / 255 + +# Taking indices for num of rows. +x_ind = np.arange(0,img_thresh.shape[1]) +y_ind = np.arange(0,img_thresh.shape[0]) +coords_x = np.zeros((img_thresh.shape[0], img_thresh.shape[1])) +coords_y = np.zeros((img_thresh.shape[0], img_thresh.shape[1])) +coords_x[:,:] = x_ind + + +# Even this is extremely quick as it goes through rows in the numpy array, which in python is much faster than columns +for element in y_ind: + coords_y[element,:] = element + +# Now need to get the average x value and y value for centre of gravity +xb = int(np.sum(coords_x[img_thresh == 255])/k) +yb = int(np.sum(coords_y[img_thresh == 255])/k) + +centre = (int(np.sum(coords_x[img_thresh == 255])/k), int(np.sum(coords_y[img_thresh == 255])/k)) + +# Calculate radius of circle: +# May need to calculate diameter as well. +# Just take min/max x values and y values +x_min = np.min(coords_x[img_thresh == 255]) +x_max = np.max(coords_x[img_thresh == 255]) +y_min = np.min(coords_y[img_thresh == 255]) +y_max = np.max(coords_y[img_thresh == 255]) + +candidate_pts = [(x_min, y_min), (x_min, y_max), (x_max, y_min), (x_max, y_max)] +radius = 0 + +# Check with each point to see which is furthest from the centre. 
+for pt in candidate_pts: + # Calculate Euclydian Distance + new_distance = ((pt[0] - centre[0])**2 + (pt[1] - centre[1])**2)**(1/2) + if new_distance > radius: + radius = new_distance + +radius = int(radius * 0.52) + +# 140 needs to be replaced with a predicted value. i.e. not be a magic number. +# cv2.circle(img_thresh, centre, radius, (120,0,0), 3) + +def calc_pos_y(x): + return int((radius**2 - (x - centre[0])**2)**(1/2) + centre[1]) + +# Now go around the circle to calculate num of times going 0->255 or vice-versa. +# First just do it the naive way with loops. +# Equation of the circle: +# y = sqrt(r2 - (x-c)2) + c +# Will just increment x to check, no need to loop y as well. +# This is extremely slow, need to speed it up by removing for loop. +# Brings speed down to 20 fps. +# This is actually fast, it was just the print debug statements that made it slow, takes just 6ms... +# Could try a kerel method? +prev_x = centre[0] - radius +prev_y = [calc_pos_y(centre[0] - radius), calc_pos_y(centre[0] - radius)] +num_change = 0 +for x in range(centre[0] - radius + 1, centre[0] + radius): + ypos = calc_pos_y(x) + y = [ypos, centre[1] - (ypos-centre[1])] + if(img_thresh[y[0], x] != img_thresh[prev_y[0], prev_x]): + num_change += 1 + if img_thresh[y[1], x] != img_thresh[prev_y[1], prev_x] and y[0] != y[1]: + num_change += 1 + prev_x = x + prev_y = y + +fingers = num_change / 2 - 1 + +print("Num Fingers: " + str(fingers)) + +e2 = cv2.getTickCount() +t = (e2 - e1)/cv2.getTickFrequency() +print( t ) + +cv2.imshow("Threshold", img_thresh) +cv2.waitKey(0) +cv2.destroyAllWindows() \ No newline at end of file diff --git a/car/GestureRecognition/HandRecV2.py b/car/GestureRecognition/HandRecV2.py new file mode 100644 index 0000000..c2c7c53 --- /dev/null +++ b/car/GestureRecognition/HandRecV2.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +""" +Created on Thu Nov 22 09:21:04 2018 + +@author: pivatom +""" + +import numpy as np +import cv2 + +min_seg_threshold = 1.05 
+max_seg_threshold = 4 + +def calcSkinSample(event, x, y, flags, param): + if event == cv2.EVENT_FLAG_LBUTTON: + sample = img[x:x+10, y:y+10] + min = 255 + max = 0 + for line in sample: + avg = np.sum(line)/10 + if avg < min: + min = avg + if avg > max: + max = avg + min_seg_threshold = min + max_seg_threshold = max + +def draw_rect(event, x, y, flags, param): + if event == cv2.EVENT_FLAG_LBUTTON: + print("LbuttonClick") + cv2.rectangle(img, (x,y), (x+10, y+10), (0,0,255), 3) + +img = cv2.imread('H:\car\GestureRecognition\IMG_0818.png', 1) + +# Downscale the image +img = cv2.resize(img, None, fx=0.1, fy=0.1, interpolation = cv2.INTER_AREA) + +cv2.namedWindow("Hand") +cv2.setMouseCallback("Hand", draw_rect) + +# prevent divide by zero, by just forcing pixel to be ignored. +#np.where(img[:,:,1] == 0, 0, img[:,:,1]) +#img[(img[:,:,2]/img[:,:,1] > min_seg_threshold) & (img[:,:,2]/img[:,:,1] < max_seg_threshold)] = [255,255,255] + +while(1): + cv2.imshow("Hand", img) + if cv2.waitKey(0): + break +cv2.destroyAllWindows() diff --git a/car/GestureRecognition/IMG_0818.png b/car/GestureRecognition/IMG_0818.png new file mode 100644 index 0000000..b4f9d25 Binary files /dev/null and b/car/GestureRecognition/IMG_0818.png differ diff --git a/car/GestureRecognition/IMG_0825.jpg b/car/GestureRecognition/IMG_0825.jpg new file mode 100644 index 0000000..dde1722 Binary files /dev/null and b/car/GestureRecognition/IMG_0825.jpg differ diff --git a/car/GestureRecognition/Neural Network hand Tracking.pdf b/car/GestureRecognition/Neural Network hand Tracking.pdf new file mode 100644 index 0000000..d4da4ed Binary files /dev/null and b/car/GestureRecognition/Neural Network hand Tracking.pdf differ diff --git a/car/GestureRecognition/SimpleHandRecogniser.py b/car/GestureRecognition/SimpleHandRecogniser.py new file mode 100644 index 0000000..ff78ce8 --- /dev/null +++ b/car/GestureRecognition/SimpleHandRecogniser.py @@ -0,0 +1,381 @@ +import numpy as np +import cv2 + +from 
GestureRecognition.handrecogniser import HandRecogniser + +class SimpleHandRecogniser(HandRecogniser): + def __init__(self, frame): + self.img = frame + self.graph = None + self.sess = None + self.img_cut = None + + def __calc_pos_y(self, x, radius, centre): + """ + Calculates the position of y on a given circle radius and centre, given coordinate x. + """ + return int((radius**2 - (x - centre[0])**2)**(1/2) + centre[1]) + + def __segment_image(self): + """ + Segments the hand from the rest of the image to get a threshold. + """ + self.img_cut = cv2.GaussianBlur(self.img_cut, (5, 5), 0) + + lower_skin = (0, 0, 153) + upper_skin = (45, 153, 255) + + # Only need mask, as we can just use this to do the hand segmentation. + self.img_cut = cv2.inRange(self.img_cut, lower_skin, upper_skin) + + # Apply another blur to rmeove any small holes/noise + self.img_cut = self.__denoise(self.img_cut) + _, self.img_cut = cv2.threshold(self.img_cut, 50, 255, cv2.THRESH_BINARY) + + def __denoise(self, image): + """ + Applies a 5x5 gaussian blur to remove noise from the image. + """ + return cv2.GaussianBlur(image, (5, 5), 0) + + def __calc_circle(self, image, radius_percent=0.6): + """ + Calculates the equation of the circle (radius, centre) from a given + threshold image, so that the circle is the center of gravity of the + given threshold pixels, and the radius is by default 55% of the total + size. + """ + k = np.sum(self.img_cut) / 255 + + # Taking indices for num of rows. 
+ x_ind = np.arange(0, self.img_cut.shape[1]) + y_ind = np.arange(0, self.img_cut.shape[0]) + coords_x = np.zeros((self.img_cut.shape[0], self.img_cut.shape[1])) + coords_y = np.zeros((self.img_cut.shape[0], self.img_cut.shape[1])) + coords_x[:, :] = x_ind + + # Even this is extremely quick as it goes through rows in the numpy array, + # which in python is much faster than columns + for element in y_ind: + coords_y[element, :] = element + + # Now need to get the average x value and y value for centre of gravity + centre = (int(np.sum(coords_x[self.img_cut == 255])/k), int(np.sum(coords_y[self.img_cut == 255])/k)) + + # Calculate radius of circle: + # May need to calculate diameter as well. + # Just take min/max x values and y values + x_min = np.min(coords_x[self.img_cut == 255]) + x_max = np.max(coords_x[self.img_cut == 255]) + y_min = np.min(coords_y[self.img_cut == 255]) + y_max = np.max(coords_y[self.img_cut == 255]) + + candidate_pts = [(x_min, y_min), (x_min, y_max), (x_max, y_min), (x_max, y_max)] + radius = 0 + + # Check with each point to see which is furthest from the centre. + for pt in candidate_pts: + # Calculate Euclydian Distance + new_distance = ((pt[0] - centre[0])**2 + (pt[1] - centre[1])**2)**(1/2) + if new_distance > radius: + radius = new_distance + + radius = int(radius * radius_percent) + + return radius, centre + + def __calc_circles(self, image, radius_percent_range=[0.6, 0.8], step = 0.1): + """ + Calculates the equation of the circle (radius, centre), but with + several radii so that we can get a more accurate estimate of from a given + threshold image, so that the circle is the center of gravity of the + given threshold pixels. + """ + k = np.sum(self.img_cut) / 255 + + # Taking indices for num of rows. 
+ x_ind = np.arange(0,self.img_cut.shape[1]) + y_ind = np.arange(0,self.img_cut.shape[0]) + coords_x = np.zeros((self.img_cut.shape[0], self.img_cut.shape[1])) + coords_y = np.zeros((self.img_cut.shape[0], self.img_cut.shape[1])) + coords_x[:,:] = x_ind + + # Even this is extremely quick as it goes through rows in the numpy array, which in python is much faster than columns + for element in y_ind: + coords_y[element,:] = element + + # Now need to get the average x value and y value for centre of gravity + centre = (int(np.sum(coords_x[self.img_cut == 255])/k), int(np.sum(coords_y[self.img_cut == 255])/k)) + + # Calculate radius of circle: + # May need to calculate diameter as well. + # Just take min/max x values and y values + x_min = np.min(coords_x[self.img_cut == 255]) + x_max = np.max(coords_x[self.img_cut == 255]) + y_min = np.min(coords_y[self.img_cut == 255]) + y_max = np.max(coords_y[self.img_cut == 255]) + + candidate_pts = [(x_min, y_min), (x_min, y_max), (x_max, y_min), (x_max, y_max)] + radius = 0 + + # Check with each point to see which is furthest from the centre. + for pt in candidate_pts: + # Calculate Euclydian Distance + new_distance = ((pt[0] - centre[0])**2 + (pt[1] - centre[1])**2)**(1/2) + if new_distance > radius: + radius = new_distance + + radii = [] + for i in range(radius_percent_range[0], radius_percent_range[1], step): + radii += int(radius * i) + + return radii, centre + + def __shift_pixels(self, image, shift_radius): + image[:, :, 0] = image[:, :, 0] + shift_radius + image[:, :, 0] = np.where(image[:, :, 0] > 179, image[:, :, 0] - 179, image[:, :, 0]) + return image + + def set_frame(self, frame): + self.img = frame + + # Source: Victor Dibia + # Link: https://github.com/victordibia/handtracking + # Taken the code straight from his example, as it works perfectly. This is specifically + # from the load_inference_graph method that he wrote, and will load the graph into + # memory if one has not already been loaded for this object. 
+ # def load_inference_graph(self): + # """Loads a tensorflow model checkpoint into memory""" + + # if self.graph != None and self.sess != None: + # # Don't load more than once, to save time... + # return + + # PATH_TO_CKPT = '/Users/piv/Documents/Projects/car/GestureRecognition/frozen_inference_graph.pb' + # # load frozen tensorflow model into memory + # detection_graph = tf.Graph() + # with detection_graph.as_default(): + # od_graph_def = tf.GraphDef() + # with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid: + # serialized_graph = fid.read() + # od_graph_def.ParseFromString(serialized_graph) + # tf.import_graph_def(od_graph_def, name='') + # sess = tf.Session(graph=detection_graph) + # self.graph = detection_graph + # self.sess = sess + + + # Source: Victor Dibia + # Link: https://github.com/victordibia/handtracking + # Taken the code straight from his example, as it works perfectly. This is specifically + # from the detect_hand method that he wrote, as other processing is required for the + # hand recognition to work correctly. + # def detect_hand_tensorflow(self, detection_graph, sess): + # """ Detects hands in a frame using a CNN + + # detection_graph -- The CNN to use to detect the hand. 
+ # sess -- THe tensorflow session for the given graph + # """ + + # image_tensor = detection_graph.get_tensor_by_name('image_tensor:0') + + # detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0') + + # detection_scores = detection_graph.get_tensor_by_name('detection_scores:0') + + # detection_classes = detection_graph.get_tensor_by_name('detection_classes:0') + + # num_detections = detection_graph.get_tensor_by_name('num_detections:0') + + # img_expanded = np.expand_dims(self.img, axis=0) + + # (boxes, scores, classes, num) = sess.run( + # [detection_boxes, detection_scores, detection_classes, num_detections], + # feed_dict={image_tensor: img_expanded}) + # print('finished detection') + # return np.squeeze(boxes), np.squeeze(scores) + + def load_cv_net(self, graph_path, names_path): + """Loads a tensorflow neural object detection network using openCV + + Arguments + graph_path: Path to the tensorflow frozen inference graph (something.pb) + names_path: Path to the tensorflow (something.pbtext) file. + """ + self.net = cv2.dnn.readNetFromTensorflow(graph_path, names_path) + + def detect_hand_opencv(self): + """Performs hand detection using a CNN from tensorflow using opencv. + + detection_graph -- The CNN to use to detect the hand. + sess -- THe tensorflow session for the given graph + """ + if self.img is None: + return + + rows = self.img.shape[0] + cols = self.img.shape[1] + + self.net.setInput(cv2.dnn.blobFromImage(self.img, size=(300, 300), swapRB=True, crop=False)) + cv_out = self.net.forward() + + boxes = [] + scores = [] + + for detection in cv_out[0, 0, :, :]: + score = float(detection[2]) + # TODO: Need to make this the confidence threshold... + if score > 0.6: + left = detection[3] * cols + top = detection[4] * rows + right = detection[5] * cols + bottom = detection[6] * rows + boxes.append((left, top, right, bottom)) + scores.append(score) + else: + # Scores are in descending order... 
+ break + + return boxes, scores + + def get_best_hand(self, boxes, scores, conf_thresh, nms_thresh): + """ + Gets the best hand bounding box by inspecting confidence scores and overlapping + boxes, as well as the overall size of each box to determine which hand (if multiple present) + should be tested to recognise. + """ + print(scores) + boxes = boxes[scores > conf_thresh] + scores = scores[scores > conf_thresh] + # Use NMS to get rid of heavily overlapping boxes. + # This wasn't used in the tensorflow example that was found, however probably a + # good idea to use it just in case. + print(boxes.shape) + if boxes.shape[0] == 0: + print("No good boxes found") + return None + elif boxes.shape[0] == 1: + print("Only one good box!") + box = boxes[0] + box[0] = box[0] * self.img.shape[0] + box[1] = box[1] * self.img.shape[1] + box[2] = box[2] * self.img.shape[0] + box[3] = box[3] * self.img.shape[1] + return box.astype(int) + else: + boxes[:][2] = ((boxes[:][2] - boxes[:][0]) * self.img.shape[0]).astype(int) + boxes[:][3] = ((boxes[:][3] - boxes[:][1]) * self.img.shape[1]).astype(int) + boxes[:][0] = (boxes[:][0] * self.img.shape[0]).astype(int) + boxes[:][1] = (boxes[:][1] * self.img.shape[1]).astype(int) + + # Can't seem to get this to work... + # indices = cv2.dnn.NMSBoxes(boxes, scores, conf_thresh, nms_thresh) + + print("Num boxes: %s" % boxes.shape[0]) + # Finally calculate area of each box to determine which hand is clearest (biggest in image) + # Just does the most confident for now. + best_box = boxes[0] + best_index = None + i = 0 + for box in boxes: + if box[2] * box[3] > best_box[2] * best_box[3]: + best_box = box + best_index = i + i += 1 + return boxes[i - 1] + + def get_gesture(self): + """ + Calculates the actual gesture, returning the number of fingers + seen in the image. + """ + print('Getting Gesture') + if self.img is None: + print('There is no image') + return -1 + # First cut out the frame using the neural network. 
+ # self.load_inference_graph() + # print("loaded inference graph") + # detections, scores = self.detect_hand_tensorflow(self.graph, self.sess) + + print('Loading openCV net') + self.load_cv_net('/Users/piv/Documents/Projects/car/GestureRecognition/frozen_inference_graph.pb', + '/Users/piv/Documents/Projects/car/GestureRecognition/graph.pbtxt') + + detections, scores = self.detect_hand_opencv() + + # print("Getting best hand") + # best_hand = self.get_best_hand(detections, scores, 0.7, 0.5) + # if best_hand is not None: + # self.img = self.img[best_hand[0] - 30:best_hand[2] + 30, best_hand[1] - 30:best_hand[3] + 30] + + if len(detections) > 0: + print("Cutting out the hand!") + self.img_cut = self.img[detections[0] - 30:detections[2] + 30, detections[1] - 30:detections[3] + 30] + else: + self.img_cut = self.img + + print('Attempting to use pure hand recognition') + self.img_cut = cv2.cvtColor(self.img_cut, cv2.COLOR_BGR2HSV) + + # Need to shift red pixels so they can be 0-20 rather than 250-~20 + self.img_cut = self.__shift_pixels(self.img_cut, 30) + + self.img_cut = self.__denoise(self.img_cut) + self.__segment_image() + + print('calculating circle') + # Could calculate multiple circles to get probability + # for each gesture (i.e. calc num of each gesture recongised and take percentage + # as the probability). + radius, centre = self.__calc_circle(self.img_cut) + print('Got circle') + + # Now go around the circle to calculate num of times going 0->255 or vice-versa. + # First just do it the naive way with loops. + # Equation of the circle: + # y = sqrt(r2 - (x-c)2) + c + prev_x = centre[0] - radius + prev_y = [self.__calc_pos_y(centre[0] - radius, radius, centre), + self.__calc_pos_y(centre[0] - radius, radius, centre)] + num_change = 0 + + # Make sure x is also within bounds. 
+ x_start = centre[0] - radius + 1 + if x_start < 0: + x_start = 0 + + x_end = centre[0] + radius + if x_end >= self.img_cut.shape[1]: + x_end = self.img_cut.shape[1] - 1 + + for x in range(x_start, x_end): + # Need to check circle is inside the bounds. + ypos = self.__calc_pos_y(x, radius, centre) + # y above centre (ypos) and y below radius) + y = [ypos, centre[1] - (ypos-centre[1])] + + if y[0] < 0: + y[0] = 0 + if y[0] >= self.img_cut.shape[0]: + y[0] = self.img_cut.shape[0] - 1 + if y[1] < 0: + y[1] = 0 + if y[1] >= self.img_cut.shape[0]: + y[1] = self.img_cut.shape[0] - 1 + if(self.img_cut[y[0], x] != self.img_cut[prev_y[0], prev_x]): + num_change += 1 + if self.img_cut[y[1], x] != self.img_cut[prev_y[1], prev_x] and y[0] != y[1]: + num_change += 1 + prev_x = x + prev_y = y + + print('Finished calculating, returning') + print(num_change) + return int(num_change / 2 - 1), self.img + + def get_gesture_multiple_radii(self): + pass + + def calc_hand_batch(self, batch): + pass diff --git a/car/GestureRecognition/__init__.py b/car/GestureRecognition/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/car/GestureRecognition/frozen_inference_graph.pb b/car/GestureRecognition/frozen_inference_graph.pb new file mode 100644 index 0000000..0be3459 Binary files /dev/null and b/car/GestureRecognition/frozen_inference_graph.pb differ diff --git a/car/GestureRecognition/graph.pbtxt b/car/GestureRecognition/graph.pbtxt new file mode 100644 index 0000000..8c3a660 --- /dev/null +++ b/car/GestureRecognition/graph.pbtxt @@ -0,0 +1,3146 @@ +node { + name: "image_tensor" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_UINT8 + } + } + attr { + key: "shape" + value { + shape { + dim { + size: -1 + } + dim { + size: -1 + } + dim { + size: -1 + } + dim { + size: 3 + } + } + } + } +} +node { + name: "Preprocessor/mul" + op: "Mul" + input: "image_tensor" + input: "Preprocessor/mul/x" +} +node { + name: "Preprocessor/sub" + op: "Sub" + input: 
"Preprocessor/mul" + input: "Preprocessor/sub/y" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_0/convolution" + op: "Conv2D" + input: "Preprocessor/sub" + input: "FeatureExtractor/MobilenetV1/Conv2d_0/weights" + input: "^FeatureExtractor/Assert/Assert" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 2 + i: 2 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_0/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_0/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_0/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_0/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_0/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_0/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_0/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_0/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_0/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_1_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_1_depthwise/BatchNorm/gamma" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_1_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_1_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_1_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_1_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_1_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_1_pointwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_1_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_1_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: 
"FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_1_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_2_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 2 + i: 2 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_2_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_2_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_2_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_2_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_2_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_2_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_2_pointwise/BatchNorm/beta" + 
input: "FeatureExtractor/MobilenetV1/Conv2d_2_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_2_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_2_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_3_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_3_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_3_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_3_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_3_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_depthwise/Relu6" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_3_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_3_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_3_pointwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_3_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_3_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_3_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_4_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 2 + i: 2 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_4_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_4_depthwise/BatchNorm/beta" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_4_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_4_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_4_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_4_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_4_pointwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_4_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_4_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_4_pointwise/Relu6" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_5_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_5_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_5_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_5_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_5_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_5_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_5_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_5_pointwise/BatchNorm/beta" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_5_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_5_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_5_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_6_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 2 + i: 2 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_6_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_6_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_6_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_6_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_depthwise/Relu6" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_6_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_6_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_6_pointwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_6_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_6_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_6_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_7_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_7_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_7_depthwise/BatchNorm/beta" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_7_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_7_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_7_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_7_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_7_pointwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_7_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_7_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_7_pointwise/Relu6" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_8_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_8_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_8_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_8_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_8_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_8_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_8_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_8_pointwise/BatchNorm/beta" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_8_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_8_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_8_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_9_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_9_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_9_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_9_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_9_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_depthwise/Relu6" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_9_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_9_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_9_pointwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_9_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_9_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_9_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_10_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_10_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_10_depthwise/BatchNorm/beta" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_10_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_10_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_10_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_10_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_10_pointwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_10_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_10_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_10_pointwise/Relu6" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_11_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_11_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_11_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_11_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_11_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_11_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_11_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_11_pointwise/BatchNorm/beta" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_11_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_11_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "BoxPredictor_0/ClassPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_pointwise/Relu6" + input: "BoxPredictor_0/ClassPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_0/ClassPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_0/ClassPredictor/convolution" + input: "BoxPredictor_0/ClassPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "BoxPredictor_0/BoxEncodingPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_pointwise/Relu6" + input: "BoxPredictor_0/BoxEncodingPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "loc_pred_transposed" + value { + b: true + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_0/BoxEncodingPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_0/BoxEncodingPredictor/convolution" + input: "BoxPredictor_0/BoxEncodingPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: 
"FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_11_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_12_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 2 + i: 2 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_12_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_12_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_12_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_12_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_pointwise/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_12_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_12_pointwise/BatchNorm/gamma" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_12_pointwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_12_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_12_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_depthwise/depthwise" + op: "DepthwiseConv2dNative" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_12_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_depthwise/depthwise_weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_depthwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_depthwise/depthwise" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_depthwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_depthwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_depthwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_depthwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_depthwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_depthwise/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_pointwise/convolution" + op: "Conv2D" + input: 
"FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_depthwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_pointwise/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_pointwise/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_pointwise/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_pointwise/BatchNorm/FusedBatchNorm" +} +node { + name: "BoxPredictor_1/ClassPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_pointwise/Relu6" + input: "BoxPredictor_1/ClassPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_1/ClassPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_1/ClassPredictor/convolution" + input: "BoxPredictor_1/ClassPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "BoxPredictor_1/BoxEncodingPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_pointwise/Relu6" + input: 
"BoxPredictor_1/BoxEncodingPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "loc_pred_transposed" + value { + b: true + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_1/BoxEncodingPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_1/BoxEncodingPredictor/convolution" + input: "BoxPredictor_1/BoxEncodingPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/MobilenetV1/Conv2d_13_pointwise/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/BatchNorm/FusedBatchNorm" 
+} +node { + name: "FeatureExtractor/MobilenetV1/concat" + op: "ConcatV2" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_2_1x1_256/Relu6" + input: "FeatureExtractor/MobilenetV1/concat/axis" +} +node { + name: "FeatureExtractor/MobilenetV1/concat_1" + op: "ConcatV2" + input: "FeatureExtractor/MobilenetV1/concat" + input: "FeatureExtractor/MobilenetV1/concat_1/axis" +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/concat_1" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 2 + i: 2 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/BatchNorm/FusedBatchNorm" +} +node { + name: "BoxPredictor_2/ClassPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/Relu6" + input: 
"BoxPredictor_2/ClassPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_2/ClassPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_2/ClassPredictor/convolution" + input: "BoxPredictor_2/ClassPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "BoxPredictor_2/BoxEncodingPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/Relu6" + input: "BoxPredictor_2/BoxEncodingPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "loc_pred_transposed" + value { + b: true + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_2/BoxEncodingPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_2/BoxEncodingPredictor/convolution" + input: "BoxPredictor_2/BoxEncodingPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/convolution" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/concat_2" + op: "ConcatV2" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_3_1x1_128/Relu6" + input: "FeatureExtractor/MobilenetV1/concat_2/axis" +} +node { + name: "FeatureExtractor/MobilenetV1/concat_3" + op: "ConcatV2" + input: "FeatureExtractor/MobilenetV1/concat_2" + input: "FeatureExtractor/MobilenetV1/concat_3/axis" +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/concat_3" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 2 + i: 2 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/BatchNorm/beta" + 
input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/BatchNorm/FusedBatchNorm" +} +node { + name: "BoxPredictor_3/ClassPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/Relu6" + input: "BoxPredictor_3/ClassPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_3/ClassPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_3/ClassPredictor/convolution" + input: "BoxPredictor_3/ClassPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "BoxPredictor_3/BoxEncodingPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/Relu6" + input: "BoxPredictor_3/BoxEncodingPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "loc_pred_transposed" + value { + b: true + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_3/BoxEncodingPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_3/BoxEncodingPredictor/convolution" + input: "BoxPredictor_3/BoxEncodingPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: 
"FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/concat_4" + op: "ConcatV2" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_4_1x1_128/Relu6" + input: "FeatureExtractor/MobilenetV1/concat_4/axis" +} +node { + name: "FeatureExtractor/MobilenetV1/concat_5" + op: "ConcatV2" + input: "FeatureExtractor/MobilenetV1/concat_4" + input: "FeatureExtractor/MobilenetV1/concat_5/axis" +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/concat_5" + input: 
"FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 2 + i: 2 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/BatchNorm/FusedBatchNorm" +} +node { + name: "BoxPredictor_4/ClassPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/Relu6" + input: "BoxPredictor_4/ClassPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_4/ClassPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_4/ClassPredictor/convolution" + input: "BoxPredictor_4/ClassPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "BoxPredictor_4/BoxEncodingPredictor/convolution" + op: "Conv2D" + 
input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/Relu6" + input: "BoxPredictor_4/BoxEncodingPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "loc_pred_transposed" + value { + b: true + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_4/BoxEncodingPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_4/BoxEncodingPredictor/convolution" + input: "BoxPredictor_4/BoxEncodingPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_4_3x3_s2_256/Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/Relu6" + op: "Relu6" + 
input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/BatchNorm/FusedBatchNorm" +} +node { + name: "FeatureExtractor/MobilenetV1/concat_6" + op: "ConcatV2" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_1_Conv2d_5_1x1_64/Relu6" + input: "FeatureExtractor/MobilenetV1/concat_6/axis" +} +node { + name: "FeatureExtractor/MobilenetV1/concat_7" + op: "ConcatV2" + input: "FeatureExtractor/MobilenetV1/concat_6" + input: "FeatureExtractor/MobilenetV1/concat_7/axis" +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/concat_7" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 2 + i: 2 + i: 1 + } + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/BatchNorm/FusedBatchNorm" + op: "FusedBatchNorm" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/convolution" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/BatchNorm/gamma" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/BatchNorm/beta" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/BatchNorm/moving_mean" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/BatchNorm/moving_variance" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "epsilon" + value { + f: 0.001 + } + } +} +node { + name: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/Relu6" + op: "Relu6" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/BatchNorm/FusedBatchNorm" +} +node { + name: "BoxPredictor_5/ClassPredictor/convolution" + op: 
"Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/Relu6" + input: "BoxPredictor_5/ClassPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_5/ClassPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_5/ClassPredictor/convolution" + input: "BoxPredictor_5/ClassPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "BoxPredictor_5/BoxEncodingPredictor/convolution" + op: "Conv2D" + input: "FeatureExtractor/MobilenetV1/Conv2d_13_pointwise_2_Conv2d_5_3x3_s2_128/Relu6" + input: "BoxPredictor_5/BoxEncodingPredictor/weights" + attr { + key: "data_format" + value { + s: "NHWC" + } + } + attr { + key: "loc_pred_transposed" + value { + b: true + } + } + attr { + key: "padding" + value { + s: "SAME" + } + } + attr { + key: "strides" + value { + list { + i: 1 + i: 1 + i: 1 + i: 1 + } + } + } +} +node { + name: "BoxPredictor_5/BoxEncodingPredictor/BiasAdd" + op: "BiasAdd" + input: "BoxPredictor_5/BoxEncodingPredictor/convolution" + input: "BoxPredictor_5/BoxEncodingPredictor/biases" + attr { + key: "data_format" + value { + s: "NHWC" + } + } +} +node { + name: "concat/axis_flatten" + op: "Const" + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + int_val: -1 + tensor_shape { + dim { + size: 1 + } + } + } + } + } +} +node { + name: "BoxPredictor_0/ClassPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_0/ClassPredictor/BiasAdd" +} +node { + name: "BoxPredictor_1/ClassPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_1/ClassPredictor/BiasAdd" +} +node { + name: "BoxPredictor_2/ClassPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_2/ClassPredictor/BiasAdd" +} +node { + name: "BoxPredictor_3/ClassPredictor/BiasAdd/Flatten" + op: "Flatten" + 
input: "BoxPredictor_3/ClassPredictor/BiasAdd" +} +node { + name: "BoxPredictor_4/ClassPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_4/ClassPredictor/BiasAdd" +} +node { + name: "BoxPredictor_5/ClassPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_5/ClassPredictor/BiasAdd" +} +node { + name: "ClassPredictor/concat" + op: "ConcatV2" + input: "BoxPredictor_0/ClassPredictor/BiasAdd/Flatten" + input: "BoxPredictor_1/ClassPredictor/BiasAdd/Flatten" + input: "BoxPredictor_2/ClassPredictor/BiasAdd/Flatten" + input: "BoxPredictor_3/ClassPredictor/BiasAdd/Flatten" + input: "BoxPredictor_4/ClassPredictor/BiasAdd/Flatten" + input: "BoxPredictor_5/ClassPredictor/BiasAdd/Flatten" + input: "concat/axis_flatten" +} +node { + name: "BoxPredictor_0/BoxEncodingPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_0/BoxEncodingPredictor/BiasAdd" +} +node { + name: "BoxPredictor_1/BoxEncodingPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_1/BoxEncodingPredictor/BiasAdd" +} +node { + name: "BoxPredictor_2/BoxEncodingPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_2/BoxEncodingPredictor/BiasAdd" +} +node { + name: "BoxPredictor_3/BoxEncodingPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_3/BoxEncodingPredictor/BiasAdd" +} +node { + name: "BoxPredictor_4/BoxEncodingPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_4/BoxEncodingPredictor/BiasAdd" +} +node { + name: "BoxPredictor_5/BoxEncodingPredictor/BiasAdd/Flatten" + op: "Flatten" + input: "BoxPredictor_5/BoxEncodingPredictor/BiasAdd" +} +node { + name: "BoxEncodingPredictor/concat" + op: "ConcatV2" + input: "BoxPredictor_0/BoxEncodingPredictor/BiasAdd/Flatten" + input: "BoxPredictor_1/BoxEncodingPredictor/BiasAdd/Flatten" + input: "BoxPredictor_2/BoxEncodingPredictor/BiasAdd/Flatten" + input: "BoxPredictor_3/BoxEncodingPredictor/BiasAdd/Flatten" + input: "BoxPredictor_4/BoxEncodingPredictor/BiasAdd/Flatten" + input: 
"BoxPredictor_5/BoxEncodingPredictor/BiasAdd/Flatten" + input: "concat/axis_flatten" +} +node { + name: "PriorBox_0" + op: "PriorBox" + input: "BoxPredictor_0/BoxEncodingPredictor/BiasAdd" + input: "image_tensor" + attr { + key: "clip" + value { + b: false + } + } + attr { + key: "flip" + value { + b: false + } + } + attr { + key: "height" + value { + tensor { + dtype: DT_FLOAT + float_val: 30.0 + float_val: 42.42640750334631 + float_val: 84.85281500669262 + tensor_shape { + dim { + size: 3 + } + } + } + } + } + attr { + key: "variance" + value { + tensor { + dtype: DT_FLOAT + float_val: 0.1 + float_val: 0.1 + float_val: 0.2 + float_val: 0.2 + tensor_shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "width" + value { + tensor { + dtype: DT_FLOAT + float_val: 30.0 + float_val: 84.85281500669265 + float_val: 42.426407503346326 + tensor_shape { + dim { + size: 3 + } + } + } + } + } +} +node { + name: "PriorBox_1" + op: "PriorBox" + input: "BoxPredictor_1/BoxEncodingPredictor/BiasAdd" + input: "image_tensor" + attr { + key: "clip" + value { + b: false + } + } + attr { + key: "flip" + value { + b: false + } + } + attr { + key: "height" + value { + tensor { + dtype: DT_FLOAT + float_val: 104.99999999994 + float_val: 74.24621202454506 + float_val: 148.49242404909012 + float_val: 60.62177826487607 + float_val: 181.87443025249192 + float_val: 125.49900360603824 + tensor_shape { + dim { + size: 6 + } + } + } + } + } + attr { + key: "variance" + value { + tensor { + dtype: DT_FLOAT + float_val: 0.1 + float_val: 0.1 + float_val: 0.2 + float_val: 0.2 + tensor_shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "width" + value { + tensor { + dtype: DT_FLOAT + float_val: 104.99999999994 + float_val: 148.49242404909015 + float_val: 74.24621202454507 + float_val: 181.8653347946282 + float_val: 60.61874659720808 + float_val: 125.49900360603824 + tensor_shape { + dim { + size: 6 + } + } + } + } + } +} +node { + name: "PriorBox_2" + op: "PriorBox" + input: 
"BoxPredictor_2/BoxEncodingPredictor/BiasAdd" + input: "image_tensor" + attr { + key: "clip" + value { + b: false + } + } + attr { + key: "flip" + value { + b: false + } + } + attr { + key: "height" + value { + tensor { + dtype: DT_FLOAT + float_val: 149.99999910588 + float_val: 106.06601654574379 + float_val: 212.13203309148759 + float_val: 86.60253986222344 + float_val: 259.8206130978267 + float_val: 171.02631247097506 + tensor_shape { + dim { + size: 6 + } + } + } + } + } + attr { + key: "variance" + value { + tensor { + dtype: DT_FLOAT + float_val: 0.1 + float_val: 0.1 + float_val: 0.2 + float_val: 0.2 + tensor_shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "width" + value { + tensor { + dtype: DT_FLOAT + float_val: 149.99999910588 + float_val: 212.1320330914876 + float_val: 106.0660165457438 + float_val: 259.8076195866703 + float_val: 86.59820890843783 + float_val: 171.02631247097506 + tensor_shape { + dim { + size: 6 + } + } + } + } + } +} +node { + name: "PriorBox_3" + op: "PriorBox" + input: "BoxPredictor_3/BoxEncodingPredictor/BiasAdd" + input: "image_tensor" + attr { + key: "clip" + value { + b: false + } + } + attr { + key: "flip" + value { + b: false + } + } + attr { + key: "height" + value { + tensor { + dtype: DT_FLOAT + float_val: 194.99999821182 + float_val: 137.88582106694255 + float_val: 275.7716421338851 + float_val: 112.58330145957083 + float_val: 337.7667959431616 + float_val: 216.3330743270663 + tensor_shape { + dim { + size: 6 + } + } + } + } + } + attr { + key: "variance" + value { + tensor { + dtype: DT_FLOAT + float_val: 0.1 + float_val: 0.1 + float_val: 0.2 + float_val: 0.2 + tensor_shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "width" + value { + tensor { + dtype: DT_FLOAT + float_val: 194.99999821182 + float_val: 275.77164213388517 + float_val: 137.88582106694258 + float_val: 337.7499043787124 + float_val: 112.57767121966761 + float_val: 216.3330743270663 + tensor_shape { + dim { + size: 6 + } + } + } + } 
+ } +} +node { + name: "PriorBox_4" + op: "PriorBox" + input: "BoxPredictor_4/BoxEncodingPredictor/BiasAdd" + input: "image_tensor" + attr { + key: "clip" + value { + b: false + } + } + attr { + key: "flip" + value { + b: false + } + } + attr { + key: "height" + value { + tensor { + dtype: DT_FLOAT + float_val: 239.99999731775998 + float_val: 169.7056255881413 + float_val: 339.4112511762826 + float_val: 138.5640630569182 + float_val: 415.71297878849646 + float_val: 261.5339335100698 + tensor_shape { + dim { + size: 6 + } + } + } + } + } + attr { + key: "variance" + value { + tensor { + dtype: DT_FLOAT + float_val: 0.1 + float_val: 0.1 + float_val: 0.2 + float_val: 0.2 + tensor_shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "width" + value { + tensor { + dtype: DT_FLOAT + float_val: 239.99999731775998 + float_val: 339.41125117628263 + float_val: 169.70562558814132 + float_val: 415.69218917075455 + float_val: 138.55713353089737 + float_val: 261.5339335100698 + tensor_shape { + dim { + size: 6 + } + } + } + } + } +} +node { + name: "PriorBox_5" + op: "PriorBox" + input: "BoxPredictor_5/BoxEncodingPredictor/BiasAdd" + input: "image_tensor" + attr { + key: "clip" + value { + b: false + } + } + attr { + key: "flip" + value { + b: false + } + } + attr { + key: "height" + value { + tensor { + dtype: DT_FLOAT + float_val: 284.9999964237 + float_val: 201.52543010934002 + float_val: 403.05086021868004 + float_val: 164.5448246542656 + float_val: 493.6591616338313 + float_val: 292.40382850966574 + tensor_shape { + dim { + size: 6 + } + } + } + } + } + attr { + key: "variance" + value { + tensor { + dtype: DT_FLOAT + float_val: 0.1 + float_val: 0.1 + float_val: 0.2 + float_val: 0.2 + tensor_shape { + dim { + size: 4 + } + } + } + } + } + attr { + key: "width" + value { + tensor { + dtype: DT_FLOAT + float_val: 284.9999964237 + float_val: 403.05086021868016 + float_val: 201.52543010934008 + float_val: 493.6344739627967 + float_val: 164.53659584212716 + float_val: 
292.40382850966574 + tensor_shape { + dim { + size: 6 + } + } + } + } + } +} +node { + name: "PriorBox/concat" + op: "ConcatV2" + input: "PriorBox_0" + input: "PriorBox_1" + input: "PriorBox_2" + input: "PriorBox_3" + input: "PriorBox_4" + input: "PriorBox_5" + input: "concat/axis_flatten" +} +node { + name: "ClassPredictor/concat/sigmoid" + op: "Sigmoid" + input: "ClassPredictor/concat" +} +node { + name: "detection_out" + op: "DetectionOutput" + input: "BoxEncodingPredictor/concat" + input: "ClassPredictor/concat/sigmoid" + input: "PriorBox/concat" + attr { + key: "background_label_id" + value { + i: 0 + } + } + attr { + key: "code_type" + value { + s: "CENTER_SIZE" + } + } + attr { + key: "confidence_threshold" + value { + f: 0.01 + } + } + attr { + key: "keep_top_k" + value { + i: 100 + } + } + attr { + key: "nms_threshold" + value { + f: 0.6 + } + } + attr { + key: "num_classes" + value { + i: 2 + } + } + attr { + key: "share_location" + value { + b: true + } + } + attr { + key: "top_k" + value { + i: 100 + } + } +} diff --git a/car/GestureRecognition/handrecogniser.py b/car/GestureRecognition/handrecogniser.py new file mode 100644 index 0000000..40d279e --- /dev/null +++ b/car/GestureRecognition/handrecogniser.py @@ -0,0 +1,15 @@ +class HandRecogniser: + """ + Interface for Recognising simple hand gestures from an image (or frame of a video) + """ + def load_image(self, image_path = ""): + """ + Loads the given image, can be lazy loading. + """ + pass + + def get_gesture(self): + """ + Gets a the gesture recognised in the image. 
+ """ + pass diff --git a/car/GestureRecognition/kaleidoscope.py b/car/GestureRecognition/kaleidoscope.py new file mode 100644 index 0000000..3af817a --- /dev/null +++ b/car/GestureRecognition/kaleidoscope.py @@ -0,0 +1,73 @@ +import numpy as np +import cv2 + +def make_triangle(start_img): + h, w, d = start_img.shape + + #crop square + inset = int((max(w,h) - min(w,h)) / 2) + # sqrimg = start_img.crop(inset, inset, h-inset, w-inset) + insetW = inset if w > h else 0 + insetH = inset if h > w else 0 + sqrimg = start_img[insetH:h-insetH, insetW:w-insetW] + + #solve equilateral triangle + w, h, d = sqrimg.shape + print((w,h)) + + mask = np.zeros((w,h,d)) + + t_height = w/2 * np.tan(60) + pts = np.array([[0,w],[h/2,t_height],[h,w]], np.int32) + pts = pts.reshape((-1,1,2)) + mask = cv2.fillPoly(mask, [pts], (255,0,0)) + + # With mask, get the triangle from the original image. + sqrimg[:,:,0] = np.where(mask[:,:,0] == 255, sqrimg[:,:,0], 0) + sqrimg[:,:,1] = np.where(mask[:,:,0] == 255, sqrimg[:,:,1], 0) + sqrimg[:,:,2] = np.where(mask[:,:,0] == 255, sqrimg[:,:,2], 0) + return sqrimg + +def rotate(im, rotation): + M = cv2.getRotationMatrix2D((im.shape[1]/2,im.shape[0]/2),rotation,1) + im[:,:,0] = cv2.warpAffine(im[:,:,0],M,(im.shape[1],im.shape[0])) + im[:,:,1] = cv2.warpAffine(im[:,:,1],M,(im.shape[1],im.shape[0])) + im[:,:,2] = cv2.warpAffine(im[:,:,2],M,(im.shape[1],im.shape[0])) + return im + +def make_kaleidoscope(img): + triangle = make_triangle(img) + +def make_trapezoid(triangle, save=False): + + w, h = triangle.size + can_w, can_h = w*3, h + output = np.array((can_w, can_h, 3)) + output = Image.new('RGBA', (can_w, can_h), color=255) + + def mirror_paste(last_img, coords): + mirror = rotate(cv2.flip(last_img, 1), 60) + output.paste(mirror, (coords), mirror) + return mirror, coords + + #paste in bottom left corner + output.paste(triangle,(0, can_h-h), triangle) + + last_img, coords = mirror_paste(triangle, (int(w/4.4), -int(h/2.125))) + last_img, coords = 
mirror_paste(rotateIm(last_img, 120), (int(can_w/7.3), -228)) + + output = output.crop((0,15, w*2-22, h)) + if save: + path = 'output/trapezoid_{}'.format(filename.split('/')[1]) + output.save(path) + return output, path + return output + +if __name__ == "__main__": + img = cv2.imread("/Users/piv/Documents/Projects/car/GestureRecognition/IMG_0818.png") + triangle = make_triangle(img) + triangle = cv2.resize(triangle, None, fx=0.3, fy=0.3, interpolation = cv2.INTER_AREA) + triangle = rotate(triangle, 180) + cv2.imshow("", triangle) + cv2.waitKey(0) + cv2.destroyAllWindows() \ No newline at end of file diff --git a/car/GestureRecognition/keras_ex.py b/car/GestureRecognition/keras_ex.py new file mode 100644 index 0000000..150ed6f --- /dev/null +++ b/car/GestureRecognition/keras_ex.py @@ -0,0 +1,28 @@ +import time +import os + +import numpy as np + +os.environ["KERAS_BACKEND"] = "plaidml.keras.backend" + +import keras +import keras.applications as kapp +from keras.datasets import cifar10 + +(x_train, y_train_cats), (x_test, y_test_cats) = cifar10.load_data() +batch_size = 8 +x_train = x_train[:batch_size] +x_train = np.repeat(np.repeat(x_train, 7, axis=1), 7, axis=2) +model = kapp.VGG19() +model.compile(optimizer='sgd', loss='categorical_crossentropy', + metrics=['accuracy']) + +print("Running initial batch (compiling tile program)") +y = model.predict(x=x_train, batch_size=batch_size) + +# Now start the clock and run 10 batches +print("Timing inference...") +start = time.time() +for i in range(10): + y = model.predict(x=x_train, batch_size=batch_size) +print("Ran in {} seconds".format(time.time() - start)) \ No newline at end of file diff --git a/car/GestureRecognition/opencvtensorflowex.py b/car/GestureRecognition/opencvtensorflowex.py new file mode 100644 index 0000000..27b5354 --- /dev/null +++ b/car/GestureRecognition/opencvtensorflowex.py @@ -0,0 +1,23 @@ +import cv2 as cv + +cvNet = cv.dnn.readNetFromTensorflow('frozen_inference_graph.pb', 'graph.pbtxt') + +img 
= cv.imread('IMG_0825.jpg') +img = cv.resize(img, None, fx=0.1, fy=0.1, interpolation = cv.INTER_AREA) +rows = img.shape[0] +cols = img.shape[1] +print(str(rows) + " " + str(cols)) +cvNet.setInput(cv.dnn.blobFromImage(img, size=(300, 300), swapRB=True, crop=False)) +cvOut = cvNet.forward() + +for detection in cvOut[0,0,:,:]: + score = float(detection[2]) + if score > 0.6: + left = detection[3] * cols + top = detection[4] * rows + right = detection[5] * cols + bottom = detection[6] * rows + cv.rectangle(img, (int(left), int(top)), (int(right), int(bottom)), (23, 230, 210), thickness=2) + +cv.imshow('img', img) +cv.waitKey() \ No newline at end of file diff --git a/car/GestureRecognition/starkaleid.py b/car/GestureRecognition/starkaleid.py new file mode 100644 index 0000000..d6b266a --- /dev/null +++ b/car/GestureRecognition/starkaleid.py @@ -0,0 +1,58 @@ +import numpy as np +import cv2 + +def make_triangle(img, num_triangles): + print(img.shape) + y,x = (img.shape[0]//2, img.shape[1]//2) + angles = 2 * np.pi/num_triangles + print(angles/2) + w,h,d = img.shape + print(np.tan(angles/2)) + z = int(np.tan(angles/2) * (h/2)) + print(z) + print(h) + u = (x + z, y + h/2) + v = (x - z, y + h/2) + mask = np.zeros((w,h,d)) + + pts = np.array([v,(x,y),u], np.int32) + pts = pts.reshape((-1,1,2)) + mask = cv2.fillPoly(mask, [pts], (255,0,0)) + + # With mask, get the triangle from the original image. 
+ img[:,:,0] = np.where(mask[:,:,0] == 255, img[:,:,0], 0) + img[:,:,1] = np.where(mask[:,:,0] == 255, img[:,:,1], 0) + img[:,:,2] = np.where(mask[:,:,0] == 255, img[:,:,2], 0) + return img + +def rotate(im, rotation): + M = cv2.getRotationMatrix2D((im.shape[1]/2,im.shape[0]/2), rotation, 1) + im[:,:,0] = cv2.warpAffine(im[:,:,0],M,(im.shape[1],im.shape[0])) + im[:,:,1] = cv2.warpAffine(im[:,:,1],M,(im.shape[1],im.shape[0])) + im[:,:,2] = cv2.warpAffine(im[:,:,2],M,(im.shape[1],im.shape[0])) + return im + +def _stitch(img, to_stitch): + img[:,:,0] = np.where((img[:,:,0] == 0) & (to_stitch[:,:,0] != 0), to_stitch[:,:,0], img[:,:,0]) + img[:,:,1] = np.where((img[:,:,1] == 0) & (to_stitch[:,:,1] != 0), to_stitch[:,:,1], img[:,:,1]) + img[:,:,2] = np.where((img[:,:,2] == 0) & (to_stitch[:,:,2] != 0), to_stitch[:,:,2], img[:,:,2]) + +def make_kaleidoscope(img, num): + triangle = make_triangle(img, num) + iters = num + while iters > 0: + new_triangle = np.copy(triangle) + new_triangle = cv2.flip(new_triangle, 1) if iters % 2 != 0 else new_triangle + rotate(new_triangle, 360/num * iters) + _stitch(triangle, new_triangle) + iters -= 1 + return triangle + +if __name__ == "__main__": + img = cv2.imread("/Users/piv/Documents/Projects/car/GestureRecognition/IMG_0818.png") + img = cv2.resize(img, None, fx=0.3, fy=0.3, interpolation = cv2.INTER_AREA) + num = 12 + kaleid = make_kaleidoscope(img, num) + cv2.imshow("", kaleid) + cv2.waitKey(0) + cv2.destroyAllWindows() \ No newline at end of file diff --git a/car/Messaging/__init__.py b/car/Messaging/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/car/Messaging/message_factory.py b/car/Messaging/message_factory.py new file mode 100644 index 0000000..1888468 --- /dev/null +++ b/car/Messaging/message_factory.py @@ -0,0 +1,64 @@ +import zmq + + +class ZmqPubSubStreamer: + ''' + Not thread-safe. Always get this inside the thread/process where you intend + to use it. 
+ ''' + + def __init__(self, port): + self._socket = zmq.Context.instance().socket(zmq.PUB) + print('Starting socket with address: ' + 'tcp://*:' + str(port)) + self._socket.bind("tcp://*:" + str(port)) + + + def send_message(self, message): + ''' + Args + ---- + message: A message type that has the serialise() method. + ''' + self.send_message_topic("", message) + + def send_message_topic(self, topic, message): + self._socket.send_multipart([bytes(topic), message.serialise()]) + + +class BluetoothStreamer: + def __init__(self): + pass + + def send_message(self, message_bytes): + pass + +class TestStreamer: + def __init__(self): + self._listeners = [] + + def send_message(self, message_bytes): + print('Got a message') + + def send_message_topic(self, topic, message): + print('Got a message with topic: ' + str(topic)) + self._fire_message_received(message) + + def add_message_listener(self, listener): + self._listeners.append(listener) + + def _fire_message_received(self, message): + for listener in self._listeners: + listener(message) + +def getZmqPubSubStreamer(port): + ''' + Not thread-safe. Always get this inside the thread/process where you intend + to use it. + ''' + return ZmqPubSubStreamer(port) + +def getTestingStreamer(): + return TestStreamer() + +# TODO: Create a general get method that will get the streamer based on an +# environment variable that is set. 
\ No newline at end of file diff --git a/car/Messaging/messages.py b/car/Messaging/messages.py new file mode 100644 index 0000000..d38c9c7 --- /dev/null +++ b/car/Messaging/messages.py @@ -0,0 +1,34 @@ +import umsgpack + + +class Message(): + def __init__(self, message=None): + self.message = message + + def serialise(self): + raise NotImplementedError + + def deserialise(self, message): + raise NotImplementedError + + +class PackMessage(Message): + + def serialise(self): + return umsgpack.packb(self.message) + + def deserialise(self, message): + return PackMessage(umsgpack.unpackb(self.message)) + + +class ProtoMessage(Message): + + def __init__(self, proto_type=None, message=None): + super().__init__(message) + self._type = proto_type + + def serialise(self): + return self.message.SerializeToString() + + def deserialise(self, message): + return ProtoMessage(self._type, self._type.ParseFromString(message)) diff --git a/car/Messaging/mqttsession.py b/car/Messaging/mqttsession.py new file mode 100644 index 0000000..7353eaf --- /dev/null +++ b/car/Messaging/mqttsession.py @@ -0,0 +1,64 @@ +import paho.mqtt.client as mqtt + +""" +Wrapper module for paho mqtt library, providing a singleton instance of the client to be used. +Also adds some convenience functions such as having multiple connected callbacks, +and managing whether the client is still connected. +""" + + +client = mqtt.Client() +host = None + +connect_callbacks = [] +disconnect_callbacks = [] + +def on_connect(client, userdata, flags, rc): + print("Connected with result code " + str(rc)) + if rc == 0: + global connected + connected = True + + for callback in connect_callbacks: + callback() + + client.subscribe('hello/test', qos=1) + +# Arguably not needed, just want to make the client static, but here anyway. 
+def connect(): + global client + if client is None or host is None: + print("Error: Client and/or host are not initialised.") + else: + client.connect(host, port=1883, keepalive=60, bind_address="") + client.loop_start() + +def add_connect_callback(callback): + global connect_callbacks + connect_callbacks += callback + connectted = True + +def add_disconnect_callback(callback): + global + +def disconnect(): + global client + if client is not None: + client.loop_stop() + client.disconnect() + else: + print("Error: Client is not initialised.") + +def on_disconnect(client, userdata, rc): + if rc != 0: + print("Unexpected disconnection.") + + global connected + connected = False + +def Client(): + global client + if client is None: + client = mqtt.Client() + + return client diff --git a/car/MyRaft/Experiment/node1/config.json b/car/MyRaft/Experiment/node1/config.json new file mode 100644 index 0000000..e399fe9 --- /dev/null +++ b/car/MyRaft/Experiment/node1/config.json @@ -0,0 +1,28 @@ +{ + "raft": + { + "min_election_timeout": 8, + "varying_election_timeout": 2, + "majority": 2 + }, + "messaging": + { + "me": + { + "ip": "127.0.0.1", + "port": 50051 + }, + "neighbours": + [ + { + "ip": "127.0.0.1", + "port": 50052 + }, + { + "ip": "127.0.0.1", + "port": 50053 + } + ] + } +} + diff --git a/car/MyRaft/Experiment/node2/config.json b/car/MyRaft/Experiment/node2/config.json new file mode 100644 index 0000000..143284b --- /dev/null +++ b/car/MyRaft/Experiment/node2/config.json @@ -0,0 +1,28 @@ +{ + "raft": + { + "min_election_timeout": 8, + "varying_election_timeout": 2, + "majority": 2 + }, + "messaging": + { + "me": + { + "ip": "127.0.0.1", + "port": 50052 + }, + "neighbours": + [ + { + "ip": "127.0.0.1", + "port": 50051 + }, + { + "ip": "127.0.0.1", + "port": 50053 + } + ] + } +} + diff --git a/car/MyRaft/Experiment/node3/config.json b/car/MyRaft/Experiment/node3/config.json new file mode 100644 index 0000000..70781fb --- /dev/null +++ 
b/car/MyRaft/Experiment/node3/config.json @@ -0,0 +1,28 @@ +{ + "raft": + { + "min_election_timeout": 8, + "varying_election_timeout": 2, + "majority": 3 + }, + "messaging": + { + "me": + { + "ip": "127.0.0.1", + "port": 50053 + }, + "neighbours": + [ + { + "ip": "127.0.0.1", + "port": 50052 + }, + { + "ip": "127.0.0.1", + "port": 50051 + } + ] + } +} + diff --git a/car/MyRaft/candidate.py b/car/MyRaft/candidate.py new file mode 100644 index 0000000..a025957 --- /dev/null +++ b/car/MyRaft/candidate.py @@ -0,0 +1,49 @@ +import MyRaft.state as state +import MyRaft.leader as leader +# import MyRaft.follower as follower +import MyRaft.node as node +import MyRaft.raft_pb2 as raft_pb2 + +class Candidate(state.State): + def __init__(self, context:node.RaftNode, majority = 2): + state.State.__init__(self, context) + print("We're a candidate!") + context.currentTerm += 1 + self._votes_received = [] # List of voters who have voted. + self._votes_received.append(self._context._id) + self._majority = majority + self._context.set_timeout(self._context._min_timout, self._context._vary_timeout) + print("Sending RequestVote to other nodes") + self._context.send_RequestVote() + + def rcv_vote(self, request): + print("Received Vote") + # Checks the term... + if not request.voteGranted: + print("They rejected us!") + if request.voterId not in self._votes_received: + print("Added a vote!") + self._votes_received.append(request.voterId) + if len(self._votes_received) >= self._majority: + self._context.set_state(leader.Leader(self._context)) + + def heartbeat_elapsed(self): + # Start a new election. 
+ self._context.currentTerm += 1 + self._context.set_timeout(self._context._min_timout, self._context._vary_timeout) + print("Sending RequestVote to other nodes") + self._context.send_RequestVote() + + def rcv_AppendEntries(self, request): + if request.term >= self._context.currentTerm: + self._context.set_state(follower.Follower(self._context)) + + def rcv_RequestVote(self, request): + print("Received a vote request") + if request.term > self._context.currentTerm: + print("They're more important, going back to a follower") + self._context.set_state(follower.Follower(self._context)) + self._context.votedFor = request.candidateId + return raft_pb2.RequestVoteResponse(term = self._context.currentTerm, + voteGranted = True, + voterId = self._context._id) \ No newline at end of file diff --git a/car/MyRaft/config.ini b/car/MyRaft/config.ini new file mode 100644 index 0000000..da25e2c --- /dev/null +++ b/car/MyRaft/config.ini @@ -0,0 +1,5 @@ +[RAFT] +min_election_timeout = 100 +varying_election_timeout = 200 +heartbeat_timeout = 50 +majority = 3 \ No newline at end of file diff --git a/car/MyRaft/config.json b/car/MyRaft/config.json new file mode 100644 index 0000000..8fbe6eb --- /dev/null +++ b/car/MyRaft/config.json @@ -0,0 +1,27 @@ +{ + "raft": + { + "min_election_timeout": 8, + "varying_election_timeout": 2, + "majority": 2 + }, + "messaging": + { + "me": + { + "ip": "127.0.0.1", + "port": 50051 + }, + "neighbours": + [ + { + "ip": "127.0.0.1", + "port": 50052 + }, + { + "ip": "127.0.0.1", + "port": 50053 + } + ] + } +} \ No newline at end of file diff --git a/car/MyRaft/follower.py b/car/MyRaft/follower.py new file mode 100644 index 0000000..1742928 --- /dev/null +++ b/car/MyRaft/follower.py @@ -0,0 +1,39 @@ +import MyRaft.state as state +import MyRaft.candidate as candidate +import MyRaft.raft_pb2 as raft_pb2 + +class Follower(state.State): + def __init__(self, context): + state.State.__init__(self, context) + self._context.set_timeout(self._context._min_timout, 
self._context._vary_timeout) + + def heartbeat_elapsed(self): + print("Becoming a candidate") + self._context.set_state(candidate.Candidate(self._context)) + + def rcv_AppendEntries(self, request): + """Called when an append entries message is received""" + + self._context.set_timeout(self._context._min_timout, self._context._vary_timeout) + + def rcv_RequestVote(self, request): + print("Received a vote request") + # Ignoring log for now. + if request.term < self._context.currentTerm: + print("They're term is worse than ours.") + # If our current term is already the same, then we must have voted already. + return raft_pb2.RequestVoteResponse(term = self._context.currentTerm, voteGranted = False) + elif request.term == self._context.currentTerm and self._context.votedFor is not None: + return raft_pb2.RequestVoteResponse(term = self._context.currentTerm, voteGranted = False) + else: + print("We'll be voting for them!") + # We vote yes, so reset our timeout. + self._context.set_timeout(self._context._min_timout, self._context._vary_timeout) + self._context.currentTerm = request.term + print("setting candidate id") + self._context.votedFor = request.candidateId + print("Returning result.") + return raft_pb2.RequestVoteResponse(term = self._context.currentTerm, + voteGranted = True, + voterId = self._context._id) + diff --git a/car/MyRaft/leader.py b/car/MyRaft/leader.py new file mode 100644 index 0000000..defac60 --- /dev/null +++ b/car/MyRaft/leader.py @@ -0,0 +1,24 @@ +import MyRaft.state as state +import MyRaft.node as node + +class Leader(state.State): + """The leader class represents the leader state in the raft algorithm""" + def __init__(self, context: node.RaftNode): + state.State.__init__(self, context) + print("We're a leader!") + + # For indexes for each server to send. + self.nextIndex = [] + self.matchIndex = [] + + # Change our timeout. + self._context.set_timeout(self._context._heartbeat_timeout, 0) + # Send empty AppendEntries. 
+ self._context.send_empty_AppendEntries() + + def heartbeat_elapsed(self): + print("Sending an append entries message") + self._context.send_empty_AppendEntries() + + # Don't forget to reset timer, otherwise they'll try run for leader. + self._context.set_timeout(self._context._heartbeat_timeout, 0) \ No newline at end of file diff --git a/car/MyRaft/messages.py b/car/MyRaft/messages.py new file mode 100644 index 0000000..836413e --- /dev/null +++ b/car/MyRaft/messages.py @@ -0,0 +1,129 @@ +""" This module holds the messages for raft to use. + +Message -- Base message class + +AppendEntries -- Message representing raft append entries. + +RequestVote -- Message representing raft request vote. + +RequestVoteReponse -- Message for responding to a request vote. + +Response -- Response to an append entries message. +""" +import umsgpack +from enum import Enum + +class Messages(Enum): + AppendEntries = 1 + RequestVote = 2 + RequestVoteResponse = 3 + AppendEntriesResponse = 4 + +class Message: + """The base class of all messages used in raft""" + _type = None + + def __init__(self, sender, data = {}, term = 0): + self._sender = sender + self._data = data + self._term = term + + @property + def sender(self): + return self._sender + + @property + def type(self): + return self._type + + def serialise(self): + """Serialises a Message object into a message pack byte array""" + raise NotImplementedError + + @staticmethod + def deserialise(message): + """Deserialises from a byte array into a Message object + + message -- Message to deserialise. + + Returns -- Deserialised message object, None if incorrect input message. 
+ """ + m = None + try: + m = umsgpack.unpackb(m) + except: + print("Could not decode message") + return m + + m = Message('tbd') + raise NotImplementedError + + def __eq__(self, other): + if not isinstance(other, Message): + return False + + if other.type != self.type: + return False + + if other._data != self._data: + return False + + if other._sender != self._sender: + return False + + return True + +class AppendEntries(Message): + _type = "AppendEntries" + + def __init__(self, term, leaderId, prevLogIndex, prevLogTerm, leaderCommit, entries = None): + self._data["term"] = term + self._data["leaderId"] = leaderId + self._data["prevLogIndex"] = prevLogIndex + self._data["prevLogTerm"] = prevLogTerm + self._data["entries"] = entries + self._data["leaderCommit"] = leaderCommit # Leader's commit index. + + +class RequestVote(Message): + _type = "RequestVote" + + def __init__(self, term, candidate_id, last_log_index, last_log_term): + self._data["candidateId"] = candidate_id + self._data["lastLogIndex"] = last_log_index + self._data["lastLogTerm"] = last_log_term + + @property + def candidate_id(self): + return self._data["candidateId"] + + @property + def last_log_index(self): + return self._data["lastLogIndex"] + + @property + def last_log_term(self): + return self._data["lastLogTerm"] + + +class RequestVoteResponse(Message): + _type = "RequestVoteResponse" + + def __init__(self, term, vote_granted): + self._data["voteGranted"] = vote_granted + + @property + def vote_granted(self): + return self._data["voteGranted"] + +class Response(Message): + _type = "Response" + + def __init__(self, term, success): + self._data["success"] = success + + @property + def success(self): + return self._data["success"] + + \ No newline at end of file diff --git a/car/MyRaft/messagestrategy.py b/car/MyRaft/messagestrategy.py new file mode 100644 index 0000000..26f8ed2 --- /dev/null +++ b/car/MyRaft/messagestrategy.py @@ -0,0 +1,215 @@ +import json +from concurrent import futures 
+import time +import multiprocessing as mp +import threading + +import grpc +import zmq + +import MyRaft.raft_pb2_grpc as raft_pb2_grpc +import MyRaft.raft_pb2 as raft_pb2 + +class MessageStrategy: + def __init__(self): + pass + + def send_RequestVote(self, request): + raise NotImplementedError + + def send_AppendEntries(self, request): + raise NotImplementedError + + def on_VoteReceived(self, future): + raise NotImplementedError + + def on_EntriesResponse(self, future): + raise NotImplementedError + + def connect_channels(self): + raise NotImplementedError + + +class NodeGrpcServer(raft_pb2_grpc.RaftServicer): + """Contains the gRPC server for the raft node.""" + + def __init__(self, raftNode, port: int): + self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + self.server.add_insecure_port('[::]:%d' % port) + self._raft = raftNode + + def AppendEntriesRPC(self, request, context): + """AppendEntries remote procedural call for raft. + + Args: + request: The AppendEntries message sent by the leader + context: RPC-related information and actions -> more here: https://grpc.io/grpc/python/grpc.html + + Returns: + An AppendEntriesResponse message. + """ + print("Received append entries rpc") + # Leaving this here for now, just in case we need it later (gets the client ip address) + # str(context._rpc_event.call_details.host) + return self._raft.rcv_AppendEntries(request) + + def RequestVoteRPC(self, request, context): + """RequestVote remote procedural call for raft. 
+ + Args: + request: The RequestVote message sent by the leader + context: RPC-related information and actions -> more here: https://grpc.io/grpc/python/grpc.html + + Returns: + A RequestVoteResponse message + """ + print("Received request vote rpc") + print(request) + result = self._raft.vote_requested(request) + print("Now returning our vote.") + print(result) + print(type(result)) + return result + + def start_server(self): + print("Starting servicer") + raft_pb2_grpc.add_RaftServicer_to_server(self, self.server) + self.server.start() + while True: + time.sleep(60*60) + + +class GrpcMessageStrategy(MessageStrategy): + """This class uses gRPC to communicate between raft nodes.""" + # Only create the channels if we become a candidate or leader. + # Also need to close the channels when we become the follower. + + def __init__(self, server: NodeGrpcServer, config): + # Also need to consider TLS/secure connection + self._cfg = config + self._neighbours = self._cfg['messaging']['neighbours'] + self._server = server + self.message_callbacks = [] + self._futures = [] + self.channels = None + + def connect_channels(self): + print("Creating channels") + self.channels = [] + for n in self._neighbours: + channel = grpc.insecure_channel('%s:%d' % (n['ip'], n['port'])) + self.channels.append(channel) + + def send_RequestVote(self, vote): + print("Sending Request Vote") + if self.channels is None: + self.connect_channels() + for channel in self.channels: + print("In channel") + try: + stub = raft_pb2_grpc.RaftStub(channel) + print("connected") + # vote = stub.RequestVoteRPC(vote) + future = stub.RequestVoteRPC.future(vote) + future.add_done_callback(self.on_VoteReceived) + # print("sending vote received back to node.") + # self._server._raft.vote_received(vote) + except Exception as e: + # print("Couldn't message.") + # print(e) + pass + + def on_VoteReceived(self, future): + print("A vote was returned") + print("sending vote received back to node.") + 
self._server._raft.vote_received(future.result()) + + def send_AppendEntries(self, entries): + for channel in self.channels: + stub = raft_pb2_grpc.RaftStub(channel) + future = stub.AppendEntriesRPC.future(entries) + future.add_done_callback(self.on_EntriesResponse) + + def on_EntriesResponse(self, future): + # Pass to leader? Doesn't matter for now since we aren't using the + # log yet. + print("Received append entries response.") + + +class ZmqServer: + # Zmq subscribers can subscribe to multiple publishers. However, + # subscribers are not thread safe - Radio-dish pattern aims to solve that. + def __init__(self, config): + self._cfg = config + self.context = zmq.Context() + self.socketSub = self.context.socket(zmq.SUB) + self.started = True + + def connect_channels(self): + # Also need to subscribe to other nodes... + for n in self._cfg["messaging"]["neighbours"]: + self.socketSub.connect("tcp://%s:%d" % (n["ip"], n["port"])) + + print("Neighbours are connected.") + + def start(self): + # Start receiving on a new thread. + t = threading.Thread(target=self.start_receiving) + t.start() + + def start_receiving(self): + while self.started: + self.on_message(self.socketSub.recv()) + + def stop(self): + self.started = False + + def on_message(self, message): + m = message.deserialise() + try: + a = m.leaderId + # We have append entries + a = self.context.rcv_AppendEntries(m) + # Need to send back a message with our response. May be easier + # to do this with a request reply mechanism, rather than publish + # subscribe. + except: + pass + + try: + a = m.leaderId + # We have request vote. 
+ self.context.rcv_AppendEntries(m) + except: + pass + + def on_RequestVote(self, message): + pass + + def on_AppendEntries(self, messages): + pass + +class ZmqMessageStrategy(MessageStrategy): + + def __init__(self, config, vote_callback, entries_callback): + self._cfg = config + self._vote_callback = vote_callback + self._entries_callback = entries_callback + + def connect_nodes(self): + print("Creating publish socket.") + self.context = zmq.Context() + self.socketPub = self.context.socket(zmq.REQ) + self.socketPub.bind("tcp://%s:%d" % (self._cfg["messaging"]["me"]["ip"], self._cfg["messaging"]["me"]["port"])) + + def send_RequestVote(self, request): + self.socketPub.send(request.serialize) + + def send_AppendEntries(self, request): + self.socketPub.send(request.serialize) + + def on_VoteReceived(self, message): + self._vote_callback(message) + + def on_EntriesResponse(self, message): + self._entries_callback(message) \ No newline at end of file diff --git a/car/MyRaft/node.py b/car/MyRaft/node.py new file mode 100644 index 0000000..2253b9a --- /dev/null +++ b/car/MyRaft/node.py @@ -0,0 +1,127 @@ +from threading import Timer, Thread +import random +import uuid +import json +import time + +from MyRaft.messagestrategy import MessageStrategy, GrpcMessageStrategy, NodeGrpcServer +import MyRaft.raft_pb2 as raft_pb2 + +class RaftNode: + def __init__(self, message_strategy: MessageStrategy, config): + """ + message_strategy -- Strategy used to send messagesfor the node. + """ + import MyRaft.follower as follower + # Do we need to know who the current leader is? For the purposes of + # the cameras knowing, (as the leader of raft is the leader of out + # swarm) we should know this on each node. VotedFor may work, as it is + # who we last voted for, and therefore who we think is leader. We also need + # this to redirect client requests to the leader. 
+ + self._current_state = None + self._timer = None + self._message_strategy = None + + # Persistent State + self.currentTerm = 0 + self.votedFor = None + self.log = [] + + # Volatile state + self.commitIndex = 0 + self.lastApplied = 0 + + # We only need this for candidates/leaders... + self._id = str(uuid.uuid1()) + if message_strategy is None or not isinstance(message_strategy, MessageStrategy): + raise ValueError(MessageStrategy) + + self._message_strategy = message_strategy + + self._cfg = config + self._min_timout = self._cfg["raft"]["min_election_timeout"] + self._vary_timeout = self._cfg["raft"]["varying_election_timeout"] + self._heartbeat_timeout = self._min_timout // 2 + # Also need to check if we can load log from stable storage in case of + # restart. + + # All nodes start as a follower. State starts the timeout always. + self._current_state = follower.Follower(self) + self._state_changed = [] + + def add_state_change(self, on_change): + """Adds a callback for when the current state of the node changes. + + Args + on_change: function to call when the state changes. + """ + self._state_changed.append(on_change) + + def set_state(self, state): + """Sets the current state of the raft node. + + state -- New state of the node. + """ + # State Pattern: https://en.wikipedia.org/wiki/State_pattern + del(self._current_state) + self._current_state = state + for cb in self._state_changed: + cb() + + def timeout_elapsed(self): + """Election or heartbeat timeout has elapsed.""" + print("Node timeout elapsed") + self._current_state.heartbeat_elapsed() + + def set_timeout(self, min_timeout, vary_timeout): + """Stops the old timer and restarts it to the specified time. + + min_timeout -- The minimum time that can be used for the timer. + vary_timout -- Default 200, the additional random varying time (0 - vary_timeout) to add to timer. 
+ """ + if self._timer is not None: + self._timer.cancel() + randy = random.randint(0,vary_timeout) + self._timer = Timer(min_timeout + randy, self.timeout_elapsed) + self._timer.start() + + def send_RequestVote(self): + self._message_strategy.send_RequestVote(raft_pb2.RequestVote(term = self.currentTerm, + candidateId = self._id)) + + def vote_requested(self, request): + return self._current_state.rcv_RequestVote(request) + + def vote_received(self, voter): + print("Node received vote") + self._current_state.rcv_vote(voter) + + def send_AppendEntries(self, entry): + pass + + def send_empty_AppendEntries(self): + self._message_strategy.send_AppendEntries(raft_pb2.AppendEntries(term = self.currentTerm, + leaderId = self._id)) + + def entries_response_received(self, entryResponse): + self._current_state.rcv_AppendEntriesResponse(entryResponse) + + def rcv_AppendEntries(self, entries, host): + # Always let leader know if fallen behind. + if entries.term < self.currentTerm: + return raft_pb2.AppendEntriesResponse(term = self.currentTerm, success = False) + return self._current_state.rcv_AppendEntries(entries) + +class RaftGrpcNode(RaftNode): + + def __init__(self, config): + cfg = None + with open(config) as f: + cfg = json.load(f) + port = cfg["messaging"]["me"]["port"] + self.servicer = NodeGrpcServer(self, port) + RaftNode.__init__(self, GrpcMessageStrategy(self.servicer, cfg), cfg) + servicer_thread = Thread(target=self.servicer.start_server) + servicer_thread.start() + print("Servicer started") diff --git a/car/MyRaft/protos/raft.proto b/car/MyRaft/protos/raft.proto new file mode 100644 index 0000000..f8321b7 --- /dev/null +++ b/car/MyRaft/protos/raft.proto @@ -0,0 +1,35 @@ +syntax = "proto3"; + +package raft; + +service Raft{ + rpc AppendEntriesRPC(AppendEntries) returns (AppendEntriesResponse) {} + rpc RequestVoteRPC(RequestVote) returns (RequestVoteResponse) {} +} + +message AppendEntries{ + uint32 term = 1; + string leaderId = 2; + uint32 prevLogIndex = 3; + 
uint32 prevLogTerm = 4; + uint32 leaderCommit = 5; + repeated string entry = 6; +} + +message AppendEntriesResponse{ + uint32 term = 1; + bool success = 2; +} + +message RequestVote{ + uint32 term = 1; + string candidateId = 2; + uint32 lastLogIndex = 3; + uint32 lastLogTerm = 4; +} + +message RequestVoteResponse{ + uint32 term = 1; + bool voteGranted = 2; + string voterId = 3; +} \ No newline at end of file diff --git a/car/MyRaft/raft_pb2.py b/car/MyRaft/raft_pb2.py new file mode 100644 index 0000000..ad2f5a0 --- /dev/null +++ b/car/MyRaft/raft_pb2.py @@ -0,0 +1,296 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: raft.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='raft.proto', + package='raft', + syntax='proto3', + serialized_options=None, + serialized_pb=_b('\n\nraft.proto\x12\x04raft\"\x7f\n\rAppendEntries\x12\x0c\n\x04term\x18\x01 \x01(\r\x12\x10\n\x08leaderId\x18\x02 \x01(\t\x12\x14\n\x0cprevLogIndex\x18\x03 \x01(\r\x12\x13\n\x0bprevLogTerm\x18\x04 \x01(\r\x12\x14\n\x0cleaderCommit\x18\x05 \x01(\r\x12\r\n\x05\x65ntry\x18\x06 \x03(\t\"6\n\x15\x41ppendEntriesResponse\x12\x0c\n\x04term\x18\x01 \x01(\r\x12\x0f\n\x07success\x18\x02 \x01(\x08\"[\n\x0bRequestVote\x12\x0c\n\x04term\x18\x01 \x01(\r\x12\x13\n\x0b\x63\x61ndidateId\x18\x02 \x01(\t\x12\x14\n\x0clastLogIndex\x18\x03 \x01(\r\x12\x13\n\x0blastLogTerm\x18\x04 \x01(\r\"I\n\x13RequestVoteResponse\x12\x0c\n\x04term\x18\x01 \x01(\r\x12\x13\n\x0bvoteGranted\x18\x02 \x01(\x08\x12\x0f\n\x07voterId\x18\x03 
\x01(\t2\x90\x01\n\x04Raft\x12\x46\n\x10\x41ppendEntriesRPC\x12\x13.raft.AppendEntries\x1a\x1b.raft.AppendEntriesResponse\"\x00\x12@\n\x0eRequestVoteRPC\x12\x11.raft.RequestVote\x1a\x19.raft.RequestVoteResponse\"\x00\x62\x06proto3') +) + + + + +_APPENDENTRIES = _descriptor.Descriptor( + name='AppendEntries', + full_name='raft.AppendEntries', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='term', full_name='raft.AppendEntries.term', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='leaderId', full_name='raft.AppendEntries.leaderId', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='prevLogIndex', full_name='raft.AppendEntries.prevLogIndex', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='prevLogTerm', full_name='raft.AppendEntries.prevLogTerm', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='leaderCommit', full_name='raft.AppendEntries.leaderCommit', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='entry', full_name='raft.AppendEntries.entry', index=5, + number=6, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=20, + serialized_end=147, +) + + +_APPENDENTRIESRESPONSE = _descriptor.Descriptor( + name='AppendEntriesResponse', + full_name='raft.AppendEntriesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='term', full_name='raft.AppendEntriesResponse.term', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='success', full_name='raft.AppendEntriesResponse.success', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=149, + serialized_end=203, +) + + +_REQUESTVOTE = _descriptor.Descriptor( + name='RequestVote', + full_name='raft.RequestVote', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='term', 
full_name='raft.RequestVote.term', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='candidateId', full_name='raft.RequestVote.candidateId', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lastLogIndex', full_name='raft.RequestVote.lastLogIndex', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lastLogTerm', full_name='raft.RequestVote.lastLogTerm', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=205, + serialized_end=296, +) + + +_REQUESTVOTERESPONSE = _descriptor.Descriptor( + name='RequestVoteResponse', + full_name='raft.RequestVoteResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='term', full_name='raft.RequestVoteResponse.term', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='voteGranted', full_name='raft.RequestVoteResponse.voteGranted', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='voterId', full_name='raft.RequestVoteResponse.voterId', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=298, + serialized_end=371, +) + +DESCRIPTOR.message_types_by_name['AppendEntries'] = _APPENDENTRIES +DESCRIPTOR.message_types_by_name['AppendEntriesResponse'] = _APPENDENTRIESRESPONSE +DESCRIPTOR.message_types_by_name['RequestVote'] = _REQUESTVOTE +DESCRIPTOR.message_types_by_name['RequestVoteResponse'] = _REQUESTVOTERESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +AppendEntries = _reflection.GeneratedProtocolMessageType('AppendEntries', (_message.Message,), dict( + DESCRIPTOR = _APPENDENTRIES, + __module__ = 'raft_pb2' + # @@protoc_insertion_point(class_scope:raft.AppendEntries) + )) +_sym_db.RegisterMessage(AppendEntries) + +AppendEntriesResponse = _reflection.GeneratedProtocolMessageType('AppendEntriesResponse', (_message.Message,), dict( + DESCRIPTOR = _APPENDENTRIESRESPONSE, + __module__ = 'raft_pb2' + # @@protoc_insertion_point(class_scope:raft.AppendEntriesResponse) + )) +_sym_db.RegisterMessage(AppendEntriesResponse) + +RequestVote = _reflection.GeneratedProtocolMessageType('RequestVote', 
(_message.Message,), dict( + DESCRIPTOR = _REQUESTVOTE, + __module__ = 'raft_pb2' + # @@protoc_insertion_point(class_scope:raft.RequestVote) + )) +_sym_db.RegisterMessage(RequestVote) + +RequestVoteResponse = _reflection.GeneratedProtocolMessageType('RequestVoteResponse', (_message.Message,), dict( + DESCRIPTOR = _REQUESTVOTERESPONSE, + __module__ = 'raft_pb2' + # @@protoc_insertion_point(class_scope:raft.RequestVoteResponse) + )) +_sym_db.RegisterMessage(RequestVoteResponse) + + + +_RAFT = _descriptor.ServiceDescriptor( + name='Raft', + full_name='raft.Raft', + file=DESCRIPTOR, + index=0, + serialized_options=None, + serialized_start=374, + serialized_end=518, + methods=[ + _descriptor.MethodDescriptor( + name='AppendEntriesRPC', + full_name='raft.Raft.AppendEntriesRPC', + index=0, + containing_service=None, + input_type=_APPENDENTRIES, + output_type=_APPENDENTRIESRESPONSE, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='RequestVoteRPC', + full_name='raft.Raft.RequestVoteRPC', + index=1, + containing_service=None, + input_type=_REQUESTVOTE, + output_type=_REQUESTVOTERESPONSE, + serialized_options=None, + ), +]) +_sym_db.RegisterServiceDescriptor(_RAFT) + +DESCRIPTOR.services_by_name['Raft'] = _RAFT + +# @@protoc_insertion_point(module_scope) diff --git a/car/MyRaft/raft_pb2_grpc.py b/car/MyRaft/raft_pb2_grpc.py new file mode 100644 index 0000000..6dd6884 --- /dev/null +++ b/car/MyRaft/raft_pb2_grpc.py @@ -0,0 +1,63 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import MyRaft.raft_pb2 as raft__pb2 + + +class RaftStub(object): + # missing associated documentation comment in .proto file + pass + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.AppendEntriesRPC = channel.unary_unary( + '/raft.Raft/AppendEntriesRPC', + request_serializer=raft__pb2.AppendEntries.SerializeToString, + response_deserializer=raft__pb2.AppendEntriesResponse.FromString, + ) + self.RequestVoteRPC = channel.unary_unary( + '/raft.Raft/RequestVoteRPC', + request_serializer=raft__pb2.RequestVote.SerializeToString, + response_deserializer=raft__pb2.RequestVoteResponse.FromString, + ) + + +class RaftServicer(object): + # missing associated documentation comment in .proto file + pass + + def AppendEntriesRPC(self, request, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RequestVoteRPC(self, request, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_RaftServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AppendEntriesRPC': grpc.unary_unary_rpc_method_handler( + servicer.AppendEntriesRPC, + request_deserializer=raft__pb2.AppendEntries.FromString, + response_serializer=raft__pb2.AppendEntriesResponse.SerializeToString, + ), + 'RequestVoteRPC': grpc.unary_unary_rpc_method_handler( + servicer.RequestVoteRPC, + request_deserializer=raft__pb2.RequestVote.FromString, + response_serializer=raft__pb2.RequestVoteResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'raft.Raft', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/car/MyRaft/state.py b/car/MyRaft/state.py new file mode 100644 index 0000000..25287f9 --- /dev/null +++ b/car/MyRaft/state.py @@ -0,0 +1,33 @@ +"""This module contains the base state for all other raft states""" + +import MyRaft.node as node 
+import MyRaft.raft_pb2 as raft_pb2 + +class State: + """Base class to represent state of the system at any point in time. + + Default behaviour for all messaging methods is to check if term of + message is greater than node's term, and if so convert the current + node to a follower. + """ + + def __init__(self, context: node.RaftNode): + self._context = context + self._currentTerm = 0 + + def heartbeat_elapsed(self): + raise NotImplementedError + + def rcv_RequestVote(self, request): + raise NotImplementedError + + def rcv_AppendEntries(self, request): + raise NotImplementedError + + def rcv_vote(self, request): + raise NotImplementedError + + def rcv_AppendEntriesResponse(self, request): + pass + + \ No newline at end of file diff --git a/car/MyRaft/test.py b/car/MyRaft/test.py new file mode 100644 index 0000000..8ee46e8 --- /dev/null +++ b/car/MyRaft/test.py @@ -0,0 +1,22 @@ +import argparse +import os.path +import sys +from MyRaft.node import RaftGrpcNode + +# parser = argparse.ArgumentParser(description="Runs a raft node for leader election") +# parser.add_argument('-C', '--config', help='Path to config file.') + +# args = parser.parse_args() + +# if args.config: +# print("Getting config") +# if not os.path.isfile(args.config): +# print("Could not find configuration file, aborting") +# sys.exit(1) +# else: +# sys.exit(1) + +# print("Loading gRPC raft node") + +node = RaftGrpcNode('config.json') + diff --git a/car/MyRaft/voter.py b/car/MyRaft/voter.py new file mode 100644 index 0000000..70d64b2 --- /dev/null +++ b/car/MyRaft/voter.py @@ -0,0 +1,8 @@ +from MyRaft.state import State +from MyRaft.node import RaftNode + +class Voter(State): + def __init__(self, context: RaftNode): + State.__init__(self, context) + + \ No newline at end of file diff --git a/car/__init__.py b/car/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/car/bitbucket-pipelines.yml b/car/bitbucket-pipelines.yml new file mode 100644 index 0000000..c10cd23 --- /dev/null +++ 
b/car/bitbucket-pipelines.yml @@ -0,0 +1,14 @@ +# This is a sample build configuration for Python. +# Check our guides at https://confluence.atlassian.com/x/x4UWN for more examples. +# Only use spaces to indent your .yml configuration. +# ----- +# You can specify a custom docker image from Docker Hub as your build environment. +image: python:3.7.3 + +pipelines: + default: + - step: + caches: + - pip + script: # Modify the commands below to build your repository. + - pip install -r requirements.txt diff --git a/car/build.gradle b/car/build.gradle new file mode 100644 index 0000000..9f1b034 --- /dev/null +++ b/car/build.gradle @@ -0,0 +1,16 @@ +configurations { + python { + canBeResolved = true + canBeConsumed = false + } +} + +dependencies { + python project(path: ':protobuf', configuration: 'python') +} + +task copyPythonCode(type: Copy, dependsOn: configurations.python){ + // Copy python protobuf code from proto project. + from zipTree(configurations.python.asPath) + into '.' +} \ No newline at end of file diff --git a/car/car.iml b/car/car.iml new file mode 100644 index 0000000..49cf236 --- /dev/null +++ b/car/car.iml @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/car/config.json b/car/config.json new file mode 100644 index 0000000..5c2a972 --- /dev/null +++ b/car/config.json @@ -0,0 +1,34 @@ +{ + "mqtt": + { + "host": "127.0.0.1", + "port":1883, + "timeout":60, + "swarm": "swarm1" + }, + "raft": + { + "min_election_timeout": 8, + "varying_election_timeout": 2, + "majority": 2 + }, + "messaging": + { + "me": + { + "ip": "127.0.0.1", + "port": 50051 + }, + "neighbours": + [ + { + "ip": "127.0.0.1", + "port": 50052 + }, + { + "ip": "127.0.0.1", + "port": 50053 + } + ] + } +} \ No newline at end of file diff --git a/car/control/PythonRemoteController.py b/car/control/PythonRemoteController.py new file mode 100644 index 0000000..4b2a5af --- /dev/null +++ b/car/control/PythonRemoteController.py @@ -0,0 +1,33 @@ 
+print("Connecting to pi") + +import grpc +from concurrent import futures +import motorService_pb2_grpc +from motorService_pb2 import SteeringRequest, ThrottleRequest +import time + +throttle = 0.1 +timer = None + +class ThrottleIterator: + ''' + Class to get the current throttle for the car. + Will return a random throttle between + ''' + def __iter__(self): + return self + + def __next__(self): + if throttle > 1 or throttle < -1: + raise StopIteration + time.sleep(1) + return ThrottleRequest(throttle=throttle) + + +channel = grpc.insecure_channel('10.0.0.53:50051') +stub = motorService_pb2_grpc.CarControlStub(channel) + +response = stub.SetThrottle(ThrottleIterator()) + +while True: + throttle = int(input('Please enter a value for the throttle between -100 and 100')) diff --git a/car/control/__init__.py b/car/control/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/car/control/gpio/__init__.py b/car/control/gpio/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/car/control/gpio/mockvehicle.py b/car/control/gpio/mockvehicle.py new file mode 100644 index 0000000..9b0c8ec --- /dev/null +++ b/car/control/gpio/mockvehicle.py @@ -0,0 +1,42 @@ + + +# A dummy vehicle class to use when +class MockVehicle: + def __init__(self, motor_pin=19, servo_pin=18): + self.motor_pin = motor_pin + self.steering_pin = servo_pin + + @property + def throttle(self): + return self._throttle + + @throttle.setter + def throttle(self, value): + self._throttle = value + + @property + def steering(self): + return self._steering + + @steering.setter + def steering(self, value): + self._steering = value + + @property + def motor_pin(self): + return self._motor_pin + + @motor_pin.setter + def motor_pin(self, value): + self._motor_pin = value + + @property + def steering_pin(self): + return self._steering_pin + + @steering_pin.setter + def steering_pin(self, value): + self._steering_pin = value + + def stop(self): + self.throttle = 0 diff --git 
a/car/control/gpio/vehicle.py b/car/control/gpio/vehicle.py new file mode 100644 index 0000000..9ae941b --- /dev/null +++ b/car/control/gpio/vehicle.py @@ -0,0 +1,83 @@ +from gpiozero import Servo, Device +from gpiozero.pins.pigpio import PiGPIOFactory +import subprocess + + +def _safely_set_servo_value(servo, value): + try: + if value < -1 or value > 1: + print("Not setting throttle, invalid value set.") + return False + servo.value = value + except TypeError: + print("throttle should be a number, preferably a float.") + return False + return True + +def _is_pin_valid(pin): + if isinstance(pin, int): + if pin < 2 or pin > 21: + print("Invalid GPIO pin") + return False + return True + else: + print("Value must be an int.") + return False + +# TODO: Allow a vector to be set to change the throttle/steering, for vehicles that don't use +# two servos for controls (e.g. drone, dog) +class Vehicle: + def __init__(self, motor_pin=19, servo_pin=18): + subprocess.call(['sudo', 'pigpiod']) + Device.pin_factory = PiGPIOFactory() + print('Using pin factory:') + print(Device.pin_factory) + self.motor_pin = motor_pin + self.steering_pin = servo_pin + self.initialise_motor() + + def initialise_motor(self): + self._motor_servo = Servo( + self._motor_pin, pin_factory=Device.pin_factory) + self._steering_servo = Servo(self._steering_pin, pin_factory=Device.pin_factory) + + @property + def throttle(self): + return self._motor_servo.value + + @throttle.setter + def throttle(self, value): + _safely_set_servo_value(self._motor_servo, value) + + @property + def steering(self): + return self._motor_servo.value + + @steering.setter + def steering(self, value): + _safely_set_servo_value(self._motor_servo, value) + + @property + def motor_pin(self): + return self._motor_pin + + @motor_pin.setter + def motor_pin(self, value): + # TODO: Reinitialise the servo when the pin changes, or discard this method + # (probably don't want to allow pin changes whilst the device is in use anyway) + 
self._motor_pin = value if _is_pin_valid(value) else self._motor_pin + + @property + def steering_pin(self): + return self._steering_pin + + @steering_pin.setter + def steering_pin(self, value): + self._steering_pin = value if _is_pin_valid(value) else self._steering_pin + + def stop(self): + self.throttle = 0 + self.steering = 0 + + def change_with_vector(self, vector): + pass diff --git a/car/control/motor_servicer.py b/car/control/motor_servicer.py new file mode 100644 index 0000000..4e24b68 --- /dev/null +++ b/car/control/motor_servicer.py @@ -0,0 +1,40 @@ +from threading import Timer, Thread +from concurrent import futures +import time + +import control.motorService_pb2 as motorService_pb2 +import control.motorService_pb2_grpc as motorService_pb2_grpc + +class MotorServicer(motorService_pb2_grpc.CarControlServicer): + def __init__(self, vehicle): + self.vehicle = vehicle + self._timer = None + + def SetThrottle(self, request, context): + # gRPC streams currently don't work between python and android. + # If we don't get a response every 3 seconds, stop the car. + print('Setting throttle to: ' + str(request.throttle)) + self.set_timeout(3) + self.vehicle.throttle = request.throttle + return motorService_pb2.ThrottleResponse(throttleSet=True) + + def SetSteering(self, request, context): + print('Setting steering to: ' + str(request.steering)) + self.vehicle.steering = request.steering + return motorService_pb2.SteeringResponse(steeringSet=True) + + def set_timeout(self, min_timeout): + """Stops the old timer and restarts it to the specified time. + + min_timeout -- The minimum time that can be used for the timer. 
+ """ + if self._timer is not None: + self._timer.cancel() + self._timer = Timer(min_timeout, self.timeout_elapsed) + self._timer.start() + + def timeout_elapsed(self): + """Election or heartbeat timeout has elapsed.""" + print("Node timeout elapsed") + self.vehicle.stop() + diff --git a/car/controller.py b/car/controller.py new file mode 100755 index 0000000..b49d8d5 --- /dev/null +++ b/car/controller.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +from threading import Timer, Thread +from concurrent import futures +import time + +import grpc + +import control.motorService_pb2_grpc as motorService_pb2_grpc +from control.gpio.vehicle import Vehicle +from control.motor_servicer import MotorServicer +from slam.slam_servicer import SlamServicer +import slam.SlamController_pb2_grpc as SlamController_pb2_grpc +import tracking.lidar_tracker_pb2_grpc as lidar_tracker_pb2_grpc +from tracking.lidar_servicer import LidarServicer + + +class CarServer(): + + def __init__(self, vehicle): + self.vehicle = vehicle + + def start_server(self): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=8)) + motorService_pb2_grpc.add_CarControlServicer_to_server(self.create_motor_servicer(), server) + SlamController_pb2_grpc.add_SlamControlServicer_to_server( + self.create_slam_servicer(), server) + lidar_tracker_pb2_grpc.add_PersonTrackingServicer_to_server( + self.create_lidar_servicer(), server) + # Disable tls for local testing. + # server.add_secure_port('[::]:50051', self.create_credentials()) + server.add_insecure_port('[::]:50051') + server.start() + while True: + time.sleep(60*60) + + def create_motor_servicer(self): + return MotorServicer(self.vehicle) + + def create_slam_servicer(self): + return SlamServicer() + + def create_lidar_servicer(self): + return LidarServicer() + + def create_credentials(self): + # Relativise this stuff. 
+ pvtKeyPath = '/home/pi/tls/device.key' + pvtCertPath = '/home/pi/tls/device.crt' + + pvtKeyBytes = open(pvtKeyPath, 'rb').read() + pvtCertBytes = open(pvtCertPath, 'rb').read() + + return grpc.ssl_server_credentials([[pvtKeyBytes, pvtCertBytes]]) + + +if __name__ == '__main__': + vehicle = Vehicle() + server = CarServer(vehicle) + + # Can't remember why I do this, is it even needed? + service_thread = Thread(target=server.start_server) + service_thread.start() diff --git a/car/docker-compose.yml b/car/docker-compose.yml new file mode 100644 index 0000000..16ea5d2 --- /dev/null +++ b/car/docker-compose.yml @@ -0,0 +1,9 @@ +version: '3' +services: + cameraserver: + build: . + volumes: + - .:/app + + eclipse-mqtt: + image: "eclipse-mosquitto" diff --git a/car/generate-proto b/car/generate-proto new file mode 100755 index 0000000..6ca8031 --- /dev/null +++ b/car/generate-proto @@ -0,0 +1,3 @@ +python3 -m grpc_tools.protoc -I proto --python_out=. --grpc_python_out=. proto/control/motorService.proto +python3 -m grpc_tools.protoc -I proto --python_out=. --grpc_python_out=. proto/slam/SlamController.proto +python3 -m grpc_tools.protoc -I proto --python_out=. --grpc_python_out=. 
class SlamServicer(grpc.SlamControlServicer):
    """gRPC servicer that starts/stops SLAM map streaming in a child process."""

    def __init__(self):
        print('Servicer initialised')
        self.slam = slam.SlamStreamer()
        # Instance attribute: the original declared slam_thread at class
        # level, which would be shared across all servicer instances.
        self.slam_thread = None

    def start_map_streaming(self, request, context):
        """Configure the streamer from the request and launch it (once only)."""
        print('Received Map Start Streaming Request')
        if self.slam_thread is None:
            print('initialising slam_thread')
            # Don't bother creating and starting slam more than once.
            self.slam.port = request.port
            self.slam.map_pixels = request.map_size_pixels
            self.slam.map_meters = request.map_size_meters
            self.slam_thread = Process(target=self.slam.start)
            self.slam_thread.start()
        return proto.Empty()

    def stop_streaming(self, request, context):
        """Stop the streaming process and reset state so streaming can restart."""
        if self.slam_thread is not None:
            self.slam.stop_scanning()
            # stop_scanning only flips a flag in *this* process; the child
            # Process has its own copy of the streamer, so an unbounded
            # join could hang forever. Give it a grace period, then kill it.
            self.slam_thread.join(timeout=5)
            if self.slam_thread.is_alive():
                self.slam_thread.terminate()
                self.slam_thread.join()
            # Reset both fields so a later start_map_streaming works (the
            # original nulled only `slam`, leaving slam_thread set and the
            # servicer permanently stuck).
            self.slam_thread = None
            self.slam = slam.SlamStreamer()
        return proto.Empty()
class SlamStreamer:
    """Runs BreezySLAM over lidar scans and publishes maps on a zmq PUB socket."""

    def __init__(self, map_pixels=None, map_meters=None, port=None):
        self._map_pixels = map_pixels
        self._map_meters = map_meters
        self._port = port
        # Initialise so the lidar_connection getter cannot raise
        # AttributeError before the setter has ever been called.
        self._lidar_connection = None
        # Instance attribute (the original used a class attribute).
        self.can_scan = False

    def start(self):
        '''
        Does scanning and constructs the slam map,
        and pushes to subscribers through a zmq pub socket.
        This is done on the main thread, so you'll need
        to run this method on a separate thread yourself.

        All constructor parameters must be set prior
        to calling this method, and changing those values after
        calling this method will have no effect.
        '''
        self.can_scan = True
        print('Starting to stream')
        self._mFactory = mf.getZmqPubSubStreamer(self._port)

        print('Started and bound zmq socket.')

        # Adapted from BreezySLAM rpslam example.
        # Connect to Lidar unit. For some reason it likes to be done twice,
        # otherwise it errors out...
        lidar = lidar_fact.get_lidar()
        lidar = lidar_fact.get_lidar()

        print('Initialised lidar')

        # Create an RMHC SLAM object with a laser model and optional robot model
        slam_alg = RMHC_SLAM(LaserModel(), self._map_pixels, self._map_meters)

        print('initialised slam')

        # Initialize empty map
        mapbytes = bytearray(self._map_pixels * self._map_pixels)

        print('Initialised byte []')

        # Create an iterator to collect scan data from the RPLidar
        iterator = lidar.iter_scans()

        print('Scanning')

        try:
            while self.can_scan:
                # Extract (quality, angle, distance) triples from current scan
                items = list(next(iterator))

                # Extract distances and angles from triples
                distances = [item[2] for item in items]
                angles = [item[1] for item in items]
                print('Updating map')
                # Update SLAM with current Lidar scan and scan angles
                slam_alg.update(distances, scan_angles_degrees=angles)
                print('Map updated')
                slam_alg.getmap(mapbytes)
                self._push_map(mapbytes, slam_alg.getpos())
        finally:
            # Release the serial device even if scanning raises (the
            # original never stopped/disconnected the lidar).
            lidar.stop()
            lidar.disconnect()

    def _push_map(self, mapbytes, location):
        '''
        Pushes a scan over zmq using protocol buffers.

        Parameters
        ----------
        mapbytes
            The occupancy map, the result of slam.getmap.
        location
            Tuple (x, y, theta), the result of slam.getpos().
        '''
        protoScan = messages.ProtoMessage(message=SlamScan(
            map=bytes(mapbytes),
            location=SlamLocation(x=location[0], y=location[1],
                                  theta=location[2])))
        print('Sending map')
        self._mFactory.send_message_topic(
            'slam_map', protoScan)

    def stop_scanning(self):
        """Ask the scan loop to stop after its current iteration."""
        self.can_scan = False

    # Properties
    @property
    def map_pixels(self):
        return self._map_pixels

    @map_pixels.setter
    def map_pixels(self, value):
        self._map_pixels = value

    @property
    def map_meters(self):
        return self._map_meters

    @map_meters.setter
    def map_meters(self, value):
        self._map_meters = value

    @property
    def lidar_connection(self):
        return self._lidar_connection

    @lidar_connection.setter
    def lidar_connection(self, value):
        self._lidar_connection = value

    @property
    def port(self):
        return self._port

    @port.setter
    def port(self, value):
        self._port = value
b/car/tests/test_ballot_voter.py @@ -0,0 +1,8 @@ +import unittest +from DecisionSystem.CentralisedDecision.ballotvoter import BallotVoter + +class TestBallotVoter(unittest.TestCase): + def setUp(self): + pass + + \ No newline at end of file diff --git a/car/tests/test_commander.py b/car/tests/test_commander.py new file mode 100644 index 0000000..53dec6e --- /dev/null +++ b/car/tests/test_commander.py @@ -0,0 +1,13 @@ +import unittest +import Messaging.mqttsession as ms + +class TestCommander(unittest.TestCase): + ms.client = FakeMQTT() + +class FakeMQTT: + + def __init__(self): + pass + + def add_subsc + \ No newline at end of file diff --git a/car/tests/test_hand_recogniser.py b/car/tests/test_hand_recogniser.py new file mode 100644 index 0000000..caaa834 --- /dev/null +++ b/car/tests/test_hand_recogniser.py @@ -0,0 +1,30 @@ +import unittest +from GestureRecognition.simplehandrecogniser import SimpleHandRecogniser +import cv2 + +class TestSimpleHandRecogniser(unittest.TestCase): + + def test_5_digits(self): + self.assertEqual(self.recogniser_5.get_gesture(), 5) + + def test_3_digits(self): + self.assertEqual(self.recogniser_3.get_gesture(), 3) + + def test_s_photo(self): + self.assertEqual(self.recogniser_s.get_gesture(), 5) + + def setUp(self): + img_3 = cv2.imread("/Users/piv/Documents/Projects/car/GestureRecognition/IMG_0825.jpg") + img_3 = cv2.resize(img_3, None, fx=0.1, fy=0.1, interpolation = cv2.INTER_AREA) + self.recogniser_3 = SimpleHandRecogniser(img_3) + + img_5 = cv2.imread("/Users/piv/Documents/Projects/car/GestureRecognition/IMG_0818.png") + img_5 = cv2.resize(img_5, None, fx=0.1, fy=0.1, interpolation = cv2.INTER_AREA) + self.recogniser_5 = SimpleHandRecogniser(img_5) + + # img_s = cv2.imread("/Users/piv/Documents/Projects/car/GestureRecognition/Screen Shot hand.png") + # img_s = cv2.resize(img_s, None, fx=0.5, fy=0.5, interpolation = cv2.INTER_AREA) + # self.recogniser_s = SimpleHandRecogniser(img_s) + +if __name__ == '__main__': + unittest.main() 
\ No newline at end of file diff --git a/car/tests/test_messages.py b/car/tests/test_messages.py new file mode 100644 index 0000000..2961ce6 --- /dev/null +++ b/car/tests/test_messages.py @@ -0,0 +1,33 @@ +import unittest +from DecisionSystem.messages import Message, deserialise, RequestLeader + +class TestMessages(unittest.TestCase): + def test_base_empty(self): + self.assertEqual(self.m.sender, "") + self.assertEqual(self.m.data, {}) + self.assertIsNone(self.m.type) + + def test_base_set(self): + self.m2.type = "Michael" + self.assertEqual(self.m2.type, "Michael") + self.assertEqual(self.m2.data, self.data) + self.assertEqual(self.m2.sender, 33) + + def test_serialistion(self): + serialised = self.m2.serialise() + self.m3 = deserialise(serialised) + self.assertEqual(self.m2.sender, self.m3.sender) + self.assertEqual(self.m2.data, self.m3.data) + + def test_RequestLeader(self): + self.m3 = RequestLeader() + self.assertEqual(self.m3.type, "RequestLeader") + + def setUp(self): + self.m = Message() + self.data = {"hi": 3} + self.sender = 33 + self.m2 = Message(self.sender,self.data) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/car/tests/test_mqtt_voter.py b/car/tests/test_mqtt_voter.py new file mode 100644 index 0000000..17ba79f --- /dev/null +++ b/car/tests/test_mqtt_voter.py @@ -0,0 +1,21 @@ +import unittest +from DecisionSystem.CentralisedDecision.ballotvoter import BallotVoter + +class TestMqttVoter(unittest.TestCase): + + def test_vote_property(self): + self.assertTrue(self.voter.set_vote(Vote())) + self.assertEquals(self.voter.get_vote(), Vote()) + + def test_invalid_vote_set(self): + self.assertFalse(self.voter.set_vote("Hi")) + + @unittest.skip + def test_submit_vote(self): + pass + + def setUp(self): + self.voter = MqttVoter() + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/car/tracking/__init__.py b/car/tracking/__init__.py new file mode 100644 index 0000000..e69de29 diff 
class Group:
    """A cluster of adjacent lidar points, with a cached cartesian bounding box."""

    def __init__(self, number, points=None):
        # No mutable default argument: the original used `points=[]`, which
        # shared a single list across every Group instance.
        self._points = []
        self._number = number
        self._minX = None
        self._maxX = None
        self._minY = None
        self._maxY = None
        # Route initial points through add_point so the bounding box is
        # correct (the original never updated min/max for them).
        for point in (points or []):
            self.add_point(point)

    def add_point(self, point):
        """Add a (quality, angle, distance) point and refresh the bounds."""
        self._points.append(point)
        self._update_min_max(point)

    def get_points(self):
        return self._points

    @property
    def number(self):
        return self._number

    @number.setter
    def number(self, number):
        self._number = number

    def _update_min_max(self, new_point):
        """
        Updates the min and max points for this group.
        This is to determine when assigning groups whether the
        same group is selected.
        """
        converted_point = convert_lidar_to_cartesian(new_point)

        if self._minX is None or self._minX > converted_point[0]:
            self._minX = converted_point[0]

        if self._maxX is None or self._maxX < converted_point[0]:
            self._maxX = converted_point[0]

        if self._minY is None or self._minY > converted_point[1]:
            self._minY = converted_point[1]

        if self._maxY is None or self._maxY < converted_point[1]:
            self._maxY = converted_point[1]

    def get_minX(self):
        # Fixed: the original returned self._minY here.
        return self._minX

    def get_maxX(self):
        # Fixed: the original returned self._maxY here.
        return self._maxX

    def get_minY(self):
        return self._minY

    def get_maxY(self):
        return self._maxY


def convert_lidar_to_cartesian(new_point):
    """Convert a (quality, angle_degrees, distance) lidar tuple to (x, y).

    The angle is measured clockwise from the +y axis (straight ahead),
    matching the inverse transform convert_cartesian_to_lidar.
    """
    # Lidar angles are in degrees (see convert_cartesian_to_lidar); the
    # original passed degrees straight into sin/cos, which expect radians.
    angle = math.radians(new_point[1])
    x = new_point[2] * math.sin(angle)
    y = new_point[2] * math.cos(angle)
    return (x, y)


def convert_cartesian_to_lidar(x, y):
    """
    Converts a point on the grid (with car as the origin) to a lidar tuple
    (distance, angle).

    Parameters
    ----------
    x
        Horizontal component of point to convert.

    y
        Vertical component of point to convert.

    Returns
    -------
    converted
        A tuple (distance, angle) that represents the point. Angle is in
        degrees in [0, 360), measured clockwise from the +y axis, so that
        it round-trips with convert_lidar_to_cartesian.
    """
    # atan2 handles all four quadrants (and x == 0) directly. The original
    # used atan(y/x) with hand-rolled quadrant offsets, which divided by
    # zero at x == 0 and was inconsistent with the forward transform
    # (x = d*sin(theta) means theta = atan2(x, y), not atan(y/x)).
    return (math.hypot(x, y), math.degrees(math.atan2(x, y)) % 360)


def calc_groups(scan):
    """
    Calculates groups of points from a lidar scan. The scan should
    already be sorted.

    Parameters
    ----------
    scan: Iterable
        The lidar scan data to get groups of.
        Should be of format: (quality, angle, distance)

    Returns
    -------
    list
        List of groups that were found.
    """
    prev_point = None
    current_group = None
    all_groups = []
    current_group_number = 0

    # assume the list is already sorted.
    for point in scan:
        if prev_point is None:
            prev_point = point
            continue

        # Distances are in mm.
        # within 1cm makes a group. Will need to play around with this.
        if (point[2] - prev_point[2]) ** 2 < 10 ** 2:
            if current_group is None:
                current_group = Group(current_group_number)
                all_groups.append(current_group)
                # Include the point that opened the group (the original
                # dropped the first member of every group).
                current_group.add_point(prev_point)
            current_group.add_point(point)
        else:
            if current_group is not None:
                current_group_number += 1
                current_group = None

        prev_point = point

    return all_groups


def find_centre(group):
    """
    Gets a tuple (x,y) of the centre of the group.

    Parameters
    ----------
    group: Group
        A group of points to find the centre of.

    Returns
    -------
    tuple (x,y)
        The centre in the form of a tuple (x,y)
    """
    return ((group.get_maxX() + group.get_minX()) / 2,
            (group.get_maxY() + group.get_minY()) / 2)


def assign_groups(prev_groups, new_groups):
    """
    Assigns group numbers to a new scan based on the groups of an old scan.
    """
    for group in prev_groups:
        old_centre = find_centre(group)
        for new_group in new_groups:
            new_centre = find_centre(new_group)
            # They are considered the same if the new group and old group
            # centres are within 5cm (distances are in mm).
            if ((new_centre[0] - old_centre[0]) ** 2 +
                    (new_centre[1] - old_centre[1]) ** 2) < 50 ** 2:
                new_group.number = group.number

    return new_groups


def updateCarVelocity(oldGroup, newGroup):
    """
    Return a tuple (DistanceChange, AngleChange) indicating how the tracked
    groups have changed, which can be used to then update the
    steering/throttle of the car (or other vehicle that may be used)

    Parameters
    ----------
    oldGroup: Group
        The positioning of points for the group in the last scan.

    newGroup: Group
        The positioning of points for the group in the latest scan.

    Returns
    -------
    tuple (DistanceChange, AngleChange)
        A tuple containing how the groups' centres changed in the form
        (distance, angle)
    """
    old_polar = convert_cartesian_to_lidar(*find_centre(oldGroup))
    new_centre = convert_cartesian_to_lidar(*find_centre(newGroup))
    return (new_centre[0] - old_polar[0], new_centre[1] - old_polar[1])


def dualServoChange(newCentre, changeTuple):
    """
    Gets a tuple (throttleChange, steeringChange) indicating the change that
    should be applied to the current throttle/steering of an rc car that
    uses dual servos.

    Parameters
    ---------
    newCentre
        Tuple (distance, angle) of the new centre of the tracked group.

    changeTuple
        Tuple (distanceChange, angleChange) from the old centre to the new
        centre.

    Returns
    -------
    tuple
        Tuple of (throttleChange, steeringChange) to apply to the 2 servos.
    """
    # NOTE(review): the /3 and /4 gains look empirically tuned — confirm
    # against the vehicle before changing.
    return ((changeTuple[0] / 3) - (newCentre[0] / 4) + 1, 0)
PORT_NAME = '/dev/ttyUSB0'
DMAX = 4000
IMIN = 0
IMAX = 50


def update_line(num, iterator, line, prev_groups):
    """Animation callback: advance one scan, re-group points, update the plot.

    Parameters
    ----------
    prev_groups
        A mutable holder (Bunch) whose .groups attribute carries the
        previous frame's group list between callback invocations.
    """
    scan = next(iterator)
    new_groups = alg.calc_groups(scan)
    if prev_groups.groups is not None:
        # Carry group identities over from the previous frame.
        new_groups = alg.assign_groups(prev_groups.groups, new_groups)
    # Persist for the next frame. The original rebound a local variable,
    # so the computed groups were discarded and tracking never carried
    # across frames.
    prev_groups.groups = new_groups
    offsets = np.array([(np.radians(meas[1]), meas[2]) for meas in scan])
    line.set_offsets(offsets)
    intens = np.array([meas[0] for meas in scan])
    line.set_array(intens)
    # TODO: colour points by group number so distinct groups are visible,
    # e.g. a colour value proportional to each point's group number.
    return line,


class Bunch:
    """Ad-hoc attribute container (mutable state shared with the callback)."""

    def __init__(self, **kwds):
        self.__dict__.update(kwds)


def run():
    """Replay recorded scans through the tracker and animate them in polar view."""
    lidar = MockLidar(loader.load_scans_bytes_file("tracking/out.pickle"))
    fig = plt.figure()
    ax = plt.subplot(111, projection='polar')
    line = ax.scatter([0, 0], [0, 0], s=5, c=[IMIN, IMAX],
                      cmap=plt.cm.Greys_r, lw=0)
    ax.set_rmax(DMAX)
    ax.grid(True)
    prev_groups = Bunch(groups=None)
    iterator = lidar.iter_scans()
    ani = animation.FuncAnimation(fig, update_line,
                                  fargs=(iterator, line, prev_groups),
                                  interval=50)
    plt.show()
    lidar.stop()
    lidar.disconnect()


if __name__ == '__main__':
    run()
diff --git a/car/tracking/devices/mock_lidar.py b/car/tracking/devices/mock_lidar.py new file mode 100644 index 0000000..f5ccea6 --- /dev/null +++ b/car/tracking/devices/mock_lidar.py @@ -0,0 +1,43 @@ +""" +This module contains a MockLidar class, for use in place of RPLidar. +Importantly, it implements iter_scans, so it can be substituted for RPLidar +in the lidar_cache for testing (or anywhere else the rplidar may be used) +""" + +import tracking.lidar_loader as loader + + +class MockLidar: + + def __init__(self, scan_iter=None): + """ + Create mock lidar with an iterator that can be used as fake (or reused) scan data. + + Examples + -------- + lidar = MockLidar(scans) + first_scan = next(lidar.iter_scans(measurements=100)) + + Parameters + ---------- + + scan_iter: Iterable + An iterator that will generate/provide the fake/old scan data. + + """ + self._iter = scan_iter + + def iter_scans(self, min_len=100): + return iter(self._iter) + + def get_health(self): + return "Mock Lidar has scans" if self._iter is not None else "Mock lidar won't work properly!" + + def get_info(self): + return self.get_health() + + def stop(self): + pass + + def disconnect(self): + pass diff --git a/car/tracking/lidar_cache.py b/car/tracking/lidar_cache.py new file mode 100644 index 0000000..f74b6f1 --- /dev/null +++ b/car/tracking/lidar_cache.py @@ -0,0 +1,84 @@ +from threading import Thread +from tracking import algorithms +import tracking.lidar_tracker_pb2 as tracker_pb +import zmq + + +class LidarCache(): + """ + A class that retrieves scans from the lidar, + runs grouping algorithms between scans and + keeps a copy of the group data. 
class LidarCache():
    """
    A class that retrieves scans from the lidar,
    runs grouping algorithms between scans and
    keeps a copy of the group data.
    """

    def __init__(self, lidar, measurements=100):
        self.lidar = lidar
        self.measurements = measurements
        print('Info: ' + self.lidar.get_info())
        print('Health: ' + self.lidar.get_health())
        self.run = True
        self.tracking_group_number = -1
        self.currentGroups = None
        self._group_listeners = []
        # Defined up front so the attribute exists even before start_cache
        # is called (the original left it undefined until then).
        self.thread = None

    def start_cache(self):
        """Start do_scanning on a background thread."""
        self.thread = Thread(target=self.do_scanning)
        self.thread.start()

    def do_scanning(self):
        """Perform scans while the cache is running.

        After each complete scan the group data is recomputed and every
        listener registered via add_groups_changed_listener is notified.
        (The original docstring documented a `listener` parameter that the
        method never had.)
        """
        # Batch over scans, so we don't need to do our own batching to
        # determine groups.
        # TODO: Implement custom batching, as iter_scans can be unreliable
        for scan in self.lidar.iter_scans(min_len=self.measurements):
            # Check the stop flag before doing any work on this scan (the
            # original still processed one full scan after stop_scanning).
            if not self.run:
                break

            print('Got %d measurments' % (len(scan)))
            if len(scan) < self.measurements:
                # Poor scan, likely since it was the first scan.
                continue

            # Carry group identities across scans once we have a baseline.
            if self.currentGroups is not None:
                self.currentGroups = algorithms.assign_groups(
                    self.currentGroups, algorithms.calc_groups(scan))
            else:
                self.currentGroups = algorithms.calc_groups(scan)

            self.fireGroupsChanged()

    def fireGroupsChanged(self):
        """Serialize the current groups to a PointScan and notify listeners."""
        # TODO: Rename this to a generic listener method, rather than an
        # explicit 'send' (even though it can be treated as such already).
        pointScan = tracker_pb.PointScan()
        for group in self.currentGroups:
            for point in group.get_points():
                pointScan.points.append(tracker_pb.Point(
                    angle=point[1], distance=point[2],
                    group_number=group.number))

        for listener in self._group_listeners:
            listener.onGroupsChanged(pointScan)

    def add_groups_changed_listener(self, listener):
        """
        Add a listener for a change in scans. This will provide the new
        group scans, which can then be sent off to a network listener for
        display, or to update the vehicle with a new velocity.

        Parameters
        ----------
        listener
            An object that implements the onGroupsChanged(message) method.
        """
        self._group_listeners.append(listener)

    def stop_scanning(self):
        """Ask the scanning loop to stop before its next scan."""
        self.run = False
+ # Also, it would allow creating the service without the lidar being connected. + self.cache = LidarCache(lidar_factory.get_lidar(), measurements=100) + self.cache.add_groups_changed_listener(self) + self._mFactory = None + self._port = None + self._vehicle = vehicle + self._tracked_group = None + + def set_tracking_group(self, request, context): + self._tracked_group = request.value + + def stop_tracking(self, request, context): + self.cache.stop_scanning() + + def start_tracking(self, request, context): + """Starts the lidar cache, streaming on the provided port.""" + self._port = request.value + self.cache.start_cache() + + def onGroupsChanged(self, message): + if self._mFactory is None: + # Create the zmq socket in the thread that it will be used, just to be safe. + self._mFactory = mf.getZmqPubSubStreamer(self._port) + self._mFactory.send_message_topic("lidar_map", messages.ProtoMessage(message=message.SerializeToString())) + + if self._tracked_group is not None and self._vehicle is not None: + # Update vehicle to correctly follow the tracked group. + # Leave for now, need to work out exactly how this will change. + # alg.dualServoChange(alg.find_centre()) + pass diff --git a/car/tracking/lidar_tester.py b/car/tracking/lidar_tester.py new file mode 100644 index 0000000..d41215d --- /dev/null +++ b/car/tracking/lidar_tester.py @@ -0,0 +1,5 @@ +from tracking.lidar_cache import LidarCache +import Messaging.message_factory as mf + + + diff --git a/car/tracking/out.pickle b/car/tracking/out.pickle new file mode 100644 index 0000000..cd86fea Binary files /dev/null and b/car/tracking/out.pickle differ diff --git a/car/tracking/readme.txt b/car/tracking/readme.txt new file mode 100644 index 0000000..f7fe842 --- /dev/null +++ b/car/tracking/readme.txt @@ -0,0 +1,4 @@ +To load the lidar dummy scans in all_scans.txt, +use python pickle: +with open('path/to/all_scans.txt', 'rb') as fp: + all_scans = pickle.load(fp) \ No newline at end of file