From 4eaa75030f45836bb9b5dc5775601148086e5a41 Mon Sep 17 00:00:00 2001 From: "Chaunte W. Lacewell" Date: Wed, 24 Jul 2024 11:59:35 -0700 Subject: [PATCH] v2.90 Release (#173) --- .github/ISSUE_TEMPLATE/bug_report.md | 27 + .github/ISSUE_TEMPLATE/feature_request.md | 17 + .../coverage/cpp.develop.coverage_report.txt | 61 ++ .../coverage/cpp.develop.coverage_value.txt | 1 + .../python.develop.coverage_report.txt | 6 + .../python.develop.coverage_value.txt | 1 + .github/requirements.txt | 22 + .github/scripts/Dockerfile.checkin | 249 +++++++ .github/scripts/auto-formatter.sh | 42 ++ .github/scripts/docker-compose.yml | 41 ++ .github/scripts/run_coverage_cpp.sh | 31 + .github/scripts/run_coverage_py.sh | 12 + .github/scripts/setup_vdms.sh | 359 ++++++++++ .github/workflows/CI.yml | 63 ++ .github/workflows/_CI_coverage.yml | 287 ++++++++ .github/workflows/_CI_coverage_compare.yml | 59 ++ .github/workflows/_CI_update.yml | 83 +++ CMakeLists.txt | 6 +- INSTALL.md | 175 +++-- README.md | 13 +- client/cpp/VDMSClient.cc | 4 +- client/cpp/VDMSClient.h | 4 +- distributed/CMakeLists.txt | 2 +- docker/base/Dockerfile | 157 +++-- docker/override_default_config.py | 5 +- include/vcl/DescriptorSet.h | 4 + include/vcl/Image.h | 20 + include/vcl/KeyFrame.h | 2 + include/vcl/Video.h | 21 + remote_function/functions/metadata.py | 117 ++++ remote_function/requirements.txt | 10 +- remote_function/udf_server.py | 34 +- src/BackendNeo4j.cc | 3 +- src/DescriptorsCommand.cc | 14 + src/DescriptorsCommand.h | 1 + src/ImageCommand.cc | 88 ++- src/ImageCommand.h | 3 + src/Neo4JHandlerCommands.cc | 39 +- src/OpsIOCoordinator.cc | 52 +- src/QueryHandlerBase.cc | 38 +- src/QueryHandlerBase.h | 2 - src/QueryHandlerExample.h | 1 - src/QueryHandlerNeo4j.cc | 64 +- src/QueryHandlerPMGD.cc | 37 +- src/QueryHandlerPMGD.h | 3 +- src/VideoCommand.cc | 185 ++++- src/VideoCommand.h | 2 + src/defines.h | 15 + src/vcl/CMakeLists.txt | 3 +- src/vcl/DescriptorSet.cc | 68 +- src/vcl/Image.cc | 102 ++- 
src/vcl/KeyFrame.cc | 2 +- src/vcl/Video.cc | 165 ++++- tests/CMakeLists.txt | 5 +- tests/cleandbs.sh | 1 + tests/python/TestCommand.py | 100 ++- tests/python/TestDescriptors.py | 594 ++++++++++++---- tests/python/TestEngineDescriptors.py | 221 ------ tests/python/TestEntities.py | 213 +++--- tests/python/TestEntitiesBlobs.py | 141 ---- tests/python/TestFindDescriptorSet.py | 55 -- tests/python/TestFindDescriptors.py | 650 ------------------ tests/python/TestImages.py | 232 +++---- tests/python/TestRetail.py | 17 +- tests/python/TestVideos.py | 238 +++---- tests/python/run_python_aws_tests.sh | 2 +- tests/python/run_python_tests.sh | 2 +- .../functions/metadata.py | 112 +++ tests/remote_function_test/metadata_image.jpg | Bin 0 -> 20678 bytes tests/remote_function_test/requirements.txt | 5 - tests/remote_function_test/udf_server.py | 33 +- tests/run_tests.sh | 9 +- tests/test_images/metadata_image.jpg | Bin 0 -> 20678 bytes tests/tls_test/prep-tls-tests.py | 238 ------- .../prep.py => tls_test/prep_certs.py} | 3 + tests/tls_test/run_tls_test_client.py | 85 +++ tests/tls_test/run_tls_test_server.py | 94 +++ tests/udf_test/functions/caption.py | 2 +- tests/udf_test/functions/flip.py | 2 +- tests/udf_test/functions/metadata.py | 116 ++++ tests/udf_test/metadata_image.jpg | Bin 0 -> 20678 bytes tests/udf_test/requirements.txt | 2 - tests/udf_test/settings.json | 3 +- tests/udf_test/udf_local.py | 8 +- tests/unit_tests/Image_test.cc | 71 +- tests/unit_tests/OpsIoTest.cc | 103 ++- tests/unit_tests/TLSTest.cc | 36 +- tests/unit_tests/TimerMapTest.cc | 122 ++++ tests/unit_tests/Video_test.cc | 88 +++ tests/unit_tests/client_descriptors.cc | 2 +- tests/unit_tests/client_image.cc | 79 +++ tests/unit_tests/client_videos.cc | 63 ++ tests/unit_tests/meta_data.cc | 131 +++- tests/unit_tests/meta_data_helper.h | 9 +- user_defined_operations/README.md | 8 +- user_defined_operations/functions/caption.py | 2 +- .../functions/facedetect.py | 2 +- 
user_defined_operations/functions/flip.py | 2 +- user_defined_operations/functions/metadata.py | 121 ++++ user_defined_operations/requirements.txt | 4 +- user_defined_operations/settings.json | 3 +- user_defined_operations/udf_local.py | 6 +- utils/CMakeLists.txt | 4 +- utils/include/chrono/Chrono.h | 186 ----- utils/include/comm/Connection.h | 10 + utils/include/comm/ExceptionComm.h | 1 + utils/include/timers/TimerMap.h | 111 +++ utils/src/api_schema/api_schema.json | 19 +- utils/src/chrono/Chrono.cc | 235 ------- utils/src/comm/ConnClient.cc | 66 +- utils/src/timers/TimerMap.cc | 180 +++++ 111 files changed, 4992 insertions(+), 2677 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/coverage/cpp.develop.coverage_report.txt create mode 100644 .github/coverage/cpp.develop.coverage_value.txt create mode 100644 .github/coverage/python.develop.coverage_report.txt create mode 100644 .github/coverage/python.develop.coverage_value.txt create mode 100644 .github/requirements.txt create mode 100644 .github/scripts/Dockerfile.checkin create mode 100755 .github/scripts/auto-formatter.sh create mode 100644 .github/scripts/docker-compose.yml create mode 100644 .github/scripts/run_coverage_cpp.sh create mode 100644 .github/scripts/run_coverage_py.sh create mode 100755 .github/scripts/setup_vdms.sh create mode 100644 .github/workflows/CI.yml create mode 100644 .github/workflows/_CI_coverage.yml create mode 100644 .github/workflows/_CI_coverage_compare.yml create mode 100644 .github/workflows/_CI_update.yml create mode 100644 remote_function/functions/metadata.py delete mode 100644 tests/python/TestEngineDescriptors.py delete mode 100644 tests/python/TestEntitiesBlobs.py delete mode 100644 tests/python/TestFindDescriptorSet.py delete mode 100644 tests/python/TestFindDescriptors.py create mode 100644 tests/remote_function_test/functions/metadata.py create mode 100644 
tests/remote_function_test/metadata_image.jpg delete mode 100644 tests/remote_function_test/requirements.txt create mode 100644 tests/test_images/metadata_image.jpg delete mode 100644 tests/tls_test/prep-tls-tests.py rename tests/{python/prep.py => tls_test/prep_certs.py} (99%) create mode 100644 tests/tls_test/run_tls_test_client.py create mode 100644 tests/tls_test/run_tls_test_server.py create mode 100644 tests/udf_test/functions/metadata.py create mode 100644 tests/udf_test/metadata_image.jpg delete mode 100644 tests/udf_test/requirements.txt create mode 100644 tests/unit_tests/TimerMapTest.cc create mode 100644 user_defined_operations/functions/metadata.py delete mode 100644 utils/include/chrono/Chrono.h create mode 100644 utils/include/timers/TimerMap.h delete mode 100644 utils/src/chrono/Chrono.cc create mode 100644 utils/src/timers/TimerMap.cc diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..b1fd9845 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,27 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: Bug +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Additional context** +Add any other context about the problem here. 
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000..f67c51b6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,17 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: Enhancement +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/coverage/cpp.develop.coverage_report.txt b/.github/coverage/cpp.develop.coverage_report.txt new file mode 100644 index 00000000..734ba520 --- /dev/null +++ b/.github/coverage/cpp.develop.coverage_report.txt @@ -0,0 +1,61 @@ +------------------------------------------------------------------------------ + GCC Code Coverage Report +Directory: .. 
+------------------------------------------------------------------------------ +File Lines Exec Cover Missing +------------------------------------------------------------------------------ +client/cpp/CSVParserUtil.cpp 345 291 84% 37-45,48,50,239,241,264-265,269-270,286,292,304,313-314,317,323,331-332,335,345,351,363,368,373,379-387,389,425,435-437,474-476,478,503-506 +client/cpp/VDMSClient.cc 20 20 100% +src/AutoDeleteNode.cc 9 8 88% 40 +src/BackendNeo4j.cc 121 0 0% 4,6-17,20,24,29-41,46-47,52,55-58,61-62,64-70,73,78,82-83,85-86,89,92,95-96,98,102,104,106-109,111,114-116,118,122,131-132,138,140,142-144,147,150-152,155-159,161-175,178,182,184,186,195,197-200,204-205,207-208,211-215,220,224-226,228 +src/BlobCommand.cc 87 66 75% 76,130-132,136-139,145,147,165,186-189,192-196,202 +src/BoundingBoxCommand.cc 180 4 2% 45,49,51,53-54,56-59,62,64-67,70-73,76,83,87,90-91,93-97,101,103,105,114,118,122-123,125-132,137-138,140-144,147-150,152,154-160,162-165,167-169,171-173,176-177,179-181,183-184,186-187,190,193,196-197,199,201-204,206-210,213,215-219,222-223,225-227,229-237,240-244,246,251-256,259-261,263,265-266,268,270,272-274,276-277,281-283,286,288,292-294,296,298,300-303,307-308,310-313,316-319,321-326,329-330,335,338-339,341 +src/CommunicationManager.cc 46 0 0% 42-43,46-47,49-50,52-54,57,61-66,68-71,73-81,84,86-88,93,96-97,100-101,105,107-108,110,113-116 +src/DescriptorsCommand.cc 594 107 18% 
56,61,63-67,69-73,75,77,80-81,84,86-87,90,92-93,96-100,103,106,109,156-158,162,176-180,220-231,241,255-257,261,276-283,285,297-300,305,310-314,330,338,340-345,348-351,354,357-358,360-364,367,370,372-373,376-378,380-381,383-384,387-388,390-392,398,402,404,406,408-409,412,414-417,423-424,427,429-430,432,434-435,438,441,443,445,448,450-455,457-461,463,466,469-470,473-474,485,488,491,493-495,503,507,509,512,514-517,520-525,527-531,533,536,538,540,543,546-547,549,551,553-558,561,563-565,568-569,571,573,575-576,578,580,582-584,586,588-593,598-599,602,604,606,613-614,617,621,623,626,628-631,634-638,640-644,646,648-651,653-655,658,662-671,676-677,680,687,689,692-693,696,700,706,708,711,714-715,719,724,726,728,731,734-735,737-740,744,748-749,751,753-755,757,759-760,762,764-766,768-770,775-777,780-781,783,786-790,794,797,801,803-804,806-807,809,811-812,814-816,818,823,825-826,828-830,832-833,837,839-842,844-847,852,855-857,859,861,863-866,868-872,875-876,879,881,883,885-889,892-893,896-899,901,903-910,915,917,919-920,923-926,928,930,932-939,943-948,954,957,959,961-964,967,969,972-975,977-981,987,989,991-994,999,1001,1003-1004,1007-1009,1011,1013,1015-1018,1021-1022,1024-1025,1027,1029-1030,1032-1038,1042-1043,1047-1048,1050-1051,1059-1060,1063-1064,1066,1068-1075,1079,1083-1084,1088-1092,1097-1098,1100 +src/DescriptorsManager.cc 24 19 79% 49-50,57-58,73 +src/ExceptionsCommand.cc 6 0 0% 35-40 +src/ImageCommand.cc 322 157 48% 55,59,63,65,67-69,71,73-76,78,81,86,88-89,97,99,106,109,111-112,114-115,117-118,120-121,124,151,162-163,174-175,177,182-185,195-196,198,203-206,221-229,231-233,246-247,257-267,269-270,272-273,278,286,297,304,308,311,313,315,337,339-340,343-348,350,352,374-376,379-381,385-388,394,396,403-406,420,427,433-436,440-441,452-455,458-463,468-470,481-484,489-493,498-499,501-502,504-508,511,513-517,520-523,526-527,530,532,537 +src/ImageLoop.cc 251 231 92% 63,130,182-185,215,221,265,285,288,297-298,300,307-308,322-323,330,334 +src/Neo4jBaseCommands.cc 38 0 0% 
7-8,12,14-15,17,21,23-24,26,30,32-33,35,39,41-42,44,48,50,53,57-59,62-70,72,74,76-77,80 +src/Neo4JHandlerCommands.cc 109 0 0% 50,54-55,57-58,61,65-69,72,74,76-80,82,84,86,92,95-96,98,100,102,106-107,110-111,114-119,123-124,126,128-129,132-133,137-139,141,144,149-151,155,157,160,162,165-166,169,172,176-178,180-186,188-191,196,198-201,203,206,209,211-213,217-220,222-225,228-231,233-237,239-244,249 +src/OpsIOCoordinator.cc 96 73 76% 48,52,54-55,57,61-63,65,74,78,80,91,93,100,102-103,123,125,133,149,153,203 +src/PMGDIterators.cc 52 44 84% 62,76,96-101 +src/PMGDQuery.cc 463 363 78% 89-92,94-96,129,131,135,140,143-144,167-169,171-172,211-212,216-218,248,254,258,288,298,302-305,307,309,311,317,321-322,354,356,358,360-361,364-373,375-377,379,383-384,386-388,409,412-414,419-424,446,449-450,480-481,492,547,549-557,653-654,658-662,664-668 +src/PMGDQueryHandler.cc 623 517 83% 82-84,166-167,169-170,194-197,208-209,230,279,281,285,290,292,320-321,338,340,344,346,397-398,400,402-407,409,411,463-464,478-479,488,490,496,498,524-526,537,566,605,607,612-613,649,651-655,677-679,681-686,688,720,729-730,737,741-743,745,748,751,815,822,850,870-876,878-879,881,883,895-896,915-917,921-922,965,1012-1013,1015-1017 +src/QueryHandlerBase.cc 32 6 18% 26-29,35-36,38,42-43,45,47-48,52,56-58,60-62,64-66,68-69,72-73 +src/QueryHandlerExample.cc 33 18 54% 65-67,75-78,84-85,89-92,94-95 +src/QueryHandlerNeo4j.cc 139 0 0% 53,55-56,58,60-62,64-65,67,70-76,80-81,83-87,91,93,95-100,104-108,111-115,119-126,129-132,134-136,139-147,149-153,159,162,169,172-175,177-179,181,184-187,189-190,192,194,197,199,201-204,207-208,210,212-213,216,218,222-223,225,229,231-232,235,237-240,243-247,250-254,256-257,261-268,271-274,277 +src/QueryHandlerPMGD.cc 344 226 65% 102-104,112-115,130-131,135-139,142-146,150-157,160-163,165-167,176-178,182-184,202-204,209-211,226-232,236-238,255,257-266,295-297,343-345,347-349,352-353,355-358,381-382,384-385,394,400-412,414-416,423-431,468,470,525-526,528-529 +src/QueryMessage.cc 12 0 0% 
37-39,42-43,45-46,48,51-54 +src/RSCommand.cc 144 105 72% 65-67,73-74,98,100-101,103,110,131,134-138,141,172-174,176,178-181,188,262,285,287-289,291-297,301 +src/SearchExpression.cc 99 38 38% 59,132-133,135,137-139,143,146,148-153,157,160,168-170,177,180-181,183-185,188,192-195,197,201,217-222,224-225,227,235-240,243,247-249,252-256,263,276,284-285 +src/Server.cc 138 0 0% 57-58,60,64,68-70,72-73,77-78,80,85,88,90,92,95,97-98,103-106,108-109,112,116,118,122,125,128,131,133-134,136-138,140,142,145-152,154-155,159,162-167,170,172-173,176-177,181,183,185-186,188-190,192,194,197-199,203-206,208-209,212-216,218-219,221,223,225,228,231-232,236-237,239-240,242,246-250,253,255-257,260-263,265-268,272-273,276,280-281,284-287,292,294-301,304-309 +src/vcl/CustomVCL.cc 51 22 43% 55,57-58,60-63,66,69-70,72,74,76-78,82-83,89-93,95,98-99,102,104-105,110 +src/vcl/DescriptorSet.cc 205 150 73% 64-65,67-68,89-90,111-112,129,131,133,186-189,192,217-218,220-221,224-227,236-240,252,264,315-316,319,321-324,327,341-342,344-346,350-352,354,361-363,365-366,369-370 +src/vcl/DescriptorSetData.cc 55 47 85% 48,58,64,67,114,116-118 +src/vcl/Exception.cc 7 6 85% 38 +src/vcl/FaissDescriptorSet.cc 182 157 86% 83,115-116,132,167,187-188,204-205,224-225,238-239,245,258-259,261,272-273,279,299-300,302-303,305 +src/vcl/FlinngDescriptorSet.cc 149 109 73% 60-66,89,109-111,113-114,118-121,124,126,128,130,132,134-137,140-141,143-144,170-171,176-177,182,206,208,228,248,279 +src/vcl/Image.cc 910 689 75% 
62,73-74,76-78,81-84,86,92,101,122-123,125,132-133,135,147,165,170,193,196-199,223,246,249-252,264,273,276-279,291,323,326-329,341,347,349-352,360-362,369,393-396,415,417,425,427,432,436,441,445,459,462,467-468,471-472,474,490,500,513,531,553-556,594,605-606,608,615,619,624,627-630,658-660,712,757-758,809,838-842,844-850,852,854-855,896,899-900,939-940,944-945,966,985-986,988,1028-1030,1032-1036,1038-1042,1044-1048,1050-1054,1056-1060,1062-1065,1088,1109,1128-1136,1147-1148,1167-1186,1198-1199,1207,1218,1220-1222,1224-1226,1228,1242,1246-1247,1249,1254-1255,1257,1278,1282,1285,1292,1307,1313,1322,1336,1361,1379,1462,1481 +src/vcl/KeyFrame.cc 303 244 80% 58,62,86,90,95,97,102,105-107,109-111,113,119,139,148,154,172,186,190,216,220,224,235,239,249,255,274,284,288,307,315,341,345,347,359,367,369,394,396,405,430,442,449,465,469,478,483,495,500,507,514,518,525,541,547,557,563 +src/vcl/RemoteConnection.cc 295 172 58% 56-59,66-69,82,87,91-94,96,120-122,131-134,150-153,155,169-172,174,186-189,191,204-207,209,221-224,226,241-244,247,255-257,259-262,264,273-274,285-286,295-299,306-308,310,329-332,339-341,343,355-358,370-372,374,402-404,406,418-421,433-435,437,454-456,467-469,471,484-487,492-494,496,499,502-504,506-507,509 +src/vcl/TDBDenseDescriptorSet.cc 115 111 96% 95,162-163,213 +src/vcl/TDBDescriptorSet.cc 51 46 90% 127,148,150,155,158 +src/vcl/TDBImage.cc 471 371 78% 164,186,209,255-257,268-271,300-302,305-306,308,325,341-344,346-350,352-354,364,366,386,406-411,414-417,421-424,428-431,433-435,437-439,523-524,551,553-556,558-559,561-562,564-567,578,580,583,585,644,664-668,750-754,756-758,760,762-767,770-772,785 +src/vcl/TDBObject.cc 326 271 83% 112-114,116,118,120,219,221-222,258,321-322,386-388,398,432-433,462-463,493-494,496,500-501,503,621-632,638-651,661-663 +src/vcl/TDBSparseDescriptorSet.cc 245 230 93% 163,190-191,230-232,252,294-296,308-309,380-381,441 +src/vcl/utils.cc 71 63 88% 54-55,65,71,79,91,93,121 +src/vcl/Video.cc 743 531 71% 
66,127,133,138,160,166-167,189,191-195,198-201,218-223,231,233-239,241-245,248-250,254-255,260-261,263-264,266,268,271-272,274-275,278-279,296,314-327,344,346,348-350,379,412-414,456,465,488,494,517,637,639,650,656,660-661,670,689,692,694-695,698-699,728-729,750-752,755-756,758-759,762-763,782-785,813-815,818,821-823,838-841,849,851,853-858,870-873,907,929,944,967,971,992,994-995,997,999-1001,1004,1006,1008,1010-1013,1015,1017-1021,1026-1027,1030,1033,1035-1036,1039-1042,1045,1051,1053,1079,1087,1104,1119,1123,1130,1134,1151,1154,1163-1164,1166,1171-1173,1176-1177,1180-1181,1186,1204-1207,1260,1264,1266-1267,1269-1270,1272,1274,1277-1278,1281-1282,1288-1292 +src/vdms.cc 108 0 0% 39,41-42,44-47,49-50,52-55,57-58,61-63,65-66,68,70,73-75,78,82,84-87,89,91-97,99-102,104-105,107-110,112-114,116-119,121-122,124-127,129-130,132-135,137-138,140-141,145-147,150-153,156,158-160,163,165,168,171-177,183,185,188-189,193,196,202-203,206,212-215,218,220 +src/VDMSConfig.cc 181 168 92% 119-121,196,198,201-202,208-209,213-214,325-326 +src/VideoCommand.cc 474 117 24% 50,53-54,56-58,60,62,65-66,68-69,72,74-76,78-80,82,84-87,89-90,92-93,95,97-99,102,109,111,116,121-124,130,132,158-161,167-168,170,181,184,201,213,217-220,227-229,231-233,239,241-247,249-250,253-255,257-259,261-262,264,266-278,280-282,284-285,296,300,325,329,331,333,335,337,340-341,343,346,350,352,357-358,380-381,383-384,387-392,394,396,398-399,405,407,429-431,436,442-445,449-454,456-463,467-473,475,480-485,488,490-491,494-496,504,509,527-532,535-539,555,558,560-562,565-567,569-570,572-576,579-580,583-585,587,589-591,594-597,601-606,611-612,614-615,617-621,624-626,628,630-632,634-637,640-641,644,646,651,664,666-673,677,680,683,688-689,691-695,698-699,701,703,705,708,712,714,716-719,721-723,726,728,730,732-733,735-736,740,745,748-749,751-753,755,757,759-761,763-764,767-769,773-776,780-786,790-794,798,801,803,805,807,809-813,817-821,824-825,827-830,833-836,841-842,846-851,855-856,859-860 +src/VideoLoop.cc 249 200 80% 
33,81,98-101,103-109,180,188,197,201,207,211,217,220,290,312,315,324-325,327,331-332,334-335,339-342,344,346-349,351-354,356-357,359,361,370 +utils/src/comm/ConnClient.cc 69 57 82% 49,55,59-60,98,103,108,114,120,127,130,149 +utils/src/comm/Connection.cc 82 61 74% 48-53,75,77-79,84,86,97,111,135,140,153,157,159,168,172 +utils/src/comm/ConnServer.cc 61 49 80% 60,64,68,75,84,91,103,108,128,135,140,145 +utils/src/comm/Exception.cc 6 0 0% 35-40 +utils/src/stats/SystemStats.cc 250 249 99% 453 +utils/src/timers/TimerMap.cc 82 75 91% 126,151,153,155-158 +------------------------------------------------------------------------------ +TOTAL 10068 6488 64% +------------------------------------------------------------------------------ diff --git a/.github/coverage/cpp.develop.coverage_value.txt b/.github/coverage/cpp.develop.coverage_value.txt new file mode 100644 index 00000000..246ac8fe --- /dev/null +++ b/.github/coverage/cpp.develop.coverage_value.txt @@ -0,0 +1 @@ +64.4418 diff --git a/.github/coverage/python.develop.coverage_report.txt b/.github/coverage/python.develop.coverage_report.txt new file mode 100644 index 00000000..eddfefcb --- /dev/null +++ b/.github/coverage/python.develop.coverage_report.txt @@ -0,0 +1,6 @@ +Name Stmts Miss Cover Missing +-------------------------------------------------------------------- +/vdms/client/python/vdms/__init__.py 2 0 100% +/vdms/client/python/vdms/vdms.py 98 2 98% 151, 166 +-------------------------------------------------------------------- +TOTAL 100 2 98% diff --git a/.github/coverage/python.develop.coverage_value.txt b/.github/coverage/python.develop.coverage_value.txt new file mode 100644 index 00000000..6529ff88 --- /dev/null +++ b/.github/coverage/python.develop.coverage_value.txt @@ -0,0 +1 @@ +98 diff --git a/.github/requirements.txt b/.github/requirements.txt new file mode 100644 index 00000000..54341570 --- /dev/null +++ b/.github/requirements.txt @@ -0,0 +1,22 @@ +blinker==1.8.2 +cffi==1.16.0 +click==8.1.7 
+colorlog==6.8.2 +coverage==7.6.0 +cryptography==42.0.8 +Flask==3.0.3 +gcovr==7.2 +imutils==0.5.4 +itsdangerous==2.2.0 +Jinja2==3.1.4 +lxml==5.2.2 +MarkupSafe==2.1.5 +numpy==1.26.4 +opencv-python-headless==4.9.0.80 +protobuf==4.24.2 +pycparser==2.22 +Pygments==2.18.0 +pyzmq==26.0.3 +scipy==1.14.0 +sk-video==1.1.10 +Werkzeug==3.0.3 diff --git a/.github/scripts/Dockerfile.checkin b/.github/scripts/Dockerfile.checkin new file mode 100644 index 00000000..58e02bd3 --- /dev/null +++ b/.github/scripts/Dockerfile.checkin @@ -0,0 +1,249 @@ +#Copyright (C) 2023 Intel Corporation +#SPDX-License-Identifier: MIT + +ARG BASE_VERSION=12-slim +ARG BUILD_THREADS="-j16" +############################################################ +# BASE IMAGE W/ ENV VARS +FROM debian:${BASE_VERSION} AS base +# Dockerfile limitations force a repetition of global args +ARG BUILD_THREADS +ARG AWS_ACCESS_KEY_ID="" +ARG AWS_SECRET_ACCESS_KEY="" +ARG NEO4J_USER="" +ARG NEO4J_PASS="" +ARG NEO4J_ENDPOINT="" +ARG AWS_API_PORT=9000 +ARG AWS_CONSOLE_PORT=9001 + +ENV DEBIAN_FRONTEND=noninteractive +ENV DEBCONF_NOWARNINGS="yes" +ENV PYTHON_BASE="3.12" +ENV PYTHON_VERSION="${PYTHON_BASE}.3" +ENV PROTOBUF_VERSION="24.2" +ENV NUMPY_MIN_VERSION="1.26.0" +ENV VIRTUAL_ENV=/opt/venv + +ENV AWS_ACCESS_KEY_ID="${AWS_ACCESS_KEY_ID}" +ENV AWS_SECRET_ACCESS_KEY="${AWS_SECRET_ACCESS_KEY}" +ENV NEO4J_USER="${NEO4J_USER}" +ENV NEO4J_PASS="${NEO4J_PASS}" +ENV NEO4J_ENDPOINT="${NEO4J_ENDPOINT}" +ENV AWS_API_PORT="${AWS_API_PORT}" +ENV AWS_CONSOLE_PORT="${AWS_CONSOLE_PORT}" + +############################################################ +# BUILD DEPENDENCIES +FROM base AS build + +# Install Packages +# hadolint ignore=DL3008 +RUN apt-get update -y && apt-get upgrade -y && \ + apt-get install -o 'Acquire::Retries=3' -y --no-install-suggests \ + --no-install-recommends --fix-broken --fix-missing \ + apt-transport-https automake bison build-essential bzip2 ca-certificates \ + cppzmq-dev curl ed flex g++ gcc git gnupg-agent javacc 
libarchive-tools libatlas-base-dev \ + libavcodec-dev libavformat-dev libavutil-dev libboost-all-dev libbz2-dev libc-ares-dev \ + libcurl4-openssl-dev libdc1394-dev libgflags-dev libgoogle-glog-dev \ + libgtk-3-dev libgtk2.0-dev libhdf5-dev libjpeg-dev libjpeg62-turbo-dev libjsoncpp-dev \ + libleveldb-dev liblmdb-dev liblz4-dev libncurses5-dev libopenblas-dev libopenmpi-dev \ + libpng-dev librdkafka-dev libsnappy-dev libssl-dev libswscale-dev libtbb-dev libtbbmalloc2 \ + libtiff-dev libtiff5-dev libtool linux-libc-dev mpich openjdk-17-jdk-headless \ + pkg-config procps software-properties-common swig unzip uuid-dev && \ + apt-get --purge remove -y python3.11 && apt-get autoremove -y && \ + apt-get clean && rm -rf /var/lib/apt/lists/* + +WORKDIR /usr/src +# hadolint ignore=DL3003,SC2086 +RUN apt update -y && \ + apt install -y libffi-dev libgdbm-dev libnss3-dev libreadline-dev libsqlite3-dev zlib1g-dev && \ + curl -O https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz && \ + tar -xzf Python-${PYTHON_VERSION}.tgz && cd Python-${PYTHON_VERSION} && \ + ./configure --enable-optimizations && make ${BUILD_THREADS} && make altinstall && \ + update-alternatives --install /usr/bin/python python /usr/local/bin/python3.12 1 && \ + python${PYTHON_BASE} -m venv ${VIRTUAL_ENV} +ENV PATH="$VIRTUAL_ENV/bin:$PATH" + +# Pull and Install Dependencies +WORKDIR /dependencies +ENV AUTOCONF_VERSION="2.71" \ + AWS_SDK_VERSION="1.11.336" \ + CMAKE_VERSION="v3.28.5" \ + FAISS_VERSION="v1.7.4" \ + LIBEDIT_VERSION="20230828-3.1" \ + OPENCV_VERSION="4.9.0" \ + PEG_VERSION="0.1.19" \ + TILEDB_VERSION="2.14.1" \ + VALIJSON_VERSION="v0.6" + +# CMAKE +# hadolint ignore=DL3003,SC2086 +RUN git clone --branch ${CMAKE_VERSION} https://github.com/Kitware/CMake.git /dependencies/CMake && \ + cd /dependencies/CMake && ./bootstrap && make ${BUILD_THREADS} && \ + make install DESTDIR=/opt/dist && make install + +# PROTOBUF & ITS DEPENDENCIES +# hadolint ignore=DL3003,SC2086 +RUN 
git clone -b "v${PROTOBUF_VERSION}" --recurse-submodules https://github.com/protocolbuffers/protobuf.git /dependencies/protobuf && \ + cd /dependencies/protobuf/third_party/googletest && mkdir build && cd build/ && \ + cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_BUILD_TYPE=Release \ + -DBUILD_SHARED_LIBS=ON -DCMAKE_INSTALL_PREFIX=/opt/dist/usr/local \ + -DBUILD_GMOCK=ON -DCMAKE_CXX_STANDARD=17 .. && \ + make ${BUILD_THREADS} && make install && \ + cd /dependencies/protobuf/third_party/abseil-cpp && mkdir build && cd build && \ + cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DBUILD_SHARED_LIBS=ON \ + -DCMAKE_INSTALL_PREFIX=/opt/dist/usr/local -DABSL_BUILD_TESTING=ON \ + -DABSL_USE_EXTERNAL_GOOGLETEST=ON \ + -DABSL_FIND_GOOGLETEST=ON -DCMAKE_CXX_STANDARD=17 .. && \ + make ${BUILD_THREADS} && make install && ldconfig /opt/dist/usr/local/lib && \ + cd /dependencies/protobuf && \ + cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_INSTALL_PREFIX=/opt/dist/usr/local \ + -DCMAKE_CXX_STANDARD=17 -Dprotobuf_BUILD_SHARED_LIBS=ON \ + -Dprotobuf_ABSL_PROVIDER=package \ + -Dprotobuf_BUILD_TESTS=ON \ + -Dabsl_DIR=/opt/dist/usr/local/lib/cmake/absl . && \ + make ${BUILD_THREADS} && make install + +# OPENCV +# hadolint ignore=DL3003,SC2086 +RUN git clone https://github.com/opencv/opencv.git /dependencies/opencv && \ + cd /dependencies/opencv && git checkout tags/${OPENCV_VERSION} && \ + mkdir build && cd build && cmake -DBUILD_PERF_TESTS=OFF -DBUILD_TESTS=OFF .. 
&& \ + make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install + +# VALIJSON +# hadolint ignore=DL3003 +RUN python -m pip install --no-cache-dir "numpy>=${NUMPY_MIN_VERSION},<2.0.0" && \ + git clone --branch ${VALIJSON_VERSION} https://github.com/tristanpenman/valijson.git /dependencies/valijson && \ + cd /dependencies/valijson && cp -r include/* /usr/local/include/ && \ + mkdir -p /opt/dist/usr/local/include/ && cp -r include/* /opt/dist/usr/local/include/ + +# FAISS & FLINNG LIBRARIES +# hadolint ignore=DL3003,SC2086 +RUN git clone --branch ${FAISS_VERSION} https://github.com/facebookresearch/faiss.git /dependencies/faiss && \ + cd /dependencies/faiss && mkdir build && cd build && \ + cmake -DFAISS_ENABLE_GPU=OFF -DPython_EXECUTABLE=$(which python) \ + -DBUILD_SHARED_LIBS=ON -DCMAKE_BUILD_TYPE=Release .. && \ + make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install && \ + git clone https://github.com/tonyzhang617/FLINNG.git /dependencies/FLINNG && \ + cd /dependencies/FLINNG && mkdir build && cd build && cmake .. && \ + make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install + +# TILEDB & AWS S3 SDK +# hadolint ignore=DL3003,SC2086 +RUN curl -L -O https://github.com/TileDB-Inc/TileDB/archive/refs/tags/${TILEDB_VERSION}.tar.gz && \ + tar -xvf ${TILEDB_VERSION}.tar.gz && cd TileDB-${TILEDB_VERSION} && \ + mkdir build && cd build && ../bootstrap --prefix=/usr/local/ && make ${BUILD_THREADS} && \ + make install-tiledb DESTDIR=/opt/dist && make install-tiledb && \ + git clone -b ${AWS_SDK_VERSION} --recurse-submodules https://github.com/aws/aws-sdk-cpp /dependencies/aws-sdk-cpp && \ + mkdir -p /dependencies/aws-sdk-cpp/build && cd /dependencies/aws-sdk-cpp/build && \ + cmake .. 
-DCMAKE_BUILD_TYPE=Debug -DCMAKE_PREFIX_PATH=/usr/local/ -DCMAKE_INSTALL_PREFIX=/usr/local/ \ + -DBUILD_ONLY="s3" -DCUSTOM_MEMORY_MANAGEMENT=OFF -DENABLE_TESTING=OFF && \ + make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install + +# AUTOCONF VERSION FOR NEO4J +# hadolint ignore=DL3003,SC2086 +RUN curl -O https://ftp.gnu.org/gnu/autoconf/autoconf-${AUTOCONF_VERSION}.tar.xz && \ + tar -xf autoconf-${AUTOCONF_VERSION}.tar.xz && cd autoconf-${AUTOCONF_VERSION} && \ + ./configure && make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install + +# LIB-OMNI FOR NEO4J QUERY HANDLER +# hadolint ignore=DL3003,SC2086 +RUN curl -L -O https://github.com/gpakosz/peg/releases/download/${PEG_VERSION}/peg-${PEG_VERSION}.tar.gz && \ + tar -xf peg-${PEG_VERSION}.tar.gz && cd peg-${PEG_VERSION} && \ + make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install && \ + git clone https://github.com/cleishm/libcypher-parser.git /dependencies/libcypher && \ + cd /dependencies/libcypher && ./autogen.sh && ./configure && \ + make install DESTDIR=/opt/dist && make install && \ + curl -L -o /dependencies/libedit-${LIBEDIT_VERSION}.tar.gz https://thrysoee.dk/editline/libedit-${LIBEDIT_VERSION}.tar.gz && \ + cd /dependencies && tar -xzf libedit-${LIBEDIT_VERSION}.tar.gz && cd libedit-${LIBEDIT_VERSION} && \ + ./configure && make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install && \ + git clone https://github.com/majensen/libneo4j-omni.git /dependencies/libomni && \ + cd /dependencies/libomni && ./autogen.sh && \ + ./configure --disable-werror --prefix=/opt/dist/usr && \ + make clean check && make install -w --debug + +# CLEANUP +RUN rm -rf /dependencies /usr/local/share/doc /usr/local/share/man && \ + mkdir -p /opt/dist/usr/include/x86_64-linux-gnu && \ + cp -rp /usr/include/x86_64-linux-gnu /opt/dist/usr/include/x86_64-linux-gnu + + +############################################################ +# FINAL IMAGE +FROM base +ARG 
BUILD_COVERAGE="on" +ENV BUILD_COVERAGE="${BUILD_COVERAGE}" + +# COPY FILES +COPY .github/scripts/run_coverage_cpp.sh /run_coverage_cpp.sh +COPY .github/scripts/run_coverage_py.sh /run_coverage_py.sh +COPY .git /vdms/.git +COPY client /vdms/client +COPY distributed /vdms/distributed +COPY include /vdms/include +COPY remote_function /vdms/remote_function +COPY src /vdms/src +COPY tests /vdms/tests +COPY user_defined_operations /vdms/user_defined_operations +COPY utils /vdms/utils +COPY CMakeLists.txt /vdms/CMakeLists.txt +COPY config-vdms.json /vdms/config-vdms.json +COPY docker/override_default_config.py /vdms/override_default_config.py +COPY --from=build /opt/dist / +COPY --from=build /usr/local/bin/python${PYTHON_BASE} /usr/local/bin/python${PYTHON_BASE} +COPY --from=build /usr/local/lib/python${PYTHON_BASE} /usr/local/lib/python${PYTHON_BASE} +COPY --from=build ${VIRTUAL_ENV} ${VIRTUAL_ENV} +ENV PATH="$VIRTUAL_ENV/bin:$PATH" + +# hadolint ignore=DL3008,SC2086 +RUN apt-get update -y && apt-get upgrade -y && \ + apt-get install -o 'Acquire::Retries=3' -y --no-install-suggests \ + --no-install-recommends --fix-broken --fix-missing \ + build-essential bzip2 cppzmq-dev curl g++ gcc git javacc libarchive-tools libavcodec-dev \ + libavformat-dev libcurl4-openssl-dev libdc1394-dev libgoogle-glog-dev libgtk-3-dev \ + libhdf5-dev libjpeg62-turbo-dev libjsoncpp-dev libopenblas-dev libpng-dev librdkafka-dev \ + libssl-dev libswscale-dev libtbb-dev libtbbmalloc2 libtiff5-dev libzip-dev openjdk-17-jdk-headless \ + procps && \ + apt-get --purge remove -y python3.11 && apt-get autoremove -y && \ + apt-get clean && rm -rf /var/lib/apt/lists/* && \ + echo "/usr/local/lib" >> /etc/ld.so.conf.d/all-libs.conf && ldconfig && \ + python3 -m pip install --no-cache-dir "numpy>=${NUMPY_MIN_VERSION},<2.0.0" "protobuf==4.${PROTOBUF_VERSION}" \ + "coverage>=7.3.1" "cryptography>=42.0.7" + +# COVERAGE TESTING +WORKDIR /vdms +# hadolint ignore=DL3008,SC2086 +RUN if [ "${BUILD_COVERAGE}" = 
"on" ]; then \ + apt-get update -y ; \ + apt-get install -y --no-install-suggests --no-install-recommends gdb ; \ + apt-get clean ; \ + rm -rf /var/lib/apt/lists/* ; \ + python3 -m pip install --no-cache-dir "gcovr>=7.0" ; \ + curl -L -o /vdms/minio https://dl.min.io/server/minio/release/linux-amd64/minio ; \ + chmod +x /vdms/minio ; \ + mkdir -p /vdms/minio_files/minio-bucket ; \ + mkdir -p /vdms/tests/coverage_report ; \ + chmod +x /run_coverage_*.sh ; \ + # Install the MinIO Client mc command line tool used by scripts for creating buckets + curl -o /usr/local/bin/mc https://dl.min.io/client/mc/release/linux-amd64/mc ; \ + chmod +x /usr/local/bin/mc ; \ + else \ + rm -rf /run_coverage_*.sh ; \ + fi + +# VDMS +# hadolint ignore=DL3003,SC2086 +RUN git submodule update --init --recursive && \ + sed -i "s|java-11-openjdk|java-17-openjdk|g" /vdms/src/pmgd/java/CMakeLists.txt && \ + sed -i "s|#include |#include \n#include |" /vdms/src/pmgd/test/neighbortest.cc && \ + sed -i "s|#include |#include \n#include |" /vdms/src/pmgd/tools/mkgraph.cc && \ + mkdir -p /vdms/build && cd /vdms/build && \ + cmake -DCODE_COVERAGE="${BUILD_COVERAGE}" .. && make ${BUILD_THREADS} && \ + echo '#!/bin/bash' > /start.sh && echo 'cd /vdms/build' >> /start.sh && \ + echo 'python /vdms/override_default_config.py -i /vdms/config-vdms.json -o /vdms/build/config-vdms.json' >> /start.sh && \ + echo './vdms' >> /start.sh && chmod 755 /start.sh + +ENV PYTHONPATH=/vdms/client/python:${PYTHONPATH} +HEALTHCHECK CMD echo "This is a healthcheck test." || exit 1 +CMD ["/start.sh"] diff --git a/.github/scripts/auto-formatter.sh b/.github/scripts/auto-formatter.sh new file mode 100755 index 00000000..a609b0bf --- /dev/null +++ b/.github/scripts/auto-formatter.sh @@ -0,0 +1,42 @@ +#!/bin/bash -e + +check_package(){ + PACKAGE_TYPE=$1 + PACKAGE_NAME=$2 + + if [ $PACKAGE_TYPE == "apt" ]; then + if hash $PACKAGE_NAME 2>/dev/null; then + echo "$PACKAGE_NAME exists!" 
+ else + echo "Installing $PACKAGE_NAME" + sudo apt-get install $PACKAGE_NAME + fi + elif [ $PACKAGE_TYPE == "python" ]; then + if python3 -c "import $PACKAGE_NAME" 2>/dev/null; then + echo "$PACKAGE_NAME exists!" + else + echo "Installing $PACKAGE_NAME" + python3 -m pip install --upgrade --no-cache-dir "${PACKAGE_NAME}" + fi + else + echo "UNKNOWN Package type ($PACKAGE_TYPE). Choose apt or python" + exit 1; + fi +} + +REPO_DIR=$(dirname "$(dirname "$(dirname "$(readlink -f "$0")")")") +echo "SCAN DIR: ${REPO_DIR}" + +# Convert files from Windows-style line endings (CRLF) to Linux-style line endings (LF) +check_package apt dos2unix +find ${REPO_DIR} -type f -exec dos2unix -v -k -s -o {} ';' + +# Run Clang-Format on C++ Code (Google C++ Style) +check_package apt clang-format +find "${REPO_DIR}" -type f -not -path "${REPO_DIR}/src/pmgd/*" \ + -not -path "${REPO_DIR}/build/*" \ + -regex '.*\.\(cc\|cpp\|h\|hpp\)' | xargs clang-format -i + +# Run Linter on Python Code +check_package python 'black>=23.1.0' +black ${REPO_DIR}/ --exclude="client/python/vdms/queryMessage_pb2.py" diff --git a/.github/scripts/docker-compose.yml b/.github/scripts/docker-compose.yml new file mode 100644 index 00000000..24e34abc --- /dev/null +++ b/.github/scripts/docker-compose.yml @@ -0,0 +1,41 @@ +services: + vdms: + build: + dockerfile: .github/scripts/Dockerfile.checkin + context: ../.. 
+ args: + - BUILD_COVERAGE=on + image: vdms:${SOURCE_CONTAINER_NAME} + container_name: ${SOURCE_CONTAINER_NAME} + networks: [backend, frontend] + environment: + - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} + - AWS_API_PORT=${AWS_API_PORT} + - AWS_CONSOLE_PORT=${AWS_CONSOLE_PORT} + - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} + - NEO4J_ENDPOINT=neo4j://neo4j:${NEO_TEST_PORT} + - NEO4J_PASS=${NEO4J_PASS} + - NEO4J_USER=${NEO4J_USER} + - NEO_TEST_PORT=${NEO_TEST_PORT} + - OVERRIDE_print_high_level_timing="true" + - OVERRIDE_print_query_timing="true" + - OVERRIDE_print_vcl_timing="true" + - VDMS_TEST_PORT=${VDMS_TEST_PORT} + links: + - neo4j + + neo4j: + image: neo4j:5.17.0 + restart: always + container_name: ${NEO4J_CONTAINER_NAME} + networks: [backend] + environment: + - NEO4J_AUTH=${NEO4J_USER}/${NEO4J_PASS} + +networks: + backend: + internal: true + name: ${SOURCE_CONTAINER_NAME}_backend + frontend: + internal: false + name: ${SOURCE_CONTAINER_NAME}_frontend diff --git a/.github/scripts/run_coverage_cpp.sh b/.github/scripts/run_coverage_cpp.sh new file mode 100644 index 00000000..752216de --- /dev/null +++ b/.github/scripts/run_coverage_cpp.sh @@ -0,0 +1,31 @@ +#!/bin/bash -e + +cd /vdms/tests + +# Run Local C++ PMGD Based Tests +chmod +x run_tests.sh +echo 'Running run_tests.sh script' +./run_tests.sh + +# Run S3 C++ PMGD Based Tests +echo 'Checking for the available disk space due MinIO requires at least 1gb free' +df -h +chmod +x run_aws_tests.sh +echo 'Running run_aws_tests.sh script' +./run_aws_tests.sh -u ${AWS_ACCESS_KEY_ID} -p ${AWS_SECRET_ACCESS_KEY} + +# Obtain Coverage +gcovr --root /vdms \ + -e /vdms/src/pmgd -e /vdms/build -e /vdms/distributed -e /vdms/tests \ + --gcov-ignore-parse-errors=negative_hits.warn_once_per_file \ + --gcov-ignore-errors=no_working_dir_found \ + -f "/vdms/.*/.*\.cc" -f "/vdms/.*/.*\.cpp" \ + --exclude-unreachable-branches \ + --exclude-noncode-lines \ + --txt=/vdms/tests/coverage_report/cpp.new.coverage_report.txt \ + 
--xml-pretty --xml=/vdms/tests/coverage_report/cpp.new.coverage_report.xml + +echo "DONE" + +cat /vdms/tests/coverage_report/cpp.new.coverage_report.xml | grep -oP 'coverage line-rate="([-+]?\d*\.\d+|\d+)"' | grep -oP "[-+]?\d*\.\d+|\d+" | awk '{print $1*100}' > /vdms/tests/coverage_report/cpp.new.coverage_value.txt +cat /vdms/tests/coverage_report/cpp.new.coverage_report.txt diff --git a/.github/scripts/run_coverage_py.sh b/.github/scripts/run_coverage_py.sh new file mode 100644 index 00000000..7f38588b --- /dev/null +++ b/.github/scripts/run_coverage_py.sh @@ -0,0 +1,12 @@ +#!/bin/bash -e + +cd /vdms/tests/python + +./run_python_tests.sh +./run_python_aws_tests.sh -u ${AWS_ACCESS_KEY_ID} -p ${AWS_SECRET_ACCESS_KEY} +python -m coverage report -m 2>&1 | tee /vdms/tests/coverage_report/python.new.coverage_report.txt +python -m coverage xml -o /vdms/tests/coverage_report/python.new.coverage_report.xml + +echo "DONE" + +cat /vdms/tests/coverage_report/python.new.coverage_report.xml | grep "coverage version" | grep -oP 'line-rate="([-+]?\d*\.\d+|\d+)"' | grep -oP "[-+]?\d*\.\d+|\d+"| awk '{print $1*100}' > /vdms/tests/coverage_report/python.new.coverage_value.txt diff --git a/.github/scripts/setup_vdms.sh b/.github/scripts/setup_vdms.sh new file mode 100755 index 00000000..c7886f6b --- /dev/null +++ b/.github/scripts/setup_vdms.sh @@ -0,0 +1,359 @@ +#!/bin/bash -e +####################################################################################################################### +# SETUP +# Supported OS: +# - Debian: 11(bullseye), 12(bookworm/stable), 13(trixie) +# - Ubuntu: 20.04(focal), 22.04(jammy), 23.10(mantic), 24.04(noble) +####################################################################################################################### + +BUILD_COVERAGE="off" +BUILD_THREADS="-j16" +DEBIAN_FRONTEND=noninteractive +CUR_DIR=$(dirname $(realpath "$0")) +WORKSPACE=$(dirname $(dirname ${CUR_DIR})) +VDMS_DEP_DIR=/dependencies +BUILD_VDMS=false +OS_NAME=$(awk 
-F= '$1=="ID" { print $2 ;}' /etc/os-release) +OS_VERSION=$(awk -F= '$1=="VERSION_ID" { print $2 ;}' /etc/os-release | sed -e 's|"||g') +MODIFY_PMGD=false +PYTHON_VERSION="3.12.3" +PYTHON_BASE=$(echo ${PYTHON_VERSION} | cut -d. -f-2) #"3.12" +VIRTUAL_ENV=/opt/venv + +LONG_LIST=( + "help" + "coverage" + "dep_dir" + "make" + "python_version" + "workspace" +) + +OPTS=$(getopt \ + --options "hd:w:p:mc" \ + --long help,coverage,dep_dir:,make,python_version:,workspace: \ + --name "$(basename "$0")" \ + -- "$@" +) + +eval set -- $OPTS + +script_usage() +{ + cat <=7.0" + curl -L -o ${WORKSPACE}/minio https://dl.min.io/server/minio/release/linux-amd64/minio + chmod +x ${WORKSPACE}/minio + mkdir -p ${WORKSPACE}/minio_files/minio-bucket + mkdir -p ${WORKSPACE}/tests/coverage_report + + # Install the MinIO Client mc command line tool used by scripts for creating buckets + curl -o /usr/local/bin/mc https://dl.min.io/client/mc/release/linux-amd64/mc + chmod +x /usr/local/bin/mc +fi + +####################################################################################################################### +# INSTALL DEPENDENCIES +####################################################################################################################### +AUTOCONF_VERSION="2.71" +AWS_SDK_VERSION="1.11.336" +CMAKE_VERSION="v3.28.5" +FAISS_VERSION="v1.7.4" +LIBEDIT_VERSION="20230828-3.1" +NUMPY_MIN_VERSION="1.26.0" +OPENCV_VERSION="4.9.0" +PEG_VERSION="0.1.19" +PROTOBUF_VERSION="24.2" +TILEDB_VERSION="2.14.1" +VALIJSON_VERSION="v0.6" + +cd $VDMS_DEP_DIR + + +# INSTALL CMAKE +git clone --branch ${CMAKE_VERSION} https://github.com/Kitware/CMake.git $VDMS_DEP_DIR/CMake +cd $VDMS_DEP_DIR/CMake +./bootstrap +make ${BUILD_THREADS} +make install + + +# INSTALL PROTOBUF & ITS DEPENDENCIES +git clone -b "v${PROTOBUF_VERSION}" --recurse-submodules https://github.com/protocolbuffers/protobuf.git $VDMS_DEP_DIR/protobuf +cd $VDMS_DEP_DIR/protobuf/third_party/googletest +mkdir build && cd build/ +cmake 
-DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_BUILD_TYPE=Release \ + -DBUILD_SHARED_LIBS=ON -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DBUILD_GMOCK=ON -DCMAKE_CXX_STANDARD=17 .. +make ${BUILD_THREADS} +make install + +cd $VDMS_DEP_DIR/protobuf/third_party/abseil-cpp +mkdir build && cd build +cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DBUILD_SHARED_LIBS=ON \ + -DCMAKE_INSTALL_PREFIX=/usr/local -DABSL_BUILD_TESTING=ON \ + -DABSL_USE_EXTERNAL_GOOGLETEST=ON \ + -DABSL_FIND_GOOGLETEST=ON -DCMAKE_CXX_STANDARD=17 .. +make ${BUILD_THREADS} +make install +ldconfig /usr/local/lib + +cd $VDMS_DEP_DIR/protobuf +cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DCMAKE_CXX_STANDARD=17 -Dprotobuf_BUILD_SHARED_LIBS=ON \ + -Dprotobuf_ABSL_PROVIDER=package \ + -Dprotobuf_BUILD_TESTS=ON \ + -Dabsl_DIR=/usr/local/lib/cmake/absl . +make ${BUILD_THREADS} +make install + + +# INSTALL OPENCV +git clone https://github.com/opencv/opencv.git $VDMS_DEP_DIR/opencv +cd $VDMS_DEP_DIR/opencv +git checkout tags/${OPENCV_VERSION} +mkdir build && cd build +cmake -DBUILD_PERF_TESTS=OFF -DBUILD_TESTS=OFF .. +make ${BUILD_THREADS} +make install + + +# INSTALL PYTHON PACKAGES +python -m pip install --no-cache-dir "numpy>=${NUMPY_MIN_VERSION},<2.0.0" "coverage>=7.3.1" \ + "protobuf==4.${PROTOBUF_VERSION}" "cryptography>=42.0.7" + + +# INSTALL VALIJSON +git clone --branch ${VALIJSON_VERSION} https://github.com/tristanpenman/valijson.git $VDMS_DEP_DIR/valijson +cd $VDMS_DEP_DIR/valijson +cp -r include/* /usr/local/include/ + + +# INSTALL DESCRIPTOR LIBRARIES (FAISS, FLINNG) +git clone --branch ${FAISS_VERSION} https://github.com/facebookresearch/faiss.git $VDMS_DEP_DIR/faiss +cd $VDMS_DEP_DIR/faiss +mkdir build && cd build +cmake -DFAISS_ENABLE_GPU=OFF -DPython_EXECUTABLE=$(which python) \ + -DBUILD_SHARED_LIBS=ON -DCMAKE_BUILD_TYPE=Release .. 
+make ${BUILD_THREADS} +make install + +git clone https://github.com/tonyzhang617/FLINNG.git $VDMS_DEP_DIR/FLINNG +cd $VDMS_DEP_DIR/FLINNG +mkdir build && cd build +cmake .. +make ${BUILD_THREADS} +make install + + +# INSTALL TILEDB +curl -L -o $VDMS_DEP_DIR/${TILEDB_VERSION}.tar.gz https://github.com/TileDB-Inc/TileDB/archive/refs/tags/${TILEDB_VERSION}.tar.gz +cd $VDMS_DEP_DIR +tar -xvf ${TILEDB_VERSION}.tar.gz +cd TileDB-${TILEDB_VERSION} +mkdir build && cd build +../bootstrap --prefix=/usr/local/ +make ${BUILD_THREADS} +make install-tiledb + + +# INSTALL AWS S3 SDK +git clone -b ${AWS_SDK_VERSION} --recurse-submodules https://github.com/aws/aws-sdk-cpp $VDMS_DEP_DIR/aws-sdk-cpp +mkdir -p $VDMS_DEP_DIR/aws-sdk-cpp/build +cd $VDMS_DEP_DIR/aws-sdk-cpp/build +cmake .. -DCMAKE_BUILD_TYPE=Debug -DCMAKE_PREFIX_PATH=/usr/local/ -DCMAKE_INSTALL_PREFIX=/usr/local/ \ + -DBUILD_ONLY="s3" -DCUSTOM_MEMORY_MANAGEMENT=OFF -DENABLE_TESTING=OFF +make ${BUILD_THREADS} +make install + + +# INSTALL AUTOCONF +curl -L -o $VDMS_DEP_DIR/autoconf-${AUTOCONF_VERSION}.tar.xz https://ftp.gnu.org/gnu/autoconf/autoconf-${AUTOCONF_VERSION}.tar.xz +cd $VDMS_DEP_DIR +tar -xf autoconf-${AUTOCONF_VERSION}.tar.xz +cd autoconf-${AUTOCONF_VERSION} +./configure +make ${BUILD_THREADS} +make install + + +# INSTALL NEO4J CLIENTS +curl -L -o $VDMS_DEP_DIR/peg-${PEG_VERSION}.tar.gz https://github.com/gpakosz/peg/releases/download/${PEG_VERSION}/peg-${PEG_VERSION}.tar.gz +cd $VDMS_DEP_DIR/ +tar -xf peg-${PEG_VERSION}.tar.gz +cd peg-${PEG_VERSION} +make ${BUILD_THREADS} +make install + +git clone https://github.com/cleishm/libcypher-parser.git $VDMS_DEP_DIR/libcypher +cd $VDMS_DEP_DIR/libcypher +./autogen.sh +./configure +make install + +curl -L -o $VDMS_DEP_DIR/libedit-${LIBEDIT_VERSION}.tar.gz https://thrysoee.dk/editline/libedit-${LIBEDIT_VERSION}.tar.gz +cd $VDMS_DEP_DIR/ +tar -xzf libedit-${LIBEDIT_VERSION}.tar.gz +cd libedit-${LIBEDIT_VERSION} +./configure +make ${BUILD_THREADS} +make install + +git 
clone https://github.com/majensen/libneo4j-omni.git $VDMS_DEP_DIR/libomni +cd $VDMS_DEP_DIR/libomni +./autogen.sh +./configure --disable-werror --prefix=/usr +make clean check +make install -w --debug + + +# CLEANUP +rm -rf $VDMS_DEP_DIR + +####################################################################################################################### +# BUILD VDMS +####################################################################################################################### + +cd ${WORKSPACE} && git submodule update --init --recursive + +if [ ${MODIFY_PMGD} == true ]; then + sed -i "s|java-11-openjdk|java-17-openjdk|g" ${WORKSPACE}/src/pmgd/java/CMakeLists.txt + sed -i "s|#include |#include \n#include |" ${WORKSPACE}/src/pmgd/test/neighbortest.cc + sed -i "s|#include |#include \n#include |" ${WORKSPACE}/src/pmgd/tools/mkgraph.cc +fi + +if [ ${OLD_AV_LIBS} == true ]; then + sed -i "s|#include ||" ${WORKSPACE}/include/vcl/KeyFrame.h + sed -i "s|#include ||" ${WORKSPACE}/include/vcl/KeyFrame.h +fi + +mkdir -p ${WORKSPACE}/build && cd ${WORKSPACE}/build + +cmake -DCODE_COVERAGE="${BUILD_COVERAGE}" .. + +if [ $BUILD_VDMS == true ]; then + make ${BUILD_THREADS} +fi + +cp ${WORKSPACE}/config-vdms.json ${WORKSPACE}/build/ + +export PYTHONPATH=${WORKSPACE}/client/python:${PYTHONPATH} + diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml new file mode 100644 index 00000000..9397495f --- /dev/null +++ b/.github/workflows/CI.yml @@ -0,0 +1,63 @@ +name: CI + +on: + pull_request: + types: [opened, synchronize, reopened] + branches: [develop] + +# Declare default permissions as write only. +permissions: write-all + +# If another push to the same PR or branch happens while this workflow is still running, +# cancel the earlier run in favor of the next run. +# +# There's no point in testing an outdated version of the code. 
GitHub only allows +# a limited number of job runners to be active at the same time, so it's better to cancel +# pointless jobs early so that more useful jobs can run sooner. +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + Get-Envs: + name: Get Environment vars + runs-on: ubuntu-latest + outputs: + CHECKIN_DOCKERFILE: .github/scripts/docker-compose.yml + CONTAINER_NAME: source_coverage_${{ github.event.pull_request.number }} + SOURCE_REF: ${{ github.event.pull_request.head.sha }} + TARGET_REF: ${{ github.event.pull_request.base.ref }} + steps: + - run: echo "null" + + Testing: + # name: Code coverage in Docker + needs: [Get-Envs] + uses: ./.github/workflows/_CI_coverage.yml + with: + container-name: ${{ needs.Get-Envs.outputs.CONTAINER_NAME }} + docker-compose-path: ${{ needs.Get-Envs.outputs.CHECKIN_DOCKERFILE }} + source-ref: ${{ needs.Get-Envs.outputs.SOURCE_REF}} + target-ref: ${{ needs.Get-Envs.outputs.TARGET_REF}} + secrets: inherit + + Results: + # name: Compare code coverage + needs: [Testing] + uses: ./.github/workflows/_CI_coverage_compare.yml + with: + target_cpp_coverage: ${{ needs.Testing.outputs.target_cpp_coverage }} + target_py_coverage: ${{ needs.Testing.outputs.target_py_coverage }} + source_cpp_coverage: ${{ needs.Testing.outputs.source_cpp_coverage }} + source_py_coverage: ${{ needs.Testing.outputs.source_py_coverage }} + secrets: inherit + + Update: + # name: Lint & Update Reports + needs: [Testing, Results] + if: ${{ always() && (needs.Results.result == 'success') }} + uses: ./.github/workflows/_CI_update.yml + with: + coverage_value_updated: ${{ needs.Testing.outputs.coverage_value_updated }} + secrets: inherit + diff --git a/.github/workflows/_CI_coverage.yml b/.github/workflows/_CI_coverage.yml new file mode 100644 index 00000000..2183ee0f --- /dev/null +++ b/.github/workflows/_CI_coverage.yml @@ -0,0 +1,287 @@ +name: Run Code Coverage + +on: + workflow_call: + inputs: + container-name: 
+ required: true + type: string + description: "Unique name for docker container" + + docker-compose-path: + required: true + type: string + description: "Path to check-in docker-compose.yml" + + source-ref: + required: true + type: string + description: "Github reference for source code" + + target-ref: + required: true + type: string + description: "Github reference for target code" + + secrets: + AWS_ACCESS_KEY_ID: + required: true + AWS_SECRET_ACCESS_KEY: + required: true + NEO4J_USER: + required: true + NEO4J_PASS: + required: true + FACELESS_AUTHKEY: + required: true + FACELESS_NAME: + required: true + + outputs: + target_cpp_coverage: + description: "The C++ Coverage for target" + value: ${{ jobs.target_coverage_job.outputs.target_cpp_value }} + target_py_coverage: + description: "The Python Coverage for target" + value: ${{ jobs.target_coverage_job.outputs.target_py_value }} + source_cpp_coverage: + description: "The C++ Coverage for source" + value: ${{ jobs.coverage_job.outputs.source_cpp_value }} + source_py_coverage: + description: "The Python Coverage for source" + value: ${{ jobs.coverage_job.outputs.source_py_value }} + source_prev_cpp_coverage: + description: "The C++ Coverage for source" + value: ${{ jobs.coverage_job.outputs.source_old_cpp_value }} + source_prev_py_coverage: + description: "The Python Coverage for source" + value: ${{ jobs.coverage_job.outputs.source_old_py_value }} + coverage_value_updated: + description: "Whether coverage for source has changed" + value: ${{ jobs.coverage_job.outputs.coverage_value_updated }} + + +permissions: read-all + +jobs: + target_coverage_job: + name: Target Coverage + runs-on: ubuntu-latest + env: + TARGET_BRANCH_REF: ${{ inputs.target-ref }} + outputs: + target_cpp_value: ${{ steps.target_values.outputs.cpp }} + target_py_value: ${{ steps.target_values.outputs.py }} + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + ref: ${{ env.TARGET_BRANCH_REF }} + 
fetch-depth: 0 + - name: Get Target Coverage Values + id: target_values + run: | + echo "cpp=$(cat .github/coverage/cpp.develop.coverage_value.txt || echo 0.00)" >> $GITHUB_OUTPUT + echo "py=$(cat .github/coverage/python.develop.coverage_value.txt || echo 0.00)" >> $GITHUB_OUTPUT + + coverage_job: + name: Run Coverage + needs: [target_coverage_job] + runs-on: ubuntu-latest + env: + AWS_API_PORT: 9000 + AWS_CONSOLE_PORT: 9001 + SOURCE_CONTAINER_NAME: ${{ inputs.container-name }} + SOURCE_CONTAINER_TAG: "vdms:${{ inputs.container-name }}" + NEO4J_CONTAINER_NAME: ${{ inputs.container-name }}_neo4j + NEO_TEST_PORT: 7687 + CHECKIN_DOCKERFILE: ${{ inputs.docker-compose-path }} + DOCKER_ARTIFACT_DIR: "Docker_artifacts" + SOURCE_BRANCH_REF: ${{ inputs.source-ref }} + outputs: + source_cpp_value: ${{ steps.report_coverage.outputs.source_coverage_cpp }} + source_py_value: ${{ steps.report_coverage.outputs.source_coverage_py }} + source_old_cpp_value: ${{ steps.report_coverage.outputs.source_old_cpp_value }} + source_old_py_value: ${{ steps.report_coverage.outputs.source_old_py_value }} + coverage_value_updated: ${{ steps.report_coverage.outputs.cov_changed }} + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: true + + - run: mkdir -p ${{ env.DOCKER_ARTIFACT_DIR }} + + - name: Variables for Testing + shell: bash + id: test_params + run: | + set -x + + # Get an open port btwn 50000 and 59999 for vdms local port + vdms_local_test_port=$(comm -23 <(seq 50000 59999 | sort) <(ss -Htan | awk '{print $4}' | cut -d':' -f2 | sort -u)) + echo "VDMS_LOCAL_TEST_PORT=$(echo "$vdms_local_test_port" | shuf | head -n 1)" >> $GITHUB_OUTPUT + + # Get an open port btwn 65000 and 65535 for neo4j + neo4j_test_port=$(comm -23 <(seq 65000 65535 | sort) <(ss -Htan | awk '{print $4}' | cut -d':' -f2 | sort -u) | shuf | head -n 1) + # echo "NEO_TEST_PORT=${neo4j_test_port}" >> $GITHUB_OUTPUT + + # Copy old local develop coverage files + mv 
${GITHUB_WORKSPACE}/.github/coverage/cpp.develop.coverage_report.txt ${GITHUB_WORKSPACE}/.github/coverage/cpp.old-develop.coverage_report.txt + mv ${GITHUB_WORKSPACE}/.github/coverage/cpp.develop.coverage_value.txt ${GITHUB_WORKSPACE}/.github/coverage/cpp.old-develop.coverage_value.txt + mv ${GITHUB_WORKSPACE}/.github/coverage/python.develop.coverage_report.txt ${GITHUB_WORKSPACE}/.github/coverage/python.old-develop.coverage_report.txt + mv ${GITHUB_WORKSPACE}/.github/coverage/python.develop.coverage_value.txt ${GITHUB_WORKSPACE}/.github/coverage/python.old-develop.coverage_value.txt + + - name: Start Check-in Container + shell: bash + id: checkin_start + env: + VDMS_TEST_PORT: ${{ steps.test_params.outputs.VDMS_LOCAL_TEST_PORT }} + run: | + cd ${GITHUB_WORKSPACE}/.github/scripts + + AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ + AWS_API_PORT=${{ env.AWS_API_PORT }} \ + AWS_CONSOLE_PORT=${{ env.AWS_CONSOLE_PORT }} \ + AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ + NEO4J_CONTAINER_NAME=${{ env.NEO4J_CONTAINER_NAME }} \ + NEO4J_ENDPOINT=neo4j://neo4j:${{ env.NEO_TEST_PORT }} \ + NEO4J_PASS=${{ secrets.NEO4J_PASS }} \ + NEO4J_USER=${{ secrets.NEO4J_USER }} \ + NEO_TEST_PORT=${{ env.NEO_TEST_PORT }} \ + SOURCE_CONTAINER_NAME=${{ env.SOURCE_CONTAINER_NAME }} \ + VDMS_TEST_PORT=${{ env.VDMS_TEST_PORT }} \ + docker compose up -d --build + sleep 15 + + # Save Image tag + VDMS_TAG_STR="vdms:${{ env.SOURCE_CONTAINER_NAME }}" + echo "$VDMS_TAG_STR" + echo "VDMS_TAG=${VDMS_TAG_STR}" >> $GITHUB_OUTPUT + + # Commands for neo4j tests + COMMON_ARGS="-n ${{ secrets.NEO4J_USER }} -w ${{ secrets.NEO4J_PASS }} -v ${{ env.NEO_TEST_PORT}}" + S3_ARGS="-a ${{ env.AWS_API_PORT }} -c ${{ env.AWS_CONSOLE_PORT}} -u ${{ secrets.AWS_ACCESS_KEY_ID }} -p ${{ secrets.AWS_SECRET_ACCESS_KEY }}" + + CMD_STR_e2e_str="./run_neo4j_tests.sh -t Neo4JE2ETest ${S3_ARGS} ${COMMON_ARGS} -e neo4j://neo4j:${{ env.NEO_TEST_PORT}}" + echo "CMD_STR_e2e=${CMD_STR_e2e_str}" >> $GITHUB_OUTPUT 
+ + CMD_STR_OpsIO_str="./run_neo4j_tests.sh -t OpsIOCoordinatorTest ${S3_ARGS} ${COMMON_ARGS} -e neo4j://neo4j:${{ env.NEO_TEST_PORT}}" + echo "CMD_STR_OpsIO=${CMD_STR_OpsIO_str}" >> $GITHUB_OUTPUT + + CMD_STR_bkend_str="./run_neo4j_tests.sh -t Neo4jBackendTest ${COMMON_ARGS} -e neo4j://neo4j:${{ env.NEO_TEST_PORT}}" + echo "CMD_STR_bkend=${CMD_STR_bkend_str}" >> $GITHUB_OUTPUT + + - name: Run Neo4J Tests + shell: bash + env: + CMD_STR_bkend: ${{ steps.checkin_start.outputs.CMD_STR_bkend }} + CMD_STR_e2e: ${{ steps.checkin_start.outputs.CMD_STR_e2e }} + CMD_STR_OpsIO: ${{ steps.checkin_start.outputs.CMD_STR_OpsIO }} + VDMS_TEST_PORT: ${{ steps.test_params.outputs.VDMS_LOCAL_TEST_PORT }} + run: | + cd ${GITHUB_WORKSPACE}/.github/scripts + + # E2E Test + echo "${{ env.CMD_STR_e2e }}" + docker exec -w /vdms/tests ${{ env.SOURCE_CONTAINER_NAME }} bash -c "${{ env.CMD_STR_e2e }}" + docker kill ${{ env.NEO4J_CONTAINER_NAME }} || true + sleep 15 + + AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \ + AWS_API_PORT=${{ env.AWS_API_PORT }} \ + AWS_CONSOLE_PORT=${{ env.AWS_CONSOLE_PORT }} \ + AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \ + NEO4J_CONTAINER_NAME=${{ env.NEO4J_CONTAINER_NAME }} \ + NEO4J_ENDPOINT=neo4j://neo4j:${{ env.NEO_TEST_PORT }} \ + NEO4J_PASS=${{ secrets.NEO4J_PASS }} \ + NEO4J_USER=${{ secrets.NEO4J_USER }} \ + NEO_TEST_PORT=${{ env.NEO_TEST_PORT }} \ + SOURCE_CONTAINER_NAME=${{ env.SOURCE_CONTAINER_NAME }} \ + VDMS_TEST_PORT=${{ env.VDMS_TEST_PORT }} \ + docker compose up -d neo4j + sleep 30 + + # Ops IO Test + echo "${{ env.CMD_STR_OpsIO }}" + docker exec -w /vdms/tests ${{ env.SOURCE_CONTAINER_NAME }} bash -c "${{ env.CMD_STR_OpsIO }}" + docker kill ${{ env.NEO4J_CONTAINER_NAME }} || true + sleep 15 + + + - name: Run Remaining Tests + shell: bash + id: cov_value + run: | + # Run tests + docker exec -w / ${{ env.SOURCE_CONTAINER_NAME }} bash -c "./run_coverage_cpp.sh && ./run_coverage_py.sh" + + # C++ results as variables + docker cp 
${{ env.SOURCE_CONTAINER_NAME }}:/vdms/tests/coverage_report/cpp.new.coverage_report.txt ${GITHUB_WORKSPACE}/.github/coverage/cpp.new.coverage_report.txt + docker cp ${{ env.SOURCE_CONTAINER_NAME }}:/vdms/tests/coverage_report/cpp.new.coverage_value.txt ${GITHUB_WORKSPACE}/.github/coverage/cpp.new.coverage_value.txt + echo "coverage_value_cpp=$(cat ${GITHUB_WORKSPACE}/.github/coverage/cpp.new.coverage_value.txt)" >> $GITHUB_ENV + echo "pr_dev_value_cpp=$(cat ${GITHUB_WORKSPACE}/.github/coverage/cpp.old-develop.coverage_value.txt)" >> $GITHUB_ENV + + # Python results as variables + docker cp ${{ env.SOURCE_CONTAINER_NAME }}:/vdms/tests/coverage_report/python.new.coverage_report.txt ${GITHUB_WORKSPACE}/.github/coverage/python.new.coverage_report.txt || true + docker cp ${{ env.SOURCE_CONTAINER_NAME }}:/vdms/tests/coverage_report/python.new.coverage_value.txt ${GITHUB_WORKSPACE}/.github/coverage/python.new.coverage_value.txt || true + echo "coverage_value_py=$(cat ${GITHUB_WORKSPACE}/.github/coverage/python.new.coverage_value.txt)" >> $GITHUB_ENV + echo "pr_dev_value_py=$(cat ${GITHUB_WORKSPACE}/.github/coverage/python.old-develop.coverage_value.txt)" >> $GITHUB_ENV + + # Stops containers and removes containers, networks, volumes, and images + cd ${GITHUB_WORKSPACE}/.github/scripts + docker compose down + + - name: Report Source Coverage + id: report_coverage + run: | + set -x + + did_cov_change='false' + if [ "$pr_dev_value_cpp" != "$coverage_value_cpp" ]; then + did_cov_change='true' + fi + + if [ "$pr_dev_value_py" != "$coverage_value_py" ]; then + did_cov_change='true' + fi + + # If true, in future job, push latest coverage as develop (future target) + echo "cov_changed=${did_cov_change}" >> $GITHUB_OUTPUT + + # CPP + if [[ -z $coverage_value_cpp ]] + then + exit 1 + fi + echo "Source CPP Coverage: ${coverage_value_cpp}" + echo "source_coverage_cpp=${coverage_value_cpp}" >> $GITHUB_OUTPUT + echo "source_old_cpp_value=${pr_dev_value_cpp}" >> $GITHUB_OUTPUT + # echo 
"target_coverage_cpp=${target_value_cpp}" >> $GITHUB_OUTPUT + + # Python + if [[ -z $coverage_value_py ]] + then + exit 1 + fi + echo "Source Python Coverage: ${coverage_value_py}" + echo "source_coverage_py=${coverage_value_py}" >> $GITHUB_OUTPUT + echo "source_old_py_value=${pr_dev_value_py}" >> $GITHUB_OUTPUT + # echo "target_coverage_py=${target_value_py}" >> $GITHUB_OUTPUT + + - name: Upload New coverage results + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 + with: + name: coverage_artifact + path: .github/coverage/*.new.*.txt + if-no-files-found: error + retention-days: 1 + + - name: Cleanup Docker Containers + if: ${{ always() }} + run: | + docker kill ${{ env.SOURCE_CONTAINER_NAME }} || true + docker kill ${{ env.SOURCE_CONTAINER_NAME }}_tmp || true + docker kill ${{ env.NEO4J_CONTAINER_NAME }} || true + + docker ps -a --filter status=exited --format {{.ID}} | xargs docker rm || true + docker rmi $(docker images | grep '' | awk '{print $3}') || true + docker builder prune -f diff --git a/.github/workflows/_CI_coverage_compare.yml b/.github/workflows/_CI_coverage_compare.yml new file mode 100644 index 00000000..e7ecb5d5 --- /dev/null +++ b/.github/workflows/_CI_coverage_compare.yml @@ -0,0 +1,59 @@ +name: Compare Code Coverage + +on: + workflow_call: + inputs: + target_cpp_coverage: + required: true + description: "The C++ Coverage for target" + type: string + target_py_coverage: + required: true + description: "The Python Coverage for target" + type: string + source_cpp_coverage: + required: true + description: "The C++ Coverage for source" + type: string + source_py_coverage: + required: true + description: "The Python Coverage for source" + type: string + +permissions: write-all + +jobs: + compare_coverage: + name: Compare Reported Coverage + runs-on: ubuntu-latest + steps: + - name: Comment Coverage + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + 
github.rest.issues.createComment({ + issue_number: ${{ github.event.number }}, + owner: context.repo.owner, + repo: context.repo.repo, + body: 'Target CPP Coverage: ${{ inputs.target_cpp_coverage }}%\nSource CPP Coverage: ${{ inputs.source_cpp_coverage }}%\n\n\nTarget Python Coverage: ${{ inputs.target_py_coverage }}%\nSource Python Coverage: ${{ inputs.source_py_coverage }}%' + }) + + - name: Compare Coverage + run: | + echo "Source CPP Coverage: ${{ inputs.source_cpp_coverage }}" + echo "Target CPP Coverage: ${{ inputs.target_cpp_coverage }}" + CPP_DIFF=$(echo '${{ inputs.target_cpp_coverage }}-${{ inputs.source_cpp_coverage }}' | bc ) + + if (( $(echo "$CPP_DIFF > 0.1" | bc -l) )); then + echo 'CPP Coverage below CPP Target' + exit 1 + fi + + echo "Source Python Coverage: ${{ inputs.source_py_coverage }}" + echo "Target Python Coverage: ${{ inputs.target_py_coverage }}" + PY_DIFF=$(echo '${{ inputs.target_py_coverage }}-${{ inputs.source_py_coverage }}' | bc ) + + if (( $(echo "$PY_DIFF > 0.1" | bc -l) )); then + echo 'Python Coverage below Target' + exit 1 + fi diff --git a/.github/workflows/_CI_update.yml b/.github/workflows/_CI_update.yml new file mode 100644 index 00000000..cec65624 --- /dev/null +++ b/.github/workflows/_CI_update.yml @@ -0,0 +1,83 @@ +name: Update Code + +on: + workflow_call: + inputs: + coverage_value_updated: + required: true + description: "The C++ Coverage for target" + type: string + +permissions: write-all + +jobs: + commit_job: + name: Commit Code Updates + env: + COMMIT_MSG: "Automated updates: Format and/or coverage" + DOCKER_ARTIFACT_DIR: "Docker_artifacts" + runs-on: ubuntu-latest + steps: + # Checkout code doesn't persist across jobs + # If formatting needed, checkout and format again + - name: Checkout Source Branch + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + fetch-depth: 0 + ref: ${{ github.event.pull_request.head.ref }} + repository: ${{ github.event.pull_request.head.repo.full_name }} 
+ token: ${{ secrets.FACELESS_TOKEN || github.token }} + + - run: mkdir -p ${{ env.DOCKER_ARTIFACT_DIR }} + + - name: Retrieve Current Coverage Files + if: ${{ inputs.coverage_value_updated }} == 'true' + uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + with: + name: coverage_artifact + path: .github/coverage/ + + - name: Format C++ Code (clang-format), Python (black code), and apply dos2unix + run: | + ./.github/scripts/auto-formatter.sh + + - name: Update coverage reports with latest coverage + # Change latest coverage as develop (future target) + if: ${{ inputs.coverage_value_updated }} == 'true' + run: | + cd ${GITHUB_WORKSPACE}/.github/coverage/ + rm -rf *.develop.*.txt || true + rm -rf *.old-develop.*.txt || true + ls + mv cpp.new.coverage_report.txt cpp.develop.coverage_report.txt + mv cpp.new.coverage_value.txt cpp.develop.coverage_value.txt + mv python.new.coverage_report.txt python.develop.coverage_report.txt + mv python.new.coverage_value.txt python.develop.coverage_value.txt + + # Update Code and Push (Should be last steps of workflow since it changes commit) + - name: Commit Changes + id: update_commit + continue-on-error: true + run: | + cd ${GITHUB_WORKSPACE} + changes=$([ -z "$(git diff)" ] && echo "Empty" || echo "Not empty") + echo "changes=$changes" >> $GITHUB_OUTPUT + + if [ "$changes" != "Empty" ]; then + git config user.name ${{ secrets.FACELESS_NAME }} + git config user.email ${{ secrets.FACELESS_NAME }}@intel.com + git remote set-url origin https://x-access-token:${{ secrets.FACELESS_TOKEN }}@github.com/${{ github.event.pull_request.head.repo.full_name }} + git add .github/coverage/* + git commit -am "${{ env.COMMIT_MSG }}" + git push + fi + + - if: steps.update_commit.outcome != 'success' + name: Check Push Failure + env: + FILE_CHANGES: ${{ steps.update_commit.outputs.changes }} + run: | + if [ "${{ env.FILE_CHANGES }}" != "Empty" ]; then + echo "Please provide sys-vdms write access to fork (if 
applicable)." + exit 1 + fi \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt index 8eaf28b0..0dc378c6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -5,8 +5,8 @@ set(CMAKE_CXX_STANDARD 17) IF(CODE_COVERAGE) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -Wall -coverage -fprofile-arcs -ftest-coverage") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -Wall -coverage -fprofile-arcs -ftest-coverage") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -Wall -coverage -fprofile-abs-path") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -Wall -coverage -fprofile-abs-path") enable_testing() ENDIF() @@ -89,7 +89,7 @@ else() src/ImageLoop.cc src/VideoLoop.cc ) - target_link_libraries(dms vcl pmgd pmgd-util protobuf tbb tiledb vdms-utils pthread -lcurl -lzmq ${AWSSDK_LINK_LIBRARIES} neo4j-client) + target_link_libraries(dms vcl pmgd pmgd-util protobuf tbb tiledb vdms-utils pthread -lcurl -lzmq -lzip ${AWSSDK_LINK_LIBRARIES} neo4j-client) add_executable(vdms src/vdms.cc) target_link_libraries(vdms dms vdms_protobuf vcl tiledb faiss flinng jsoncpp ${OpenCV_LIBS} ${AWSSDK_LINK_LIBRARIES}) endif () diff --git a/INSTALL.md b/INSTALL.md index 821cb1b8..4e2bcd8b 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -11,25 +11,38 @@ sudo apt-get update -y --fix-missing sudo apt-get upgrade -y sudo apt-get install -y --no-install-suggests --no-install-recommends \ apt-transport-https automake bison build-essential bzip2 ca-certificates \ - curl ed flex g++-9 gcc-9 git gnupg-agent javacc libarchive-tools libatlas-base-dev \ - libavcodec-dev libavformat-dev libboost-all-dev libbz2-dev libc-ares-dev libcurl4-openssl-dev \ - libncurses5-dev libdc1394-22-dev libgflags-dev libgoogle-glog-dev libgtk-3-dev libgtk2.0-dev \ - libhdf5-dev libjpeg-dev libjsoncpp-dev libleveldb-dev liblmdb-dev \ - liblz4-dev libopenblas-dev libopenmpi-dev libpng-dev librdkafka-dev libsnappy-dev libssl-dev \ - libswscale-dev libtbb-dev libtbb2 libtiff-dev libtiff5-dev libtool libzmq3-dev linux-libc-dev 
mpich \ - openjdk-11-jdk-headless pkg-config procps python3-dev python3-pip software-properties-common \ - swig unzip uuid-dev -``` -Note: Your system may have g++ or gcc version 10+. If this is the case, please use version 9 to build VDMS. Optional method for setting version 9 as default: -```bash -update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 1 -update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-9 1 + curl ed flex g++ gcc git gnupg-agent javacc libarchive-tools libatlas-base-dev \ + libavcodec-dev libavformat-dev libavutil-dev libboost-all-dev libbz2-dev libc-ares-dev \ + libcurl4-openssl-dev libdc1394-dev libgflags-dev libgoogle-glog-dev \ + libgtk-3-dev libgtk2.0-dev libhdf5-dev libjpeg-dev libjsoncpp-dev \ + libleveldb-dev liblmdb-dev liblz4-dev libncurses5-dev libopenblas-dev libopenmpi-dev \ + libpng-dev librdkafka-dev libsnappy-dev libssl-dev libswscale-dev libtbb-dev \ + libtiff-dev libtiff5-dev libtool libzip-dev linux-libc-dev mpich \ + pkg-config procps software-properties-common swig unzip uuid-dev ``` #### **Install JPEG package** Please install the JPEG package based on the OS platform being used: * ***Debian 10+:*** `sudo apt-get install -y libjpeg62-turbo-dev` * ***Ubuntu 20.04+:*** `sudo apt-get install -y libjpeg8-dev` + + +#### **Install Package for C++ bindings** +Please install the package for C++ bindings for libzmq (headers) based on the OS platform being used: +* ***Debian 12+:*** `sudo apt-get install -y cppzmq-dev` +* ***Debian 10-11, Ubuntu 20.04+:*** `sudo apt-get install -y libzmq3-dev` + + +#### **Install OpenJDK Development Kit (JDK)** +Please install the headless OpenJDK Development Kit (JDK) based on the OS platform being used: +* ***Debian 12+, Ubuntu 22.04+:*** `sudo apt-get install -y openjdk-17-jdk-headless` +* ***Debian 10-11, Ubuntu 20.04:*** `sudo apt-get install -y openjdk-11-jdk-headless` + + +#### **Install Parallelism library for C++ - runtime files** +Please install the package for parallelism 
library for C++ - runtime files based on the OS platform being used: +* ***Debian 12+, Ubuntu 22.04+:*** `sudo apt-get install -y libtbbmalloc2` +* ***Debian 10-11, Ubuntu 20.04:*** `sudo apt-get install -y libtbb2`
### Install Remaining Dependencies @@ -45,61 +58,54 @@ mkdir -p $VDMS_DEP_DIR #### Python3 Packages -Here we will install the necessary Python 3.9+ packages Numpy and Protobuf v24.2. It is expected that you have Python3.9 or higher installed on your system. All python calls will use Python3.9+; therefore you may find it convenient to set alias for python. +Here we will install Python 3.12.3 from the Python website. ```bash -alias python=/usr/bin/python3 +PYTHON_VERSION=3.12.3 +curl -O https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz +tar -xzf Python-${PYTHON_VERSION}.tgz +cd Python-${PYTHON_VERSION} +./configure --enable-optimizations +make altinstall ``` -***NOTE:*** If multiple versions of Python 3 are present on your system, verify you are using Python3.9 or higher. You can specify the specific verison in above command and also set the following with your specific version: `alias python3=/usr/bin/python3.x`. -You can also install the coverage package if interested in running the Python unit tests. +If you prefer, you can install the Python 3 version available on the OS platform: ```bash -python3 -m pip install --upgrade pip -python3 -m pip install --no-cache-dir "numpy>=1.26.0" "coverage>=7.3.1" +sudo apt-get install -y python3-dev python3-pip ``` +***NOTE:*** If multiple versions of Python 3 are present on your system, verify you are using Python3.9 or higher. You can specify the specific version and set an alias for `python` and/or `python3` to easily use the desired python version. This can be done using the following: +```bash +alias python=/usr/bin/python3.x +alias python3=/usr/bin/python3.x +``` -#### **Valijson v0.6** -This is a headers-only library, no compilation/installation necessary. +Now that Python is set up, install Numpy and also install the coverage and cryptography packages if interested in running the Python unit tests. 
```bash -VALIJSON_VERSION="v0.6" -git clone --branch ${VALIJSON_VERSION} https://github.com/tristanpenman/valijson.git $VDMS_DEP_DIR/valijson -cd $VDMS_DEP_DIR/valijson -sudo cp -r include/* /usr/local/include/ +python3 -m pip install --upgrade pip +python3 -m pip install --no-cache-dir "numpy>=1.26.0" "coverage>=7.3.1" "cryptography>=42.0.7" ``` -#### **CMAKE v3.27.2** -VDMS requires CMake v3.21+. Here we install CMake v3.27.2. +#### **CMAKE v3.28.5** +VDMS requires CMake v3.21+. Here we install CMake v3.28.5. ```bash -CMAKE_VERSION="v3.27.2" +CMAKE_VERSION="v3.28.5" git clone --branch ${CMAKE_VERSION} https://github.com/Kitware/CMake.git $VDMS_DEP_DIR/CMake cd $VDMS_DEP_DIR/CMake ./bootstrap make ${BUILD_THREADS} sudo make install ``` -***NOTE:*** If multiple versions of Python 3 are present on your system, verify you are using Python3.9 or higher. You can specify the specific verison in above command and also set the following with your specific version: `alias python3=/usr/bin/python3.x`. - - -#### **Autoconf v2.71** -```bash -AUTOCONF_VERSION="2.71" -curl -L -o $VDMS_DEP_DIR/autoconf-${AUTOCONF_VERSION}.tar.xz https://ftp.gnu.org/gnu/autoconf/autoconf-${AUTOCONF_VERSION}.tar.xz -cd $VDMS_DEP_DIR -tar -xf autoconf-${AUTOCONF_VERSION}.tar.xz -cd autoconf-${AUTOCONF_VERSION} -./configure -make ${BUILD_THREADS} -sudo make install -``` #### **Protobuf v24.2 (4.24.2)** Install Protobuf (C++ and Python) which requires GoogleTest and Abseil C++ as dependencies. ```bash PROTOBUF_VERSION="24.2" +python3 -m pip install --no-cache-dir "protobuf==4.${PROTOBUF_VERSION}" + git clone -b v${PROTOBUF_VERSION} --recurse-submodules https://github.com/protocolbuffers/protobuf.git $VDMS_DEP_DIR/protobuf cd $VDMS_DEP_DIR/protobuf/third_party/googletest @@ -128,8 +134,43 @@ cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_INSTALL_PREFIX=/usr/local \ -Dabsl_DIR=/usr/local/lib/cmake/absl . 
make ${BUILD_THREADS} sudo make install +``` -python3 -m pip install --no-cache-dir "protobuf==4.${PROTOBUF_VERSION}" + +#### **[OpenCV](https://opencv.org/) 4.9.0** +Below are instructions for installing ***OpenCV v4.9.0***. +```bash +OPENCV_VERSION="4.9.0" +git clone https://github.com/opencv/opencv.git $VDMS_DEP_DIR/opencv +cd $VDMS_DEP_DIR/opencv +git checkout tags/${OPENCV_VERSION} +mkdir build && cd build +cmake -D BUILD_PERF_TESTS=OFF -D BUILD_TESTS=OFF .. +make ${BUILD_THREADS} +sudo make install +``` + +**Note**: When using videos, and getting the following error: "Unable to stop the stream: Inappropriate ioctl for device", you may need to include more flags when compiling OpenCV. Follow these instructions ([source](https://stackoverflow.com/questions/41200201/opencv-unable-to-stop-the-stream-inappropriate-ioctl-for-device)): +```bash +sudo apt-get install -y ffmpeg +sudo apt-get install -y libavdevice-dev + +cmake -D BUILD_PERF_TESTS=OFF -D BUILD_TESTS=OFF -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local \ + -D WITH_FFMPEG=ON -D WITH_TBB=ON -D WITH_GTK=ON \ + -D WITH_V4L=ON -D WITH_OPENGL=ON -D WITH_CUBLAS=ON \ + -DWITH_QT=OFF -DCUDA_NVCC_FLAGS="-D_FORCE_INLINES" .. +make ${BUILD_THREADS} +sudo make install +``` + + +#### **Valijson v0.6** +This is a headers-only library, no compilation/installation necessary. +```bash +VALIJSON_VERSION="v0.6" +git clone --branch ${VALIJSON_VERSION} https://github.com/tristanpenman/valijson.git $VDMS_DEP_DIR/valijson +cd $VDMS_DEP_DIR/valijson +sudo cp -r include/* /usr/local/include/ ``` @@ -175,10 +216,10 @@ sudo make install-tiledb ``` -#### **AWS SDK CPP 1.11.0** +#### **AWS SDK CPP 1.11.336** Use the following instructions to install AWS SDK for C++. 
```bash -AWS_SDK_VERSION="1.11.0" +AWS_SDK_VERSION="1.11.336" git clone -b ${AWS_SDK_VERSION} --recurse-submodules https://github.com/aws/aws-sdk-cpp ${VDMS_DEP_DIR}/aws-sdk-cpp mkdir -p ${VDMS_DEP_DIR}/aws-sdk-cpp/build cd ${VDMS_DEP_DIR}/aws-sdk-cpp/build @@ -188,27 +229,14 @@ sudo make install ``` -#### **[OpenCV](https://opencv.org/) 4.5.5** -Below are instructions for installing ***OpenCV v4.5.5***. -```bash -OPENCV_VERSION="4.5.5" -git clone --branch ${OPENCV_VERSION} https://github.com/opencv/opencv.git $VDMS_DEP_DIR/opencv -cd $VDMS_DEP_DIR/opencv -mkdir build && cd build -cmake -D BUILD_PERF_TESTS=OFF -D BUILD_TESTS=OFF .. -make ${BUILD_THREADS} -sudo make install -``` - -**Note**: When using videos, and getting the following error: "Unable to stop the stream: Inappropriate ioctl for device", you may need to include more flags when compiling OpenCV. Follow these instructions ([source](https://stackoverflow.com/questions/41200201/opencv-unable-to-stop-the-stream-inappropriate-ioctl-for-device)): +#### **Autoconf v2.71** ```bash -sudo apt-get install -y ffmpeg -sudo apt-get install -y libavdevice-dev - -cmake -D BUILD_PERF_TESTS=OFF -D BUILD_TESTS=OFF -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local \ - -D WITH_FFMPEG=ON -D WITH_TBB=ON -D WITH_GTK=ON \ - -D WITH_V4L=ON -D WITH_OPENGL=ON -D WITH_CUBLAS=ON \ - -DWITH_QT=OFF -DCUDA_NVCC_FLAGS="-D_FORCE_INLINES" .. +AUTOCONF_VERSION="2.71" +curl -L -o $VDMS_DEP_DIR/autoconf-${AUTOCONF_VERSION}.tar.xz https://ftp.gnu.org/gnu/autoconf/autoconf-${AUTOCONF_VERSION}.tar.xz +cd $VDMS_DEP_DIR +tar -xf autoconf-${AUTOCONF_VERSION}.tar.xz +cd autoconf-${AUTOCONF_VERSION} +./configure make ${BUILD_THREADS} sudo make install ``` @@ -244,6 +272,7 @@ git clone https://github.com/majensen/libneo4j-omni.git $VDMS_DEP_DIR/libomni cd $VDMS_DEP_DIR/libomni ./autogen.sh ./configure --disable-werror --prefix=/usr +make clean check sudo make install -w --debug ```
@@ -255,6 +284,19 @@ git clone -b develop --recurse-submodules https://github.com/IntelLabs/vdms.git cd vdms ``` +If your OS Platform is Debian 12+ or Ubuntu 22.04+ and you installed `openjdk-17-jdk-headless`, please modify PMGD to use this package: +```bash +sed -i "s|java-11-openjdk|java-17-openjdk|g" src/pmgd/java/CMakeLists.txt +sed -i "s|#include |#include \n#include |" src/pmgd/test/neighbortest.cc +sed -i "s|#include |#include \n#include |" src/pmgd/tools/mkgraph.cc +``` + +If your OS Platform is Debian 11 or Ubuntu 20.04, please modify file to use older FFMPEG libraries: +```bash +sed -i "s|#include ||" include/vcl/KeyFrame.h +sed -i "s|#include ||" include/vcl/KeyFrame.h +``` + When compiling on a target without Optane persistent memory, use the following: ```bash mkdir build && cd build @@ -263,11 +305,12 @@ make ${BUILD_THREADS} cp ../config-vdms.json . ``` -When compiling on a target with Optane persistent memory, use the command set: +When compiling on a target with Optane persistent memory, use the following: ```bash mkdir build && cd build cmake -DCMAKE_CXX_FLAGS='-DPM' .. make ${BUILD_THREADS} +cp ../config-vdms.json . ``` ***NOTE:*** If error similar to `cannot open shared object file: No such file or directory` obtained during loading shared libraries, such as `libpmgd.so` or `libvcl.so`, add the correct directories to `LD_LIBRARY_PATH`. This may occur for non-root users. To find the correct directory, run `find` command for missing object file. 
An example solution for missing `libpmgd.so` and `libvcl.so` is: diff --git a/README.md b/README.md index 0b748be2..2170247a 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,14 @@ -# VDMS: Your Favourite Visual Data Management System +# VDMS: Your Favorite Visual Data Management System + +[![GitHub License](https://img.shields.io/github/license/IntelLabs/vdms)](https://github.com/IntelLabs/vdms/blob/master/LICENSE) +[![Dependency Status](https://img.shields.io/librariesio/github/IntelLabs/vdms?style=flat-square)](https://libraries.io/github/IntelLabs/vdms) +[![Coverity Scan](https://img.shields.io/coverity/scan/30010)](https://scan.coverity.com/projects/intellabs-vdms) +[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/IntelLabs/vdms/badge)](https://securityscorecards.dev/viewer/?uri=github.com/IntelLabs/vdms) + +[![Release Notes](https://img.shields.io/github/release/IntelLabs/vdms?style=flat-square)](https://github.com/IntelLabs/vdms/releases) +[![Open Issues](https://img.shields.io/github/issues-raw/IntelLabs/vdms?style=flat-square)](https://github.com/IntelLabs/vdms/issues) +[![PyPI - Downloads](https://img.shields.io/pypi/dm/vdms?style=flat-square)](https://pypistats.org/packages/vdms) +[![Docker Pulls](https://img.shields.io/docker/pulls/intellabs/vdms)](https://hub.docker.com/r/intellabs/vdms) VDMS is a storage solution for efficient access of big-”visual”-data that aims to achieve cloud scale by searching for relevant visual data via visual @@ -44,5 +54,6 @@ the examples/tutorials. 
Conference | Links, Cite | Description ------------ | ------------- | ------------- Industrial and Applications @ VLDB 2021 | [Paper](http://vldb.org/pvldb/vol14/p3240-remis.pdf) | Demonstrate VDMS capabilities in image search application +2nd USENIX Workshop @ HotEdge 2019 | [Paper](https://www.usenix.org/system/files/hotedge19-paper-altarawneh.pdf), [Presentation](https://www.usenix.org/sites/default/files/conference/protected-files/hotedge19_slides_altarawneh.pdf) | VDMS in Edge-to-cloud architecture for video streaming application Learning Systems @ NIPS 2018 | [Paper](https://export.arxiv.org/abs/1810.11832), [Cite](https://dblp.uni-trier.de/rec/bibtex/journals/corr/abs-1810-11832) | Systems for Machine Learning [Workshop](http://learningsys.org/nips18/cfp.html) @ NIPS HotStorage @ ATC 2017 | [Paper](https://www.usenix.org/conference/hotstorage17/program/presentation/gupta-cledat), [Presentation](https://www.usenix.org/conference/hotstorage17/program/presentation/gupta-cledat), [Cite](https://www.usenix.org/biblio/export/bibtex/203374)| Positioning Paper at USENIX ATC 2017 Workshop diff --git a/client/cpp/VDMSClient.cc b/client/cpp/VDMSClient.cc index c72ab42d..6f0255f5 100644 --- a/client/cpp/VDMSClient.cc +++ b/client/cpp/VDMSClient.cc @@ -32,7 +32,9 @@ using namespace VDMS; -VDMSClient::VDMSClient(std::string addr, int port) : _conn(addr, port) {} +VDMSClient::VDMSClient(std::string addr, int port, const std::string &cert_file, + const std::string &key_file, const std::string &ca_file) + : _conn(addr, port, cert_file, key_file, ca_file) {} // void VDMSClient::parse_csv_file(std::string filename, std::string server, int // p){ // CSVParser _csv_parser(filename, server, p); diff --git a/client/cpp/VDMSClient.h b/client/cpp/VDMSClient.h index 67e20938..24b89a24 100644 --- a/client/cpp/VDMSClient.h +++ b/client/cpp/VDMSClient.h @@ -52,7 +52,9 @@ class VDMSClient { comm::ConnClient _conn; public: - VDMSClient(std::string addr = "localhost", int port = 
VDMS_PORT); + VDMSClient(std::string addr = "localhost", int port = VDMS_PORT, + const std::string &cert_file = "", + const std::string &key_file = "", const std::string &ca_file = ""); // Blocking call VDMS::Response query(const std::string &json_query, diff --git a/distributed/CMakeLists.txt b/distributed/CMakeLists.txt index 6ee16298..a2b17899 100644 --- a/distributed/CMakeLists.txt +++ b/distributed/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.0.0) project(kaka_test VERSION 0.1.0 LANGUAGES "CXX") add_compile_options(-g -fPIC) -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -Wall -coverage -fprofile-arcs -ftest-coverage") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -Wall -coverage -fprofile-abs-path") find_package(Protobuf CONFIG REQUIRED) diff --git a/docker/base/Dockerfile b/docker/base/Dockerfile index 16d17746..f20f3b1a 100644 --- a/docker/base/Dockerfile +++ b/docker/base/Dockerfile @@ -1,11 +1,11 @@ #Copyright (C) 2023 Intel Corporation #SPDX-License-Identifier: MIT -ARG BASE_VERSION=11.8-slim +ARG BASE_VERSION=12-slim ARG BUILD_THREADS="-j16" ############################################################ # BASE IMAGE W/ ENV VARS -FROM debian:${BASE_VERSION} as base +FROM debian:${BASE_VERSION} AS base # Dockerfile limitations force a repetition of global args ARG BUILD_THREADS ARG AWS_ACCESS_KEY_ID="" @@ -18,8 +18,11 @@ ARG AWS_CONSOLE_PORT=9001 ENV DEBIAN_FRONTEND=noninteractive ENV DEBCONF_NOWARNINGS="yes" +ENV PYTHON_BASE="3.12" +ENV PYTHON_VERSION="${PYTHON_BASE}.3" ENV PROTOBUF_VERSION="24.2" ENV NUMPY_MIN_VERSION="1.26.0" +ENV VIRTUAL_ENV=/opt/venv ENV AWS_ACCESS_KEY_ID="${AWS_ACCESS_KEY_ID}" ENV AWS_SECRET_ACCESS_KEY="${AWS_SECRET_ACCESS_KEY}" @@ -31,55 +34,54 @@ ENV AWS_CONSOLE_PORT="${AWS_CONSOLE_PORT}" ############################################################ # BUILD DEPENDENCIES -FROM base as build +FROM base AS build # Install Packages # hadolint ignore=DL3008 RUN apt-get update -y && apt-get upgrade -y && \ - apt-get 
install -o 'Acquire::Retries=3' -y --no-install-suggests --no-install-recommends --fix-broken --fix-missing \ + apt-get install -o 'Acquire::Retries=3' -y --no-install-suggests \ + --no-install-recommends --fix-broken --fix-missing \ apt-transport-https automake bison build-essential bzip2 ca-certificates \ - curl ed flex g++-9 gcc-9 git gnupg-agent javacc libarchive-tools libatlas-base-dev \ - libavcodec-dev libavformat-dev libboost-all-dev libbz2-dev libc-ares-dev libcurl4-openssl-dev \ - libncurses5-dev libdc1394-22-dev libgflags-dev libgoogle-glog-dev libgtk-3-dev libgtk2.0-dev \ - libhdf5-dev libjpeg-dev libjpeg62-turbo-dev libjsoncpp-dev libleveldb-dev liblmdb-dev \ - liblz4-dev libopenblas-dev libopenmpi-dev libpng-dev librdkafka-dev libsnappy-dev libssl-dev \ - libswscale-dev libtbb-dev libtbb2 libtiff-dev libtiff5-dev libtool libzmq3-dev linux-libc-dev mpich \ - openjdk-11-jdk-headless pkg-config procps python3-dev python3-pip software-properties-common \ - swig unzip uuid-dev && \ - update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 1 && \ - update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-9 1 && \ - apt-get clean && rm -rf /var/lib/apt/lists/* && \ - ln -s /usr/bin/python3 /usr/bin/python + cppzmq-dev curl ed flex g++ gcc git gnupg-agent javacc libarchive-tools libatlas-base-dev \ + libavcodec-dev libavformat-dev libavutil-dev libboost-all-dev libbz2-dev libc-ares-dev \ + libcurl4-openssl-dev libdc1394-dev libgflags-dev libgoogle-glog-dev \ + libgtk-3-dev libgtk2.0-dev libhdf5-dev libjpeg-dev libjpeg62-turbo-dev libjsoncpp-dev \ + libleveldb-dev liblmdb-dev liblz4-dev libncurses5-dev libopenblas-dev libopenmpi-dev \ + libpng-dev librdkafka-dev libsnappy-dev libssl-dev libswscale-dev libtbb-dev libtbbmalloc2 \ + libtiff-dev libtiff5-dev libtool linux-libc-dev mpich openjdk-17-jdk-headless \ + pkg-config procps software-properties-common swig unzip uuid-dev && \ + apt-get --purge remove -y python3.11 && apt-get autoremove -y && \ + 
apt-get clean && rm -rf /var/lib/apt/lists/* + +WORKDIR /usr/src +# hadolint ignore=DL3003,SC2086 +RUN apt update -y && \ + apt install -y libffi-dev libgdbm-dev libnss3-dev libreadline-dev libsqlite3-dev zlib1g-dev && \ + curl -O https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz && \ + tar -xzf Python-${PYTHON_VERSION}.tgz && cd Python-${PYTHON_VERSION} && \ + ./configure --enable-optimizations && make ${BUILD_THREADS} && make altinstall && \ + update-alternatives --install /usr/bin/python python /usr/local/bin/python3.12 1 && \ + python${PYTHON_BASE} -m venv ${VIRTUAL_ENV} +ENV PATH="$VIRTUAL_ENV/bin:$PATH" # Pull and Install Dependencies WORKDIR /dependencies -ENV CMAKE_VERSION="v3.27.2" \ - VALIJSON_VERSION="v0.6" \ +ENV AUTOCONF_VERSION="2.71" \ + AWS_SDK_VERSION="1.11.336" \ + CMAKE_VERSION="v3.28.5" \ FAISS_VERSION="v1.7.4" \ - OPENCV_VERSION="4.5.5" \ - TILEDB_VERSION="2.14.1" \ - AWS_SDK_VERSION="1.11.0" \ - AUTOCONF_VERSION="2.71" \ + LIBEDIT_VERSION="20230828-3.1" \ + OPENCV_VERSION="4.9.0" \ PEG_VERSION="0.1.19" \ - LIBEDIT_VERSION="20230828-3.1" - -# hadolint ignore=DL3003 -RUN python3 -m pip install --no-cache-dir "numpy>=${NUMPY_MIN_VERSION}" && \ - git clone --branch ${VALIJSON_VERSION} https://github.com/tristanpenman/valijson.git /dependencies/valijson && \ - cd /dependencies/valijson && cp -r include/* /usr/local/include/ && \ - mkdir -p /opt/dist/usr/local/include/ && cp -r include/* /opt/dist/usr/local/include/ + TILEDB_VERSION="2.14.1" \ + VALIJSON_VERSION="v0.6" +# CMAKE # hadolint ignore=DL3003,SC2086 RUN git clone --branch ${CMAKE_VERSION} https://github.com/Kitware/CMake.git /dependencies/CMake && \ cd /dependencies/CMake && ./bootstrap && make ${BUILD_THREADS} && \ make install DESTDIR=/opt/dist && make install -# AUTOCONF VERSION FOR NEO4J -# hadolint ignore=DL3003,SC2086 -RUN curl -O https://ftp.gnu.org/gnu/autoconf/autoconf-${AUTOCONF_VERSION}.tar.xz && \ - tar -xf autoconf-${AUTOCONF_VERSION}.tar.xz && 
cd autoconf-${AUTOCONF_VERSION} && \ - ./configure && make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install - # PROTOBUF & ITS DEPENDENCIES # hadolint ignore=DL3003,SC2086 RUN git clone -b "v${PROTOBUF_VERSION}" --recurse-submodules https://github.com/protocolbuffers/protobuf.git /dependencies/protobuf && \ @@ -102,11 +104,25 @@ RUN git clone -b "v${PROTOBUF_VERSION}" --recurse-submodules https://github.com/ -Dabsl_DIR=/opt/dist/usr/local/lib/cmake/absl . && \ make ${BUILD_THREADS} && make install -# DESCRIPTOR LIBRARIES +# OPENCV +# hadolint ignore=DL3003,SC2086 +RUN git clone https://github.com/opencv/opencv.git /dependencies/opencv && \ + cd /dependencies/opencv && git checkout tags/${OPENCV_VERSION} && \ + mkdir build && cd build && cmake -DBUILD_PERF_TESTS=OFF -DBUILD_TESTS=OFF .. && \ + make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install + +# VALIJSON +# hadolint ignore=DL3003 +RUN python -m pip install --no-cache-dir "numpy>=${NUMPY_MIN_VERSION},<2.0.0" && \ + git clone --branch ${VALIJSON_VERSION} https://github.com/tristanpenman/valijson.git /dependencies/valijson && \ + cd /dependencies/valijson && cp -r include/* /usr/local/include/ && \ + mkdir -p /opt/dist/usr/local/include/ && cp -r include/* /opt/dist/usr/local/include/ + +# FAISS & FLINNG LIBRARIES # hadolint ignore=DL3003,SC2086 RUN git clone --branch ${FAISS_VERSION} https://github.com/facebookresearch/faiss.git /dependencies/faiss && \ cd /dependencies/faiss && mkdir build && cd build && \ - cmake -DFAISS_ENABLE_GPU=OFF -DPython_EXECUTABLE=/usr/bin/python3 \ + cmake -DFAISS_ENABLE_GPU=OFF -DPython_EXECUTABLE=$(which python) \ -DBUILD_SHARED_LIBS=ON -DCMAKE_BUILD_TYPE=Release .. 
&& \ make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install && \ git clone https://github.com/tonyzhang617/FLINNG.git /dependencies/FLINNG && \ @@ -115,34 +131,32 @@ RUN git clone --branch ${FAISS_VERSION} https://github.com/facebookresearch/fais # TILEDB & AWS S3 SDK # hadolint ignore=DL3003,SC2086 -RUN curl -L -o /dependencies/${TILEDB_VERSION}.tar.gz \ - https://github.com/TileDB-Inc/TileDB/archive/refs/tags/${TILEDB_VERSION}.tar.gz && \ - cd /dependencies/ && tar -xvf ${TILEDB_VERSION}.tar.gz && cd TileDB-${TILEDB_VERSION} && \ - mkdir build && cd build && ../bootstrap --prefix=/usr/local/ && make ${BUILD_THREADS} && \ - make install-tiledb DESTDIR=/opt/dist && make install-tiledb && \ +RUN curl -L -O https://github.com/TileDB-Inc/TileDB/archive/refs/tags/${TILEDB_VERSION}.tar.gz && \ + tar -xvf ${TILEDB_VERSION}.tar.gz && cd TileDB-${TILEDB_VERSION} && \ + mkdir build && cd build && ../bootstrap --prefix=/usr/local/ && make ${BUILD_THREADS} && \ + make install-tiledb DESTDIR=/opt/dist && make install-tiledb && \ git clone -b ${AWS_SDK_VERSION} --recurse-submodules https://github.com/aws/aws-sdk-cpp /dependencies/aws-sdk-cpp && \ mkdir -p /dependencies/aws-sdk-cpp/build && cd /dependencies/aws-sdk-cpp/build && \ cmake .. -DCMAKE_BUILD_TYPE=Debug -DCMAKE_PREFIX_PATH=/usr/local/ -DCMAKE_INSTALL_PREFIX=/usr/local/ \ -DBUILD_ONLY="s3" -DCUSTOM_MEMORY_MANAGEMENT=OFF -DENABLE_TESTING=OFF && \ make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install -# OPENCV +# AUTOCONF VERSION FOR NEO4J # hadolint ignore=DL3003,SC2086 -RUN git clone --branch ${OPENCV_VERSION} https://github.com/opencv/opencv.git /dependencies/opencv && \ - cd /dependencies/opencv && mkdir build && cd build && cmake -D BUILD_PERF_TESTS=OFF -D BUILD_TESTS=OFF .. 
&& \ - make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install +RUN curl -O https://ftp.gnu.org/gnu/autoconf/autoconf-${AUTOCONF_VERSION}.tar.xz && \ + tar -xf autoconf-${AUTOCONF_VERSION}.tar.xz && cd autoconf-${AUTOCONF_VERSION} && \ + ./configure && make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install # LIB-OMNI FOR NEO4J QUERY HANDLER # hadolint ignore=DL3003,SC2086 -RUN curl -L -o /dependencies/peg-${PEG_VERSION}.tar.gz \ - https://github.com/gpakosz/peg/releases/download/${PEG_VERSION}/peg-${PEG_VERSION}.tar.gz && \ - cd /dependencies/ && tar -xf peg-${PEG_VERSION}.tar.gz && cd peg-${PEG_VERSION} && \ +RUN curl -L -O https://github.com/gpakosz/peg/releases/download/${PEG_VERSION}/peg-${PEG_VERSION}.tar.gz && \ + tar -xf peg-${PEG_VERSION}.tar.gz && cd peg-${PEG_VERSION} && \ make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install && \ git clone https://github.com/cleishm/libcypher-parser.git /dependencies/libcypher && \ cd /dependencies/libcypher && ./autogen.sh && ./configure && \ make install DESTDIR=/opt/dist && make install && \ - curl -O https://thrysoee.dk/editline/libedit-${LIBEDIT_VERSION}.tar.gz && \ - tar -xzf libedit-${LIBEDIT_VERSION}.tar.gz && cd libedit-${LIBEDIT_VERSION} && \ + curl -L -o /dependencies/libedit-${LIBEDIT_VERSION}.tar.gz https://thrysoee.dk/editline/libedit-${LIBEDIT_VERSION}.tar.gz && \ + cd /dependencies && tar -xzf libedit-${LIBEDIT_VERSION}.tar.gz && cd libedit-${LIBEDIT_VERSION} && \ ./configure && make ${BUILD_THREADS} && make install DESTDIR=/opt/dist && make install && \ git clone https://github.com/majensen/libneo4j-omni.git /dependencies/libomni && \ cd /dependencies/libomni && ./autogen.sh && \ @@ -158,31 +172,38 @@ RUN rm -rf /dependencies /usr/local/share/doc /usr/local/share/man && \ ############################################################ # FINAL IMAGE FROM base +COPY --from=build /opt/dist / +COPY --from=build /usr/local/bin/python${PYTHON_BASE} 
/usr/local/bin/python${PYTHON_BASE} +COPY --from=build /usr/local/lib/python${PYTHON_BASE} /usr/local/lib/python${PYTHON_BASE} +COPY --from=build ${VIRTUAL_ENV} ${VIRTUAL_ENV} +ENV PATH="$VIRTUAL_ENV/bin:$PATH" -# hadolint ignore=DL3008 +# hadolint ignore=DL3008,SC2086 RUN apt-get update -y && apt-get upgrade -y && \ - apt-get install -o 'Acquire::Retries=3' -y --no-install-suggests --no-install-recommends --fix-broken --fix-missing \ - build-essential bzip2 curl g++-9 gcc-9 git javacc libarchive-tools libavcodec-dev libavformat-dev libcurl4-openssl-dev \ - libdc1394-22-dev libgoogle-glog-dev libgtk-3-dev libgtk2.0-dev libhdf5-dev libjpeg-dev libjpeg62-turbo-dev libjsoncpp-dev libopenblas-dev \ - libpng-dev librdkafka-dev libssl-dev libswscale-dev libtbb-dev libtbb2 libtiff-dev libtiff5-dev libzmq3-dev openjdk-11-jdk-headless procps python3-dev python3-pip && \ - update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 1 && \ - update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-9 1 && \ + apt-get install -o 'Acquire::Retries=3' -y --no-install-suggests \ + --no-install-recommends --fix-broken --fix-missing \ + build-essential bzip2 cppzmq-dev curl g++ gcc git javacc libarchive-tools libavcodec-dev \ + libavformat-dev libcurl4-openssl-dev libdc1394-dev libgoogle-glog-dev libgtk-3-dev \ + libhdf5-dev libjpeg62-turbo-dev libjsoncpp-dev libopenblas-dev libpng-dev librdkafka-dev \ + libssl-dev libswscale-dev libtbb-dev libtbbmalloc2 libtiff5-dev libzip-dev openjdk-17-jdk-headless \ + procps && \ + apt-get --purge remove -y python3.11 && apt-get autoremove -y && \ apt-get clean && rm -rf /var/lib/apt/lists/* && \ - ln -s /usr/bin/python3 /usr/bin/python && \ - python3 -m pip install --no-cache-dir "numpy>=${NUMPY_MIN_VERSION}" "coverage>=7.3.1" "protobuf==4.${PROTOBUF_VERSION}" - -COPY --from=build /opt/dist / -RUN echo "/usr/local/lib" >> /etc/ld.so.conf.d/all-libs.conf && ldconfig + echo "/usr/local/lib" >> /etc/ld.so.conf.d/all-libs.conf && ldconfig && \ 
+ python3 -m pip install --no-cache-dir "numpy>=${NUMPY_MIN_VERSION},<2.0.0" "protobuf==4.${PROTOBUF_VERSION}" \ + "coverage>=7.3.1" "cryptography>=42.0.7" # VDMS WORKDIR /vdms # hadolint ignore=DL3003,SC2086 -RUN git clone -b develop --recurse-submodules https://github.com/IntelLabs/vdms.git /vdms && \ - mkdir -p /vdms/build && cd /vdms/build && \ - cmake .. && make ${BUILD_THREADS} && \ +RUN git clone -b master --recurse-submodules https://github.com/IntelLabs/vdms.git /vdms && \ + sed -i "s|java-11-openjdk|java-17-openjdk|g" /vdms/src/pmgd/java/CMakeLists.txt && \ + sed -i "s|#include |#include \n#include |" /vdms/src/pmgd/test/neighbortest.cc && \ + sed -i "s|#include |#include \n#include |" /vdms/src/pmgd/tools/mkgraph.cc && \ + mkdir -p /vdms/build && cd /vdms/build && \ + cmake .. && make ${BUILD_THREADS} && \ echo '#!/bin/bash' > /start.sh && echo 'cd /vdms/build' >> /start.sh && \ - cp /vdms/docker/override_default_config.py /vdms/ && \ - echo 'python3 /vdms/override_default_config.py -i /vdms/config-vdms.json -o /vdms/build/config-vdms.json' >> /start.sh && \ + echo 'python /vdms/override_default_config.py -i /vdms/config-vdms.json -o /vdms/build/config-vdms.json' >> /start.sh && \ echo './vdms' >> /start.sh && chmod 755 /start.sh ENV PYTHONPATH=/vdms/client/python:${PYTHONPATH} diff --git a/docker/override_default_config.py b/docker/override_default_config.py index 75b41c78..21a43585 100644 --- a/docker/override_default_config.py +++ b/docker/override_default_config.py @@ -92,7 +92,10 @@ def main(args): try: updated_config_value = int(env_value) except: - updated_config_value = env_value + if any(b in env_value.lower() for b in ["true", "false"]): + updated_config_value = bool(env_value) + else: + updated_config_value = env_value config[updated_config_key] = updated_config_value updated_params.append(updated_config_key) diff --git a/include/vcl/DescriptorSet.h b/include/vcl/DescriptorSet.h index ee01b6a9..be4cfb5f 100644 --- a/include/vcl/DescriptorSet.h 
+++ b/include/vcl/DescriptorSet.h @@ -44,6 +44,8 @@ #include +#include "timers/TimerMap.h" + namespace VCL { enum DescriptorSetEngine { @@ -69,6 +71,8 @@ class DescriptorSet { class DescriptorSetData; class DescriptorParams; + TimerMap timers; + private: DescriptorSetData *_set; DescriptorSetEngine _eng; diff --git a/include/vcl/Image.h b/include/vcl/Image.h index 82f5c438..277b0255 100644 --- a/include/vcl/Image.h +++ b/include/vcl/Image.h @@ -48,11 +48,14 @@ #include "TDBImage.h" #include "utils.h" #include +#include #include #include #include "VDMSConfigHelper.h" +#include "timers/TimerMap.h" + namespace VCL { /** @@ -299,6 +302,11 @@ class Image { */ std::string get_query_error_response(); + /** + * @return The metadata to be added based on UDF/Remote Operation response + */ + std::vector get_ingest_metadata(); + /** * Checks if a blob is stored for the image or not * @@ -351,6 +359,13 @@ class Image { void set_query_error_response(std::string error_msg); + /** + * Sets the metadata to be ingested based on UDF/Remote operation + * + * @param metadata metadata to be ingested + */ + void set_ingest_metadata(Json::Value metadata); + /* *********************** */ /* IMAGE INTERACTIONS */ /* *********************** */ @@ -489,6 +504,9 @@ class Image { */ template void copy_to_buffer(T *buffer); + TimerMap timers; + std::vector op_labels; + private: // Forward declaration of Operation class, to be used of _operations // list @@ -534,6 +552,8 @@ class Image { // Query Error response std::string _query_error_response = ""; + std::vector _ingest_metadata; + // Image data (OpenCV Mat or TDBImage) cv::Mat _cv_img; TDBImage *_tdb; diff --git a/include/vcl/KeyFrame.h b/include/vcl/KeyFrame.h index c665c629..ed905ac5 100644 --- a/include/vcl/KeyFrame.h +++ b/include/vcl/KeyFrame.h @@ -37,6 +37,8 @@ #include "Exception.h" extern "C" { +#include +#include #include #include } diff --git a/include/vcl/Video.h b/include/vcl/Video.h index 1bb5a753..49a47727 100644 --- 
a/include/vcl/Video.h +++ b/include/vcl/Video.h @@ -47,6 +47,11 @@ #include "Exception.h" #include "VDMSConfigHelper.h" #include "utils.h" +#include "zip.h" +#include +#include + +#include "timers/TimerMap.h" namespace VCL { @@ -80,6 +85,8 @@ class Video { }; RemoteConnection *_remote; // Remote connection (if one exists) + TimerMap timers; + std::vector op_labels; /* *********************** */ /* CONSTRUCTORS */ @@ -245,6 +252,11 @@ class Video { */ std::string get_operated_video_id(); + /** + * @return The metadata to be added based on UDF/Remote Operation response + */ + std::vector get_ingest_metadata(); + /** * Checks if a blob is stored for the video or not * @@ -317,6 +329,13 @@ class Video { */ void set_operated_video_id(std::string filename); + /** + * Sets the metadata to be ingested based on UDF/Remote operation + * + * @param metadata metadata to be ingested + */ + void set_ingest_metadata(Json::Value metadata); + /* *********************** */ /* Video INTERACTIONS */ /* *********************** */ @@ -463,6 +482,8 @@ class Video { // Remote operation parameters sent by the client Json::Value remoteOp_params; + std::vector _ingest_metadata; + /* *********************** */ /* OPERATION */ /* *********************** */ diff --git a/remote_function/functions/metadata.py b/remote_function/functions/metadata.py new file mode 100644 index 00000000..944863b3 --- /dev/null +++ b/remote_function/functions/metadata.py @@ -0,0 +1,117 @@ +import cv2 +import numpy as np +from datetime import datetime +from collections import deque +import skvideo.io +import imutils +import uuid +import json + +face_cascade = cv2.CascadeClassifier( + # This file is available from OpenCV 'data' directory at + # https://github.com/opencv/opencv/blob/4.x/data/haarcascades/haarcascade_frontalface_default.xml + "functions/files/haarcascade_frontalface_default.xml" +) + + +def facedetectbbox(frame): + global face_cascade + gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) + faces = 
face_cascade.detectMultiScale(gray, 1.1, 4) + return faces + + +def run(ipfilename, format, options): + + # Extract metadata for video files + if options["media_type"] == "video": + + vs = cv2.VideoCapture(ipfilename) + frameNum = 1 + metadata = {} + while True: + (grabbed, frame) = vs.read() + if not grabbed: + print("[INFO] no frame read from stream - exiting") + break + + if options["otype"] == "face": + faces = facedetectbbox(frame) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]), + "y": int(face[1]), + "height": int(face[2]), + "width": int(face[3]), + "object": "face", + "object_det": {"emotion": "happy", "age": 30}, + } + + metadata[frameNum] = {"frameId": frameNum, "bbox": tdict} + frameNum += 1 + + if frameNum == 3: + break + else: + faces = facedetectbbox(frame) + if len(faces) > 0: + face = faces[0] + # We use dummy values here as an example to showcase + # different values for car. + tdict = { + "x": int(face[0]) + 3, + "y": int(face[1]) + 5, + "height": int(face[2]) + 10, + "width": int(face[3]) + 30, + "object": "car", + "object_det": {"color": "red"}, + } + + metadata[frameNum] = {"frameId": frameNum, "bbox": tdict} + frameNum += 1 + + if frameNum == 3: + break + + response = {"opFile": ipfilename, "metadata": metadata} + + jsonfile = "jsonfile" + uuid.uuid1().hex + ".json" + with open(jsonfile, "w") as f: + json.dump(response, f, indent=4) + return ipfilename, jsonfile + # Extract metadata for image files + else: + tdict = {} + img = cv2.imread(ipfilename) + if options["otype"] == "face": + faces = facedetectbbox(img) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]), + "y": int(face[1]), + "height": int(face[2]), + "width": int(face[3]), + "object": "face", + "object_det": {"emotion": "happy", "age": 30}, + } + else: + faces = facedetectbbox(img) + if len(faces) > 0: + face = faces[0] + # We use dummy values here as an example to showcase + # different values for car. 
+ tdict = { + "x": int(face[0]) + 3, + "y": int(face[1]) + 5, + "height": int(face[2]) + 10, + "width": int(face[3]) + 30, + "object": "car", + "object_det": {"color": "red"}, + } + + response = {"opFile": ipfilename, "metadata": tdict} + + r = json.dumps(response) + return img, r diff --git a/remote_function/requirements.txt b/remote_function/requirements.txt index 60864807..03e6998f 100644 --- a/remote_function/requirements.txt +++ b/remote_function/requirements.txt @@ -1,5 +1,5 @@ -opencv-python==4.5.5.64 -flask==3.0.2 -numpy==1.26.4 -sk-video==1.1.10 -imutils==0.5.4 \ No newline at end of file +flask>=3.0.2 +imutils>=0.5.4 +numpy<2.0.0 +opencv-python-headless==4.9.0.80 +sk-video==1.1.10 \ No newline at end of file diff --git a/remote_function/udf_server.py b/remote_function/udf_server.py index a476557f..af044a50 100644 --- a/remote_function/udf_server.py +++ b/remote_function/udf_server.py @@ -9,6 +9,7 @@ import skvideo.io import imutils import uuid +from zipfile import ZipFile for entry in os.scandir("functions"): if entry.is_file(): @@ -45,10 +46,20 @@ def image_api(): image_data.save(tmpfile) + r_img, r_meta = "", "" + udf = globals()[json_data["id"]] - r_img = udf.run(tmpfile, format, json_data) + if "ingestion" in json_data: + r_img, r_meta = udf.run(tmpfile, format, json_data) + else: + r_img = udf.run(tmpfile, format, json_data) return_string = cv2.imencode("." + str(format), r_img)[1].tostring() + + if r_meta != "": + return_string += ":metadata:".encode("utf-8") + return_string += r_meta.encode("utf-8") + os.remove(tmpfile) return return_string @@ -57,22 +68,37 @@ def image_api(): def video_api(): json_data = json.loads(request.form["jsonData"]) video_data = request.files["videoData"] - format = json_data["format"] + format = json_data["format"] if "format" in json_data else "mp4" tmpfile = "tmpfile" + uuid.uuid1().hex + "." 
+ str(format) video_data.save(tmpfile) + video_file, metadata_file = "", "" + udf = globals()[json_data["id"]] - response_file = udf.run(tmpfile, format, json_data) + if "ingestion" in json_data: + video_file, metadata_file = udf.run(tmpfile, format, json_data) + else: + video_file = udf.run(tmpfile, format, json_data) + + response_file = "tmpfile" + uuid.uuid1().hex + ".zip" + + with ZipFile(response_file, "w") as zip_object: + zip_object.write(video_file) + if metadata_file != "": + zip_object.write(metadata_file) os.remove(tmpfile) + # Delete the temporary files after the response is sent @after_this_request def remove_tempfile(response): try: os.remove(response_file) + os.remove(video_file) + os.remove(metadata_file) except Exception as e: - print("File cannot be deleted or not present") + print("Some files cannot be deleted or are not present") return response try: diff --git a/src/BackendNeo4j.cc b/src/BackendNeo4j.cc index a981e968..703171da 100644 --- a/src/BackendNeo4j.cc +++ b/src/BackendNeo4j.cc @@ -62,10 +62,11 @@ BackendNeo4j::BackendNeo4j(unsigned int nr_conns, char *tgt_url, char *user, neo4j_connection_t *connection = neo4j_connect(tgt_url, config, flags); if (connection == NULL) { - printf("Warning: Connection failed to instantiate!\n"); + printf("Warning: Neo4J Connection(s) failed to instantiate!\n"); printf("Errno: %d\n", errno); conn_error = neo4j_strerror(errno, NULL, 0); printf("%s\n", conn_error); + printf("Exiting...\n"); exit(1); } diff --git a/src/DescriptorsCommand.cc b/src/DescriptorsCommand.cc index 033b740e..61d5988e 100644 --- a/src/DescriptorsCommand.cc +++ b/src/DescriptorsCommand.cc @@ -45,6 +45,8 @@ namespace fs = std::filesystem; DescriptorsCommand::DescriptorsCommand(const std::string &cmd_name) : RSCommand(cmd_name) { _dm = DescriptorsManager::instance(); + output_vcl_timing = + VDMSConfig::instance()->get_bool_value("print_vcl_timing", false); } // This function only throws when there is a transaction error, @@ -299,6 +301,10 
@@ Json::Value AddDescriptorSet::construct_responses( } desc_set.store(); + if (output_vcl_timing) { + desc_set.timers.print_map_runtimes(); + } + desc_set.timers.clear_all_timers(); delete (param); } catch (VCL::Exception e) { print_exception(e); @@ -348,6 +354,10 @@ long AddDescriptor::insert_descriptor(const std::string &blob, id_first = desc_set->add((float *)blob.data(), 1); } + if (output_vcl_timing) { + desc_set->timers.print_map_runtimes(); + } + desc_set->timers.clear_all_timers(); } catch (VCL::Exception e) { print_exception(e); error["info"] = "VCL Descriptors Exception"; @@ -802,6 +812,10 @@ void FindDescriptor::populate_blobs(const std::string &set_path, desc_blob->resize(sizeof(float) * dim); set->get_descriptors(&id, 1, (float *)(*desc_blob).data()); + if (output_vcl_timing) { + set->timers.print_map_runtimes(); + } + set->timers.clear_all_timers(); } } } diff --git a/src/DescriptorsCommand.h b/src/DescriptorsCommand.h index 36d120e8..cab1fdca 100644 --- a/src/DescriptorsCommand.h +++ b/src/DescriptorsCommand.h @@ -51,6 +51,7 @@ class DescriptorsCommand : public RSCommand { protected: DescriptorsManager *_dm; VCL::DescriptorSetEngine _eng; + bool output_vcl_timing; // IDDistancePair is a pointer so that we can free its content // without having to use erase methods, which are not lock free diff --git a/src/ImageCommand.cc b/src/ImageCommand.cc index bc4b978d..76543834 100644 --- a/src/ImageCommand.cc +++ b/src/ImageCommand.cc @@ -41,7 +41,10 @@ using namespace VDMS; //========= AddImage definitions ========= -ImageCommand::ImageCommand(const std::string &cmd_name) : RSCommand(cmd_name) {} +ImageCommand::ImageCommand(const std::string &cmd_name) : RSCommand(cmd_name) { + output_vcl_timing = + VDMSConfig::instance()->get_bool_value("print_vcl_timing", false); +} int ImageCommand::enqueue_operations(VCL::Image &img, const Json::Value &ops, bool is_addition) { @@ -61,15 +64,18 @@ int ImageCommand::enqueue_operations(VCL::Image &img, const Json::Value 
&ops, } else if (type == "rotate") { img.rotate(get_value(op, "angle"), get_value(op, "resize")); } else if (type == "syncremoteOp") { - img.syncremoteOperation(get_value(op, "url"), - get_value(op, "options")); + Json::Value options = get_value(op, "options"); + if (is_addition) { + options["ingestion"] = 1; + } + img.syncremoteOperation(get_value(op, "url"), options); } else if (type == "remoteOp") { + Json::Value options = get_value(op, "options"); if (is_addition) { - img.syncremoteOperation(get_value(op, "url"), - get_value(op, "options")); + options["ingestion"] = 1; + img.syncremoteOperation(get_value(op, "url"), options); } else { - img.remoteOperation(get_value(op, "url"), - get_value(op, "options")); + img.remoteOperation(get_value(op, "url"), options); } } else if (type == "userOp") { img.userOperation(get_value(op, "options")); @@ -139,7 +145,6 @@ int AddImage::construct_protobuf(PMGDQuery &query, const Json::Value &jsoncmd, const std::string from_file_path = get_value(cmd, "from_file_path", ""); const bool is_local_file = get_value(cmd, "is_local_file", false); - std::string format = get_value(cmd, "format", ""); char binary_img_flag = 0; if (format == "bin") { @@ -200,7 +205,6 @@ int AddImage::construct_protobuf(PMGDQuery &query, const Json::Value &jsoncmd, connection->_bucket_name = bucket; img.set_connection(connection); } - if (cmd.isMember("operations")) { operation_flags = enqueue_operations(img, cmd["operations"], true); } @@ -246,6 +250,33 @@ int AddImage::construct_protobuf(PMGDQuery &query, const Json::Value &jsoncmd, query.AddNode(node_ref, VDMS_IM_TAG, props, Json::Value()); img.store(file_name, input_format); + + std::vector image_metadata = img.get_ingest_metadata(); + + if (image_metadata.size() > 0) { + for (Json::Value metadata : image_metadata) { + int bb_ref = query.get_available_reference(); + Json::Value bbox_props; + bbox_props[VDMS_DM_IMG_NAME_PROP] = props[VDMS_IM_PATH_PROP]; + bbox_props[VDMS_DM_IMG_OBJECT_PROP] = 
metadata["object"].asString(); + bbox_props[VDMS_ROI_COORD_X_PROP] = metadata["x"].asFloat(); + bbox_props[VDMS_ROI_COORD_Y_PROP] = metadata["y"].asFloat(); + bbox_props[VDMS_ROI_WIDTH_PROP] = metadata["width"].asFloat(); + bbox_props[VDMS_ROI_HEIGHT_PROP] = metadata["height"].asFloat(); + bbox_props[VDMS_DM_VID_OBJECT_DET] = + metadata["object_det"].toStyledString(); + + Json::Value bb_edge_props; + bb_edge_props[VDMS_DM_IMG_NAME_PROP] = props[VDMS_IM_PATH_PROP]; + + query.AddNode(bb_ref, VDMS_ROI_TAG, bbox_props, Json::Value()); + query.AddEdge(-1, node_ref, bb_ref, VDMS_DM_IMG_BB_EDGE, bb_edge_props); + } + } + + if (output_vcl_timing) { + img.timers.print_map_runtimes(); + } } // In case we need to cleanup the query @@ -302,9 +333,28 @@ int FindImage::construct_protobuf(PMGDQuery &query, const Json::Value &jsoncmd, results["list"].append(VDMS_IM_PATH_PROP); } - query.QueryNode(get_value(cmd, "_ref", -1), VDMS_IM_TAG, cmd["link"], - cmd["constraints"], results, - get_value(cmd, "unique", false)); + if (cmd.isMember("metaconstraints")) { + results["list"].append(VDMS_DM_IMG_NAME_PROP); + + for (auto member : cmd["metaconstraints"].getMemberNames()) { + results["list"].append(member); + } + + results["list"].append(VDMS_DM_IMG_OBJECT_PROP); + results["list"].append(VDMS_DM_IMG_OBJECT_DET); + results["list"].append(VDMS_ROI_COORD_X_PROP); + results["list"].append(VDMS_ROI_COORD_Y_PROP); + results["list"].append(VDMS_ROI_WIDTH_PROP); + results["list"].append(VDMS_ROI_HEIGHT_PROP); + + query.QueryNode(get_value(cmd, "_ref", -1), VDMS_ROI_TAG, cmd["link"], + cmd["metaconstraints"], results, + get_value(cmd, "unique", false)); + } else { + query.QueryNode(get_value(cmd, "_ref", -1), VDMS_IM_TAG, cmd["link"], + cmd["constraints"], results, + get_value(cmd, "unique", false)); + } return 0; } @@ -365,8 +415,13 @@ Json::Value FindImage::construct_responses(Json::Value &responses, for (auto &ent : findImage["entities"]) { assert(ent.isMember(VDMS_IM_PATH_PROP)); - 
std::string im_path = ent[VDMS_IM_PATH_PROP].asString(); - ent.removeMember(VDMS_IM_PATH_PROP); + std::string im_path; + if (cmd.isMember("metaconstraints")) { + im_path = ent[VDMS_DM_IMG_NAME_PROP].asString(); + } else { + im_path = ent[VDMS_IM_PATH_PROP].asString(); + ent.removeMember(VDMS_IM_PATH_PROP); + } if (ent.getMemberNames().size() == 0) { flag_empty = true; @@ -467,6 +522,11 @@ Json::Value FindImage::construct_responses(Json::Value &responses, return_error["info"] = "Image Data not found"; return error(return_error); } + + if (output_vcl_timing) { + iter->second->timers.print_map_runtimes(); + } + iter++; } } else { diff --git a/src/ImageCommand.h b/src/ImageCommand.h index abfd55c8..982d5d5a 100644 --- a/src/ImageCommand.h +++ b/src/ImageCommand.h @@ -63,6 +63,9 @@ class ImageCommand : public RSCommand { // Checks if 'format' parameter is specified, and if so, returns the // corresponding VCL::Image::Format type. VCL::Format get_requested_format(const Json::Value &cmd); + +protected: + bool output_vcl_timing; }; class AddImage : public ImageCommand { diff --git a/src/Neo4JHandlerCommands.cc b/src/Neo4JHandlerCommands.cc index c7bef459..0ae66956 100644 --- a/src/Neo4JHandlerCommands.cc +++ b/src/Neo4JHandlerCommands.cc @@ -88,9 +88,9 @@ int Neo4jNeoAdd::data_processing(std::string &cypher, const std::string &blob, int grp_id, Json::Value &error) { - std::chrono::steady_clock::time_point ops_start, ops_end; VCL::RemoteConnection *connection; std::vector enc_img; + int upload_rc; const Json::Value &cmd = orig_query[_cmd_name]; int operation_flags = 0; @@ -112,6 +112,8 @@ int Neo4jNeoAdd::data_processing(std::string &cypher, try { enc_img = do_single_img_ops(orig_query, raw_data, _cmd_name); + if (enc_img.size() == 0) + return -1; } catch (VCL::Exception &e) { print_exception(e, stdout); exit(1); // brutal exit, future iterations should throw exception for @@ -121,7 +123,10 @@ int Neo4jNeoAdd::data_processing(std::string &cypher, std::string img_obj_id; 
img_obj_id = gen_random(32); - s3_upload(img_obj_id, enc_img, connection); + upload_rc = s3_upload(img_obj_id, enc_img, connection); + + if (upload_rc != 0) + return -1; // In case we need to cleanup the query error["image_added"] = img_obj_id; @@ -210,15 +215,27 @@ Json::Value Neo4jNeoFind::construct_responses( try { // NOTE CURRENTLY FIXED TO USE ONLY S3 raw_data = s3_retrieval(im_path, global_s3_connection); - - std::vector img_enc; - img_enc = do_single_img_ops(orig_query, raw_data, _cmd_name); - - std::string *img_str = query_res.add_blobs(); - img_str->resize(img_enc.size()); - std::memcpy((void *)img_str->data(), (void *)img_enc.data(), - img_enc.size()); - + if (raw_data.size() != 0) { + std::vector img_enc; + img_enc = do_single_img_ops(orig_query, raw_data, _cmd_name); + + if (img_enc.size() != 0) { + std::string *img_str = query_res.add_blobs(); + img_str->resize(img_enc.size()); + std::memcpy((void *)img_str->data(), (void *)img_enc.data(), + img_enc.size()); + } else { + Json::Value return_error; + return_error["status"] = Neo4jCommand::Error; + return_error["info"] = "Image Operation Failure"; + return error(return_error); + } + } else { + Json::Value return_error; + return_error["status"] = Neo4jCommand::Error; + return_error["info"] = "S3 Object Retrieval Failed"; + return error(return_error); + } } catch (VCL::Exception e) { print_exception(e); Json::Value return_error; diff --git a/src/OpsIOCoordinator.cc b/src/OpsIOCoordinator.cc index c680dbbf..dae4bf49 100644 --- a/src/OpsIOCoordinator.cc +++ b/src/OpsIOCoordinator.cc @@ -83,9 +83,7 @@ Json::Value get_json_val(const Json::Value &json, const std::string &key, int img_enqueue_operations(VCL::Image &img, const Json::Value &ops) { std::chrono::steady_clock::time_point total_start, total_end; - double total_runtime; - total_start = std::chrono::steady_clock::now(); // Correct operation type and parameters are guaranteed at this point for (auto &op : ops) { const std::string &type = get_json_val(op, 
"type"); @@ -104,14 +102,10 @@ int img_enqueue_operations(VCL::Image &img, const Json::Value &ops) { img.rotate(get_json_val(op, "angle"), get_json_val(op, "resize")); } else { - throw ExceptionCommand(ImageError, "Operation is not defined"); + printf("Operation is not defined: %s\n", type.c_str()); return -1; } } - total_end = std::chrono::steady_clock::now(); - total_runtime = std::chrono::duration_cast( - total_end - total_start) - .count(); return 0; } @@ -120,9 +114,6 @@ std::vector do_single_img_ops(const Json::Value &orig_query, std::vector &raw_data, std::string cmd_name) { - std::chrono::steady_clock::time_point total_start, total_end; - total_start = std::chrono::steady_clock::now(); - double total_runtime; Json::Value cmd; if (orig_query.isMember(cmd_name)) { cmd = orig_query[cmd_name]; @@ -147,6 +138,10 @@ do_single_img_ops(const Json::Value &orig_query, if (cmd.isMember("operations")) { operation_flags = img_enqueue_operations(img, cmd["operations"]); + + if (operation_flags == -1) { + return std::vector(); + } } if (cmd.isMember("target_format")) { @@ -157,7 +152,8 @@ do_single_img_ops(const Json::Value &orig_query, } else if (format == "bin") { vcl_format = VCL::Format::BIN; } else { - printf("Warning! %s not supported!\n", format.c_str()); + printf("Warning! 
%s not supported as a target format!\n", format.c_str()); + return std::vector(); } // FUTURE, add TDB support } @@ -167,10 +163,6 @@ do_single_img_ops(const Json::Value &orig_query, // getting the image size performs operation as a side effect imgsize = img.get_raw_data_size(); img_enc = img.get_encoded_image(vcl_format); - total_end = std::chrono::steady_clock::now(); - total_runtime = std::chrono::duration_cast( - total_end - total_start) - .count(); return img_enc; } @@ -184,17 +176,11 @@ std::vector s3_retrieval(std::string obj_name, return std::vector(); } - std::chrono::steady_clock::time_point total_start, total_end; - total_start = std::chrono::steady_clock::now(); double total_runtime; std::vector raw_data; raw_data = connection->Read(obj_name); - total_end = std::chrono::steady_clock::now(); - total_runtime = std::chrono::duration_cast( - total_end - total_start) - .count(); return raw_data; } @@ -208,24 +194,19 @@ int s3_upload(std::string obj_name, std::vector upload_data, return -1; } - std::chrono::steady_clock::time_point total_start, total_end; - total_start = std::chrono::steady_clock::now(); double total_runtime; + bool write_succ; + + write_succ = connection->Write(obj_name, upload_data); - connection->Write(obj_name, upload_data); - total_end = std::chrono::steady_clock::now(); - total_runtime = std::chrono::duration_cast( - total_end - total_start) - .count(); + if (!write_succ) + return -1; return 0; } VCL::RemoteConnection *instantiate_connection() { - printf("Instantiating global S3 Connection...\n"); - std::chrono::steady_clock::time_point total_start, total_end; - total_start = std::chrono::steady_clock::now(); - double total_runtime; + printf("Instantiating S3 Connection...\n"); VCL::RemoteConnection *connection; connection = new VCL::RemoteConnection(); @@ -233,12 +214,7 @@ VCL::RemoteConnection *instantiate_connection() { connection->_bucket_name = bucket; connection->start(); - total_end = std::chrono::steady_clock::now(); - 
total_runtime = std::chrono::duration_cast( - total_end - total_start) - .count(); - - printf("Global S3 Connection Started!\n"); + printf("S3 Connection Started!\n"); return connection; } diff --git a/src/QueryHandlerBase.cc b/src/QueryHandlerBase.cc index 494cceb7..63c72679 100644 --- a/src/QueryHandlerBase.cc +++ b/src/QueryHandlerBase.cc @@ -6,19 +6,15 @@ #include "ImageCommand.h" #include "VideoCommand.h" +#include "VDMSConfig.h" +#include "timers/TimerMap.h" + using namespace VDMS; valijson::Schema *QueryHandlerBase::_schema = new valijson::Schema; QueryHandlerBase::QueryHandlerBase() - : _validator(valijson::Validator::kWeakTypes) -#ifdef CHRONO_TIMING - , - ch_tx_total("ch_tx_total"), ch_tx_query("ch_tx_query"), - ch_tx_send("ch_tx_send") -#endif -{ -} + : _validator(valijson::Validator::kWeakTypes) {} // TODO create a better mechanism to cleanup queries that // includes feature vectors and user-defined blobs @@ -46,24 +42,32 @@ void QueryHandlerBase::cleanup_query(const std::vector &images, void QueryHandlerBase::process_connection(comm::Connection *c) { QueryMessage msgs(c); + std::string timer_id; + + Json::Value timing_res; + std::vector timer_id_list; + bool output_timing_info; + + output_timing_info = + VDMSConfig::instance()->get_bool_value("print_high_level_timing", false); + try { while (true) { + TimerMap timers; protobufs::queryMessage response; protobufs::queryMessage query = msgs.get_query(); - CHRONO_TIC(ch_tx_total); - CHRONO_TIC(ch_tx_query); + timers.add_timestamp("e2e_query_processing"); process_query(query, response); - CHRONO_TAC(ch_tx_query); + timers.add_timestamp("e2e_query_processing"); - CHRONO_TIC(ch_tx_send); + timers.add_timestamp("msg_send"); msgs.send_response(response); - CHRONO_TAC(ch_tx_send); + timers.add_timestamp("msg_send"); - CHRONO_TAC(ch_tx_total); - CHRONO_PRINT_LAST_MS(ch_tx_total); - CHRONO_PRINT_LAST_MS(ch_tx_query); - CHRONO_PRINT_LAST_MS(ch_tx_send); + if (output_timing_info) { + timers.print_map_runtimes(); + } 
} } catch (comm::ExceptionComm e) { print_exception(e); diff --git a/src/QueryHandlerBase.h b/src/QueryHandlerBase.h index fe144418..5da77dab 100644 --- a/src/QueryHandlerBase.h +++ b/src/QueryHandlerBase.h @@ -8,8 +8,6 @@ #include "QueryMessage.h" // Protobuff implementation #include #include -//#include "Server.h" -#include "chrono/Chrono.h" // Json parsing files #include diff --git a/src/QueryHandlerExample.h b/src/QueryHandlerExample.h index d3cbc7db..477ec0c5 100644 --- a/src/QueryHandlerExample.h +++ b/src/QueryHandlerExample.h @@ -36,7 +36,6 @@ #include #include "QueryHandlerBase.h" -#include "chrono/Chrono.h" #include "comm/Connection.h" // Json parsing files diff --git a/src/QueryHandlerNeo4j.cc b/src/QueryHandlerNeo4j.cc index 8f4c91cf..ee7f4465 100644 --- a/src/QueryHandlerNeo4j.cc +++ b/src/QueryHandlerNeo4j.cc @@ -162,25 +162,11 @@ bool QueryHandlerNeo4j::syntax_checker(const Json::Value &root, void QueryHandlerNeo4j::process_query(protobufs::queryMessage &proto_query, protobufs::queryMessage &proto_res) { - std::chrono::steady_clock::time_point dbconn_start, dbconn_end; - std::chrono::steady_clock::time_point pre_proc_start, pre_proc_end; - std::chrono::steady_clock::time_point resp_start, resp_end; - std::chrono::steady_clock::time_point total_start, total_end; - std::chrono::steady_clock::time_point db_trans_time_start, db_trans_time_end; - std::chrono::steady_clock::time_point db_cmt_time_start, db_cmt_time_end; - double total_runtime, db_conn_time, pre_proc_time, cons_resp_time, - db_trans_time, db_cmt_time; - neo4j_transaction *tx; neo4j_connection_t *conn; neo4j_result_stream_t *res_stream; - total_start = std::chrono::steady_clock::now(); - dbconn_start = std::chrono::steady_clock::now(); conn = neoconn_pool->get_conn(); - ///// connection retrieved - dbconn_end = std::chrono::steady_clock::now(); - int rc; Json::FastWriter fastWriter; @@ -190,6 +176,7 @@ void QueryHandlerNeo4j::process_query(protobufs::queryMessage &proto_query, Json::Value 
root; int blob_count = 0; + bool error = false; rc = parse_commands(proto_query, root); @@ -209,22 +196,20 @@ void QueryHandlerNeo4j::process_query(protobufs::queryMessage &proto_query, const std::string &blob = rscmd->need_blob(query) ? proto_query.blobs(blob_count++) : ""; - pre_proc_start = std::chrono::steady_clock::now(); - rscmd->data_processing(cypher, query, blob, 0, cmd_result); - pre_proc_end = std::chrono::steady_clock::now(); + rc = rscmd->data_processing(cypher, query, blob, 0, cmd_result); - db_trans_time_start = std::chrono::steady_clock::now(); - res_stream = neoconn_pool->run_in_tx((char *)cypher.c_str(), tx); - db_trans_time_end = std::chrono::steady_clock::now(); + if (rc != 0) { + printf("Data Processing failed, aborting transaction...\n"); + error = true; + break; + } + res_stream = neoconn_pool->run_in_tx((char *)cypher.c_str(), tx); neo4j_resp = neoconn_pool->results_to_json(res_stream); - resp_start = std::chrono::steady_clock::now(); rscmd->construct_responses(neo4j_resp, query, proto_res, blob); - resp_end = std::chrono::steady_clock::now(); if (neo4j_resp.isMember("metadata_res")) { - hello_res["metadata_res"] = neo4j_resp["metadata_res"]; } @@ -232,32 +217,13 @@ void QueryHandlerNeo4j::process_query(protobufs::queryMessage &proto_query, proto_res.set_json(fastWriter.write(json_responses)); } - // commit neo4j transaction - - db_cmt_time_start = std::chrono::steady_clock::now(); - neoconn_pool->commit_tx(tx); - db_cmt_time_end = std::chrono::steady_clock::now(); - neoconn_pool->put_conn(conn); - total_end = std::chrono::steady_clock::now(); - - db_conn_time = std::chrono::duration_cast( - dbconn_end - dbconn_start) - .count(); - pre_proc_time = std::chrono::duration_cast( - pre_proc_end - pre_proc_start) - .count(); - cons_resp_time = std::chrono::duration_cast( - resp_end - resp_start) - .count(); - total_runtime = std::chrono::duration_cast( - total_end - total_start) - .count(); - db_trans_time = std::chrono::duration_cast( - 
db_trans_time_end - db_trans_time_start) - .count(); - db_cmt_time = std::chrono::duration_cast( - db_cmt_time_end - db_cmt_time_start) - .count(); + // commit neo4j transaction, needs to be updated in future to account for + // errors on response construction + if (error == false) { + neoconn_pool->commit_tx(tx); + } else { + neoconn_pool->put_conn(conn); + } } int QueryHandlerNeo4j::parse_commands( diff --git a/src/QueryHandlerPMGD.cc b/src/QueryHandlerPMGD.cc index 90b4fee9..eabbe6df 100644 --- a/src/QueryHandlerPMGD.cc +++ b/src/QueryHandlerPMGD.cc @@ -53,6 +53,8 @@ #include #include +#include "timers/TimerMap.h" + using namespace VDMS; std::unordered_map QueryHandlerPMGD::_rs_cmds; @@ -115,13 +117,9 @@ void QueryHandlerPMGD::init() { } QueryHandlerPMGD::QueryHandlerPMGD() - : _pmgd_qh(), _autodelete_init(false), _autoreplicate_init(false) -#ifdef CHRONO_TIMING - , - ch_tx_total("ch_tx_total"), ch_tx_query("ch_tx_query"), - ch_tx_send("ch_tx_send") -#endif -{ + : _pmgd_qh(), _autodelete_init(false), _autoreplicate_init(false) { + output_query_level_timing = + VDMSConfig::instance()->get_bool_value("print_query_timing", false); } bool QueryHandlerPMGD::syntax_checker(const Json::Value &root, @@ -275,6 +273,13 @@ void QueryHandlerPMGD::process_query(protobufs::queryMessage &proto_query, std::vector construct_results; + int time_input_ctr = 0; + int time_output_ctr = 0; + std::string timer_id; + TimerMap timers; + Json::Value timing_res; + std::vector timer_id_list; + auto error = [&](Json::Value &res, Json::Value &failed_command) { cleanup_query(images_log, videos_log); res["FailedCommand"] = failed_command; @@ -307,8 +312,13 @@ void QueryHandlerPMGD::process_query(protobufs::queryMessage &proto_query, const std::string &blob = rscmd->need_blob(query) ? 
proto_query.blobs(blob_count++) : ""; + timer_id = + "input_operation_" + cmd + "_" + std::to_string(time_input_ctr); + time_input_ctr++; + timers.add_timestamp(timer_id); int ret_code = rscmd->construct_protobuf(pmgd_query, query, blob, group_count, cmd_result); + timers.add_timestamp(timer_id); if (cmd_result.isMember("image_added")) { images_log.push_back(cmd_result["image_added"].asString()); @@ -325,7 +335,9 @@ void QueryHandlerPMGD::process_query(protobufs::queryMessage &proto_query, construct_results.push_back(cmd_result); } + timers.add_timestamp("pmgd_query_time"); Json::Value &tx_responses = pmgd_query.run(_autodelete_init); + timers.add_timestamp("pmgd_query_time"); if (!tx_responses.isArray() || tx_responses.size() != root.size()) { Json::StyledWriter writer; @@ -355,8 +367,14 @@ void QueryHandlerPMGD::process_query(protobufs::queryMessage &proto_query, rscmd->need_blob(query) ? proto_query.blobs(blob_count++) : ""; query["cp_result"] = construct_results[j]; + + timer_id = + "response_operation_" + cmd + "_" + std::to_string(time_output_ctr); + time_output_ctr++; + timers.add_timestamp(timer_id); cmd_result = rscmd->construct_responses(tx_responses[j], query, proto_res, blob); + timers.add_timestamp(timer_id); // This is for error handling if (cmd_result.isMember("status")) { @@ -367,9 +385,14 @@ void QueryHandlerPMGD::process_query(protobufs::queryMessage &proto_query, return; } } + json_responses.append(cmd_result); } } + + if (output_query_level_timing) { + timers.print_map_runtimes(); + } proto_res.set_json(fastWriter.write(json_responses)); _pmgd_qh.cleanup_files(); diff --git a/src/QueryHandlerPMGD.h b/src/QueryHandlerPMGD.h index 7d2571a3..d848e4b6 100644 --- a/src/QueryHandlerPMGD.h +++ b/src/QueryHandlerPMGD.h @@ -34,7 +34,6 @@ #include "QueryHandlerBase.h" #include "RSCommand.h" #include "Server.h" -#include "chrono/Chrono.h" namespace VDMS { @@ -65,6 +64,8 @@ class QueryHandlerPMGD : public QueryHandlerBase { void 
set_autoreplicate_init_flag(); void reset_autoreplicate_init_flag(); void regular_run_autoreplicate(ReplicationConfig &); + + bool output_query_level_timing; }; } // namespace VDMS diff --git a/src/VideoCommand.cc b/src/VideoCommand.cc index 128bd4ae..4ecff882 100644 --- a/src/VideoCommand.cc +++ b/src/VideoCommand.cc @@ -42,13 +42,17 @@ using namespace VDMS; namespace fs = std::filesystem; -VideoCommand::VideoCommand(const std::string &cmd_name) : RSCommand(cmd_name) {} +VideoCommand::VideoCommand(const std::string &cmd_name) : RSCommand(cmd_name) { + output_vcl_timing = + VDMSConfig::instance()->get_bool_value("print_vcl_timing", false); +} void VideoCommand::enqueue_operations(VCL::Video &video, const Json::Value &ops, bool is_addition) { // Correct operation type and parameters are guaranteed at this point for (auto &op : ops) { const std::string &type = get_value(op, "type"); + // video.op_labels.push_back(type); std::string unit; if (type == "threshold") { video.threshold(get_value(op, "value")); @@ -67,16 +71,20 @@ void VideoCommand::enqueue_operations(VCL::Video &video, const Json::Value &ops, get_value(op, "width"), get_value(op, "height"))); } else if (type == "syncremoteOp") { try { - video.syncremoteOperation(get_value(op, "url"), - get_value(op, "options")); + Json::Value options = get_value(op, "options"); + if (is_addition) { + options["ingestion"] = 1; + } + video.syncremoteOperation(get_value(op, "url"), options); } catch (const std::exception &e) { std::cerr << e.what() << '\n'; } } else if (type == "remoteOp") { try { + Json::Value options = get_value(op, "options"); if (is_addition) { - video.syncremoteOperation(get_value(op, "url"), - get_value(op, "options")); + options["ingestion"] = 1; + video.syncremoteOperation(get_value(op, "url"), options); } else { video.remoteOperation(get_value(op, "url"), get_value(op, "options")); @@ -225,6 +233,62 @@ int AddVideo::construct_protobuf(PMGDQuery &query, const Json::Value &jsoncmd, query.AddEdge(-1, 
node_ref, frame_ref, VDMS_KF_EDGE, Json::Value()); } + std::vector video_metadata = video.get_ingest_metadata(); + + if (video_metadata.size() > 0) { + std::map frameMap; + int frame_ref; + for (Json::Value metadata : video_metadata) { + for (Json::Value vframe : metadata) { + bool frame_flag = false; + if (frameMap.find(vframe["frameId"].asInt()) == frameMap.end()) { + frame_ref = query.get_available_reference(); + frameMap.insert( + std::pair(vframe["frameId"].asInt(), frame_ref)); + } else { + frame_ref = frameMap.at(vframe["frameId"].asInt()); + frame_flag = true; + } + + Json::Value frame_props; + frame_props[VDMS_DM_VID_IDX_PROP] = vframe["frameId"].asInt(); + frame_props[VDMS_DM_VID_NAME_PROP] = props[VDMS_VID_PATH_PROP]; + + Json::Value edge_props; + edge_props[VDMS_DM_VID_IDX_PROP] = vframe["frameId"].asInt(); + edge_props[VDMS_DM_VID_NAME_PROP] = props[VDMS_VID_PATH_PROP]; + + if (!frame_flag) { + query.AddNode(frame_ref, VDMS_DM_VID_TAG, frame_props, Json::Value()); + } + query.AddEdge(-1, node_ref, frame_ref, VDMS_DM_VID_EDGE, edge_props); + + if (vframe.isMember("bbox")) { + int bb_ref = query.get_available_reference(); + Json::Value bbox_props; + bbox_props[VDMS_DM_VID_IDX_PROP] = vframe["frameId"].asInt(); + bbox_props[VDMS_DM_VID_NAME_PROP] = props[VDMS_VID_PATH_PROP]; + bbox_props[VDMS_DM_VID_OBJECT_PROP] = + vframe["bbox"]["object"].asCString(); + bbox_props[VDMS_ROI_COORD_X_PROP] = vframe["bbox"]["x"].asFloat(); + bbox_props[VDMS_ROI_COORD_Y_PROP] = vframe["bbox"]["y"].asFloat(); + bbox_props[VDMS_ROI_WIDTH_PROP] = vframe["bbox"]["width"].asFloat(); + bbox_props[VDMS_ROI_HEIGHT_PROP] = vframe["bbox"]["height"].asFloat(); + bbox_props[VDMS_DM_VID_OBJECT_DET] = + vframe["bbox"]["object_det"].toStyledString(); + + Json::Value bb_edge_props; + bb_edge_props[VDMS_DM_VID_IDX_PROP] = vframe["frameId"].asInt(); + bb_edge_props[VDMS_DM_VID_NAME_PROP] = props[VDMS_VID_PATH_PROP]; + + query.AddNode(bb_ref, VDMS_ROI_TAG, bbox_props, Json::Value()); + 
query.AddEdge(-1, frame_ref, bb_ref, VDMS_DM_VID_BB_EDGE, + bb_edge_props); + } + } + } + } + // In case we need to cleanup the query error["video_added"] = file_name; @@ -232,6 +296,10 @@ int AddVideo::construct_protobuf(PMGDQuery &query, const Json::Value &jsoncmd, add_link(query, cmd["link"], node_ref, VDMS_VID_EDGE); } + if (output_vcl_timing) { + video.timers.print_map_runtimes(); + } + return 0; } @@ -308,9 +376,40 @@ int FindVideo::construct_protobuf(PMGDQuery &query, const Json::Value &jsoncmd, results["list"].append(VDMS_VID_PATH_PROP); } - query.QueryNode(get_value(cmd, "_ref", -1), VDMS_VID_TAG, cmd["link"], - cmd["constraints"], results, - get_value(cmd, "unique", false)); + if (cmd.isMember("metaconstraints")) { + results["list"].append(VDMS_DM_VID_NAME_PROP); + results["list"].append(VDMS_DM_VID_IDX_PROP); + + for (auto member : cmd["metaconstraints"].getMemberNames()) { + results["list"].append(member); + } + + results["list"].append(VDMS_DM_VID_OBJECT_PROP); + results["list"].append(VDMS_DM_VID_OBJECT_DET); + results["list"].append(VDMS_ROI_COORD_X_PROP); + results["list"].append(VDMS_ROI_COORD_Y_PROP); + results["list"].append(VDMS_ROI_WIDTH_PROP); + results["list"].append(VDMS_ROI_HEIGHT_PROP); + + query.QueryNode(get_value(cmd, "_ref", -1), VDMS_ROI_TAG, cmd["link"], + cmd["metaconstraints"], results, + get_value(cmd, "unique", false)); + } else if (cmd.isMember("frameconstraints")) { + results["list"].append(VDMS_DM_VID_NAME_PROP); + results["list"].append(VDMS_DM_VID_IDX_PROP); + + // for (auto member : cmd["frameconstraints"].getMemberNames()) { + // results["list"].append(member); + // } + + query.QueryNode(get_value(cmd, "_ref", -1), VDMS_DM_VID_TAG, + cmd["link"], cmd["frameconstraints"], results, + get_value(cmd, "unique", false)); + } else { + query.QueryNode(get_value(cmd, "_ref", -1), VDMS_VID_TAG, cmd["link"], + cmd["constraints"], results, + get_value(cmd, "unique", false)); + } return 0; } @@ -346,17 +445,76 @@ Json::Value 
FindVideo::construct_responses(Json::Value &responses, return error(return_error); } + if (cmd.isMember("metaconstraints")) { + std::map> findVideoMap; + for (auto ent : FindVideo["entities"]) { + if (std::find(shortlisted_videos.begin(), shortlisted_videos.end(), + ent[VDMS_DM_VID_NAME_PROP].asString()) == + shortlisted_videos.end()) { + continue; + } + ent[VDMS_VID_PATH_PROP] = ent[VDMS_DM_VID_NAME_PROP].asString(); + if (findVideoMap.find(ent[VDMS_VID_PATH_PROP].asString()) == + findVideoMap.end()) { + std::vector metadata; + metadata.push_back(ent); + findVideoMap[ent[VDMS_VID_PATH_PROP].asString()] = metadata; + } else { + findVideoMap[ent[VDMS_VID_PATH_PROP].asString()].push_back(ent); + } + } + + std::vector fventities; + FindVideo["entities"].clear(); + for (auto const &k : findVideoMap) { + Json::Value entity; + entity[VDMS_VID_PATH_PROP] = k.first; + for (auto e : k.second) { + entity["bbox"].append(e); + } + FindVideo["entities"].append(entity); + } + } + + else if (cmd.isMember("frameconstraints")) { + std::vector fventities; + for (auto ent : FindVideo["entities"]) { + if (std::find(shortlisted_videos.begin(), shortlisted_videos.end(), + ent[VDMS_DM_VID_NAME_PROP].asString()) != + shortlisted_videos.end()) { + fventities.push_back(ent); + } + } + FindVideo["entities"].clear(); + + for (auto fent : fventities) { + FindVideo["entities"].append(fent); + } + + FindVideo["returned"] = FindVideo["entities"].size(); + ret[_cmd_name].swap(FindVideo); + return ret; + } + bool flag_empty = true; VideoLoop videoLoop; for (auto &ent : FindVideo["entities"]) { - videoLoop.set_nrof_entities(FindVideo["entities"].size()); if (!ent.isMember(VDMS_VID_PATH_PROP)) { continue; } - std::string video_path = ent[VDMS_VID_PATH_PROP].asString(); - ent.removeMember(VDMS_VID_PATH_PROP); + std::string video_path; + if (cmd.isMember("frameconstraints")) { + video_path = ent[VDMS_DM_VID_NAME_PROP].asString(); + } else { + video_path = ent[VDMS_VID_PATH_PROP].asString(); + 
ent.removeMember(VDMS_VID_PATH_PROP); + } + + if (!cmd.isMember("metaconstraints") && !cmd.isMember("frameconstraints")) { + shortlisted_videos.push_back(video_path); + } if (ent.getMemberNames().size() > 0) { flag_empty = false; @@ -478,6 +636,11 @@ Json::Value FindVideo::construct_responses(Json::Value &responses, return_error["info"] = "Video Data not found"; error(return_error); } + + if (output_vcl_timing) { + iter->second.timers.print_map_runtimes(); + } + iter++; } } else { diff --git a/src/VideoCommand.h b/src/VideoCommand.h index aeb94097..1f3fccab 100644 --- a/src/VideoCommand.h +++ b/src/VideoCommand.h @@ -50,6 +50,7 @@ class VideoCommand : public RSCommand { VCL::Video::Codec string_to_codec(const std::string &codec); virtual Json::Value check_responses(Json::Value &responses); + bool output_vcl_timing; public: VideoCommand(const std::string &cmd_name); @@ -97,6 +98,7 @@ class UpdateVideo : public VideoCommand { class FindVideo : public VideoCommand { // bool _use_aws_storage; + std::vector shortlisted_videos; public: FindVideo(); diff --git a/src/defines.h b/src/defines.h index 7320afd9..afa2d653 100644 --- a/src/defines.h +++ b/src/defines.h @@ -97,3 +97,18 @@ #define VDMS_KF_EDGE "VD:KFLINK" #define VDMS_KF_IDX_PROP "VD:frameIndex" #define VDMS_KF_BASE_PROP "VD:frameBase" + +// Dynamic Metadata Video +#define VDMS_DM_VID_TAG "Frame" +#define VDMS_DM_VID_EDGE "Vid2Frame" +#define VDMS_DM_VID_BB_EDGE "Frame2BB" +#define VDMS_DM_VID_IDX_PROP "frameID" +#define VDMS_DM_VID_NAME_PROP "video_name" +#define VDMS_DM_VID_OBJECT_PROP "objectID" +#define VDMS_DM_VID_OBJECT_DET "object_det" + +// Dynamic Metadata Image +#define VDMS_DM_IMG_BB_EDGE "Image2BB" +#define VDMS_DM_IMG_NAME_PROP "image_name" +#define VDMS_DM_IMG_OBJECT_PROP "objectID" +#define VDMS_DM_IMG_OBJECT_DET "object_det" diff --git a/src/vcl/CMakeLists.txt b/src/vcl/CMakeLists.txt index a99080e3..d3a7a1ad 100644 --- a/src/vcl/CMakeLists.txt +++ b/src/vcl/CMakeLists.txt @@ -6,7 +6,7 @@ 
set(CMAKE_CXX_STANDARD 17) find_package( OpenCV REQUIRED ) include_directories(../../include . /usr/local/include/opencv4 /usr/include/jsoncpp) - +include_directories(../../utils/include) add_library(vcl SHARED ../VDMSConfig.cc DescriptorSet.cc @@ -25,6 +25,7 @@ add_library(vcl SHARED Video.cc CustomVCL.cc RemoteConnection.cc + ../../utils/src/timers/TimerMap.cc ) link_directories( /usr/local/lib ) target_link_libraries(vcl lapack faiss tiledb flinng avformat avcodec swscale ${OpenCV_LIBS}) diff --git a/src/vcl/DescriptorSet.cc b/src/vcl/DescriptorSet.cc index 5592f15a..72a62a5b 100644 --- a/src/vcl/DescriptorSet.cc +++ b/src/vcl/DescriptorSet.cc @@ -94,13 +94,16 @@ DescriptorSet::DescriptorSet(const std::string &set_path, unsigned dim, DescriptorSet::~DescriptorSet() { delete _set; } void DescriptorSet::write_set_info() { + timers.add_timestamp("write_set_info"); std::string path = _set->get_path() + "/" + INFO_FILE_NAME; std::ofstream info_file(path); info_file << _eng << std::endl; info_file.close(); + timers.add_timestamp("write_set_info"); } void DescriptorSet::read_set_info(const std::string &set_path) { + timers.add_timestamp("read_set_info"); std::string path = set_path + "/" + INFO_FILE_NAME; std::ifstream info_file(path); @@ -116,6 +119,7 @@ void DescriptorSet::read_set_info(const std::string &set_path) { sstr >> num; _eng = (DescriptorSetEngine)num; info_file.close(); + timers.add_timestamp("read_set_info"); } /* *********************** */ @@ -131,49 +135,78 @@ long DescriptorSet::get_n_descriptors() { return _set->get_n_total(); } void DescriptorSet::search(DescDataArray queries, unsigned n_queries, unsigned k, long *descriptors_ids, float *distances) { + timers.add_timestamp("desc_set_search"); _set->search(queries, n_queries, k, descriptors_ids, distances); + timers.add_timestamp("desc_set_search"); } void DescriptorSet::search(DescDataArray queries, unsigned n_queries, unsigned k, long *descriptors_ids) { + timers.add_timestamp("desc_set_search"); 
_set->search(queries, n_queries, k, descriptors_ids); + timers.add_timestamp("desc_set_search"); } void DescriptorSet::radius_search(DescData queries, float radius, long *descriptors_ids, float *distances) { + timers.add_timestamp("desc_set_radius_search"); _set->radius_search(queries, radius, descriptors_ids, distances); + timers.add_timestamp("desc_set_radius_search"); } long DescriptorSet::add(DescDataArray descriptors, unsigned n, long *labels) { - return _set->add(descriptors, n, labels); + long rc; + timers.add_timestamp("desc_set_add"); + rc = _set->add(descriptors, n, labels); + timers.add_timestamp("desc_set_add"); + return rc; } long DescriptorSet::add_and_store(DescDataArray descriptors, unsigned n, long *labels) { - return _set->add_and_store(descriptors, n, labels); + long rc; + timers.add_timestamp("desc_set_add_and_store"); + rc = _set->add_and_store(descriptors, n, labels); + timers.add_timestamp("desc_set_add_and_store"); + return rc; } -void DescriptorSet::train() { _set->train(); } +void DescriptorSet::train() { + timers.add_timestamp("desc_set_add_and_store"); + _set->train(); + timers.add_timestamp("desc_set_add_and_store"); +} -void DescriptorSet::finalize_index() { _set->finalize_index(); } +void DescriptorSet::finalize_index() { + timers.add_timestamp("desc_set_finalize_idx"); + _set->finalize_index(); + timers.add_timestamp("desc_set_finalize_idx"); +} void DescriptorSet::train(DescDataArray descriptors, unsigned n) { + timers.add_timestamp("desc_set_train"); _set->train(descriptors, n); + timers.add_timestamp("desc_set_train"); } bool DescriptorSet::is_trained() { return _set->is_trained(); } void DescriptorSet::classify(DescDataArray descriptors, unsigned n, long *labels, unsigned quorum) { + timers.add_timestamp("desc_set_classify"); _set->classify(descriptors, n, labels, quorum); + timers.add_timestamp("desc_set_classify"); } void DescriptorSet::get_descriptors(long *ids, unsigned n, DescDataArray descriptors) { + 
timers.add_timestamp("desc_set_get_descs"); _set->get_descriptors(ids, n, descriptors); + timers.add_timestamp("desc_set_get_descs"); } void DescriptorSet::store() { + timers.add_timestamp("desc_set_store"); _set->store(); write_set_info(); @@ -197,11 +230,14 @@ void DescriptorSet::store() { // std::remove(filename.c_str()); } } + timers.add_timestamp("desc_set_store"); } void DescriptorSet::store(std::string set_path) { + timers.add_timestamp("desc_set_store"); _set->store(set_path); write_set_info(); + timers.add_timestamp("desc_set_store"); } /* *********************** */ @@ -210,45 +246,62 @@ void DescriptorSet::store(std::string set_path) { long DescriptorSet::add(DescDataArray descriptors, unsigned n, LabelIdVector &labels) { + long rc; + timers.add_timestamp("desc_set_add_vec"); if (n != labels.size() && labels.size() != 0) throw VCLException(SizeMismatch, "Labels Vector of Wrong Size"); + rc = add(descriptors, n, labels.size() > 0 ? (long *)labels.data() : NULL); + timers.add_timestamp("desc_set_add_vec"); - return add(descriptors, n, labels.size() > 0 ? (long *)labels.data() : NULL); + return rc; } long DescriptorSet::add_and_store(DescDataArray descriptors, unsigned n, LabelIdVector &labels) { + timers.add_timestamp("desc_set_add_store_vec"); + long rc; if (n != labels.size() && labels.size() != 0) throw VCLException(SizeMismatch, "Labels Vector of Wrong Size"); - return add_and_store(descriptors, n, - labels.size() > 0 ? (long *)labels.data() : NULL); + rc = add_and_store(descriptors, n, + labels.size() > 0 ? 
(long *)labels.data() : NULL); + + timers.add_timestamp("desc_set_add_store_vec"); + return rc; } void DescriptorSet::search(DescDataArray queries, unsigned n, unsigned k, DescIdVector &ids, DistanceVector &distances) { + timers.add_timestamp("search"); ids.resize(n * k); distances.resize(n * k); search(queries, n, k, ids.data(), distances.data()); + timers.add_timestamp("search"); } void DescriptorSet::search(DescDataArray queries, unsigned n, unsigned k, DescIdVector &ids) { + timers.add_timestamp("desc_set_search"); ids.resize(n * k); search(queries, n, k, ids.data()); + timers.add_timestamp("desc_set_search"); } std::vector DescriptorSet::classify(DescDataArray descriptors, unsigned n, unsigned quorum) { + timers.add_timestamp("desc_set_vec_classify"); LabelIdVector labels; labels.resize(n); classify(descriptors, n, labels.data(), quorum); + timers.add_timestamp("desc_set_vec_classify"); return labels; } void DescriptorSet::get_descriptors(std::vector &ids, float *descriptors) { + timers.add_timestamp("desc_set_vec_get_desc"); get_descriptors(ids.data(), ids.size(), descriptors); + timers.add_timestamp("desc_set_vec_get_desc"); } /* *********************** */ @@ -316,4 +369,5 @@ void DescriptorSet::set_connection(RemoteConnection *remote) { _remote = remote; _storage = VDMS::StorageType::AWS; } + } // namespace VCL diff --git a/src/vcl/Image.cc b/src/vcl/Image.cc index 10af6a43..8b51e803 100644 --- a/src/vcl/Image.cc +++ b/src/vcl/Image.cc @@ -475,6 +475,22 @@ void Image::SyncRemoteOperation::operator()(Image *img) { } } + std::string delimiter = ":metadata:"; + + size_t pos = 0; + std::string token; + std::string tmpBuffer = readBuffer; + if ((pos = tmpBuffer.find(delimiter)) != std::string::npos) { + readBuffer = tmpBuffer.substr(0, pos); + tmpBuffer.erase(0, pos + delimiter.length()); + Json::Value message; + Json::Reader reader; + bool parsingSuccessful = reader.parse(tmpBuffer, message); + if (!parsingSuccessful) { + throw VCLException(ObjectEmpty, "Error 
parsing string."); + } + img->set_ingest_metadata(message["metadata"]); + } // Decode the response std::vector vectordata(readBuffer.begin(), readBuffer.end()); @@ -548,6 +564,7 @@ void Image::UserOperation::operator()(Image *img) { cv::imwrite(filePath, img->_cv_img); _options["ipfile"] = filePath; + _options["media_type"] = "image"; std::string message_to_send = _options.toStyledString(); @@ -556,36 +573,53 @@ void Image::UserOperation::operator()(Image *img) { memcpy(ipfile.data(), message_to_send.data(), message_len); socket.send(ipfile, 0); - + std::string response; while (true) { - char buffer[256]; - int size = socket.recv(buffer, 255, 0); + zmq::message_t reply; + socket.recv(&reply); - buffer[size] = '\0'; - opfile = buffer; + response = + std::string(static_cast(reply.data()), reply.size()); break; } - std::ifstream rfile; - rfile.open(opfile); - - if (rfile) { - rfile.close(); + std::string delimiter = "metadata"; + size_t pos; + if ((pos = response.find(delimiter)) != std::string::npos) { + Json::Value message; + Json::Reader reader; + bool parsingSuccessful = reader.parse(response, message); + if (!parsingSuccessful) { + throw VCLException(ObjectEmpty, "Error parsing string."); + } + img->set_ingest_metadata(message["metadata"]); } else { - if (std::remove(filePath.data()) != 0) { + opfile = response; + std::ifstream rfile; + rfile.open(opfile); + + if (rfile) { + rfile.close(); + } else { + if (std::remove(filePath.data()) != 0) { + throw VCLException(ObjectNotFound, "Unable to remove file"); + } + throw VCLException(OpenFailed, "UDF Error"); } - throw VCLException(OpenFailed, "UDF Error"); - } - VCL::Image res_image(opfile); - img->shallow_copy_cv(res_image.get_cvmat(true)); + VCL::Image res_image(opfile); + img->shallow_copy_cv(res_image.get_cvmat(true)); - if (std::remove(filePath.data()) != 0) { - } + if (std::remove(filePath.data()) != 0) { + throw VCLException(ObjectNotFound, "Unable to remove file"); + } - if (std::remove(opfile.data()) != 0) { 
+ if (std::remove(opfile.data()) != 0) { + throw VCLException(ObjectNotFound, "Unable to remove file"); + } } + } else throw VCLException(ObjectEmpty, "Image object is empty"); } @@ -791,8 +825,10 @@ Image::Image(const Image &img, bool copy) { } else start = 0; - for (int i = start; i < img._operations.size(); ++i) + for (int i = start; i < img._operations.size(); ++i) { + op_labels.push_back(img.op_labels[i]); _operations.push_back(img._operations[i]); + } } _op_completed = img._op_completed; @@ -811,6 +847,7 @@ Image::Image(Image &&img) noexcept { _tdb = img._tdb; _operations = std::move(img._operations); shallow_copy_cv(img._cv_img); + op_labels = img.op_labels; img._tdb = NULL; @@ -858,8 +895,10 @@ Image &Image::operator=(const Image &img) { } else start = 0; - for (int i = start; i < img._operations.size(); ++i) + for (int i = start; i < img._operations.size(); ++i) { + op_labels.push_back(img.op_labels[i]); _operations.push_back(img._operations[i]); + } } _op_completed = img._op_completed; @@ -951,6 +990,7 @@ Image Image::get_area(const Rectangle &roi, bool performOp) const { std::shared_ptr op = std::make_shared(roi, area._format); + area.op_labels.push_back("get_area"); area._operations.push_back(op); if (performOp) @@ -1036,6 +1076,10 @@ Json::Value Image::get_remoteOp_params() { return remoteOp_params; } std::string Image::get_query_error_response() { return _query_error_response; } +std::vector Image::get_ingest_metadata() { + return _ingest_metadata; +} + std::vector Image::get_encoded_image(VCL::Format format, const std::vector ¶ms) { if (format == VCL::Format::BIN) { @@ -1223,6 +1267,10 @@ void Image::set_query_error_response(std::string response_error) { _query_error_response = response_error; } +void Image::set_ingest_metadata(Json::Value metadata) { + _ingest_metadata.push_back(metadata); +} + void Image::update_op_completed() { _op_completed++; } void Image::set_connection(RemoteConnection *remote) { @@ -1252,11 +1300,15 @@ void 
Image::set_connection(RemoteConnection *remote) { void Image::perform_operations() { try { for (int x = 0; x < _operations.size(); ++x) { + std::string op_name = op_labels[x]; + timers.add_timestamp(op_name); std::shared_ptr op = _operations[x]; if (op == NULL) throw VCLException(ObjectEmpty, "Nothing to be done"); (*op)(this); + timers.add_timestamp(op_name); } + } catch (cv::Exception &e) { throw VCLException(OpenCVError, e.what()); } @@ -1288,11 +1340,13 @@ int Image::execute_operation() { void Image::read(const std::string &image_id) { _image_id = create_fullpath(image_id, _format); + op_labels.push_back("read"); _operations.push_back(std::make_shared(_image_id, _format)); } void Image::store(const std::string &image_id, VCL::Format image_format, bool store_metadata) { + op_labels.push_back("store"); _operations.push_back( std::make_shared(create_fullpath(image_id, image_format), image_format, _format, store_metadata)); @@ -1316,6 +1370,7 @@ bool Image::delete_image() { void Image::resize(int new_height, int new_width) { _operations.push_back(std::make_shared( Rectangle(0, 0, new_width, new_height), _format)); + op_labels.push_back("resize"); } void Image::crop(const Rectangle &rect) { @@ -1327,32 +1382,39 @@ void Image::crop(const Rectangle &rect) { } _operations.push_back(std::make_shared(rect, _format)); + op_labels.push_back("crop"); } void Image::threshold(int value) { _operations.push_back(std::make_shared(value, _format)); + op_labels.push_back("threshold"); } void Image::flip(int code) { _operations.push_back(std::make_shared(code, _format)); + op_labels.push_back("flip"); } void Image::rotate(float angle, bool keep_size) { _operations.push_back(std::make_shared(angle, keep_size, _format)); + op_labels.push_back("rotate"); } void Image::syncremoteOperation(std::string url, Json::Value options) { _operations.push_back( std::make_shared(url, options, _format)); + op_labels.push_back("sync_remote"); } void Image::remoteOperation(std::string url, 
Json::Value options) { _operations.push_back( std::make_shared(url, options, _format)); + op_labels.push_back("remote_op"); } void Image::userOperation(Json::Value options) { _operations.push_back(std::make_shared(options, _format)); + op_labels.push_back("user_op"); } /* *********************** */ diff --git a/src/vcl/KeyFrame.cc b/src/vcl/KeyFrame.cc index a09bbd36..6d804a2c 100644 --- a/src/vcl/KeyFrame.cc +++ b/src/vcl/KeyFrame.cc @@ -460,7 +460,7 @@ int KeyFrameDecoder::encode_frames(void) { if (!_ctx.frame_codec_context) { // Initialize frame encoder (PNG for now, may change in the future) - AVCodec *image_codec = avcodec_find_encoder(AV_CODEC_ID_PNG); + const AVCodec *image_codec = avcodec_find_encoder(AV_CODEC_ID_PNG); if (!image_codec) return AVERROR_ENCODER_NOT_FOUND; diff --git a/src/vcl/Video.cc b/src/vcl/Video.cc index a1001058..34f050e2 100644 --- a/src/vcl/Video.cc +++ b/src/vcl/Video.cc @@ -59,7 +59,6 @@ Video::Video(void *buffer, long size) : Video() { VDMS::VDMSConfig::instance()->get_path_tmp(), "vclvideoblob"); std::ofstream outfile(uname, std::ofstream::binary); _remote = nullptr; - if (outfile.is_open()) { outfile.write((char *)buffer, size); outfile.close(); @@ -94,6 +93,8 @@ Video::Video(const Video &video) { remoteOp_params = video.remoteOp_params; _query_error_response = video._query_error_response; + + op_labels = video.op_labels; } Video &Video::operator=(Video vid) { @@ -330,12 +331,12 @@ int Video::get_video_fourcc(VCL::Video::Codec _codec) { Json::Value Video::get_remoteOp_params() { return remoteOp_params; } -/* *********************** */ -/* SET FUNCTIONS */ -/* *********************** */ - std::string Video::get_operated_video_id() { return _operated_video_id; } +std::vector Video::get_ingest_metadata() { + return _ingest_metadata; +} + /* *********************** */ /* SET FUNCTIONS */ /* *********************** */ @@ -367,6 +368,10 @@ void Video::set_operated_video_id(std::string filename) { _operated_video_id = filename; } +void 
Video::set_ingest_metadata(Json::Value metadata) { + _ingest_metadata.push_back(metadata); +} + /* *********************** */ /* UTILITIES */ /* *********************** */ @@ -530,13 +535,15 @@ int Video::perform_single_frame_operations(std::string id, int op_count, for (i = op_count; i < _operations.size(); i++) { auto it = std::next(_operations.begin(), i); std::shared_ptr op = *it; - + std::string op_name = op_labels[i]; if ((*op).get_type() != VCL::Video::OperationType::SYNCREMOTEOPERATION && (*op).get_type() != VCL::Video::OperationType::INTERVAL && (*op).get_type() != VCL::Video::OperationType::USEROPERATION && (*op).get_type() != VCL::Video::OperationType::REMOTEOPERATION) { + timers.add_timestamp(fname + "_sframe_" + op_name); (*op)(this, mat_frame); + timers.add_timestamp(fname + "_sframe_" + op_name); if (i == _operations.size() - 1) { outputVideo << mat_frame; } @@ -619,6 +626,9 @@ void Video::perform_operations(bool is_store, std::string store_id) { cv::Mat mat; auto it = std::next(_operations.begin(), op_count); std::shared_ptr op = *it; + std::string opname = op_labels[op_count]; + timers.add_timestamp(fname + "_" + opname); + if ((*op).get_type() != VCL::Video::OperationType::SYNCREMOTEOPERATION) { (*op)(this, mat, fname); @@ -630,6 +640,7 @@ void Video::perform_operations(bool is_store, std::string store_id) { } op_count++; id = fname; + timers.add_timestamp(fname + "_" + opname); } } if (is_store) { @@ -759,33 +770,40 @@ void Video::set_connection(RemoteConnection *remote) { void Video::resize(int width, int height) { _flag_stored = false; _operations.push_back(std::make_shared(cv::Size(width, height))); + op_labels.push_back("vid_resize"); } void Video::interval(Video::Unit u, int start, int stop, int step) { _flag_stored = false; _operations.push_back(std::make_shared(u, start, stop, step)); + op_labels.push_back("vid_interval"); } void Video::crop(const Rectangle &rect) { _flag_stored = false; _operations.push_back(std::make_shared(rect)); + 
op_labels.push_back("vid_crop"); } void Video::threshold(int value) { _flag_stored = false; _operations.push_back(std::make_shared(value)); + op_labels.push_back("vid_threshold"); } void Video::syncremoteOperation(std::string url, Json::Value options) { _operations.push_back(std::make_shared(url, options)); + op_labels.push_back("vid_sync_remote"); } void Video::remoteOperation(std::string url, Json::Value options) { _operations.push_back(std::make_shared(url, options)); + op_labels.push_back("vid_remote_op"); } void Video::userOperation(Json::Value options) { _operations.push_back(std::make_shared(options)); + op_labels.push_back("vid_user_op"); } void Video::store(const std::string &video_id, Video::Codec video_codec) { @@ -971,6 +989,70 @@ static size_t videoCallback(void *ptr, size_t size, size_t nmemb, return written; } +Json::Value process_response(std::string zip_file_name, + std::string video_file_name, std::string format) { + const char *zipFileName = zip_file_name.c_str(); + Json::Value metadata; + + zip *archive = zip_open(zipFileName, 0, NULL); + + if (!archive) { + std::cerr << "Failed to open the zip file." 
<< std::endl; + return 1; + } + + int numFiles = zip_get_num_files(archive); + + for (int i = 0; i < numFiles; ++i) { + struct zip_stat fileInfo; + zip_stat_init(&fileInfo); + + if (zip_stat_index(archive, i, 0, &fileInfo) == 0) { + std::string filename(fileInfo.name); + zip_file *file = zip_fopen_index(archive, i, 0); + if (file) { + + if (filename.find(format) != std::string::npos) { + + char *new_filename = video_file_name.data(); + FILE *new_file = fopen(new_filename, "wb"); + if (!new_file) { + delete[] new_filename; + continue; + } + + char buffer[1024]; + int bytes_read; + while ((bytes_read = zip_fread(file, buffer, sizeof(buffer))) > 0) { + fwrite(buffer, 1, bytes_read, new_file); + } + + fclose(new_file); + } else { + char buffer[1024]; + std::string jsonString; + int bytes_read; + while ((bytes_read = zip_fread(file, buffer, sizeof(buffer))) > 0) { + jsonString += buffer; + } + + Json::Reader reader; + bool parsingSuccessful = reader.parse(jsonString, metadata); + if (!parsingSuccessful) { + return metadata; + } + } + zip_fclose(file); + } + } + } + + // Close the zip archive + zip_close(archive); + + return metadata; +} + void Video::SyncRemoteOperation::operator()(Video *video, cv::Mat &frame, std::string args) { try { @@ -1027,7 +1109,11 @@ void Video::SyncRemoteOperation::operator()(Video *video, cv::Mat &frame, std::string response_filepath = VDMS::VDMSConfig::instance()->get_path_tmp() + "/rtempfile" + std::to_string(utc_time.count()) + "." 
+ format; - FILE *response_file = fopen(response_filepath.data(), "wb"); + + std::string zip_response_filepath = + VDMS::VDMSConfig::instance()->get_path_tmp() + "/rtempzipfile" + + std::to_string(utc_time.count()) + ".zip"; + FILE *zip_response_file = fopen(zip_response_filepath.data(), "wb"); if (curl_easy_setopt(curl, CURLOPT_URL, _url.data()) != CURLE_OK) { throw VCLException(UndefinedException, "CURL setup error with URL"); @@ -1038,8 +1124,8 @@ void Video::SyncRemoteOperation::operator()(Video *video, cv::Mat &frame, "CURL setup error with callback"); } - if (response_file) { - if (curl_easy_setopt(curl, CURLOPT_WRITEDATA, response_file) != + if (zip_response_file) { + if (curl_easy_setopt(curl, CURLOPT_WRITEDATA, zip_response_file) != CURLE_OK) { throw VCLException(UndefinedException, "CURL setup error callback response file"); @@ -1049,7 +1135,7 @@ void Video::SyncRemoteOperation::operator()(Video *video, cv::Mat &frame, "CURL setup error with form"); } curl_easy_perform(curl); - fclose(response_file); + fclose(zip_response_file); } int http_status_code; @@ -1081,6 +1167,12 @@ void Video::SyncRemoteOperation::operator()(Video *video, cv::Mat &frame, } } + Json::Value metadata_response = + process_response(zip_response_filepath, response_filepath, format); + if (!metadata_response.empty()) { + video->set_ingest_metadata(metadata_response["metadata"]); + } + if (std::remove(fname.data()) != 0) { throw VCLException(ObjectEmpty, "Error encountered while removing the file."); @@ -1137,6 +1229,7 @@ void Video::UserOperation::operator()(Video *video, cv::Mat &frame, socket.connect(address.data()); _options["ipfile"] = fname; + _options["media_type"] = "video"; std::string message_to_send = _options.toStyledString(); @@ -1146,35 +1239,49 @@ void Video::UserOperation::operator()(Video *video, cv::Mat &frame, socket.send(ipfile, 0); + std::string response; // Wait for a response from the UDF process while (true) { - char buffer[256]; - int size = socket.recv(buffer, 
255, 0); + zmq::message_t reply; + socket.recv(&reply); - buffer[size] = '\0'; - opfile = buffer; + response = std::string(static_cast(reply.data()), reply.size()); break; } - std::ifstream rfile; - rfile.open(opfile); - - if (rfile) { - rfile.close(); + std::string delimiter = "metadata"; + size_t pos; + if ((pos = response.find(delimiter)) != std::string::npos) { + Json::Value message; + Json::Reader reader; + bool parsingSuccessful = reader.parse(response, message); + if (!parsingSuccessful) { + throw VCLException(ObjectEmpty, "Error parsing string."); + } + video->set_ingest_metadata(message["metadata"]); } else { - if (std::remove(opfile.data()) != 0) { + opfile = response; + + std::ifstream rfile; + rfile.open(opfile); + + if (rfile) { + rfile.close(); + } else { + if (std::remove(opfile.data()) != 0) { + } + throw VCLException(OpenFailed, "UDF Error"); } - throw VCLException(OpenFailed, "UDF Error"); - } - if (std::remove(fname.data()) != 0) { - throw VCLException(ObjectEmpty, - "Error encountered while removing the file."); - } - if (std::rename(opfile.data(), fname.data()) != 0) { - throw VCLException(ObjectEmpty, - "Error encountered while renaming the file."); + if (std::remove(fname.data()) != 0) { + throw VCLException(ObjectEmpty, + "Error encountered while removing the file."); + } + if (std::rename(opfile.data(), fname.data()) != 0) { + throw VCLException(ObjectEmpty, + "Error encountered while renaming the file."); + } } } else diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 7c27a4f4..93000afc 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -4,8 +4,8 @@ set(CMAKE_CXX_STANDARD 17) option(CODE_COVERAGE "Collect coverage" OFF) IF(CODE_COVERAGE) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -Wall -coverage -fprofile-arcs -ftest-coverage") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -Wall -coverage -fprofile-arcs -ftest-coverage") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -Wall -coverage -fprofile-abs-path") + 
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -Wall -coverage -fprofile-abs-path") enable_testing() ENDIF() message("Coverage:" ${CODE_COVERAGE}) @@ -63,6 +63,7 @@ add_executable(unit_tests unit_tests/TDBObject_test.cc unit_tests/VDMSConfig_test.cc unit_tests/SystemStats_test.cc + unit_tests/TimerMapTest.cc ) target_link_libraries(unit_tests diff --git a/tests/cleandbs.sh b/tests/cleandbs.sh index e0b773d5..641e704c 100755 --- a/tests/cleandbs.sh +++ b/tests/cleandbs.sh @@ -26,6 +26,7 @@ rm -rf test_images/tdb_to_jpg.jpg || true rm -rf test_images/tdb_to_png.png || true rm -rf test_images/test_image.jpg || true rm -rf remote_function_test/tmpfile* || true +rm -rf remote_function_test/jsonfile* || true rm -rf backups || true rm -rf ../minio_files || true rm -rf ../test_db || true diff --git a/tests/python/TestCommand.py b/tests/python/TestCommand.py index ddaa8909..744cc962 100644 --- a/tests/python/TestCommand.py +++ b/tests/python/TestCommand.py @@ -133,37 +133,103 @@ def addEntity( blob=False, # Generic blob check_status=True, ): - addEntity = {} - addEntity["class"] = class_name - - if properties != None: - addEntity["properties"] = properties - if constraints != None: - addEntity["constraints"] = constraints - - query = {} - query["AddEntity"] = addEntity - all_queries = [] + all_blobs = [] + + query = self.create_entity( + "AddEntity", + class_str=class_name, + props=properties, + constraints=constraints, + blob=blob, + ) all_queries.append(query) - if not blob: - response, res_arr = db.query(all_queries) - else: + if blob: blob_arr = [] fd = open("../test_images/brain.png", "rb") blob_arr.append(fd.read()) fd.close() + all_blobs.append(blob_arr) - addEntity["blob"] = True - - response, res_arr = db.query(all_queries, [blob_arr]) + response, res_arr = db.query(all_queries, all_blobs) if check_status: self.assertEqual(response[0]["AddEntity"]["status"], 0) return response, res_arr + def create_descriptor_set(self, name, dim, metric="L2", engine="FaissFlat"): + 
all_queries = [] + + descriptor_set = {} + descriptor_set["name"] = name + descriptor_set["dimensions"] = dim + descriptor_set["metric"] = metric + descriptor_set["engine"] = engine + + query = {} + query["AddDescriptorSet"] = descriptor_set + + all_queries.append(query) + return all_queries + + def create_entity( + self, + command_str, + ref=None, + class_str=None, + props=None, + blob=False, + constraints=None, + unique=False, + results=None, + link=None, + ): + entity = {} + if unique: + entity["unique"] = unique + + if results is not None: + entity["results"] = results + + if link is not None: + entity["link"] = link + + if ref is not None: + entity["_ref"] = ref + + if props not in [None, {}]: + entity["properties"] = props + + if class_str is not None: + entity["class"] = class_str + + if constraints is not None: + entity["constraints"] = constraints + + if blob and command_str == "AddEntity": + entity["blob"] = blob + + query = {command_str: entity} + return query + + # Check the signature of any PNG file + # by going through the first eight bytes of data + # (decimal) 137 80 78 71 13 10 26 10 + # (hexadecimal) 89 50 4e 47 0d 0a 1a 0a + # (ASCII C notation) \211 P N G \r \n \032 \n + def verify_png_signature(self, img): + self.assertFalse(len(img) < 8) + self.assertEqual(img[0], 137) + self.assertEqual(img[1], 80) + self.assertEqual(img[2], 78) + self.assertEqual(img[3], 71) + self.assertEqual(img[4], 13) + self.assertEqual(img[5], 10) + self.assertEqual(img[6], 26) + self.assertEqual(img[7], 10) + def shouldSkipRemotePythonTest(): return unittest.skipIf( os.environ.get("VDMS_SKIP_REMOTE_PYTHON_TESTS") is not None diff --git a/tests/python/TestDescriptors.py b/tests/python/TestDescriptors.py index dcfc3b9a..0d6d4be2 100644 --- a/tests/python/TestDescriptors.py +++ b/tests/python/TestDescriptors.py @@ -29,129 +29,173 @@ class TestDescriptors(TestCommand.TestCommand): - def addSet(self, name, dim, metric, engine): - db = self.create_connection() - - 
all_queries = [] - - descriptor_set = {} - descriptor_set["name"] = name - descriptor_set["dimensions"] = dim - descriptor_set["metric"] = metric - descriptor_set["engine"] = engine + def add_descriptor( + self, + command_str: str, + setname: str, + label: str = None, + ref: int = None, + props: dict = None, + link: dict = None, + k_neighbors: int = None, + constraints: dict = None, + results: dict = None, + ): - query = {} - query["AddDescriptorSet"] = descriptor_set + descriptor: dict = {"set": setname} - all_queries.append(query) + if "Add" in command_str and label is not None: + descriptor["label"] = label - response, img_array = db.query(all_queries) + if ref is not None: + descriptor["_ref"] = ref - # Check success - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) - self.disconnect(db) + if props is not None: + descriptor["properties"] = props - def test_addSet(self): - db = self.create_connection() + if "Add" in command_str and link is not None: + descriptor["link"] = link - all_queries = [] + if "Find" in command_str and k_neighbors is not None: + descriptor["k_neighbors"] = int(k_neighbors) - descriptor_set = {} - descriptor_set["name"] = "features_xd" - descriptor_set["dimensions"] = 1024 * 4 + if "Find" in command_str and constraints is not None: + descriptor["constraints"] = constraints - query = {} - query["AddDescriptorSet"] = descriptor_set + if "Find" in command_str and results is not None: + descriptor["results"] = results - all_queries.append(query) + query = {command_str: descriptor} + return query + def addSet(self, name, dim, metric="L2", engine="FaissFlat"): + db = self.create_connection() + all_queries = self.create_descriptor_set(name, dim, metric, engine) response, img_array = db.query(all_queries) # Check success self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) self.disconnect(db) - def test_addSetAndDescriptors(self): - db = self.create_connection() - + def create_descriptors(self, set_name, dims, total, 
labels=True): all_queries = [] + descriptor_blob = [] - # Add Set - set_name = "features_128d" - dims = 1024 - descriptor_set = {} - descriptor_set["name"] = set_name - descriptor_set["dimensions"] = dims + class_counter = -1 + for i in range(0, total): + if (i % 4) == 0: + class_counter += 1 - query = {} - query["AddDescriptorSet"] = descriptor_set + x = np.ones(dims) + x[2] = 2.34 + i * 20 + x = x.astype("float32") + descriptor_blob.append(x.tobytes()) - all_queries.append(query) + props = {} + props["myid"] = i + 200 - response, img_array = db.query(all_queries) - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) + if labels: + label = "class" + str(class_counter) + else: + label = None - # Add Descriptors - all_queries = [] - descriptor_blob = [] + query = self.add_descriptor("AddDescriptor", set_name, label, props=props) - x = np.zeros(dims) - x = x.astype("float32") - # print type(x[0]) - # print "size: ", len(x.tobytes())/4 - descriptor_blob.append(x.tobytes()) + all_queries.append(query) - descriptor = {} - descriptor["set"] = set_name + return all_queries, descriptor_blob - query = {} - query["AddDescriptor"] = descriptor + def addSet_and_Insert( + self, set_name, dims, total, metric="L2", engine="FaissFlat", labels=True + ): + db = self.create_connection() + all_queries = self.create_descriptor_set(set_name, dims, metric, engine) + response, img_array = db.query(all_queries) - all_queries.append(query) + # Check AddDescriptorSet success + self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) + all_queries, descriptor_blob = self.create_descriptors( + set_name, dims, total, labels + ) response, img_array = db.query(all_queries, [descriptor_blob]) # Check success - self.assertEqual(response[0]["AddDescriptor"]["status"], 0) + for x in range(0, total): + self.assertEqual(response[x]["AddDescriptor"]["status"], 0) self.disconnect(db) - def test_addSetAndDescriptorsDimMismatch(self): + def test_findDescriptorSet(self): db = 
self.create_connection() + name = "testFindDescriptorSet" + dim = 128 + engine = "FaissFlat" + metric = "L2" + + self.addSet(name, dim, metric, engine) all_queries = [] + storeIndex = True + descriptor_set = {} + descriptor_set["set"] = name + descriptor_set["storeIndex"] = storeIndex + query = {} + query["FindDescriptorSet"] = descriptor_set + all_queries.append(query) + + # Execute the query + response, img_array = db.query(all_queries) + + self.assertEqual(response[0]["FindDescriptorSet"]["status"], 0) + self.assertEqual(response[0]["FindDescriptorSet"]["returned"], 1) + self.assertEqual( + response[0]["FindDescriptorSet"]["entities"][0]["VD:engine"], engine + ) + self.assertEqual( + response[0]["FindDescriptorSet"]["entities"][0]["VD:dimensions"], dim + ) + self.assertEqual( + response[0]["FindDescriptorSet"]["entities"][0]["VD:name"], name + ) + + @TestCommand.TestCommand.shouldSkipRemotePythonTest() + def test_addSetAndDescriptors(self): + engines = ["FaissFlat", "FaissIVFFlat", "Flinng", "TileDBDense", "TileDBSparse"] + metrics = ["L2"] + dimensions = [128] + total = 2 + for eng in engines: + for metric in metrics: + for dim in dimensions: + self.addSet_and_Insert( + f"features_{dim}d-{metric}-{eng}", + dim, + total, + metric, + eng, + labels=True, + ) + + def test_addSetAndDescriptorsDimMismatch(self): + db = self.create_connection() # Add Set set_name = "features_64d_dim_mismatched" dims = 64 - descriptor_set = {} - descriptor_set["name"] = set_name - descriptor_set["dimensions"] = dims - - query = {} - query["AddDescriptorSet"] = descriptor_set - - all_queries.append(query) + all_queries = self.create_descriptor_set(set_name, dims) response, img_array = db.query(all_queries) self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) # Add Descriptors - all_queries = [] descriptor_blob = [] - x = np.zeros(dims // 2) x = x.astype("float32") - # print type(x[0]) - # print "size: ", len(x.tobytes())/4 descriptor_blob.append(x.tobytes()) - descriptor = {} 
- descriptor["set"] = set_name - - query = {} - query["AddDescriptor"] = descriptor - + all_queries = [] + query = self.add_descriptor("AddDescriptor", set_name) all_queries.append(query) response, img_array = db.query(all_queries, [descriptor_blob]) @@ -160,151 +204,409 @@ def test_addSetAndDescriptorsDimMismatch(self): self.assertEqual(response[0]["status"], -1) self.assertEqual(response[0]["info"], "Blob Dimensions Mismatch") - # Add Descriptors - all_queries = [] - descriptor_blob = [] + self.disconnect(db) - x = np.zeros(dims)[:-1] - x = x.astype("float32") - # print type(x[0]) - # print "size: ", len(x.tobytes())/4 - descriptor_blob.append(x.tobytes()) + def test_classifyDescriptor(self): + db = self.create_connection() + set_name = "features_128d_4_classify" + dims = 128 + all_queries = self.create_descriptor_set(set_name, dims) + response, img_array = db.query(all_queries) + self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) + + total = 30 + all_queries, descriptor_blob = self.create_descriptors( + set_name, dims, total, labels=True + ) + response, img_array = db.query(all_queries, [descriptor_blob]) + + # Check success + for x in range(0, total - 1): + self.assertEqual(response[x]["AddDescriptor"]["status"], 0) descriptor = {} descriptor["set"] = set_name - query = {} - query["AddDescriptor"] = descriptor + query["ClassifyDescriptor"] = descriptor - all_queries.append(query) + for i in range(2, total // 10, 4): + all_queries = [] + descriptor_blob = [] - response, img_array = db.query(all_queries, [descriptor_blob]) + x = np.ones(dims) + x[2] = 2.34 + i * 20 # Calculated to be of class0 + x = x.astype("float32") + descriptor_blob.append(x.tobytes()) - # Check success - self.assertEqual(response[0]["status"], -1) - self.assertEqual(response[0]["info"], "Blob Dimensions Mismatch") + all_queries.append(query) + + response, img_array = db.query(all_queries, [descriptor_blob]) + + # Check success + 
self.assertEqual(response[0]["ClassifyDescriptor"]["status"], 0) + self.assertEqual( + response[0]["ClassifyDescriptor"]["label"], "class" + str(int(i / 4)) + ) + self.disconnect(db) + + @TestCommand.TestCommand.shouldSkipRemotePythonTest() + def test_addDifferentSets(self): + engines = ["FaissFlat", "FaissIVFFlat", "Flinng", "TileDBDense", "TileDBSparse"] + metrics = ["L2", "IP"] + dimensions = [128, 4075] + for eng in engines: + for metric in metrics: + for dim in dimensions: + self.addSet(f"{dim}-{metric}-{eng}", dim, metric, eng) + + # @unittest.skip("Skipping class until fixed") + def test_findDescByConstraints(self): + # Add Set + set_name = "features_128d_4_findbyConst" + dims = 128 + total = 5 + self.addSet_and_Insert(set_name, dims, total) + + db = self.create_connection() + + all_queries = [] + constraints = {} + constraints["myid"] = ["==", 202] + results = {} + results["list"] = [ + "myid", + ] + query = self.add_descriptor( + "FindDescriptor", set_name, constraints=constraints, results=results + ) + all_queries.append(query) + response, img_array = db.query(all_queries) + # Check success + self.assertEqual(response[0]["FindDescriptor"]["status"], 0) + self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) + self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["myid"], 202) self.disconnect(db) - def test_addDescriptorsx1000(self): + # @unittest.skip("Skipping class until fixed") + def test_findDescUnusedRef(self): + # Add Set + set_name = "features_128d_4_findunusedRef" + dims = 128 + total = 5 + self.addSet_and_Insert(set_name, dims, total) + db = self.create_connection() all_queries = [] + constraints = {} + constraints["myid"] = ["==", 202] + results = {} + results["list"] = ["myid"] + query = self.add_descriptor( + "FindDescriptor", set_name, ref=1, constraints=constraints, results=results + ) + all_queries.append(query) + response, blob_array = db.query(all_queries) + + # Check success + 
self.assertEqual(response[0]["FindDescriptor"]["status"], 0) + self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) + self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["myid"], 202) + self.disconnect(db) + + # @unittest.skip("Skipping class until fixed") + def test_findDescByConst_multiple_blobTrue(self): # Add Set - set_name = "features_128dx1000" + set_name = "features_128d_4_findDescriptors_m_blob" dims = 128 - descriptor_set = {} - descriptor_set["name"] = set_name - descriptor_set["dimensions"] = dims + total = 5 + self.addSet_and_Insert(set_name, dims, total) - query = {} - query["AddDescriptorSet"] = descriptor_set + db = self.create_connection() + all_queries = [] + constraints = {} + constraints["myid"] = ["<=", 202] + results = {} + results["list"] = ["myid"] + results["sort"] = "myid" + results["blob"] = True + query = self.add_descriptor( + "FindDescriptor", set_name, constraints=constraints, results=results + ) all_queries.append(query) - response, img_array = db.query(all_queries) - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) + response, fv_array = db.query(all_queries) + + # Check success + self.assertEqual(response[0]["FindDescriptor"]["status"], 0) + self.assertEqual(response[0]["FindDescriptor"]["returned"], 3) + self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["myid"], 200) + self.assertEqual(response[0]["FindDescriptor"]["entities"][1]["myid"], 201) + self.assertEqual(response[0]["FindDescriptor"]["entities"][2]["myid"], 202) + self.assertEqual(len(fv_array), 3) + self.assertEqual(len(fv_array[0]), dims * 4) + self.disconnect(db) + + # @unittest.skip("Skipping class until fixed") + def test_findDescByBlob(self): + # Add Set + set_name = "findwith_blob" + dims = 128 + total = 5 + self.addSet_and_Insert(set_name, dims, total) + db = self.create_connection() + + kn = 3 all_queries = [] + results = {} + results["list"] = ["myid", "_id", "_distance"] + results["blob"] = True + query = 
self.add_descriptor( + "FindDescriptor", set_name, k_neighbors=kn, results=results + ) + all_queries.append(query) + descriptor_blob = [] + x = np.ones(dims) + x[2] = x[2] = 2.34 + 1 * 20 # 2.34 + 1*20 + x = x.astype("float32") + descriptor_blob.append(x.tobytes()) - total = 2 + response, blob_array = db.query(all_queries, [descriptor_blob]) - for i in range(1, total): - x = np.ones(dims) - x[2] = 2.34 + i * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) + self.assertEqual(len(blob_array), kn) + self.assertEqual(descriptor_blob[0], blob_array[0]) - descriptor = {} - descriptor["set"] = set_name - descriptor["label"] = "classX" + # Check success + self.assertEqual(response[0]["FindDescriptor"]["status"], 0) + self.assertEqual(response[0]["FindDescriptor"]["returned"], kn) + self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["_distance"], 0) + self.assertEqual(response[0]["FindDescriptor"]["entities"][1]["_distance"], 400) + self.assertEqual(response[0]["FindDescriptor"]["entities"][2]["_distance"], 400) + self.disconnect(db) - query = {} - query["AddDescriptor"] = descriptor + # @unittest.skip("Skipping class until fixed") + def test_findDescByBlobNoResults(self): + # Add Set + set_name = "findwith_blobNoResults" + dims = 128 + total = 1 + self.addSet_and_Insert(set_name, dims, total) - all_queries.append(query) + db = self.create_connection() - response, img_array = db.query(all_queries, [descriptor_blob]) + kn = 1 + + all_queries = [] + results = {} + results["blob"] = True + query = self.add_descriptor( + "FindDescriptor", set_name, k_neighbors=kn, results=results + ) + all_queries.append(query) + + descriptor_blob = [] + x = np.ones(dims) + x[2] = 2.34 + x = x.astype("float32") + descriptor_blob.append(x.tobytes()) + + response, blob_array = db.query(all_queries, [descriptor_blob]) # Check success - for x in range(0, total - 1): - self.assertEqual(response[x]["AddDescriptor"]["status"], 0) + 
self.assertEqual(response[0]["FindDescriptor"]["status"], 0) + self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) + self.assertEqual(len(blob_array), kn) + self.assertEqual(descriptor_blob[0], blob_array[0]) self.disconnect(db) - def test_classifyDescriptor(self): + # @unittest.skip("Skipping class until fixed") + def test_findDescByBlobUnusedRef(self): + # Add Set + set_name = "findwith_blobUnusedRef" + dims = 50 + total = 3 + self.addSet_and_Insert(set_name, dims, total) + db = self.create_connection() + kn = 3 + all_queries = [] + results = {} + results["blob"] = True + query = self.add_descriptor( + "FindDescriptor", set_name, ref=1, k_neighbors=kn, results=results + ) + all_queries.append(query) + + descriptor_blob = [] + x = np.ones(dims) + x[2] = 2.34 + 1 * 20 + x = x.astype("float32") + descriptor_blob.append(x.tobytes()) + response, blob_array = db.query(all_queries, [descriptor_blob]) + + # Check success + self.assertEqual(response[0]["FindDescriptor"]["status"], 0) + self.assertEqual(response[0]["FindDescriptor"]["returned"], kn) + self.assertEqual(len(blob_array), kn) + self.assertEqual(descriptor_blob[0], blob_array[0]) + self.disconnect(db) + + # @unittest.skip("Skipping class until fixed") + def test_findDescByBlobAndConstraints(self): # Add Set - set_name = "features_128d_4_classify" + set_name = "findwith_blob_const" dims = 128 - descriptor_set = {} - descriptor_set["name"] = set_name - descriptor_set["dimensions"] = dims + total = 5 + self.addSet_and_Insert(set_name, dims, total) - query = {} - query["AddDescriptorSet"] = descriptor_set + db = self.create_connection() + kn = 3 + + all_queries = [] + results = {} + results["list"] = ["myid", "_id", "_distance"] + results["blob"] = True + constraints = {} + constraints["myid"] = ["==", 202] + query = self.add_descriptor( + "FindDescriptor", + set_name, + k_neighbors=kn, + constraints=constraints, + results=results, + ) all_queries.append(query) + descriptor_blob = [] + x = np.ones(dims) 
+ x[2] = 2.34 + 2 * 20 + x = x.astype("float32") + descriptor_blob.append(x.tobytes()) + + response, blob_array = db.query(all_queries, [descriptor_blob]) + + self.assertEqual(len(blob_array), 1) + self.assertEqual(descriptor_blob[0], blob_array[0]) + + # Check success + self.assertEqual(response[0]["FindDescriptor"]["status"], 0) + self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) + + self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["_distance"], 0) + self.disconnect(db) + + # @unittest.skip("Skipping class until fixed") + def test_findDescByBlobWithLink(self): + # Add Set + set_name = "findwith_blob_link" + dims = 128 + total = 3 + + db = self.create_connection() + all_queries = self.create_descriptor_set(set_name, dims) + response, img_array = db.query(all_queries) self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) all_queries = [] descriptor_blob = [] - total = 2 - class_counter = -1 - for i in range(0, total - 1): + for i in range(0, total): if (i % 4) == 0: class_counter += 1 + reference = i + 2 + x = np.ones(dims) x[2] = 2.34 + i * 20 x = x.astype("float32") descriptor_blob.append(x.tobytes()) - descriptor = {} - descriptor["set"] = set_name - descriptor["label"] = "class" + str(class_counter) + props = {} + props["myid"] = i + 200 + query = self.add_descriptor( + "AddDescriptor", + set_name, + label="class" + str(class_counter), + ref=reference, + props=props, + ) - query = {} - query["AddDescriptor"] = descriptor + all_queries.append(query) + props = {} + props["entity_prop"] = i + 200 + link = {} + link["ref"] = reference + query = self.create_entity( + "AddEntity", class_str="RandomEntity", props=props, link=link + ) all_queries.append(query) response, img_array = db.query(all_queries, [descriptor_blob]) # Check success - for x in range(0, total - 1): + for x in range(0, total - 1, 2): self.assertEqual(response[x]["AddDescriptor"]["status"], 0) + self.assertEqual(response[x + 1]["AddEntity"]["status"], 0) - 
descriptor = {} - descriptor["set"] = set_name + kn = 3 + reference = 102 # because I can - query = {} - query["ClassifyDescriptor"] = descriptor + all_queries = [] + results = {} + results["list"] = ["myid", "_id", "_distance"] + results["blob"] = True + query = self.add_descriptor( + "FindDescriptor", set_name, ref=reference, k_neighbors=kn, results=results + ) + all_queries.append(query) - for i in range(2, total // 10, 4): - all_queries = [] - descriptor_blob = [] + descriptor_blob = [] + x = np.ones(dims) + x[2] = 2.34 + 1 * 20 + x = x.astype("float32") + descriptor_blob.append(x.tobytes()) - x = np.ones(dims) - x[2] = 2.34 + i * 20 # Calculated to be of class1 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) + results = {} + results["list"] = ["entity_prop"] + results["sort"] = "entity_prop" + link = {} + link["ref"] = reference + query = self.create_entity( + "FindEntity", class_str="RandomEntity", results=results, link=link + ) - all_queries.append(query) + all_queries.append(query) - response, img_array = db.query(all_queries, [descriptor_blob]) + response, blob_array = db.query(all_queries, [descriptor_blob]) - # Check success - self.assertEqual(response[0]["ClassifyDescriptor"]["status"], 0) - self.assertEqual( - response[0]["ClassifyDescriptor"]["label"], "class" + str(int(i / 4)) - ) + self.assertEqual(len(blob_array), kn) + # This checks that the received blobs is the same as the inserted. 
+ self.assertEqual(descriptor_blob[0], blob_array[0]) + + # Check success + self.assertEqual(response[0]["FindDescriptor"]["status"], 0) + self.assertEqual(response[0]["FindDescriptor"]["returned"], kn) + + self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["_distance"], 0) + self.assertEqual(response[0]["FindDescriptor"]["entities"][1]["_distance"], 400) + self.assertEqual(response[0]["FindDescriptor"]["entities"][2]["_distance"], 400) + + self.assertEqual(response[1]["FindEntity"]["status"], 0) + self.assertEqual(response[1]["FindEntity"]["returned"], kn) + + self.assertEqual(response[1]["FindEntity"]["entities"][0]["entity_prop"], 200) + self.assertEqual(response[1]["FindEntity"]["entities"][1]["entity_prop"], 201) + self.assertEqual(response[1]["FindEntity"]["entities"][2]["entity_prop"], 202) self.disconnect(db) diff --git a/tests/python/TestEngineDescriptors.py b/tests/python/TestEngineDescriptors.py deleted file mode 100644 index 670fa7a9..00000000 --- a/tests/python/TestEngineDescriptors.py +++ /dev/null @@ -1,221 +0,0 @@ -# -# The MIT License -# -# @copyright Copyright (c) 2017 Intel Corporation -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), -# to deal in the Software without restriction, -# including without limitation the rights to use, copy, modify, -# merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -# ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -# - -import TestCommand -import numpy as np - - -class TestDescriptors(TestCommand.TestCommand): - def addSet(self, name, dim, metric, engine): - db = self.create_connection() - - all_queries = [] - - descriptor_set = {} - descriptor_set["name"] = name - descriptor_set["dimensions"] = dim - descriptor_set["metric"] = metric - descriptor_set["engine"] = engine - - query = {} - query["AddDescriptorSet"] = descriptor_set - - all_queries.append(query) - - response, img_array = db.query(all_queries) - - # Check success - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) - self.disconnect(db) - - @TestCommand.TestCommand.shouldSkipRemotePythonTest() - def test_addDifferentSets(self): - self.addSet("128-L2-FaissFlat", 128, "L2", "FaissFlat") - self.addSet("128-IP-FaissFlat", 128, "IP", "FaissFlat") - self.addSet("128-L2-FaissIVFFlat", 128, "L2", "FaissIVFFlat") - self.addSet("128-IP-FaissIVFFlat", 128, "IP", "FaissIVFFlat") - self.addSet("128-L2-TileDBDense", 128, "L2", "TileDBDense") - self.addSet("128-L2-TileDBSparse", 128, "L2", "TileDBSparse") - self.addSet("128-L2-FLINNG", 128, "L2", "Flinng") - self.addSet("128-IP-FLINNG", 128, "IP", "Flinng") - - self.addSet("4075-L2-FaissFlat", 4075, "L2", "FaissFlat") - self.addSet("4075-IP-FaissFlat", 4075, "IP", "FaissFlat") - self.addSet("4075-L2-FaissIVFFlat", 4075, "L2", "FaissIVFFlat") - self.addSet("4075-IP-FaissIVFFlat", 4075, "IP", "FaissIVFFlat") - self.addSet("4075-L2-TileDBDense", 4075, "L2", "TileDBDense") - self.addSet("4075-L2-FLINNG", 4075, "L2", "Flinng") - self.addSet("4075-IP-FLINNG", 4075, "IP", "Flinng") - - def test_addDescriptorsx1000FaissIVFFlat(self): - db = self.create_connection() - - all_queries = [] - - # Add Set - 
set_name = "faissivfflat_ip_128dx1000" - dims = 128 - descriptor_set = {} - descriptor_set["name"] = set_name - descriptor_set["dimensions"] = dims - descriptor_set["metric"] = "IP" - descriptor_set["engine"] = "FaissIVFFlat" - - query = {} - query["AddDescriptorSet"] = descriptor_set - - all_queries.append(query) - - response, img_array = db.query(all_queries) - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) - - all_queries = [] - descriptor_blob = [] - - total = 2 - - for i in range(1, total): - x = np.ones(dims) - x[2] = 2.34 + i * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - descriptor = {} - descriptor["set"] = set_name - descriptor["label"] = "classX" - - query = {} - query["AddDescriptor"] = descriptor - - all_queries.append(query) - - response, img_array = db.query(all_queries, [descriptor_blob]) - - # Check success - for x in range(0, total - 1): - self.assertEqual(response[x]["AddDescriptor"]["status"], 0) - self.disconnect(db) - - @TestCommand.TestCommand.shouldSkipRemotePythonTest() - def test_addDescriptorsx1000TileDBSparse(self): - db = self.create_connection() - - all_queries = [] - - # Add Set - set_name = "tiledbsparse_l2_128dx1000" - dims = 128 - descriptor_set = {} - descriptor_set["name"] = set_name - descriptor_set["dimensions"] = dims - descriptor_set["metric"] = "L2" - descriptor_set["engine"] = "TileDBSparse" - - query = {} - query["AddDescriptorSet"] = descriptor_set - - all_queries.append(query) - - response, img_array = db.query(all_queries) - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) - - all_queries = [] - descriptor_blob = [] - - total = 2 - - for i in range(1, total): - x = np.ones(dims) - x[2] = 2.34 + i * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - descriptor = {} - descriptor["set"] = set_name - descriptor["label"] = "classX" - - query = {} - query["AddDescriptor"] = descriptor - - all_queries.append(query) - - response, img_array = 
db.query(all_queries, [descriptor_blob]) - - # Check success - for x in range(0, total - 1): - self.assertEqual(response[x]["AddDescriptor"]["status"], 0) - self.disconnect(db) - - @TestCommand.TestCommand.shouldSkipRemotePythonTest() - def test_addDescriptorsx1000TileDBDense(self): - db = self.create_connection() - - all_queries = [] - - # Add Set - set_name = "tiledbdense_l2_128dx1000" - dims = 128 - descriptor_set = {} - descriptor_set["name"] = set_name - descriptor_set["dimensions"] = dims - descriptor_set["metric"] = "L2" - descriptor_set["engine"] = "TileDBDense" - - query = {} - query["AddDescriptorSet"] = descriptor_set - - all_queries.append(query) - - response, img_array = db.query(all_queries) - # print(json.dumps(all_queries, indent=4, sort_keys=False)) - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) - - all_queries = [] - descriptor_blob = [] - - total = 2 - - for i in range(1, total): - x = np.ones(dims) - x[2] = 2.34 + i * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - descriptor = {} - descriptor["set"] = set_name - descriptor["label"] = "classX" - - query = {} - query["AddDescriptor"] = descriptor - - all_queries.append(query) - - response, img_array = db.query(all_queries, [descriptor_blob]) - - # Check success - for x in range(0, total - 1): - self.assertEqual(response[x]["AddDescriptor"]["status"], 0) - self.disconnect(db) diff --git a/tests/python/TestEntities.py b/tests/python/TestEntities.py index dcdcfd99..c4257b0a 100644 --- a/tests/python/TestEntities.py +++ b/tests/python/TestEntities.py @@ -50,17 +50,14 @@ def addSingleEntity(self, thID, results, db): def findEntity(self, thID, results, db): constraints = {} constraints["threadid"] = ["==", thID] - - findEntity = {} - findEntity["constraints"] = constraints - findEntity["class"] = "AwesomePeople" - results = {} results["list"] = ["name", "lastname", "threadid"] - findEntity["results"] = results - - query = {} - query["FindEntity"] = findEntity + 
query = self.create_entity( + "FindEntity", + class_str="AwesomePeople", + constraints=constraints, + results=results, + ) all_queries = [] all_queries.append(query) @@ -156,14 +153,9 @@ def test_addEntityWithLink(self): props["name"] = "Luis" props["lastname"] = "Ferro" props["age"] = 27 - - addEntity = {} - addEntity["_ref"] = 32 - addEntity["properties"] = props - addEntity["class"] = "AwesomePeople" - - query = {} - query["AddEntity"] = addEntity + query = self.create_entity( + "AddEntity", ref=32, class_str="AwesomePeople", props=props + ) all_queries.append(query) @@ -171,21 +163,13 @@ def test_addEntityWithLink(self): props["name"] = "Luis" props["lastname"] = "Bueno" props["age"] = 27 - link = {} link["ref"] = 32 link["direction"] = "in" link["class"] = "Friends" - - addEntity = {} - addEntity["properties"] = props - addEntity["class"] = "AwesomePeople" - addEntity["link"] = link - - img_params = {} - - query = {} - query["AddEntity"] = addEntity + query = self.create_entity( + "AddEntity", class_str="AwesomePeople", props=props, link=link + ) all_queries.append(query) @@ -201,13 +185,9 @@ def test_addfindEntityWrongConstraints(self): all_queries = [] props = {"name": "Luis", "lastname": "Ferro", "age": 25} - addEntity = {} - addEntity["_ref"] = 32 - addEntity["properties"] = props - addEntity["class"] = "SomePeople" - - query = {} - query["AddEntity"] = addEntity + query = self.create_entity( + "AddEntity", class_str="SomePeople", props=props, ref=32 + ) all_queries.append(query) @@ -219,14 +199,12 @@ def test_addfindEntityWrongConstraints(self): # this format is invalid, as each constraint must be an array constraints = {"name": "Luis"} - - entity = {} - entity["constraints"] = constraints - entity["class"] = "SomePeople" - entity["results"] = {"count": ""} - - query = {} - query["FindEntity"] = entity + query = self.create_entity( + "FindEntity", + class_str="SomePeople", + constraints=constraints, + results={"count": ""}, + ) all_queries.append(query) 
@@ -239,7 +217,12 @@ def test_addfindEntityWrongConstraints(self): # Another invalid format constraints = {"name": []} - entity["constraints"] = constraints + query = self.create_entity( + "FindEntity", + class_str="SomePeople", + constraints=constraints, + results={"count": ""}, + ) all_queries = [] all_queries.append(query) @@ -264,12 +247,11 @@ def test_FindWithSortKey(self): props["name"] = "entity_" + str(i) props["id"] = i - entity = {} - entity["properties"] = props - entity["class"] = "Random" - - query = {} - query["AddEntity"] = entity + query = self.create_entity( + "AddEntity", + class_str="Random", + props=props, + ) all_queries.append(query) @@ -285,13 +267,7 @@ def test_FindWithSortKey(self): results["list"] = ["name", "id"] results["sort"] = "id" - entity = {} - entity["results"] = results - entity["class"] = "Random" - - query = {} - query["FindEntity"] = entity - + query = self.create_entity("FindEntity", class_str="Random", results=results) all_queries.append(query) response, blob_arr = db.query(all_queries) @@ -313,13 +289,11 @@ def test_FindWithSortBlock(self): props["name"] = "entity_" + str(i) props["id"] = i - entity = {} - entity["properties"] = props - entity["class"] = "SortBlock" - - query = {} - query["AddEntity"] = entity - + query = self.create_entity( + "AddEntity", + class_str="SortBlock", + props=props, + ) all_queries.append(query) response, blob_arr = db.query(all_queries) @@ -338,13 +312,7 @@ def test_FindWithSortBlock(self): results["list"] = ["name", "id"] results["sort"] = sort - entity = {} - entity["results"] = results - entity["class"] = "SortBlock" - - query = {} - query["FindEntity"] = entity - + query = self.create_entity("FindEntity", class_str="SortBlock", results=results) all_queries.append(query) response, blob_arr = db.query(all_queries) @@ -363,13 +331,7 @@ def test_FindWithSortBlock(self): results["list"] = ["name", "id"] results["sort"] = sort - entity = {} - entity["results"] = results - entity["class"] = 
"SortBlock" - - query = {} - query["FindEntity"] = entity - + query = self.create_entity("FindEntity", class_str="SortBlock", results=results) all_queries.append(query) response, blob_arr = db.query(all_queries) @@ -381,3 +343,98 @@ def test_FindWithSortBlock(self): number_of_inserts - 1 - i, ) db.disconnect() + + def test_addEntityWithBlob(self, thID=0): + db = self.create_connection() + + props = {} + props["name"] = "Luis" + props["lastname"] = "Ferro" + props["age"] = 27 + props["threadid"] = thID + + query = self.create_entity( + "AddEntity", class_str="AwesomePeople", props=props, blob=True + ) + all_queries = [] + all_queries.append(query) + + blob_arr = [] + fd = open("../test_images/brain.png", "rb") + blob_arr.append(fd.read()) + fd.close() + + response, res_arr = db.query(all_queries, [blob_arr]) + + self.assertEqual(response[0]["AddEntity"]["status"], 0) + self.disconnect(db) + + def test_addEntityWithBlobNoBlob(self, thID=0): + db = self.create_connection() + + props = {} + props["name"] = "Luis" + props["lastname"] = "Ferro" + props["age"] = 27 + props["threadid"] = thID + query = self.create_entity( + "AddEntity", class_str="AwesomePeople", props=props, blob=True + ) + + all_queries = [] + all_queries.append(query) + + response, res_arr = db.query(all_queries) + + self.assertEqual(response[0]["status"], -1) + self.assertEqual(response[0]["info"], "Expected blobs: 1. 
Received blobs: 0") + self.disconnect(db) + + def test_addEntityWithBlobAndFind(self, thID=0): + db = self.create_connection() + + props = {} + props["name"] = "Tom" + props["lastname"] = "Slash" + props["age"] = 27 + props["id"] = 45334 + + query = self.create_entity( + "AddEntity", class_str="NotSoAwesome", props=props, blob=True + ) + all_queries = [] + all_queries.append(query) + + blob_arr = [] + fd = open("../test_images/brain.png", "rb") + blob_arr.append(fd.read()) + fd.close() + + response, res_arr = db.query(all_queries, [blob_arr]) + + self.assertEqual(response[0]["AddEntity"]["status"], 0) + + constraints = {} + constraints["id"] = ["==", 45334] + + results = {} + results["blob"] = True + results["list"] = ["name"] + + query = self.create_entity( + "FindEntity", + class_str="NotSoAwesome", + constraints=constraints, + results=results, + ) + all_queries = [] + all_queries.append(query) + + response, res_arr = db.query(all_queries) + + self.assertEqual(response[0]["FindEntity"]["entities"][0]["blob"], True) + + self.assertEqual(len(res_arr), len(blob_arr)) + self.assertEqual(len(res_arr[0]), len(blob_arr[0])) + self.assertEqual((res_arr[0]), (blob_arr[0])) + self.disconnect(db) diff --git a/tests/python/TestEntitiesBlobs.py b/tests/python/TestEntitiesBlobs.py deleted file mode 100644 index bcfe76f1..00000000 --- a/tests/python/TestEntitiesBlobs.py +++ /dev/null @@ -1,141 +0,0 @@ -# -# The MIT License -# -# @copyright Copyright (c) 2017 Intel Corporation -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), -# to deal in the Software without restriction, -# including without limitation the rights to use, copy, modify, -# merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission 
notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -# ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -# - -import TestCommand - - -class TestEntitiesBlob(TestCommand.TestCommand): - def test_addEntityWithBlob(self, thID=0): - db = self.create_connection() - - props = {} - props["name"] = "Luis" - props["lastname"] = "Ferro" - props["age"] = 27 - props["threadid"] = thID - - addEntity = {} - addEntity["properties"] = props - addEntity["class"] = "AwesomePeople" - addEntity["blob"] = True - - query = {} - query["AddEntity"] = addEntity - - all_queries = [] - all_queries.append(query) - - blob_arr = [] - fd = open("../test_images/brain.png", "rb") - blob_arr.append(fd.read()) - fd.close() - - response, res_arr = db.query(all_queries, [blob_arr]) - - self.assertEqual(response[0]["AddEntity"]["status"], 0) - self.disconnect(db) - - def test_addEntityWithBlobNoBlob(self, thID=0): - db = self.create_connection() - - props = {} - props["name"] = "Luis" - props["lastname"] = "Ferro" - props["age"] = 27 - props["threadid"] = thID - - addEntity = {} - addEntity["properties"] = props - addEntity["class"] = "AwesomePeople" - addEntity["blob"] = True - - query = {} - query["AddEntity"] = addEntity - - all_queries = [] - all_queries.append(query) - - response, res_arr = db.query(all_queries) - - self.assertEqual(response[0]["status"], -1) - self.assertEqual(response[0]["info"], "Expected blobs: 1. 
Received blobs: 0") - self.disconnect(db) - - def test_addEntityWithBlobAndFind(self, thID=0): - db = self.create_connection() - - props = {} - props["name"] = "Tom" - props["lastname"] = "Slash" - props["age"] = 27 - props["id"] = 45334 - - addEntity = {} - addEntity["properties"] = props - addEntity["class"] = "NotSoAwesome" - addEntity["blob"] = True - - query = {} - query["AddEntity"] = addEntity - - all_queries = [] - all_queries.append(query) - - blob_arr = [] - fd = open("../test_images/brain.png", "rb") - blob_arr.append(fd.read()) - fd.close() - - response, res_arr = db.query(all_queries, [blob_arr]) - - self.assertEqual(response[0]["AddEntity"]["status"], 0) - - constraints = {} - constraints["id"] = ["==", 45334] - - results = {} - results["blob"] = True - results["list"] = ["name"] - - FindEntity = {} - FindEntity["constraints"] = constraints - FindEntity["class"] = "NotSoAwesome" - FindEntity["results"] = results - - query = {} - query["FindEntity"] = FindEntity - - all_queries = [] - all_queries.append(query) - - response, res_arr = db.query(all_queries) - - self.assertEqual(response[0]["FindEntity"]["entities"][0]["blob"], True) - - self.assertEqual(len(res_arr), len(blob_arr)) - self.assertEqual(len(res_arr[0]), len(blob_arr[0])) - self.assertEqual((res_arr[0]), (blob_arr[0])) - self.disconnect(db) diff --git a/tests/python/TestFindDescriptorSet.py b/tests/python/TestFindDescriptorSet.py deleted file mode 100644 index ea6df170..00000000 --- a/tests/python/TestFindDescriptorSet.py +++ /dev/null @@ -1,55 +0,0 @@ -import TestCommand - - -class TestFindDescriptorSet(TestCommand.TestCommand): - def addSet(self, name, dim, metric, engine): - db = self.create_connection() - - all_queries = [] - descriptor_set = {} - descriptor_set["name"] = name - descriptor_set["dimensions"] = dim - descriptor_set["metric"] = metric - descriptor_set["engine"] = engine - - query = {} - query["AddDescriptorSet"] = descriptor_set - - all_queries.append(query) - - # Execute 
the query - response, img_array = db.query(all_queries) - - # Check if the query was successful (you can add your own checks here) - if "AddDescriptorSet" in response[0]: - status = response[0]["AddDescriptorSet"].get("status") - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) - - def test_findDescriptorSet(self): - db = self.create_connection() - name = "testFindDescriptorSet-new" - dim = 128 - engine = "FaissFlat" - metric = "L2" - - self.addSet(name, dim, metric, engine) - - all_queries = [] - - storeIndex = True - - descriptor_set = {} - descriptor_set["set"] = name - descriptor_set["storeIndex"] = storeIndex - - query = {} - - query["FindDescriptorSet"] = descriptor_set - - all_queries.append(query) - - # Execute the query - response, img_array = db.query(all_queries) - - self.assertEqual(response[0]["FindDescriptorSet"]["status"], 0) - self.assertEqual(response[0]["FindDescriptorSet"]["returned"], 1) diff --git a/tests/python/TestFindDescriptors.py b/tests/python/TestFindDescriptors.py deleted file mode 100644 index fd424882..00000000 --- a/tests/python/TestFindDescriptors.py +++ /dev/null @@ -1,650 +0,0 @@ -# -# The MIT License -# -# @copyright Copyright (c) 2017 Intel Corporation -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), -# to deal in the Software without restriction, -# including without limitation the rights to use, copy, modify, -# merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -# ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -# - -import TestCommand -import numpy as np -import unittest - - -class TestFindDescriptors(TestCommand.TestCommand): - def create_set_and_insert(self, set_name, dims, total, labels=True): - db = self.create_connection() - - all_queries = [] - - # Add Set - descriptor_set = {} - descriptor_set["name"] = set_name - descriptor_set["dimensions"] = dims - - query = {} - query["AddDescriptorSet"] = descriptor_set - - all_queries.append(query) - - response, img_array = db.query(all_queries) - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) - - all_queries = [] - descriptor_blob = [] - - class_counter = -1 - for i in range(0, total): - if (i % 4) == 0: - class_counter += 1 - - x = np.ones(dims) - x[2] = 2.34 + i * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - descriptor = {} - descriptor["set"] = set_name - - if labels: - descriptor["label"] = "class" + str(class_counter) - - props = {} - props["myid"] = i + 200 - descriptor["properties"] = props - - query = {} - query["AddDescriptor"] = descriptor - - all_queries.append(query) - - response, img_array = db.query(all_queries, [descriptor_blob]) - - # Check success - for x in range(0, total): - self.assertEqual(response[x]["AddDescriptor"]["status"], 0) - - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByConstraints(self): - # Add Set - set_name = "features_128d_4_findbyConst" - dims = 128 - total = 5 - self.create_set_and_insert(set_name, dims, total) - - db = 
self.create_connection() - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - - constraints = {} - constraints["myid"] = ["==", 202] - finddescriptor["constraints"] = constraints - - results = {} - results["list"] = [ - "myid", - ] - finddescriptor["results"] = results - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] - all_queries.append(query) - - response, img_array = db.query(all_queries) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) - self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["myid"], 202) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescUnusedRef(self): - # Add Set - set_name = "features_128d_4_findunusedRef" - dims = 128 - total = 5 - self.create_set_and_insert(set_name, dims, total) - - db = self.create_connection() - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - - constraints = {} - constraints["myid"] = ["==", 202] - finddescriptor["constraints"] = constraints - - results = {} - results["list"] = ["myid"] - finddescriptor["results"] = results - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] - all_queries.append(query) - - response, blob_array = db.query(all_queries) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) - self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["myid"], 202) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByConst_get_id(self): - # Add Set - set_name = "features_128d_4_findDescriptors_id" - dims = 128 - total = 5 - self.create_set_and_insert(set_name, dims, total) - - db = self.create_connection() - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - - constraints = {} - 
constraints["myid"] = ["==", 202] - finddescriptor["constraints"] = constraints - - results = {} - results["list"] = ["myid", "_label", "_id"] - finddescriptor["results"] = results - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] - all_queries.append(query) - - response, img_array = db.query(all_queries) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) - self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["myid"], 202) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByConst_blobTrue(self): - # Add Set - set_name = "features_128d_4_findDescriptors_id_blob" - dims = 128 - total = 5 - self.create_set_and_insert(set_name, dims, total) - - db = self.create_connection() - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - - constraints = {} - constraints["myid"] = ["==", 202] - finddescriptor["constraints"] = constraints - - results = {} - results["list"] = ["myid", "_label", "_id"] - results["blob"] = True - finddescriptor["results"] = results - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] - all_queries.append(query) - - response, fv_array = db.query(all_queries) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) - self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["myid"], 202) - self.assertEqual(len(fv_array), 1) - self.assertEqual(len(fv_array[0]), dims * 4) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByConst_multiple_blobTrue(self): - # Add Set - set_name = "features_128d_4_findDescriptors_m_blob" - dims = 128 - total = 5 - self.create_set_and_insert(set_name, dims, total) - - db = self.create_connection() - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] 
= set_name - - constraints = {} - constraints["myid"] = ["<=", 202] - finddescriptor["constraints"] = constraints - - results = {} - results["list"] = ["myid"] - results["sort"] = "myid" - results["blob"] = True - finddescriptor["results"] = results - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] - all_queries.append(query) - - response, fv_array = db.query(all_queries) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], 3) - self.assertEqual(response[0]["FindDescriptor"]["entities"][1]["myid"], 201) - self.assertEqual(len(fv_array), 3) - self.assertEqual(len(fv_array[0]), dims * 4) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByBlob(self): - # Add Set - set_name = "findwith_blob" - dims = 128 - total = 5 - self.create_set_and_insert(set_name, dims, total) - - db = self.create_connection() - - kn = 3 - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - - results = {} - results["list"] = ["myid", "_id", "_distance"] - results["blob"] = True - finddescriptor["results"] = results - finddescriptor["k_neighbors"] = kn - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] - all_queries.append(query) - - descriptor_blob = [] - x = np.ones(dims) - x[2] = x[2] = 2.34 + 1 * 20 # 2.34 + 1*20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - response, blob_array = db.query(all_queries, [descriptor_blob]) - - self.assertEqual(len(blob_array), kn) - self.assertEqual(descriptor_blob[0], blob_array[0]) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], kn) - self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["_distance"], 0) - self.assertEqual(response[0]["FindDescriptor"]["entities"][1]["_distance"], 400) - 
self.assertEqual(response[0]["FindDescriptor"]["entities"][2]["_distance"], 400) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByBlobNoLabels(self): - # Add Set - set_name = "findwith_blob_no_labels" - dims = 128 - total = 5 - self.create_set_and_insert(set_name, dims, total, labels=False) - - db = self.create_connection() - - kn = 3 - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - finddescriptor["_ref"] = 1 - - results = {} - results["blob"] = True - finddescriptor["results"] = results - finddescriptor["k_neighbors"] = kn - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] - all_queries.append(query) - - descriptor_blob = [] - x = np.ones(dims) - x[2] = 2.34 + 1 * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - response, blob_array = db.query(all_queries, [descriptor_blob]) - - self.assertEqual(len(blob_array), kn) - self.assertEqual(descriptor_blob[0], blob_array[0]) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], kn) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByBlobNoResults(self): - # Add Set - set_name = "findwith_blobNoResults" - dims = 128 - total = 1 - self.create_set_and_insert(set_name, dims, total) - - db = self.create_connection() - - kn = 1 - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - - results = {} - results["blob"] = True - finddescriptor["results"] = results - finddescriptor["k_neighbors"] = kn - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] - all_queries.append(query) - - descriptor_blob = [] - x = np.ones(dims) - x[2] = 2.34 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - response, blob_array = db.query(all_queries, [descriptor_blob]) - # Check success - 
self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) - self.assertEqual(len(blob_array), kn) - self.assertEqual(descriptor_blob[0], blob_array[0]) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByBlobUnusedRef(self): - # Add Set - set_name = "findwith_blobUnusedRef" - dims = 50 - total = 3 - self.create_set_and_insert(set_name, dims, total) - - db = self.create_connection() - - kn = 3 - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - finddescriptor["_ref"] = 1 - - results = {} - results["blob"] = True - finddescriptor["results"] = results - finddescriptor["k_neighbors"] = kn - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] - all_queries.append(query) - - descriptor_blob = [] - x = np.ones(dims) - x[2] = 2.34 + 1 * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - response, blob_array = db.query(all_queries, [descriptor_blob]) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], kn) - self.assertEqual(len(blob_array), kn) - self.assertEqual(descriptor_blob[0], blob_array[0]) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByBlobAndConstraints(self): - # Add Set - set_name = "findwith_blob_const" - dims = 128 - total = 5 - self.create_set_and_insert(set_name, dims, total) - - db = self.create_connection() - - kn = 3 - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - finddescriptor["k_neighbors"] = kn - - results = {} - results["list"] = ["myid", "_id", "_distance"] - results["blob"] = True - finddescriptor["results"] = results - - constraints = {} - constraints["myid"] = ["==", 202] - finddescriptor["constraints"] = constraints - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries = [] 
- all_queries.append(query) - - descriptor_blob = [] - x = np.ones(dims) - x[2] = 2.34 + 2 * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - response, blob_array = db.query(all_queries, [descriptor_blob]) - - self.assertEqual(len(blob_array), 1) - self.assertEqual(descriptor_blob[0], blob_array[0]) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], 1) - - self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["_distance"], 0) - self.disconnect(db) - - # @unittest.skip("Skipping class until fixed") - def test_findDescByBlobWithLink(self): - # Add Set - set_name = "findwith_blob_link" - dims = 128 - total = 3 - - db = self.create_connection() - - all_queries = [] - - # Add Set - descriptor_set = {} - descriptor_set["name"] = set_name - descriptor_set["dimensions"] = dims - - query = {} - query["AddDescriptorSet"] = descriptor_set - - all_queries.append(query) - - response, img_array = db.query(all_queries) - self.assertEqual(response[0]["AddDescriptorSet"]["status"], 0) - - all_queries = [] - descriptor_blob = [] - - class_counter = -1 - for i in range(0, total): # -1): - if (i % 4) == 0: - class_counter += 1 - - reference = i + 2 - - x = np.ones(dims) - x[2] = 2.34 + i * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - descriptor = {} - descriptor["set"] = set_name - descriptor["label"] = "class" + str(class_counter) - - props = {} - props["myid"] = i + 200 - descriptor["properties"] = props - descriptor["_ref"] = reference - - query = {} - query["AddDescriptor"] = descriptor - - all_queries.append(query) - - props = {} - props["entity_prop"] = i + 200 - - addEntity = {} - addEntity["properties"] = props - addEntity["class"] = "randomentity" - - link = {} - link["ref"] = reference - addEntity["link"] = link - - query = {} - query["AddEntity"] = addEntity - - all_queries.append(query) - - response, img_array = 
db.query(all_queries, [descriptor_blob]) - - # Check success - for x in range(0, total - 1, 2): - self.assertEqual(response[x]["AddDescriptor"]["status"], 0) - self.assertEqual(response[x + 1]["AddEntity"]["status"], 0) - - kn = 3 - reference = 102 # because I can - - all_queries = [] - - finddescriptor = {} - finddescriptor["set"] = set_name - - results = {} - results["list"] = ["myid", "_id", "_distance"] - results["blob"] = True - finddescriptor["results"] = results - finddescriptor["k_neighbors"] = kn - finddescriptor["_ref"] = reference - - query = {} - query["FindDescriptor"] = finddescriptor - - all_queries.append(query) - - descriptor_blob = [] - x = np.ones(dims) - x[2] = 2.34 + 1 * 20 - x = x.astype("float32") - descriptor_blob.append(x.tobytes()) - - results = {} - results["list"] = ["entity_prop"] - results["sort"] = "entity_prop" - - link = {} - link["ref"] = reference - - findEntity = {} - findEntity["results"] = results - findEntity["class"] = "randomentity" - findEntity["link"] = link - - query = {} - query["FindEntity"] = findEntity - - all_queries.append(query) - - response, blob_array = db.query(all_queries, [descriptor_blob]) - - self.assertEqual(len(blob_array), kn) - # This checks that the received blobs is the same as the inserted. 
- self.assertEqual(descriptor_blob[0], blob_array[0]) - - # Check success - self.assertEqual(response[0]["FindDescriptor"]["status"], 0) - self.assertEqual(response[0]["FindDescriptor"]["returned"], kn) - - self.assertEqual(response[0]["FindDescriptor"]["entities"][0]["_distance"], 0) - self.assertEqual(response[0]["FindDescriptor"]["entities"][1]["_distance"], 400) - self.assertEqual(response[0]["FindDescriptor"]["entities"][2]["_distance"], 400) - - self.assertEqual(response[1]["FindEntity"]["status"], 0) - self.assertEqual(response[1]["FindEntity"]["returned"], kn) - - self.assertEqual(response[1]["FindEntity"]["entities"][0]["entity_prop"], 200) - self.assertEqual(response[1]["FindEntity"]["entities"][1]["entity_prop"], 201) - self.assertEqual(response[1]["FindEntity"]["entities"][2]["entity_prop"], 202) - self.disconnect(db) diff --git a/tests/python/TestImages.py b/tests/python/TestImages.py index ef18fa8c..4667e9b4 100644 --- a/tests/python/TestImages.py +++ b/tests/python/TestImages.py @@ -29,21 +29,50 @@ class TestImages(TestCommand.TestCommand): - # Check the signature of any PNG file - # by going through the first eight bytes of data - # (decimal) 137 80 78 71 13 10 26 10 - # (hexadecimal) 89 50 4e 47 0d 0a 1a 0a - # (ASCII C notation) \211 P N G \r \n \032 \n - def verify_png_signature(self, img): - self.assertFalse(len(img) < 8) - self.assertEqual(img[0], 137) - self.assertEqual(img[1], 80) - self.assertEqual(img[2], 78) - self.assertEqual(img[3], 71) - self.assertEqual(img[4], 13) - self.assertEqual(img[5], 10) - self.assertEqual(img[6], 26) - self.assertEqual(img[7], 10) + def create_image( + self, + command_str, + ref=None, + format=None, + props=None, + ops=None, + constraints=None, + unique=False, + results=None, + link=None, + collections=None, + ): + entity = {} + + if ref is not None: + entity["_ref"] = ref + + if format is not None and command_str == "AddImage": + entity["format"] = format + + if unique and command_str == "FindImage": + 
entity["unique"] = unique + + if results is not None and command_str == "FindImage": + entity["results"] = results + + if constraints is not None: + entity["constraints"] = constraints + + if link is not None: + entity["link"] = link + + if collections is not None: + entity["collections"] = collections + + if ops not in [None, {}, []]: + entity["operations"] = ops + + if props not in [None, {}, []] and command_str in ["AddImage", "UpdateImage"]: + entity["properties"] = props + + query = {command_str: entity} + return query # Method to insert one image def insertImage(self, db, props=None, collections=None, format="png"): @@ -54,26 +83,22 @@ def insertImage(self, db, props=None, collections=None, format="png"): imgs_arr.append(fd.read()) fd.close() - img_params = {} - # adds some prop if not props is None: props["test_case"] = "test_case_prop" - img_params["properties"] = props op_params_resize = {} op_params_resize["height"] = 512 op_params_resize["width"] = 512 op_params_resize["type"] = "resize" - img_params["operations"] = [op_params_resize] - - if not collections is None: - img_params["collections"] = collections - - img_params["format"] = format - - query = {} - query["AddImage"] = img_params + query = self.create_image( + "AddImage", + ref=12, + format=format, + ops=[op_params_resize], + props=props, + collections=collections, + ) all_queries.append(query) @@ -98,13 +123,11 @@ def test_JPG_addImage_Without_operations(self): props = {} props["name"] = "brain_" + str(i) props["doctor"] = "Dr. Strange Love" - - img_params = {} - img_params["properties"] = props - img_params["format"] = "jpg" - - query = {} - query["AddImage"] = img_params + query = self.create_image( + "AddImage", + format="jpg", + props=props, + ) all_queries.append(query) @@ -132,13 +155,11 @@ def test_PNG_addImage_Without_operations(self): props["name"] = "brain_" + str(i) props["doctor"] = "Dr. 
Strange Love" - img_params = {} - img_params["properties"] = props - img_params["format"] = "png" - - query = {} - query["AddImage"] = img_params - + query = self.create_image( + "AddImage", + format="png", + props=props, + ) all_queries.append(query) response, img_array = db.query(all_queries, [imgs_arr]) @@ -172,14 +193,12 @@ def test_addImage(self): props = {} props["name"] = "test_brain_" + str(i) props["doctor"] = "Dr. Strange Love" - - img_params = {} - img_params["properties"] = props - img_params["operations"] = [op_params_resize] - img_params["format"] = "png" - - query = {} - query["AddImage"] = img_params + query = self.create_image( + "AddImage", + format="png", + props=props, + ops=[op_params_resize], + ) all_insert_queries.append(query) @@ -191,12 +210,10 @@ def test_addImage(self): constraints = {} constraints["name"] = ["==", "test_brain_" + str(i)] - img_params = {} - img_params["constraints"] = constraints - - query = {} - query["FindImage"] = img_params - + query = self.create_image( + "FindImage", + constraints=constraints, + ) all_find_queries.append(query) response_from_find, img_found_array = db.query(all_find_queries) @@ -241,14 +258,12 @@ def test_findEntityImage(self): results = {} results["list"] = ["name"] - img_params = {} - img_params["constraints"] = constraints - img_params["results"] = results - img_params["class"] = "VD:IMG" - - query = {} - query["FindEntity"] = img_params - + query = self.create_entity( + "FindEntity", + class_str="VD:IMG", + results=results, + constraints=constraints, + ) all_queries.append(query) response, _ = db.query(all_queries) @@ -283,12 +298,10 @@ def test_findImage(self): constraints = {} constraints["name"] = ["==", filenames[i]] - img_params = {} - img_params["constraints"] = constraints - - query = {} - query["FindImage"] = img_params - + query = self.create_image( + "FindImage", + constraints=constraints, + ) all_queries.append(query) response, img_array = db.query(all_queries) @@ -327,13 +340,9 @@ 
def test_findImageResults(self): results = {} results["list"] = ["name"] - img_params = {} - img_params["constraints"] = constraints - img_params["results"] = results - - query = {} - query["FindImage"] = img_params - + query = self.create_image( + "FindImage", constraints=constraints, results=results + ) all_queries.append(query) response, img_array = db.query(all_queries) @@ -364,14 +373,9 @@ def test_addImageWithLink(self): props["lastname"] = "Ferro" props["age"] = 27 - addEntity = {} - addEntity["_ref"] = 32 - addEntity["properties"] = props - addEntity["class"] = "AwesomePeople" - - query = {} - query["AddEntity"] = addEntity - + query = self.create_entity( + "AddEntity", class_str="AwesomePeople", ref=32, props=props + ) all_queries.append(query) props = {} @@ -384,20 +388,18 @@ def test_addImageWithLink(self): link["direction"] = "in" link["class"] = "Friends" - addImage = {} - addImage["properties"] = props - addImage["link"] = link - addImage["format"] = "png" - imgs_arr = [] fd = open("../test_images/brain.png", "rb") imgs_arr.append(fd.read()) fd.close() - query = {} - query["AddImage"] = addImage - + query = self.create_image( + "AddImage", + format="png", + link=link, + props=props, + ) all_queries.append(query) # Execute the test @@ -426,12 +428,7 @@ def test_findImage_multiple_results(self): results = {} results["list"] = ["name"] - img_params = {} - img_params["constraints"] = constraints - - query = {} - query["FindImage"] = img_params - + query = self.create_image("FindImage", constraints=constraints, results=results) all_queries = [] all_queries.append(query) @@ -470,13 +467,9 @@ def test_findImageNoBlob(self): results["blob"] = False results["list"] = ["name"] - img_params = {} - img_params["constraints"] = constraints - img_params["results"] = results - - query = {} - query["FindImage"] = img_params - + query = self.create_image( + "FindImage", constraints=constraints, results=results + ) all_queries.append(query) # Execute the tests @@ 
-517,14 +510,9 @@ def test_findImageRefNoBlobNoPropsResults(self): results["blob"] = False # results["list"] = ["name", "id"] - img_params = {} - img_params["constraints"] = constraints - img_params["results"] = results - img_params["_ref"] = 22 + i - - query = {} - query["FindImage"] = img_params - + query = self.create_image( + "FindImage", ref=22 + i, constraints=constraints, results=results + ) all_queries.append(query) # Execute the tests response, img_array = db.query(all_queries) @@ -556,13 +544,7 @@ def test_updateImage(self): props = {} props["name"] = "simg_update_0" - img_params = {} - img_params["constraints"] = constraints - img_params["properties"] = props - - query = {} - query["UpdateImage"] = img_params - + query = self.create_image("UpdateImage", constraints=constraints, props=props) all_queries.append(query) # Execute the tests @@ -600,13 +582,9 @@ def test_zFindImageWithCollection(self): results = {} results["list"] = ["name"] - img_params = {} - img_params["collections"] = ["brainScans"] - img_params["results"] = results - - query = {} - query["FindImage"] = img_params - + query = self.create_image( + "FindImage", collections=["brainScans"], results=results + ) all_queries.append(query) # Execute the tests diff --git a/tests/python/TestRetail.py b/tests/python/TestRetail.py index c69c4acd..c427ee70 100644 --- a/tests/python/TestRetail.py +++ b/tests/python/TestRetail.py @@ -38,18 +38,7 @@ class TestEntities(TestCommand.TestCommand): def add_descriptor_set(self, name, dim): db = self.create_connection() - - all_queries = [] - - descriptor_set = {} - descriptor_set["name"] = name - descriptor_set["dimensions"] = dim - - query = {} - query["AddDescriptorSet"] = descriptor_set - - all_queries.append(query) - + all_queries = self.create_descriptor_set(name, dim) response, img_array = db.query(all_queries) # Check success @@ -195,6 +184,10 @@ def single(self, thID, db, results): results[thID] = 0 + # The following test fails: + # It stalls without 
error sometimes, need further investigation + # There are many LockTimeout errors reported + @unittest.skip("Skipping the test until it is fixed") def test_concurrent(self): self.build_store() self.add_descriptor_set(name, dim) diff --git a/tests/python/TestVideos.py b/tests/python/TestVideos.py index bc3b8041..4070c595 100644 --- a/tests/python/TestVideos.py +++ b/tests/python/TestVideos.py @@ -46,21 +46,62 @@ def verify_mp4_signature(self, vid): self.assertEqual(vid[10], 111) self.assertEqual(vid[11], 109) - # Check the signature of any PNG file - # by going through the first eight bytes of data - # (decimal) 137 80 78 71 13 10 26 10 - # (hexadecimal) 89 50 4e 47 0d 0a 1a 0a - # (ASCII C notation) \211 P N G \r \n \032 \n - def verify_png_signature(self, img): - self.assertFalse(len(img) < 8) - self.assertEqual(img[0], 137) - self.assertEqual(img[1], 80) - self.assertEqual(img[2], 78) - self.assertEqual(img[3], 71) - self.assertEqual(img[4], 13) - self.assertEqual(img[5], 10) - self.assertEqual(img[6], 26) - self.assertEqual(img[7], 10) + def create_video( + self, + command_str, + ref=None, + codec=None, + container=None, + unique=False, + from_file_path=None, + is_local_file=False, + index_frames=False, + props=None, + constraints=None, + results=None, + ops=None, + link=None, + ): + entity = {} + + if ref is not None: + entity["_ref"] = ref + + if codec is not None: + entity["codec"] = codec + + if container is not None: + entity["container"] = container + + if from_file_path and command_str == "AddVideo": + entity["from_file_path"] = from_file_path + + if is_local_file and command_str == "AddVideo": + entity["is_local_file"] = is_local_file + + if index_frames and command_str == "AddVideo": + entity["index_frames"] = index_frames + + if unique and command_str == "FindVideo": + entity["unique"] = unique + + if results is not None and command_str == "FindVideo": + entity["results"] = results + + if constraints is not None: + entity["constraints"] = constraints + 
+ if link is not None: + entity["link"] = link + + if ops not in [None, {}, []]: + entity["operations"] = ops + + if props not in [None, {}, []] and command_str in ["AddVideo", "UpdateVideo"]: + entity["properties"] = props + + query = {command_str: entity} + return query # Method to insert one video def insertVideo(self, db, props=None): @@ -71,19 +112,13 @@ def insertVideo(self, db, props=None): video_arr.append(fd.read()) fd.close() - video_parms = {} - # adds some prop if not props is None: props["test_case"] = "test_case_prop" - video_parms["properties"] = props - - video_parms["codec"] = "h264" - video_parms["container"] = "mp4" - - query = {} - query["AddVideo"] = video_parms + query = self.create_video( + "AddVideo", props=props, codec="h264", container="mp4" + ) all_queries.append(query) response, _ = db.query(all_queries, [video_arr]) @@ -115,13 +150,9 @@ def test_addVideo(self): props["name"] = prefix_name + str(i) props["doctor"] = "Dr. Strange Love" - video_parms = {} - video_parms["properties"] = props - video_parms["codec"] = "h264" - - query = {} - query["AddVideo"] = video_parms - + query = self.create_video( + "AddVideo", props=props, codec="h264", ops=[op_params_resize] + ) all_queries_to_add.append(query) response_to_add, obj_to_add_array = db.query(all_queries_to_add, [video_arr]) @@ -133,12 +164,10 @@ def test_addVideo(self): constraints = {} constraints["name"] = ["==", prefix_name + str(i)] - video_parms = {} - video_parms["constraints"] = constraints - - query = {} - query["FindVideo"] = video_parms - + query = self.create_video( + "FindVideo", + constraints=constraints, + ) all_queries_to_find.append(query) response_to_find, vid_array_to_find = db.query(all_queries_to_find) @@ -166,13 +195,9 @@ def test_addVideoFromLocalFile_invalid_command(self): with open("../test_videos/Megamind.avi", "rb") as fd: video_blob = fd.read() - video_params = {} - video_params["from_file_path"] = "BigFile.mp4" - video_params["codec"] = "h264" - - query = {} - 
query["AddVideo"] = video_params - + query = self.create_video( + "AddVideo", codec="h264", from_file_path="BigFile.mp4" + ) response, _ = db.query([query], [[video_blob]]) self.disconnect(db) @@ -181,13 +206,9 @@ def test_addVideoFromLocalFile_invalid_command(self): def test_addVideoFromLocalFile_file_not_found(self): db = self.create_connection() - video_params = {} - video_params["from_file_path"] = "BigFile.mp4" - video_params["codec"] = "h264" - - query = {} - query["AddVideo"] = video_params - + query = self.create_video( + "AddVideo", codec="h264", from_file_path="BigFile.mp4" + ) response, _ = db.query([query], [[]]) self.disconnect(db) @@ -202,13 +223,7 @@ def test_addVideoFromLocalFile_success(self): tmp_filepath = "Megamind.mp4" shutil.copy2(source_file, tmp_filepath) - video_params = {} - video_params["from_file_path"] = tmp_filepath - video_params["codec"] = "h264" - - query = {} - query["AddVideo"] = video_params - + query = self.create_video("AddVideo", codec="h264", from_file_path=tmp_filepath) response, _ = db.query([query], [[]]) self.disconnect(db) @@ -228,25 +243,21 @@ def test_extractKeyFrames(self): props = {} props["name"] = video_name - video_params = {} - video_params["index_frames"] = True - video_params["properties"] = props - video_params["codec"] = "h264" - - query = {} - query["AddVideo"] = video_params - + query = self.create_video( + "AddVideo", + codec="h264", + props=props, + index_frames=True, + ) response, _ = db.query([query], [[video_blob]]) self.assertEqual(response[0]["AddVideo"]["status"], 0) - entity = {} - entity["class"] = "VD:KF" - entity["results"] = {"count": ""} - - query = {} - query["FindEntity"] = entity - + query = self.create_entity( + "FindEntity", + class_str="VD:KF", + results={"count": ""}, + ) response, _ = db.query([query]) self.disconnect(db) @@ -273,12 +284,10 @@ def test_findVideo(self): constraints = {} constraints["name"] = ["==", prefix_name + str(i)] - video_parms = {} - video_parms["constraints"] = 
constraints - - query = {} - query["FindVideo"] = video_parms - + query = self.create_video( + "FindVideo", + constraints=constraints, + ) all_queries.append(query) response, vid_array = db.query(all_queries) @@ -453,13 +462,9 @@ def test_findVideoResults(self): results = {} results["list"] = ["name"] - video_parms = {} - video_parms["constraints"] = constraints - video_parms["results"] = results - - query = {} - query["FindVideo"] = video_parms - + query = self.create_video( + "FindVideo", constraints=constraints, results=results + ) all_queries.append(query) response, vid_array = db.query(all_queries) @@ -483,14 +488,12 @@ def test_addVideoWithLink(self): props["lastname"] = "Ferro" props["age"] = 27 - addEntity = {} - addEntity["_ref"] = 32 - addEntity["properties"] = props - addEntity["class"] = "AwPeopleVid" - - query = {} - query["AddEntity"] = addEntity - + query = self.create_entity( + "AddEntity", + ref=32, + class_str="AwPeopleVid", + props=props, + ) all_queries.append(query) props = {} @@ -503,19 +506,13 @@ def test_addVideoWithLink(self): link["direction"] = "in" link["class"] = "Friends" - addVideo = {} - addVideo["properties"] = props - addVideo["link"] = link - imgs_arr = [] fd = open("../test_videos/Megamind.avi", "rb") imgs_arr.append(fd.read()) fd.close() - query = {} - query["AddVideo"] = addVideo - + query = self.create_video("AddVideo", props=props, link=link) all_queries.append(query) add_video_response, _ = db.query(all_queries, [imgs_arr]) @@ -526,12 +523,7 @@ def test_addVideoWithLink(self): constraints["name"] = ["==", "Luis"] constraints["lastname"] = ["==", "Malo"] - video_parms = {} - video_parms["constraints"] = constraints - - query = {} - query["FindVideo"] = video_parms - + query = self.create_video("FindVideo", constraints=constraints) all_queries.append(query) find_video_response, find_video_array = db.query(all_queries) @@ -562,12 +554,7 @@ def test_findVid_multiple_results(self): results = {} results["list"] = ["name"] - 
img_params = {} - img_params["constraints"] = constraints - - query = {} - query["FindVideo"] = img_params - + query = self.create_video("FindVideo", constraints=constraints) all_queries = [] all_queries.append(query) @@ -602,13 +589,9 @@ def test_findVideoNoBlob(self): results["blob"] = False results["list"] = ["name"] - img_params = {} - img_params["constraints"] = constraints - img_params["results"] = results - - query = {} - query["FindVideo"] = img_params - + query = self.create_video( + "FindVideo", constraints=constraints, results=results + ) all_queries.append(query) response, img_array = db.query(all_queries) @@ -637,13 +620,7 @@ def test_updateVideo(self): props = {} props["name"] = "simg_update_0" - img_params = {} - img_params["constraints"] = constraints - img_params["properties"] = props - - query = {} - query["UpdateVideo"] = img_params - + query = self.create_video("UpdateVideo", props=props, constraints=constraints) all_queries.append(query) response, img_array = db.query(all_queries) @@ -654,12 +631,7 @@ def test_updateVideo(self): constraints = {} constraints["name"] = ["==", "simg_update_0"] - video_parms = {} - video_parms["constraints"] = constraints - - query = {} - query["FindVideo"] = video_parms - + query = self.create_video("FindVideo", constraints=constraints) all_queries.append(query) find_response, find_vid_array = db.query(all_queries) diff --git a/tests/python/run_python_aws_tests.sh b/tests/python/run_python_aws_tests.sh index 0f3f3ed6..75f81511 100755 --- a/tests/python/run_python_aws_tests.sh +++ b/tests/python/run_python_aws_tests.sh @@ -122,7 +122,7 @@ function execute_commands() { ./../../build/vdms -cfg config-aws-tests.json > screen.log 2> log.log & py_unittest_pid=$! - python3 prep.py + python3 ../tls_test/prep_certs.py ./../../build/vdms -cfg config-tls-aws-tests.json > screen-tls.log 2> log-tls.log & py_tls_unittest_pid=$! 
diff --git a/tests/python/run_python_tests.sh b/tests/python/run_python_tests.sh index dc15786c..ecc14d05 100755 --- a/tests/python/run_python_tests.sh +++ b/tests/python/run_python_tests.sh @@ -77,7 +77,7 @@ function execute_commands() { ./../../build/vdms -cfg config-tests.json > screen.log 2> log.log & py_unittest_pid=$! - python3 prep.py + python3 ../tls_test/prep_certs.py ./../../build/vdms -cfg config-tls-tests.json > screen-tls.log 2> log-tls.log & py_tls_unittest_pid=$! diff --git a/tests/remote_function_test/functions/metadata.py b/tests/remote_function_test/functions/metadata.py new file mode 100644 index 00000000..b03ac749 --- /dev/null +++ b/tests/remote_function_test/functions/metadata.py @@ -0,0 +1,112 @@ +import cv2 +import numpy as np +from datetime import datetime +from collections import deque +import skvideo.io +import imutils +import uuid +import json + +face_cascade = cv2.CascadeClassifier( + # This file is available from OpenCV 'data' directory at + # https://github.com/opencv/opencv/blob/4.x/data/haarcascades/haarcascade_frontalface_default.xml + "../../remote_function/functions/files/haarcascade_frontalface_default.xml" +) + + +def facedetectbbox(frame): + global face_cascade + gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) + faces = face_cascade.detectMultiScale(gray, 1.1, 4) + return faces + + +def run(ipfilename, format, options): + + if options["media_type"] == "video": + + vs = cv2.VideoCapture(ipfilename) + frameNum = 1 + metadata = {} + while True: + (grabbed, frame) = vs.read() + if not grabbed: + print("[INFO] no frame read from stream - exiting") + break + + if options["otype"] == "face": + faces = facedetectbbox(frame) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]), + "y": int(face[1]), + "height": int(face[2]), + "width": int(face[3]), + "object": "face", + "object_det": {"emotion": "happy", "age": 30}, + } + + metadata[frameNum] = {"frameId": frameNum, "bbox": tdict} + frameNum += 1 + + if frameNum == 3: 
+ break + else: + faces = facedetectbbox(frame) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]) + 3, + "y": int(face[1]) + 5, + "height": int(face[2]) + 10, + "width": int(face[3]) + 30, + "object": "car", + "object_det": {"color": "red"}, + } + + metadata[frameNum] = {"frameId": frameNum, "bbox": tdict} + frameNum += 1 + + if frameNum == 3: + break + + response = {"opFile": ipfilename, "metadata": metadata} + + jsonfile = "jsonfile" + uuid.uuid1().hex + ".json" + with open(jsonfile, "w") as f: + json.dump(response, f, indent=4) + return ipfilename, jsonfile + + else: + tdict = {} + img = cv2.imread(ipfilename) + if options["otype"] == "face": + faces = facedetectbbox(img) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]), + "y": int(face[1]), + "height": int(face[2]), + "width": int(face[3]), + "object": "face", + "object_det": {"emotion": "happy", "age": 30}, + } + else: + faces = facedetectbbox(img) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]) + 3, + "y": int(face[1]) + 5, + "height": int(face[2]) + 10, + "width": int(face[3]) + 30, + "object": "car", + "object_det": {"color": "red"}, + } + + response = {"opFile": ipfilename, "metadata": tdict} + + r = json.dumps(response) + return img, r diff --git a/tests/remote_function_test/metadata_image.jpg b/tests/remote_function_test/metadata_image.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1c5f91e94dd9b6911d3aaf3c271a3ee7eaec0a52 GIT binary patch literal 20678 zcmbSxbyOT*w-2Hk*cDKA`UJtE{@mT zhVyp@=M@g#y?g)cckTY&j{g83|Neb^B0@rf2P8x!B*a9-#H3`Dk4VWV$cTv_J$Xd& zmNJUCaO7;KT{{FOF=^@b*Y8qO)=jqnovWL>ho_gfPtfP!kkGJjL|lAAVp8(g6l8Wz zZeD&tVNr2KWfi8nrnauWwXMCQv#YzOcXVug0y{bNYkFyU<@f5^`o`wg;nDHQ>Dl?k z<)43W;o{)^Pgwtj?7!inxWjet{(Zdrg#X~ez2|vncog^Xp9v69%4rdr!XL8;20oyY zkIgD?dB`fHeE=|X86kSYCcO0g@E>UZf$aYd*r)$5Wd8;1f8mwkso5MZFy; zstO%ZpHs9`uM4$GP4Bvgv>S-3OL{|blDoiN#|Ifk4z{dGksvqUR~j0^1H&Jz<&RY@ 
zumH?nD5xGZcG-PeP(9kx8^;O=rLs2rsq5BqWQg6svSZ{jl-ugVR-`QJCvHSr_g$ae zW2ZQoZLMpn(9{7x4vMC=aG??bD$aK*C(!MVwKl80zmB+B(N;!^AVgnWfd^&wyZ||< zf$#b&Ib+sNNCocZQ@zX2yx$qH)eU7L-xgRNB)EY~sr#dnwjuO;{k@S5sbT}H#YZ%c z{1j~EgQA5XS-R7&UYGhOqN`V_xrTUr<3on9$O>=uc*8Nhx}|ec+87pbY)$hTXU)(Hsz= z<5 zu@VZ*!#6bzL$nZR4HV_;gAJ#!2W_u`Rqg4kmHtrqg2<_&LUP()-7JiAGC-ijqo80q1|S7et)dU+`s8 zG{4z!W+RA*xa8xtXQq3cTfi#9MkOM0X~Fe`eP4HbT*KlpQT zt}TtH*br|h8^y)j91j@A``JuYYov6Cb{w|j==F2*Vq~H*ol9D8!9Gq|j7qs=@~X5e zGTSY#Mpf@+PudNzcw`s4{63bUd zzwDI~QB$Y{Q>Iha#$iK1R|_pi6ZLei^Xp@w4|A38B)Ov+m=Fj|FY{D&((d|P^-cip zroO~NnCLiZ0UH__IN;z zYDOPVhgU8Cv^>knc@__nq`p014J@E3q3D;STS?F~ORLjcO^_9Z8W^q$ch8b(3Rf!A zIs@tL>{s3E@J{lz}FMehA~iyTqFQ6buQ zCU-TqinYyso#mKlny$T~`=K8^d(AiPk`v4dt04`GpP;)2|aqGverV1uniUZa9c77jvH zC^oxv>uC9Y@WEPl97s=pzb#r#1)PX{%vRG!fe{ANomo=>6kVz-U6=>@8`))hcJJma z8V5L%AKbEDD-A-KMGjvif`w0J=QYKH&;K?GSlM->j}3zXeRVUs`)o6t{yCR3P#y zG^ox<#KiMYtGM%FuFSqUs4DycTe)~qZ7d1C=q~;pquqp3v5BazQM7IWIeDVALl*kJ z&{cKT>yK(TKgwJdwP#!eP2Cd zf@OYPYxZTjEHJN*4x95$t~VvE>s_Zxt~b()4M~eyzwjNaD(MzBmQro8tcPPB{l$@} zv4yeca7>qd6cnZR#WfLa9V}t+wSD|<77#-D7@O66aGF=@T`!qA*5vUg{RFDZf98y0 zfAJD6Hp$QD^!2=F1uxG{CxuT$0K;EynDpR>D8_77a@M*}Lnblrd0|wN|G;`prx zg)l}#Ejs9W zp_B9?Cz=~<8m*}s`b;18+TtmogYso@lXxyHCN+ubmle(L@se3uJVj}tx3azP(OhrV znYgY4kRt4hVURIJc&bI|#A9XGx2e%&cC5}q#uWMLJr?mSMJgQq3Ow=gqgfoFrZ@o? 
zPo8OX%2R?W^GNyJ~nvvjYn+SoMB|Y6wcEx^MDKuQAmgPU>_40COX~=A5rC~VoWAP}Fv~*iK z7S+(YqT&W?_Bh_@uEz>gkj4+3cYkP{e>YAloCKXHg21UKRVRL+Q#DMOb5N4ZgrRzQ z{M&|Tnn_oIO_NmfrI8$Haq#qS%P1E!AUCXkE04^y`cT1~sEzJp$uxR;PU)x3B-7V{ zw<({^Gt|`_^CmeE2dmy&gEuS=U4Vq21l*yU2tHwPI;o$pBQ{9QMH38}tVs21u=!(Q zQ#07|Rhagdwb9-zN_HU2u@XH^VB!g2T7L^W{V6y_r2N+XNl|u>V}|`JH-}k|++tOp z>MALicFl`f0X$+|8po%QT$l;-mBsx)Oz1nG>Q?!M^l1omMrOddfM^dVB5+wYXUChSid#B;gXYw_s)fa^ED>UHC6aeF&q zyo$svt4Noi$3957ho|pOGzN1BzSurN=@~jO zJ^juim2*)(;Hy=flP$(pHfS=|+UG>$`%zn?L0xc*ElOF+M=yPMJX3hI+*7)Ns54m; zd3Y`yN%#%Ls*|#%fDjaPbW+U&jdG`#3PF>hKK#8x$i7q!qu+SQozFQe_Wfs#?|*H- z?Qt6zpXeM*2|OB)KPfcS_~0U7td>aI%Tw$~%r`r3KV#RsiO%3+ui+M$uTpnoO~@bB z5!KC~MVia&)kH-iE!-MxUlxI7x!WXj6Q^8;*GVps$TY zNO2LYcGO@!$jD9jIJQHRnOHZ+?aD0*{Ua5qZU0t;*p~Rq4ZhO#nM*W2!TxAIL zlp{ubEe~26>1U@f@EY;@q;1evFUNx=j@KTpdl#gwJO5XJ)?WTdi7+w?{)P{Qa(=6l z>@y#>$_wc<41ExhB+YGT_~Yqnyr&~fO|$U|{982)P@}71RJ>Lm^=rmU_Eq(;>Em9! zdC@~3yy<%Sk{Aq`uCLkmX=^JSej))i#_hcx21wteYuZfRrXm-MH}6}O+DPQ5jm8vR zNdigTT{pH}t;@m!_6lvAd~=n8?x7aX(QFhj&V~7X8)SpvD#;M zm8xt+H8CP-nb1Adhd1_EG5cHFnf;!d>7x{oR_;u$O@RL#zK03vuevvbc;qSJr|85I zf?t~obBzAQ!}^>2gE9!%Q6Fmt=8au*JfDAFzZDQty!Glt}ovl<2w% zt@QFx6AQ!RKS3*9Nmr*bu}0n_%+ocNFxHmWcOavQ#th>!ds;O&IFiVLYc7xt4=o%E&3eWZ6xf`@bNcRxPm8+$?o(v<;>IJ zXiuiI(t~g17*eAN&gx>3dr*|~iGP{4xL2AkR~yn~H?|?H&W?->jz?K)G2SZbfu z2@lAVS1Hx`AhaAL1yuM#gh0Y{u6)>*EiREss5sI*uz**FMvdgGNa{P6B)#Ht+p__i zm#zja{&o$xn?}dsn{N{*QasYCCi2QeJjpl!%wPSuR62y5PxHS}mKCvY2`U6?4#kk*i76Z2%vltXIP@J{@m zS8g{kC6fNwbpldrr=My66T#IIh9h09&OMj&`SG0%NQHbzv*J!rY zw5^_e0jR}vPh8D$52!d-@2a~p$4UlNoyRJg1|mY9ST!e0bY9wR**nGe-A^O>J*iP= z$pmG%;X1%f#Qa&EGm4KfSI()4!%pgi!V2xrOCJh96qbyU!72)@OKOT0OxtmSRiEhX z0j=oUk+QsICk9_c91^MI((#73bGx#Vl}$j}MBa!9(65HgrvcDRozoWX3Oq8!vPAgkHeuJ+bCWj8L6*gWrc&<3QG`4hH_T6%edk0zk9Y}lW z``*>W!F{>tls177NF#Lv)EF{RhQw$39LYMrq& zUF|8d(B;z@1|t&z;{`rHyQL6Ua~CDFvDfIdg=KK%kkKS*8BEqDHH0H$VtseH$6PyN zGEZhm8)qQXB_8n6a>Ydos<!u;5Nl!xvh}n%g;6r`u48e>dEYc58XhuP9xp8T$P@XCT(m&EmP2u_@A1m``4vdN_nEKU@!;^%Ag%J!F_6s5vf 
z8r=!yvT{Zj5mzGaI8@dszbYwuFRIIy+Pv2-9Dm1B>!dS2pA>N8x!0f&#rnW`&A z4^Q}bnYyyC=|{{WRb2XKt@puEYHcRWqyGA><`XfX)|z&YD~U|;L8UpuSfjbWI0!zB zTAXJl|F+SG%B=w%L#7~ac)rq+5|3HkoZj-Z1$SwwU$N%>3zqGUnF7Ba%I`R_=OvSF z3&B3_jiwfX^wBHGRxnt4NIrv38zO~E{-q6m4a*wcXQ_=L3h)B{;%k0lb55jd$bC2t z5h(gVBhrxjJ@dY66_&5j!sZ}O%&;Ueo_j@X*F0x2<7DZ!3H`V| zVA{q&^$@w1vOMmPTb)Ka#Vv<^_0TX0%H-f9h?qLQVUc5i*PfJ2#@$prk=FnUE>n;0 zAtx>b)(OjL3zS~iTQ*FViyg~YICq@z&+OK;Wgxg^F`&LmlHDNy@ny~EgC%XHZ-5D# zTbyxzRz>}=A;5qh{L^_#sco<3YfHoUDh6;k)GCI8Rp1OQKju%}eJ|x}B$-?pg89LW zL2EIE3SJqoKZ%`d!$jpbL@Gp)$u{GN{FQ@ril<2Tjj_YZTFO ztZ_5S>iPX>SWUItm9$B`Wc2B+v zDr*ktQKP_7)!$1D=w0WtmElxlV*8rA;NNWLg$hFHcMdit^X6X;_OG66CV31hBp`*k@BtuqQo=H9Ca|O4>r#voEZb z?!NVxbMZ+?Jxiw6x|vCynU_6fXSAkcADCedw1j5q^Aqhx-Q~RPErNX*xsxH#@&_=z zBvXf>q=+|Ow@2U*D32t>wl%238af2p_5_XC{_zg>lvo^}h*Rx^%PZHu^Ktw-01t4o zlQ}smH9r)tNhT8IPMa7C9?nM{LfSlBxV+|f2(%XN)#Wf*{2I|*QTpXidXU~6n>*8S zU+!LK9nBWp__c(I{wnNS<(T#(R`8NnNmPDf=g7Nf;mOTIm>m4>Xgzga>I(&Msv5(g zanR;%$$(C6$1H8Jhj@yamsTX_ZfcC2*_-R4>iWKqIG$$*gW{!WOqcMp#UBcWp}uFb2rAvQ5I4a?>0H6-E;lSQgoE!u3rl85v# zxGw)!Weslnt9LwgP%v=ADTXeObvJ5?E@TImg2oq70j3vTevkbW#F&v+dh4?itz$r})=auqnu zH6Vj$a0$K1!sB@;0j?cfEnS@Onb%CFDY8_kkZ;%JJ=QJtnFx5H+g?N?tfk;X*!SNn zTVyk8%IciovOMWBu;vWiedmG8CAvu$$6qp`>c;w`f|b(RSgV*eF8b=t2lzu*U4L;< z`owM=!P!Ft!UOEmq4zRxzOh#FHUI^Zjv*QLo{jX11S zf2QP~J3V|Ppc6gOzj4cA7$z@3^i!`i-g~4>lb~^WJk7Gm*;C;mn~b5j6mr)e=U3Ga z>P8?dSB_r?w9=#8w zI0INMT(GXg#@Gvi1+f@Qyek{!4xAM3(Y;;M?>Pn3TU0(dtnny~K&73PMIV;3|3tVe zN`H-pWi*%DG{0|2mTh%4{xZHE8&u4_3+mb$N!E!q_B*Pyjs||6c*Y8s2G$d@MN{j> zkW7SgKQb$!kD+4pY1NCQf};t8W}Df&ZM_Z(5ahlESs)Ih8gKcvmmmxwbfw<%}vMJ)+`; z{;}@#8{N^^)zjiqJ8gO^V=67WHU_c*>!|I1O#@HdP*3tPn*6=b$2F$Bjyg8_LBn` z?EUq`%B0j~Y~~oLEFH)y))w5#$pWK+^ZtaJ=C_nXq#tKI=DRTE!zXCY3E?37q*1`) zch<|v@7K(Y`k(=_cbDsaWC7IonssRTJ-TXwYb5M%2!Nk__1u?3w3VQfwZ;|oR}-{Z zgRtc-UV}6E)LPlAN{Z3#(tD+DQC9*o4Urt!k$(@${acK}|gG)4+ z#+d6z&Yq&u4fl~9@%xZ+^3N+@ez7f=^Re4F$)VDl>>{!AANu`D>LZ=%I&{ocN!9v4 zlg<@KEk6{5u^8r8enHz+l3pZwOJSqa(?`~olbDlNL3~0Ta%-*~Dh(c7lCz(HU;T|H 
zWs8&|C)GG_7NG0y1ar@NE@2fBo-B$~A4hXT4@)3#i%HO`c_zrI_Afp43AzSrnYGI) zaErilwfdC$s0o72n7n+e$Pu&4uYWvcFpQGyfwR6U9WRhH*HyhdqU_cq3V>m+|Dx0X zNvT~E3eI4bplmmV2tu zrRc|6sPd#}iI-)Uz~Cc(`7Ywuf&2NHTeTop=S(flEhU~&OweA(Wb4=OSM&3WkB2m- z3i6H(Up*$^28{g0VJB$j2N0|0%(Q9h`lEULeM1>143BXp0Y;L>Y+x}D$`I&ymA&Er{7~0$ zm0FvXd6{jk%8Sp~%6d1gS|;W&boY6_w-9?QZll|JP~_zEgC8sTs1<%%C3AOs?16aa z!lr&R=}r1+PuUYQ?RiQ$Qw~DFt_=BwQo)7U#X=m~nH&!f(VLy8A!3-Sz3!w_?aMf7 zs-fUj(CHLXpBe5RA#T57%d%>$dw7cWtFg4Pws#=}giNE8fkEAaz-Xb>@g2LZuFhuR z3;Gi1>p1&_;H{Djb#H9Z(vL@m-7Htnr*mI@!CizboI_&!upslvSmXdwU&cvr^r771 zo}5XdQ~-Mv(}(Y6#+pOozYpCTdA=yQCGBWvj2XmwWp*OX9jQIFo^CdP{yc{lY))!@ z_LF%O^yMB=PyK;)=sqhKUoswiPQgd_*e8JGTeQLZf|1Mz&<{3;dZ{l+{aDkE=s}w% z6gSyrW$eoCG;NMgt(tzATo5ji!r*?O_X#Ing90XMEBwFw)p<>}l{0;!Jw%~f`i+d1 zFoQJhJdt{#cdM^_WiWmo{elrt=L*b(D|ZxZGd8A9o?k_K@=i#1k8 zgT+a*N@(Q;Ji{^Kbsv+r zi@mylo-}dqXS9^{bmcj?N!65Xhx!{xwIQcGA$Ul5%TlH^fqQh!wl#M`z-Hh%`xarp zw7R6bA}WdM4{>A}P?V}4skf<~XIia&ms6N`-YFbD{)agWPjlj`oaFvz4$qmLOod~X zVIzg2x8Z%pIbnHqfl*ZZ%3jd**kSz$%}gDt{bA$ed&T&T$VKeo`wPgK(?PbOa+~%z3buQf?mS;4;k+1gL^ z&BUSzaL|K?AioG(N526&|Ir7>(!`6JpRYyPUfT}O0T`%h*Rc)0exmB-k-Vrh=Nl@x z9!CmT>7@*YaGE0QH{GEnG&iGkFA6|uK#Pb;WhrUu(WpW>H+;{@)2(&51aaS}pDA`k z-rDojZ2?1nG*irL)ZKD#3D=Pwn^-RcR|vz(%y@&h@NMav9@=%!UA@G4f0~s$iwVa+ zTV?pBnW*uYJ6lI9Ctkat_c=6OmH?vWK zE`BZCTO2UIV&%=R-89kM)tkYUVG7>Xt_;T&FRmd2c$Iwx##&*X#i@^)Wv3v73XBY z<4<*NhIz=>XLnV>ME`CdKy}_hry8%QR_YbE_|fM$(ef>ybF@QhtKJh;+GC)Pv_Ir) zhhk;)?k|q0-EJa~VRk#~S9NLB zN7R6RkT&>z*;J8mDm^Ffl3*vD;w|eKsI~?QMmnVIYBe~Cfd<(2ErNF`XFW2#ZLVGo zW;NUc`eU);)SR#UxTBNmWl-cj&l@AXm<-lJhLJ*R2F6ux0PjKdJwV^Q2XwZ4pB4+yQOQ4=I-_o|z-FzJxt)kE)-5FuT`dJN==9 z6LeyWJbi1GZiM-;k(}I-bzT2$p{hBlo(IzEHUl}8uVUv*y?CL!*7G?m_3XiVJnzk_ zQQ%G=CO@$vNujXNwu){|Dr9J|WeDYx%DpCt%E5k$Rb|C>u3maZ3d{|S_pJ}EUuv0A zP{Tl!<`|P!R&@!k2x4>Z(D7!|eq{~XWRed7*2xrS(cc-GYt5gX(5(llVh^AjUPI zRfa0rq@lvNy3GBB?LW0+~{D z^HbaEbL-tlPM2mJ(bBMEhSpTCWWelIG3DEDx!%;#{=#-xGqD}vx;X6n0g|{#VT5c1 zV+d^B>5>2BEihzDi?Q&E>82s|xqh_3JAeQ241COif8NrKK9ps1AiwW@gD$=Sxn}!h 
zdjM6VWIjxmtWkQ2n3v0Rd}p6YWuNs8ZBVt6^qZ9Htd}|shta;v|U2Sl**QYHn zUgYx#{GdOO6xDd0@>9yfJmL(etgTF|OhV-THHY82T^lgAhHX91>tqzuoUNqDSWp~S7&`zH)o)?Y9w#>gA zbEY{%m-^#fi$yOe9zLmawVBt;<5I@sUy!?gHQmHxQDpYqCf9q4`!vVUH|qoIA+hq| zX@l)CtJL*pgBK*4!xSgm`k9Ze)rmHQX14ZreXVZQYK^KsI@2El(pNGc=wH0*ctV$i z+_thXt5ihAbOGLVOkYa`uP7JC5r@6xQFKwFMH9Fs(4Lj2#>u!JDztm*s2=3(NQ;s{ zh~09_8G++cmQ_6hMz|RtAz$Y+=S8GpldU_h6TnIbO`pE;88xyobCMtKwIGiz#psPra@rO^*1@gv@6l!R$dbh?9sgD*5HS4ln&ykZ zAOna`C66?)d#lR$Wp%W<)XKM$!jrnNiKX>V-UXZW_7P;m+=H2Ul6J$E&Z3h+639#) zRS`Y$p_qDISayp?4&hU}%ia3P(H~W8BT7qQiww1j3RUe6#;{1suAh88q03xL}9s_Nwt0~q|%o=tZ zu5HGsUP5jkVn{q?#12z=KK{k|mK`lSE#*sh<#R+Be56G1Lww*JJ8KFv>@!BysB~yj zhL1m{Q5ycuYo&FE*T4fcYyNenD&jCYKivC4()_qf`j6Kq^#GGtXO5#I zLPmk<#>wULm@W9`#aJ^?$v2pTZRWbCkPok(7a3*%7-2kRtj5y(WwH{i%{9X^lArOU|q%0aU zOt;Z|@mokm9tBL(>BZk5zI~ymhn7%f^c%j&)}>ExWqZ^ZW`7z zhkA5AxHJNz$!CNowM+X|jUL@mXIW))!mdn!FI7u@FS(p^k@xMw?pQb8@W3&9TB^n}H>0gj(jX=PjdGEpt%r$3 zb1d_v1ILA@>_PYjbYe#V5e$>@+6~S5V8IQDXY4Xkwynf#f4Om@#;z}|@u>s(F7kxY zcbPDZ&#l&8xErEe87x(pqt#ouY1}fpEApkVwzOfFds6JF7Kv`G+flEsyVA$4`6m;n zQLlEBYk1-O{$9nCXYbx0t!!f8ktb-#A*+)0{XoUhA0Caq9n=~Pxx54JiaAAfHI>Ro ze{tyF;Pv9}n>fJ7k=u=6udz4n87gIpD|x;bHXF{^k(5b1+LO}Dgfv8hvnEKrvLPCV zMM(9i88RvbVhz3N7}mrIQ4YUicKqZbLvI*ojzH;)UrHaEUv+Fkq2n2P9YiLkPc4ar zGVPYp`y$c((y~=PgE8j!v6ncGoqm6GJw#?e%r9aR+)9T?w#JO0jRW+r+uxTsD`9Ns z?F4M=-<)e-XK2SG4~!U!T{3#(BKd|M-+VhAuftOIiq5jqS`s#S4LEwW8A+Tqd3x3+ z#tsT?>Z_0bc-^XXCT#wANg&Y=19?Uc1?P_#7d*Hgd6Ng#WBj+(%Rdee$wqKTKfU9M z|KW@O!wPss65fJxI83rJW>Fp_dawOTpffl6rNuJ3KDr>Tfi`4dOp|^+b0f&GiP%(k zR%}}}8!4f)nUZCboxqEq1yg#BEjwXUq`#t1!qf-Xuti+QV61hLQ7Z7+tB5n&i^yad zF)W&AXLP}!zT)03P4e$F0$GXZF65QVddDBN*Z1R9Irie7;Xu}gc8Hg8bFTeIcF(!} zYNhMu7PvKJRc}O^$4b%c)=7gx?C=J_<-8x6>ZHsVf83chN1<0S(ET}|;Mc)N7L%+c zUdwwcZhEb@X@}neAl9f{RvN0k-r-J6l>HLhVJC2%Ji2B8{(6({mKxRX`7uzCrrmPHfL}e)-Od z@fwP>?<^v*&POvN1DDk+ugYcL(+RBM6i~|4B8P z_0hd!6M+?D2aKP3^mS*DU5u(e4}Vp0eoq(VOn6pnP{>gjyQygV5dB>S)DTRSpyk3x zRidJR3qR4}%TjMAvocZv7=dN^;0#Pcgj6l1E8?d=j 
zClIDd-x`gHdgsj)%x7ohQG@T1tN}(gF*3Tp3W=KzPFb!@3M%tXT78-A#xyk=D0_Bxe0xh=f~+ zWZ_rF($WVqr&r8Zpo@v-=O2g<1`F8?%9cZSAZ!P(!@FdIKX?Qv^m5z$vA;i2>LEAi zY#T*m#Wv9wp~H~IPgA_*sjSoF?`!278~21*edh&GvO`Ow{CO2y(H9p5H@)wGJJCvz z@%JQ4l|)a6ZA~r3yx$+z5Al{9(ZwZI)GmbYRikGbteVv>!WU6WuWA?IX&D9cKNR;H z3(CKi<9=LjHn^B8m{y0qaDS_t#&o)A3Ug6s_n}0@)g7ZhH<}h*Sp6aVBrpHHTe`!q z@*uPe+bldfs$ja_iQ_)R()_C8fGp1sr#?tCWl@@js|D?Mlj_f~Wx`zBG|vAmX@dQy zf@x$7bgxswH43|?Rbf3Uqihpi+LH{U8!`M?zB{0s{>rM^Ov`?w-s<^r_1J(b-KPu} zS7Y^AL*(~U;JYtrV+b2hr8F;u(D7vAMch>N^?^9r_>hES9kecSnz7M?en(z6WRSsF zX1gYyb~fNLse%bZD;+Y%^mjEmOXQmB-@vvG_Ft_7_bhtmi)Y_=&K1vw6!!$t4oN|Z z)EhD^PQ)LUt&W|_3bJ!g(NUUR=>t_NA#2CONnb33T1a3K3 z)}Cl~H;YS+_4a^A`4i8Mo-r9&7wsGAMjD56u&10C!BpLH)S+x(w8Y>MJwnxJRIlqF zxpnw|mIn*J-!V^b@d~pMaUr8Rsd*wPY`R(sLSOzRMEIxY>QipA9j6M%x7{3b_&H!v zR8k!&N)*mZR_`)CGA?+yo@b$l85*EQSyn`Keh(aZR@I=gtF6Ga#-P_OVH3x*oR*(0 z*1E@&LsO;tlRIWl1Qp za7PczUa&j0ch<(Y`#)%`?Ys!dGK+L34!Zg&OBO(M{pVeiN%(>6GH9rEKdfHl2Oa}- zP_$5NHZ+7e(Pf3R_$1BXS8(By^w9KQoXXO}J=6G;(jMVsl9`XU+&^;chPRQtA$j)O zY}84cuJl}sC7dA7Ua~eR#C1J}%%Ey0|MLmGHC?H<6kT;}4woW*5D8fQm8zfoc&^|j zA0gp>78{>%Q?j4fYdt`iz(K4FPI?PeGWHIF84f~7p z9&0ktkq&PcsbPs|g*t{>Bw@d^C=aOaQok1bax zCJ)cXE;YUC1utvz9Pa^DxD0hnTx*aUiUfuL@-u{Rs?dP9!+uAD?;Ia&+m}8R@$0jF zgJ$oXfZ;s;1Ye`N;G7EiFW&C%V+lmxFL&XyP^Z<0-o-w_2w@(0H=vTGo%*-;#{)iY?_GPX<1di1DJ28$ z!P}>3l|$h`PglgTv9_g^z0c&xaCDNC&yWs*IUcsxVnI4?O+MrZ(yVev1Obj5{roMks%p1( zS6NJ&YpUd-iiV?ihsYOVjRC>*$T7Mn2+rKMp?M;i>i!9o-FNv;&cD0IFRE!ynw|0C z=2<$lHrc07D?q`&I5VPrW1~52cjaMKn&`x2u8BcA{oP}HH!QbEG%#W8T=?Vrc;k?C zT3uR#T3AkXQvRp5qi6wbRK<5GqF9~P0d`;Z$KoS@ODHA;8}>=3wkx3(bFS>lfo{y}Lwsv0-d zi{gP--yfkA?VSG>Ik^)Wc>ONVYrhp~9eyV_#cU>@E!)g_PtOhi#i=4EXqFXjJsO%; zvhw}vlQvQ*sf@#eE2SqIE+r&A77-=riK*+N>~xkp1r<52IIm+&R37LAb$pnZH5grW zpp%lgTCQm)zrTH~S^P^jY=8h_t&jyY^TGA4zJE4)U`o|nxA8>nQ{&k5T0gEuH6D(ek(Vnv?$Ge14@;L)cfN+d^=hBdqlbC11k%ocD<0hwe z6Qd{wCFGwpCMIyT)Ei@Wb>n1Ex&bd_^}XRRgx1~r`FL>J@2fFBK9gnjK^Juae=ERg z)%V^6kc>(Gvk9x{WO~OT)om0ylA>&_%&gZk3a-aiFX#s9(h`>@&B>NP@487jGAHUs 
z+pxD-MBQfBqQ%aIvu~BX^;jw+p&)G~>*wj|UF1~neM!KQsC%tO#n;Wt<*e8Y<)80` zw%-nvsy*!V0@-$B*xm&^P#8*v|It-jjBATXoYPG6CQM2MYQh#6%7iqnW{j-Z>2Z7_a zBaB^?+%;Eip!@U8V|x{EI727AO29fSZ+E<7m(!=~2h>{7wV0xLX(6NCvsAd*R1*8E zJ)l=rzwrnZ5r(0!frHJ&du&kyLLOrSoK8M+{V9x8TDq?PbG=rA%?!*)=uGV1C=&Uj zRrNNv`@XN_pkdD;HRma|WdBE02ESBr+@z zF0$J`uYZdV=}+k{WUpGFy>x-8?JT-#v+@|*vWzLolEt_RnZEtDkNh)v#(%NmdXWF_ z;1ev;LmU4k z{aGbpE*_WC*jkdZf~P^VU`#^r_B&kv?!P#gHhoOZ=WA2BOdzeMy7S z<#}<|_lNZ%5S=e=<8&AOigdab3$K1fCW5~uzcB@&o&Q z53}S%zQ72FDoP_V- zI_}6F0Ite(n>pn%{xH_n5x{TOyo1CZB}Vyoka|>l_lj=ySr{TRrh3;4tz3PUHhEhv zdJ6h19vhvcc^I1h&0NXxCx{H5Vn%cCTz=CdmSdAv?_@Jd%~bj1SKr}q%8JnV>_#Qm zb{ism)Un2QaY?kX#a*|6<2Bnj`==yyV2}C!OK#Y$C1$QlH!Y?$Y+e<|IjrdZ zR0lLPo2$=pzQ)%z7gE~PjjuZJ8>MA5m0CvKk}Bg&IgP12O(biOIjUD)D4y~*ibO)B zW9Ai!E1Wh@(yMQ-$kRy5RXP_Yw@3Mq^HmrXGzkixDeEA1`MTD|4W`IjttC1B@utvb zD^gOo-lV1=qKYX1=+|>rmA01iayhCH{{XCCt#^7H3eka%aa{GM$aPYU%GBk)*g$=XX| zyW3qo7VwR{X1aKEyM4cN98>nPOHyAc^Zuu!$zz?%fmrS_kzTpuZ7w~mQGI$>l-)jwM_SR-`JyVmiib2lp&BfNENq>Q9CMh`<@pntI!?OiUTCAWaQTX;8>+oxVicsTwB zJ&k^Kq@Ooh+tGEYwA+|wf>wrj+&pq2!B!mzHI63=tHYG)ql$E$&)mzV`@o)RqFe-G z4l!N}`!4>_*54L94{c$rUMyZHvQ3ZYFb3gAMjrUDVVdm~io0Nn{Qm$Kg{g&`O&?cA zv@69)GUlaluBVRG!t2s)SKG<1it70SoSrL{)~_P+T<%bP&2dFJ9WKkM<{l>0Jej`s zI_H}5e;52fipMJM5kWY~>sJ0S_}V=_NmdCIo|p%m`&K`~-v)Sp#MWoT9wOBQva!T^ zab?@A8>BIrZzXA48Rorj z#C|f+v|B$e+sR1LcvN6(!Tf3QW9?DAR-o+3#oG#A4~uT+Nu?F`CCm zx7h-X{?+69)z{kcNvnP#*RC(5V*9Y&c{R&iT^>b!_FsYetbOfS@wm);#LCes$lF+f z%~xeDnwOmYX~lmU_Y`b#CuA0u{$mhpO`!7P9<>#WWHxcdTe~}9&M8!BL9$~fDX5ue zCp6urmz~^Ff@|krbKP47I|FE=Y2%7fALUX5ew8dq^AAdL!Uajf^Gyh;br?JSjV+uz zfFq?g;!ig!ypvb#?V1=yX}9j!5a~Wt7^>UOY4;OjAXG@-FEyHrMU9eet<4e1x-vS` z9Q?=9lWr0peNF}3q$+l`q!c8zixTaTNBk?9(%3_{am8!i zYDxBT0qw^=c6qIfeM>9-X-i`%8D1=1%E#KggH=M8 zPBWfs)^+>EnOmP~^8IcwZy2e_uDS}uOH^lD!qdp8qa}_7c6vsI7MiI%$1?TWYa_!} z{$$17M%czj9qVG|@ug#))zMm3RvkF?E#7J@+1rI4)tc9mGj+{C(^FNs&3X2@vTVN} zsi`+$@OxEAZUeAh^*POFEeE)n>p){hI#L>x1GjCcE-p6rrcEVLmO{q2j7hkXNLjK? 
zRgmSn)7g$H9&55X7&mcOIpVEZM$=3H=Bgi*R`!EtN6x~!@bvlRc5-4W`#7DCh5SK# z;k$?~?5_R=mZhj$^KPNNtC#Ud?G>zQFcEA-ON=nib6yXp-ZX4bo+|9JV%Am% zjMp@BTqHG7IwhOpB&^x7u4~h46A5l4CEK5+WB&kZPh%l5%D`h7+;LD|N;byX{u-k! zo@mZ%(}o(1S1MW_T=5lWE4E9hc!KJF9m!uz);-1Zl|1uLxZzJZtmzwed95Q(ojbUq zB~o)%lPg_ZG>f#3D!yn23 zpy~FS4~8Mapr>k4`F5%?&T8eXVL%<~uBgR`UV^!CQcdv?1DMx$^^_Yf|b@ zEl+CC(`6C-r>~_yQlAwPP0R{YTxIc!d~6wNfo;rccv$1ox*8_r?SsIk&O*iMOn;X= z)DuaSs)$MGK-83^(p8+nMHEmW)U=ee6g9fiQ=6?en3z#T6og3&ZIMsCcN#9Fsuu>h zML4V0w~Yo0wks;M{h~T)Nk>L{y}kHk2Ozaab*L4L<%a_`#M$di6lL&!w3nK(Xy4~O zJ!^;ANgHz`cxwnGKQ&Djuzhiw;Qrn7XC|jzSdRYyt!SvRHgr1Xt+hxET$HmJi9PB| ztIxD&Cp8VcessuzzVQOLjCoe1-0O5GRy{Fy02qw@Y8!?LoN{XA)Mn2y4u}Bj?O68{ zcCINtX&dZ_%M#qu@A+y5dU7`EyAVqphBgMZVRI%9DB7;tC1dSNXykJ#bNq0)bbgqIMaIo%dbgWCn-0faIVLf9L z*}%nF$jkEkQ?{NeMo~`1c^5rVvBF!eN#qkxjZfiIkCvmDEJCmWjEZwYoyQcdAgXtF zJ4Xi+^ zdbeUDu-U#`RY?s^aUm^=fb&Yg*vcv9Kf;@|(=rl@D4;{BX(?zZovdybtP(HPO-9kO zedS^JSJLx*JluycXZ_(^Z;8Gh;yuDJhuBs;?C9h?S*b8hY-?H^nsly78v(4Kq*lf* z4&-#JPuaVgHkt&Y?mjmw%{a(gtu<7V0ILaV?b?=%=uu^M`ce$`sm;6Y`?S*wFHuOY zRz-tW^tktG7XI=9`qUeJs@|9X03i#0Ka77mnQS#3PQP9rUZE=e4RhBA=B~5FmhBzl zu^)K>{VSQ4-@Yr(j^^^%rty-wsbY(2^HP-{50;UBYSFVRilk7SwsTA6_oY>=?lM)7 z?&(tR-qjMVKLi@hltVbd>&dG4YP}>ymp$sdidR0BqIwcQ!xdWI)vxsk;~#V!nwnAm{fnJUhpEQmvRZd77OCOei0gMb&qiv>~X}fFalCnNtPVVHscfAH( z-Rc6SlXlTeMBjMQ?c3g?j!J&|dXmE))U6t~QAwIm+=>K3)~dYXo8_qLxvAb&IL#-R zn8hNB?1;a0MM78R>58ypUU5;B+}$dNWZ(P^=eL-#a3a1 zLJ1+bq&!opt=gHGw6vKNWKaj*qH9~on1Vi9<~7|WJJ`x9(M544?s`@{o5bG?=f2;L z0Xfbqn~&h#)RFy;9 zLdZ!Xv%ldMn$&*rF+~-fO4D6UBNQ>TMe0_n%b}@1bBZXb_S5cb7$Uo9sHc_vDt$ji zU$#QZIxwP&zim4QB0E1AY3$b4k`?D0Nv={$Uu@AuX9|)soKVKKQoS&02C?HKiYTqQ zn>pN38$eg6sMvZ@MN~-~l`^dqQjibZRVQAwQCClJGjD!an2xMHTPh zR;=)`?_*3T>S=a{;BiG2?4BSr9+h3AG*LuAMvntE8O~^;tbxIar*8RZqO$@vK4%oG zZhG#biV9wIk)j8V^drpPDGIip75&pJRv8 zk}Ew#b43*qHe_t2nvs{~qKerWl<#E3comf)%@kL3R)!NIr^Yu?MNx!zHm%dkfOg`u z?hg0jiYq0#)b=K|x7+g(Rvk-oMHQ27WOP)r9Nv{72bnQTQ(YACG?7?e>(1fT!e DmU=Dc literal 0 HcmV?d00001 diff --git 
a/tests/remote_function_test/requirements.txt b/tests/remote_function_test/requirements.txt deleted file mode 100644 index 60864807..00000000 --- a/tests/remote_function_test/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -opencv-python==4.5.5.64 -flask==3.0.2 -numpy==1.26.4 -sk-video==1.1.10 -imutils==0.5.4 \ No newline at end of file diff --git a/tests/remote_function_test/udf_server.py b/tests/remote_function_test/udf_server.py index a476557f..7cb9526d 100644 --- a/tests/remote_function_test/udf_server.py +++ b/tests/remote_function_test/udf_server.py @@ -9,6 +9,7 @@ import skvideo.io import imutils import uuid +from zipfile import ZipFile for entry in os.scandir("functions"): if entry.is_file(): @@ -45,10 +46,20 @@ def image_api(): image_data.save(tmpfile) + r_img, r_meta = "", "" + udf = globals()[json_data["id"]] - r_img = udf.run(tmpfile, format, json_data) + if "ingestion" in json_data: + r_img, r_meta = udf.run(tmpfile, format, json_data) + else: + r_img = udf.run(tmpfile, format, json_data) return_string = cv2.imencode("." + str(format), r_img)[1].tostring() + + if r_meta != "": + return_string += ":metadata:".encode("utf-8") + return_string += r_meta.encode("utf-8") + os.remove(tmpfile) return return_string @@ -57,13 +68,25 @@ def image_api(): def video_api(): json_data = json.loads(request.form["jsonData"]) video_data = request.files["videoData"] - format = json_data["format"] + format = json_data["format"] if "format" in json_data else "mp4" tmpfile = "tmpfile" + uuid.uuid1().hex + "." 
+ str(format) video_data.save(tmpfile) + video_file, metadata_file = "", "" + udf = globals()[json_data["id"]] - response_file = udf.run(tmpfile, format, json_data) + if "ingestion" in json_data: + video_file, metadata_file = udf.run(tmpfile, format, json_data) + else: + video_file = udf.run(tmpfile, format, json_data) + + response_file = "tmpfile" + uuid.uuid1().hex + ".zip" + + with ZipFile(response_file, "w") as zip_object: + zip_object.write(video_file) + if metadata_file != "": + zip_object.write(metadata_file) os.remove(tmpfile) @@ -71,8 +94,10 @@ def video_api(): def remove_tempfile(response): try: os.remove(response_file) + os.remove(video_file) + os.remove(metadata_file) except Exception as e: - print("File cannot be deleted or not present") + print("Some files cannot be deleted or are not present") return response try: diff --git a/tests/run_tests.sh b/tests/run_tests.sh index 8d6fb919..dbd2120c 100755 --- a/tests/run_tests.sh +++ b/tests/run_tests.sh @@ -65,14 +65,18 @@ function execute_commands() { # Start remote server for test cd remote_function_test - python3 -m pip install -r requirements.txt + python3 -m pip install -r ../../remote_function/requirements.txt python3 udf_server.py 5010 > ../tests_remote_screen.log 2> ../tests_remote_log.log & # Start UDF message queue for test cd ../udf_test - python3 -m pip install -r requirements.txt + python3 -m pip install -r ../../user_defined_operations/requirements.txt python3 udf_local.py > ../tests_udf_screen.log 2> ../tests_udf_log.log & + # Run the prep for the TLS tests to generate certificates + cd ../tls_test + python3 prep_certs.py > ../tests_tls_prep_screen.log 2> ../tests_tls_prep_log.log & + cd .. # Start server for client test @@ -85,6 +89,7 @@ function execute_commands() { echo 'not the vdms application - this file is needed for shared key' > vdms sleep 3 # Wait for VMDS server to be initialized + echo 'Running C++ tests...' 
./../build/tests/unit_tests \ --gtest_filter=$test_filter \ diff --git a/tests/test_images/metadata_image.jpg b/tests/test_images/metadata_image.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1c5f91e94dd9b6911d3aaf3c271a3ee7eaec0a52 GIT binary patch literal 20678 zcmbSxbyOT*w-2Hk*cDKA`UJtE{@mT zhVyp@=M@g#y?g)cckTY&j{g83|Neb^B0@rf2P8x!B*a9-#H3`Dk4VWV$cTv_J$Xd& zmNJUCaO7;KT{{FOF=^@b*Y8qO)=jqnovWL>ho_gfPtfP!kkGJjL|lAAVp8(g6l8Wz zZeD&tVNr2KWfi8nrnauWwXMCQv#YzOcXVug0y{bNYkFyU<@f5^`o`wg;nDHQ>Dl?k z<)43W;o{)^Pgwtj?7!inxWjet{(Zdrg#X~ez2|vncog^Xp9v69%4rdr!XL8;20oyY zkIgD?dB`fHeE=|X86kSYCcO0g@E>UZf$aYd*r)$5Wd8;1f8mwkso5MZFy; zstO%ZpHs9`uM4$GP4Bvgv>S-3OL{|blDoiN#|Ifk4z{dGksvqUR~j0^1H&Jz<&RY@ zumH?nD5xGZcG-PeP(9kx8^;O=rLs2rsq5BqWQg6svSZ{jl-ugVR-`QJCvHSr_g$ae zW2ZQoZLMpn(9{7x4vMC=aG??bD$aK*C(!MVwKl80zmB+B(N;!^AVgnWfd^&wyZ||< zf$#b&Ib+sNNCocZQ@zX2yx$qH)eU7L-xgRNB)EY~sr#dnwjuO;{k@S5sbT}H#YZ%c z{1j~EgQA5XS-R7&UYGhOqN`V_xrTUr<3on9$O>=uc*8Nhx}|ec+87pbY)$hTXU)(Hsz= z<5 zu@VZ*!#6bzL$nZR4HV_;gAJ#!2W_u`Rqg4kmHtrqg2<_&LUP()-7JiAGC-ijqo80q1|S7et)dU+`s8 zG{4z!W+RA*xa8xtXQq3cTfi#9MkOM0X~Fe`eP4HbT*KlpQT zt}TtH*br|h8^y)j91j@A``JuYYov6Cb{w|j==F2*Vq~H*ol9D8!9Gq|j7qs=@~X5e zGTSY#Mpf@+PudNzcw`s4{63bUd zzwDI~QB$Y{Q>Iha#$iK1R|_pi6ZLei^Xp@w4|A38B)Ov+m=Fj|FY{D&((d|P^-cip zroO~NnCLiZ0UH__IN;z zYDOPVhgU8Cv^>knc@__nq`p014J@E3q3D;STS?F~ORLjcO^_9Z8W^q$ch8b(3Rf!A zIs@tL>{s3E@J{lz}FMehA~iyTqFQ6buQ zCU-TqinYyso#mKlny$T~`=K8^d(AiPk`v4dt04`GpP;)2|aqGverV1uniUZa9c77jvH zC^oxv>uC9Y@WEPl97s=pzb#r#1)PX{%vRG!fe{ANomo=>6kVz-U6=>@8`))hcJJma z8V5L%AKbEDD-A-KMGjvif`w0J=QYKH&;K?GSlM->j}3zXeRVUs`)o6t{yCR3P#y zG^ox<#KiMYtGM%FuFSqUs4DycTe)~qZ7d1C=q~;pquqp3v5BazQM7IWIeDVALl*kJ z&{cKT>yK(TKgwJdwP#!eP2Cd zf@OYPYxZTjEHJN*4x95$t~VvE>s_Zxt~b()4M~eyzwjNaD(MzBmQro8tcPPB{l$@} zv4yeca7>qd6cnZR#WfLa9V}t+wSD|<77#-D7@O66aGF=@T`!qA*5vUg{RFDZf98y0 zfAJD6Hp$QD^!2=F1uxG{CxuT$0K;EynDpR>D8_77a@M*}Lnblrd0|wN|G;`prx zg)l}#Ejs9W zp_B9?Cz=~<8m*}s`b;18+TtmogYso@lXxyHCN+ubmle(L@se3uJVj}tx3azP(OhrV 
znYgY4kRt4hVURIJc&bI|#A9XGx2e%&cC5}q#uWMLJr?mSMJgQq3Ow=gqgfoFrZ@o? zPo8OX%2R?W^GNyJ~nvvjYn+SoMB|Y6wcEx^MDKuQAmgPU>_40COX~=A5rC~VoWAP}Fv~*iK z7S+(YqT&W?_Bh_@uEz>gkj4+3cYkP{e>YAloCKXHg21UKRVRL+Q#DMOb5N4ZgrRzQ z{M&|Tnn_oIO_NmfrI8$Haq#qS%P1E!AUCXkE04^y`cT1~sEzJp$uxR;PU)x3B-7V{ zw<({^Gt|`_^CmeE2dmy&gEuS=U4Vq21l*yU2tHwPI;o$pBQ{9QMH38}tVs21u=!(Q zQ#07|Rhagdwb9-zN_HU2u@XH^VB!g2T7L^W{V6y_r2N+XNl|u>V}|`JH-}k|++tOp z>MALicFl`f0X$+|8po%QT$l;-mBsx)Oz1nG>Q?!M^l1omMrOddfM^dVB5+wYXUChSid#B;gXYw_s)fa^ED>UHC6aeF&q zyo$svt4Noi$3957ho|pOGzN1BzSurN=@~jO zJ^juim2*)(;Hy=flP$(pHfS=|+UG>$`%zn?L0xc*ElOF+M=yPMJX3hI+*7)Ns54m; zd3Y`yN%#%Ls*|#%fDjaPbW+U&jdG`#3PF>hKK#8x$i7q!qu+SQozFQe_Wfs#?|*H- z?Qt6zpXeM*2|OB)KPfcS_~0U7td>aI%Tw$~%r`r3KV#RsiO%3+ui+M$uTpnoO~@bB z5!KC~MVia&)kH-iE!-MxUlxI7x!WXj6Q^8;*GVps$TY zNO2LYcGO@!$jD9jIJQHRnOHZ+?aD0*{Ua5qZU0t;*p~Rq4ZhO#nM*W2!TxAIL zlp{ubEe~26>1U@f@EY;@q;1evFUNx=j@KTpdl#gwJO5XJ)?WTdi7+w?{)P{Qa(=6l z>@y#>$_wc<41ExhB+YGT_~Yqnyr&~fO|$U|{982)P@}71RJ>Lm^=rmU_Eq(;>Em9! zdC@~3yy<%Sk{Aq`uCLkmX=^JSej))i#_hcx21wteYuZfRrXm-MH}6}O+DPQ5jm8vR zNdigTT{pH}t;@m!_6lvAd~=n8?x7aX(QFhj&V~7X8)SpvD#;M zm8xt+H8CP-nb1Adhd1_EG5cHFnf;!d>7x{oR_;u$O@RL#zK03vuevvbc;qSJr|85I zf?t~obBzAQ!}^>2gE9!%Q6Fmt=8au*JfDAFzZDQty!Glt}ovl<2w% zt@QFx6AQ!RKS3*9Nmr*bu}0n_%+ocNFxHmWcOavQ#th>!ds;O&IFiVLYc7xt4=o%E&3eWZ6xf`@bNcRxPm8+$?o(v<;>IJ zXiuiI(t~g17*eAN&gx>3dr*|~iGP{4xL2AkR~yn~H?|?H&W?->jz?K)G2SZbfu z2@lAVS1Hx`AhaAL1yuM#gh0Y{u6)>*EiREss5sI*uz**FMvdgGNa{P6B)#Ht+p__i zm#zja{&o$xn?}dsn{N{*QasYCCi2QeJjpl!%wPSuR62y5PxHS}mKCvY2`U6?4#kk*i76Z2%vltXIP@J{@m zS8g{kC6fNwbpldrr=My66T#IIh9h09&OMj&`SG0%NQHbzv*J!rY zw5^_e0jR}vPh8D$52!d-@2a~p$4UlNoyRJg1|mY9ST!e0bY9wR**nGe-A^O>J*iP= z$pmG%;X1%f#Qa&EGm4KfSI()4!%pgi!V2xrOCJh96qbyU!72)@OKOT0OxtmSRiEhX z0j=oUk+QsICk9_c91^MI((#73bGx#Vl}$j}MBa!9(65HgrvcDRozoWX3Oq8!vPAgkHeuJ+bCWj8L6*gWrc&<3QG`4hH_T6%edk0zk9Y}lW z``*>W!F{>tls177NF#Lv)EF{RhQw$39LYMrq& zUF|8d(B;z@1|t&z;{`rHyQL6Ua~CDFvDfIdg=KK%kkKS*8BEqDHH0H$VtseH$6PyN 
zGEZhm8)qQXB_8n6a>Ydos<!u;5Nl!xvh}n%g;6r`u48e>dEYc58XhuP9xp8T$P@XCT(m&EmP2u_@A1m``4vdN_nEKU@!;^%Ag%J!F_6s5vf z8r=!yvT{Zj5mzGaI8@dszbYwuFRIIy+Pv2-9Dm1B>!dS2pA>N8x!0f&#rnW`&A z4^Q}bnYyyC=|{{WRb2XKt@puEYHcRWqyGA><`XfX)|z&YD~U|;L8UpuSfjbWI0!zB zTAXJl|F+SG%B=w%L#7~ac)rq+5|3HkoZj-Z1$SwwU$N%>3zqGUnF7Ba%I`R_=OvSF z3&B3_jiwfX^wBHGRxnt4NIrv38zO~E{-q6m4a*wcXQ_=L3h)B{;%k0lb55jd$bC2t z5h(gVBhrxjJ@dY66_&5j!sZ}O%&;Ueo_j@X*F0x2<7DZ!3H`V| zVA{q&^$@w1vOMmPTb)Ka#Vv<^_0TX0%H-f9h?qLQVUc5i*PfJ2#@$prk=FnUE>n;0 zAtx>b)(OjL3zS~iTQ*FViyg~YICq@z&+OK;Wgxg^F`&LmlHDNy@ny~EgC%XHZ-5D# zTbyxzRz>}=A;5qh{L^_#sco<3YfHoUDh6;k)GCI8Rp1OQKju%}eJ|x}B$-?pg89LW zL2EIE3SJqoKZ%`d!$jpbL@Gp)$u{GN{FQ@ril<2Tjj_YZTFO ztZ_5S>iPX>SWUItm9$B`Wc2B+v zDr*ktQKP_7)!$1D=w0WtmElxlV*8rA;NNWLg$hFHcMdit^X6X;_OG66CV31hBp`*k@BtuqQo=H9Ca|O4>r#voEZb z?!NVxbMZ+?Jxiw6x|vCynU_6fXSAkcADCedw1j5q^Aqhx-Q~RPErNX*xsxH#@&_=z zBvXf>q=+|Ow@2U*D32t>wl%238af2p_5_XC{_zg>lvo^}h*Rx^%PZHu^Ktw-01t4o zlQ}smH9r)tNhT8IPMa7C9?nM{LfSlBxV+|f2(%XN)#Wf*{2I|*QTpXidXU~6n>*8S zU+!LK9nBWp__c(I{wnNS<(T#(R`8NnNmPDf=g7Nf;mOTIm>m4>Xgzga>I(&Msv5(g zanR;%$$(C6$1H8Jhj@yamsTX_ZfcC2*_-R4>iWKqIG$$*gW{!WOqcMp#UBcWp}uFb2rAvQ5I4a?>0H6-E;lSQgoE!u3rl85v# zxGw)!Weslnt9LwgP%v=ADTXeObvJ5?E@TImg2oq70j3vTevkbW#F&v+dh4?itz$r})=auqnu zH6Vj$a0$K1!sB@;0j?cfEnS@Onb%CFDY8_kkZ;%JJ=QJtnFx5H+g?N?tfk;X*!SNn zTVyk8%IciovOMWBu;vWiedmG8CAvu$$6qp`>c;w`f|b(RSgV*eF8b=t2lzu*U4L;< z`owM=!P!Ft!UOEmq4zRxzOh#FHUI^Zjv*QLo{jX11S zf2QP~J3V|Ppc6gOzj4cA7$z@3^i!`i-g~4>lb~^WJk7Gm*;C;mn~b5j6mr)e=U3Ga z>P8?dSB_r?w9=#8w zI0INMT(GXg#@Gvi1+f@Qyek{!4xAM3(Y;;M?>Pn3TU0(dtnny~K&73PMIV;3|3tVe zN`H-pWi*%DG{0|2mTh%4{xZHE8&u4_3+mb$N!E!q_B*Pyjs||6c*Y8s2G$d@MN{j> zkW7SgKQb$!kD+4pY1NCQf};t8W}Df&ZM_Z(5ahlESs)Ih8gKcvmmmxwbfw<%}vMJ)+`; z{;}@#8{N^^)zjiqJ8gO^V=67WHU_c*>!|I1O#@HdP*3tPn*6=b$2F$Bjyg8_LBn` z?EUq`%B0j~Y~~oLEFH)y))w5#$pWK+^ZtaJ=C_nXq#tKI=DRTE!zXCY3E?37q*1`) zch<|v@7K(Y`k(=_cbDsaWC7IonssRTJ-TXwYb5M%2!Nk__1u?3w3VQfwZ;|oR}-{Z zgRtc-UV}6E)LPlAN{Z3#(tD+DQC9*o4Urt!k$(@${acK}|gG)4+ 
z#+d6z&Yq&u4fl~9@%xZ+^3N+@ez7f=^Re4F$)VDl>>{!AANu`D>LZ=%I&{ocN!9v4 zlg<@KEk6{5u^8r8enHz+l3pZwOJSqa(?`~olbDlNL3~0Ta%-*~Dh(c7lCz(HU;T|H zWs8&|C)GG_7NG0y1ar@NE@2fBo-B$~A4hXT4@)3#i%HO`c_zrI_Afp43AzSrnYGI) zaErilwfdC$s0o72n7n+e$Pu&4uYWvcFpQGyfwR6U9WRhH*HyhdqU_cq3V>m+|Dx0X zNvT~E3eI4bplmmV2tu zrRc|6sPd#}iI-)Uz~Cc(`7Ywuf&2NHTeTop=S(flEhU~&OweA(Wb4=OSM&3WkB2m- z3i6H(Up*$^28{g0VJB$j2N0|0%(Q9h`lEULeM1>143BXp0Y;L>Y+x}D$`I&ymA&Er{7~0$ zm0FvXd6{jk%8Sp~%6d1gS|;W&boY6_w-9?QZll|JP~_zEgC8sTs1<%%C3AOs?16aa z!lr&R=}r1+PuUYQ?RiQ$Qw~DFt_=BwQo)7U#X=m~nH&!f(VLy8A!3-Sz3!w_?aMf7 zs-fUj(CHLXpBe5RA#T57%d%>$dw7cWtFg4Pws#=}giNE8fkEAaz-Xb>@g2LZuFhuR z3;Gi1>p1&_;H{Djb#H9Z(vL@m-7Htnr*mI@!CizboI_&!upslvSmXdwU&cvr^r771 zo}5XdQ~-Mv(}(Y6#+pOozYpCTdA=yQCGBWvj2XmwWp*OX9jQIFo^CdP{yc{lY))!@ z_LF%O^yMB=PyK;)=sqhKUoswiPQgd_*e8JGTeQLZf|1Mz&<{3;dZ{l+{aDkE=s}w% z6gSyrW$eoCG;NMgt(tzATo5ji!r*?O_X#Ing90XMEBwFw)p<>}l{0;!Jw%~f`i+d1 zFoQJhJdt{#cdM^_WiWmo{elrt=L*b(D|ZxZGd8A9o?k_K@=i#1k8 zgT+a*N@(Q;Ji{^Kbsv+r zi@mylo-}dqXS9^{bmcj?N!65Xhx!{xwIQcGA$Ul5%TlH^fqQh!wl#M`z-Hh%`xarp zw7R6bA}WdM4{>A}P?V}4skf<~XIia&ms6N`-YFbD{)agWPjlj`oaFvz4$qmLOod~X zVIzg2x8Z%pIbnHqfl*ZZ%3jd**kSz$%}gDt{bA$ed&T&T$VKeo`wPgK(?PbOa+~%z3buQf?mS;4;k+1gL^ z&BUSzaL|K?AioG(N526&|Ir7>(!`6JpRYyPUfT}O0T`%h*Rc)0exmB-k-Vrh=Nl@x z9!CmT>7@*YaGE0QH{GEnG&iGkFA6|uK#Pb;WhrUu(WpW>H+;{@)2(&51aaS}pDA`k z-rDojZ2?1nG*irL)ZKD#3D=Pwn^-RcR|vz(%y@&h@NMav9@=%!UA@G4f0~s$iwVa+ zTV?pBnW*uYJ6lI9Ctkat_c=6OmH?vWK zE`BZCTO2UIV&%=R-89kM)tkYUVG7>Xt_;T&FRmd2c$Iwx##&*X#i@^)Wv3v73XBY z<4<*NhIz=>XLnV>ME`CdKy}_hry8%QR_YbE_|fM$(ef>ybF@QhtKJh;+GC)Pv_Ir) zhhk;)?k|q0-EJa~VRk#~S9NLB zN7R6RkT&>z*;J8mDm^Ffl3*vD;w|eKsI~?QMmnVIYBe~Cfd<(2ErNF`XFW2#ZLVGo zW;NUc`eU);)SR#UxTBNmWl-cj&l@AXm<-lJhLJ*R2F6ux0PjKdJwV^Q2XwZ4pB4+yQOQ4=I-_o|z-FzJxt)kE)-5FuT`dJN==9 z6LeyWJbi1GZiM-;k(}I-bzT2$p{hBlo(IzEHUl}8uVUv*y?CL!*7G?m_3XiVJnzk_ zQQ%G=CO@$vNujXNwu){|Dr9J|WeDYx%DpCt%E5k$Rb|C>u3maZ3d{|S_pJ}EUuv0A zP{Tl!<`|P!R&@!k2x4>Z(D7!|eq{~XWRed7*2xrS(cc-GYt5gX(5(llVh^AjUPI zRfa0rq@lvNy3GBB?LW0+~{D 
z^HbaEbL-tlPM2mJ(bBMEhSpTCWWelIG3DEDx!%;#{=#-xGqD}vx;X6n0g|{#VT5c1 zV+d^B>5>2BEihzDi?Q&E>82s|xqh_3JAeQ241COif8NrKK9ps1AiwW@gD$=Sxn}!h zdjM6VWIjxmtWkQ2n3v0Rd}p6YWuNs8ZBVt6^qZ9Htd}|shta;v|U2Sl**QYHn zUgYx#{GdOO6xDd0@>9yfJmL(etgTF|OhV-THHY82T^lgAhHX91>tqzuoUNqDSWp~S7&`zH)o)?Y9w#>gA zbEY{%m-^#fi$yOe9zLmawVBt;<5I@sUy!?gHQmHxQDpYqCf9q4`!vVUH|qoIA+hq| zX@l)CtJL*pgBK*4!xSgm`k9Ze)rmHQX14ZreXVZQYK^KsI@2El(pNGc=wH0*ctV$i z+_thXt5ihAbOGLVOkYa`uP7JC5r@6xQFKwFMH9Fs(4Lj2#>u!JDztm*s2=3(NQ;s{ zh~09_8G++cmQ_6hMz|RtAz$Y+=S8GpldU_h6TnIbO`pE;88xyobCMtKwIGiz#psPra@rO^*1@gv@6l!R$dbh?9sgD*5HS4ln&ykZ zAOna`C66?)d#lR$Wp%W<)XKM$!jrnNiKX>V-UXZW_7P;m+=H2Ul6J$E&Z3h+639#) zRS`Y$p_qDISayp?4&hU}%ia3P(H~W8BT7qQiww1j3RUe6#;{1suAh88q03xL}9s_Nwt0~q|%o=tZ zu5HGsUP5jkVn{q?#12z=KK{k|mK`lSE#*sh<#R+Be56G1Lww*JJ8KFv>@!BysB~yj zhL1m{Q5ycuYo&FE*T4fcYyNenD&jCYKivC4()_qf`j6Kq^#GGtXO5#I zLPmk<#>wULm@W9`#aJ^?$v2pTZRWbCkPok(7a3*%7-2kRtj5y(WwH{i%{9X^lArOU|q%0aU zOt;Z|@mokm9tBL(>BZk5zI~ymhn7%f^c%j&)}>ExWqZ^ZW`7z zhkA5AxHJNz$!CNowM+X|jUL@mXIW))!mdn!FI7u@FS(p^k@xMw?pQb8@W3&9TB^n}H>0gj(jX=PjdGEpt%r$3 zb1d_v1ILA@>_PYjbYe#V5e$>@+6~S5V8IQDXY4Xkwynf#f4Om@#;z}|@u>s(F7kxY zcbPDZ&#l&8xErEe87x(pqt#ouY1}fpEApkVwzOfFds6JF7Kv`G+flEsyVA$4`6m;n zQLlEBYk1-O{$9nCXYbx0t!!f8ktb-#A*+)0{XoUhA0Caq9n=~Pxx54JiaAAfHI>Ro ze{tyF;Pv9}n>fJ7k=u=6udz4n87gIpD|x;bHXF{^k(5b1+LO}Dgfv8hvnEKrvLPCV zMM(9i88RvbVhz3N7}mrIQ4YUicKqZbLvI*ojzH;)UrHaEUv+Fkq2n2P9YiLkPc4ar zGVPYp`y$c((y~=PgE8j!v6ncGoqm6GJw#?e%r9aR+)9T?w#JO0jRW+r+uxTsD`9Ns z?F4M=-<)e-XK2SG4~!U!T{3#(BKd|M-+VhAuftOIiq5jqS`s#S4LEwW8A+Tqd3x3+ z#tsT?>Z_0bc-^XXCT#wANg&Y=19?Uc1?P_#7d*Hgd6Ng#WBj+(%Rdee$wqKTKfU9M z|KW@O!wPss65fJxI83rJW>Fp_dawOTpffl6rNuJ3KDr>Tfi`4dOp|^+b0f&GiP%(k zR%}}}8!4f)nUZCboxqEq1yg#BEjwXUq`#t1!qf-Xuti+QV61hLQ7Z7+tB5n&i^yad zF)W&AXLP}!zT)03P4e$F0$GXZF65QVddDBN*Z1R9Irie7;Xu}gc8Hg8bFTeIcF(!} zYNhMu7PvKJRc}O^$4b%c)=7gx?C=J_<-8x6>ZHsVf83chN1<0S(ET}|;Mc)N7L%+c zUdwwcZhEb@X@}neAl9f{RvN0k-r-J6l>HLhVJC2%Ji2B8{(6({mKxRX`7uzCrrmPHfL}e)-Od z@fwP>?<^v*&POvN1DDk+ugYcL(+RBM6i~|4B8P 
z_0hd!6M+?D2aKP3^mS*DU5u(e4}Vp0eoq(VOn6pnP{>gjyQygV5dB>S)DTRSpyk3x zRidJR3qR4}%TjMAvocZv7=dN^;0#Pcgj6l1E8?d=j zClIDd-x`gHdgsj)%x7ohQG@T1tN}(gF*3Tp3W=KzPFb!@3M%tXT78-A#xyk=D0_Bxe0xh=f~+ zWZ_rF($WVqr&r8Zpo@v-=O2g<1`F8?%9cZSAZ!P(!@FdIKX?Qv^m5z$vA;i2>LEAi zY#T*m#Wv9wp~H~IPgA_*sjSoF?`!278~21*edh&GvO`Ow{CO2y(H9p5H@)wGJJCvz z@%JQ4l|)a6ZA~r3yx$+z5Al{9(ZwZI)GmbYRikGbteVv>!WU6WuWA?IX&D9cKNR;H z3(CKi<9=LjHn^B8m{y0qaDS_t#&o)A3Ug6s_n}0@)g7ZhH<}h*Sp6aVBrpHHTe`!q z@*uPe+bldfs$ja_iQ_)R()_C8fGp1sr#?tCWl@@js|D?Mlj_f~Wx`zBG|vAmX@dQy zf@x$7bgxswH43|?Rbf3Uqihpi+LH{U8!`M?zB{0s{>rM^Ov`?w-s<^r_1J(b-KPu} zS7Y^AL*(~U;JYtrV+b2hr8F;u(D7vAMch>N^?^9r_>hES9kecSnz7M?en(z6WRSsF zX1gYyb~fNLse%bZD;+Y%^mjEmOXQmB-@vvG_Ft_7_bhtmi)Y_=&K1vw6!!$t4oN|Z z)EhD^PQ)LUt&W|_3bJ!g(NUUR=>t_NA#2CONnb33T1a3K3 z)}Cl~H;YS+_4a^A`4i8Mo-r9&7wsGAMjD56u&10C!BpLH)S+x(w8Y>MJwnxJRIlqF zxpnw|mIn*J-!V^b@d~pMaUr8Rsd*wPY`R(sLSOzRMEIxY>QipA9j6M%x7{3b_&H!v zR8k!&N)*mZR_`)CGA?+yo@b$l85*EQSyn`Keh(aZR@I=gtF6Ga#-P_OVH3x*oR*(0 z*1E@&LsO;tlRIWl1Qp za7PczUa&j0ch<(Y`#)%`?Ys!dGK+L34!Zg&OBO(M{pVeiN%(>6GH9rEKdfHl2Oa}- zP_$5NHZ+7e(Pf3R_$1BXS8(By^w9KQoXXO}J=6G;(jMVsl9`XU+&^;chPRQtA$j)O zY}84cuJl}sC7dA7Ua~eR#C1J}%%Ey0|MLmGHC?H<6kT;}4woW*5D8fQm8zfoc&^|j zA0gp>78{>%Q?j4fYdt`iz(K4FPI?PeGWHIF84f~7p z9&0ktkq&PcsbPs|g*t{>Bw@d^C=aOaQok1bax zCJ)cXE;YUC1utvz9Pa^DxD0hnTx*aUiUfuL@-u{Rs?dP9!+uAD?;Ia&+m}8R@$0jF zgJ$oXfZ;s;1Ye`N;G7EiFW&C%V+lmxFL&XyP^Z<0-o-w_2w@(0H=vTGo%*-;#{)iY?_GPX<1di1DJ28$ z!P}>3l|$h`PglgTv9_g^z0c&xaCDNC&yWs*IUcsxVnI4?O+MrZ(yVev1Obj5{roMks%p1( zS6NJ&YpUd-iiV?ihsYOVjRC>*$T7Mn2+rKMp?M;i>i!9o-FNv;&cD0IFRE!ynw|0C z=2<$lHrc07D?q`&I5VPrW1~52cjaMKn&`x2u8BcA{oP}HH!QbEG%#W8T=?Vrc;k?C zT3uR#T3AkXQvRp5qi6wbRK<5GqF9~P0d`;Z$KoS@ODHA;8}>=3wkx3(bFS>lfo{y}Lwsv0-d zi{gP--yfkA?VSG>Ik^)Wc>ONVYrhp~9eyV_#cU>@E!)g_PtOhi#i=4EXqFXjJsO%; zvhw}vlQvQ*sf@#eE2SqIE+r&A77-=riK*+N>~xkp1r<52IIm+&R37LAb$pnZH5grW zpp%lgTCQm)zrTH~S^P^jY=8h_t&jyY^TGA4zJE4)U`o|nxA8>nQ{&k5T0gEuH6D(ek(Vnv?$Ge14@;L)cfN+d^=hBdqlbC11k%ocD<0hwe 
z6Qd{wCFGwpCMIyT)Ei@Wb>n1Ex&bd_^}XRRgx1~r`FL>J@2fFBK9gnjK^Juae=ERg z)%V^6kc>(Gvk9x{WO~OT)om0ylA>&_%&gZk3a-aiFX#s9(h`>@&B>NP@487jGAHUs z+pxD-MBQfBqQ%aIvu~BX^;jw+p&)G~>*wj|UF1~neM!KQsC%tO#n;Wt<*e8Y<)80` zw%-nvsy*!V0@-$B*xm&^P#8*v|It-jjBATXoYPG6CQM2MYQh#6%7iqnW{j-Z>2Z7_a zBaB^?+%;Eip!@U8V|x{EI727AO29fSZ+E<7m(!=~2h>{7wV0xLX(6NCvsAd*R1*8E zJ)l=rzwrnZ5r(0!frHJ&du&kyLLOrSoK8M+{V9x8TDq?PbG=rA%?!*)=uGV1C=&Uj zRrNNv`@XN_pkdD;HRma|WdBE02ESBr+@z zF0$J`uYZdV=}+k{WUpGFy>x-8?JT-#v+@|*vWzLolEt_RnZEtDkNh)v#(%NmdXWF_ z;1ev;LmU4k z{aGbpE*_WC*jkdZf~P^VU`#^r_B&kv?!P#gHhoOZ=WA2BOdzeMy7S z<#}<|_lNZ%5S=e=<8&AOigdab3$K1fCW5~uzcB@&o&Q z53}S%zQ72FDoP_V- zI_}6F0Ite(n>pn%{xH_n5x{TOyo1CZB}Vyoka|>l_lj=ySr{TRrh3;4tz3PUHhEhv zdJ6h19vhvcc^I1h&0NXxCx{H5Vn%cCTz=CdmSdAv?_@Jd%~bj1SKr}q%8JnV>_#Qm zb{ism)Un2QaY?kX#a*|6<2Bnj`==yyV2}C!OK#Y$C1$QlH!Y?$Y+e<|IjrdZ zR0lLPo2$=pzQ)%z7gE~PjjuZJ8>MA5m0CvKk}Bg&IgP12O(biOIjUD)D4y~*ibO)B zW9Ai!E1Wh@(yMQ-$kRy5RXP_Yw@3Mq^HmrXGzkixDeEA1`MTD|4W`IjttC1B@utvb zD^gOo-lV1=qKYX1=+|>rmA01iayhCH{{XCCt#^7H3eka%aa{GM$aPYU%GBk)*g$=XX| zyW3qo7VwR{X1aKEyM4cN98>nPOHyAc^Zuu!$zz?%fmrS_kzTpuZ7w~mQGI$>l-)jwM_SR-`JyVmiib2lp&BfNENq>Q9CMh`<@pntI!?OiUTCAWaQTX;8>+oxVicsTwB zJ&k^Kq@Ooh+tGEYwA+|wf>wrj+&pq2!B!mzHI63=tHYG)ql$E$&)mzV`@o)RqFe-G z4l!N}`!4>_*54L94{c$rUMyZHvQ3ZYFb3gAMjrUDVVdm~io0Nn{Qm$Kg{g&`O&?cA zv@69)GUlaluBVRG!t2s)SKG<1it70SoSrL{)~_P+T<%bP&2dFJ9WKkM<{l>0Jej`s zI_H}5e;52fipMJM5kWY~>sJ0S_}V=_NmdCIo|p%m`&K`~-v)Sp#MWoT9wOBQva!T^ zab?@A8>BIrZzXA48Rorj z#C|f+v|B$e+sR1LcvN6(!Tf3QW9?DAR-o+3#oG#A4~uT+Nu?F`CCm zx7h-X{?+69)z{kcNvnP#*RC(5V*9Y&c{R&iT^>b!_FsYetbOfS@wm);#LCes$lF+f z%~xeDnwOmYX~lmU_Y`b#CuA0u{$mhpO`!7P9<>#WWHxcdTe~}9&M8!BL9$~fDX5ue zCp6urmz~^Ff@|krbKP47I|FE=Y2%7fALUX5ew8dq^AAdL!Uajf^Gyh;br?JSjV+uz zfFq?g;!ig!ypvb#?V1=yX}9j!5a~Wt7^>UOY4;OjAXG@-FEyHrMU9eet<4e1x-vS` z9Q?=9lWr0peNF}3q$+l`q!c8zixTaTNBk?9(%3_{am8!i zYDxBT0qw^=c6qIfeM>9-X-i`%8D1=1%E#KggH=M8 zPBWfs)^+>EnOmP~^8IcwZy2e_uDS}uOH^lD!qdp8qa}_7c6vsI7MiI%$1?TWYa_!} z{$$17M%czj9qVG|@ug#))zMm3RvkF?E#7J@+1rI4)tc9mGj+{C(^FNs&3X2@vTVN} 
zsi`+$@OxEAZUeAh^*POFEeE)n>p){hI#L>x1GjCcE-p6rrcEVLmO{q2j7hkXNLjK? zRgmSn)7g$H9&55X7&mcOIpVEZM$=3H=Bgi*R`!EtN6x~!@bvlRc5-4W`#7DCh5SK# z;k$?~?5_R=mZhj$^KPNNtC#Ud?G>zQFcEA-ON=nib6yXp-ZX4bo+|9JV%Am% zjMp@BTqHG7IwhOpB&^x7u4~h46A5l4CEK5+WB&kZPh%l5%D`h7+;LD|N;byX{u-k! zo@mZ%(}o(1S1MW_T=5lWE4E9hc!KJF9m!uz);-1Zl|1uLxZzJZtmzwed95Q(ojbUq zB~o)%lPg_ZG>f#3D!yn23 zpy~FS4~8Mapr>k4`F5%?&T8eXVL%<~uBgR`UV^!CQcdv?1DMx$^^_Yf|b@ zEl+CC(`6C-r>~_yQlAwPP0R{YTxIc!d~6wNfo;rccv$1ox*8_r?SsIk&O*iMOn;X= z)DuaSs)$MGK-83^(p8+nMHEmW)U=ee6g9fiQ=6?en3z#T6og3&ZIMsCcN#9Fsuu>h zML4V0w~Yo0wks;M{h~T)Nk>L{y}kHk2Ozaab*L4L<%a_`#M$di6lL&!w3nK(Xy4~O zJ!^;ANgHz`cxwnGKQ&Djuzhiw;Qrn7XC|jzSdRYyt!SvRHgr1Xt+hxET$HmJi9PB| ztIxD&Cp8VcessuzzVQOLjCoe1-0O5GRy{Fy02qw@Y8!?LoN{XA)Mn2y4u}Bj?O68{ zcCINtX&dZ_%M#qu@A+y5dU7`EyAVqphBgMZVRI%9DB7;tC1dSNXykJ#bNq0)bbgqIMaIo%dbgWCn-0faIVLf9L z*}%nF$jkEkQ?{NeMo~`1c^5rVvBF!eN#qkxjZfiIkCvmDEJCmWjEZwYoyQcdAgXtF zJ4Xi+^ zdbeUDu-U#`RY?s^aUm^=fb&Yg*vcv9Kf;@|(=rl@D4;{BX(?zZovdybtP(HPO-9kO zedS^JSJLx*JluycXZ_(^Z;8Gh;yuDJhuBs;?C9h?S*b8hY-?H^nsly78v(4Kq*lf* z4&-#JPuaVgHkt&Y?mjmw%{a(gtu<7V0ILaV?b?=%=uu^M`ce$`sm;6Y`?S*wFHuOY zRz-tW^tktG7XI=9`qUeJs@|9X03i#0Ka77mnQS#3PQP9rUZE=e4RhBA=B~5FmhBzl zu^)K>{VSQ4-@Yr(j^^^%rty-wsbY(2^HP-{50;UBYSFVRilk7SwsTA6_oY>=?lM)7 z?&(tR-qjMVKLi@hltVbd>&dG4YP}>ymp$sdidR0BqIwcQ!xdWI)vxsk;~#V!nwnAm{fnJUhpEQmvRZd77OCOei0gMb&qiv>~X}fFalCnNtPVVHscfAH( z-Rc6SlXlTeMBjMQ?c3g?j!J&|dXmE))U6t~QAwIm+=>K3)~dYXo8_qLxvAb&IL#-R zn8hNB?1;a0MM78R>58ypUU5;B+}$dNWZ(P^=eL-#a3a1 zLJ1+bq&!opt=gHGw6vKNWKaj*qH9~on1Vi9<~7|WJJ`x9(M544?s`@{o5bG?=f2;L z0Xfbqn~&h#)RFy;9 zLdZ!Xv%ldMn$&*rF+~-fO4D6UBNQ>TMe0_n%b}@1bBZXb_S5cb7$Uo9sHc_vDt$ji zU$#QZIxwP&zim4QB0E1AY3$b4k`?D0Nv={$Uu@AuX9|)soKVKKQoS&02C?HKiYTqQ zn>pN38$eg6sMvZ@MN~-~l`^dqQjibZRVQAwQCClJGjD!an2xMHTPh zR;=)`?_*3T>S=a{;BiG2?4BSr9+h3AG*LuAMvntE8O~^;tbxIar*8RZqO$@vK4%oG zZhG#biV9wIk)j8V^drpPDGIip75&pJRv8 zk}Ew#b43*qHe_t2nvs{~qKerWl<#E3comf)%@kL3R)!NIr^Yu?MNx!zHm%dkfOg`u z?hg0jiYq0#)b=K|x7+g(Rvk-oMHQ27WOP)r9Nv{72bnQTQ(YACG?7?e>(1fT!e DmU=Dc literal 
0 HcmV?d00001 diff --git a/tests/tls_test/prep-tls-tests.py b/tests/tls_test/prep-tls-tests.py deleted file mode 100644 index 6e9409fc..00000000 --- a/tests/tls_test/prep-tls-tests.py +++ /dev/null @@ -1,238 +0,0 @@ -from cryptography import x509 -from cryptography.x509.oid import NameOID -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives.serialization import ( - Encoding, - PrivateFormat, - BestAvailableEncryption, - NoEncryption, -) -from cryptography.hazmat.backends import default_backend -import datetime -import os -import socket -import ssl -import time - - -def print_and_flush(message): - print(message, flush=True) - - -class TLSClient: - def __init__(self, ca_cert_path, client_cert_path, client_key_path, timeout=1800): - self.ca_cert_path = ca_cert_path - self.client_cert_path = client_cert_path - self.client_key_path = client_key_path - self.timeout = timeout - self.host = "localhost" - self.port = 43445 - - def create_connection(self): - context = ssl.create_default_context( - ssl.Purpose.SERVER_AUTH, cafile=self.ca_cert_path - ) - context.load_cert_chain( - certfile=self.client_cert_path, keyfile=self.client_key_path - ) - - start_time = time.time() - end_time = start_time + self.timeout - - while time.time() < end_time: - try: - with socket.create_connection( - (self.host, self.port), timeout=self.timeout - ) as sock: - with context.wrap_socket(sock, server_hostname=self.host) as ssock: - print_and_flush("Connection established.") - self.handle_connection(ssock) - return - except (ConnectionRefusedError, socket.timeout) as e: - time.sleep( - 0.1 - ) # wait a bit before retrying to avoid flooding with attempts - - elapsed_time = time.time() - start_time - print( - f"Connection attempts failed. Timed out after {elapsed_time:.2f} seconds." 
- ) - - def handle_connection(self, ssock): - try: - # Read data from server, if any - size_data = ssock.read(4) - recv_size = int.from_bytes(size_data, byteorder="little") - print_and_flush(f"Received size: {recv_size}") - - buffer = b"" - while len(buffer) < recv_size: - data = ssock.read(1024) - print_and_flush(f"Received data: {data}") - if data == "": - print_and_flush("socket connection broken") - break - buffer += data - - print_and_flush(f"Received from server: {buffer.decode()}") - - # Send response to the server - msg = b"client sends some random data" - send_size = len(msg) - ssock.write(send_size.to_bytes(4, byteorder="little")) - print_and_flush(f"Sent size: {send_size}") - - bytes_sent = 0 - while bytes_sent < send_size: - sent = ssock.write(msg[bytes_sent:]) - print_and_flush(f"Sent {sent} bytes to the server.") - if sent == 0: - print_and_flush("socket connection broken") - raise RuntimeError("socket connection broken") - bytes_sent += sent - print_and_flush("Sent response to the server.") - except Exception as e: - print_and_flush(f"Error during communication: {e}") - - -def generate_private_key(): - return rsa.generate_private_key( - public_exponent=65537, key_size=2048, backend=default_backend() - ) - - -def generate_ca_certificate(subject_name, private_key): - subject = issuer = x509.Name( - [ - x509.NameAttribute(NameOID.COUNTRY_NAME, "US"), - x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "Oregon"), - x509.NameAttribute(NameOID.LOCALITY_NAME, "Hillsboro"), - x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Intel Corporation"), - x509.NameAttribute(NameOID.COMMON_NAME, subject_name), - ] - ) - - certificate = ( - x509.CertificateBuilder() - .subject_name(subject) - .issuer_name(issuer) - .public_key(private_key.public_key()) - .serial_number(x509.random_serial_number()) - .not_valid_before(datetime.datetime.utcnow()) - .not_valid_after( - # Our certificate will be valid for 10 days - datetime.datetime.utcnow() - + datetime.timedelta(days=10) 
- ) - .add_extension( - x509.BasicConstraints(ca=True, path_length=None), - critical=True, - ) - .sign(private_key, hashes.SHA256(), default_backend()) - ) - - return certificate - - -def generate_signed_certificate( - subject_name, issuer_certificate, issuer_private_key, subject_private_key -): - subject = x509.Name( - [ - x509.NameAttribute(NameOID.COUNTRY_NAME, "US"), - x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "Oregon"), - x509.NameAttribute(NameOID.LOCALITY_NAME, "Hillsboro"), - x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Intel Corporation"), - x509.NameAttribute(NameOID.COMMON_NAME, subject_name), - ] - ) - - issuer = issuer_certificate.subject - - certificate = ( - x509.CertificateBuilder() - .subject_name(subject) - .issuer_name(issuer) - .public_key(subject_private_key.public_key()) - .serial_number(x509.random_serial_number()) - .not_valid_before(datetime.datetime.utcnow()) - .not_valid_after( - # Our certificate will be valid for 10 days - datetime.datetime.utcnow() - + datetime.timedelta(days=10) - ) - .add_extension( - x509.BasicConstraints(ca=False, path_length=None), - critical=True, - ) - .add_extension( - x509.SubjectAlternativeName([x509.DNSName(subject_name)]), - critical=False, - ) - .sign(issuer_private_key, hashes.SHA256(), default_backend()) - ) - - return certificate - - -def write_to_disk(directory, name, key, cert): - with open(os.path.join(directory, f"{name}_key.pem"), "wb") as f: - f.write(key.private_bytes(Encoding.PEM, PrivateFormat.PKCS8, NoEncryption())) - - with open(os.path.join(directory, f"{name}_cert.pem"), "wb") as f: - f.write(cert.public_bytes(Encoding.PEM)) - - -if __name__ == "__main__": - - ##################################################################################### - # GENERATE TRUSTED CERTS AND KEYS - ##################################################################################### - # Generate CA key and certificate - trusted_ca_key = generate_private_key() - trusted_ca_cert = 
generate_ca_certificate("ca.vdms.local", trusted_ca_key) - - # Generate server key and certificate signed by CA - server_key = generate_private_key() - server_cert = generate_signed_certificate( - "localhost", trusted_ca_cert, trusted_ca_key, server_key - ) - - # Generate client key and certificate signed by CA - trusted_client_key = generate_private_key() - trusted_client_cert = generate_signed_certificate( - "client.vdms.local", trusted_ca_cert, trusted_ca_key, trusted_client_key - ) - - # Write keys and certificates to disk - write_to_disk("/tmp", "trusted_ca", trusted_ca_key, trusted_ca_cert) - write_to_disk("/tmp", "trusted_server", server_key, server_cert) - write_to_disk("/tmp", "trusted_client", trusted_client_key, trusted_client_cert) - - ##################################################################################### - # GENERATE UNTRUSTED CERTS AND KEYS TO ENSURE UNTRUSTED CLIENT CERTS AREN'T ACCEPTED - ##################################################################################### - # Generate CA key and certificate - untrusted_ca_key = generate_private_key() - untrusted_ca_cert = generate_ca_certificate("ca.vdms.local", untrusted_ca_key) - - # Generate client key and certificate signed by CA - untrusted_client_key = generate_private_key() - untrusted_client_cert = generate_signed_certificate( - "client.vdms.local", untrusted_ca_cert, untrusted_ca_key, untrusted_client_key - ) - - # Write keys and certificates to disk - write_to_disk("/tmp", "untrusted_ca", untrusted_ca_key, untrusted_ca_cert) - write_to_disk( - "/tmp", "untrusted_client", untrusted_client_key, untrusted_client_cert - ) - - tls_client = TLSClient( - ca_cert_path="/tmp/trusted_ca_cert.pem", - client_cert_path="/tmp/trusted_client_cert.pem", - client_key_path="/tmp/trusted_client_key.pem", - timeout=1800, - ) - tls_client.create_connection() diff --git a/tests/python/prep.py b/tests/tls_test/prep_certs.py similarity index 99% rename from tests/python/prep.py rename to 
tests/tls_test/prep_certs.py index 0813243d..e5ec5e88 100644 --- a/tests/python/prep.py +++ b/tests/tls_test/prep_certs.py @@ -11,6 +11,9 @@ from cryptography.hazmat.backends import default_backend import datetime import os +import socket +import ssl +import time def generate_private_key(): diff --git a/tests/tls_test/run_tls_test_client.py b/tests/tls_test/run_tls_test_client.py new file mode 100644 index 00000000..3a8f0cda --- /dev/null +++ b/tests/tls_test/run_tls_test_client.py @@ -0,0 +1,85 @@ +import socket +import ssl +import time + + +def print_and_flush(message): + print(message, flush=True) + + +class TLSServer: + def __init__(self, ca_cert_path, server_cert_path, server_key_path): + self.ca_cert_path = ca_cert_path + self.server_cert_path = server_cert_path + self.server_key_path = server_key_path + self.host = "localhost" + self.port = 43446 + + def serve(self): + context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + context.load_cert_chain( + certfile=self.server_cert_path, keyfile=self.server_key_path + ) + context.load_verify_locations(cafile=self.ca_cert_path) + + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.bind((self.host, self.port)) + sock.listen(1) + + print_and_flush("Server is listening for connections...") + + loops = 1 + for _ in range(loops): + conn, addr = sock.accept() + with conn: + with context.wrap_socket(conn, server_side=True) as ssock: + print_and_flush(f"Connection established with {addr}") + self.handle_connection(ssock) + print_and_flush("Server is done listening for connections.") + return + + def handle_connection(self, ssock): + try: + # Read data from client, if any + size_data = ssock.read(4) + recv_size = int.from_bytes(size_data, byteorder="little") + print_and_flush(f"Received size: {recv_size}") + + buffer = b"" + while len(buffer) < recv_size: + data = ssock.read(1024) + print_and_flush(f"Received data: {data}") + if data == "": + print_and_flush("socket connection broken") + break + 
buffer += data + + print_and_flush(f"Received from client: {buffer.decode()}") + + # Send response to the client + msg = b"this library seems to work :)" + send_size = len(msg) + ssock.write(send_size.to_bytes(4, byteorder="little")) + print_and_flush(f"Sent size: {send_size}") + + bytes_sent = 0 + while bytes_sent < send_size: + sent = ssock.write(msg[bytes_sent:]) + print_and_flush(f"Sent {sent} bytes to the client.") + if sent == 0: + print_and_flush("socket connection broken") + raise RuntimeError("socket connection broken") + bytes_sent += sent + print_and_flush("Sent response to the client.") + except Exception as e: + print_and_flush(f"Error during communication: {e}") + + +if __name__ == "__main__": + + tls_client = TLSServer( + ca_cert_path="/tmp/trusted_ca_cert.pem", + server_cert_path="/tmp/trusted_server_cert.pem", + server_key_path="/tmp/trusted_server_key.pem", + ) + tls_client.serve() diff --git a/tests/tls_test/run_tls_test_server.py b/tests/tls_test/run_tls_test_server.py new file mode 100644 index 00000000..7ad420f4 --- /dev/null +++ b/tests/tls_test/run_tls_test_server.py @@ -0,0 +1,94 @@ +import socket +import ssl +import time + + +def print_and_flush(message): + print(message, flush=True) + + +class TLSClient: + def __init__(self, ca_cert_path, client_cert_path, client_key_path, timeout=1800): + self.ca_cert_path = ca_cert_path + self.client_cert_path = client_cert_path + self.client_key_path = client_key_path + self.timeout = timeout + self.host = "localhost" + self.port = 43445 + + def create_connection(self): + context = ssl.create_default_context( + ssl.Purpose.SERVER_AUTH, cafile=self.ca_cert_path + ) + context.load_cert_chain( + certfile=self.client_cert_path, keyfile=self.client_key_path + ) + + start_time = time.time() + end_time = start_time + self.timeout + + while time.time() < end_time: + try: + with socket.create_connection( + (self.host, self.port), timeout=self.timeout + ) as sock: + with context.wrap_socket(sock, 
server_hostname=self.host) as ssock: + print_and_flush("Connection established.") + self.handle_connection(ssock) + return + except (ConnectionRefusedError, socket.timeout) as e: + time.sleep( + 0.1 + ) # wait a bit before retrying to avoid flooding with attempts + + elapsed_time = time.time() - start_time + print( + f"Connection attempts failed. Timed out after {elapsed_time:.2f} seconds." + ) + + def handle_connection(self, ssock): + try: + # Read data from server, if any + size_data = ssock.read(4) + recv_size = int.from_bytes(size_data, byteorder="little") + print_and_flush(f"Received size: {recv_size}") + + buffer = b"" + while len(buffer) < recv_size: + data = ssock.read(1024) + print_and_flush(f"Received data: {data}") + if data == "": + print_and_flush("socket connection broken") + break + buffer += data + + print_and_flush(f"Received from server: {buffer.decode()}") + + # Send response to the server + msg = b"client sends some random data" + send_size = len(msg) + ssock.write(send_size.to_bytes(4, byteorder="little")) + print_and_flush(f"Sent size: {send_size}") + + bytes_sent = 0 + while bytes_sent < send_size: + sent = ssock.write(msg[bytes_sent:]) + print_and_flush(f"Sent {sent} bytes to the server.") + if sent == 0: + print_and_flush("socket connection broken") + raise RuntimeError("socket connection broken") + bytes_sent += sent + print_and_flush("Sent response to the server.") + except Exception as e: + print_and_flush(f"Error during communication: {e}") + + +if __name__ == "__main__": + + tls_client = TLSClient( + ca_cert_path="/tmp/trusted_ca_cert.pem", + client_cert_path="/tmp/trusted_client_cert.pem", + client_key_path="/tmp/trusted_client_key.pem", + timeout=1800, + ) + tls_client.create_connection() diff --git a/tests/udf_test/functions/caption.py b/tests/udf_test/functions/caption.py index c40f1ba4..50486e4a 100644 --- a/tests/udf_test/functions/caption.py +++ b/tests/udf_test/functions/caption.py @@ -33,4 +33,4 @@ def run(settings, message, 
input_params): video.writeFrame(frame) - return (time.time() - t1), opfilename + return opfilename diff --git a/tests/udf_test/functions/flip.py b/tests/udf_test/functions/flip.py index 59ee4f35..beb2644f 100644 --- a/tests/udf_test/functions/flip.py +++ b/tests/udf_test/functions/flip.py @@ -16,4 +16,4 @@ def run(settings, message, input_params): cv2.imwrite(opfilename, img) - return (time.time() - t1), opfilename + return opfilename diff --git a/tests/udf_test/functions/metadata.py b/tests/udf_test/functions/metadata.py new file mode 100644 index 00000000..4f9a3eb8 --- /dev/null +++ b/tests/udf_test/functions/metadata.py @@ -0,0 +1,116 @@ +import cv2 +import numpy as np +from datetime import datetime +from collections import deque +import skvideo.io +import imutils +import time +import json + +face_cascade = cv2.CascadeClassifier( + # This file is available from OpenCV 'data' directory at + # https://github.com/opencv/opencv/blob/4.x/data/haarcascades/haarcascade_frontalface_default.xml + "../../user_defined_operations/functions/files/haarcascade_frontalface_default.xml" +) + + +def facedetectbbox(frame): + global face_cascade + gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) + faces = face_cascade.detectMultiScale(gray, 1.1, 4) + return faces + + +def run(settings, message, input_params): + ipfilename = message + format = message.strip().split(".")[-1] + + if input_params["media_type"] == "video": + + vs = cv2.VideoCapture(ipfilename) + frameNum = 1 + metadata = {} + while True: + (grabbed, frame) = vs.read() + if not grabbed: + print("[INFO] no frame read from stream - exiting") + break + + if input_params["otype"] == "face": + faces = facedetectbbox(frame) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]), + "y": int(face[1]), + "height": int(face[2]), + "width": int(face[3]), + "object": "face", + "object_det": {"emotion": "happy", "age": 30}, + } + + metadata[frameNum] = {"frameId": frameNum, "bbox": tdict} + frameNum += 1 + + if frameNum 
== 3: + break + else: + faces = facedetectbbox(frame) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]) + 3, + "y": int(face[1]) + 5, + "height": int(face[2]) + 10, + "width": int(face[3]) + 30, + "object": "car", + "object_det": {"color": "red"}, + } + + metadata[frameNum] = {"frameId": frameNum, "bbox": tdict} + frameNum += 1 + + if frameNum == 3: + break + + response = {"opFile": ipfilename, "metadata": metadata} + r = json.dumps(response) + print(response) + print(r) + return r + + else: + tdict = {} + img = cv2.imread(ipfilename) + if input_params["otype"] == "face": + faces = facedetectbbox(img) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]), + "y": int(face[1]), + "height": int(face[2]), + "width": int(face[3]), + "object": "face", + "object_det": {"emotion": "happy", "age": 30}, + } + else: + faces = facedetectbbox(img) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]) + 3, + "y": int(face[1]) + 5, + "height": int(face[2]) + 10, + "width": int(face[3]) + 30, + "object": "car", + "object_det": {"color": "red"}, + } + + response = {"opFile": ipfilename, "metadata": tdict} + + r = json.dumps(response) + print(response) + print(r) + + return r diff --git a/tests/udf_test/metadata_image.jpg b/tests/udf_test/metadata_image.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1c5f91e94dd9b6911d3aaf3c271a3ee7eaec0a52 GIT binary patch literal 20678 zcmbSxbyOT*w-2Hk*cDKA`UJtE{@mT zhVyp@=M@g#y?g)cckTY&j{g83|Neb^B0@rf2P8x!B*a9-#H3`Dk4VWV$cTv_J$Xd& zmNJUCaO7;KT{{FOF=^@b*Y8qO)=jqnovWL>ho_gfPtfP!kkGJjL|lAAVp8(g6l8Wz zZeD&tVNr2KWfi8nrnauWwXMCQv#YzOcXVug0y{bNYkFyU<@f5^`o`wg;nDHQ>Dl?k z<)43W;o{)^Pgwtj?7!inxWjet{(Zdrg#X~ez2|vncog^Xp9v69%4rdr!XL8;20oyY zkIgD?dB`fHeE=|X86kSYCcO0g@E>UZf$aYd*r)$5Wd8;1f8mwkso5MZFy; zstO%ZpHs9`uM4$GP4Bvgv>S-3OL{|blDoiN#|Ifk4z{dGksvqUR~j0^1H&Jz<&RY@ zumH?nD5xGZcG-PeP(9kx8^;O=rLs2rsq5BqWQg6svSZ{jl-ugVR-`QJCvHSr_g$ae 
zW2ZQoZLMpn(9{7x4vMC=aG??bD$aK*C(!MVwKl80zmB+B(N;!^AVgnWfd^&wyZ||< zf$#b&Ib+sNNCocZQ@zX2yx$qH)eU7L-xgRNB)EY~sr#dnwjuO;{k@S5sbT}H#YZ%c z{1j~EgQA5XS-R7&UYGhOqN`V_xrTUr<3on9$O>=uc*8Nhx}|ec+87pbY)$hTXU)(Hsz= z<5 zu@VZ*!#6bzL$nZR4HV_;gAJ#!2W_u`Rqg4kmHtrqg2<_&LUP()-7JiAGC-ijqo80q1|S7et)dU+`s8 zG{4z!W+RA*xa8xtXQq3cTfi#9MkOM0X~Fe`eP4HbT*KlpQT zt}TtH*br|h8^y)j91j@A``JuYYov6Cb{w|j==F2*Vq~H*ol9D8!9Gq|j7qs=@~X5e zGTSY#Mpf@+PudNzcw`s4{63bUd zzwDI~QB$Y{Q>Iha#$iK1R|_pi6ZLei^Xp@w4|A38B)Ov+m=Fj|FY{D&((d|P^-cip zroO~NnCLiZ0UH__IN;z zYDOPVhgU8Cv^>knc@__nq`p014J@E3q3D;STS?F~ORLjcO^_9Z8W^q$ch8b(3Rf!A zIs@tL>{s3E@J{lz}FMehA~iyTqFQ6buQ zCU-TqinYyso#mKlny$T~`=K8^d(AiPk`v4dt04`GpP;)2|aqGverV1uniUZa9c77jvH zC^oxv>uC9Y@WEPl97s=pzb#r#1)PX{%vRG!fe{ANomo=>6kVz-U6=>@8`))hcJJma z8V5L%AKbEDD-A-KMGjvif`w0J=QYKH&;K?GSlM->j}3zXeRVUs`)o6t{yCR3P#y zG^ox<#KiMYtGM%FuFSqUs4DycTe)~qZ7d1C=q~;pquqp3v5BazQM7IWIeDVALl*kJ z&{cKT>yK(TKgwJdwP#!eP2Cd zf@OYPYxZTjEHJN*4x95$t~VvE>s_Zxt~b()4M~eyzwjNaD(MzBmQro8tcPPB{l$@} zv4yeca7>qd6cnZR#WfLa9V}t+wSD|<77#-D7@O66aGF=@T`!qA*5vUg{RFDZf98y0 zfAJD6Hp$QD^!2=F1uxG{CxuT$0K;EynDpR>D8_77a@M*}Lnblrd0|wN|G;`prx zg)l}#Ejs9W zp_B9?Cz=~<8m*}s`b;18+TtmogYso@lXxyHCN+ubmle(L@se3uJVj}tx3azP(OhrV znYgY4kRt4hVURIJc&bI|#A9XGx2e%&cC5}q#uWMLJr?mSMJgQq3Ow=gqgfoFrZ@o? 
zPo8OX%2R?W^GNyJ~nvvjYn+SoMB|Y6wcEx^MDKuQAmgPU>_40COX~=A5rC~VoWAP}Fv~*iK z7S+(YqT&W?_Bh_@uEz>gkj4+3cYkP{e>YAloCKXHg21UKRVRL+Q#DMOb5N4ZgrRzQ z{M&|Tnn_oIO_NmfrI8$Haq#qS%P1E!AUCXkE04^y`cT1~sEzJp$uxR;PU)x3B-7V{ zw<({^Gt|`_^CmeE2dmy&gEuS=U4Vq21l*yU2tHwPI;o$pBQ{9QMH38}tVs21u=!(Q zQ#07|Rhagdwb9-zN_HU2u@XH^VB!g2T7L^W{V6y_r2N+XNl|u>V}|`JH-}k|++tOp z>MALicFl`f0X$+|8po%QT$l;-mBsx)Oz1nG>Q?!M^l1omMrOddfM^dVB5+wYXUChSid#B;gXYw_s)fa^ED>UHC6aeF&q zyo$svt4Noi$3957ho|pOGzN1BzSurN=@~jO zJ^juim2*)(;Hy=flP$(pHfS=|+UG>$`%zn?L0xc*ElOF+M=yPMJX3hI+*7)Ns54m; zd3Y`yN%#%Ls*|#%fDjaPbW+U&jdG`#3PF>hKK#8x$i7q!qu+SQozFQe_Wfs#?|*H- z?Qt6zpXeM*2|OB)KPfcS_~0U7td>aI%Tw$~%r`r3KV#RsiO%3+ui+M$uTpnoO~@bB z5!KC~MVia&)kH-iE!-MxUlxI7x!WXj6Q^8;*GVps$TY zNO2LYcGO@!$jD9jIJQHRnOHZ+?aD0*{Ua5qZU0t;*p~Rq4ZhO#nM*W2!TxAIL zlp{ubEe~26>1U@f@EY;@q;1evFUNx=j@KTpdl#gwJO5XJ)?WTdi7+w?{)P{Qa(=6l z>@y#>$_wc<41ExhB+YGT_~Yqnyr&~fO|$U|{982)P@}71RJ>Lm^=rmU_Eq(;>Em9! zdC@~3yy<%Sk{Aq`uCLkmX=^JSej))i#_hcx21wteYuZfRrXm-MH}6}O+DPQ5jm8vR zNdigTT{pH}t;@m!_6lvAd~=n8?x7aX(QFhj&V~7X8)SpvD#;M zm8xt+H8CP-nb1Adhd1_EG5cHFnf;!d>7x{oR_;u$O@RL#zK03vuevvbc;qSJr|85I zf?t~obBzAQ!}^>2gE9!%Q6Fmt=8au*JfDAFzZDQty!Glt}ovl<2w% zt@QFx6AQ!RKS3*9Nmr*bu}0n_%+ocNFxHmWcOavQ#th>!ds;O&IFiVLYc7xt4=o%E&3eWZ6xf`@bNcRxPm8+$?o(v<;>IJ zXiuiI(t~g17*eAN&gx>3dr*|~iGP{4xL2AkR~yn~H?|?H&W?->jz?K)G2SZbfu z2@lAVS1Hx`AhaAL1yuM#gh0Y{u6)>*EiREss5sI*uz**FMvdgGNa{P6B)#Ht+p__i zm#zja{&o$xn?}dsn{N{*QasYCCi2QeJjpl!%wPSuR62y5PxHS}mKCvY2`U6?4#kk*i76Z2%vltXIP@J{@m zS8g{kC6fNwbpldrr=My66T#IIh9h09&OMj&`SG0%NQHbzv*J!rY zw5^_e0jR}vPh8D$52!d-@2a~p$4UlNoyRJg1|mY9ST!e0bY9wR**nGe-A^O>J*iP= z$pmG%;X1%f#Qa&EGm4KfSI()4!%pgi!V2xrOCJh96qbyU!72)@OKOT0OxtmSRiEhX z0j=oUk+QsICk9_c91^MI((#73bGx#Vl}$j}MBa!9(65HgrvcDRozoWX3Oq8!vPAgkHeuJ+bCWj8L6*gWrc&<3QG`4hH_T6%edk0zk9Y}lW z``*>W!F{>tls177NF#Lv)EF{RhQw$39LYMrq& zUF|8d(B;z@1|t&z;{`rHyQL6Ua~CDFvDfIdg=KK%kkKS*8BEqDHH0H$VtseH$6PyN zGEZhm8)qQXB_8n6a>Ydos<!u;5Nl!xvh}n%g;6r`u48e>dEYc58XhuP9xp8T$P@XCT(m&EmP2u_@A1m``4vdN_nEKU@!;^%Ag%J!F_6s5vf 
z8r=!yvT{Zj5mzGaI8@dszbYwuFRIIy+Pv2-9Dm1B>!dS2pA>N8x!0f&#rnW`&A z4^Q}bnYyyC=|{{WRb2XKt@puEYHcRWqyGA><`XfX)|z&YD~U|;L8UpuSfjbWI0!zB zTAXJl|F+SG%B=w%L#7~ac)rq+5|3HkoZj-Z1$SwwU$N%>3zqGUnF7Ba%I`R_=OvSF z3&B3_jiwfX^wBHGRxnt4NIrv38zO~E{-q6m4a*wcXQ_=L3h)B{;%k0lb55jd$bC2t z5h(gVBhrxjJ@dY66_&5j!sZ}O%&;Ueo_j@X*F0x2<7DZ!3H`V| zVA{q&^$@w1vOMmPTb)Ka#Vv<^_0TX0%H-f9h?qLQVUc5i*PfJ2#@$prk=FnUE>n;0 zAtx>b)(OjL3zS~iTQ*FViyg~YICq@z&+OK;Wgxg^F`&LmlHDNy@ny~EgC%XHZ-5D# zTbyxzRz>}=A;5qh{L^_#sco<3YfHoUDh6;k)GCI8Rp1OQKju%}eJ|x}B$-?pg89LW zL2EIE3SJqoKZ%`d!$jpbL@Gp)$u{GN{FQ@ril<2Tjj_YZTFO ztZ_5S>iPX>SWUItm9$B`Wc2B+v zDr*ktQKP_7)!$1D=w0WtmElxlV*8rA;NNWLg$hFHcMdit^X6X;_OG66CV31hBp`*k@BtuqQo=H9Ca|O4>r#voEZb z?!NVxbMZ+?Jxiw6x|vCynU_6fXSAkcADCedw1j5q^Aqhx-Q~RPErNX*xsxH#@&_=z zBvXf>q=+|Ow@2U*D32t>wl%238af2p_5_XC{_zg>lvo^}h*Rx^%PZHu^Ktw-01t4o zlQ}smH9r)tNhT8IPMa7C9?nM{LfSlBxV+|f2(%XN)#Wf*{2I|*QTpXidXU~6n>*8S zU+!LK9nBWp__c(I{wnNS<(T#(R`8NnNmPDf=g7Nf;mOTIm>m4>Xgzga>I(&Msv5(g zanR;%$$(C6$1H8Jhj@yamsTX_ZfcC2*_-R4>iWKqIG$$*gW{!WOqcMp#UBcWp}uFb2rAvQ5I4a?>0H6-E;lSQgoE!u3rl85v# zxGw)!Weslnt9LwgP%v=ADTXeObvJ5?E@TImg2oq70j3vTevkbW#F&v+dh4?itz$r})=auqnu zH6Vj$a0$K1!sB@;0j?cfEnS@Onb%CFDY8_kkZ;%JJ=QJtnFx5H+g?N?tfk;X*!SNn zTVyk8%IciovOMWBu;vWiedmG8CAvu$$6qp`>c;w`f|b(RSgV*eF8b=t2lzu*U4L;< z`owM=!P!Ft!UOEmq4zRxzOh#FHUI^Zjv*QLo{jX11S zf2QP~J3V|Ppc6gOzj4cA7$z@3^i!`i-g~4>lb~^WJk7Gm*;C;mn~b5j6mr)e=U3Ga z>P8?dSB_r?w9=#8w zI0INMT(GXg#@Gvi1+f@Qyek{!4xAM3(Y;;M?>Pn3TU0(dtnny~K&73PMIV;3|3tVe zN`H-pWi*%DG{0|2mTh%4{xZHE8&u4_3+mb$N!E!q_B*Pyjs||6c*Y8s2G$d@MN{j> zkW7SgKQb$!kD+4pY1NCQf};t8W}Df&ZM_Z(5ahlESs)Ih8gKcvmmmxwbfw<%}vMJ)+`; z{;}@#8{N^^)zjiqJ8gO^V=67WHU_c*>!|I1O#@HdP*3tPn*6=b$2F$Bjyg8_LBn` z?EUq`%B0j~Y~~oLEFH)y))w5#$pWK+^ZtaJ=C_nXq#tKI=DRTE!zXCY3E?37q*1`) zch<|v@7K(Y`k(=_cbDsaWC7IonssRTJ-TXwYb5M%2!Nk__1u?3w3VQfwZ;|oR}-{Z zgRtc-UV}6E)LPlAN{Z3#(tD+DQC9*o4Urt!k$(@${acK}|gG)4+ z#+d6z&Yq&u4fl~9@%xZ+^3N+@ez7f=^Re4F$)VDl>>{!AANu`D>LZ=%I&{ocN!9v4 zlg<@KEk6{5u^8r8enHz+l3pZwOJSqa(?`~olbDlNL3~0Ta%-*~Dh(c7lCz(HU;T|H 
zWs8&|C)GG_7NG0y1ar@NE@2fBo-B$~A4hXT4@)3#i%HO`c_zrI_Afp43AzSrnYGI) zaErilwfdC$s0o72n7n+e$Pu&4uYWvcFpQGyfwR6U9WRhH*HyhdqU_cq3V>m+|Dx0X zNvT~E3eI4bplmmV2tu zrRc|6sPd#}iI-)Uz~Cc(`7Ywuf&2NHTeTop=S(flEhU~&OweA(Wb4=OSM&3WkB2m- z3i6H(Up*$^28{g0VJB$j2N0|0%(Q9h`lEULeM1>143BXp0Y;L>Y+x}D$`I&ymA&Er{7~0$ zm0FvXd6{jk%8Sp~%6d1gS|;W&boY6_w-9?QZll|JP~_zEgC8sTs1<%%C3AOs?16aa z!lr&R=}r1+PuUYQ?RiQ$Qw~DFt_=BwQo)7U#X=m~nH&!f(VLy8A!3-Sz3!w_?aMf7 zs-fUj(CHLXpBe5RA#T57%d%>$dw7cWtFg4Pws#=}giNE8fkEAaz-Xb>@g2LZuFhuR z3;Gi1>p1&_;H{Djb#H9Z(vL@m-7Htnr*mI@!CizboI_&!upslvSmXdwU&cvr^r771 zo}5XdQ~-Mv(}(Y6#+pOozYpCTdA=yQCGBWvj2XmwWp*OX9jQIFo^CdP{yc{lY))!@ z_LF%O^yMB=PyK;)=sqhKUoswiPQgd_*e8JGTeQLZf|1Mz&<{3;dZ{l+{aDkE=s}w% z6gSyrW$eoCG;NMgt(tzATo5ji!r*?O_X#Ing90XMEBwFw)p<>}l{0;!Jw%~f`i+d1 zFoQJhJdt{#cdM^_WiWmo{elrt=L*b(D|ZxZGd8A9o?k_K@=i#1k8 zgT+a*N@(Q;Ji{^Kbsv+r zi@mylo-}dqXS9^{bmcj?N!65Xhx!{xwIQcGA$Ul5%TlH^fqQh!wl#M`z-Hh%`xarp zw7R6bA}WdM4{>A}P?V}4skf<~XIia&ms6N`-YFbD{)agWPjlj`oaFvz4$qmLOod~X zVIzg2x8Z%pIbnHqfl*ZZ%3jd**kSz$%}gDt{bA$ed&T&T$VKeo`wPgK(?PbOa+~%z3buQf?mS;4;k+1gL^ z&BUSzaL|K?AioG(N526&|Ir7>(!`6JpRYyPUfT}O0T`%h*Rc)0exmB-k-Vrh=Nl@x z9!CmT>7@*YaGE0QH{GEnG&iGkFA6|uK#Pb;WhrUu(WpW>H+;{@)2(&51aaS}pDA`k z-rDojZ2?1nG*irL)ZKD#3D=Pwn^-RcR|vz(%y@&h@NMav9@=%!UA@G4f0~s$iwVa+ zTV?pBnW*uYJ6lI9Ctkat_c=6OmH?vWK zE`BZCTO2UIV&%=R-89kM)tkYUVG7>Xt_;T&FRmd2c$Iwx##&*X#i@^)Wv3v73XBY z<4<*NhIz=>XLnV>ME`CdKy}_hry8%QR_YbE_|fM$(ef>ybF@QhtKJh;+GC)Pv_Ir) zhhk;)?k|q0-EJa~VRk#~S9NLB zN7R6RkT&>z*;J8mDm^Ffl3*vD;w|eKsI~?QMmnVIYBe~Cfd<(2ErNF`XFW2#ZLVGo zW;NUc`eU);)SR#UxTBNmWl-cj&l@AXm<-lJhLJ*R2F6ux0PjKdJwV^Q2XwZ4pB4+yQOQ4=I-_o|z-FzJxt)kE)-5FuT`dJN==9 z6LeyWJbi1GZiM-;k(}I-bzT2$p{hBlo(IzEHUl}8uVUv*y?CL!*7G?m_3XiVJnzk_ zQQ%G=CO@$vNujXNwu){|Dr9J|WeDYx%DpCt%E5k$Rb|C>u3maZ3d{|S_pJ}EUuv0A zP{Tl!<`|P!R&@!k2x4>Z(D7!|eq{~XWRed7*2xrS(cc-GYt5gX(5(llVh^AjUPI zRfa0rq@lvNy3GBB?LW0+~{D z^HbaEbL-tlPM2mJ(bBMEhSpTCWWelIG3DEDx!%;#{=#-xGqD}vx;X6n0g|{#VT5c1 zV+d^B>5>2BEihzDi?Q&E>82s|xqh_3JAeQ241COif8NrKK9ps1AiwW@gD$=Sxn}!h 
zdjM6VWIjxmtWkQ2n3v0Rd}p6YWuNs8ZBVt6^qZ9Htd}|shta;v|U2Sl**QYHn zUgYx#{GdOO6xDd0@>9yfJmL(etgTF|OhV-THHY82T^lgAhHX91>tqzuoUNqDSWp~S7&`zH)o)?Y9w#>gA zbEY{%m-^#fi$yOe9zLmawVBt;<5I@sUy!?gHQmHxQDpYqCf9q4`!vVUH|qoIA+hq| zX@l)CtJL*pgBK*4!xSgm`k9Ze)rmHQX14ZreXVZQYK^KsI@2El(pNGc=wH0*ctV$i z+_thXt5ihAbOGLVOkYa`uP7JC5r@6xQFKwFMH9Fs(4Lj2#>u!JDztm*s2=3(NQ;s{ zh~09_8G++cmQ_6hMz|RtAz$Y+=S8GpldU_h6TnIbO`pE;88xyobCMtKwIGiz#psPra@rO^*1@gv@6l!R$dbh?9sgD*5HS4ln&ykZ zAOna`C66?)d#lR$Wp%W<)XKM$!jrnNiKX>V-UXZW_7P;m+=H2Ul6J$E&Z3h+639#) zRS`Y$p_qDISayp?4&hU}%ia3P(H~W8BT7qQiww1j3RUe6#;{1suAh88q03xL}9s_Nwt0~q|%o=tZ zu5HGsUP5jkVn{q?#12z=KK{k|mK`lSE#*sh<#R+Be56G1Lww*JJ8KFv>@!BysB~yj zhL1m{Q5ycuYo&FE*T4fcYyNenD&jCYKivC4()_qf`j6Kq^#GGtXO5#I zLPmk<#>wULm@W9`#aJ^?$v2pTZRWbCkPok(7a3*%7-2kRtj5y(WwH{i%{9X^lArOU|q%0aU zOt;Z|@mokm9tBL(>BZk5zI~ymhn7%f^c%j&)}>ExWqZ^ZW`7z zhkA5AxHJNz$!CNowM+X|jUL@mXIW))!mdn!FI7u@FS(p^k@xMw?pQb8@W3&9TB^n}H>0gj(jX=PjdGEpt%r$3 zb1d_v1ILA@>_PYjbYe#V5e$>@+6~S5V8IQDXY4Xkwynf#f4Om@#;z}|@u>s(F7kxY zcbPDZ&#l&8xErEe87x(pqt#ouY1}fpEApkVwzOfFds6JF7Kv`G+flEsyVA$4`6m;n zQLlEBYk1-O{$9nCXYbx0t!!f8ktb-#A*+)0{XoUhA0Caq9n=~Pxx54JiaAAfHI>Ro ze{tyF;Pv9}n>fJ7k=u=6udz4n87gIpD|x;bHXF{^k(5b1+LO}Dgfv8hvnEKrvLPCV zMM(9i88RvbVhz3N7}mrIQ4YUicKqZbLvI*ojzH;)UrHaEUv+Fkq2n2P9YiLkPc4ar zGVPYp`y$c((y~=PgE8j!v6ncGoqm6GJw#?e%r9aR+)9T?w#JO0jRW+r+uxTsD`9Ns z?F4M=-<)e-XK2SG4~!U!T{3#(BKd|M-+VhAuftOIiq5jqS`s#S4LEwW8A+Tqd3x3+ z#tsT?>Z_0bc-^XXCT#wANg&Y=19?Uc1?P_#7d*Hgd6Ng#WBj+(%Rdee$wqKTKfU9M z|KW@O!wPss65fJxI83rJW>Fp_dawOTpffl6rNuJ3KDr>Tfi`4dOp|^+b0f&GiP%(k zR%}}}8!4f)nUZCboxqEq1yg#BEjwXUq`#t1!qf-Xuti+QV61hLQ7Z7+tB5n&i^yad zF)W&AXLP}!zT)03P4e$F0$GXZF65QVddDBN*Z1R9Irie7;Xu}gc8Hg8bFTeIcF(!} zYNhMu7PvKJRc}O^$4b%c)=7gx?C=J_<-8x6>ZHsVf83chN1<0S(ET}|;Mc)N7L%+c zUdwwcZhEb@X@}neAl9f{RvN0k-r-J6l>HLhVJC2%Ji2B8{(6({mKxRX`7uzCrrmPHfL}e)-Od z@fwP>?<^v*&POvN1DDk+ugYcL(+RBM6i~|4B8P z_0hd!6M+?D2aKP3^mS*DU5u(e4}Vp0eoq(VOn6pnP{>gjyQygV5dB>S)DTRSpyk3x zRidJR3qR4}%TjMAvocZv7=dN^;0#Pcgj6l1E8?d=j 
zClIDd-x`gHdgsj)%x7ohQG@T1tN}(gF*3Tp3W=KzPFb!@3M%tXT78-A#xyk=D0_Bxe0xh=f~+ zWZ_rF($WVqr&r8Zpo@v-=O2g<1`F8?%9cZSAZ!P(!@FdIKX?Qv^m5z$vA;i2>LEAi zY#T*m#Wv9wp~H~IPgA_*sjSoF?`!278~21*edh&GvO`Ow{CO2y(H9p5H@)wGJJCvz z@%JQ4l|)a6ZA~r3yx$+z5Al{9(ZwZI)GmbYRikGbteVv>!WU6WuWA?IX&D9cKNR;H z3(CKi<9=LjHn^B8m{y0qaDS_t#&o)A3Ug6s_n}0@)g7ZhH<}h*Sp6aVBrpHHTe`!q z@*uPe+bldfs$ja_iQ_)R()_C8fGp1sr#?tCWl@@js|D?Mlj_f~Wx`zBG|vAmX@dQy zf@x$7bgxswH43|?Rbf3Uqihpi+LH{U8!`M?zB{0s{>rM^Ov`?w-s<^r_1J(b-KPu} zS7Y^AL*(~U;JYtrV+b2hr8F;u(D7vAMch>N^?^9r_>hES9kecSnz7M?en(z6WRSsF zX1gYyb~fNLse%bZD;+Y%^mjEmOXQmB-@vvG_Ft_7_bhtmi)Y_=&K1vw6!!$t4oN|Z z)EhD^PQ)LUt&W|_3bJ!g(NUUR=>t_NA#2CONnb33T1a3K3 z)}Cl~H;YS+_4a^A`4i8Mo-r9&7wsGAMjD56u&10C!BpLH)S+x(w8Y>MJwnxJRIlqF zxpnw|mIn*J-!V^b@d~pMaUr8Rsd*wPY`R(sLSOzRMEIxY>QipA9j6M%x7{3b_&H!v zR8k!&N)*mZR_`)CGA?+yo@b$l85*EQSyn`Keh(aZR@I=gtF6Ga#-P_OVH3x*oR*(0 z*1E@&LsO;tlRIWl1Qp za7PczUa&j0ch<(Y`#)%`?Ys!dGK+L34!Zg&OBO(M{pVeiN%(>6GH9rEKdfHl2Oa}- zP_$5NHZ+7e(Pf3R_$1BXS8(By^w9KQoXXO}J=6G;(jMVsl9`XU+&^;chPRQtA$j)O zY}84cuJl}sC7dA7Ua~eR#C1J}%%Ey0|MLmGHC?H<6kT;}4woW*5D8fQm8zfoc&^|j zA0gp>78{>%Q?j4fYdt`iz(K4FPI?PeGWHIF84f~7p z9&0ktkq&PcsbPs|g*t{>Bw@d^C=aOaQok1bax zCJ)cXE;YUC1utvz9Pa^DxD0hnTx*aUiUfuL@-u{Rs?dP9!+uAD?;Ia&+m}8R@$0jF zgJ$oXfZ;s;1Ye`N;G7EiFW&C%V+lmxFL&XyP^Z<0-o-w_2w@(0H=vTGo%*-;#{)iY?_GPX<1di1DJ28$ z!P}>3l|$h`PglgTv9_g^z0c&xaCDNC&yWs*IUcsxVnI4?O+MrZ(yVev1Obj5{roMks%p1( zS6NJ&YpUd-iiV?ihsYOVjRC>*$T7Mn2+rKMp?M;i>i!9o-FNv;&cD0IFRE!ynw|0C z=2<$lHrc07D?q`&I5VPrW1~52cjaMKn&`x2u8BcA{oP}HH!QbEG%#W8T=?Vrc;k?C zT3uR#T3AkXQvRp5qi6wbRK<5GqF9~P0d`;Z$KoS@ODHA;8}>=3wkx3(bFS>lfo{y}Lwsv0-d zi{gP--yfkA?VSG>Ik^)Wc>ONVYrhp~9eyV_#cU>@E!)g_PtOhi#i=4EXqFXjJsO%; zvhw}vlQvQ*sf@#eE2SqIE+r&A77-=riK*+N>~xkp1r<52IIm+&R37LAb$pnZH5grW zpp%lgTCQm)zrTH~S^P^jY=8h_t&jyY^TGA4zJE4)U`o|nxA8>nQ{&k5T0gEuH6D(ek(Vnv?$Ge14@;L)cfN+d^=hBdqlbC11k%ocD<0hwe z6Qd{wCFGwpCMIyT)Ei@Wb>n1Ex&bd_^}XRRgx1~r`FL>J@2fFBK9gnjK^Juae=ERg z)%V^6kc>(Gvk9x{WO~OT)om0ylA>&_%&gZk3a-aiFX#s9(h`>@&B>NP@487jGAHUs 
z+pxD-MBQfBqQ%aIvu~BX^;jw+p&)G~>*wj|UF1~neM!KQsC%tO#n;Wt<*e8Y<)80` zw%-nvsy*!V0@-$B*xm&^P#8*v|It-jjBATXoYPG6CQM2MYQh#6%7iqnW{j-Z>2Z7_a zBaB^?+%;Eip!@U8V|x{EI727AO29fSZ+E<7m(!=~2h>{7wV0xLX(6NCvsAd*R1*8E zJ)l=rzwrnZ5r(0!frHJ&du&kyLLOrSoK8M+{V9x8TDq?PbG=rA%?!*)=uGV1C=&Uj zRrNNv`@XN_pkdD;HRma|WdBE02ESBr+@z zF0$J`uYZdV=}+k{WUpGFy>x-8?JT-#v+@|*vWzLolEt_RnZEtDkNh)v#(%NmdXWF_ z;1ev;LmU4k z{aGbpE*_WC*jkdZf~P^VU`#^r_B&kv?!P#gHhoOZ=WA2BOdzeMy7S z<#}<|_lNZ%5S=e=<8&AOigdab3$K1fCW5~uzcB@&o&Q z53}S%zQ72FDoP_V- zI_}6F0Ite(n>pn%{xH_n5x{TOyo1CZB}Vyoka|>l_lj=ySr{TRrh3;4tz3PUHhEhv zdJ6h19vhvcc^I1h&0NXxCx{H5Vn%cCTz=CdmSdAv?_@Jd%~bj1SKr}q%8JnV>_#Qm zb{ism)Un2QaY?kX#a*|6<2Bnj`==yyV2}C!OK#Y$C1$QlH!Y?$Y+e<|IjrdZ zR0lLPo2$=pzQ)%z7gE~PjjuZJ8>MA5m0CvKk}Bg&IgP12O(biOIjUD)D4y~*ibO)B zW9Ai!E1Wh@(yMQ-$kRy5RXP_Yw@3Mq^HmrXGzkixDeEA1`MTD|4W`IjttC1B@utvb zD^gOo-lV1=qKYX1=+|>rmA01iayhCH{{XCCt#^7H3eka%aa{GM$aPYU%GBk)*g$=XX| zyW3qo7VwR{X1aKEyM4cN98>nPOHyAc^Zuu!$zz?%fmrS_kzTpuZ7w~mQGI$>l-)jwM_SR-`JyVmiib2lp&BfNENq>Q9CMh`<@pntI!?OiUTCAWaQTX;8>+oxVicsTwB zJ&k^Kq@Ooh+tGEYwA+|wf>wrj+&pq2!B!mzHI63=tHYG)ql$E$&)mzV`@o)RqFe-G z4l!N}`!4>_*54L94{c$rUMyZHvQ3ZYFb3gAMjrUDVVdm~io0Nn{Qm$Kg{g&`O&?cA zv@69)GUlaluBVRG!t2s)SKG<1it70SoSrL{)~_P+T<%bP&2dFJ9WKkM<{l>0Jej`s zI_H}5e;52fipMJM5kWY~>sJ0S_}V=_NmdCIo|p%m`&K`~-v)Sp#MWoT9wOBQva!T^ zab?@A8>BIrZzXA48Rorj z#C|f+v|B$e+sR1LcvN6(!Tf3QW9?DAR-o+3#oG#A4~uT+Nu?F`CCm zx7h-X{?+69)z{kcNvnP#*RC(5V*9Y&c{R&iT^>b!_FsYetbOfS@wm);#LCes$lF+f z%~xeDnwOmYX~lmU_Y`b#CuA0u{$mhpO`!7P9<>#WWHxcdTe~}9&M8!BL9$~fDX5ue zCp6urmz~^Ff@|krbKP47I|FE=Y2%7fALUX5ew8dq^AAdL!Uajf^Gyh;br?JSjV+uz zfFq?g;!ig!ypvb#?V1=yX}9j!5a~Wt7^>UOY4;OjAXG@-FEyHrMU9eet<4e1x-vS` z9Q?=9lWr0peNF}3q$+l`q!c8zixTaTNBk?9(%3_{am8!i zYDxBT0qw^=c6qIfeM>9-X-i`%8D1=1%E#KggH=M8 zPBWfs)^+>EnOmP~^8IcwZy2e_uDS}uOH^lD!qdp8qa}_7c6vsI7MiI%$1?TWYa_!} z{$$17M%czj9qVG|@ug#))zMm3RvkF?E#7J@+1rI4)tc9mGj+{C(^FNs&3X2@vTVN} zsi`+$@OxEAZUeAh^*POFEeE)n>p){hI#L>x1GjCcE-p6rrcEVLmO{q2j7hkXNLjK? 
zRgmSn)7g$H9&55X7&mcOIpVEZM$=3H=Bgi*R`!EtN6x~!@bvlRc5-4W`#7DCh5SK# z;k$?~?5_R=mZhj$^KPNNtC#Ud?G>zQFcEA-ON=nib6yXp-ZX4bo+|9JV%Am% zjMp@BTqHG7IwhOpB&^x7u4~h46A5l4CEK5+WB&kZPh%l5%D`h7+;LD|N;byX{u-k! zo@mZ%(}o(1S1MW_T=5lWE4E9hc!KJF9m!uz);-1Zl|1uLxZzJZtmzwed95Q(ojbUq zB~o)%lPg_ZG>f#3D!yn23 zpy~FS4~8Mapr>k4`F5%?&T8eXVL%<~uBgR`UV^!CQcdv?1DMx$^^_Yf|b@ zEl+CC(`6C-r>~_yQlAwPP0R{YTxIc!d~6wNfo;rccv$1ox*8_r?SsIk&O*iMOn;X= z)DuaSs)$MGK-83^(p8+nMHEmW)U=ee6g9fiQ=6?en3z#T6og3&ZIMsCcN#9Fsuu>h zML4V0w~Yo0wks;M{h~T)Nk>L{y}kHk2Ozaab*L4L<%a_`#M$di6lL&!w3nK(Xy4~O zJ!^;ANgHz`cxwnGKQ&Djuzhiw;Qrn7XC|jzSdRYyt!SvRHgr1Xt+hxET$HmJi9PB| ztIxD&Cp8VcessuzzVQOLjCoe1-0O5GRy{Fy02qw@Y8!?LoN{XA)Mn2y4u}Bj?O68{ zcCINtX&dZ_%M#qu@A+y5dU7`EyAVqphBgMZVRI%9DB7;tC1dSNXykJ#bNq0)bbgqIMaIo%dbgWCn-0faIVLf9L z*}%nF$jkEkQ?{NeMo~`1c^5rVvBF!eN#qkxjZfiIkCvmDEJCmWjEZwYoyQcdAgXtF zJ4Xi+^ zdbeUDu-U#`RY?s^aUm^=fb&Yg*vcv9Kf;@|(=rl@D4;{BX(?zZovdybtP(HPO-9kO zedS^JSJLx*JluycXZ_(^Z;8Gh;yuDJhuBs;?C9h?S*b8hY-?H^nsly78v(4Kq*lf* z4&-#JPuaVgHkt&Y?mjmw%{a(gtu<7V0ILaV?b?=%=uu^M`ce$`sm;6Y`?S*wFHuOY zRz-tW^tktG7XI=9`qUeJs@|9X03i#0Ka77mnQS#3PQP9rUZE=e4RhBA=B~5FmhBzl zu^)K>{VSQ4-@Yr(j^^^%rty-wsbY(2^HP-{50;UBYSFVRilk7SwsTA6_oY>=?lM)7 z?&(tR-qjMVKLi@hltVbd>&dG4YP}>ymp$sdidR0BqIwcQ!xdWI)vxsk;~#V!nwnAm{fnJUhpEQmvRZd77OCOei0gMb&qiv>~X}fFalCnNtPVVHscfAH( z-Rc6SlXlTeMBjMQ?c3g?j!J&|dXmE))U6t~QAwIm+=>K3)~dYXo8_qLxvAb&IL#-R zn8hNB?1;a0MM78R>58ypUU5;B+}$dNWZ(P^=eL-#a3a1 zLJ1+bq&!opt=gHGw6vKNWKaj*qH9~on1Vi9<~7|WJJ`x9(M544?s`@{o5bG?=f2;L z0Xfbqn~&h#)RFy;9 zLdZ!Xv%ldMn$&*rF+~-fO4D6UBNQ>TMe0_n%b}@1bBZXb_S5cb7$Uo9sHc_vDt$ji zU$#QZIxwP&zim4QB0E1AY3$b4k`?D0Nv={$Uu@AuX9|)soKVKKQoS&02C?HKiYTqQ zn>pN38$eg6sMvZ@MN~-~l`^dqQjibZRVQAwQCClJGjD!an2xMHTPh zR;=)`?_*3T>S=a{;BiG2?4BSr9+h3AG*LuAMvntE8O~^;tbxIar*8RZqO$@vK4%oG zZhG#biV9wIk)j8V^drpPDGIip75&pJRv8 zk}Ew#b43*qHe_t2nvs{~qKerWl<#E3comf)%@kL3R)!NIr^Yu?MNx!zHm%dkfOg`u z?hg0jiYq0#)b=K|x7+g(Rvk-oMHQ27WOP)r9Nv{72bnQTQ(YACG?7?e>(1fT!e DmU=Dc literal 0 HcmV?d00001 diff --git a/tests/udf_test/requirements.txt 
b/tests/udf_test/requirements.txt deleted file mode 100644 index 23c96db1..00000000 --- a/tests/udf_test/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -opencv-python==4.5.5.64 -zmq==0.0.0 \ No newline at end of file diff --git a/tests/udf_test/settings.json b/tests/udf_test/settings.json index 00766372..a85d40f1 100644 --- a/tests/udf_test/settings.json +++ b/tests/udf_test/settings.json @@ -4,6 +4,7 @@ "functions" : { "facedetect" : "facedetect", "flip": "flip", - "caption": "caption" + "caption": "caption", + "metadata": "metadata" } } \ No newline at end of file diff --git a/tests/udf_test/udf_local.py b/tests/udf_test/udf_local.py index aaf4a2ff..8dd7d3d9 100644 --- a/tests/udf_test/udf_local.py +++ b/tests/udf_test/udf_local.py @@ -19,18 +19,22 @@ # print(globals()) i = 0 +print("Started Listening...") while True: message = socket.recv() try: + print("Received {}".format(message)) + message_received = message.decode("utf-8") input_params = json.loads(message_received) udf = globals()[settings["functions"][input_params["id"]]] - t, opfile = udf.run(settings, input_params["ipfile"], input_params) + response = udf.run(settings, input_params["ipfile"], input_params) - socket.send_string(opfile) + print(i, response) + socket.send_string(response) i += 1 except Exception as e: print(e.with_traceback(None)) diff --git a/tests/unit_tests/Image_test.cc b/tests/unit_tests/Image_test.cc index 544be723..f8614434 100644 --- a/tests/unit_tests/Image_test.cc +++ b/tests/unit_tests/Image_test.cc @@ -1027,7 +1027,7 @@ TEST_F(ImageTest, AddImageByPath) { TEST_F(ImageTest, ImagePathError) { VCL::Image img; std::string temp_image_path(VDMS::VDMSConfig::instance()->get_path_tmp() + - "/pathimage.jpg"); + "/pathimagepatherror.jpg"); std::filesystem::copy_file(img_, temp_image_path); img = VCL::Image(temp_image_path, true); @@ -1037,3 +1037,72 @@ TEST_F(ImageTest, ImagePathError) { ASSERT_THROW(read_img.get_encoded_image_async(read_img.get_image_format()), VCL::Exception); } + 
+TEST_F(ImageTest, UDFMetadata) { + std::string inputFile = "udf_test/metadata_image.jpg"; + ASSERT_TRUE(fs::exists(fs::path(inputFile))); + std::string temp_image_path(VDMS::VDMSConfig::instance()->get_path_tmp() + + "/pathimageudfmetadata.jpg"); + std::filesystem::copy_file(inputFile, temp_image_path); + VCL::Image img = VCL::Image(temp_image_path, true); + + Json::Value _options; + _options["format"] = "jpg"; + _options["id"] = "metadata"; + _options["media_type"] = "image"; + _options["otype"] = "face"; + _options["port"] = 5555; + + img.userOperation(_options); + cv::Mat vcl_img = img.get_cvmat(); + for (auto metadata : img.get_ingest_metadata()) { + ASSERT_STREQ(metadata["object"].asString().data(), "face"); + } + + EXPECT_TRUE(std::remove(temp_image_path.data()) == 0); +} + +TEST_F(ImageTest, RemoteMetadata) { + std::string inputFile = "remote_function_test/metadata_image.jpg"; + ASSERT_TRUE(fs::exists(fs::path(inputFile))); + std::string temp_image_path(VDMS::VDMSConfig::instance()->get_path_tmp() + + "/rpathimage.jpg"); + std::filesystem::copy_file(inputFile, temp_image_path); + VCL::Image img = VCL::Image(temp_image_path, true); + + std::string _url = "http://localhost:5010/image"; + Json::Value _options; + _options["format"] = "jpg"; + _options["id"] = "metadata"; + _options["media_type"] = "image"; + _options["otype"] = "face"; + _options["ingestion"] = 1; + + img.syncremoteOperation(_url, _options); + cv::Mat vcl_img = img.get_cvmat(); + for (auto metadata : img.get_ingest_metadata()) { + ASSERT_STREQ(metadata["object"].asString().data(), "face"); + } + + EXPECT_TRUE(std::remove(temp_image_path.data()) == 0); +} + +TEST_F(ImageTest, UDFNoMetadata) { + std::string inputFile = "udf_test/metadata_image.jpg"; + ASSERT_TRUE(fs::exists(fs::path(inputFile))); + std::string temp_image_path(VDMS::VDMSConfig::instance()->get_path_tmp() + + "/pathimagenometadata.jpg"); + std::filesystem::copy_file(inputFile, temp_image_path); + VCL::Image img = 
VCL::Image(temp_image_path, true); + + Json::Value _options; + _options["format"] = "jpg"; + _options["id"] = "flip"; + _options["port"] = 5555; + + img.userOperation(_options); + cv::Mat vcl_img = img.get_cvmat(); + ASSERT_EQ(img.get_ingest_metadata().size(), 0); + + EXPECT_TRUE(std::remove(temp_image_path.data()) == 0); +} diff --git a/tests/unit_tests/OpsIoTest.cc b/tests/unit_tests/OpsIoTest.cc index eb812913..aed9d1f0 100644 --- a/tests/unit_tests/OpsIoTest.cc +++ b/tests/unit_tests/OpsIoTest.cc @@ -61,6 +61,44 @@ std::string raw_neoadd_json( "\n}" "}"); +std::string raw_neoadd_json_incorrect_op( + "{" + "\n\"NeoAdd\" :" + "\n{" + "\n\"cypher\" : \"CREATE (VDMSNODE:USERLABEL {user_prop1:\\\"foo\\\"})\"," + "\n\"operations\" : [" + "\n{" + "\n\"height\" : 150," + "\n\"type\" : \"NOTREAL\"," + "\n\"width\" : 150," + "\n\"x\" : 0," + "\n\"y\" : 0" + "\n}" + "\n]," + "\n\"target_data_type\" : \"img\"," + "\n\"target_format\" : \"jpg\"" + "\n}" + "}"); + +std::string raw_neoadd_json_bad_format( + "{" + "\n\"NeoAdd\" :" + "\n{" + "\n\"cypher\" : \"CREATE (VDMSNODE:USERLABEL {user_prop1:\\\"foo\\\"})\"," + "\n\"operations\" : [" + "\n{" + "\n\"height\" : 150," + "\n\"type\" : \"crop\"," + "\n\"width\" : 150," + "\n\"x\" : 0," + "\n\"y\" : 0" + "\n}" + "\n]," + "\n\"target_data_type\" : \"img\"," + "\n\"target_format\" : \"NOTREAL\"" + "\n}" + "}"); + class OpsIOCoordinatorTest : public ::testing::Test { protected: @@ -70,18 +108,12 @@ class OpsIOCoordinatorTest : public ::testing::Test { } virtual void TearDown() { - global_s3_connection->end(); + if (global_s3_connection->connected()) { + printf("Shutting down Global S3 Conn...\n"); + global_s3_connection->end(); + } delete global_s3_connection; - } - - void create_conn_test() { - VCL::RemoteConnection *local_conn; - bool is_conn = false; - local_conn = instantiate_connection(); - is_conn = local_conn->connected(); - local_conn->end(); - delete local_conn; - ASSERT_EQ(is_conn, true); + printf("Global Connection 
Object Deleted...\n"); } void put_obj_test() { @@ -96,6 +128,11 @@ class OpsIOCoordinatorTest : public ::testing::Test { connection = get_existing_connection(); rc = s3_upload("test_obj", buffer, connection); ASSERT_EQ(rc, 0); + + if (connection->connected()) + connection->end(); + rc = s3_upload("test_obj", buffer, connection); + ASSERT_EQ(rc, -1); } void get_obj_test() { @@ -115,6 +152,11 @@ class OpsIOCoordinatorTest : public ::testing::Test { for (int i = 0; i < downloaded.size(); ++i) { EXPECT_EQ(downloaded[i], uploaded[i]); } + + if (connection->connected()) + connection->end(); + downloaded = s3_retrieval("test_obj", connection); + ASSERT_EQ(downloaded.size(), 0); } void do_ops_test() { @@ -144,9 +186,46 @@ class OpsIOCoordinatorTest : public ::testing::Test { local_connection = get_existing_connection(); ASSERT_EQ(global_s3_connection->connected(), true); } + + void test_bad_operations() { + + std::ifstream input("test_images/large1.jpg", std::ios::binary); + std::vector buffer(std::istreambuf_iterator(input), + {}); + std::vector trans_img; + + Json::Value root; + Json::Reader reader; + std::string incorrect_ops_json_query(raw_neoadd_json_incorrect_op); + std::string bad_format_query(raw_neoadd_json_bad_format); + bool success; + + // Non-existent Operations (typos) + printf("Checking for incorrect operation type\n"); + success = reader.parse(incorrect_ops_json_query, root); + printf("Parse Success\n"); + if (!success) { + FAIL() << "Failed to parse" << reader.getFormattedErrorMessages(); + } + ASSERT_EQ(success, true); + trans_img = do_single_img_ops(root, buffer, "NeoAdd"); + ASSERT_EQ(trans_img.size(), 0); + + // bad format target + printf("Checking for bad format target\n"); + success = reader.parse(bad_format_query, root); + printf("Parse Success\n"); + if (!success) { + FAIL() << "Failed to parse" << reader.getFormattedErrorMessages(); + } + ASSERT_EQ(success, true); + trans_img = do_single_img_ops(root, buffer, "NeoAdd"); + 
ASSERT_EQ(trans_img.size(), 0); + } + }; // end test class -// TEST_F(OpsIOCoordinatorTest, InstantiateConnTest) {create_conn_test();} TEST_F(OpsIOCoordinatorTest, PutObjTest) { put_obj_test(); } TEST_F(OpsIOCoordinatorTest, GetObjTest) { get_obj_test(); } TEST_F(OpsIOCoordinatorTest, GetConnTest) { get_conn_test(); } TEST_F(OpsIOCoordinatorTest, DoOpsTest) { do_ops_test(); } +TEST_F(OpsIOCoordinatorTest, DoBorkedOpsTest) { test_bad_operations(); } diff --git a/tests/unit_tests/TLSTest.cc b/tests/unit_tests/TLSTest.cc index 3a8b7858..daeb15cf 100644 --- a/tests/unit_tests/TLSTest.cc +++ b/tests/unit_tests/TLSTest.cc @@ -6,6 +6,7 @@ #include "gtest/gtest.h" #define SERVER_PORT_TLS 43445 +#define PYTHON_SERVER_PORT_TLS 43446 #define NUMBER_OF_MESSAGES 1 typedef std::basic_string BytesBuffer; @@ -19,8 +20,9 @@ TEST(TLS_CPP, test_tls_server) { std::string key_path = "/tmp/trusted_server_key.pem"; std::string ca_path = "/tmp/trusted_ca_cert.pem"; - std::string command = "cd tls_test && python3 prep-tls-tests.py > " - "../tests_tls_screen.log 2> ../tests_tls_log.log &"; + std::string command = + "cd tls_test && python3 run_tls_test_server.py > " + "../tests_tls_server_screen.log 2> ../tests_tls_server_log.log &"; system(command.c_str()); usleep(3 * 1000000); @@ -40,3 +42,33 @@ TEST(TLS_CPP, test_tls_server) { ASSERT_EQ(0, recv_message.compare(client_to_server)); } } + +TEST(TLS_CPP, test_tls_client) { + + std::string client_to_server("client sends some random data"); + std::string server_to_client("this library seems to work :)"); + + std::string cert_path = "/tmp/trusted_client_cert.pem"; + std::string key_path = "/tmp/trusted_client_key.pem"; + std::string ca_path = "/tmp/trusted_ca_cert.pem"; + + std::string command = + "cd tls_test && python3 run_tls_test_client.py > " + "../tests_tls_client_screen.log 2> ../tests_tls_client_log.log &"; + system(command.c_str()); + usleep(3 * 1000000); + + comm::ConnClient client("localhost", PYTHON_SERVER_PORT_TLS, cert_path, + 
key_path, ca_path); + + for (int i = 0; i < NUMBER_OF_MESSAGES; ++i) { + client.send_message((const uint8_t *)client_to_server.c_str(), + client_to_server.length()); + } + + for (int i = 0; i < NUMBER_OF_MESSAGES; ++i) { + BytesBuffer message_received = client.recv_message(); + std::string recv_message((char *)message_received.data()); + ASSERT_EQ(0, recv_message.compare(server_to_client)); + } +} \ No newline at end of file diff --git a/tests/unit_tests/TimerMapTest.cc b/tests/unit_tests/TimerMapTest.cc new file mode 100644 index 00000000..5a1a7979 --- /dev/null +++ b/tests/unit_tests/TimerMapTest.cc @@ -0,0 +1,122 @@ +/** + * @file TimerMapTest.cc + * + * @section LICENSE + * + * The MIT License + * + * @copyright Copyright (c) 2024 Intel Corporation + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. 
+ * + */ +#include "timers/TimerMap.h" +#include "gtest/gtest.h" +#include + +class TimerMapTest : public ::testing::Test { +protected: + // tests add and retrieval of timers based on ID + void add_retrieve_test() { + TimerMap timers; + std::vector id_list; + std::vector timestamps; + + timers.add_timestamp("timer-1"); + sleep(1); + timers.add_timestamp("timer-1"); + timers.add_timestamp("timer-2"); + timers.add_timestamp("timer-3"); + + id_list = timers.return_ids(); + + // Verify 3 timer IDs were returned + ASSERT_EQ(id_list.size(), 3); + + // Now lets get time stamps out, and verify number of timestamps + timestamps = timers.return_all_timestamps("timer-1"); + ASSERT_EQ(timestamps.size(), 2); + + timestamps = timers.return_all_timestamps("timer-2"); + ASSERT_EQ(timestamps.size(), 1); + timestamps = timers.return_all_timestamps("timer-3"); + ASSERT_EQ(timestamps.size(), 1); + } + + // Verifies we can add and remove a vector based on its ID, also explicitly + // uses destructor + // by making use of new/delete + void clear_id_test() { + TimerMap *timers = new TimerMap; + std::vector id_list; + int rc_3; + int rc_1; + + timers->add_timestamp("timer-1"); + timers->add_timestamp("timer-2"); + timers->add_timestamp("timer-3"); + rc_3 = timers->return_ids().size(); + + // now lets delete two IDs + timers->clear_id("timer-1"); + timers->clear_id("timer-3"); + rc_1 = timers->return_ids().size(); + + // clear the timers + delete timers; + + // Verify 3 timer IDs were returned + ASSERT_EQ(rc_3, 3); + ASSERT_EQ(rc_1, 1); + } + + void runtime_test() { + TimerMap timers; + double total_runtime; + + timers.add_timestamp("timer-1"); + sleep(1); + timers.add_timestamp("timer-1"); + + total_runtime = timers.return_total_time("timer-1"); + + ASSERT_TRUE((total_runtime < 1005000 && total_runtime > 950000)); + } + + void interval_test() { + TimerMap timers; + std::vector intervals; + + timers.add_timestamp("timer-1"); + sleep(1); + timers.add_timestamp("timer-1"); + sleep(1); + 
timers.add_timestamp("timer-1"); + + intervals = timers.return_intervals("timer-1"); + + ASSERT_EQ(intervals.size(), 2); + } + +}; // end test class + +TEST_F(TimerMapTest, AddRetrieveTest) { add_retrieve_test(); } +TEST_F(TimerMapTest, ClearIDTest) { clear_id_test(); } +TEST_F(TimerMapTest, RuntimeTest) { runtime_test(); } +TEST_F(TimerMapTest, IntervalTest) { interval_test(); } diff --git a/tests/unit_tests/Video_test.cc b/tests/unit_tests/Video_test.cc index deca11bc..7c1cd994 100644 --- a/tests/unit_tests/Video_test.cc +++ b/tests/unit_tests/Video_test.cc @@ -1375,3 +1375,91 @@ TEST_F(VideoTest, FilePathAccessError) { ASSERT_TRUE(false); } } + +/** + * Imitates performing a remote operation + * and then storing the video in VDMS with metadata. + * Metadata check is performed by this test. + */ +TEST_F(VideoTest, SyncRemoteWriteWithMetadata) { + std::string _url = "http://localhost:5010/video"; + Json::Value _options; + _options["format"] = "mp4"; + _options["id"] = "metadata"; + _options["media_type"] = "video"; + _options["otype"] = "face"; + + try { + + std::string temp_video_input(VDMS::VDMSConfig::instance()->get_path_tmp() + + "/video_test_SyncRemoteWriteMD_input.avi"); + copy_video_to_temp(_video_path_avi_xvid, temp_video_input, get_fourcc()); + std::string temp_video_test(VDMS::VDMSConfig::instance()->get_path_tmp() + + "/video_test_SyncRemoteWriteMD_test.avi"); + copy_video_to_temp(_video_path_avi_xvid, temp_video_test, get_fourcc()); + + std::string syncremote_name_vcl("videos_tests/syncremotemd_vcl.mp4"); + { + VCL::Video video_data(temp_video_input); // + video_data.syncremoteOperation(_url, _options); + video_data.store(syncremote_name_vcl, VCL::Video::Codec::H264); + for (auto metadata : video_data.get_ingest_metadata()) { + ASSERT_STREQ(metadata["1"]["bbox"]["object"].asString().data(), "face"); + } + } + + // Cleanup temp files. 
+ if (std::remove(temp_video_input.data()) != 0) { + } + if (std::remove(temp_video_test.data()) != 0) { + } + + } catch (VCL::Exception &e) { + print_exception(e); + ASSERT_TRUE(false); + } +} + +/** + * Imitates performing a user defined operation + * and then storing the video in VDMS with metadata. + * Metadata check is performed by this test. + */ +TEST_F(VideoTest, UDFWriteWithMetadata) { + Json::Value _options; + _options["port"] = 5555; + _options["id"] = "metadata"; + _options["media_type"] = "video"; + _options["otype"] = "face"; + _options["format"] = "mp4"; + + try { + + std::string temp_video_input(VDMS::VDMSConfig::instance()->get_path_tmp() + + "/video_test_UDFWrite_input.avi"); + copy_video_to_temp(_video_path_avi_xvid, temp_video_input, get_fourcc()); + std::string temp_video_test(VDMS::VDMSConfig::instance()->get_path_tmp() + + "/video_test_UDFemoteWrite_test.avi"); + copy_video_to_temp(_video_path_avi_xvid, temp_video_test, get_fourcc()); + + std::string udf_name_vcl("videos_tests/udf_vcl.mp4"); + { + VCL::Video video_data(temp_video_input); // + video_data.userOperation(_options); + video_data.store(udf_name_vcl, VCL::Video::Codec::H264); + for (auto metadata : video_data.get_ingest_metadata()) { + ASSERT_STREQ(metadata["1"]["bbox"]["object"].asString().data(), "face"); + } + } + + // Cleanup temp files. 
+ if (std::remove(temp_video_input.data()) != 0) { + } + if (std::remove(temp_video_test.data()) != 0) { + } + + } catch (VCL::Exception &e) { + print_exception(e); + ASSERT_TRUE(false); + } +} \ No newline at end of file diff --git a/tests/unit_tests/client_descriptors.cc b/tests/unit_tests/client_descriptors.cc index 5dd65a6f..5912247c 100644 --- a/tests/unit_tests/client_descriptors.cc +++ b/tests/unit_tests/client_descriptors.cc @@ -45,7 +45,7 @@ TEST(CLIENT_CPP, add_flinng_descriptor) { meta_obj->_aclient.reset( new VDMS::VDMSClient(meta_obj->get_server(), meta_obj->get_port())); Json::Value tuple; - tuple = meta_obj->construct_descriptor(); + tuple = meta_obj->construct_flinng_descriptor(); VDMS::Response response = meta_obj->_aclient->query(meta_obj->_fastwriter.write(tuple), blobs); diff --git a/tests/unit_tests/client_image.cc b/tests/unit_tests/client_image.cc index 68f0e5c8..f90a04d5 100644 --- a/tests/unit_tests/client_image.cc +++ b/tests/unit_tests/client_image.cc @@ -161,4 +161,83 @@ TEST(CLIENT_CPP, find_image_udf) { int status1 = result[0]["FindImage"]["status"].asInt(); EXPECT_EQ(status1, 0); delete meta_obj; +} + +TEST(CLIENT_CPP, add_image_dynamic_metadata) { + + std::string filename = "../tests/test_images/metadata_image.jpg"; + std::vector blobs; + + Json::Value op; + op["type"] = "userOp"; + op["options"]["id"] = "metadata"; + op["options"]["format"] = "jpg"; + op["options"]["media_type"] = "image"; + op["options"]["otype"] = "face"; + op["options"]["port"] = 5555; + Meta_Data *meta_obj = new Meta_Data(); + blobs.push_back(meta_obj->read_blob(filename)); + meta_obj->_aclient.reset( + new VDMS::VDMSClient(meta_obj->get_server(), meta_obj->get_port())); + Json::Value tuple; + + tuple = meta_obj->constuct_image(true, op, "image_dynamic_metadata"); + VDMS::Response response = + meta_obj->_aclient->query(meta_obj->_fastwriter.write(tuple), blobs); + Json::Value result; + meta_obj->_reader.parse(response.json.c_str(), result); + + int status1 = 
result[0]["AddImage"]["status"].asInt(); + EXPECT_EQ(status1, 0); + delete meta_obj; +} + +TEST(CLIENT_CPP, add_image_dynamic_metadata_remote) { + + std::string filename = "../tests/test_images/metadata_image.jpg"; + std::vector blobs; + + Json::Value op; + op["type"] = "syncremoteOp"; + op["options"]["id"] = "metadata"; + op["options"]["format"] = "jpg"; + op["options"]["media_type"] = "image"; + op["options"]["otype"] = "face"; + op["url"] = "http://localhost:5010/image"; + Meta_Data *meta_obj = new Meta_Data(); + blobs.push_back(meta_obj->read_blob(filename)); + meta_obj->_aclient.reset( + new VDMS::VDMSClient(meta_obj->get_server(), meta_obj->get_port())); + Json::Value tuple; + + tuple = meta_obj->constuct_image(true, op, "image_dynamic_metadata_remote"); + VDMS::Response response = + meta_obj->_aclient->query(meta_obj->_fastwriter.write(tuple), blobs); + Json::Value result; + meta_obj->_reader.parse(response.json.c_str(), result); + + int status1 = result[0]["AddImage"]["status"].asInt(); + EXPECT_EQ(status1, 0); + delete meta_obj; +} + +TEST(CLIENT_CPP, find_image_dynamic_metadata) { + + Meta_Data *meta_obj = new Meta_Data(); + meta_obj->_aclient.reset( + new VDMS::VDMSClient(meta_obj->get_server(), meta_obj->get_port())); + Json::Value tuple; + tuple = meta_obj->construct_find_image_with_dynamic_metadata(); + VDMS::Response response = + meta_obj->_aclient->query(meta_obj->_fastwriter.write(tuple)); + Json::Value result; + meta_obj->_reader.parse(response.json.c_str(), result); + int status_i = result[0]["FindImage"]["status"].asInt(); + int status_b = result[1]["FindImage"]["status"].asInt(); + std::string objectId = + result[1]["FindImage"]["entities"][0]["objectID"].asString(); + EXPECT_EQ(status_i, 0); + EXPECT_EQ(status_b, 0); + EXPECT_STREQ(objectId.data(), "face"); + delete meta_obj; } \ No newline at end of file diff --git a/tests/unit_tests/client_videos.cc b/tests/unit_tests/client_videos.cc index 728d1df6..35d5157f 100644 --- 
a/tests/unit_tests/client_videos.cc +++ b/tests/unit_tests/client_videos.cc @@ -111,3 +111,66 @@ TEST(CLIENT_CPP_Video, add_single_video_multi_client) { delete meta_obj; delete meta_obj2; } + +TEST(CLIENT_CPP_Video, add_dynamic_metadata) { + + std::stringstream video; + std::vector blobs; + + VDMS::VDMSConfig::init("unit_tests/config-client-tests.json"); + + std::string filename = "../tests/videos/Megamind.mp4"; + + std::string temp_video_path(VDMS::VDMSConfig::instance()->get_path_tmp() + + "/pathvideo.mp4"); + + copy_video_to_temp(filename, temp_video_path, get_fourcc()); + + Meta_Data *meta_obj = new Meta_Data(); + meta_obj->_aclient.reset( + new VDMS::VDMSClient(meta_obj->get_server(), meta_obj->get_port())); + + Json::Value op; + op["type"] = "syncremoteOp"; + op["url"] = "http://localhost:5010/video"; + op["options"]["id"] = "metadata"; + op["options"]["media_type"] = "video"; + op["options"]["otype"] = "face"; + + Json::Value tuple; + tuple = meta_obj->constuct_video_by_path(1, temp_video_path, op, + "dynamic_metadata"); + VDMS::Response response = + meta_obj->_aclient->query(meta_obj->_fastwriter.write(tuple), blobs); + Json::Value result; + meta_obj->_reader.parse(response.json.c_str(), result); + int status1 = result[0]["AddVideo"]["status"].asInt(); + + EXPECT_EQ(status1, 0); + delete meta_obj; +} + +TEST(CLIENT_CPP_Video, find_dynamic_metadata) { + + Meta_Data *meta_obj = new Meta_Data(); + meta_obj->_aclient.reset( + new VDMS::VDMSClient(meta_obj->get_server(), meta_obj->get_port())); + Json::Value tuple; + tuple = meta_obj->construct_find_video_with_dynamic_metadata(); + VDMS::Response response = + meta_obj->_aclient->query(meta_obj->_fastwriter.write(tuple)); + Json::Value result; + meta_obj->_reader.parse(response.json.c_str(), result); + int status_v, status_f, status_b; + std::string objectId; + status_v = result[0]["FindVideo"]["status"].asInt(); + status_f = result[1]["FindVideo"]["status"].asInt(); + status_b = 
result[2]["FindVideo"]["status"].asInt(); + objectId = + result[2]["FindVideo"]["entities"][0]["bbox"][0]["objectID"].asString(); + EXPECT_EQ(status_v, 0); + EXPECT_EQ(status_f, 0); + EXPECT_EQ(status_b, 0); + EXPECT_STREQ(objectId.data(), "face"); + delete meta_obj; +} \ No newline at end of file diff --git a/tests/unit_tests/meta_data.cc b/tests/unit_tests/meta_data.cc index 9e99c54f..240997af 100644 --- a/tests/unit_tests/meta_data.cc +++ b/tests/unit_tests/meta_data.cc @@ -107,17 +107,21 @@ Json::Value Meta_Data::construct_find_flinng_descriptor() { } Json::Value Meta_Data::constuct_image(bool add_operation, - Json::Value operations) { + Json::Value operations, + std::string category) { Json::Value image; Json::Value add_image; Json::Value tuple; image["properties"]["Name"] = "sample-image"; image["properties"]["ID"] = 1; + if (category != "") { + image["properties"]["category"] = category; + } image["format"] = "png"; image["_ref"] = 12; if (add_operation) { - image["operations"] = operations; + image["operations"].append(operations); } add_image["AddImage"] = image; tuple.append(add_image); @@ -144,22 +148,19 @@ Json::Value Meta_Data::constuct_video(bool add_operation) { } Json::Value Meta_Data::constuct_video_by_path(int id, std::string filepath, - Json::Value operations) { + Json::Value operations, + std::string category) { Json::Value video; Json::Value add_video; Json::Value tuple; video["properties"]["Name"] = "sample-video"; video["properties"]["ID"] = id; - video["container"] = "avi"; - video["codec"] = "xvid"; + if (category != "") { + video["properties"]["category"] = category; + } video["from_file_path"] = filepath; - video["operations"] = operations; - // video["_ref"]=1209; - // if( add_operation) - // { - // video["operations"]=operations; - // } + video["operations"].append(operations); add_video["AddVideo"] = video; tuple.append(add_video); return tuple; @@ -214,12 +215,118 @@ Json::Value Meta_Data::construct_find_image_withop(Json::Value 
operations) { Json::Value image; image["results"] = results; - image["operations"] = operations; + image["operations"].append(operations); + + Json::Value find_image; + find_image["FindImage"] = image; + + tuple.append(find_image); + return tuple; +} + +Json::Value Meta_Data::construct_find_video_withop(Json::Value operations) { + Json::Value tuple; + + Json::Value results; + results["blob"] = true; + results["limit"] = 1; + + Json::Value video; + video["results"] = results; + video["operations"].append(operations); + + Json::Value find_video; + find_video["FindVideo"] = video; + + tuple.append(find_video); + return tuple; +} + +Json::Value Meta_Data::construct_find_image_with_dynamic_metadata() { + Json::Value tuple; + + Json::Value cons; + cons["category"][0] = "=="; + cons["category"][1] = "image_dynamic_metadata"; + + Json::Value metacons; + metacons["objectID"][0] = "=="; + metacons["objectID"][1] = "face"; + + Json::Value results; + results["blob"] = true; + + Json::Value link_image; + link_image["ref"] = 1; + + Json::Value image; + image["constraints"] = cons; + image["_ref"] = 1; Json::Value find_image; find_image["FindImage"] = image; tuple.append(find_image); + + Json::Value bimage; + bimage["metaconstraints"] = metacons; + bimage["link"] = link_image; + + Json::Value find_image_bbox; + find_image_bbox["FindImage"] = bimage; + + tuple.append(find_image_bbox); + + return tuple; +} + +Json::Value Meta_Data::construct_find_video_with_dynamic_metadata() { + Json::Value tuple; + + Json::Value cons; + cons["category"][0] = "=="; + cons["category"][1] = "dynamic_metadata"; + + Json::Value metacons; + metacons["objectID"][0] = "=="; + metacons["objectID"][1] = "face"; + + Json::Value results; + results["blob"] = true; + + Json::Value link_video; + link_video["ref"] = 1; + Json::Value link_frame; + link_frame["ref"] = 2; + + Json::Value video; + video["constraints"] = cons; + video["_ref"] = 1; + + Json::Value find_video; + find_video["FindVideo"] = video; + + 
tuple.append(find_video); + + Json::Value fvideo; + fvideo["frameconstraints"] = false; + fvideo["_ref"] = 2; + fvideo["link"] = link_video; + + Json::Value find_video_frame; + find_video_frame["FindVideo"] = fvideo; + + tuple.append(find_video_frame); + + Json::Value bvideo; + bvideo["metaconstraints"] = metacons; + bvideo["link"] = link_frame; + + Json::Value find_video_bbox; + find_video_bbox["FindVideo"] = bvideo; + + tuple.append(find_video_bbox); + return tuple; } diff --git a/tests/unit_tests/meta_data_helper.h b/tests/unit_tests/meta_data_helper.h index d59716f5..bb9369d7 100644 --- a/tests/unit_tests/meta_data_helper.h +++ b/tests/unit_tests/meta_data_helper.h @@ -38,13 +38,18 @@ class Meta_Data { Json::Value construct_updateBlob(); Json::Value construct_findBlob(); std::string *read_blob(std::string &); - Json::Value constuct_image(bool = false, Json::Value operations = {}); + Json::Value constuct_image(bool = false, Json::Value operations = {}, + std::string category = ""); Json::Value constuct_video(bool = false); Json::Value constuct_video_by_path(int id, std::string filepath, - Json::Value operations); + Json::Value operations, + std::string category = ""); Json::Value construct_find_image(); Json::Value construct_find_image_no_entity(); Json::Value construct_find_image_withop(Json::Value operations); + Json::Value construct_find_video_withop(Json::Value operations); + Json::Value construct_find_image_with_dynamic_metadata(); + Json::Value construct_find_video_with_dynamic_metadata(); Json::Value construct_descriptor(); Json::Value construct_find_descriptor(); Json::Value construct_flinng_descriptor(); diff --git a/user_defined_operations/README.md b/user_defined_operations/README.md index 974a2f06..4900ab23 100644 --- a/user_defined_operations/README.md +++ b/user_defined_operations/README.md @@ -4,8 +4,8 @@ This submodule is required to execute user defined operations (UDF) in VDMS usin ## Requirements - Python 3 or higher - Following python 
libraries - - opencv-python - - zmq + - opencv-python-headless + - pyzmq ## UDF Definition Any operation can be added to the module by creating a python file and adding it to the `functions` folder. All related files for the UDF should be stored in the folder `functions/files`. The operaion file should follow the following setup to define a `run` function that the interface file for VDMS will use; @@ -40,7 +40,7 @@ Update the `settings.json` file with the following parameters; ``` ## Setup -1. Either run from the location where you have the VDMS repo or just copy the `user_defined_operations` directory to wherever you want to run the UDFs, but ensure that it is on the same system as VDMS. +1. Either run from the location where you have the VDMS repo or just copy the `user_defined_operations` directory to wherever you want to run the UDFs, but ensure that it is on the same system as VDMS. 2. Create your UDFs as python scripts and place them in the `user_defined_operations/functions` directory. 3. Update the `settings.json` file to include your UDF file and other necessary information. 4. Follow the following steps to run the `user_defined_operations` submodule on port . @@ -123,7 +123,7 @@ def run(settings, message, input_params): opfilename = settings["opfile"] + str(t1) + '.' 
+ format img = cv2.imread(ipfilename) - + # These lines # represent the # code logic diff --git a/user_defined_operations/functions/caption.py b/user_defined_operations/functions/caption.py index c40f1ba4..50486e4a 100644 --- a/user_defined_operations/functions/caption.py +++ b/user_defined_operations/functions/caption.py @@ -33,4 +33,4 @@ def run(settings, message, input_params): video.writeFrame(frame) - return (time.time() - t1), opfilename + return opfilename diff --git a/user_defined_operations/functions/facedetect.py b/user_defined_operations/functions/facedetect.py index 44529c6d..3415cf57 100644 --- a/user_defined_operations/functions/facedetect.py +++ b/user_defined_operations/functions/facedetect.py @@ -29,4 +29,4 @@ def run(settings, message, input_params): cv2.imwrite(opfilename, img) - return (time.time() - t1), opfilename + return opfilename diff --git a/user_defined_operations/functions/flip.py b/user_defined_operations/functions/flip.py index 59ee4f35..beb2644f 100644 --- a/user_defined_operations/functions/flip.py +++ b/user_defined_operations/functions/flip.py @@ -16,4 +16,4 @@ def run(settings, message, input_params): cv2.imwrite(opfilename, img) - return (time.time() - t1), opfilename + return opfilename diff --git a/user_defined_operations/functions/metadata.py b/user_defined_operations/functions/metadata.py new file mode 100644 index 00000000..0b69761a --- /dev/null +++ b/user_defined_operations/functions/metadata.py @@ -0,0 +1,121 @@ +import cv2 +import numpy as np +from datetime import datetime +from collections import deque +import skvideo.io +import imutils +import time +import json + +face_cascade = cv2.CascadeClassifier( + # This file is available from OpenCV 'data' directory at + # https://github.com/opencv/opencv/blob/4.x/data/haarcascades/haarcascade_frontalface_default.xml + "functions/files/haarcascade_frontalface_default.xml" +) + + +def facedetectbbox(frame): + global face_cascade + gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) + 
faces = face_cascade.detectMultiScale(gray, 1.1, 4) + return faces + + +def run(settings, message, input_params): + ipfilename = message + format = message.strip().split(".")[-1] + + # Extract metadata for video files + if input_params["media_type"] == "video": + + vs = cv2.VideoCapture(ipfilename) + frameNum = 1 + metadata = {} + while True: + (grabbed, frame) = vs.read() + if not grabbed: + print("[INFO] no frame read from stream - exiting") + break + + if input_params["otype"] == "face": + faces = facedetectbbox(frame) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]), + "y": int(face[1]), + "height": int(face[2]), + "width": int(face[3]), + "object": "face", + "object_det": {"emotion": "happy", "age": 30}, + } + + metadata[frameNum] = {"frameId": frameNum, "bbox": tdict} + frameNum += 1 + + if frameNum == 3: + break + else: + faces = facedetectbbox(frame) + if len(faces) > 0: + face = faces[0] + # We use dummy values here as an example to showcase + # different values for car. + tdict = { + "x": int(face[0]) + 3, + "y": int(face[1]) + 5, + "height": int(face[2]) + 10, + "width": int(face[3]) + 30, + "object": "car", + "object_det": {"color": "red"}, + } + + metadata[frameNum] = {"frameId": frameNum, "bbox": tdict} + frameNum += 1 + + if frameNum == 3: + break + + response = {"opFile": ipfilename, "metadata": metadata} + r = json.dumps(response) + print(response) + print(r) + return r + # Extract metadata for image files + else: + tdict = {} + img = cv2.imread(ipfilename) + if input_params["otype"] == "face": + faces = facedetectbbox(img) + if len(faces) > 0: + face = faces[0] + tdict = { + "x": int(face[0]), + "y": int(face[1]), + "height": int(face[2]), + "width": int(face[3]), + "object": "face", + "object_det": {"emotion": "happy", "age": 30}, + } + else: + faces = facedetectbbox(img) + if len(faces) > 0: + face = faces[0] + # We use dummy values here as an example to showcase + # different values for car. 
+ tdict = { + "x": int(face[0]) + 3, + "y": int(face[1]) + 5, + "height": int(face[2]) + 10, + "width": int(face[3]) + 30, + "object": "car", + "object_det": {"color": "red"}, + } + + response = {"opFile": ipfilename, "metadata": tdict} + + r = json.dumps(response) + print(response) + print(r) + + return r diff --git a/user_defined_operations/requirements.txt b/user_defined_operations/requirements.txt index 23c96db1..d40f7ba0 100644 --- a/user_defined_operations/requirements.txt +++ b/user_defined_operations/requirements.txt @@ -1,2 +1,2 @@ -opencv-python==4.5.5.64 -zmq==0.0.0 \ No newline at end of file +opencv-python-headless==4.9.0.80 +pyzmq==26.0.3 \ No newline at end of file diff --git a/user_defined_operations/settings.json b/user_defined_operations/settings.json index 00766372..a85d40f1 100644 --- a/user_defined_operations/settings.json +++ b/user_defined_operations/settings.json @@ -4,6 +4,7 @@ "functions" : { "facedetect" : "facedetect", "flip": "flip", - "caption": "caption" + "caption": "caption", + "metadata": "metadata" } } \ No newline at end of file diff --git a/user_defined_operations/udf_local.py b/user_defined_operations/udf_local.py index bc051a94..eb1bcc7b 100644 --- a/user_defined_operations/udf_local.py +++ b/user_defined_operations/udf_local.py @@ -31,10 +31,10 @@ udf = globals()[settings["functions"][input_params["id"]]] - t, opfile = udf.run(settings, input_params["ipfile"], input_params) + response = udf.run(settings, input_params["ipfile"], input_params) - print(t, i, opfile) - socket.send_string(opfile) + # print(i, response) + socket.send_string(response) i += 1 except Exception as e: print(e.with_traceback(None)) diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt index bbef6ee1..66288fb3 100644 --- a/utils/CMakeLists.txt +++ b/utils/CMakeLists.txt @@ -1,4 +1,4 @@ cmake_minimum_required (VERSION 3.10) project(vdms-utils) -include_directories(include/comm include/chrono include/stats) -add_library(vdms-utils SHARED 
src/comm/ConnClient.cc src/comm/Connection.cc src/comm/Exception.cc src/comm/ConnServer.cc src/chrono/Chrono.cc src/stats/SystemStats.cc) +include_directories(include/comm include/chrono include/stats include/timers) +add_library(vdms-utils SHARED src/timers/TimerMap.cc src/comm/ConnClient.cc src/comm/Connection.cc src/comm/Exception.cc src/comm/ConnServer.cc src/stats/SystemStats.cc) diff --git a/utils/include/chrono/Chrono.h b/utils/include/chrono/Chrono.h deleted file mode 100644 index 098cd505..00000000 --- a/utils/include/chrono/Chrono.h +++ /dev/null @@ -1,186 +0,0 @@ -/** - * @section LICENSE - * - * The MIT License - * - * @copyright Copyright (c) 2017 Intel Corporation - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, - * including without limitation the rights to use, copy, modify, - * merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - * ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - * THE SOFTWARE. 
- * - */ - -#ifndef CHRONO_H_ -#define CHRONO_H_ - -#include -#include -#include -#include - -#include - -// Apple OSX -#ifdef __MACH__ -#include -#include -#endif - -#ifdef CHRONO_TIMING -#define CHRONO_TIC(NAME) NAME.tic(); -#define CHRONO_TAC(NAME) NAME.tac(); -#define CHRONO_PRINT_LAST_MS(NAME) NAME.printLastTime_ms(); -#else -#define CHRONO_TIC(NAME) -#define CHRONO_TAC(NAME) -#define CHRONO_PRINT_LAST_MS(NAME) -#endif - -// *************************************************************************** -// Chrono Base class -// *************************************************************************** -class Chrono { -public: - Chrono(const std::string &name, const bool asyncEnabled = false); - Chrono(); - virtual ~Chrono(void); - - void tic(void); - void tac(void); - void reset(void); - void setEnabled(const bool val); - - struct ChronoStats { - std::string name; - uint32_t counter; - float totalTime_ms; - float totalSquaredTime_ms2; - float averageTime_ms; - float stdDevTime_ms; - float lastTime_ms; - float minTime_ms; - float maxTime_ms; - }; - - const Chrono::ChronoStats &getElapsedStats(void) const { - return elapsedStats; - } - - const Chrono::ChronoStats &getPeriodStats(void) const { return periodStats; } - - uint32_t getTotalTime_ms(void) const { return elapsedStats.totalTime_ms; } - - uint32_t getTotalTime_us(void) const { - return elapsedStats.totalTime_ms * 1000.0f; - } - - uint32_t getLastTime_ms(void) const { return elapsedStats.lastTime_ms; } - - uint32_t getLastTime_us(void) const { - return elapsedStats.lastTime_ms * 1000.0f; - } - - uint32_t getAvgTime_ms(void) const { return elapsedStats.averageTime_ms; } - - uint32_t getAvgTime_us(void) const { - return elapsedStats.averageTime_ms * 1000.0f; - } - - uint32_t getSTD_ms(void) const { return elapsedStats.stdDevTime_ms; } - - uint32_t getSTD_us(void) const { - return elapsedStats.stdDevTime_ms * 1000.0f; - } - - void printTotalTime_ms(void) const { - std::cout << name << ": " << getTotalTime_ms() 
<< " [ms]" << std::endl; - } - - void printTotalTime_us(void) const { - std::cout << name << ": " << getTotalTime_us() << " [us]" << std::endl; - } - - void printLastTime_ms(void) const { - std::cout << name << ": " << getLastTime_ms() << " [ms]" << std::endl; - } - - void printLastTime_us(void) const { - std::cout << name << ": " << getLastTime_us() << " [us]" << std::endl; - } - - void printAvgTime_ms(void) const { - std::cout << name << ": " << getAvgTime_ms() << " [ms]" << std::endl; - } - - void printAvgTime_us(void) const { - std::cout << name << ": " << getAvgTime_us() << " [us]" << std::endl; - } - - std::ostream &printStats(const Chrono::ChronoStats &stats, - std::ostream &os) const; - std::ostream &printAvgTime(const Chrono::ChronoStats &stats, - std::ostream &os) const; - std::ostream &printAvgTime(const Chrono::ChronoStats &stats, std::ostream &os, - const float ref) const; - -protected: - std::string name; - - bool enabled; - bool ticIdle; - uint32_t errors; - - ChronoStats elapsedStats; - ChronoStats periodStats; - - void resetStats(ChronoStats &stats); - void updateStats(ChronoStats &stats); - - virtual void doTic(void) = 0; - virtual void doTac(void) = 0; -}; - -// *************************************************************************** -// Chrono Cpu Implementation -// *************************************************************************** - -class ChronoCpu : public Chrono { -public: - ChronoCpu(const std::string &name); - ChronoCpu(); - ~ChronoCpu(void); - -protected: - timespec lastTicTime; - timespec ticTime; - timespec tacTime; - -#ifdef __MACH__ - clock_serv_t cclock; - mach_timespec_t mts; -#endif - - uint32_t ticCounter; - - virtual void doTic(void); - virtual void doTac(void); -}; - -#endif // CHRONO_H_ diff --git a/utils/include/comm/Connection.h b/utils/include/comm/Connection.h index 87a922ff..d9799204 100644 --- a/utils/include/comm/Connection.h +++ b/utils/include/comm/Connection.h @@ -105,6 +105,8 @@ class ConnClient : public 
Connection { ConnClient(struct ServerAddress srv); ConnClient(std::string addr, int port); + ConnClient(std::string addr, int port, const std::string &cert_file, + const std::string &key_file, const std::string &ca_file); ConnClient &operator=(const ConnClient &) = delete; ConnClient(const ConnClient &) = delete; ~ConnClient() {} @@ -112,8 +114,16 @@ class ConnClient : public Connection { private: ConnClient(); void connect(); + void setupTLS(); + void initiateTLS(); ServerAddress _server; + + std::string _cert_file; + std::string _key_file; + std::string _ca_file; + + SSL_CTX *_ssl_ctx; }; }; // namespace comm diff --git a/utils/include/comm/ExceptionComm.h b/utils/include/comm/ExceptionComm.h index 64fe50e8..ae811250 100644 --- a/utils/include/comm/ExceptionComm.h +++ b/utils/include/comm/ExceptionComm.h @@ -56,6 +56,7 @@ enum ExceptionCommType { SSL_KEY_FAIL, SSL_ACCEPT_FAIL, SSL_CA_FAIL, + SSL_CONNECT_FAIL, Undefined = 100, // Any undefined error }; diff --git a/utils/include/timers/TimerMap.h b/utils/include/timers/TimerMap.h new file mode 100644 index 00000000..98d43a14 --- /dev/null +++ b/utils/include/timers/TimerMap.h @@ -0,0 +1,111 @@ +/** + * @section LICENSE + * + * The MIT License + * + * @copyright Copyright (c) 2024 Intel Corporation + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, + * including without limitation the rights to use, copy, modify, + * merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + * ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + * + */ +#ifndef CLEAN_VDMS_TIMERMAP_H +#define CLEAN_VDMS_TIMERMAP_H + +#include +#include +#include +#include +#include + +class TimerMap { + +private: + std::map *> + timemap; + std::mutex maplock; + +public: + ~TimerMap(); + + /** + * Add a new timestamp at the current wall-clock time to a vector associated + * with the specified timerID. If the timer id specified does not exist, it + * will be created. It it already exists, a new timestamp will be added to the + * associated ID + * @param timerid c++ string identifying which time vector to append a + * timestamp too. + */ + void add_timestamp(std::string timerid); + + /** + * Returns all timestamps associated with a particular timer ID, may + * return empty vector if ID is not found + * @param timerid string identifying a particular timestamp vector + * @return vector of long longs representing wall-clock time in microseconds + */ + std::vector return_all_timestamps(std::string timerid); + + /** + * Return a vector of doubles for all time intervals in microseconds + * associated with the specified time ID For example if there are 3 timestamps + * affiliated with the specified timeid 2 times will be returned. If there are + * 4, 3 will be returned. If there are zero or one times in the time vector, + * an empty vector will be returned + * @param timeid c++ string identifying which time vector to calculate + * intervals over + * @return possibly empty vector of return intervals in microseconds. 
+ */ + std::vector return_intervals(std::string timerid); + + /** + * Calculates the total time in microseconds between the first and + * last captured timestamp for a particular time-id + * @param timeid c++ string identifying which time vector to calculate total + * time over + * @return double in microseconds + */ + double return_total_time(std::string timerid); + + /** + * Used to get a list of all the current time-ids that are being tracked + * @return vector of strings, each element being a timer ID with one of more + * timestamps + */ + std::vector return_ids(); + + /** + * Deletes the specific timerID and its associated vector + * @param timerid + */ + void clear_id(std::string timerid); + + /** + * Prints out the runtimes of the timer map to stdout + */ + void print_map_runtimes(); + + /** + * Clears contents of timer map without destroying entrie structure + */ + void clear_all_timers(); +}; + +#endif // CLEAN_VDMS_TIMERMAP_H diff --git a/utils/src/api_schema/api_schema.json b/utils/src/api_schema/api_schema.json index 86803b9a..9725c663 100644 --- a/utils/src/api_schema/api_schema.json +++ b/utils/src/api_schema/api_schema.json @@ -698,6 +698,7 @@ "operations": { "$ref": "#/definitions/blockImageOperations" }, "format": { "$ref": "#/definitions/imgFormatString" }, "constraints": { "type": "object" }, + "metaconstraints": { "type": "object" }, "results": { "$ref": "#/definitions/blockResults" }, "unique": { "type": "boolean" } }, @@ -878,14 +879,16 @@ "FindVideo": { "properties": { - "_ref": { "$ref": "#/definitions/refInt" }, - "link": { "$ref": "#/definitions/blockLink" }, - "operations": { "$ref": "#/definitions/blockVideoOperations" }, - "codec": { "$ref": "#/definitions/vidCodecString" }, - "container": { "$ref": "#/definitions/vidContainerString" }, - "constraints": { "type": "object" }, - "results": { "$ref": "#/definitions/blockResults" }, - "unique": { "type": "boolean" } + "_ref": { "$ref": "#/definitions/refInt" }, + "link": { "$ref": 
"#/definitions/blockLink" }, + "operations": { "$ref": "#/definitions/blockVideoOperations" }, + "codec": { "$ref": "#/definitions/vidCodecString" }, + "container": { "$ref": "#/definitions/vidContainerString" }, + "constraints": { "type": "object" }, + "metaconstraints": { "type": "object" }, + "frameconstraints": { "type": "boolean" }, + "results": { "$ref": "#/definitions/blockResults" }, + "unique": { "type": "boolean" } }, "additionalProperties": false diff --git a/utils/src/chrono/Chrono.cc b/utils/src/chrono/Chrono.cc deleted file mode 100644 index 97462f29..00000000 --- a/utils/src/chrono/Chrono.cc +++ /dev/null @@ -1,235 +0,0 @@ -/** - * @section LICENSE - * - * The MIT License - * - * @copyright Copyright (c) 2017 Intel Corporation - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, - * including without limitation the rights to use, copy, modify, - * merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - * ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - * THE SOFTWARE. 
- * - */ - -#include -#include -#include - -#include - -#include "Chrono.h" - -using namespace std; - -// ***************************************************************************** -// Public methods definitions -// ***************************************************************************** -Chrono::Chrono(const string &name, const bool asyncEnabled) - : name(name), enabled(true) { - elapsedStats.name = "elapsedStats"; - periodStats.name = "periodStats"; - reset(); -} - -Chrono::Chrono() : Chrono("no_name") {} - -Chrono::~Chrono(void) {} - -void Chrono::tic(void) { - if (!enabled) { - return; - } - - if (ticIdle) { - ticIdle = false; - doTic(); - } else { - ++errors; - cerr << "Chrono::tic - " << name - << ": Calling Chrono::tic with no matching Chrono::tag!" << endl; - } -} - -void Chrono::tac(void) { - if (!enabled) { - return; - } - - if (!ticIdle) { - ticIdle = true; - doTac(); - } else { - ++errors; - cerr << "Chrono::tac - " << name - << ": Calling Chrono::tac with no matching Chrono::tic!" 
<< endl; - } -} - -void Chrono::reset(void) { - ticIdle = true; - errors = 0; - resetStats(elapsedStats); - resetStats(periodStats); -} - -void Chrono::setEnabled(const bool val) { enabled = val; } - -std::ostream &Chrono::printStats(const Chrono::ChronoStats &stats, - std::ostream &os) const { - os.precision(2); - os << fixed; - os << name << ": " << stats.name << endl; - os << "\terrors: " << errors << endl; - os << "\ttotalTime: " << stats.totalTime_ms << " [ms]" << endl; - os << "\taverageTime: " << stats.averageTime_ms << " [ms]" << endl; - os << "\tstdDevTime: " << stats.stdDevTime_ms << " [ms]" << endl; - os << "\tlastTime: " << stats.lastTime_ms << " [ms]" << endl; - os << "\tminTime: " << stats.minTime_ms << " [ms]" << endl; - os << "\tmaxTime: " << stats.maxTime_ms << " [ms]" << endl; - - return os; -} - -std::ostream &Chrono::printAvgTime(const Chrono::ChronoStats &stats, - std::ostream &os) const { - os.precision(2); - os << fixed; - os << name << ": " << stats.name << " -> " - << "averageTime: " << stats.averageTime_ms << " [ms]" << endl; - - return os; -} - -std::ostream &Chrono::printAvgTime(const Chrono::ChronoStats &stats, - std::ostream &os, const float ref) const { - os.precision(2); - os << fixed; - os << name << ": " << stats.name << " -> " - << "averageTime: " << stats.averageTime_ms << " [ms] ("; - os << (stats.averageTime_ms / ref * 100.0f) << "%)" << endl; - - return os; -} - -// ***************************************************************************** -// Private/Protected methods definitions -// ***************************************************************************** -void Chrono::resetStats(ChronoStats &stats) { - stats.counter = 0; - stats.totalTime_ms = 0.0f; - stats.totalSquaredTime_ms2 = 0.0f; - stats.averageTime_ms = 0.0f; - stats.stdDevTime_ms = 0.0f; - stats.lastTime_ms = 0.0f; - stats.minTime_ms = 0.0f; - stats.maxTime_ms = 0.0f; -} - -void Chrono::updateStats(ChronoStats &stats) { - ++stats.counter; - stats.totalTime_ms 
+= stats.lastTime_ms; - stats.totalSquaredTime_ms2 += stats.lastTime_ms * stats.lastTime_ms; - stats.averageTime_ms = stats.totalTime_ms / (float)stats.counter; - stats.stdDevTime_ms = - sqrtf(stats.totalSquaredTime_ms2 / (float)stats.counter - - stats.averageTime_ms * stats.averageTime_ms); - if (stats.counter > 1) { - stats.maxTime_ms = max(stats.lastTime_ms, stats.maxTime_ms); - stats.minTime_ms = min(stats.lastTime_ms, stats.minTime_ms); - } else { - stats.maxTime_ms = stats.lastTime_ms; - stats.minTime_ms = stats.lastTime_ms; - } -} - -// ***************************************************************************** -// ChronoCpu Implementation -// ***************************************************************************** - -ChronoCpu::ChronoCpu(const string &name) : Chrono(name), ticCounter(0) { - memset((void *)&lastTicTime, 0, sizeof(lastTicTime)); - memset((void *)&ticTime, 0, sizeof(ticTime)); - memset((void *)&tacTime, 0, sizeof(tacTime)); - -#ifdef __MACH__ // OS X does not have clock_gettime, use clock_get_time - host_get_clock_service(mach_host_self(), CALENDAR_CLOCK, &cclock); -#endif -} - -ChronoCpu::~ChronoCpu(void) { -#ifdef __MACH__ // OS X does not have clock_gettime, use clock_get_time - mach_port_deallocate(mach_task_self(), cclock); -#endif -} - -ChronoCpu::ChronoCpu() : ChronoCpu("no_name") {} - -// ***************************************************************************** -// Private/Protected methods definitions -// ***************************************************************************** -void ChronoCpu::doTic(void) { - lastTicTime = ticTime; - -#ifdef __MACH__ // OS X does not have clock_gettime, use clock_get_time - clock_get_time(cclock, &mts); - ticTime.tv_sec = mts.tv_sec; - ticTime.tv_nsec = mts.tv_nsec; -#else - if (clock_gettime(CLOCK_REALTIME, &ticTime) != 0) { - ++errors; - cerr << "ChronoCpu::doTic - " << name << ": clock_gettime() failed!" 
- << endl; - return; - } -#endif - - ++ticCounter; - - if (ticCounter > 1) { - float period_s = (float)(ticTime.tv_sec - lastTicTime.tv_sec); - float period_ns = (float)(ticTime.tv_nsec - lastTicTime.tv_nsec); - periodStats.lastTime_ms = period_s * 1e3f + period_ns / 1e6f; - updateStats(periodStats); - } -} - -void ChronoCpu::doTac(void) { -#ifdef __MACH__ // OS X does not have clock_gettime, use clock_get_time - clock_serv_t cclock; - mach_timespec_t mts; - host_get_clock_service(mach_host_self(), CALENDAR_CLOCK, &cclock); - clock_get_time(cclock, &mts); - mach_port_deallocate(mach_task_self(), cclock); - tacTime.tv_sec = mts.tv_sec; - tacTime.tv_nsec = mts.tv_nsec; - -#else - if (clock_gettime(CLOCK_REALTIME, &tacTime) != 0) { - ++errors; - cerr << "ChronoCpu::doTac - " << name << ": clock_gettime() failed!" - << endl; - return; - } -#endif - - float elapsed_s = (float)(tacTime.tv_sec - ticTime.tv_sec); - float elapsed_ns = (float)(tacTime.tv_nsec - ticTime.tv_nsec); - elapsedStats.lastTime_ms = elapsed_s * 1e3f + elapsed_ns / 1e6f; - updateStats(elapsedStats); -} diff --git a/utils/src/comm/ConnClient.cc b/utils/src/comm/ConnClient.cc index f8ee02dc..32481960 100644 --- a/utils/src/comm/ConnClient.cc +++ b/utils/src/comm/ConnClient.cc @@ -43,6 +43,7 @@ ConnClient::ConnClient() { // create TCP/IP socket _socket_fd = socket(AF_INET, SOCK_STREAM, 0); _ssl = nullptr; + _ssl_ctx = nullptr; if (_socket_fd < 0) { throw ExceptionComm(SocketFail); @@ -55,10 +56,19 @@ ConnClient::ConnClient() { } } -ConnClient::ConnClient(ServerAddress srv) : ConnClient(srv.addr, srv.port) {} +ConnClient::ConnClient(ServerAddress srv) + : ConnClient(srv.addr, srv.port, "", "", "") {} -ConnClient::ConnClient(std::string addr, int port) : ConnClient() { +ConnClient::ConnClient(std::string addr, int port) + : ConnClient(addr, port, "", "", "") {} + +ConnClient::ConnClient(std::string addr, int port, + const std::string &cert_file = "", + const std::string &key_file = "", + const std::string 
&ca_file = "") + : ConnClient() { _ssl = nullptr; + _ssl_ctx = nullptr; if (port > MAX_PORT_NUMBER || port <= 0) { throw ExceptionComm(PortError); @@ -66,7 +76,59 @@ ConnClient::ConnClient(std::string addr, int port) : ConnClient() { _server.addr = addr; _server.port = port; + + _cert_file = cert_file; + _key_file = key_file; + _ca_file = ca_file; + + setupTLS(); connect(); + if (_ssl != nullptr) { + initiateTLS(); + } +} + +void ConnClient::setupTLS() { + + if (!_cert_file.empty() && !_key_file.empty()) { + const SSL_METHOD *method; + method = TLS_client_method(); + _ssl_ctx = SSL_CTX_new(method); + if (!_ssl_ctx) { + throw ExceptionComm(SSL_CONTEXT_FAIL); + } + + if (SSL_CTX_use_certificate_file(_ssl_ctx, _cert_file.c_str(), + SSL_FILETYPE_PEM) <= 0) { + throw ExceptionComm(SSL_CERT_FAIL); + } + + if (SSL_CTX_use_PrivateKey_file(_ssl_ctx, _key_file.c_str(), + SSL_FILETYPE_PEM) <= 0) { + throw ExceptionComm(SSL_KEY_FAIL); + } + + if (!_ca_file.empty()) { + if (SSL_CTX_load_verify_locations(_ssl_ctx, _ca_file.c_str(), nullptr) <= + 0) { + throw ExceptionComm(SSL_CA_FAIL); + } + } + + _ssl = SSL_new(_ssl_ctx); + if (!_ssl) { + throw ExceptionComm(SSL_CONTEXT_FAIL); + } + } +} + +void ConnClient::initiateTLS() { + if (SSL_set_fd(_ssl, _socket_fd) <= 0) { + throw ExceptionComm(SSL_SET_FD_FAIL); + } + if (SSL_connect(_ssl) <= 0) { + throw ExceptionComm(SSL_CONNECT_FAIL); + } } void ConnClient::connect() { diff --git a/utils/src/timers/TimerMap.cc b/utils/src/timers/TimerMap.cc new file mode 100644 index 00000000..e62d5422 --- /dev/null +++ b/utils/src/timers/TimerMap.cc @@ -0,0 +1,180 @@ +/** + * @section LICENSE + * + * The MIT License + * + * @copyright Copyright (c) 2024 Intel Corporation + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, + * including without limitation the rights to use, copy, modify, + * 
merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + * ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + * + */ +#include "../../include/timers/TimerMap.h" +#include + +void TimerMap::add_timestamp(std::string timerid) { + + int element_count; + + maplock.lock(); + std::vector *timestamps; + // if the time ID already exists, extract the timestamp vector + // and add a new timestamp + element_count = timemap.count(timerid); + if (element_count > 0) { + timestamps = timemap.at(timerid); + } else { + // else create a new vector with chrono value + timemap.insert( + std::pair *>( + timerid, new std::vector)); + timestamps = timemap.at(timerid); + } + timestamps->push_back(std::chrono::steady_clock::now()); + maplock.unlock(); +} + +std::vector TimerMap::return_all_timestamps(std::string timerid) { + + int element_count; + std::chrono::steady_clock::time_point cur; + std::vector timevals; + std::vector *timestamps; + int i; + long long timeval_us; + + element_count = timemap.count(timerid); + if (element_count > 0) { + timestamps = timemap.at(timerid); + for (i = 0; i < timestamps->size(); i++) { + cur = timestamps->at(i); + timeval_us = std::chrono::time_point_cast(cur) + .time_since_epoch() + .count(); + timevals.push_back(timeval_us); + } + } + + return 
timevals; +} + +std::vector TimerMap::return_intervals(std::string timerid) { + + int element_count; + double runtime; + std::chrono::steady_clock::time_point cur, next; + std::vector intervals; + + element_count = timemap.count(timerid); + if (element_count > 0) { + std::vector *timestamps = + timemap.at(timerid); + + for (int i = 0; i < timestamps->size(); i++) { + + // If there's a timestampe "ahead", calculate the delta with that and + // store it + if (i < timestamps->size() - 1) { + cur = timestamps->at(i); + next = timestamps->at(i + 1); + + runtime = + std::chrono::duration_cast(next - cur) + .count(); + intervals.push_back(runtime); + } + } + } + return intervals; +} + +double TimerMap::return_total_time(std::string timerid) { + int element_count; + std::chrono::steady_clock::time_point start, end; + double runtime; + + element_count = timemap.count(timerid); + if (element_count > 0) { + std::vector *timestamps = + timemap.at(timerid); + start = timestamps->front(); + end = timestamps->back(); + + runtime = std::chrono::duration_cast(end - start) + .count(); + + return runtime; + } else { + return 0.0; + } +} + +std::vector TimerMap::return_ids() { + + std::vector keylist; + for (const auto &[key, value] : timemap) { + keylist.push_back(key); + } + + return keylist; +} + +void TimerMap::clear_id(std::string timerid) { + + std::vector *timestamps; + auto it = timemap.find(timerid); + if (it != timemap.end()) { + timestamps = timemap.at(timerid); + delete timestamps; + timemap.erase(it); + } +} + +void TimerMap::print_map_runtimes() { + + std::vector timer_id_list; + // extract all runtimes, put into a JSON + timer_id_list = return_ids(); + for (int i = 0; i < timer_id_list.size(); i++) { + std::string cur_id = timer_id_list[i]; + std::cout << cur_id << " : " << return_total_time(cur_id) << std::endl; + } +} + +void TimerMap::clear_all_timers() { timemap.clear(); } + +TimerMap::~TimerMap() { + std::vector timerids; + timerids = this->return_ids(); + 
std::string cur_id; + int i; + + for (i = 0; i < timerids.size(); i++) { + cur_id = timerids[i]; + std::vector *timestamps; + timestamps = timemap.at(cur_id); + delete timestamps; + auto it = timemap.find(cur_id); + if (it != timemap.end()) { + timemap.erase(it); + } + } +}