diff --git a/src/Catty.xcodeproj/project.pbxproj b/src/Catty.xcodeproj/project.pbxproj index 56e9285706..41fdcc4a33 100644 --- a/src/Catty.xcodeproj/project.pbxproj +++ b/src/Catty.xcodeproj/project.pbxproj @@ -364,10 +364,6 @@ 49402BBF28118354009FCBF8 /* RightMiddleFingerKnuckleXSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49402BBB28118354009FCBF8 /* RightMiddleFingerKnuckleXSensor.swift */; }; 49402BC028118354009FCBF8 /* RightRingFingerKnuckleXSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49402BBC28118354009FCBF8 /* RightRingFingerKnuckleXSensor.swift */; }; 49402BC128118354009FCBF8 /* RightRingFingerKnuckleYSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49402BBD28118354009FCBF8 /* RightRingFingerKnuckleYSensor.swift */; }; - 49519D2F27DCD6B100E32E88 /* SecondFaceDetectedSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49519D2E27DCD6B100E32E88 /* SecondFaceDetectedSensor.swift */; }; - 49519D3327DCE0EC00E32E88 /* SecondFacePositionXSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49519D3227DCE0EC00E32E88 /* SecondFacePositionXSensor.swift */; }; - 49519D3527DCE0FE00E32E88 /* SecondFacePositionYSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49519D3427DCE0FE00E32E88 /* SecondFacePositionYSensor.swift */; }; - 49519D3727DCE11700E32E88 /* SecondFaceSizeSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49519D3627DCE11700E32E88 /* SecondFaceSizeSensor.swift */; }; 49530A8227FDDADC003A9E24 /* FaceDetectionSensors.xml in Resources */ = {isa = PBXBuildFile; fileRef = 49530A8127FDDADC003A9E24 /* FaceDetectionSensors.xml */; }; 49530A8427FDDC04003A9E24 /* BackwardsCompatibleFaceDetectionSensors.xml in Resources */ = {isa = PBXBuildFile; fileRef = 49530A8327FDDC03003A9E24 /* BackwardsCompatibleFaceDetectionSensors.xml */; }; 4963EB632812C5E100A9B3FA /* TextBlockXFunction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4963EB622812C5E100A9B3FA /* TextBlockXFunction.swift */; }; @@ -393,6 +389,22 @@ 4981EDFC289AC98C0052AEB1 /* YOfObjectWithIDFunction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EDFA289AC98C0052AEB1 /* YOfObjectWithIDFunction.swift */; }; 4981EE03289ACB2E0052AEB1 /* HeightOfObjectWithIDFunction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EDFF289ACB2D0052AEB1 /* HeightOfObjectWithIDFunction.swift */; }; 4981EE04289ACB2E0052AEB1 /* WidthOfObjectWithIDFunction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE02289ACB2E0052AEB1 /* WidthOfObjectWithIDFunction.swift */; }; + 4981EE0C289ACE330052AEB1 /* PoseNet.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE09289ACE330052AEB1 /* PoseNet.mlmodel */; }; + 4981EE27289ACEF90052AEB1 /* SecondFacePositionYSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE1F289ACEF90052AEB1 /* SecondFacePositionYSensor.swift */; }; + 4981EE28289ACEF90052AEB1 /* FacePositionYSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE20289ACEF90052AEB1 /* FacePositionYSensor.swift */; }; + 4981EE29289ACEF90052AEB1 /* FaceSizeSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE21289ACEF90052AEB1 /* FaceSizeSensor.swift */; }; + 4981EE2A289ACEF90052AEB1 /* SecondFaceDetectedSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE22289ACEF90052AEB1 /* SecondFaceDetectedSensor.swift */; }; + 4981EE2B289ACEF90052AEB1 /* FaceDetectedSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE23289ACEF90052AEB1 /* FaceDetectedSensor.swift */; }; + 4981EE2C289ACEF90052AEB1 /* 
SecondFaceSizeSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE24289ACEF90052AEB1 /* SecondFaceSizeSensor.swift */; }; + 4981EE2D289ACEF90052AEB1 /* FacePositionXSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE25289ACEF90052AEB1 /* FacePositionXSensor.swift */; }; + 4981EE2E289ACEF90052AEB1 /* SecondFacePositionXSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE26289ACEF90052AEB1 /* SecondFacePositionXSensor.swift */; }; + 4981EE36289ACF1A0052AEB1 /* ObjectDetectionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE30289ACF1A0052AEB1 /* ObjectDetectionManager.swift */; }; + 4981EE37289ACF1A0052AEB1 /* HandPoseDetectionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE31289ACF1A0052AEB1 /* HandPoseDetectionManager.swift */; }; + 4981EE38289ACF1A0052AEB1 /* BodyPoseDetectionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE32289ACF1A0052AEB1 /* BodyPoseDetectionManager.swift */; }; + 4981EE39289ACF1A0052AEB1 /* VisualDetectionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE33289ACF1A0052AEB1 /* VisualDetectionManager.swift */; }; + 4981EE3A289ACF1A0052AEB1 /* TextRecognitionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE34289ACF1A0052AEB1 /* TextRecognitionManager.swift */; }; + 4981EE3B289ACF1A0052AEB1 /* FaceDetectionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE35289ACF1A0052AEB1 /* FaceDetectionManager.swift */; }; + 4981EE3D289ACF740052AEB1 /* PoseNetModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4981EE3C289ACF740052AEB1 /* PoseNetModel.swift */; }; 498C156A2807057B00B81C8E /* WristSensorTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49949B512806FD380073BF65 /* WristSensorTest.swift */; }; 498C156B2807057B00B81C8E /* ShoulderSensorTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49949B4D2806FD1E0073BF65 /* ShoulderSensorTest.swift */; }; 498C156C2807057B00B81C8E /* ElbowSensorTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 49949B4F2806FD300073BF65 /* ElbowSensorTest.swift */; }; @@ -945,8 +957,6 @@ 4CE3C4B22660F388007221DA /* SetBackgroundAndWaitBrick+Instruction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CE3C4B12660F387007221DA /* SetBackgroundAndWaitBrick+Instruction.swift */; }; 4CE3C4B42660F3AC007221DA /* SetInstrumentBrick+Instruction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CE3C4B32660F3AC007221DA /* SetInstrumentBrick+Instruction.swift */; }; 4CE3C4B82660F3E0007221DA /* SetLookByIndexBrick+Instruction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CE3C4B72660F3E0007221DA /* SetLookByIndexBrick+Instruction.swift */; }; - 4CE3D68B2107A89B00005629 /* VisualDetectionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CE3D68A2107A89B00005629 /* VisualDetectionManager.swift */; }; - 4CE3D68D2107B4E300005629 /* FaceDetectedSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CE3D68C2107B4E300005629 /* FaceDetectedSensor.swift */; }; 4CE3D68F2107B68600005629 /* VisualDetectionManagerProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CE3D68E2107B68600005629 /* VisualDetectionManagerProtocol.swift */; }; 4CE5C5701E0AD71800FD021C /* RequiredResourcesTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CE5C56E1E0AD71800FD021C /* RequiredResourcesTests.swift */; }; 4CE67888210C4B1D00D90B5D /* Function.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CE67887210C4B1D00D90B5D /* Function.swift */; }; @@ -2043,9 +2053,6 @@ 
F4201CF920D79CA80030181B /* TimeHourSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = F4201CF820D79CA80030181B /* TimeHourSensor.swift */; }; F4201CFB20D7A1060030181B /* TimeMinuteSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = F4201CFA20D7A1060030181B /* TimeMinuteSensor.swift */; }; F4201D0120D7A3B60030181B /* TimeSecondSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = F4201D0020D7A3B60030181B /* TimeSecondSensor.swift */; }; - F45AF6FD212960A3000F88A6 /* FacePositionXSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = F45AF6FC212960A3000F88A6 /* FacePositionXSensor.swift */; }; - F45AF701212960B4000F88A6 /* FaceSizeSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = F45AF700212960B4000F88A6 /* FaceSizeSensor.swift */; }; - F45AF703212960D4000F88A6 /* FacePositionYSensor.swift in Sources */ = {isa = PBXBuildFile; fileRef = F45AF702212960D4000F88A6 /* FacePositionYSensor.swift */; }; F4664F9220DD7A1700E1519A /* FormulaEditorViewControllerSectionExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = F4664F9120DD7A1700E1519A /* FormulaEditorViewControllerSectionExtension.swift */; }; F470A2E32129DAFB005EC9A3 /* ElementFunction.swift in Sources */ = {isa = PBXBuildFile; fileRef = F470A2E22129DAFB005EC9A3 /* ElementFunction.swift */; }; F470A2E52129DF42005EC9A3 /* NumberOfItemsFunction.swift in Sources */ = {isa = PBXBuildFile; fileRef = F470A2E42129DF42005EC9A3 /* NumberOfItemsFunction.swift */; }; @@ -2515,10 +2522,6 @@ 49402BBB28118354009FCBF8 /* RightMiddleFingerKnuckleXSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RightMiddleFingerKnuckleXSensor.swift; sourceTree = ""; }; 49402BBC28118354009FCBF8 /* RightRingFingerKnuckleXSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RightRingFingerKnuckleXSensor.swift; sourceTree = ""; }; 49402BBD28118354009FCBF8 /* RightRingFingerKnuckleYSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RightRingFingerKnuckleYSensor.swift; sourceTree = ""; }; - 49519D2E27DCD6B100E32E88 /* SecondFaceDetectedSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecondFaceDetectedSensor.swift; sourceTree = ""; }; - 49519D3227DCE0EC00E32E88 /* SecondFacePositionXSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecondFacePositionXSensor.swift; sourceTree = ""; }; - 49519D3427DCE0FE00E32E88 /* SecondFacePositionYSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecondFacePositionYSensor.swift; sourceTree = ""; }; - 49519D3627DCE11700E32E88 /* SecondFaceSizeSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SecondFaceSizeSensor.swift; sourceTree = ""; }; 49530A8127FDDADC003A9E24 /* FaceDetectionSensors.xml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = FaceDetectionSensors.xml; sourceTree = ""; }; 49530A8327FDDC03003A9E24 /* BackwardsCompatibleFaceDetectionSensors.xml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = BackwardsCompatibleFaceDetectionSensors.xml; sourceTree = ""; }; 4963EB622812C5E100A9B3FA /* TextBlockXFunction.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TextBlockXFunction.swift; sourceTree = ""; }; @@ -2543,6 +2546,22 @@ 4981EDFA289AC98C0052AEB1 /* YOfObjectWithIDFunction.swift 
*/ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = YOfObjectWithIDFunction.swift; path = Catty/Functions/ObjectRecognition/YOfObjectWithIDFunction.swift; sourceTree = SOURCE_ROOT; }; 4981EDFF289ACB2D0052AEB1 /* HeightOfObjectWithIDFunction.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = HeightOfObjectWithIDFunction.swift; sourceTree = ""; }; 4981EE02289ACB2E0052AEB1 /* WidthOfObjectWithIDFunction.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = WidthOfObjectWithIDFunction.swift; sourceTree = ""; }; + 4981EE09289ACE330052AEB1 /* PoseNet.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = PoseNet.mlmodel; sourceTree = ""; }; + 4981EE1F289ACEF90052AEB1 /* SecondFacePositionYSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SecondFacePositionYSensor.swift; sourceTree = ""; }; + 4981EE20289ACEF90052AEB1 /* FacePositionYSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FacePositionYSensor.swift; sourceTree = ""; }; + 4981EE21289ACEF90052AEB1 /* FaceSizeSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FaceSizeSensor.swift; sourceTree = ""; }; + 4981EE22289ACEF90052AEB1 /* SecondFaceDetectedSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SecondFaceDetectedSensor.swift; sourceTree = ""; }; + 4981EE23289ACEF90052AEB1 /* FaceDetectedSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FaceDetectedSensor.swift; sourceTree = ""; }; + 4981EE24289ACEF90052AEB1 /* SecondFaceSizeSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SecondFaceSizeSensor.swift; sourceTree = ""; }; + 4981EE25289ACEF90052AEB1 /* FacePositionXSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FacePositionXSensor.swift; sourceTree = ""; }; + 4981EE26289ACEF90052AEB1 /* SecondFacePositionXSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SecondFacePositionXSensor.swift; sourceTree = ""; }; + 4981EE30289ACF1A0052AEB1 /* ObjectDetectionManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ObjectDetectionManager.swift; sourceTree = ""; }; + 4981EE31289ACF1A0052AEB1 /* HandPoseDetectionManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = HandPoseDetectionManager.swift; sourceTree = ""; }; + 4981EE32289ACF1A0052AEB1 /* BodyPoseDetectionManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BodyPoseDetectionManager.swift; sourceTree = ""; }; + 4981EE33289ACF1A0052AEB1 /* VisualDetectionManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VisualDetectionManager.swift; sourceTree = ""; }; + 4981EE34289ACF1A0052AEB1 /* TextRecognitionManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = TextRecognitionManager.swift; sourceTree = ""; }; + 4981EE35289ACF1A0052AEB1 /* FaceDetectionManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; 
path = FaceDetectionManager.swift; sourceTree = ""; }; + 4981EE3C289ACF740052AEB1 /* PoseNetModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = PoseNetModel.swift; path = Catty/PlayerEngine/Sensors/BodyPose/PoseNetModel.swift; sourceTree = SOURCE_ROOT; }; 498C1570280708E800B81C8E /* UpperBodyPoseDetectionSensors.xml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = UpperBodyPoseDetectionSensors.xml; sourceTree = ""; }; 498C157228070C6B00B81C8E /* LeftKneeXSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LeftKneeXSensor.swift; sourceTree = ""; }; 498C157328070C6B00B81C8E /* RightAnkleXSensor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RightAnkleXSensor.swift; sourceTree = ""; }; @@ -3175,8 +3194,6 @@ 4CE3C4B12660F387007221DA /* SetBackgroundAndWaitBrick+Instruction.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = "SetBackgroundAndWaitBrick+Instruction.swift"; path = "PlayerEngine/Instructions/Look/SetBackgroundAndWaitBrick+Instruction.swift"; sourceTree = ""; }; 4CE3C4B32660F3AC007221DA /* SetInstrumentBrick+Instruction.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = "SetInstrumentBrick+Instruction.swift"; path = "PlayerEngine/Instructions/Sound/SetInstrumentBrick+Instruction.swift"; sourceTree = ""; }; 4CE3C4B72660F3E0007221DA /* SetLookByIndexBrick+Instruction.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = "SetLookByIndexBrick+Instruction.swift"; path = "PlayerEngine/Instructions/Look/SetLookByIndexBrick+Instruction.swift"; sourceTree = ""; }; - 4CE3D68A2107A89B00005629 /* VisualDetectionManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VisualDetectionManager.swift; sourceTree = ""; }; - 4CE3D68C2107B4E300005629 /* FaceDetectedSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceDetectedSensor.swift; sourceTree = ""; }; 4CE3D68E2107B68600005629 /* VisualDetectionManagerProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VisualDetectionManagerProtocol.swift; sourceTree = ""; }; 4CE5C56E1E0AD71800FD021C /* RequiredResourcesTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RequiredResourcesTests.swift; sourceTree = ""; }; 4CE67887210C4B1D00D90B5D /* Function.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Function.swift; sourceTree = ""; }; @@ -3353,9 +3370,9 @@ 758FD262053C04755851BC4D /* ml */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.strings; name = ml; path = ml.lproj/Localizable.strings; sourceTree = ""; }; 77A836DDE7C46D9275CE4812 /* ar */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.strings; name = ar; path = ar.lproj/Localizable.strings; sourceTree = ""; }; 7A7C5B407D7B4FD7DA1E1DBD /* hr */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.strings; name = hr; path = hr.lproj/Localizable.strings; sourceTree = ""; }; - 7B014D0E29E7122E004AA0E0 /* CattyTestplan.xctestplan */ = {isa = PBXFileReference; lastKnownFileType = text; name = CattyTestplan.xctestplan; path = Catty.xcodeproj/CattyTestplan.xctestplan; sourceTree = ""; }; 
7B014D0329E0ADE5004AA0E0 /* StoreAuthenticator.deleteUser.success.refresh.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = StoreAuthenticator.deleteUser.success.refresh.json; sourceTree = ""; }; 7B014D0429E0ADE5004AA0E0 /* StoreAuthenticator.deleteUser.success.upgrade.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = StoreAuthenticator.deleteUser.success.upgrade.json; sourceTree = ""; }; + 7B014D0E29E7122E004AA0E0 /* CattyTestplan.xctestplan */ = {isa = PBXFileReference; lastKnownFileType = text; name = CattyTestplan.xctestplan; path = Catty.xcodeproj/CattyTestplan.xctestplan; sourceTree = ""; }; 7B09EB6529B1DF8200590F3D /* RegisterViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RegisterViewController.swift; sourceTree = ""; }; 7B09EB6629B1DF8200590F3D /* BaseAuthenticationViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BaseAuthenticationViewController.swift; sourceTree = ""; }; 7B09EB6729B1DF8200590F3D /* LoginViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LoginViewController.swift; sourceTree = ""; }; @@ -4736,9 +4753,6 @@ F4201CF820D79CA80030181B /* TimeHourSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TimeHourSensor.swift; sourceTree = ""; }; F4201CFA20D7A1060030181B /* TimeMinuteSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TimeMinuteSensor.swift; sourceTree = ""; }; F4201D0020D7A3B60030181B /* TimeSecondSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TimeSecondSensor.swift; sourceTree = ""; }; - F45AF6FC212960A3000F88A6 /* FacePositionXSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FacePositionXSensor.swift; sourceTree = ""; }; - F45AF700212960B4000F88A6 /* FaceSizeSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceSizeSensor.swift; sourceTree = ""; }; - F45AF702212960D4000F88A6 /* FacePositionYSensor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FacePositionYSensor.swift; sourceTree = ""; }; F4664F9120DD7A1700E1519A /* FormulaEditorViewControllerSectionExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FormulaEditorViewControllerSectionExtension.swift; sourceTree = ""; }; F470A2E22129DAFB005EC9A3 /* ElementFunction.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ElementFunction.swift; sourceTree = ""; }; F470A2E42129DF42005EC9A3 /* NumberOfItemsFunction.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NumberOfItemsFunction.swift; sourceTree = ""; }; @@ -5854,15 +5868,45 @@ 4981EDCB289AC2690052AEB1 /* MLModels */ = { isa = PBXGroup; children = ( + 4981EE09289ACE330052AEB1 /* PoseNet.mlmodel */, 4981EDCC289AC2690052AEB1 /* YOLOv3Tiny.mlmodel */, ); name = MLModels; path = Resources/MLModels; sourceTree = ""; }; + 4981EE1E289ACEF90052AEB1 /* Face */ = { + isa = PBXGroup; + children = ( + 4981EE1F289ACEF90052AEB1 /* SecondFacePositionYSensor.swift */, + 4981EE20289ACEF90052AEB1 /* FacePositionYSensor.swift */, + 4981EE21289ACEF90052AEB1 /* FaceSizeSensor.swift */, + 4981EE22289ACEF90052AEB1 /* SecondFaceDetectedSensor.swift */, + 4981EE23289ACEF90052AEB1 /* 
FaceDetectedSensor.swift */, + 4981EE24289ACEF90052AEB1 /* SecondFaceSizeSensor.swift */, + 4981EE25289ACEF90052AEB1 /* FacePositionXSensor.swift */, + 4981EE26289ACEF90052AEB1 /* SecondFacePositionXSensor.swift */, + ); + path = Face; + sourceTree = ""; + }; + 4981EE2F289ACF1A0052AEB1 /* DetectionManager */ = { + isa = PBXGroup; + children = ( + 4981EE30289ACF1A0052AEB1 /* ObjectDetectionManager.swift */, + 4981EE31289ACF1A0052AEB1 /* HandPoseDetectionManager.swift */, + 4981EE32289ACF1A0052AEB1 /* BodyPoseDetectionManager.swift */, + 4981EE33289ACF1A0052AEB1 /* VisualDetectionManager.swift */, + 4981EE34289ACF1A0052AEB1 /* TextRecognitionManager.swift */, + 4981EE35289ACF1A0052AEB1 /* FaceDetectionManager.swift */, + ); + path = DetectionManager; + sourceTree = ""; + }; 49949B292806EA500073BF65 /* BodyPose */ = { isa = PBXGroup; children = ( + 4981EE3C289ACF740052AEB1 /* PoseNetModel.swift */, 49949B2D2806EAD10073BF65 /* NeckXSensor.swift */, 49949B392806EAD20073BF65 /* NeckYSensor.swift */, 49949B332806EAD10073BF65 /* LeftShoulderXSensor.swift */, @@ -7294,12 +7338,13 @@ 4C994DFA207A3D4100C415FD /* Sensors */ = { isa = PBXGroup; children = ( + 4981EE2F289ACF1A0052AEB1 /* DetectionManager */, 497258042812822400CB9A2D /* TextRecognition */, 49402B8728116F5A009FCBF8 /* HandPose */, 49949B292806EA500073BF65 /* BodyPose */, 49E494DC27F596630035578D /* FacePose */, + 4981EE1E289ACEF90052AEB1 /* Face */, 4C994E05207A3D4100C415FD /* Audio */, - 4CE3D6862107A80000005629 /* Camera */, 4CF072AC20D6751E00F93AB5 /* Date */, 4CA502FD268C8CA600B331EC /* Device */, 4C11795220AEC351006C6E32 /* Extensions */, @@ -7527,22 +7572,6 @@ path = PlayerEngine/Formula; sourceTree = ""; }; - 4CE3D6862107A80000005629 /* Camera */ = { - isa = PBXGroup; - children = ( - 4CE3D68C2107B4E300005629 /* FaceDetectedSensor.swift */, - F45AF700212960B4000F88A6 /* FaceSizeSensor.swift */, - F45AF6FC212960A3000F88A6 /* FacePositionXSensor.swift */, - F45AF702212960D4000F88A6 /* FacePositionYSensor.swift */, - 4CE3D68A2107A89B00005629 /* VisualDetectionManager.swift */, - 49519D2E27DCD6B100E32E88 /* SecondFaceDetectedSensor.swift */, - 49519D3227DCE0EC00E32E88 /* SecondFacePositionXSensor.swift */, - 49519D3427DCE0FE00E32E88 /* SecondFacePositionYSensor.swift */, - 49519D3627DCE11700E32E88 /* SecondFaceSizeSensor.swift */, - ); - path = Camera; - sourceTree = ""; - }; 4CE5C56C1E0AD71800FD021C /* Project */ = { isa = PBXGroup; children = ( @@ -12814,6 +12843,7 @@ AA74EF771BC0589700D1E954 /* ChangeVariableBrick+CBXMLHandler.m in Sources */, FEFCFB20297A9B940028CEB2 /* StitchThreadColorBrick.swift in Sources */, AAA2979C1BB371EB006F8793 /* ExternInternRepresentationMapping.m in Sources */, + 4981EE38289ACF1A0052AEB1 /* BodyPoseDetectionManager.swift in Sources */, AA74EF571BC0586F00D1E954 /* SetXBrick+CBXMLHandler.m in Sources */, 4C994E10207A3D4100C415FD /* LoudnessSensor.swift in Sources */, 49402BC028118354009FCBF8 /* RightRingFingerKnuckleXSensor.swift in Sources */, @@ -12825,7 +12855,6 @@ 4CE3C4B82660F3E0007221DA /* SetLookByIndexBrick+Instruction.swift in Sources */, AA74F0941BC05FCE00D1E954 /* BrickCellLookData.m in Sources */, 59032EE524A29CAE00BD86C4 /* WhenBackgroundChangesScript+Condition.swift in Sources */, - F45AF6FD212960A3000F88A6 /* FacePositionXSensor.swift in Sources */, 4979F41127E85ACA00ABCAD6 /* LeftEyeInnerYSensor.swift in Sources */, 99EBDBFA1D562088008D6860 /* ChangeColorByNBrick+CBXMLHandler.m in Sources */, 99C8D1BB1D8AE339003FBC42 /* BrickCellStaticChoiceData.m in Sources */, @@ -12907,7 +12936,6 @@ 
92B46FA31BC7BDF7004980C1 /* ArduinoSendPWMValueBrickCell.m in Sources */, FEFCFB22297A9F640028CEB2 /* StitchThreadColorCell.swift in Sources */, 4CB2CFA11D7AD762009F3034 /* SetColorBrick.m in Sources */, - 49519D3327DCE0EC00E32E88 /* SecondFacePositionXSensor.swift in Sources */, 057B183026DD0A3400C47E60 /* StartRunningStitchBrick+CBXMLHandler.swift in Sources */, 4C0F9F94204BD29600E71B2D /* ChooseCameraBrickCell.m in Sources */, BA987D0B2194E25A002DAA05 /* WaitUntilBrickCell.m in Sources */, @@ -12921,6 +12949,7 @@ AA74F00C1BC05B5F00D1E954 /* SetVolumeToBrick.m in Sources */, CA43C4BA1B165C0B009B604F /* CBBroadcastHandler.swift in Sources */, 49B55F9D27E88702000A95B4 /* LeftEyebrowInnerYSensor.swift in Sources */, + 4981EE39289ACF1A0052AEB1 /* VisualDetectionManager.swift in Sources */, 4CF0729E20D66F8D00F93AB5 /* LookNameSensor.swift in Sources */, 4C8C3616213AA2BD00279BC7 /* PhiroIfLogicBeginBrick+Condition.swift in Sources */, CA02C5E71B14386900233FB0 /* CBSpriteNode.swift in Sources */, @@ -12935,7 +12964,6 @@ 4C2048802671F17F00AE2E1B /* WebRequestDownloaderFactory.swift in Sources */, 4C724E251B4D3E8C00E27479 /* CBXMLParser.m in Sources */, 6F4F8AFF24CF261C002C3814 /* UserDataContainer.swift in Sources */, - F45AF701212960B4000F88A6 /* FaceSizeSensor.swift in Sources */, 9200C2D61C8082EF002F5CA4 /* ShowTextBrickCell.m in Sources */, 49949B442806EAD20073BF65 /* RightElbowYSensor.swift in Sources */, 4CB0F9F420A173B700D1BE2F /* RotationSensor.swift in Sources */, @@ -13196,6 +13224,7 @@ 4CBDF1A621CF82F100325338 /* Operator.swift in Sources */, 92D56C371BF2059F00A54750 /* PhiroDevice.swift in Sources */, 925819881C0208E8003247A9 /* CALayer+XibConfiguration.m in Sources */, + 4981EE3B289ACF1A0052AEB1 /* FaceDetectionManager.swift in Sources */, F4664F9220DD7A1700E1519A /* FormulaEditorViewControllerSectionExtension.swift in Sources */, 4C0F9F65204BD18600E71B2D /* PreviousLookBrick+CBXMLHandler.swift in Sources */, 49402B9328116FA2009FCBF8 /* LeftIndexKnuckleYSensor.swift in Sources */, @@ -13217,6 +13246,7 @@ 46B472C225609573007972DB /* StitchBrick+CBXMLHandler.swift in Sources */, 056677F927DF4C00006B4477 /* StartZigzagStitchBrick+CBXMLHandler.swift in Sources */, 4926233C27AA8A2800866183 /* SceneTableViewControllerExtension.swift in Sources */, + 4981EE2E289ACEF90052AEB1 /* SecondFacePositionXSensor.swift in Sources */, AA74F0C61BC05FCE00D1E954 /* SetVolumeToBrickCell.m in Sources */, 18E83F832493C4C3003295DA /* PenClearBrick+CBXMLHandler.swift in Sources */, 4C0F9F42204ADC6600E71B2D /* ThinkForBubbleBrick.m in Sources */, @@ -13256,7 +13286,6 @@ AA74EF001BC057C900D1E954 /* ShowBrick+CBXMLHandler.m in Sources */, AA74EFE41BC05B5F00D1E954 /* IfLogicBeginBrick.m in Sources */, 9218B20E1CC4AB75007B4C60 /* LineTool.m in Sources */, - F45AF703212960D4000F88A6 /* FacePositionYSensor.swift in Sources */, AAAF22841BC0CA0F0076F11E /* SetXBrick+Instruction.swift in Sources */, 97D015912553392A00B6967D /* RoundedImageView.swift in Sources */, 4C5076D22578DBDC00650440 /* SetInstrumentBrick+CBXMLHandler.swift in Sources */, @@ -13268,7 +13297,6 @@ 9200C2DB1C8084AA002F5CA4 /* HideTextBrick+CBXMLHandler.m in Sources */, AA74EEDD1BC057B900D1E954 /* ForeverBrick+CBXMLHandler.m in Sources */, 4C4778B0267C7B2400CAF398 /* URLSessionJsonExtension.swift in Sources */, - 49519D2F27DCD6B100E32E88 /* SecondFaceDetectedSensor.swift in Sources */, 59EA52BB24FD468900AC6E8D /* WhenConditionScript.swift in Sources */, 4C0F9F22204ADBAF00E71B2D /* IfThenLogicEndBrick+CBXMLHandler.m in Sources */, 
49402B9A2811742E009FCBF8 /* LeftRingFingerKnuckleYSensor.swift in Sources */, @@ -13286,6 +13314,7 @@ 4CB2CFA61D7AD7E7009F3034 /* SetColorBrick+CBXMLHandler.m in Sources */, AA74EFEB1BC05B5F00D1E954 /* WaitBrick.m in Sources */, 4C0F9FA5204BD2C200E71B2D /* ThinkForBubbleBrickCell.m in Sources */, + 4981EE3D289ACF740052AEB1 /* PoseNetModel.swift in Sources */, 4CDA808521366AEA0052FA24 /* FormulaManagerProtocol.swift in Sources */, 49B55F9327E88702000A95B4 /* RightEyebrowInnerXSensor.swift in Sources */, 4CF0728D20D659F700F93AB5 /* PhiroSideRightSensor.swift in Sources */, @@ -13324,7 +13353,6 @@ 4CF0729D20D66F8D00F93AB5 /* LookNumberSensor.swift in Sources */, 05E6802C255B5E3400D1E295 /* CGVectorExtentions.swift in Sources */, 597AB56E202081E8007CD10D /* LibrarySoundCollectionViewCell.swift in Sources */, - 4CE3D68D2107B4E300005629 /* FaceDetectedSensor.swift in Sources */, 4C724E5B1B4D3E8C00E27479 /* Script+CBXMLHandler.m in Sources */, 92EC985E1BC3ABB90003A891 /* PhiroMotorStopBrick+CBXMLHandler.m in Sources */, 4C0F9F9F204BD2B100E71B2D /* SayForBubbleBrickCell.m in Sources */, @@ -13375,7 +13403,6 @@ 05CBC6C1286C5A75007FBA61 /* StartTripleStitchBrick+Instruction.swift in Sources */, 9218B2101CC4AB75007B4C60 /* PipetteTool.m in Sources */, 4C0F9F66204BD18600E71B2D /* SayBubbleBrick+CBXMLHandler.m in Sources */, - 49519D3527DCE0FE00E32E88 /* SecondFacePositionYSensor.swift in Sources */, 4C595F4921CAEB7E0097D850 /* NotEqualOperator.swift in Sources */, 4C11795120AEBE38006C6E32 /* LongitudeSensor.swift in Sources */, 18AAC23924A61B1300C96335 /* SetPenSizeBrick+CBXMLHandler.swift in Sources */, @@ -13445,7 +13472,6 @@ 4C1C8B14213FE76B003B3AA4 /* ChartProjectCell.swift in Sources */, E5E1E5AA25780DA100D2F1BC /* SetLookByIndexBrick.swift in Sources */, 4CF072A820D6710F00F93AB5 /* FingerXSensor.swift in Sources */, - 4CE3D68B2107A89B00005629 /* VisualDetectionManager.swift in Sources */, 4CF429F3213A88A600B78897 /* RepeatUntilBrick+Condition.swift in Sources */, BB6FE2402812DC62007ABDA9 /* FormTableViewHeaderView.swift in Sources */, 2D6E3F39210A0A9F00FB8139 /* ChartProjectStoreDataSource.swift in Sources */, @@ -13528,6 +13554,8 @@ 4C595F4B21CAEBA50097D850 /* EqualOperator.swift in Sources */, 4C8AA9BC266F42850082891E /* SetTempoToBrick+Instruction.swift in Sources */, 9E0F740C22B532280030CD89 /* MathUtil.swift in Sources */, + 4981EE29289ACEF90052AEB1 /* FaceSizeSensor.swift in Sources */, + 4981EE27289ACEF90052AEB1 /* SecondFacePositionYSensor.swift in Sources */, 929CC0E01BC39B690027DEC0 /* BrickCellPhiroToneData.m in Sources */, 49B55F7927E874D3000A95B4 /* LeftEarYSensor.swift in Sources */, 5E21DA6821A3E4AF00017D2C /* UIImageExtension.swift in Sources */, @@ -13558,11 +13586,11 @@ F4E6E58C210DFEFE00D86FE6 /* PiFunction.swift in Sources */, 4CF0729220D6697300F93AB5 /* LocationAccuracySensor.swift in Sources */, 498C158828070C6C00B81C8E /* LeftAnkleXSensor.swift in Sources */, - 49519D3727DCE11700E32E88 /* SecondFaceSizeSensor.swift in Sources */, 4C724E601B4D3E8C00E27479 /* Parser.m in Sources */, 92DFB0161A38949E00FA9B0F /* InternFormulaState.m in Sources */, AA74F0C51BC05FCE00D1E954 /* PlaySoundBrickCell.m in Sources */, 498C158728070C6C00B81C8E /* LeftHipXSensor.swift in Sources */, + 4981EE36289ACF1A0052AEB1 /* ObjectDetectionManager.swift in Sources */, 639CB8E525C418FE0051CB82 /* Sound.swift in Sources */, 4CE6788F210C506B00D90B5D /* FunctionManagerProtocol.swift in Sources */, 92DFB0151A38949E00FA9B0F /* InternFormulaParserEmptyStackException.m in Sources */, @@ -13584,6 +13612,7 @@ 
4979F40927E8542100ABCAD6 /* HeadTopYSensor.swift in Sources */, 4C724E271B4D3E8C00E27479 /* CBXMLSerializer.m in Sources */, 9200C2DC1C8084AA002F5CA4 /* ShowTextBrick+CBXMLHandler.m in Sources */, + 4981EE2B289ACEF90052AEB1 /* FaceDetectedSensor.swift in Sources */, 9218B2041CC4AB75007B4C60 /* LCTableViewPickerControl.m in Sources */, 92DFB0111A38949E00FA9B0F /* NSMutableArray+Reverse.m in Sources */, AA74EF4F1BC0586F00D1E954 /* ComeToFrontBrick+CBXMLHandler.m in Sources */, @@ -13625,9 +13654,11 @@ 9218B2061CC4AB75007B4C60 /* RGBAHelper.m in Sources */, AA74F0BE1BC05FCE00D1E954 /* PointInDirectionBrickCell.m in Sources */, 4C0F9F5D204BD16F00E71B2D /* ChooseCameraBrick+CBXMLHandler.m in Sources */, + 4981EE2C289ACEF90052AEB1 /* SecondFaceSizeSensor.swift in Sources */, 49B55F7327E86315000A95B4 /* RightEyeCenterXSensor.swift in Sources */, AA74EEFA1BC057C900D1E954 /* ClearGraphicEffectBrick+CBXMLHandler.m in Sources */, 4C595F5E21CAF8CC0097D850 /* OperatorManager.swift in Sources */, + 4981EE2D289ACEF90052AEB1 /* FacePositionXSensor.swift in Sources */, 99B2C36B1D884C1300736769 /* FlashBrick+CBXMLHandler.m in Sources */, AA74F0011BC05B5F00D1E954 /* IfOnEdgeBounceBrick.m in Sources */, AAF6D9E01BC0C2AA00686849 /* PointInDirectionBrick+Instruction.swift in Sources */, @@ -13710,6 +13741,7 @@ 1882475524C84B8100B01653 /* SetPenColorBrick.swift in Sources */, 4CB0F9F220A1739400D1BE2F /* SizeSensor.swift in Sources */, F4A2AFB52110D48C00C53234 /* JoinFunction.swift in Sources */, + 4981EE3A289ACF1A0052AEB1 /* TextRecognitionManager.swift in Sources */, 92FF2E901A24C7D800093DA7 /* GDataXMLNode.m in Sources */, 056677ED27DBD33E006B4477 /* ZigzagStitchPattern.swift in Sources */, F48E945920D6955E007E782E /* DateSensor.swift in Sources */, @@ -13718,6 +13750,7 @@ 59ABDC652021D7DA00061403 /* UIAlertController+AddAction.swift in Sources */, 922FE8E11B963BDE005A19FA /* BrickSelectionManager.m in Sources */, 4981EE04289ACB2E0052AEB1 /* WidthOfObjectWithIDFunction.swift in Sources */, + 4981EE28289ACEF90052AEB1 /* FacePositionYSensor.swift in Sources */, 4CF0729920D66C4800F93AB5 /* BackgroundNameSensor.swift in Sources */, AA74EEE01BC057B900D1E954 /* IfLogicEndBrick+CBXMLHandler.m in Sources */, C808F26E26B04BA0008A349A /* TouchesEdgeSensor.swift in Sources */, @@ -13740,6 +13773,7 @@ F4E6E580210DE61900D86FE6 /* PowerFunction.swift in Sources */, 92FF32B91A24E2F400093DA7 /* DarkBlueGradientCell.m in Sources */, 498C158528070C6C00B81C8E /* RightKneeYSensor.swift in Sources */, + 4981EE0C289ACE330052AEB1 /* PoseNet.mlmodel in Sources */, AA74EF551BC0586F00D1E954 /* PointToBrick+CBXMLHandler.m in Sources */, F4201CFB20D7A1060030181B /* TimeMinuteSensor.swift in Sources */, 05CBC6C7286C5C2E007FBA61 /* StartTripleStitchBrick+CBXMLHandler.swift in Sources */, @@ -13781,6 +13815,7 @@ 92FF31611A24DEB300093DA7 /* StagePresenterViewController.m in Sources */, 92B46FA21BC7BDF7004980C1 /* ArduinoSendDigitalValueBrickCell.m in Sources */, 05CBC6BF286C5A4B007FBA61 /* TripleStitchPattern.swift in Sources */, + 4981EE2A289ACEF90052AEB1 /* SecondFaceDetectedSensor.swift in Sources */, 929CC0ED1BC39B8C0027DEC0 /* PhiroMotorMoveBackwardBrickCell.m in Sources */, 49949B3A2806EAD20073BF65 /* LeftShoulderYSensor.swift in Sources */, BA9CAB621EC3853600796056 /* AddItemToUserListBrick+Instruction.swift in Sources */, @@ -13819,6 +13854,7 @@ E4668B3C20FDCA6D0021FA0B /* FeaturedProjectsStoreTableDataSource.swift in Sources */, 91FC87CEFA4B1008B213EA31 /* AlertControllerBuilder.swift in Sources */, 91FC8B6AB3345BC25B02DF81 /* 
BaseAlertController.swift in Sources */, + 4981EE37289ACF1A0052AEB1 /* HandPoseDetectionManager.swift in Sources */, 4CE0574C266E1B2F008AD7FF /* SetBackgroundByIndexBrick+CBXMLHandler.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; diff --git a/src/Catty/PlayerEngine/Sensors/BodyPose/PoseNetModel.swift b/src/Catty/PlayerEngine/Sensors/BodyPose/PoseNetModel.swift new file mode 100644 index 0000000000..c6e717d135 --- /dev/null +++ b/src/Catty/PlayerEngine/Sensors/BodyPose/PoseNetModel.swift @@ -0,0 +1,53 @@ +/** + * Copyright (C) 2010-2023 The Catrobat Team + * (http://developer.catrobat.org/credits) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * An additional term exception under section 7 of the GNU Affero + * General Public License, version 3, is available at + * (http://developer.catrobat.org/license_additional_term) + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see http://www.gnu.org/licenses/. + */ + +enum PoseNetModel { + public static let stride = 8.0 + public static let inputDimension = 513.0 + + enum OutputIndex: Int { + case displacementBwd + case heatmaps + case displacementFwd + case offsets + } + + enum Features: Int { + case nose + case leftEye + case rightEye + case leftEar + case rightEar + case leftShoulder + case rightShoulder + case leftElbow + case rightElbow + case leftWrist + case rightWrist + case leftHip + case rightHip + case leftKnee + case rightKnee + case leftAnkle + case rightAnkle + } +} diff --git a/src/Catty/PlayerEngine/Sensors/Camera/VisualDetectionManager.swift b/src/Catty/PlayerEngine/Sensors/Camera/VisualDetectionManager.swift deleted file mode 100644 index 17f86c0c93..0000000000 --- a/src/Catty/PlayerEngine/Sensors/Camera/VisualDetectionManager.swift +++ /dev/null @@ -1,804 +0,0 @@ -/** - * Copyright (C) 2010-2023 The Catrobat Team - * (http://developer.catrobat.org/credits) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as - * published by the Free Software Foundation, either version 3 of the - * License, or (at your option) any later version. - * - * An additional term exception under section 7 of the GNU Affero - * General Public License, version 3, is available at - * (http://developer.catrobat.org/license_additional_term) - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see http://www.gnu.org/licenses/. 
- */ -import NaturalLanguage -import Vision - -class VisualDetectionManager: NSObject, VisualDetectionManagerProtocol, AVCaptureVideoDataOutputSampleBufferDelegate { - - static let maxFaceCount = 2 - static let maxHandCount = 2 - static let undefinedLanguage = "und" - - var isFaceDetected = [false, false] - var facePositionXRatio: [Double?] = [nil, nil] - var facePositionYRatio: [Double?] = [nil, nil] - var faceSizeRatio: [Double?] = [nil, nil] - var faceLandmarkPositionRatioDictionary: [String: Double] = [:] - var bodyPosePositionRatioDictionary: [String: Double] = [:] - var handPosePositionRatioDictionary: [String: Double] = [:] - var textFromCamera: String? - var textBlocksNumber: Int? - var textBlockPosition: [CGPoint] = [] - var textBlockSizeRatio: [Double] = [] - var textBlockFromCamera: [String] = [] - var textBlockLanguageCode: [String] = [] - var objectRecognitions: [VNRecognizedObjectObservation] = [] - - let minConfidence: Float = 0.5 - - private var session: AVCaptureSession? - private var videoDataOuput: AVCaptureVideoDataOutput? - private var previewLayer: AVCaptureVideoPreviewLayer? - - private var objectRecognitionModel: VNCoreMLModel? - private var stage: Stage? - private var normalizedSize = CGSize(width: 1.0, height: 1.0) - - var faceDetectionEnabled = false - var handPoseDetectionEnabled = false - var bodyPoseDetectionEnabled = false - var textRecognitionEnabled = false - var objectRecognitionEnabled = false - - var previousFaceObservations: [VNFaceObservation]? - - func setStage(_ stage: Stage?) { - self.stage = stage - } - - func start() { - self.reset() - - self.session = CameraPreviewHandler.shared().getSession() - - guard let session = self.session, - let device = camera(for: cameraPosition()), - let deviceInput = try? AVCaptureDeviceInput(device: device) - else { return } - - if session.isRunning { - session.stopRunning() - } - - if session.canAddInput(deviceInput) { - session.addInput(deviceInput) - } - - let videoDataOuput = AVCaptureVideoDataOutput() - videoDataOuput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey: kCMPixelFormat_32BGRA ] as [String: Any] - videoDataOuput.alwaysDiscardsLateVideoFrames = true - - // create a serial dispatch queue used for the sample buffer delegate - // a serial dispatch queue must be used to guarantee that video frames will be delivered in order - // see the header doc for setSampleBufferDelegate:queue: for more information - let serialQueue = DispatchQueue(label: "VideoDataOutputQueue") - videoDataOuput.setSampleBufferDelegate(self, queue: serialQueue) - - if session.canAddOutput(videoDataOuput) { - self.session?.addOutput(videoDataOuput) - } - - let videoDataOutputConnection = videoDataOuput.connection(with: .video) - videoDataOutputConnection?.isEnabled = true - if let videoDataOutputConnection = videoDataOutputConnection { - videoDataOutputConnection.isEnabled = true - if videoDataOutputConnection.isVideoOrientationSupported { - if Project.lastUsed().header.landscapeMode { - videoDataOutputConnection.videoOrientation = .landscapeRight - } else { - videoDataOutputConnection.videoOrientation = .portrait - } - } - } - self.videoDataOuput = videoDataOuput - - let previewLayer = AVCaptureVideoPreviewLayer(session: session) - previewLayer.backgroundColor = UIColor.black.cgColor - previewLayer.videoGravity = .resizeAspect - previewLayer.isHidden = true - self.previewLayer = previewLayer - - if objectRecognitionEnabled { - if let objectRecognitionModelURL = Bundle.main.url(forResource: "YOLOv3Tiny", withExtension: "mlmodelc") { 
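- // Note: Core ML models ship in the project as .mlmodel sources, but Xcode compiles them into .mlmodelc bundles at build time, which is why this resource lookup asks for the compiled "mlmodelc" extension rather than the raw model file.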
- do { - objectRecognitionModel = try VNCoreMLModel(for: MLModel(contentsOf: objectRecognitionModelURL)) - } catch { - NSLog("Could not load object detection model!") - } - } - } - - DispatchQueue.main.async { - session.startRunning() - } - } - - func startFaceDetection() { - self.faceDetectionEnabled = true - } - - func startHandPoseDetection() { - self.handPoseDetectionEnabled = true - } - - func startBodyPoseDetection() { - self.bodyPoseDetectionEnabled = true - } - - func startTextRecognition() { - self.textRecognitionEnabled = true - } - - func startObjectRecognition() { - self.objectRecognitionEnabled = true - } - - func stop() { - self.reset() - - if let inputs = self.session?.inputs as? [AVCaptureDeviceInput] { - for input in inputs { - self.session?.removeInput(input) - } - } - if let outputs = self.session?.outputs as? [AVCaptureVideoDataOutput] { - for output in outputs { - self.session?.removeOutput(output) - } - } - - self.session?.stopRunning() - self.session = nil - self.videoDataOuput?.connection(with: .video)?.isEnabled = false - self.videoDataOuput = nil - self.previewLayer?.removeFromSuperlayer() - self.previewLayer = nil - - self.faceDetectionEnabled = false - self.handPoseDetectionEnabled = false - self.bodyPoseDetectionEnabled = false - self.textRecognitionEnabled = false - self.objectRecognitionEnabled = false - } - - func reset() { - self.resetFaceDetection() - self.resetBodyPoses() - self.resetHandPoses() - self.resetTextRecogntion() - self.resetObjectRecognition() - } - - func resetFaceDetection() { - self.isFaceDetected = [false, false] - self.facePositionXRatio = [nil, nil] - self.facePositionYRatio = [nil, nil] - self.faceSizeRatio = [nil, nil] - self.previousFaceObservations = nil - self.faceLandmarkPositionRatioDictionary.removeAll() - } - - func resetBodyPoses() { - self.bodyPosePositionRatioDictionary.removeAll() - } - - func resetHandPoses() { - self.handPosePositionRatioDictionary.removeAll() - } - - func resetTextRecogntion() { - self.textFromCamera = nil - self.textBlocksNumber = nil - self.textBlockPosition.removeAll() - self.textBlockSizeRatio.removeAll() - self.textBlockFromCamera.removeAll() - self.textBlockLanguageCode.removeAll() - } - - func resetObjectRecognition() { - self.objectRecognitions.removeAll() - } - - func available() -> Bool { - guard let _ = CameraPreviewHandler.shared().getSession(), - let device = camera(for: cameraPosition()), - let _ = try? 
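- // Vision invokes these completion handlers on the queue that performed the request (the serial "VideoDataOutputQueue" set up above), so results are hopped onto the main queue before the sensor-facing state is mutated.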
AVCaptureDeviceInput(device: device) else { return false } - - return true - } - - func cropVideoBuffer(inputBuffer: CVPixelBuffer) -> CVPixelBuffer { - CVPixelBufferLockBaseAddress(inputBuffer, .readOnly) - guard let baseAddress = CVPixelBufferGetBaseAddress(inputBuffer) else { return inputBuffer } - let baseAddressStart = baseAddress.assumingMemoryBound(to: UInt8.self) - let bytesPerRow = CVPixelBufferGetBytesPerRow(inputBuffer) - - let pixelFormat = CVPixelBufferGetPixelFormatType(inputBuffer) - let pixelBufferWidth = CGFloat(CVPixelBufferGetWidth(inputBuffer)) - let pixelBufferHeight = CGFloat(CVPixelBufferGetHeight(inputBuffer)) - guard let stageHeight = self.stage?.frame.height else { return inputBuffer } - - let croppedWidth = pixelBufferHeight / stageHeight * pixelBufferWidth - - var cropX = Int((pixelBufferWidth - CGFloat(croppedWidth)) / 2.0) - if cropX % 2 != 0 { - cropX += 1 - } - - let cropStartOffset = Int(CGFloat(cropX) * (CGFloat(bytesPerRow) / pixelBufferWidth)) - - let options = [ - kCVPixelBufferCGImageCompatibilityKey: true, - kCVPixelBufferCGBitmapContextCompatibilityKey: true, - kCVPixelBufferWidthKey: croppedWidth, - kCVPixelBufferHeightKey: pixelBufferHeight - ] as [CFString: Any] - - var newBuffer: CVPixelBuffer! - - CVPixelBufferCreateWithBytes(kCFAllocatorDefault, - Int(croppedWidth), - Int(pixelBufferHeight), - pixelFormat, - &baseAddressStart[cropStartOffset], - Int(bytesPerRow), - nil, - nil, - options as CFDictionary, - &newBuffer) - - CVPixelBufferUnlockBaseAddress(inputBuffer, .readOnly) - return newBuffer - } - - func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } - - if connection.isVideoOrientationSupported && !Project.lastUsed().header.landscapeMode && connection.videoOrientation != .portrait { - connection.videoOrientation = .portrait - return - } - - if connection.isVideoOrientationSupported && Project.lastUsed().header.landscapeMode && connection.videoOrientation != .landscapeRight { - connection.videoOrientation = .landscapeRight - return - } - - let newBuffer = self.cropVideoBuffer(inputBuffer: pixelBuffer) - - var orientation = CGImagePropertyOrientation.up - if cameraPosition() == .front { - orientation = .upMirrored - } - - let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: newBuffer, orientation: orientation, options: [:]) - var detectionRequests: [VNRequest] = [] - - if faceDetectionEnabled { - let faceDetectionRequest = VNDetectFaceLandmarksRequest { request, _ in - if let faceObservations = request.results as? [VNFaceObservation] { - DispatchQueue.main.async { - self.handleDetectedFaceObservations(faceObservations) - } - } - } - detectionRequests.append(faceDetectionRequest) - } - - if #available(iOS 13.0, *) { - if textRecognitionEnabled { - let textDetectionRequest = VNRecognizeTextRequest { request, _ in - if let textObservations = request.results as? [VNRecognizedTextObservation] { - DispatchQueue.main.async { - self.handleTextObservations(textObservations) - } - } - } - detectionRequests.append(textDetectionRequest) - } - } - - if #available(iOS 14.0, *) { - if bodyPoseDetectionEnabled { - let humanBodyPoseRequest = VNDetectHumanBodyPoseRequest { request, _ in - if let bodyPoseObservation = request.results as? 
[VNHumanBodyPoseObservation] { - DispatchQueue.main.async { - self.handleHumanBodyPoseObservations(bodyPoseObservation) - } - } - } - detectionRequests.append(humanBodyPoseRequest) - } - if handPoseDetectionEnabled { - let humanHandPoseRequest = VNDetectHumanHandPoseRequest { request, _ in - if let handPoseObservations = request.results as? [VNHumanHandPoseObservation] { - DispatchQueue.main.async { - self.handleHumanHandPoseObservations(handPoseObservations) - } - } - } - detectionRequests.append(humanHandPoseRequest) - } - } - - if objectRecognitionEnabled { - if let objectRecognitionModel = objectRecognitionModel { - let objectRecognitionRequest = VNCoreMLRequest(model: objectRecognitionModel) { request, _ in - if let objectObservations = request.results as? [VNRecognizedObjectObservation] { - DispatchQueue.main.async { - self.handleDetectedObjectObservations(objectObservations) - } - } - } - detectionRequests.append(objectRecognitionRequest) - } - } - - do { - try imageRequestHandler.perform(detectionRequests) - } catch let error as NSError { - print(error) - } - } - - func handleDetectedFaceObservations(_ faceObservations: [VNFaceObservation]) { - guard !faceObservations.isEmpty else { - resetFaceDetection() - return - } - - var currentFaceObservations = faceObservations - var isFaceDetected = [false, false] - let facesDetected = min(currentFaceObservations.count, VisualDetectionManager.maxFaceCount) - - if previousFaceObservations == nil { - previousFaceObservations = Array(currentFaceObservations[..<facesDetected]) - } else { - for previousFaceIndex in 0..<facesDetected { - var matchingFaceObservation = currentFaceObservations.first - if previousFaceIndex < previousFaceObservations!.count { - let previousFaceObservation = previousFaceObservations![previousFaceIndex] - var minimumEuclideanDistance = Double.greatestFiniteMagnitude - for currentFaceObservation in currentFaceObservations { - let euclideanDistance = calculateEuclideanDistance(previousFaceObservation, currentFaceObservation) - if euclideanDistance < minimumEuclideanDistance { - minimumEuclideanDistance = euclideanDistance - matchingFaceObservation = currentFaceObservation - } - } - } - if let matchingFaceObservation = matchingFaceObservation { - if previousFaceIndex >= previousFaceObservations!.count { - previousFaceObservations?.append(matchingFaceObservation) - } else { - previousFaceObservations![previousFaceIndex] = matchingFaceObservation - } - currentFaceObservations.removeObject(matchingFaceObservation) - } - } - } - - for faceIndex in 0..<facesDetected { - let faceObservation = previousFaceObservations![faceIndex] - isFaceDetected[faceIndex] = true - facePositionXRatio[faceIndex] = faceObservation.boundingBox.origin.x + faceObservation.boundingBox.width / 2 - facePositionYRatio[faceIndex] = faceObservation.boundingBox.origin.y + faceObservation.boundingBox.height / 2 - faceSizeRatio[faceIndex] = max(faceObservation.boundingBox.width, faceObservation.boundingBox.height) - } - self.isFaceDetected = isFaceDetected - } - - func calculateEuclideanDistance(_ previousFaceObservation: VNFaceObservation, _ currentFaceObservation: VNFaceObservation) -> Double { - let distanceX = previousFaceObservation.boundingBox.origin.x - currentFaceObservation.boundingBox.origin.x - let distanceY = previousFaceObservation.boundingBox.origin.y - currentFaceObservation.boundingBox.origin.y - return sqrt(pow(distanceX, 2) + pow(distanceY, 2)) - } - - @available(iOS 13.0, *) - func handleTextObservations(_ textObservations: [VNRecognizedTextObservation]) { - guard !textObservations.isEmpty else { - resetTextRecogntion() - return - } - - let topCanditateTextObservations = textObservations.filter({ $0.topCandidates(1).first != nil && $0.topCandidates(1).first!.string.isNotEmpty }) - - textBlocksNumber = topCanditateTextObservations.count - textBlockPosition = topCanditateTextObservations.map({ CGPoint(x: $0.boundingBox.origin.x + $0.boundingBox.width / 2, - y: $0.boundingBox.origin.y + $0.boundingBox.height / 2) }) - textBlockSizeRatio = topCanditateTextObservations.map({ max($0.boundingBox.width, $0.boundingBox.height) }) - - textBlockFromCamera = topCanditateTextObservations.map({ $0.topCandidates(1).first!.string }) - textFromCamera = textBlockFromCamera.joined(separator: " ") - - textBlockLanguageCode = textBlockFromCamera.map({ detectedLanguage(for: $0) ?? VisualDetectionManager.undefinedLanguage }) - } - - func detectedLanguage(for string: String) -> String?
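- // Identifies the dominant language among the constrained set below and returns its ISO code (e.g. "en"); callers map a nil result to "und" (VisualDetectionManager.undefinedLanguage).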
{ - let recognizer = NLLanguageRecognizer() - recognizer.languageConstraints = [NLLanguage.english, NLLanguage.french, NLLanguage.italian, NLLanguage.german, - NLLanguage.spanish, NLLanguage.portuguese, NLLanguage.simplifiedChinese, NLLanguage.traditionalChinese] - recognizer.processString(string) - guard let languageCode = recognizer.dominantLanguage?.rawValue else { return nil } - return languageCode - } - - @available(iOS 14.0, *) - func handleHumanBodyPoseObservations(_ bodyPoseObservations: [VNHumanBodyPoseObservation]) { - guard !bodyPoseObservations.isEmpty, let bodyPoseObservation = bodyPoseObservations.first else { - resetBodyPoses() - return - } - if let neck = try? bodyPoseObservation.recognizedPoint(.neck) { - if neck.confidence > minConfidence { - bodyPosePositionRatioDictionary[NeckXSensor.tag] = neck.x - bodyPosePositionRatioDictionary[NeckYSensor.tag] = neck.y - } - } - if let leftShoulder = try? bodyPoseObservation.recognizedPoint(.leftShoulder) { - if leftShoulder.confidence > minConfidence { - bodyPosePositionRatioDictionary[LeftShoulderXSensor.tag] = leftShoulder.x - bodyPosePositionRatioDictionary[LeftShoulderYSensor.tag] = leftShoulder.y - } - } - if let rightShoulder = try? bodyPoseObservation.recognizedPoint(.rightShoulder) { - if rightShoulder.confidence > minConfidence { - bodyPosePositionRatioDictionary[RightShoulderXSensor.tag] = rightShoulder.x - bodyPosePositionRatioDictionary[RightShoulderYSensor.tag] = rightShoulder.y - } - } - if let leftElbow = try? bodyPoseObservation.recognizedPoint(.leftElbow) { - if leftElbow.confidence > minConfidence { - bodyPosePositionRatioDictionary[LeftElbowXSensor.tag] = leftElbow.x - bodyPosePositionRatioDictionary[LeftElbowYSensor.tag] = leftElbow.y - } - } - if let rightElbow = try? bodyPoseObservation.recognizedPoint(.rightElbow) { - if rightElbow.confidence > minConfidence { - bodyPosePositionRatioDictionary[RightElbowXSensor.tag] = rightElbow.x - bodyPosePositionRatioDictionary[RightElbowYSensor.tag] = rightElbow.y - } - } - if let leftWrist = try? bodyPoseObservation.recognizedPoint(.leftWrist) { - if leftWrist.confidence > minConfidence { - bodyPosePositionRatioDictionary[LeftWristXSensor.tag] = leftWrist.x - bodyPosePositionRatioDictionary[LeftWristYSensor.tag] = leftWrist.y - } - } - if let rightWrist = try? bodyPoseObservation.recognizedPoint(.rightWrist) { - if rightWrist.confidence > minConfidence { - bodyPosePositionRatioDictionary[RightWristXSensor.tag] = rightWrist.x - bodyPosePositionRatioDictionary[RightWristYSensor.tag] = rightWrist.y - } - } - if let leftHip = try? bodyPoseObservation.recognizedPoint(.leftHip) { - if leftHip.confidence > minConfidence { - bodyPosePositionRatioDictionary[LeftHipXSensor.tag] = leftHip.x - bodyPosePositionRatioDictionary[LeftHipYSensor.tag] = leftHip.y - } - } - if let rightHip = try? bodyPoseObservation.recognizedPoint(.rightHip) { - if rightHip.confidence > minConfidence { - bodyPosePositionRatioDictionary[RightHipXSensor.tag] = rightHip.x - bodyPosePositionRatioDictionary[RightHipYSensor.tag] = rightHip.y - } - } - if let leftKnee = try? bodyPoseObservation.recognizedPoint(.leftKnee) { - if leftKnee.confidence > minConfidence { - bodyPosePositionRatioDictionary[LeftKneeXSensor.tag] = leftKnee.x - bodyPosePositionRatioDictionary[LeftKneeYSensor.tag] = leftKnee.y - } - } - if let rightKnee = try? 
bodyPoseObservation.recognizedPoint(.rightKnee) { - if rightKnee.confidence > minConfidence { - bodyPosePositionRatioDictionary[RightKneeXSensor.tag] = rightKnee.x - bodyPosePositionRatioDictionary[RightKneeYSensor.tag] = rightKnee.y - } - } - if let leftAnkle = try? bodyPoseObservation.recognizedPoint(.leftAnkle) { - if leftAnkle.confidence > minConfidence { - bodyPosePositionRatioDictionary[LeftAnkleXSensor.tag] = leftAnkle.x - bodyPosePositionRatioDictionary[LeftAnkleYSensor.tag] = leftAnkle.y - } - } - if let rightAnkle = try? bodyPoseObservation.recognizedPoint(.rightAnkle) { - if rightAnkle.confidence > minConfidence { - bodyPosePositionRatioDictionary[RightAnkleXSensor.tag] = rightAnkle.x - bodyPosePositionRatioDictionary[RightAnkleYSensor.tag] = rightAnkle.y - } - } - } - - @available(iOS 14.0, *) - func handleHumanHandPoseObservations(_ handPoseObservations: [VNHumanHandPoseObservation]) { - guard !handPoseObservations.isEmpty else { - resetHandPoses() - return - } - - var leftHand: VNHumanHandPoseObservation? - var rightHand: VNHumanHandPoseObservation? - if #available(iOS 15.0, *) { - var leftHands = handPoseObservations.filter({ $0.chirality == .left }) - var rightHands = handPoseObservations.filter({ $0.chirality == .right }) - if cameraPosition() == .front { - let tmpHands = leftHands - leftHands = rightHands - rightHands = tmpHands - } - - if rightHands.count == 2 && leftHands.isEmpty { - rightHand = rightHands[0] - leftHand = rightHands[1] - } else if leftHands.count == 2 && rightHands.isEmpty { - leftHand = leftHands[0] - rightHand = leftHands[1] - } else { - if leftHands.isNotEmpty { - leftHand = leftHands.first - } - if rightHands.isNotEmpty { - rightHand = rightHands.first - } - } - } else { - leftHand = handPoseObservations.first - if handPoseObservations.count > 1 { - rightHand = handPoseObservations[1] - if let leftHandPinky = try? leftHand!.recognizedPoint(.littlePIP), - let rightHandPinky = try? rightHand!.recognizedPoint(.littlePIP) { - if rightHandPinky.x < leftHandPinky.x { - rightHand = handPoseObservations[0] - leftHand = handPoseObservations[1] - } - } - } - } - - if let leftHand = leftHand { - handleHandObservation(leftHand, isLeft: true) - } else { - removeLeftHandPoses() - } - - if let rightHand = rightHand { - handleHandObservation(rightHand, isLeft: false) - } else { - removeRightHandPoses() - } - } - - @available(iOS 14.0, *) - func handleHandObservation(_ hand: VNHumanHandPoseObservation, isLeft: Bool) { - let pinkyPoints = try? hand.recognizedPoints(.littleFinger) - let ringFingerPoints = try? hand.recognizedPoints(.ringFinger) - let middleFingerPoints = try? hand.recognizedPoints(.middleFinger) - let indexPoints = try? hand.recognizedPoints(.indexFinger) - let thumbPoints = try? 
hand.recognizedPoints(.thumb) - - if let pinkyKnuckle = pinkyPoints?[.littlePIP] { - if pinkyKnuckle.confidence > minConfidence { - if isLeft { - handPosePositionRatioDictionary[LeftPinkyKnuckleXSensor.tag] = pinkyKnuckle.x - handPosePositionRatioDictionary[LeftPinkyKnuckleYSensor.tag] = pinkyKnuckle.y - } else { - handPosePositionRatioDictionary[RightPinkyKnuckleXSensor.tag] = pinkyKnuckle.x - handPosePositionRatioDictionary[RightPinkyKnuckleYSensor.tag] = pinkyKnuckle.y - } - } - } - if let ringFingerPoints = ringFingerPoints?[.ringPIP] { - if ringFingerPoints.confidence > minConfidence { - if isLeft { - handPosePositionRatioDictionary[LeftRingFingerKnuckleXSensor.tag] = ringFingerPoints.x - handPosePositionRatioDictionary[LeftRingFingerKnuckleYSensor.tag] = ringFingerPoints.y - } else { - handPosePositionRatioDictionary[RightRingFingerKnuckleXSensor.tag] = ringFingerPoints.x - handPosePositionRatioDictionary[RightRingFingerKnuckleYSensor.tag] = ringFingerPoints.y - } - } - } - if let middleFingerPoints = middleFingerPoints?[.middlePIP] { - if middleFingerPoints.confidence > minConfidence { - if isLeft { - handPosePositionRatioDictionary[LeftMiddleFingerKnuckleXSensor.tag] = middleFingerPoints.x - handPosePositionRatioDictionary[LeftMiddleFingerKnuckleYSensor.tag] = middleFingerPoints.y - } else { - handPosePositionRatioDictionary[RightMiddleFingerKnuckleXSensor.tag] = middleFingerPoints.x - handPosePositionRatioDictionary[RightMiddleFingerKnuckleYSensor.tag] = middleFingerPoints.y - } - } - } - if let indexKnuckle = indexPoints?[.indexPIP] { - if indexKnuckle.confidence > minConfidence { - if isLeft { - handPosePositionRatioDictionary[LeftIndexKnuckleXSensor.tag] = indexKnuckle.x - handPosePositionRatioDictionary[LeftIndexKnuckleYSensor.tag] = indexKnuckle.y - } else { - handPosePositionRatioDictionary[RightIndexKnuckleXSensor.tag] = indexKnuckle.x - handPosePositionRatioDictionary[RightIndexKnuckleYSensor.tag] = indexKnuckle.y - } - } - } - if let thumbKnuckle = thumbPoints?[.thumbIP] { - if thumbKnuckle.confidence > minConfidence { - if isLeft { - handPosePositionRatioDictionary[LeftThumbKnuckleXSensor.tag] = thumbKnuckle.x - handPosePositionRatioDictionary[LeftThumbKnuckleYSensor.tag] = thumbKnuckle.y - } else { - handPosePositionRatioDictionary[RightThumbKnuckleXSensor.tag] = thumbKnuckle.x - handPosePositionRatioDictionary[RightThumbKnuckleYSensor.tag] = thumbKnuckle.y - } - } - } - } - - func removeRightHandPoses() { - for rightHandPose in self.handPosePositionRatioDictionary.filter({ $0.key.starts(with: "RIGHT") }) { - self.handPosePositionRatioDictionary.removeValue(forKey: rightHandPose.key) - } - } - - func removeLeftHandPoses() { - for leftHandPose in self.handPosePositionRatioDictionary.filter({ $0.key.starts(with: "LEFT") }) { - self.handPosePositionRatioDictionary.removeValue(forKey: leftHandPose.key) - } - } - - func handleDetectedObjectObservations(_ objectObservations: [VNRecognizedObjectObservation]) { - resetObjectRecognition() - self.objectRecognitions.append(contentsOf: objectObservations.filter { $0.confidence > minConfidence }) - } - - func cameraPosition() -> AVCaptureDevice.Position { - (CameraPreviewHandler.shared()?.getCameraPosition())! - } - - private func camera(for cameraPosition: AVCaptureDevice.Position) -> AVCaptureDevice? 
{ - for device in AVCaptureDevice.DiscoverySession.init(deviceTypes: [SpriteKitDefines.avCaptureDeviceType], mediaType: .video, position: cameraPosition).devices { - return device - } - return nil - } -} diff --git a/src/Catty/PlayerEngine/Sensors/DetectionManager/BodyPoseDetectionManager.swift b/src/Catty/PlayerEngine/Sensors/DetectionManager/BodyPoseDetectionManager.swift new file mode 100644 index 0000000000..e4635187b7 --- /dev/null +++ b/src/Catty/PlayerEngine/Sensors/DetectionManager/BodyPoseDetectionManager.swift @@ -0,0 +1,335 @@ +/** + * Copyright (C) 2010-2023 The Catrobat Team + * (http://developer.catrobat.org/credits) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * An additional term exception under section 7 of the GNU Affero + * General Public License, version 3, is available at + * (http://developer.catrobat.org/license_additional_term) + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see http://www.gnu.org/licenses/. + */ +import Vision + +extension VisualDetectionManager { + @available(iOS 14.0, *) + func handleHumanBodyPoseObservations(_ bodyPoseObservations: [VNHumanBodyPoseObservation]) { + guard !bodyPoseObservations.isEmpty, let bodyPoseObservation = bodyPoseObservations.first else { + resetBodyPoses() + return + } + if let neck = try? bodyPoseObservation.recognizedPoint(.neck) { + if neck.confidence > minConfidence { + bodyPosePositionRatioDictionary[NeckXSensor.tag] = neck.x + bodyPosePositionRatioDictionary[NeckYSensor.tag] = neck.y + } + } + if let leftShoulder = try? bodyPoseObservation.recognizedPoint(.leftShoulder) { + if leftShoulder.confidence > minConfidence { + bodyPosePositionRatioDictionary[LeftShoulderXSensor.tag] = leftShoulder.x + bodyPosePositionRatioDictionary[LeftShoulderYSensor.tag] = leftShoulder.y + } + } + if let rightShoulder = try? bodyPoseObservation.recognizedPoint(.rightShoulder) { + if rightShoulder.confidence > minConfidence { + bodyPosePositionRatioDictionary[RightShoulderXSensor.tag] = rightShoulder.x + bodyPosePositionRatioDictionary[RightShoulderYSensor.tag] = rightShoulder.y + } + } + if let leftElbow = try? bodyPoseObservation.recognizedPoint(.leftElbow) { + if leftElbow.confidence > minConfidence { + bodyPosePositionRatioDictionary[LeftElbowXSensor.tag] = leftElbow.x + bodyPosePositionRatioDictionary[LeftElbowYSensor.tag] = leftElbow.y + } + } + if let rightElbow = try? bodyPoseObservation.recognizedPoint(.rightElbow) { + if rightElbow.confidence > minConfidence { + bodyPosePositionRatioDictionary[RightElbowXSensor.tag] = rightElbow.x + bodyPosePositionRatioDictionary[RightElbowYSensor.tag] = rightElbow.y + } + } + if let leftWrist = try? bodyPoseObservation.recognizedPoint(.leftWrist) { + if leftWrist.confidence > minConfidence { + bodyPosePositionRatioDictionary[LeftWristXSensor.tag] = leftWrist.x + bodyPosePositionRatioDictionary[LeftWristYSensor.tag] = leftWrist.y + } + } + if let rightWrist = try? 
bodyPoseObservation.recognizedPoint(.rightWrist) { + if rightWrist.confidence > minConfidence { + bodyPosePositionRatioDictionary[RightWristXSensor.tag] = rightWrist.x + bodyPosePositionRatioDictionary[RightWristYSensor.tag] = rightWrist.y + } + } + if let leftHip = try? bodyPoseObservation.recognizedPoint(.leftHip) { + if leftHip.confidence > minConfidence { + bodyPosePositionRatioDictionary[LeftHipXSensor.tag] = leftHip.x + bodyPosePositionRatioDictionary[LeftHipYSensor.tag] = leftHip.y + } + } + if let rightHip = try? bodyPoseObservation.recognizedPoint(.rightHip) { + if rightHip.confidence > minConfidence { + bodyPosePositionRatioDictionary[RightHipXSensor.tag] = rightHip.x + bodyPosePositionRatioDictionary[RightHipYSensor.tag] = rightHip.y + } + } + if let leftKnee = try? bodyPoseObservation.recognizedPoint(.leftKnee) { + if leftKnee.confidence > minConfidence { + bodyPosePositionRatioDictionary[LeftKneeXSensor.tag] = leftKnee.x + bodyPosePositionRatioDictionary[LeftKneeYSensor.tag] = leftKnee.y + } + } + if let rightKnee = try? bodyPoseObservation.recognizedPoint(.rightKnee) { + if rightKnee.confidence > minConfidence { + bodyPosePositionRatioDictionary[RightKneeXSensor.tag] = rightKnee.x + bodyPosePositionRatioDictionary[RightKneeYSensor.tag] = rightKnee.y + } + } + if let leftAnkle = try? bodyPoseObservation.recognizedPoint(.leftAnkle) { + if leftAnkle.confidence > minConfidence { + bodyPosePositionRatioDictionary[LeftAnkleXSensor.tag] = leftAnkle.x + bodyPosePositionRatioDictionary[LeftAnkleYSensor.tag] = leftAnkle.y + } + } + if let rightAnkle = try? bodyPoseObservation.recognizedPoint(.rightAnkle) { + if rightAnkle.confidence > minConfidence { + bodyPosePositionRatioDictionary[RightAnkleXSensor.tag] = rightAnkle.x + bodyPosePositionRatioDictionary[RightAnkleYSensor.tag] = rightAnkle.y + } + } + } + + func handleHumanBodyPoseObservations(_ bodyPoseObservations: [VNCoreMLFeatureValueObservation]) { + let heatMaps = bodyPoseObservations[PoseNetModel.OutputIndex.heatmaps.rawValue].featureValue.multiArrayValue + let offsets = bodyPoseObservations[PoseNetModel.OutputIndex.offsets.rawValue].featureValue.multiArrayValue + if let heatMaps = heatMaps, let offsets = offsets { + let featureCount = heatMaps.shape[0].intValue + var keyPointArray = [(Int, Int)](repeating: (0, 0), count: featureCount) + for keyPointIndex in 0..<featureCount { + var maxValueKeypoint: Float = 0.0 + for yIndex in 0..<heatMaps.shape[1].intValue { + for xIndex in 0..<heatMaps.shape[2].intValue { + let currentHeatMapValue = heatMaps[[keyPointIndex, yIndex, xIndex] as [NSNumber]].floatValue + if currentHeatMapValue >
maxValueKeypoint { + maxValueKeypoint = currentHeatMapValue + keyPointArray[keyPointIndex] = (yIndex, xIndex) + } + } + } + } + + let (yLeftShoulder, xLeftShoulder) = keyPointArray[PoseNetModel.Features.leftShoulder.rawValue] + let leftShoulderConfidence = heatMaps[[PoseNetModel.Features.leftShoulder.rawValue, yLeftShoulder, xLeftShoulder] as [NSNumber]].floatValue + if leftShoulderConfidence > minConfidence { + let yOffsetLeftShoulder = offsets[[PoseNetModel.Features.leftShoulder.rawValue, yLeftShoulder, xLeftShoulder] as [NSNumber]].doubleValue + let xOffsetLeftShoulder = offsets[[featureCount + PoseNetModel.Features.leftShoulder.rawValue, yLeftShoulder, xLeftShoulder] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[LeftShoulderYSensor.tag] = 1.0 - (Double(yLeftShoulder) * PoseNetModel.stride + yOffsetLeftShoulder) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[LeftShoulderXSensor.tag] = (Double(xLeftShoulder) * PoseNetModel.stride + xOffsetLeftShoulder) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftShoulderYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftShoulderXSensor.tag) + } + } + + let (yRightShoulder, xRightShoulder) = keyPointArray[PoseNetModel.Features.rightShoulder.rawValue] + let rightShoulderConfidence = heatMaps[[PoseNetModel.Features.rightShoulder.rawValue, yRightShoulder, xRightShoulder] as [NSNumber]].floatValue + if rightShoulderConfidence > minConfidence { + let yOffsetRightShoulder = offsets[[PoseNetModel.Features.rightShoulder.rawValue, yRightShoulder, xRightShoulder] as [NSNumber]].doubleValue + let xOffsetRightShoulder = offsets[[featureCount + PoseNetModel.Features.rightShoulder.rawValue, yRightShoulder, xRightShoulder] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[RightShoulderYSensor.tag] = 1.0 - (Double(yRightShoulder) * PoseNetModel.stride + yOffsetRightShoulder) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[RightShoulderXSensor.tag] = (Double(xRightShoulder) * PoseNetModel.stride + xOffsetRightShoulder) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightShoulderYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightShoulderXSensor.tag) + } + } + + let (yLeftElbow, xLeftElbow) = keyPointArray[PoseNetModel.Features.leftElbow.rawValue] + let leftElbowConfidence = heatMaps[[PoseNetModel.Features.leftElbow.rawValue, yLeftElbow, xLeftElbow] as [NSNumber]].floatValue + if leftElbowConfidence > minConfidence { + let yOffsetLeftElbow = offsets[[PoseNetModel.Features.leftElbow.rawValue, yLeftElbow, xLeftElbow] as [NSNumber]].doubleValue + let xOffsetLeftElbow = offsets[[featureCount + PoseNetModel.Features.leftElbow.rawValue, yLeftElbow, xLeftElbow] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[LeftElbowYSensor.tag] = 1.0 - (Double(yLeftElbow) * PoseNetModel.stride + yOffsetLeftElbow) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[LeftElbowXSensor.tag] = (Double(xLeftElbow) * PoseNetModel.stride + xOffsetLeftElbow) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftElbowYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: 
LeftElbowXSensor.tag) + } + } + + let (yRightElbow, xRightElbow) = keyPointArray[PoseNetModel.Features.rightElbow.rawValue] + let rightElbowConfidence = heatMaps[[PoseNetModel.Features.rightElbow.rawValue, yRightElbow, xRightElbow] as [NSNumber]].floatValue + if rightElbowConfidence > minConfidence { + let yOffsetRightElbow = offsets[[PoseNetModel.Features.rightElbow.rawValue, yRightElbow, xRightElbow] as [NSNumber]].doubleValue + let xOffsetRightElbow = offsets[[featureCount + PoseNetModel.Features.rightElbow.rawValue, yRightElbow, xRightElbow] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[RightElbowYSensor.tag] = 1.0 - (Double(yRightElbow) * PoseNetModel.stride + yOffsetRightElbow) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[RightElbowXSensor.tag] = (Double(xRightElbow) * PoseNetModel.stride + xOffsetRightElbow) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightElbowYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightElbowXSensor.tag) + } + } + + let (yLeftWrist, xLeftWrist) = keyPointArray[PoseNetModel.Features.leftWrist.rawValue] + let leftWristConfidence = heatMaps[[PoseNetModel.Features.leftWrist.rawValue, yLeftWrist, xLeftWrist] as [NSNumber]].floatValue + if leftWristConfidence > minConfidence { + let yOffsetLeftWrist = offsets[[PoseNetModel.Features.leftWrist.rawValue, yLeftWrist, xLeftWrist] as [NSNumber]].doubleValue + let xOffsetLeftWrist = offsets[[featureCount + PoseNetModel.Features.leftWrist.rawValue, yLeftWrist, xLeftWrist] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[LeftWristYSensor.tag] = 1.0 - (Double(yLeftWrist) * PoseNetModel.stride + yOffsetLeftWrist) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[LeftWristXSensor.tag] = (Double(xLeftWrist) * PoseNetModel.stride + xOffsetLeftWrist) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftWristYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftWristXSensor.tag) + } + } + + let (yRightWrist, xRightWrist) = keyPointArray[PoseNetModel.Features.rightWrist.rawValue] + let rightWristConfidence = heatMaps[[PoseNetModel.Features.rightWrist.rawValue, yRightWrist, xRightWrist] as [NSNumber]].floatValue + if rightWristConfidence > minConfidence { + let yOffsetRightWrist = offsets[[PoseNetModel.Features.rightWrist.rawValue, yRightWrist, xRightWrist] as [NSNumber]].doubleValue + let xOffsetRightWrist = offsets[[featureCount + PoseNetModel.Features.rightWrist.rawValue, yRightWrist, xRightWrist] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[RightWristYSensor.tag] = 1.0 - (Double(yRightWrist) * PoseNetModel.stride + yOffsetRightWrist) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[RightWristXSensor.tag] = (Double(xRightWrist) * PoseNetModel.stride + xOffsetRightWrist) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightWristYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightWristXSensor.tag) + } + } + + let (yLeftHip, xLeftHip) = keyPointArray[PoseNetModel.Features.leftHip.rawValue] + let leftHipConfidence = heatMaps[[PoseNetModel.Features.leftHip.rawValue, yLeftHip, xLeftHip] as 
[NSNumber]].floatValue + if leftHipConfidence > minConfidence { + let yOffsetLeftHip = offsets[[PoseNetModel.Features.leftHip.rawValue, yLeftHip, xLeftHip] as [NSNumber]].doubleValue + let xOffsetLeftHip = offsets[[featureCount + PoseNetModel.Features.leftHip.rawValue, yLeftHip, xLeftHip] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[LeftHipYSensor.tag] = 1.0 - (Double(yLeftHip) * PoseNetModel.stride + yOffsetLeftHip) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[LeftHipXSensor.tag] = (Double(xLeftHip) * PoseNetModel.stride + xOffsetLeftHip) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftHipYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftHipXSensor.tag) + } + } + + let (yRightHip, xRightHip) = keyPointArray[PoseNetModel.Features.rightHip.rawValue] + let rightHipConfidence = heatMaps[[PoseNetModel.Features.rightHip.rawValue, yRightHip, xRightHip] as [NSNumber]].floatValue + if rightHipConfidence > minConfidence { + let yOffsetRightHip = offsets[[PoseNetModel.Features.rightHip.rawValue, yRightHip, xRightHip] as [NSNumber]].doubleValue + let xOffsetRightHip = offsets[[featureCount + PoseNetModel.Features.rightHip.rawValue, yRightHip, xRightHip] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[RightHipYSensor.tag] = 1.0 - (Double(yRightHip) * PoseNetModel.stride + yOffsetRightHip) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[RightHipXSensor.tag] = (Double(xRightHip) * PoseNetModel.stride + xOffsetRightHip) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightHipYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightHipXSensor.tag) + } + } + + let (yLeftKnee, xLeftKnee) = keyPointArray[PoseNetModel.Features.leftKnee.rawValue] + let leftKneeConfidence = heatMaps[[PoseNetModel.Features.leftKnee.rawValue, yLeftKnee, xLeftKnee] as [NSNumber]].floatValue + if leftKneeConfidence > minConfidence { + let yOffsetLeftKnee = offsets[[PoseNetModel.Features.leftKnee.rawValue, yLeftKnee, xLeftKnee] as [NSNumber]].doubleValue + let xOffsetLeftKnee = offsets[[featureCount + PoseNetModel.Features.leftKnee.rawValue, yLeftKnee, xLeftKnee] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[LeftKneeYSensor.tag] = 1.0 - (Double(yLeftKnee) * PoseNetModel.stride + yOffsetLeftKnee) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[LeftKneeXSensor.tag] = (Double(xLeftKnee) * PoseNetModel.stride + xOffsetLeftKnee) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftKneeYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftKneeXSensor.tag) + } + } + + let (yRightKnee, xRightKnee) = keyPointArray[PoseNetModel.Features.rightKnee.rawValue] + let rightKneeConfidence = heatMaps[[PoseNetModel.Features.rightKnee.rawValue, yRightKnee, xRightKnee] as [NSNumber]].floatValue + if rightKneeConfidence > minConfidence { + let yOffsetRightKnee = offsets[[PoseNetModel.Features.rightKnee.rawValue, yRightKnee, xRightKnee] as [NSNumber]].doubleValue + let xOffsetRightKnee = offsets[[featureCount + PoseNetModel.Features.rightKnee.rawValue, yRightKnee, xRightKnee] as [NSNumber]].doubleValue + + 
DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[RightKneeYSensor.tag] = 1.0 - (Double(yRightKnee) * PoseNetModel.stride + yOffsetRightKnee) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[RightKneeXSensor.tag] = (Double(xRightKnee) * PoseNetModel.stride + xOffsetRightKnee) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightKneeYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightKneeXSensor.tag) + } + } + + let (yLeftAnkle, xLeftAnkle) = keyPointArray[PoseNetModel.Features.leftAnkle.rawValue] + let leftAnkleConfidence = heatMaps[[PoseNetModel.Features.leftAnkle.rawValue, yLeftAnkle, xLeftAnkle] as [NSNumber]].floatValue + if leftAnkleConfidence > minConfidence { + let yOffsetLeftAnkle = offsets[[PoseNetModel.Features.leftAnkle.rawValue, yLeftAnkle, xLeftAnkle] as [NSNumber]].doubleValue + let xOffsetLeftAnkle = offsets[[featureCount + PoseNetModel.Features.leftAnkle.rawValue, yLeftAnkle, xLeftAnkle] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[LeftAnkleYSensor.tag] = 1.0 - (Double(yLeftAnkle) * PoseNetModel.stride + yOffsetLeftAnkle) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[LeftAnkleXSensor.tag] = (Double(xLeftAnkle) * PoseNetModel.stride + xOffsetLeftAnkle) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftAnkleYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: LeftAnkleXSensor.tag) + } + } + + let (yRightAnkle, xRightAnkle) = keyPointArray[PoseNetModel.Features.rightAnkle.rawValue] + let rightAnkleConfidence = heatMaps[[PoseNetModel.Features.rightAnkle.rawValue, yRightAnkle, xRightAnkle] as [NSNumber]].floatValue + if rightAnkleConfidence > minConfidence { + let yOffsetRightAnkle = offsets[[PoseNetModel.Features.rightAnkle.rawValue, yRightAnkle, xRightAnkle] as [NSNumber]].doubleValue + let xOffsetRightAnkle = offsets[[featureCount + PoseNetModel.Features.rightAnkle.rawValue, yRightAnkle, xRightAnkle] as [NSNumber]].doubleValue + + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary[RightAnkleYSensor.tag] = 1.0 - (Double(yRightAnkle) * PoseNetModel.stride + yOffsetRightAnkle) / PoseNetModel.inputDimension + self.bodyPosePositionRatioDictionary[RightAnkleXSensor.tag] = (Double(xRightAnkle) * PoseNetModel.stride + xOffsetRightAnkle) / PoseNetModel.inputDimension + } + } else { + DispatchQueue.main.async { + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightAnkleYSensor.tag) + self.bodyPosePositionRatioDictionary.removeValue(forKey: RightAnkleXSensor.tag) + } + } + } + } +} diff --git a/src/Catty/PlayerEngine/Sensors/DetectionManager/FaceDetectionManager.swift b/src/Catty/PlayerEngine/Sensors/DetectionManager/FaceDetectionManager.swift new file mode 100644 index 0000000000..7a11a73bc7 --- /dev/null +++ b/src/Catty/PlayerEngine/Sensors/DetectionManager/FaceDetectionManager.swift @@ -0,0 +1,199 @@ +/** + * Copyright (C) 2010-2023 The Catrobat Team + * (http://developer.catrobat.org/credits) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. 
+ * + * An additional term exception under section 7 of the GNU Affero + * General Public License, version 3, is available at + * (http://developer.catrobat.org/license_additional_term) + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see http://www.gnu.org/licenses/. + */ +import Vision + +extension VisualDetectionManager { + func handleDetectedFaceObservations(_ faceObservations: [VNFaceObservation]) { + guard !faceObservations.isEmpty else { + resetFaceDetection() + return + } + + var currentFaceObservations = faceObservations + var isFaceDetected = [false, false] + let facesDetected = min(currentFaceObservations.count, VisualDetectionManager.maxFaceCount) + + if previousFaceObservations == nil { + previousFaceObservations = Array(currentFaceObservations[..<facesDetected]) + } else { + for previousFaceIndex in 0..<facesDetected { + var minimumDistance = Double.greatestFiniteMagnitude + var matchingFaceObservation: VNFaceObservation? + if previousFaceIndex < previousFaceObservations!.count { + let previousFaceObservation = previousFaceObservations![previousFaceIndex] + for currentFaceObservation in currentFaceObservations { + let distance = calculateDistanceBetweenFaces(previousFaceObservation, currentFaceObservation) + if distance < minimumDistance { + minimumDistance = distance + matchingFaceObservation = currentFaceObservation + } + } + } else { + matchingFaceObservation = currentFaceObservations.first + } + if let matchingFaceObservation = matchingFaceObservation { + if previousFaceIndex >= previousFaceObservations!.count { + previousFaceObservations?.append(matchingFaceObservation) + } else { + previousFaceObservations![previousFaceIndex] = matchingFaceObservation + } + currentFaceObservations.removeObject(matchingFaceObservation) + } + } + } + + for faceIndex in 0..<facesDetected { + let faceBoundingBox = previousFaceObservations![faceIndex].boundingBox + isFaceDetected[faceIndex] = true + facePositionXRatio[faceIndex] = Double(faceBoundingBox.origin.x + faceBoundingBox.width / 2) + facePositionYRatio[faceIndex] = Double(faceBoundingBox.origin.y + faceBoundingBox.height / 2) + faceSizeRatio[faceIndex] = Double(max(faceBoundingBox.width, faceBoundingBox.height)) + } + self.isFaceDetected = isFaceDetected + } + + func calculateDistanceBetweenFaces(_ previousFaceObservation: VNFaceObservation, _ currentFaceObservation: VNFaceObservation) -> Double { + let distanceX = previousFaceObservation.boundingBox.origin.x - currentFaceObservation.boundingBox.origin.x + let distanceY = previousFaceObservation.boundingBox.origin.y - currentFaceObservation.boundingBox.origin.y + return sqrt(pow(distanceX, 2) + pow(distanceY, 2)) + } +} diff --git a/src/Catty/PlayerEngine/Sensors/DetectionManager/HandPoseDetectionManager.swift b/src/Catty/PlayerEngine/Sensors/DetectionManager/HandPoseDetectionManager.swift new file mode 100644 index 0000000000..54b97551cd --- /dev/null +++ b/src/Catty/PlayerEngine/Sensors/DetectionManager/HandPoseDetectionManager.swift @@ -0,0 +1,159 @@ +/** + * Copyright (C) 2010-2023 The Catrobat Team + * (http://developer.catrobat.org/credits) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * An additional term exception under section 7 of the GNU Affero + * General Public License, version 3, is available at + * (http://developer.catrobat.org/license_additional_term) + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see http://www.gnu.org/licenses/. + */ +import Vision + +@available(iOS 14.0, *) +extension VisualDetectionManager { + func handleHumanHandPoseObservations(_ handPoseObservations: [VNHumanHandPoseObservation]) { + guard !handPoseObservations.isEmpty else { + resetHandPoses() + return + } + + var leftHand: VNHumanHandPoseObservation? + var rightHand: VNHumanHandPoseObservation?
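+ // Vision reports hand chirality only from iOS 15 onwards, and the front camera image is mirrored, + // so the chirality labels are swapped there; before iOS 15 the two hands are told apart + // by comparing the x positions of their pinky knuckles instead.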
+ if #available(iOS 15.0, *) { + var leftHands = handPoseObservations.filter({ $0.chirality == .left }) + var rightHands = handPoseObservations.filter({ $0.chirality == .right }) + if cameraPosition() == .front { + let tmpHands = leftHands + leftHands = rightHands + rightHands = tmpHands + } + + if rightHands.count == 2 && leftHands.isEmpty { + rightHand = rightHands[0] + leftHand = rightHands[1] + } else if leftHands.count == 2 && rightHands.isEmpty { + leftHand = leftHands[0] + rightHand = leftHands[1] + } else { + if leftHands.isNotEmpty { + leftHand = leftHands.first + } + if rightHands.isNotEmpty { + rightHand = rightHands.first + } + } + } else { + leftHand = handPoseObservations.first + if handPoseObservations.count > 1 { + rightHand = handPoseObservations[1] + if let leftHandPinky = try? leftHand!.recognizedPoint(.littlePIP), + let rightHandPinky = try? rightHand!.recognizedPoint(.littlePIP) { + if rightHandPinky.x < leftHandPinky.x { + rightHand = handPoseObservations[0] + leftHand = handPoseObservations[1] + } + } + } + } + + if let leftHand = leftHand { + handleHandObservation(leftHand, isLeft: true) + } else { + removeLeftHandPoses() + } + + if let rightHand = rightHand { + handleHandObservation(rightHand, isLeft: false) + } else { + removeRightHandPoses() + } + } + + func handleHandObservation(_ hand: VNHumanHandPoseObservation, isLeft: Bool) { + let pinkyPoints = try? hand.recognizedPoints(.littleFinger) + let ringFingerPoints = try? hand.recognizedPoints(.ringFinger) + let middleFingerPoints = try? hand.recognizedPoints(.middleFinger) + let indexPoints = try? hand.recognizedPoints(.indexFinger) + let thumbPoints = try? hand.recognizedPoints(.thumb) + + if let pinkyKnuckle = pinkyPoints?[.littlePIP] { + if pinkyKnuckle.confidence > minConfidence { + if isLeft { + handPosePositionRatioDictionary[LeftPinkyKnuckleXSensor.tag] = pinkyKnuckle.x + handPosePositionRatioDictionary[LeftPinkyKnuckleYSensor.tag] = pinkyKnuckle.y + } else { + handPosePositionRatioDictionary[RightPinkyKnuckleXSensor.tag] = pinkyKnuckle.x + handPosePositionRatioDictionary[RightPinkyKnuckleYSensor.tag] = pinkyKnuckle.y + } + } + } + if let ringFingerPoints = ringFingerPoints?[.ringPIP] { + if ringFingerPoints.confidence > minConfidence { + if isLeft { + handPosePositionRatioDictionary[LeftRingFingerKnuckleXSensor.tag] = ringFingerPoints.x + handPosePositionRatioDictionary[LeftRingFingerKnuckleYSensor.tag] = ringFingerPoints.y + } else { + handPosePositionRatioDictionary[RightRingFingerKnuckleXSensor.tag] = ringFingerPoints.x + handPosePositionRatioDictionary[RightRingFingerKnuckleYSensor.tag] = ringFingerPoints.y + } + } + } + if let middleFingerPoints = middleFingerPoints?[.middlePIP] { + if middleFingerPoints.confidence > minConfidence { + if isLeft { + handPosePositionRatioDictionary[LeftMiddleFingerKnuckleXSensor.tag] = middleFingerPoints.x + handPosePositionRatioDictionary[LeftMiddleFingerKnuckleYSensor.tag] = middleFingerPoints.y + } else { + handPosePositionRatioDictionary[RightMiddleFingerKnuckleXSensor.tag] = middleFingerPoints.x + handPosePositionRatioDictionary[RightMiddleFingerKnuckleYSensor.tag] = middleFingerPoints.y + } + } + } + if let indexKnuckle = indexPoints?[.indexPIP] { + if indexKnuckle.confidence > minConfidence { + if isLeft { + handPosePositionRatioDictionary[LeftIndexKnuckleXSensor.tag] = indexKnuckle.x + handPosePositionRatioDictionary[LeftIndexKnuckleYSensor.tag] = indexKnuckle.y + } else { + handPosePositionRatioDictionary[RightIndexKnuckleXSensor.tag] = indexKnuckle.x + 
handPosePositionRatioDictionary[RightIndexKnuckleYSensor.tag] = indexKnuckle.y + } + } + } + if let thumbKnuckle = thumbPoints?[.thumbIP] { + if thumbKnuckle.confidence > minConfidence { + if isLeft { + handPosePositionRatioDictionary[LeftThumbKnuckleXSensor.tag] = thumbKnuckle.x + handPosePositionRatioDictionary[LeftThumbKnuckleYSensor.tag] = thumbKnuckle.y + } else { + handPosePositionRatioDictionary[RightThumbKnuckleXSensor.tag] = thumbKnuckle.x + handPosePositionRatioDictionary[RightThumbKnuckleYSensor.tag] = thumbKnuckle.y + } + } + } + } + + func removeRightHandPoses() { + for rightHandPose in self.handPosePositionRatioDictionary.filter({ $0.key.starts(with: "RIGHT") }) { + self.handPosePositionRatioDictionary.removeValue(forKey: rightHandPose.key) + } + } + + func removeLeftHandPoses() { + for leftHandPose in self.handPosePositionRatioDictionary.filter({ $0.key.starts(with: "LEFT") }) { + self.handPosePositionRatioDictionary.removeValue(forKey: leftHandPose.key) + } + } +} diff --git a/src/Catty/PlayerEngine/Sensors/DetectionManager/ObjectDetectionManager.swift b/src/Catty/PlayerEngine/Sensors/DetectionManager/ObjectDetectionManager.swift new file mode 100644 index 0000000000..a03f08251a --- /dev/null +++ b/src/Catty/PlayerEngine/Sensors/DetectionManager/ObjectDetectionManager.swift @@ -0,0 +1,29 @@ +/** + * Copyright (C) 2010-2023 The Catrobat Team + * (http://developer.catrobat.org/credits) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * An additional term exception under section 7 of the GNU Affero + * General Public License, version 3, is available at + * (http://developer.catrobat.org/license_additional_term) + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see http://www.gnu.org/licenses/. + */ +import Vision + +extension VisualDetectionManager { + func handleDetectedObjectObservations(_ objectObservations: [VNRecognizedObjectObservation]) { + resetObjectRecognition() + self.objectRecognitions.append(contentsOf: objectObservations.filter { $0.confidence > minConfidence }) + } +} diff --git a/src/Catty/PlayerEngine/Sensors/DetectionManager/TextRecognitionManager.swift b/src/Catty/PlayerEngine/Sensors/DetectionManager/TextRecognitionManager.swift new file mode 100644 index 0000000000..3e8aea8114 --- /dev/null +++ b/src/Catty/PlayerEngine/Sensors/DetectionManager/TextRecognitionManager.swift @@ -0,0 +1,54 @@ +/** + * Copyright (C) 2010-2023 The Catrobat Team + * (http://developer.catrobat.org/credits) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. 
+ * + * An additional term exception under section 7 of the GNU Affero + * General Public License, version 3, is available at + * (http://developer.catrobat.org/license_additional_term) + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see http://www.gnu.org/licenses/. + */ +import NaturalLanguage +import Vision + +extension VisualDetectionManager { + @available(iOS 13.0, *) + func handleTextObservations(_ textObservations: [VNRecognizedTextObservation]) { + guard !textObservations.isEmpty else { + resetTextRecogntion() + return + } + + let topCandidateTextObservations = textObservations.filter({ $0.topCandidates(1).first != nil && $0.topCandidates(1).first!.string.isNotEmpty }) + + textBlocksNumber = topCandidateTextObservations.count + textBlockPosition = topCandidateTextObservations.map({ CGPoint(x: $0.boundingBox.origin.x + $0.boundingBox.width / 2, + y: $0.boundingBox.origin.y + $0.boundingBox.height / 2) }) + textBlockSizeRatio = topCandidateTextObservations.map({ max($0.boundingBox.width, $0.boundingBox.height) }) + + textBlockFromCamera = topCandidateTextObservations.map({ $0.topCandidates(1).first!.string }) + textFromCamera = textBlockFromCamera.joined(separator: " ") + + textBlockLanguageCode = textBlockFromCamera.map({ detectedLanguage(for: $0) ?? VisualDetectionManager.undefinedLanguage }) + } + + func detectedLanguage(for string: String) -> String? { + let recognizer = NLLanguageRecognizer() + recognizer.languageConstraints = [NLLanguage.english, NLLanguage.french, NLLanguage.italian, NLLanguage.german, + NLLanguage.spanish, NLLanguage.portuguese, NLLanguage.simplifiedChinese, NLLanguage.traditionalChinese] + recognizer.processString(string) + guard let languageCode = recognizer.dominantLanguage?.rawValue else { return nil } + return languageCode + } +} diff --git a/src/Catty/PlayerEngine/Sensors/DetectionManager/VisualDetectionManager.swift b/src/Catty/PlayerEngine/Sensors/DetectionManager/VisualDetectionManager.swift new file mode 100644 index 0000000000..915380b373 --- /dev/null +++ b/src/Catty/PlayerEngine/Sensors/DetectionManager/VisualDetectionManager.swift @@ -0,0 +1,396 @@ +/** + * Copyright (C) 2010-2023 The Catrobat Team + * (http://developer.catrobat.org/credits) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * An additional term exception under section 7 of the GNU Affero + * General Public License, version 3, is available at + * (http://developer.catrobat.org/license_additional_term) + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see http://www.gnu.org/licenses/.
+ */ +import Vision + +class VisualDetectionManager: NSObject, VisualDetectionManagerProtocol, AVCaptureVideoDataOutputSampleBufferDelegate { + + static let maxFaceCount = 2 + static let maxHandCount = 2 + static let undefinedLanguage = "und" + + var isFaceDetected = [false, false] + var facePositionXRatio: [Double?] = [nil, nil] + var facePositionYRatio: [Double?] = [nil, nil] + var faceSizeRatio: [Double?] = [nil, nil] + var faceLandmarkPositionRatioDictionary: [String: Double] = [:] + var bodyPosePositionRatioDictionary: [String: Double] = [:] + var handPosePositionRatioDictionary: [String: Double] = [:] + var textFromCamera: String? + var textBlocksNumber: Int? + var textBlockPosition: [CGPoint] = [] + var textBlockSizeRatio: [Double] = [] + var textBlockFromCamera: [String] = [] + var textBlockLanguageCode: [String] = [] + var objectRecognitions: [VNRecognizedObjectObservation] = [] + + let minConfidence: Float = 0.3 + + private var session: AVCaptureSession? + private var videoDataOuput: AVCaptureVideoDataOutput? + private var previewLayer: AVCaptureVideoPreviewLayer? + + private var objectRecognitionModel: VNCoreMLModel? + private var bodyPoseDetectionModel: VNCoreMLModel? + private var stage: Stage? + var normalizedSize = CGSize(width: 1.0, height: 1.0) + + var faceDetectionEnabled = false + var handPoseDetectionEnabled = false + var bodyPoseDetectionEnabled = false + var textRecognitionEnabled = false + var objectRecognitionEnabled = false + + var previousFaceObservations: [VNFaceObservation]? + + func setStage(_ stage: Stage?) { + self.stage = stage + } + + func start() { + self.reset() + + self.session = CameraPreviewHandler.shared().getSession() + + guard let session = self.session, + let device = camera(for: cameraPosition()), + let deviceInput = try? 
AVCaptureDeviceInput(device: device) + else { return } + + if session.isRunning { + session.stopRunning() + } + + if session.canAddInput(deviceInput) { + session.addInput(deviceInput) + } + + let videoDataOuput = AVCaptureVideoDataOutput() + videoDataOuput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey: kCMPixelFormat_32BGRA ] as [String: Any] + videoDataOuput.alwaysDiscardsLateVideoFrames = true + + // create a serial dispatch queue used for the sample buffer delegate + // a serial dispatch queue must be used to guarantee that video frames will be delivered in order + // see the header doc for setSampleBufferDelegate:queue: for more information + let serialQueue = DispatchQueue(label: "VideoDataOutputQueue") + videoDataOuput.setSampleBufferDelegate(self, queue: serialQueue) + + if session.canAddOutput(videoDataOuput) { + self.session?.addOutput(videoDataOuput) + } + + let videoDataOutputConnection = videoDataOuput.connection(with: .video) + videoDataOutputConnection?.isEnabled = true + if let videoDataOutputConnection = videoDataOutputConnection { + videoDataOutputConnection.isEnabled = true + if videoDataOutputConnection.isVideoOrientationSupported { + if Project.lastUsed().header.landscapeMode { + videoDataOutputConnection.videoOrientation = .landscapeRight + } else { + videoDataOutputConnection.videoOrientation = .portrait + } + } + } + self.videoDataOuput = videoDataOuput + + let previewLayer = AVCaptureVideoPreviewLayer(session: session) + previewLayer.backgroundColor = UIColor.black.cgColor + previewLayer.videoGravity = .resizeAspect + previewLayer.isHidden = true + self.previewLayer = previewLayer + + if objectRecognitionEnabled { + if let objectRecognitionModelURL = Bundle.main.url(forResource: "YOLOv3Tiny", withExtension: "mlmodelc") { + do { + objectRecognitionModel = try VNCoreMLModel(for: MLModel(contentsOf: objectRecognitionModelURL)) + } catch { + print("Could not load object detection model!") + } + } + } + + if #unavailable(iOS 14.0) { + if bodyPoseDetectionEnabled { + if let bodyPoseDetectionModelURL = Bundle.main.url(forResource: "PoseNet", withExtension: "mlmodelc") { + do { + bodyPoseDetectionModel = try VNCoreMLModel(for: MLModel(contentsOf: bodyPoseDetectionModelURL)) + } catch { + print("Could not load pose detection model!") + } + } + } + } + + DispatchQueue.main.async { + session.startRunning() + } + } + + func startFaceDetection() { + self.faceDetectionEnabled = true + } + + func startHandPoseDetection() { + self.handPoseDetectionEnabled = true + } + + func startBodyPoseDetection() { + self.bodyPoseDetectionEnabled = true + } + + func startTextRecognition() { + self.textRecognitionEnabled = true + } + + func startObjectRecognition() { + self.objectRecognitionEnabled = true + } + + func stop() { + self.reset() + + if let inputs = self.session?.inputs as? [AVCaptureDeviceInput] { + for input in inputs { + self.session?.removeInput(input) + } + } + if let outputs = self.session?.outputs as? 
[AVCaptureVideoDataOutput] { + for output in outputs { + self.session?.removeOutput(output) + } + } + + self.session?.stopRunning() + self.session = nil + self.videoDataOuput?.connection(with: .video)?.isEnabled = false + self.videoDataOuput = nil + self.previewLayer?.removeFromSuperlayer() + self.previewLayer = nil + + self.faceDetectionEnabled = false + self.handPoseDetectionEnabled = false + self.bodyPoseDetectionEnabled = false + self.textRecognitionEnabled = false + self.objectRecognitionEnabled = false + } + + func reset() { + self.resetFaceDetection() + self.resetBodyPoses() + self.resetHandPoses() + self.resetTextRecogntion() + self.resetObjectRecognition() + } + + func resetFaceDetection() { + self.isFaceDetected = [false, false] + self.facePositionXRatio = [nil, nil] + self.facePositionYRatio = [nil, nil] + self.faceSizeRatio = [nil, nil] + self.previousFaceObservations = nil + self.faceLandmarkPositionRatioDictionary.removeAll() + } + + func resetBodyPoses() { + self.bodyPosePositionRatioDictionary.removeAll() + } + + func resetHandPoses() { + self.handPosePositionRatioDictionary.removeAll() + } + + func resetTextRecogntion() { + self.textFromCamera = nil + self.textBlocksNumber = nil + self.textBlockPosition.removeAll() + self.textBlockSizeRatio.removeAll() + self.textBlockFromCamera.removeAll() + self.textBlockLanguageCode.removeAll() + } + + func resetObjectRecognition() { + self.objectRecognitions.removeAll() + } + + func available() -> Bool { + guard let _ = CameraPreviewHandler.shared().getSession(), + let device = camera(for: cameraPosition()), + let _ = try? AVCaptureDeviceInput(device: device) else { return false } + + return true + } + + func cropVideoBuffer(inputBuffer: CVPixelBuffer) -> CVPixelBuffer { + CVPixelBufferLockBaseAddress(inputBuffer, .readOnly) + guard let baseAddress = CVPixelBufferGetBaseAddress(inputBuffer) else { return inputBuffer } + let baseAddressStart = baseAddress.assumingMemoryBound(to: UInt8.self) + let bytesPerRow = CVPixelBufferGetBytesPerRow(inputBuffer) + + let pixelFormat = CVPixelBufferGetPixelFormatType(inputBuffer) + let pixelBufferWidth = CGFloat(CVPixelBufferGetWidth(inputBuffer)) + let pixelBufferHeight = CGFloat(CVPixelBufferGetHeight(inputBuffer)) + guard let stageHeight = self.stage?.frame.height else { return inputBuffer } + + let croppedWidth = pixelBufferHeight / stageHeight * pixelBufferWidth + + var cropX = Int((pixelBufferWidth - CGFloat(croppedWidth)) / 2.0) + if cropX % 2 != 0 { + cropX += 1 + } + + let cropStartOffset = Int(CGFloat(cropX) * (CGFloat(bytesPerRow) / pixelBufferWidth)) + + let options = [ + kCVPixelBufferCGImageCompatibilityKey: true, + kCVPixelBufferCGBitmapContextCompatibilityKey: true, + kCVPixelBufferWidthKey: croppedWidth, + kCVPixelBufferHeightKey: pixelBufferHeight + ] as [CFString: Any] + + var newBuffer: CVPixelBuffer! 
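+ // CVPixelBufferCreateWithBytes re-wraps the already locked pixel bytes, offset to the crop start, + // rather than copying them, so the center crop stays cheap; the passed width/height describe the + // cropped plane while bytesPerRow still refers to the full-width source rows.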
+ + CVPixelBufferCreateWithBytes(kCFAllocatorDefault, + Int(croppedWidth), + Int(pixelBufferHeight), + pixelFormat, + &baseAddressStart[cropStartOffset], + Int(bytesPerRow), + nil, + nil, + options as CFDictionary, + &newBuffer) + + CVPixelBufferUnlockBaseAddress(inputBuffer, .readOnly) + return newBuffer + } + + func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } + + if connection.isVideoOrientationSupported && !Project.lastUsed().header.landscapeMode && connection.videoOrientation != .portrait { + connection.videoOrientation = .portrait + return + } + + if connection.isVideoOrientationSupported && Project.lastUsed().header.landscapeMode && connection.videoOrientation != .landscapeRight { + connection.videoOrientation = .landscapeRight + return + } + + let newBuffer = self.cropVideoBuffer(inputBuffer: pixelBuffer) + + var orientation = CGImagePropertyOrientation.up + if cameraPosition() == .front { + orientation = .upMirrored + } + + let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: newBuffer, orientation: orientation, options: [:]) + var detectionRequests: [VNRequest] = [] + + if faceDetectionEnabled { + let faceDetectionRequest = VNDetectFaceLandmarksRequest { request, _ in + if let faceObservations = request.results as? [VNFaceObservation] { + DispatchQueue.main.async { + self.handleDetectedFaceObservations(faceObservations) + } + } + } + detectionRequests.append(faceDetectionRequest) + } + + if #available(iOS 13.0, *) { + if textRecognitionEnabled { + let textDetectionRequest = VNRecognizeTextRequest { request, _ in + if let textObservations = request.results as? [VNRecognizedTextObservation] { + DispatchQueue.main.async { + self.handleTextObservations(textObservations) + } + } + } + detectionRequests.append(textDetectionRequest) + } + } + + if bodyPoseDetectionEnabled { + if #available(iOS 14.0, *) { + let humanBodyPoseRequest = VNDetectHumanBodyPoseRequest { request, _ in + if let bodyPoseObservation = request.results as? [VNHumanBodyPoseObservation] { + DispatchQueue.main.async { + self.handleHumanBodyPoseObservations(bodyPoseObservation) + } + } + } + detectionRequests.append(humanBodyPoseRequest) + } else { + if let bodyPoseDetectionModel = bodyPoseDetectionModel { + let bodyPoseDetectionRequest = VNCoreMLRequest(model: bodyPoseDetectionModel) { request, _ in + if let bodyPoseObservations = request.results as? [VNCoreMLFeatureValueObservation] { + self.handleHumanBodyPoseObservations(bodyPoseObservations) + } + } + bodyPoseDetectionRequest.imageCropAndScaleOption = .scaleFill + detectionRequests.append(bodyPoseDetectionRequest) + } + } + } + + if #available(iOS 14.0, *) { + if handPoseDetectionEnabled { + let humanHandPoseRequest = VNDetectHumanHandPoseRequest { request, _ in + if let handPoseObservations = request.results as? [VNHumanHandPoseObservation] { + DispatchQueue.main.async { + self.handleHumanHandPoseObservations(handPoseObservations) + } + } + } + humanHandPoseRequest.maximumHandCount = VisualDetectionManager.maxHandCount + detectionRequests.append(humanHandPoseRequest) + } + } + + if objectRecognitionEnabled { + if let objectRecognitionModel = objectRecognitionModel { + let objectRecognitionRequest = VNCoreMLRequest(model: objectRecognitionModel) { request, _ in + if let objectObservations = request.results as? 
[VNRecognizedObjectObservation] { + DispatchQueue.main.async { + self.handleDetectedObjectObservations(objectObservations) + } + } + } + detectionRequests.append(objectRecognitionRequest) + } + } + + do { + try imageRequestHandler.perform(detectionRequests) + } catch let error as NSError { + print(error) + } + } + + func cameraPosition() -> AVCaptureDevice.Position { + (CameraPreviewHandler.shared()?.getCameraPosition())! + } + + private func camera(for cameraPosition: AVCaptureDevice.Position) -> AVCaptureDevice? { + AVCaptureDevice.DiscoverySession.init(deviceTypes: [SpriteKitDefines.avCaptureDeviceType], mediaType: .video, position: cameraPosition).devices.first + } +} diff --git a/src/Catty/PlayerEngine/Sensors/Camera/FaceDetectedSensor.swift b/src/Catty/PlayerEngine/Sensors/Face/FaceDetectedSensor.swift similarity index 100% rename from src/Catty/PlayerEngine/Sensors/Camera/FaceDetectedSensor.swift rename to src/Catty/PlayerEngine/Sensors/Face/FaceDetectedSensor.swift diff --git a/src/Catty/PlayerEngine/Sensors/Camera/FacePositionXSensor.swift b/src/Catty/PlayerEngine/Sensors/Face/FacePositionXSensor.swift similarity index 100% rename from src/Catty/PlayerEngine/Sensors/Camera/FacePositionXSensor.swift rename to src/Catty/PlayerEngine/Sensors/Face/FacePositionXSensor.swift diff --git a/src/Catty/PlayerEngine/Sensors/Camera/FacePositionYSensor.swift b/src/Catty/PlayerEngine/Sensors/Face/FacePositionYSensor.swift similarity index 100% rename from src/Catty/PlayerEngine/Sensors/Camera/FacePositionYSensor.swift rename to src/Catty/PlayerEngine/Sensors/Face/FacePositionYSensor.swift diff --git a/src/Catty/PlayerEngine/Sensors/Camera/FaceSizeSensor.swift b/src/Catty/PlayerEngine/Sensors/Face/FaceSizeSensor.swift similarity index 100% rename from src/Catty/PlayerEngine/Sensors/Camera/FaceSizeSensor.swift rename to src/Catty/PlayerEngine/Sensors/Face/FaceSizeSensor.swift diff --git a/src/Catty/PlayerEngine/Sensors/Camera/SecondFaceDetectedSensor.swift b/src/Catty/PlayerEngine/Sensors/Face/SecondFaceDetectedSensor.swift similarity index 100% rename from src/Catty/PlayerEngine/Sensors/Camera/SecondFaceDetectedSensor.swift rename to src/Catty/PlayerEngine/Sensors/Face/SecondFaceDetectedSensor.swift diff --git a/src/Catty/PlayerEngine/Sensors/Camera/SecondFacePositionXSensor.swift b/src/Catty/PlayerEngine/Sensors/Face/SecondFacePositionXSensor.swift similarity index 100% rename from src/Catty/PlayerEngine/Sensors/Camera/SecondFacePositionXSensor.swift rename to src/Catty/PlayerEngine/Sensors/Face/SecondFacePositionXSensor.swift diff --git a/src/Catty/PlayerEngine/Sensors/Camera/SecondFacePositionYSensor.swift b/src/Catty/PlayerEngine/Sensors/Face/SecondFacePositionYSensor.swift similarity index 100% rename from src/Catty/PlayerEngine/Sensors/Camera/SecondFacePositionYSensor.swift rename to src/Catty/PlayerEngine/Sensors/Face/SecondFacePositionYSensor.swift diff --git a/src/Catty/PlayerEngine/Sensors/Camera/SecondFaceSizeSensor.swift b/src/Catty/PlayerEngine/Sensors/Face/SecondFaceSizeSensor.swift similarity index 100% rename from src/Catty/PlayerEngine/Sensors/Camera/SecondFaceSizeSensor.swift rename to src/Catty/PlayerEngine/Sensors/Face/SecondFaceSizeSensor.swift diff --git a/src/Catty/Resources/Licenses/PoseNet.license b/src/Catty/Resources/Licenses/PoseNet.license new file mode 100644 index 0000000000..47d37b6a40 --- /dev/null +++ b/src/Catty/Resources/Licenses/PoseNet.license @@ -0,0 +1,22 @@ +PoseNet +MIT License + +Copyright (c) 2016 Kent Sommer + +Permission is hereby granted, free of 
charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/src/Catty/Resources/MLModels/PoseNet.mlmodel b/src/Catty/Resources/MLModels/PoseNet.mlmodel
new file mode 100644
index 0000000000..28451cb457
Binary files /dev/null and b/src/Catty/Resources/MLModels/PoseNet.mlmodel differ
diff --git a/src/Catty/Settings.bundle/Acknowledgements.plist b/src/Catty/Settings.bundle/Acknowledgements.plist
index 38691857b5..8df07db755 100644
--- a/src/Catty/Settings.bundle/Acknowledgements.plist
+++ b/src/Catty/Settings.bundle/Acknowledgements.plist
@@ -108,6 +108,12 @@
 			<key>FooterText</key>
 			<string>Catty/Resources/Licenses/OrderedDictionary</string>
 		</dict>
+		<dict>
+			<key>Type</key>
+			<string>PSGroupSpecifier</string>
+			<key>FooterText</key>
+			<string>Catty/Resources/Licenses/PoseNet</string>
+		</dict>
 		<dict>
 			<key>Type</key>
 			<string>PSGroupSpecifier</string>
diff --git a/src/Catty/Settings.bundle/en.lproj/Acknowledgements.strings b/src/Catty/Settings.bundle/en.lproj/Acknowledgements.strings
index 5670669883..5274299263 100644
--- a/src/Catty/Settings.bundle/en.lproj/Acknowledgements.strings
+++ b/src/Catty/Settings.bundle/en.lproj/Acknowledgements.strings
@@ -15,6 +15,7 @@
 "Catty/Resources/Licenses/NKOColorPickerView" = "NKOColorPickerView The MIT License (MIT) Copyright (C) 2014 Carlos Vidal Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE LICENSE ";
 "Catty/Resources/Licenses/NSString+FastImageSize" = "NSString+FastImageSize MIT License Copyright (c) 2014 Daniel Cohen Gindi (danielgindi@gmail.com) The MIT License (MIT) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ";
 "Catty/Resources/Licenses/OrderedDictionary" = "OrderedDictionary Copyright 2008 Matt Gallagher. All rights reserved. This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. ";
+"Catty/Resources/Licenses/PoseNet" = "PoseNet MIT License Copyright (c) 2016 Kent Sommer Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.";
 "Catty/Resources/Licenses/SPUserResizableView" = "SPUserResizableView MIT License Copyright (c) 2012 Stephen Poletto Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ";
 "Catty/Resources/Licenses/SSZipArchive" = "SSZipArchive MIT License Copyright (c) 2010-2014 Sam Soffes, http://soff.es Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ";
 "Catty/Resources/Licenses/ScratchSampleInstruments" = "Scratch Sample Instruments Copyright (c) 2016, Massachusetts Institute of Technology All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.";
diff --git a/src/Catty/Setup/CatrobatSetup+Sensors.swift b/src/Catty/Setup/CatrobatSetup+Sensors.swift
index 441de8ee25..50473c1928 100644
--- a/src/Catty/Setup/CatrobatSetup+Sensors.swift
+++ b/src/Catty/Setup/CatrobatSetup+Sensors.swift
@@ -126,19 +126,21 @@ extension CatrobatSetup {
         if #available(iOS 14.0, *) {
             sensorArray.append(NeckXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
             sensorArray.append(NeckYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftShoulderXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftShoulderYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightShoulderXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightShoulderYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftElbowXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftElbowYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightElbowXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightElbowYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftWristXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftWristYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightWristXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightWristYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        }
+        sensorArray.append(LeftShoulderXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftShoulderYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightShoulderXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightShoulderYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftElbowXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftElbowYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightElbowXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightElbowYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftWristXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftWristYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightWristXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightWristYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        if #available(iOS 14.0, *) {
             sensorArray.append(LeftPinkyKnuckleXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
             sensorArray.append(LeftPinkyKnuckleYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
             sensorArray.append(RightPinkyKnuckleXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
@@ -159,21 +161,21 @@ extension CatrobatSetup {
             sensorArray.append(LeftThumbKnuckleYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
             sensorArray.append(RightThumbKnuckleXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
             sensorArray.append(RightThumbKnuckleYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-
-            sensorArray.append(LeftHipXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftHipYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightHipXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightHipYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftKneeXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftKneeYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightKneeXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightKneeYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftAnkleXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(LeftAnkleYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightAnkleXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
-            sensorArray.append(RightAnkleYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
         }
+        sensorArray.append(LeftHipXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftHipYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightHipXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightHipYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftKneeXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftKneeYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightKneeXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightKneeYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftAnkleXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(LeftAnkleYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightAnkleXSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        sensorArray.append(RightAnkleYSensor(stageSize: stageSize, visualDetectionManagerGetter: { visualDetectionManager }))
+        if #available(iOS 13.0, *) {
             sensorArray.append(TextFromCameraSensor(visualDetectionManagerGetter: { visualDetectionManager }))
             sensorArray.append(TextBlocksNumberSensor(visualDetectionManagerGetter: { visualDetectionManager }))
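
The two CatrobatSetup+Sensors.swift hunks above apply one pattern: the body-pose sensors (shoulder, elbow, wrist, hip, knee, ankle) move out of the `if #available(iOS 14.0, *)` guard, which is consistent with the PoseNet.mlmodel added by this patch serving body pose via Core ML instead of an iOS-14-only Vision request, while the hand-pose sensors stay behind the iOS 14 check (Vision's VNDetectHumanHandPoseRequest is iOS 14+) and the text sensors behind the iOS 13 check (VNRecognizeTextRequest is iOS 13+). A minimal sketch of that gating pattern follows; the `Sensor` stand-in types and the `registeredPoseSensors` helper are illustrative names, not taken from the patch:

    import Foundation

    // Stand-in sensor types; the real project wires each sensor to the
    // VisualDetectionManager via stageSize and a getter closure.
    protocol Sensor {}
    struct BodyPoseSensor: Sensor {}   // e.g. LeftShoulderXSensor - PoseNet-backed, no OS gate
    struct HandPoseSensor: Sensor {}   // e.g. LeftPinkyKnuckleXSensor - Vision hand pose, iOS 14+
    struct TextSensor: Sensor {}       // e.g. TextFromCameraSensor - Vision text recognition, iOS 13+

    func registeredPoseSensors() -> [Sensor] {
        var sensors = [Sensor]()
        // Body-pose sensors are appended unconditionally: a bundled Core ML
        // model does not depend on the iOS 14 Vision body-pose API.
        sensors.append(BodyPoseSensor())
        if #available(iOS 14.0, *) {
            // Hand-pose detection requires VNDetectHumanHandPoseRequest (iOS 14+).
            sensors.append(HandPoseSensor())
        }
        if #available(iOS 13.0, *) {
            // Text recognition requires VNRecognizeTextRequest (iOS 13+).
            sensors.append(TextSensor())
        }
        return sensors
    }

On older systems the gated appends are simply skipped, so the sensor list degrades gracefully instead of failing at launch.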